xtask: Fix docs generation

Publish user/kernel space docs separately.
Add an index.html at the root for navigation.
Ensure that search engines don't index these pages.

Signed-off-by: Dave Tucker <dave@dtucker.co.uk>
pull/320/head
Dave Tucker 2 years ago
parent 713cd4e858
commit 67951cd2d7

2
.gitignore vendored

@ -3,3 +3,5 @@ target/
libbpf/ libbpf/
.vscode/ .vscode/
!.vscode/settings.json !.vscode/settings.json
site/
header.html

@ -1,7 +1,3 @@
[build] [build]
publish = "target/doc" publish = "site"
command = "rustup toolchain install nightly && cargo xtask docs" command = "rustup toolchain install nightly && cargo xtask docs"
[[redirects]]
from = "/"
to = "/aya"

@ -12,3 +12,4 @@ syn = "1"
quote = "1" quote = "1"
proc-macro2 = "1" proc-macro2 = "1"
indexmap = "1.6" indexmap = "1.6"
indoc = "1.0"

@ -5,9 +5,31 @@ use std::{
use std::{fs, io, io::Write}; use std::{fs, io, io::Write};
use indoc::indoc;
pub fn docs() -> Result<(), anyhow::Error> { pub fn docs() -> Result<(), anyhow::Error> {
let mut working_dir = PathBuf::from("."); let mut working_dir = PathBuf::from(".");
let replace = Command::new("sed")
.current_dir(&working_dir)
.args(vec![
"-i.bak",
"s/crabby.svg/crabby_dev.svg/",
"aya/src/lib.rs",
])
.status()
.expect("failed to replace logo");
assert!(replace.success());
let mut header_path = PathBuf::from(".");
header_path.push("header.html");
let mut header = fs::File::create(&header_path).expect("can't create header.html");
header
.write(r#"<meta name="robots" content="noindex">"#.as_bytes())
.expect("can't write header.html contents");
header.flush().expect("couldn't flush contents");
let abs_header_path = fs::canonicalize(&header_path).unwrap();
let args = vec![ let args = vec![
"+nightly", "+nightly",
"doc", "doc",
@ -18,27 +40,73 @@ pub fn docs() -> Result<(), anyhow::Error> {
let status = Command::new("cargo") let status = Command::new("cargo")
.current_dir(&working_dir) .current_dir(&working_dir)
.env(
"RUSTDOCFLAGS",
format!("--html-in-header {}", abs_header_path.to_str().unwrap()),
)
.args(&args) .args(&args)
.status() .status()
.expect("failed to build aya docs"); .expect("failed to build aya docs");
assert!(status.success()); assert!(status.success());
working_dir.push("bpf"); working_dir.push("bpf");
let replace = Command::new("sed")
.current_dir(&working_dir)
.args(vec![
"-i.bak",
"s/crabby.svg/crabby_dev.svg/",
"aya-bpf/src/lib.rs",
])
.status()
.expect("failed to replace logo");
assert!(replace.success());
let status = Command::new("cargo") let status = Command::new("cargo")
.current_dir(&working_dir) .current_dir(&working_dir)
.env(
"RUSTDOCFLAGS",
format!("--html-in-header {}", abs_header_path.to_str().unwrap()),
)
.args(&args) .args(&args)
.status() .status()
.expect("failed to build aya-bpf docs"); .expect("failed to build aya-bpf docs");
assert!(status.success()); assert!(status.success());
copy_dir_all("./bpf/target/doc", "./target/doc")?; copy_dir_all("./target/doc", "site/user")?;
copy_dir_all("./bpf/target/doc", "site/bpf")?;
let mut robots = fs::File::create("site/robots.txt").expect("can't create robots.txt");
robots
.write(
indoc! {r#"
User-Agent:*
Disallow: /
"#}
.as_bytes(),
)
.expect("can't write robots.txt");
let mut index = fs::File::create("site/index.html").expect("can't create index.html");
index
.write(
indoc! {r#"
<html>
<meta name="robots" content="noindex">
<body>
<ul>
<li><a href="user/aya/index.html">Aya User-space Development Documentation</a></li>
<li><a href="bpf/aya_bpf/index.html">Aya Kernel-space Development Documentation</a></li>
</ul>
</body>
</html>
"#}
.as_bytes(),
)
.expect("can't write index.html");
let crates_js = b"window.ALL_CRATES = [\"aya\", \"aya_bpf\", \"aya_bpf_bindings\", \"aya_bpf_cty\", \"aya_bpf_macros\", \"aya_gen\"];\n"; fs::rename("aya/src/lib.rs.bak", "aya/src/lib.rs").unwrap();
let mut file = fs::File::options() fs::rename("bpf/aya-bpf/src/lib.rs.bak", "bpf/aya-bpf/src/lib.rs").unwrap();
.read(true)
.write(true)
.open("./target/doc/crates.js")?;
file.write_all(crates_js)?;
Ok(()) Ok(())
} }

Loading…
Cancel
Save