document edges #53

Open · wants to merge 15 commits into base: main
6 changes: 6 additions & 0 deletions Justfile
@@ -1,6 +1,12 @@
watch-docs:
cargo +nightly watch -s 'cargo +nightly docs-rs -p scopegraphs && browser-sync start --ss target/x86_64-unknown-linux-gnu/doc -s target/x86_64-unknown-linux-gnu/doc --directory --no-open'

build-docs:
rm -rf /tmp/SG_TARGET*
cargo +nightly docs-rs -p scopegraphs
rm -rf /tmp/SG_TARGET*


publish:
cargo publish -p scopegraphs-render-docs
sleep 10
6 changes: 5 additions & 1 deletion scopegraphs-render-docs/Cargo.toml
@@ -1,7 +1,11 @@
[package]
name = "scopegraphs-render-docs"
version.workspace = true
authors = ["Mike Lubinets <[email protected]>", "Frank Rehberger <[email protected]>", "Jonathan Dönszelmann <[email protected]>"]
authors = [
"Mike Lubinets <[email protected]>",
"Frank Rehberger <[email protected]>",
"Jonathan Dönszelmann <[email protected]>",
]
description = "Derived from Aquamarine, a mermaid.js integration for rustdoc, renders scopegraphs by executing doctests to generate mermaid"
keywords = ["proc_macro", "docs", "rustdoc", "mermaid", "diagram"]
categories = ["visualization", "development-tools::build-utils"]
159 changes: 84 additions & 75 deletions scopegraphs-render-docs/src/attrs.rs
@@ -96,44 +96,67 @@ impl quote::ToTokens for Attrs {
.map(Attr::expect_diagram_entry_text)
.collect::<Vec<_>>();

tokens.extend(quote! {#[doc = "```rust"]});
for i in &diagram {
tokens.extend(quote! {
#[doc = #i]
});
let no_graph = diagram.iter().any(|i| i.contains("no-graph"));

if !diagram
.iter()
.filter(|i| !i.trim().is_empty())
.all(|i| i.trim().starts_with('#'))
&& !diagram.is_empty()
{
tokens.extend(quote! {#[doc = "```rust"]});
let first_nonempty =
diagram.iter().take_while(|i| i.trim().is_empty()).count();
let last_nonempty = diagram
.iter()
.rev()
.take_while(|i| i.trim().is_empty())
.count();

for i in &diagram[first_nonempty..diagram.len() - last_nonempty] {
tokens.extend(quote! {
#[doc = #i]
});
}
tokens.extend(quote! {#[doc = "```"]});
}
tokens.extend(quote! {#[doc = "```"]});

match generate_diagram_rustdoc(&diagram) {
Ok(i) => {
tokens.extend(i);
}
Err(e) => match e {
EvalError::CreateDir(i) => {
emit_error!(
if !no_graph {
match generate_diagram_rustdoc(&diagram) {
Ok(i) => {
tokens.extend(i);
}
Err(e) => match e {
EvalError::CreateDir(i) => {
emit_error!(
Span::call_site(),
"failed to create temporary directory to generate mermaid files {:?}",
i,
);
}
EvalError::WriteProject(i) => {
emit_error!(
}
EvalError::WriteProject(i) => {
emit_error!(
Span::call_site(),
"failed to write project file in temporary directory to generate mermaid files {:?}",
i,
);
}
EvalError::RunCargo(i) => {
emit_error!(Span::call_site(), "error while running cargo {:?}", i);
}
EvalError::ReadDir(i) => {
emit_error!(
Span::call_site(),
"error while looking for output files {:?}",
i
);
}
},
}
EvalError::RunCargo(i) => {
emit_error!(
Span::call_site(),
"error while running cargo {:?}",
i
);
}
EvalError::ReadDir(i) => {
emit_error!(
Span::call_site(),
"error while looking for output files {:?}",
i
);
}
},
}
}
}
// If that happens, then the parsing stage is faulty: doc comments outside of
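The hunk above changes how doctest lines are emitted into rustdoc: a `no-graph` marker skips diagram generation, and the rendered rust code fence is omitted entirely when the diagram is empty or every non-empty line is a hidden (`#`-prefixed) doctest line, with leading and trailing blank lines trimmed otherwise. A minimal standalone sketch of that trimming decision (illustrative only; the `visible_doc_lines` helper is not part of the crate):

```rust
/// Sketch of the trimming logic above: return the slice of lines that should
/// be rendered, or None when the rust code fence should be skipped entirely.
fn visible_doc_lines(diagram: &[String]) -> Option<&[String]> {
    // Skip the fence when the diagram is empty or every non-empty line is hidden.
    let all_hidden = diagram
        .iter()
        .filter(|i| !i.trim().is_empty())
        .all(|i| i.trim().starts_with('#'));
    if diagram.is_empty() || all_hidden {
        return None;
    }
    // Trim leading and trailing blank lines, keeping everything in between.
    let leading = diagram.iter().take_while(|i| i.trim().is_empty()).count();
    let trailing = diagram
        .iter()
        .rev()
        .take_while(|i| i.trim().is_empty())
        .count();
    Some(&diagram[leading..diagram.len() - trailing])
}

fn main() {
    let lines: Vec<String> = ["", "# use scopegraphs::*;", "let x = 1;", ""]
        .iter()
        .map(|s| s.to_string())
        .collect();
    // The blank lines are trimmed; the hidden and visible lines are kept.
    assert_eq!(visible_doc_lines(&lines).unwrap().len(), 2);
}
```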
@@ -171,7 +194,19 @@ impl quote::ToTokens for Attrs {

fn place_mermaid_js() -> io::Result<()> {
let target_dir = std::env::var("CARGO_TARGET_DIR").unwrap_or("./target".to_string());
let docs_dir = Path::new(&target_dir).join("doc");
if !Path::new(&target_dir).exists() {
eprintln!("NO TARGET DIR");
}

let mut docs_dir = Path::new(&target_dir).join("doc");

for i in fs::read_dir(target_dir)?.filter_map(Result::ok) {
if i.path().join("doc").exists() {
docs_dir = i.path().join("doc");
break;
}
}

// extract mermaid module iff rustdoc folder exists already
if docs_dir.exists() {
let static_files_mermaid_dir = docs_dir.join(MERMAID_JS_LOCAL_DIR);
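The change above makes `place_mermaid_js` look one level deeper for the rustdoc output, since `cargo docs-rs` (as used in the Justfile) writes docs under a target-triple subdirectory such as `target/x86_64-unknown-linux-gnu/doc`. A standalone sketch of that lookup using only the standard library (the `find_docs_dir` helper name is illustrative, not part of the crate):

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// Prefer a `doc` directory nested one level below the target dir
/// (e.g. target/<triple>/doc), falling back to the conventional target/doc.
fn find_docs_dir(target_dir: &Path) -> io::Result<PathBuf> {
    let mut docs_dir = target_dir.join("doc");
    for entry in fs::read_dir(target_dir)?.filter_map(Result::ok) {
        let candidate = entry.path().join("doc");
        if candidate.exists() {
            docs_dir = candidate;
            break;
        }
    }
    Ok(docs_dir)
}

fn main() -> io::Result<()> {
    let target = std::env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "./target".to_string());
    println!("docs dir: {}", find_docs_dir(Path::new(&target))?.display());
    Ok(())
}
```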
@@ -232,7 +267,7 @@ const MERMAID_INIT_SCRIPT: &str = r#"
// enable file access in browser.
try {
var rootPath = document
.getElementById(rustdocVarsId)
.getElementsByName(rustdocVarsId)[0]
.attributes[dataRootPathAttr]
.value;
const {
@@ -307,42 +342,10 @@ fn run_code(code: &str) -> Result<Vec<String>, EvalError> {
)
};

fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
fs::create_dir_all(&dst)?;
for entry in fs::read_dir(src)? {
let entry = entry?;
let ty = entry.file_type()?;
if ty.is_dir() {
copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?;
} else {
fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
}
}
Ok(())
}

let code_hash = Uuid::new_v4().to_string();
let out_dir = &temp_dir().join("render-docs").join(&code_hash);
println!("testing in {out_dir:?}");

let sg_target_dir = temp_dir().join(format!("SG_TARGET-{}", code_hash));
if sg_target_dir.exists() {
let _ = fs::remove_dir_all(&sg_target_dir);
}
let _ = copy_dir_all(target_dir, &sg_target_dir);
if sg_target_dir.join("debug").exists() {
let _ = fs::remove_file(sg_target_dir.join("debug").join(".cargo-lock"));
let _ = fs::remove_file(sg_target_dir.join("debug").join(".fingerprint"));
let _ = fs::remove_file(sg_target_dir.join("debug").join("tmp"));
let _ = fs::remove_file(sg_target_dir.join("debug").join("incremental"));
}
if sg_target_dir.join("release").exists() {
let _ = fs::remove_file(sg_target_dir.join("release").join(".cargo-lock"));
let _ = fs::remove_file(sg_target_dir.join("release").join(".fingerprint"));
let _ = fs::remove_file(sg_target_dir.join("release").join("tmp"));
let _ = fs::remove_file(sg_target_dir.join("release").join("incremental"));
}

let cargo = PathBuf::from(std::env::var("CARGO").expect("$CARGO is set during compilation"));

if out_dir.exists() {
@@ -385,7 +388,7 @@ fn documented() {{}}
let mut command = Command::new(cargo);
let command = command
.current_dir(out_dir)
.env("CARGO_TARGET_DIR", &sg_target_dir)
.env("CARGO_TARGET_DIR", &target_dir)
.env_remove("CARGO_MAKEFLAGS")
.arg("test");

@@ -407,7 +410,7 @@ fn documented() {{}}
panic!("build docs didn't work")
}

let _ = fs::remove_dir_all(&sg_target_dir);
// let _ = fs::remove_dir_all(&sg_target_dir);

find_diagrams(out_dir)
}
@@ -529,9 +532,9 @@ fn split_attr_body(ident: &Ident, input: &str, loc: &mut Location) -> Vec<Attr>

let flush_buffer_as_diagram_entry = |ctx: &mut Ctx| {
let s = ctx.buffer.drain(..).join(" ");
if !s.trim().is_empty() {
ctx.attrs.push(Attr::DiagramEntry(ident.clone(), s));
}
// if !s.trim().is_empty() {
ctx.attrs.push(Attr::DiagramEntry(ident.clone(), s));
// }
};

while let Some(token) = tokens.next() {
@@ -563,13 +566,13 @@ fn split_attr_body(ident: &Ident, input: &str, loc: &mut Location) -> Vec<Attr>
}
}

if !ctx.buffer.is_empty() {
if loc.is_inside() {
flush_buffer_as_diagram_entry(&mut ctx);
} else {
flush_buffer_as_doc_comment(&mut ctx);
};
}
// if !ctx.buffer.is_empty() {
if loc.is_inside() {
flush_buffer_as_diagram_entry(&mut ctx);
} else {
flush_buffer_as_doc_comment(&mut ctx);
};
// }

ctx.attrs
}
@@ -684,6 +687,9 @@ mod tests {
fn check(case: TestCase) {
let mut loc = case.location;
let attrs = split_attr_body(&case.ident, case.input, &mut loc);
println!("{attrs:?}");
println!("---");
println!("{:?}", case.expect_attrs);
assert_eq!(loc, case.expect_location);
assert_eq!(attrs, case.expect_attrs);
}
@@ -785,7 +791,10 @@ mod tests {
location: Location::InsideDiagram,
input: "```",
expect_location: Location::OutsideDiagram,
expect_attrs: vec![Attr::DiagramEnd(i())],
expect_attrs: vec![
Attr::DiagramEntry(i(), "".to_string()),
Attr::DiagramEnd(i()),
],
};

check(case)
9 changes: 6 additions & 3 deletions scopegraphs/src/completeness/explicit.rs
@@ -68,9 +68,12 @@
}
}

type GetEdgesResult<'rslv> = EdgesOrDelay<Vec<Scope>, LABEL>
where
Self: 'rslv, LABEL: 'rslv, DATA: 'rslv;
type GetEdgesResult<'rslv>
= EdgesOrDelay<Vec<Scope>, LABEL>
where
Self: 'rslv,
LABEL: 'rslv,
DATA: 'rslv;

fn cmpl_get_edges<'rslv>(
&self,
@@ -109,7 +112,7 @@
}
}

impl<'sg, LABEL: Hash + Eq, DATA> ScopeGraph<'sg, LABEL, DATA, ExplicitClose<LABEL>> {

Check failure (GitHub Actions / clippy) on line 115 in scopegraphs/src/completeness/explicit.rs:
error: the following explicit lifetimes could be elided: 'sg (clippy::needless_lifetimes, https://rust-lang.github.io/rust-clippy/master/index.html#needless_lifetimes)
help: elide the lifetimes:
impl<LABEL: Hash + Eq, DATA> ScopeGraph<'_, LABEL, DATA, ExplicitClose<LABEL>> {
(a toy elision example follows this file's diff)
// TODO: fix this sentence
/// Closes an edge, (i.e., prohibit future new
///
@@ -199,7 +202,7 @@
}
}

impl<'sg, LABEL: Hash + Eq + Copy, DATA> ScopeGraph<'sg, LABEL, DATA, FutureCompleteness<LABEL>> {

Check failure (GitHub Actions / clippy) on line 205 in scopegraphs/src/completeness/explicit.rs:
error: the following explicit lifetimes could be elided: 'sg (clippy::needless_lifetimes, https://rust-lang.github.io/rust-clippy/master/index.html#needless_lifetimes)
help: elide the lifetimes:
impl<LABEL: Hash + Eq + Copy, DATA> ScopeGraph<'_, LABEL, DATA, FutureCompleteness<LABEL>> {
// TODO: update this example to use futures
// TODO: fix this sentence
/// Closes an edge, (i.e., prohibit future new
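Both clippy failures above are the needless_lifetimes lint applied to impl blocks: a lifetime parameter that only names the implemented self type can be replaced with `'_`. A toy illustration of that fix (the `Graph` type here is made up for the example, not the scopegraphs `ScopeGraph` API):

```rust
// Toy example of the clippy::needless_lifetimes fix suggested above.
struct Graph<'a, T>(&'a [T]);

// Flagged form would be: impl<'a, T> Graph<'a, T> { ... }
// Elided form, analogous to what clippy suggests for ScopeGraph:
impl<T> Graph<'_, T> {
    fn node_count(&self) -> usize {
        self.0.len()
    }
}

fn main() {
    let data = [1, 2, 3];
    let g = Graph(&data[..]);
    println!("{}", g.node_count());
}
```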
7 changes: 6 additions & 1 deletion scopegraphs/src/completeness/future.rs
@@ -63,7 +63,12 @@ impl<LABEL: Hash + Eq + Label + Copy, DATA> Completeness<LABEL, DATA>
.cmpl_new_edge(inner_scope_graph, src, lbl, dst)
}

type GetEdgesResult<'rslv> = FutureWrapper<'rslv, Vec<Scope>> where Self: 'rslv, LABEL: 'rslv, DATA: 'rslv;
type GetEdgesResult<'rslv>
= FutureWrapper<'rslv, Vec<Scope>>
where
Self: 'rslv,
LABEL: 'rslv,
DATA: 'rslv;

fn cmpl_get_edges<'rslv>(
&'rslv self,
9 changes: 6 additions & 3 deletions scopegraphs/src/completeness/implicit.rs
@@ -67,9 +67,12 @@ impl<LABEL: Hash + Eq + Label, DATA> Completeness<LABEL, DATA> for ImplicitClose
}
}

type GetEdgesResult<'rslv> = Vec<Scope>
where
Self: 'rslv, LABEL: 'rslv, DATA: 'rslv;
type GetEdgesResult<'rslv>
= Vec<Scope>
where
Self: 'rslv,
LABEL: 'rslv,
DATA: 'rslv;

fn cmpl_get_edges<'rslv>(
&self,
9 changes: 6 additions & 3 deletions scopegraphs/src/completeness/unchecked.rs
@@ -46,9 +46,12 @@ impl<LABEL: Hash + Eq, DATA> Completeness<LABEL, DATA> for UncheckedCompleteness
inner_scope_graph.add_edge(src, lbl, dst)
}

type GetEdgesResult<'rslv> = Vec<Scope>
where
Self: 'rslv, LABEL: 'rslv, DATA: 'rslv;
type GetEdgesResult<'rslv>
= Vec<Scope>
where
Self: 'rslv,
LABEL: 'rslv,
DATA: 'rslv;

fn cmpl_get_edges<'rslv>(
&self,