From ab7023b80186cabf27da5844c05f69cf9dde44fc Mon Sep 17 00:00:00 2001 From: Max Mindlin <35264981+maxmindlin@users.noreply.github.com> Date: Fri, 9 Aug 2024 19:12:33 -0400 Subject: [PATCH] json repr (#8) * json repr * update dist * align versions * align versions --- .github/workflows/release.yml | 72 ++++++----- Cargo.lock | 71 +++++++---- Cargo.toml | 13 +- scout-interpreter/Cargo.toml | 1 + scout-interpreter/src/builtin.rs | 12 ++ scout-interpreter/src/eval.rs | 6 +- scout-interpreter/src/import.rs | 26 ++-- scout-interpreter/src/lib.rs | 16 +++ scout-json/Cargo.toml | 12 ++ scout-json/src/lib.rs | 139 ++++++++++++++++++++++ scout-parser/src/ast.rs | 197 ++++++++++++++++++++++++++++++- scout-parser/src/lib.rs | 22 ++-- scout-worker/Cargo.toml | 2 +- 13 files changed, 499 insertions(+), 90 deletions(-) create mode 100644 scout-json/Cargo.toml create mode 100644 scout-json/src/lib.rs diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c816275..d9f9b3d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,9 +12,8 @@ # title/body based on your changelogs. name: Release - permissions: - contents: write + "contents": "write" # This task will run whenever you push a git tag that looks like a version # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. @@ -62,7 +61,12 @@ jobs: # we specify bash to get pipefail; it guards against the `curl` command # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.1/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.20.0/cargo-dist-installer.sh | sh" + - name: Cache cargo-dist + uses: actions/upload-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/cargo-dist # sure would be cool if github gave us proper conditionals... # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible # functionality based on whether this is a pull_request, and whether it's from a fork. @@ -111,9 +115,6 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - uses: swatinem/rust-cache@v2 - with: - key: ${{ join(matrix.targets, '-') }} - name: Install cargo-dist run: ${{ matrix.install_dist }} # Get the dist-manifest @@ -165,9 +166,12 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - name: Install cargo-dist - shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.1/cargo-dist-installer.sh | sh" + - name: Install cached cargo-dist + uses: actions/download-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/ + - run: chmod +x ~/.cargo/bin/cargo-dist # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) - name: Fetch local artifacts uses: actions/download-artifact@v4 @@ -211,8 +215,12 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - name: Install cargo-dist - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.15.1/cargo-dist-installer.sh | sh" + - name: Install cached cargo-dist + uses: actions/download-artifact@v4 + with: + name: cargo-dist-cache + path: ~/.cargo/bin/ + - run: chmod +x ~/.cargo/bin/cargo-dist # Fetch artifacts from scratch-storage - name: Fetch artifacts uses: actions/download-artifact@v4 @@ -220,7 +228,6 @@ jobs: pattern: artifacts-* path: target/distrib/ merge-multiple: true - # This is a harmless no-op for GitHub Releases, hosting for that happens in "announce" - id: host shell: bash run: | @@ -234,8 +241,29 @@ jobs: # Overwrite the previous copy name: artifacts-dist-manifest path: dist-manifest.json + # Create a GitHub Release while uploading all files to it + - name: "Download GitHub Artifacts" + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: artifacts + merge-multiple: true + - name: Cleanup + run: | + # Remove the granular manifests + rm -f artifacts/*-dist-manifest.json + - name: Create GitHub Release + env: + PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" + ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" + ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" + RELEASE_COMMIT: "${{ github.sha }}" + run: | + # Write and read notes from a file to avoid quoting breaking things + echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt + + gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* - # Create a GitHub Release while uploading all files to it announce: needs: - plan @@ -251,21 +279,3 @@ jobs: - uses: actions/checkout@v4 with: submodules: recursive - - name: "Download GitHub Artifacts" - uses: actions/download-artifact@v4 - with: - pattern: artifacts-* - path: artifacts - merge-multiple: true - - name: Cleanup - run: | - # Remove the granular manifests - rm -f artifacts/*-dist-manifest.json - - name: Create GitHub Release - uses: ncipollo/release-action@v1 - with: - tag: ${{ needs.plan.outputs.tag }} - name: ${{ fromJson(needs.host.outputs.val).announcement_title }} - body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} - prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} - artifacts: "artifacts/*" diff --git a/Cargo.lock b/Cargo.lock index baa42e8..8a9bf68 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -95,16 +95,16 @@ dependencies = [ [[package]] name = "actix-server" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b02303ce8d4e8be5b855af6cf3c3a08f3eff26880faad82bab679c22d3650cb5" +checksum = "7ca2549781d8dd6d75c40cf6b6051260a2cc2f3c62343d761a969a0640646894" dependencies = [ "actix-rt", "actix-service", "actix-utils", "futures-core", "futures-util", - "mio", + "mio 1.0.1", "socket2 0.5.7", "tokio", "tracing", @@ -1460,7 +1460,7 @@ dependencies = [ "http 1.1.0", "hyper 1.4.1", "hyper-util", - "rustls 0.23.5", + "rustls 0.23.11", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", @@ -1836,11 +1836,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", - "log", "wasi", "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "log", + "wasi", + "windows-sys 0.52.0", +] + [[package]] name = "native-tls" version = "0.2.11" @@ -2395,9 +2407,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.5" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -2568,9 +2580,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.5" +version = "0.23.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afabcee0551bd1aa3e18e5adbf2c0544722014b899adb31bd186ec638d3da97e" +checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0" dependencies = [ "once_cell", "rustls-pki-types", @@ -2682,6 +2694,7 @@ dependencies = [ "get-port", "image", "reqwest", + "scout-json", "scout-lexer", "scout-parser", "serde", @@ -2690,6 +2703,16 @@ dependencies = [ "url", ] +[[package]] +name = "scout-json" +version = "0.6.0" +dependencies = [ + "scout-parser", + "serde", + "serde_json", + "test-case", +] + [[package]] name = "scout-lexer" version = "0.6.0" @@ -2707,7 +2730,7 @@ dependencies = [ [[package]] name = "scout-worker" -version = "0.1.0" +version = "0.6.0" dependencies = [ "actix-web", "futures-lite 2.3.0", @@ -3124,7 +3147,7 @@ dependencies = [ "backtrace", "bytes", "libc", - "mio", + "mio 0.8.11", "num_cpus", "parking_lot", "pin-project-lite", @@ -3172,7 +3195,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.5", + "rustls 0.23.11", "rustls-pki-types", "tokio", ] @@ -3192,9 +3215,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.16" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81967dd0dd2c1ab0bc3468bd7caecc32b8a4aa47d0c8c695d8c2b2108168d62c" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", @@ -3204,18 +3227,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8fb9f64314842840f1d940ac544da178732128f1c78c21772e876579e0da1db" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.17" +version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9f8729f5aea9562aac1cc0441f5d6de3cff1ee0c5d67293eeca5eb36ee7c16" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap", "serde", @@ -3747,9 +3770,9 @@ checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" -version = "0.6.11" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d" 
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -3801,18 +3824,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.2.0" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.12+zstd.1.5.6" +version = "2.0.13+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e40c320c3cb459d9a9ff6de98cff88f4751ee9275d140e2be94a2b74e4c13" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index d0a7cc2..eddce57 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,25 +1,22 @@ [workspace] -members = ["scout-interpreter", "scout-lexer", "scout-parser", "scout-worker"] +members = ["scout-interpreter", "scout-json", "scout-lexer", "scout-parser", "scout-worker"] # Config for 'cargo dist' [workspace.metadata.dist] # The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.15.1" +cargo-dist-version = "0.20.0" # CI backends to support ci = "github" # The installers to generate for each app installers = ["shell"] # Target platforms to build apps for (Rust target-triple syntax) -targets = [ - "aarch64-apple-darwin", - "x86_64-apple-darwin", - "x86_64-unknown-linux-gnu", - "x86_64-pc-windows-msvc", -] +targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] # Publish jobs to run in CI pr-run-mode = "plan" # Whether to install an updater program install-updater = false +# Path that installers should place binaries in +install-path = "CARGO_HOME" [[bin]] name = "scout" diff --git a/scout-interpreter/Cargo.toml b/scout-interpreter/Cargo.toml index be24fa3..648817c 100644 --- a/scout-interpreter/Cargo.toml +++ b/scout-interpreter/Cargo.toml @@ -18,6 +18,7 @@ keywords = [ [dependencies] scout-parser = { version = "0.6.0", path = "../scout-parser/" } +scout-json = { version = "0.6", path = "../scout-json" } fantoccini = "0.19.3" futures = "0.3.30" serde = { version = "1.0", features = ["derive"] } diff --git a/scout-interpreter/src/builtin.rs b/scout-interpreter/src/builtin.rs index 45fa2e4..7a50f97 100644 --- a/scout-interpreter/src/builtin.rs +++ b/scout-interpreter/src/builtin.rs @@ -50,6 +50,7 @@ pub enum BuiltinKind { SetCookies, ToJson, HttpRequest, + SetViewport, } impl BuiltinKind { @@ -77,6 +78,7 @@ impl BuiltinKind { "setCookies" => Some(SetCookies), "toJson" => Some(ToJson), "httpRequest" => Some(HttpRequest), + "setViewport" => Some(SetViewport), _ => None, } } @@ -149,6 +151,16 @@ impl BuiltinKind { _ => Err(EvalError::InvalidFnParams), } } + SetViewport => { + assert_param_len!(args, 2); + match (&*args[0], &*args[1]) { + (Object::Number(w), Object::Number(h)) => { + crawler.set_window_size(*w as u32, *h as u32).await?; + Ok(Arc::new(Object::Null)) + } + _ => Err(EvalError::InvalidFnParams), + } + } ToJson => { assert_param_len!(args, 1); let json = args[0].to_json().await; diff --git a/scout-interpreter/src/eval.rs b/scout-interpreter/src/eval.rs index 88e37f0..46bf08a 100644 --- a/scout-interpreter/src/eval.rs +++ b/scout-interpreter/src/eval.rs @@ -216,7 +216,7 @@ fn eval_statement<'a>( StmtKind::Assign(lhs, expr) => { let val = 
eval_expression(expr, crawler, env.clone(), results.clone()).await?; match lhs { - ExprKind::Infix(lhs, TokenKind::LBracket, rhs) => { + ExprKind::Infix(lhs, t, rhs) if t.kind == TokenKind::LBracket => { let r_obj = eval_expression(rhs, crawler, env.clone(), results.clone()).await?; let l_obj = @@ -788,7 +788,7 @@ fn eval_expression<'a>( let l_obj = eval_expression(lhs, crawler, env.clone(), results.clone()).await?; let res = eval_infix( l_obj.clone(), - op, + &op.kind, rhs, crawler, env.clone(), @@ -810,7 +810,7 @@ fn eval_expression<'a>( } ExprKind::Prefix(rhs, op) => { let r_obj = eval_expression(rhs, crawler, env.clone(), results.clone()).await?; - let res = eval_prefix(r_obj, op).await?; + let res = eval_prefix(r_obj, &op.kind).await?; Ok(res) } } diff --git a/scout-interpreter/src/import.rs b/scout-interpreter/src/import.rs index ab4a8de..4434274 100644 --- a/scout-interpreter/src/import.rs +++ b/scout-interpreter/src/import.rs @@ -55,18 +55,22 @@ fn convert_path_buf(buf: PathBuf) -> Result { fn resolve_module_file(module: &ExprKind) -> Result { match module { ExprKind::Ident(ident) => resolve_std_file(ident), - ExprKind::Infix(lhs, TokenKind::DbColon, rhs) => match (lhs.as_ref(), rhs.as_ref()) { - (ExprKind::Ident(base), ExprKind::Ident(file)) => { - let buf = resolve_std_file(base)?.join(&file.name); - Ok(buf) + ExprKind::Infix(lhs, t, rhs) if t.kind == TokenKind::DbColon => { + match (lhs.as_ref(), rhs.as_ref()) { + (ExprKind::Ident(base), ExprKind::Ident(file)) => { + let buf = resolve_std_file(base)?.join(&file.name); + Ok(buf) + } + (l @ ExprKind::Infix(_, t, _), ExprKind::Ident(file)) + if t.kind == TokenKind::DbColon => + { + let base = resolve_module_file(l)?; + let buf = base.join(&file.name); + Ok(buf) + } + _ => Err(EvalError::InvalidImport(ImportError::UnknownModule)), } - (l @ ExprKind::Infix(_, TokenKind::DbColon, _), ExprKind::Ident(file)) => { - let base = resolve_module_file(l)?; - let buf = base.join(&file.name); - Ok(buf) - } - _ => Err(EvalError::InvalidImport(ImportError::UnknownModule)), - }, + } _ => Err(EvalError::InvalidImport(ImportError::UnknownModule)), } } diff --git a/scout-interpreter/src/lib.rs b/scout-interpreter/src/lib.rs index 7789ec0..ea3d251 100644 --- a/scout-interpreter/src/lib.rs +++ b/scout-interpreter/src/lib.rs @@ -6,6 +6,7 @@ use std::{ use env::EnvPointer; use eval::{eval, EvalError, ScrapeResultsPtr}; use object::Object; +use scout_json::ScoutJSON; use scout_lexer::Lexer; use scout_parser::{ast::NodeKind, ParseError, Parser}; use serde::Deserialize; @@ -47,6 +48,7 @@ impl EnvVars { pub enum InterpreterError { EvalError(EvalError), ParserError(ParseError), + InvalidJson, } pub struct GeckDriverProc(Child); @@ -88,6 +90,7 @@ impl Interpreter { _geckodriver_proc: geckodriver_proc, } } + pub async fn eval(&self, content: &str) -> Result, InterpreterError> { let lexer = Lexer::new(content); let mut parser = Parser::new(lexer); @@ -103,6 +106,19 @@ impl Interpreter { } } + pub async fn eval_json(&self, content: &str) -> Result, InterpreterError> { + let ast = serde_json::from_str::(content) + .map_err(|_| InterpreterError::InvalidJson)? + .to_ast(); + Ok(eval( + NodeKind::Program(ast), + &self.crawler, + self.env.clone(), + self.results.clone(), + ) + .await?) 
+    }
+
     pub fn results(&self) -> ScrapeResultsPtr {
         self.results.clone()
     }
diff --git a/scout-json/Cargo.toml b/scout-json/Cargo.toml
new file mode 100644
index 0000000..b43c1f9
--- /dev/null
+++ b/scout-json/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "scout-json"
+version = "0.6.0"
+edition = "2021"
+
+[dependencies]
+scout-parser = { version = "0.6.0", path = "../scout-parser/" }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+
+[dev-dependencies]
+test-case = "3.3.1"
diff --git a/scout-json/src/lib.rs b/scout-json/src/lib.rs
new file mode 100644
index 0000000..345a197
--- /dev/null
+++ b/scout-json/src/lib.rs
@@ -0,0 +1,139 @@
+use scout_parser::ast::{CallLiteral, ExprKind, Identifier, Program, StmtKind};
+use serde::Deserialize;
+
+/// ScoutJSON is a JSON representation of a subset of the Scout AST.
+/// It is modeled after the Google Chrome Recorder API.
+#[derive(Debug, Deserialize)]
+pub struct ScoutJSON {
+    steps: Vec<Step>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(tag = "type")]
+#[serde(rename_all = "camelCase")]
+pub enum Step {
+    SetViewport { width: u32, height: u32 },
+    Navigate { url: String },
+    Click { selectors: Vec<Vec<String>> },
+}
+
+impl ScoutJSON {
+    pub fn to_ast(&self) -> Program {
+        let mut stmts = Vec::new();
+        for step in &self.steps {
+            stmts.push(step.to_stmt());
+        }
+
+        Program { stmts }
+    }
+}
+
+impl Step {
+    pub fn to_stmt(&self) -> StmtKind {
+        use Step::*;
+        match self {
+            SetViewport { width, height } => {
+                let lit = CallLiteral {
+                    ident: Identifier::new("setViewport".to_string()),
+                    args: vec![
+                        ExprKind::Number(*width as f64),
+                        ExprKind::Number(*height as f64),
+                    ],
+                    kwargs: Vec::new(),
+                };
+                StmtKind::Expr(ExprKind::Call(lit))
+            }
+            Navigate { url } => StmtKind::Goto(ExprKind::Str(url.clone())),
+            Click { selectors } => {
+                // By default, Chrome outputs an array and the length depends upon what
+                // outputs are set in the recording. We will assume only CSS is set, as
+                // the others are not usable by Scout yet.
+                // The css value is an array of length 1, ex:
+                //
+                // "selectors": [
+                //   [
+                //     "#question-summary-78853169 h3 > a"
+                //   ]
+                // ]
+                let elem = ExprKind::Select(selectors[0][0].clone(), None);
+                let lit = CallLiteral {
+                    ident: Identifier::new("click".to_string()),
+                    args: vec![elem],
+                    kwargs: Vec::new(),
+                };
+                StmtKind::Expr(ExprKind::Call(lit))
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use test_case::test_case;
+
+    #[test_case(
+        r#"{
+            "type": "navigate",
+            "url": "https://stackoverflow.com/",
+            "assertedEvents": [
+                {
+                    "type": "navigation",
+                    "url": "https://stackoverflow.com/",
+                    "title": ""
+                }
+            ]
+        }"#,
+        StmtKind::Goto(ExprKind::Str("https://stackoverflow.com/".to_string()));
+        "navigate step"
+    )]
+    #[test_case(
+        r##"{
+            "type": "click",
+            "target": "main",
+            "selectors": [
+                [
+                    "#question-summary-78853169 h3 > a"
+                ]
+            ],
+            "offsetY": 2.875,
+            "offsetX": 183,
+            "assertedEvents": [
+                {
+                    "type": "navigation",
+                    "url": "https://stackoverflow.com/questions/78853169/how-can-i-pass-variables-to-svelte-through-csv",
+                    "title": "typescript - How can I pass variables to svelte through CSV - Stack Overflow"
+                }
+            ]
+        }"##,
+        StmtKind::Expr(ExprKind::Call(CallLiteral {
+            ident: Identifier::new("click".to_string()),
+            args: vec![ExprKind::Select("#question-summary-78853169 h3 > a".to_string(), None)],
+            kwargs: Vec::new(),
+        }));
+        "click step"
+    )]
+    #[test_case(
+        r#"{
+            "type": "setViewport",
+            "width": 1365,
+            "height": 945,
+            "deviceScaleFactor": 1,
+            "isMobile": false,
+            "hasTouch": false,
+            "isLandscape": false
+        }"#,
+        StmtKind::Expr(ExprKind::Call(CallLiteral {
+            ident: Identifier::new("setViewport".to_string()),
+            args: vec![
+                ExprKind::Number(1365.),
+                ExprKind::Number(945.),
+            ],
+            kwargs: Vec::new(),
+        }));
+        "setViewport step"
+    )]
+    fn parse_step_json(input: &str, exp: StmtKind) {
+        assert_eq!(exp, serde_json::from_str::<Step>(input).unwrap().to_stmt())
+    }
+}
diff --git a/scout-parser/src/ast.rs b/scout-parser/src/ast.rs
index 8ab8c3e..70dcb80 100644
--- a/scout-parser/src/ast.rs
+++ b/scout-parser/src/ast.rs
@@ -1,6 +1,6 @@
 use std::{collections::HashMap, fmt::Display};
 
-use scout_lexer::TokenKind;
+use scout_lexer::Token;
 
 #[derive(Debug)]
 pub enum NodeKind {
@@ -49,8 +49,8 @@ pub enum ExprKind {
     // Rest
     Call(CallLiteral),
     Chain(Vec<ExprKind>),
-    Infix(Box<ExprKind>, TokenKind, Box<ExprKind>),
-    Prefix(Box<ExprKind>, TokenKind),
+    Infix(Box<ExprKind>, Token, Box<ExprKind>),
+    Prefix(Box<ExprKind>, Token),
 }
 
 #[derive(Debug, PartialEq, Clone)]
@@ -203,3 +203,194 @@ impl Block {
         Self { stmts }
     }
 }
+
+impl std::fmt::Display for FnParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.ident)?;
+        if let Some(default) = &self.default {
+            write!(f, " = {default}")?;
+        }
+
+        Ok(())
+    }
+}
+
+impl std::fmt::Display for FuncDef {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut param_str = String::new();
+        for (idx, param) in self.params.iter().enumerate() {
+            param_str.push_str(param.to_string().as_str());
+            if idx != self.params.len() - 1 {
+                param_str.push_str(", ");
+            }
+        }
+        writeln!(f, "def {}({param_str}) do\n{}\nend", self.ident, self.body)
+    }
+}
+
+impl std::fmt::Display for CallLiteral {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}(", self.ident)?;
+        for (idx, arg) in self.args.iter().enumerate() {
+            write!(f, "{arg}")?;
+            if idx != self.args.len() - 1 {
+                write!(f, ", ")?;
+            }
+        }
+
+        if !self.kwargs.is_empty() {
+            write!(f, ", ")?;
+
+            for (idx, kwarg) in self.kwargs.iter().enumerate() {
+
write!(f, "{kwarg}")?; + if idx != self.kwargs.len() - 1 { + write!(f, ", ")?; + } + } + } + + write!(f, ")") + } +} + +impl std::fmt::Display for Kwarg { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{} = {}", self.ident, self.expr) + } +} + +impl std::fmt::Display for HashLiteral { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{{ ")?; + for (idx, (i, o)) in self.pairs.iter().enumerate() { + write!(f, "{}: {}", i, o)?; + if idx != self.pairs.len() - 1 { + write!(f, ", ")?; + } + } + write!(f, " }}") + } +} + +impl std::fmt::Display for Block { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for stmt in &self.stmts { + write!(f, "{stmt}\n")?; + } + Ok(()) + } +} + +impl std::fmt::Display for ExprKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use ExprKind::*; + match self { + Str(s) => write!(f, r#""{s}""#), + Number(n) => write!(f, "{n}"), + Boolean(b) => write!(f, "{b}"), + Ident(ident) => write!(f, "{ident}"), + List(l) => { + write!(f, "[")?; + for (i, obj) in l.iter().enumerate() { + write!(f, "{obj}")?; + if i != l.len() - 1 { + write!(f, ", ")?; + } + } + + write!(f, "]") + } + Map(hash) => write!(f, "{hash}"), + Null => write!(f, "null"), + Select(s, mb_ident) => match mb_ident { + Some(ident) => write!(f, r#"$({ident})"{s}""#), + None => write!(f, r#"$"{s}""#), + }, + SelectAll(s, mb_ident) => match mb_ident { + Some(ident) => write!(f, r#"$$({ident})"{s}""#), + None => write!(f, r#"$$"{s}""#), + }, + Call(lit) => write!(f, "{lit}"), + Chain(exprs) => { + for (i, expr) in exprs.iter().enumerate() { + write!(f, "{expr}")?; + if i != exprs.len() - 1 { + write!(f, " |> ")?; + } + } + Ok(()) + } + Infix(lhs, op, rhs) => write!(f, "{lhs} {} {rhs}", op.literal), + Prefix(lhs, op) => write!(f, "{lhs} {}", op.literal), + } + } +} + +impl std::fmt::Display for StmtKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use StmtKind::*; + match self { + Assign(lhs, rhs) => write!(f, "{lhs} = {rhs}"), + Crawl(lit) => { + write!(f, "crawl ")?; + + if let Some(bindings) = &lit.bindings { + write!(f, "{}, {} ", bindings.link, bindings.depth)?; + } + + if let Some(filter) = &lit.filter { + write!(f, "where {filter}")?; + } + + write!(f, "do\n{}end\n", lit.body) + } + Expr(expr) => write!(f, "{expr}"), + ForLoop(floop) => { + write!( + f, + "for {} in {} do\n{}end\n", + floop.ident, floop.iterable, floop.block + ) + } + WhileLoop(cond, block) => write!(f, "while {cond} do\n{block}end\n"), + Func(def) => write!(f, "{def}"), + Goto(expr) => write!(f, "goto {expr}"), + IfElse(lit) => { + writeln!(f, "if {} do\n{}", lit.if_lit.cond, lit.if_lit.block)?; + for elif in &lit.elifs { + writeln!(f, "elif {} do\n{}", elif.cond, elif.block)?; + } + if let Some(el) = &lit.else_lit { + writeln!(f, "else\n{}", el.block)?; + } + writeln!(f, "end") + } + Return(mb_expr) => { + write!(f, "return")?; + if let Some(expr) = mb_expr { + write!(f, "{expr}")?; + } + Ok(()) + } + Scrape(hash) => write!(f, "scrape {hash}"), + Screenshot(s) => write!(f, "screenshot {s}"), + TryCatch(t, c) => { + write!(f, "try\n{t}\n")?; + if let Some(catch) = c { + write!(f, "catch\n{catch}\n")?; + } + write!(f, "end\n") + } + Use(expr) => write!(f, "use {expr}"), + } + } +} + +impl std::fmt::Display for Program { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for stmt in &self.stmts { + writeln!(f, "{stmt}")?; + } + Ok(()) + } +} diff --git 
a/scout-parser/src/lib.rs b/scout-parser/src/lib.rs index 9bc9763..df63199 100644 --- a/scout-parser/src/lib.rs +++ b/scout-parser/src/lib.rs @@ -289,7 +289,7 @@ impl Parser { } fn parse_prefix(&mut self) -> ParseResult { - let op = self.curr.kind; + let op = self.curr.clone(); self.next_token(); let expr = self.parse_expr(Precedence::Lowest)?; Ok(ExprKind::Prefix(Box::new(expr), op)) @@ -479,7 +479,7 @@ impl Parser { fn parse_infix(&mut self, lhs: ExprKind) -> ParseResult { // self.next_token(); - let op = self.curr.kind; + let op = self.curr.clone(); let prec = self.curr_precedence(); self.next_token(); let rhs = self.parse_expr(prec)?; @@ -734,7 +734,7 @@ mod tests { r#"x = 1 == 2"#, StmtKind::Assign( ExprKind::Ident(Identifier::new("x".to_string())), - ExprKind::Infix(Box::new(ExprKind::Number(1.)), TokenKind::EQ, Box::new(ExprKind::Number(2.))) + ExprKind::Infix(Box::new(ExprKind::Number(1.)), Token::new(TokenKind::EQ, "==".to_string()), Box::new(ExprKind::Number(2.))) ); "assign eq infix" )] #[test_case( @@ -744,7 +744,7 @@ mod tests { Box::new( ExprKind::Ident(Identifier::new("a".into())) ), - TokenKind::LBracket, + Token::new(TokenKind::LBracket, "[".to_string()), Box::new( ExprKind::Number(0.) ) @@ -870,7 +870,7 @@ mod tests { StmtKind::Expr( ExprKind::Infix( Box::new(ExprKind::Ident(Identifier::new("a".into()))), - TokenKind::LBracket, + Token::new(TokenKind::LBracket, "[".to_string()), Box::new(ExprKind::Number(0.)) ) ); "index" @@ -887,14 +887,18 @@ mod tests { link: Identifier::new("link".into()), depth: Identifier::new("depth".into()) }), - Some(ExprKind::Infix(Box::new(ExprKind::Ident(Identifier::new("depth".into()))), TokenKind::LT, Box::new(ExprKind::Number(1.)))), + Some(ExprKind::Infix( + Box::new(ExprKind::Ident(Identifier::new("depth".into()))), + Token::new(TokenKind::LT, "<".to_string()), + Box::new(ExprKind::Number(1.)) + )), Block::default() ) ); "crawl stmt with bindings" )] #[test_case( "!true", - StmtKind::Expr(ExprKind::Prefix(Box::new(ExprKind::Boolean(true)), TokenKind::Bang,)); "bang prefix" + StmtKind::Expr(ExprKind::Prefix(Box::new(ExprKind::Boolean(true)), Token::new(TokenKind::Bang, "!".to_string()))); "bang prefix" )] #[test_case( "a::b", @@ -903,7 +907,7 @@ mod tests { Box::new( ExprKind::Ident(Identifier::new("a".into())) ), - TokenKind::DbColon, + Token::new(TokenKind::DbColon, "::".to_string()), Box::new( ExprKind::Ident(Identifier::new("b".into())) ) @@ -917,7 +921,7 @@ mod tests { Box::new( ExprKind::Ident(Identifier::new("a".into())) ), - TokenKind::LT, + Token::new(TokenKind::LT, "<".to_string()), Box::new( ExprKind::Number(1.) ) diff --git a/scout-worker/Cargo.toml b/scout-worker/Cargo.toml index ef11991..83ccb74 100644 --- a/scout-worker/Cargo.toml +++ b/scout-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "scout-worker" -version = "0.1.0" +version = "0.6.0" edition = "2021" [dependencies]
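
Note (not part of the patch): a minimal usage sketch of the new scout-json crate, assuming a consumer crate that depends on scout-json and serde_json. The step shapes are taken from the test cases in this patch; the main wrapper and the printed output are illustrative only. Interpreter::eval_json follows the same from_str::<ScoutJSON>(..).to_ast() path internally before evaluating the program.

    use scout_json::ScoutJSON;

    fn main() -> Result<(), serde_json::Error> {
        // Two steps in the Chrome Recorder JSON shape handled by this patch.
        let recording = r#"{
            "steps": [
                { "type": "setViewport", "width": 1365, "height": 945 },
                { "type": "navigate", "url": "https://stackoverflow.com/" }
            ]
        }"#;

        // Deserialize the recording and convert it into a Scout AST.
        let program = serde_json::from_str::<ScoutJSON>(recording)?.to_ast();

        // The new Display impls in scout-parser render the AST back as Scout
        // source; this should print roughly:
        //   setViewport(1365, 945)
        //   goto "https://stackoverflow.com/"
        println!("{program}");
        Ok(())
    }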