Add parallel garbage collection test runner (#6665)
## Description
Adds a comprehensive test suite that exercises garbage collection across
all language test examples. The test:

- Runs each example in its own isolated process to prevent cross-contamination (see the sketch after this list)
- Uses rayon for parallel execution, reducing total test time to ~60s
for all 267 tests
- Provides a clear summary of passing/failing tests with specific error
cases
- Continues running all tests even when individual tests fail
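
As a rough illustration of the isolation-plus-parallelism pattern described above (the helper name and the `projects` argument are placeholders; the real implementation lives in the `sway-lsp/tests/lib.rs` diff below):

```rust
use rayon::prelude::*;
use std::{path::PathBuf, process::Command, sync::Mutex};

/// Sketch: run each project's test in its own child process, in parallel.
fn run_isolated(projects: &[PathBuf]) -> Vec<(PathBuf, bool)> {
    let results = Mutex::new(Vec::new());
    projects.par_iter().for_each(|main_file| {
        // Re-invoke the current test binary for a single project so that a
        // panic or hang in one example cannot take down the whole suite.
        let status = Command::new(std::env::current_exe().unwrap())
            .args(["--test", "test_single_project", "--exact", "--nocapture"])
            .env("TEST_FILE", main_file.to_string_lossy().to_string())
            .status()
            .unwrap();
        // Record pass/fail based on the child's exit status.
        results.lock().unwrap().push((main_file.clone(), status.success()));
    });
    results.into_inner().unwrap()
}
```

Each child reports back through its exit status, so a single summary can be printed after every project has run.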

The test identified 26 examples that need investigation for proper
garbage collection handling, particularly around:
- Constants and their declarations
- Module dependencies
- Type system (arrays, slices, strings)
- Associated constants
- References and self implementations

By running tests in isolation, we can now identify all problematic cases
rather than stopping at the first failure.

The current projects that have garbage collection failures are:

```
associated_const_trait_const
references
reexport
main_args
fixing_generic_type
shadowing
far_jumps
str_slice
match_expressions
slice
const_decl_literal
raw_ptr
import_star_name_clash
module_dep_self
module_dep_multiple
self_impl_reassignment
const_decl
associated_const_abi_default
string_slice
fix_opcode_bug
tuple_field_reassignment
const_decl_with_call_path
module_dep
array_basics
u256
associated_const_impl_self_order
```

Note: I've temporarily disabled the `run_all_garbage_collection_tests` test
so this PR can be reviewed and pass CI. #6613 can then rebase and
re-enable the test to confirm that it makes these failing tests pass.

## Checklist

- [ ] I have linked to any relevant issues.
- [x] I have commented my code, particularly in hard-to-understand
areas.
- [ ] I have updated the documentation where relevant (API docs, the
reference, and the Sway book).
- [ ] If my change requires substantial documentation changes, I have
[requested support from the DevRel
team](https://github.com/FuelLabs/devrel-requests/issues/new/choose)
- [x] I have added tests that prove my fix is effective or that my
feature works.
- [ ] I have added (or requested a maintainer to add) the necessary
`Breaking*` or `New Feature` labels where relevant.
- [x] I have done my best to ensure that my PR adheres to [the Fuel Labs
Code Review
Standards](https://github.com/FuelLabs/rfcs/blob/master/text/code-standards/external-contributors.md).
- [x] I have requested a review from the relevant team or maintainers.
JoshuaBatty authored Oct 25, 2024
1 parent 058f4e2 commit 6b05844
Showing 2 changed files with 173 additions and 63 deletions.
213 changes: 165 additions & 48 deletions sway-lsp/tests/lib.rs
@@ -4,7 +4,8 @@ pub mod integration;

use crate::integration::{code_actions, lsp};
use lsp_types::*;
-use std::{fs, path::PathBuf};
+use rayon::prelude::*;
+use std::{fs, panic, path::PathBuf, process::Command, sync::Mutex};
use sway_lsp::{
    config::LspClient,
    handlers::{notification, request},
@@ -69,7 +70,7 @@ async fn init_and_open(service: &mut LspService<ServerState>, entry_point: PathB
    uri
}

-async fn shutdown_and_exit(service: &mut LspService<ServerState>) {
+pub async fn shutdown_and_exit(service: &mut LspService<ServerState>) {
    let _ = lsp::shutdown_request(service).await;
    lsp::exit_notification(service).await;
}
@@ -257,52 +258,6 @@ fn did_change_stress_test_random_wait() {
});
}

fn garbage_collection_runner(path: PathBuf) {
run_async!({
setup_panic_hook();
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, path).await;
let times = 60;

// Initialize cursor position
let mut cursor_line = 20;

for version in 1..times {
//eprintln!("version: {}", version);
let params = lsp::simulate_keypress(&uri, version, &mut cursor_line);
let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await;
if version == 0 {
service.inner().wait_for_parsing().await;
}
// wait for a random amount of time to simulate typing
random_delay().await;
}
shutdown_and_exit(&mut service).await;
});
}

#[test]
fn garbage_collection_storage() {
let p = sway_workspace_dir()
.join("sway-lsp/tests/fixtures/garbage_collection/storage_contract")
.join("src/main.sw");
garbage_collection_runner(p);
}

#[test]
fn garbage_collection_paths() {
let p = test_fixtures_dir().join("tokens/paths/src/main.sw");
garbage_collection_runner(p);
}

#[test]
fn garbage_collection_minimal_script() {
let p = sway_workspace_dir()
.join("sway-lsp/tests/fixtures/garbage_collection/minimal_script")
.join("src/main.sw");
garbage_collection_runner(p);
}

#[test]
fn lsp_syncs_with_workspace_edits() {
run_async!({
@@ -2181,3 +2136,165 @@ fn test_url_to_session_existing_session() {
shutdown_and_exit(&mut service).await;
});
}

//------------------- GARBAGE COLLECTION TESTS -------------------//

async fn garbage_collection_runner(path: PathBuf) {
setup_panic_hook();
let (mut service, _) = LspService::new(ServerState::new);
let uri = init_and_open(&mut service, path).await;
let times = 20;

// Initialize cursor position
let mut cursor_line = 1;

for version in 1..times {
//eprintln!("version: {}", version);
let params = lsp::simulate_keypress(&uri, version, &mut cursor_line);
let _ = lsp::did_change_request(&mut service, &uri, version, Some(params)).await;
if version == 0 {
service.inner().wait_for_parsing().await;
}
// wait for a random amount of time to simulate typing
random_delay().await;
}
shutdown_and_exit(&mut service).await;
}

#[test]
fn garbage_collection_storage() {
let p = sway_workspace_dir()
.join("sway-lsp/tests/fixtures/garbage_collection/storage_contract")
.join("src/main.sw");
run_async!({
garbage_collection_runner(p).await;
});
}

#[test]
fn garbage_collection_paths() {
let p = test_fixtures_dir().join("tokens/paths/src/main.sw");
run_async!({
garbage_collection_runner(p).await;
});
}

#[test]
fn garbage_collection_minimal_script() {
let p = sway_workspace_dir()
.join("sway-lsp/tests/fixtures/garbage_collection/minimal_script")
.join("src/main.sw");
run_async!({
garbage_collection_runner(p).await;
});
}

/// Tests garbage collection across all language test examples in parallel.
///
/// # Overview
/// This test suite takes a unique approach to handling test isolation and error reporting:
///
/// 1. Process Isolation: Each test is run in its own process to ensure complete isolation
/// between test runs. This allows us to catch all failures rather than stopping at
/// the first panic or error.
///
/// 2. Parallel Execution: Uses rayon to run tests concurrently, significantly reducing
/// total test time from several minutes to under a minute.
///
/// 3. Full Coverage: Unlike traditional test approaches that stop at the first failure,
/// this runner continues through all tests, providing a complete picture of which
/// examples need garbage collection fixes.
///
/// # Implementation Details
/// - Uses std::process::Command to spawn each test in isolation
/// - Collects results through a thread-safe Mutex
/// - Provides detailed error reporting for failed tests
/// - Categorizes different types of failures (exit codes vs signals)
// #[test]
#[allow(dead_code)]
fn run_all_garbage_collection_tests() {
let base_dir = sway_workspace_dir().join(e2e_language_dir());
let entries: Vec<_> = std::fs::read_dir(base_dir)
.unwrap()
.filter_map(|e| e.ok())
.filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
.collect();

let results = Mutex::new(Vec::new());

println!("\n=== Starting Garbage Collection Tests ===\n");

entries.par_iter().for_each(|entry| {
let project_dir = entry.path();
let project_name = project_dir
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
let main_file = project_dir.join("src/main.sw");

println!("▶ Testing: {}", project_name);
println!(" Path: {}", main_file.display());

let status = Command::new(std::env::current_exe().unwrap())
.args(["--test", "test_single_project", "--exact", "--nocapture"])
.env("TEST_FILE", main_file.to_string_lossy().to_string())
.status()
.unwrap();

let test_result = if status.success() {
println!(" ✅ Passed: {}\n", project_name);
(project_name, true, None)
} else {
println!(" ❌ Failed: {} ({})\n", project_name, status);
(project_name, false, Some(format!("Exit code: {}", status)))
};

results.lock().unwrap().push(test_result);
});

let results = results.into_inner().unwrap();

// Print final results
println!("=== Garbage Collection Test Results ===\n");

let total = results.len();
let passed = results.iter().filter(|r| r.1).count();
let failed = total - passed;

println!("Total tests: {}", total);
println!("✅ Passed: {}", passed);
println!("❌ Failed: {}", failed);

if failed > 0 {
println!("\nFailed Projects:");
for (name, _, error) in results.iter().filter(|r| !r.1) {
println!("- {} (Error: {})", name, error.as_ref().unwrap());
}

panic!("{} projects failed garbage collection testing", failed);
}
}

/// Individual test runner executed in a separate process for each test.
///
/// This function is called by the main test runner through a new process invocation
/// for each test file. The file path is passed via the TEST_FILE environment
/// variable to maintain process isolation.
///
/// # Process Isolation
/// Running each test in its own process ensures that:
/// 1. Tests are completely isolated from each other
/// 2. Panics in one test don't affect others
/// 3. Resource cleanup happens automatically on process exit
// #[tokio::test]
#[allow(dead_code)]
async fn test_single_project() {
if let Ok(file) = std::env::var("TEST_FILE") {
println!("Running single test for file: {}", file);
let path = PathBuf::from(file);
garbage_collection_runner(path).await;
} else {
panic!("TEST_FILE environment variable not set");
}
}
23 changes: 8 additions & 15 deletions sway-lsp/tests/utils/src/lib.rs
@@ -1,7 +1,7 @@
use assert_json_diff::assert_json_include;
use futures::StreamExt;
use lsp_types::Url;
-use rand::Rng;
+use rand::{rngs::SmallRng, Rng, SeedableRng};
use serde_json::Value;
use std::{
    env, fs,
@@ -117,23 +117,16 @@ pub async fn assert_server_requests(

/// Introduces a random delay between 1 to 30 milliseconds with a chance of additional longer delays based on predefined probabilities.
pub async fn random_delay() {
+    // Create a thread-safe RNG
+    let mut rng = SmallRng::from_entropy();
+
    // wait for a random amount of time between 1-30ms
-    tokio::time::sleep(tokio::time::Duration::from_millis(
-        rand::thread_rng().gen_range(1..=30),
-    ))
-    .await;
+    tokio::time::sleep(tokio::time::Duration::from_millis(rng.gen_range(1..=30))).await;

-    // there is a 10% chance that a longer 100-800ms wait will be added
-    if rand::thread_rng().gen_ratio(1, 10) {
-        tokio::time::sleep(tokio::time::Duration::from_millis(
-            rand::thread_rng().gen_range(100..=1200),
-        ))
-        .await;
-    }
-    // 20% chance to introduce a longer delay of 200 to 1500 milliseconds.
-    if rand::thread_rng().gen_ratio(2, 10) {
+    // 20% chance to introduce a longer delay of 100 to 1200 milliseconds.
+    if rng.gen_ratio(2, 10) {
        tokio::time::sleep(tokio::time::Duration::from_millis(
-            rand::thread_rng().gen_range(400..=2800),
+            rng.gen_range(100..=1200),
        ))
        .await;
    }