diff --git a/demo/multi_sorcha.py b/demo/multi_sorcha.py
new file mode 100644
index 00000000..8b6a7496
--- /dev/null
+++ b/demo/multi_sorcha.py
@@ -0,0 +1,74 @@
+import argparse
+import os
+import sqlite3
+from multiprocessing import Pool
+
+import astropy.table as tb
+import pandas as pd
+
+
+def run_sorcha(i, args, path_inputs, pointings, instance, config):
+    # Build the command once so the printed line matches what is executed.
+    command = (
+        f"sorcha run -c {config} -pd {pointings} -o {args.path}{instance}/ "
+        f"-t {instance}_{i} -ob {args.path}{instance}/orbits_{i}.csv "
+        f"-p {args.path}{instance}/physical_{i}.csv"
+    )
+    print(command, flush=True)
+    os.system(command)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--input_orbits', type=str, help='orbit file for the full population')
+    parser.add_argument('--input_physical', type=str, help='physical-parameter file for the full population')
+    parser.add_argument('--path', type=str, help='working directory for inputs and outputs')
+    parser.add_argument('--chunksize', type=int, help='orbits per instance (norbits * cores)')
+    parser.add_argument('--norbits', type=int, help='orbits per core')
+    parser.add_argument('--cores', type=int, help='number of parallel sorcha processes')
+    parser.add_argument('--instance', type=int, help='index of this instance (e.g. the SLURM array task ID)')
+    parser.add_argument('--cleanup', action='store_true', help='delete per-core files after merging')
+    parser.add_argument('--copy_inputs', action='store_true', help='copy the pointings database into the instance directory')
+    parser.add_argument('--pointings', type=str, help='pointings database')
+    parser.add_argument('--config', type=str, help='sorcha configuration file')
+    args = parser.parse_args()
+    chunk = args.chunksize
+    instance = args.instance
+    norbits = args.norbits
+    pointings = args.pointings
+    path = args.path
+    config = args.config
+
+    # Each instance works on its own contiguous slice of the full population.
+    orbits = tb.Table.read(args.input_orbits)
+    orbits = orbits[instance * chunk:(instance + 1) * chunk]
+    physical = tb.Table.read(args.input_physical)
+    physical = physical[instance * chunk:(instance + 1) * chunk]
+
+    os.makedirs(f"{path}{instance}", exist_ok=True)
+
+    # Default to the working directory so path_inputs is always defined.
+    path_inputs = path
+    if args.copy_inputs:
+        os.system(f'cp {pointings} {path}{instance}/')
+        path_inputs = f'{path}{instance}'
+
+    # Split this instance's slice into one orbit/physical-parameter file per core.
+    for i in range(args.cores):
+        sub_orb = orbits[i * norbits:(i + 1) * norbits]
+        sub_phys = physical[i * norbits:(i + 1) * norbits]
+        sub_orb.write(f"{path}{instance}/orbits_{i}.csv", overwrite=True)
+        sub_phys.write(f"{path}{instance}/physical_{i}.csv", overwrite=True)
+
+    with Pool(processes=args.cores) as pool:
+        pool.starmap(run_sorcha, [(i, args, path_inputs, pointings, instance, config) for i in range(args.cores)])
+
+    # Merge the per-core SQLite outputs into one database for this instance.
+    data = []
+    for i in range(args.cores):
+        data.append(pd.read_sql("select * from sorcha_results", sqlite3.connect(f"{path}{instance}/{instance}_{i}.db")))
+    data = pd.concat(data)
+    data.to_sql("sorcha_results", sqlite3.connect(f"{path}output_{instance}.db"), index=False)
+    if args.cleanup:
+        os.system(f"rm {path}{instance}/*")
+        os.system(f"rmdir {path}{instance}")
diff --git a/demo/multi_sorcha.sh b/demo/multi_sorcha.sh
new file mode 100644
index 00000000..f24c3b0c
--- /dev/null
+++ b/demo/multi_sorcha.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+#SBATCH --job-name=my_job
+#SBATCH --account=my_account
+#SBATCH --partition=my_partition
+#SBATCH --ntasks=10            # Number of tasks; should match the cores argument ($2)
+#SBATCH --mem=40gb             # Job memory
+#SBATCH --time=2:00:00         # Time limit hrs:min:sec
+#SBATCH --output=log-%a.log    # Standard output and error log, one per array task
+
+python3 multi_sorcha.py --config my_config.ini --input_orbits my_orbits.csv --input_physical my_colors.csv --pointings my_pointings.db --path ./ --chunksize $(($1 * $2)) --norbits $1 --cores $2 --instance ${SLURM_ARRAY_TASK_ID} --cleanup --copy_inputs
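
Taken together, multi_sorcha.sh is meant to be submitted as a SLURM array job: each array task becomes one --instance, and the two positional arguments give orbits per core ($1) and cores per instance ($2), so each instance simulates $1 * $2 orbits. A hypothetical submission (the array range and argument values below are illustrative, not taken from the diff):

    sbatch --array=0-9 multi_sorcha.sh 100 10

With these values, each of the 10 array tasks splits 1000 orbits across 10 sorcha processes of 100 orbits each, covering 10,000 orbits in total; every instance writes its merged results to output_<instance>.db, and those per-instance databases can be combined afterwards with the same pandas/sqlite3 pattern used at the end of multi_sorcha.py.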