
Commit

Add supress_print attribute to the Executor class for silencing prints and tqdm (#361)

* add supress_print flag for easy_run
danlessa committed Apr 19, 2024
1 parent e0c2696 commit a06eaed
Showing 4 changed files with 94 additions and 11 deletions.
23 changes: 15 additions & 8 deletions cadCAD/engine/__init__.py
@@ -70,7 +70,7 @@ def distroduce_proc(

class Executor:
def __init__(self,
exec_context: ExecutionContext, configs: List[Configuration], sc=None, empty_return=False
exec_context: ExecutionContext, configs: List[Configuration], sc=None, empty_return=False, supress_print=False
) -> None:
self.sc = sc
self.SimExecutor = SimExecutor
@@ -79,6 +79,7 @@ def __init__(self,
self.additional_objs = exec_context.additional_objs
self.configs = configs
self.empty_return = empty_return
self.supress_print = supress_print

def execute(self) -> Tuple[object, object, Dict[str, object]]:
if self.empty_return is True:
@@ -97,12 +98,14 @@ def execute(self) -> Tuple[object, object, Dict[str, object]]:
config_idx = 0

# Execution Info
print_exec_info(self.exec_context, configs_as_objs(self.configs))
if self.supress_print is False:
print_exec_info(self.exec_context, configs_as_objs(self.configs))

t1 = time()
for x in tqdm(self.configs,
total=len(self.configs),
desc="Initializing configurations"):
desc="Initializing configurations",
disable=self.supress_print):
sessions.append(
{
'user_id': x.user_id, 'experiment_id': x.experiment_id, 'session_id': x.session_id,
@@ -180,7 +183,8 @@ def get_final_results(simulations: List[StateHistory],
flat_timesteps, tensor_fields = [], []
for sim_result, psu, ep in tqdm(list(zip(simulations, psus, eps)),
total=len(simulations),
desc='Flattening results'):
desc='Flattening results',
disable=self.supress_print):
if do_flatten:
flat_timesteps.append(flatten(sim_result))
tensor_fields.append(create_tensor_field(psu, ep))
@@ -209,8 +213,9 @@ def get_final_results(simulations: List[StateHistory],
else:
raise ValueError("Invalid execution mode specified")


print("Execution Method: " + self.exec_method.__name__)
if self.supress_print is False:
print("Execution Method: " + self.exec_method.__name__)

simulations_results = self.exec_method(
sim_executors, var_dict_list, states_lists, configs_structs, env_processes_list, Ts, SimIDs, RunIDs,
ExpIDs, SubsetIDs, SubsetWindows, original_N, self.additional_objs
@@ -219,7 +224,8 @@ def get_final_results(simulations: List[StateHistory],
final_result = get_final_results(
simulations_results, partial_state_updates, eps, sessions, remote_threshold)
elif self.exec_context == ExecutionMode.distributed:
print("Execution Method: " + self.exec_method.__name__)
if self.supress_print is False:
print("Execution Method: " + self.exec_method.__name__)
simulations_results = self.exec_method(
sim_executors, var_dict_list, states_lists, configs_structs, env_processes_list, Ts,
SimIDs, RunIDs, ExpIDs, SubsetIDs, SubsetWindows, original_N, self.sc
@@ -228,6 +234,7 @@ def get_final_results(simulations: List[StateHistory],
simulations_results, partial_state_updates, eps, sessions)

t2 = time()
print(f"Total execution time: {t2 - t1 :.2f}s")
if self.supress_print is False:
print(f"Total execution time: {t2 - t1 :.2f}s")

return final_result
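For context, a minimal usage sketch of the new flag at the Executor level (not part of the diff), assuming `exp` is an Experiment assembled as in testing/test_print.py below and using cadCAD's local execution mode:

from cadCAD.engine import Executor, ExecutionContext, ExecutionMode

exec_context = ExecutionContext(ExecutionMode().local_mode,
                                additional_objs={'deepcopy_off': False})
# supress_print=True silences the banner, the tqdm progress bars and the timing summary
executor = Executor(exec_context=exec_context, configs=exp.configs, supress_print=True)
(records, tensor_field, _) = executor.execute()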
2 changes: 0 additions & 2 deletions cadCAD/engine/execution.py
@@ -35,7 +35,6 @@ def single_proc_exec(
Ts, SimIDs, Ns, SubsetIDs, SubsetWindows, var_dict_list)

results: List = []
print(f'Execution Mode: single_threaded')
for raw_param in zip(*raw_params):
simulation_exec, states_list, config, env_processes, T, sim_id, N, subset_id, subset_window, var_dict = raw_param
result = simulation_exec(
@@ -60,7 +59,6 @@ def parallelize_simulations(
additional_objs=None
):

print(f'Execution Mode: parallelized')
params = list(
zip(
simulation_execs, var_dict_list, states_lists, configs_structs, env_processes_list,
3 changes: 2 additions & 1 deletion cadCAD/tools/execution/easy_run.py
@@ -44,6 +44,7 @@ def easy_run(
drop_substeps=True,
exec_mode='local',
deepcopy_off=False,
supress_print=False
) -> pd.DataFrame:
"""
Run cadCAD simulations without headaches.
@@ -69,7 +70,7 @@
elif exec_mode == 'single':
_exec_mode = ExecutionMode().single_mode
exec_context = ExecutionContext(_exec_mode, additional_objs={'deepcopy_off': deepcopy_off})
executor = Executor(exec_context=exec_context, configs=configs)
executor = Executor(exec_context=exec_context, configs=configs, supress_print=supress_print)

# Execute the cadCAD experiment
(records, tensor_field, _) = executor.execute()
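The same switch is exposed through easy_run, which forwards it to the Executor it builds. A sketch (sim_args is a placeholder for easy_run's usual positional arguments, which this hunk does not show and which are unchanged by the commit):

from cadCAD.tools.execution.easy_run import easy_run

# sim_args stands in for the usual positional arguments of easy_run
df = easy_run(*sim_args,
              exec_mode='local',
              deepcopy_off=False,
              supress_print=True)  # new flag: no banner, no tqdm bars, no timing output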
77 changes: 77 additions & 0 deletions testing/test_print.py
@@ -0,0 +1,77 @@
from cadCAD.configuration import Experiment
from cadCAD.configuration.utils import config_sim
from cadCAD.engine import Executor, ExecutionContext, ExecutionMode
import pytest

P_no_lst = {'pA': 1, 'pB': 2, 'pC': 3}
P_single_lst = {'pA': [1], 'pB': [1], 'pC': [3]}
P_single_swp = {'pA': [4, 5, 6], 'pB': [1], 'pC': [3]}
P_all_swp = {'pA': [7, 8, 9], 'pB': [1, 2, 3], 'pC': [1, 2, 3]}
P_all_but_one_swp = {'pA': [7, 8, 9], 'pB': [1, 2, 3], 'pC': [1]}
Ps = [P_no_lst, P_single_lst, P_single_swp, P_all_swp, P_all_but_one_swp]

CONFIG_SIGNATURES_TO_TEST = [(3, 3, 3, 3, 3),
(1, 3, 3, 3, 3),
(3, 1, 3, 3, 3),
(1, 1, 3, 3, 3),
(3, 3, 1, 3, 3),
(1, 3, 1, 3, 3),
(1, 1, 1, 3, 3)]


def run_experiment(exp: Experiment, mode: str, supress_print=False):
exec_context = ExecutionContext(mode)
executor = Executor(exec_context=exec_context, configs=exp.configs, supress_print=supress_print)
(records, tensor_field, _) = executor.execute()
return records


def param_count_test_suf_generator(provided_params):
def s_test_param_count(params, _2, _3, _4, _5):
assert params.keys() == provided_params.keys(), 'Params are not matching'
return ('varA', None)
return s_test_param_count


def param_count_test_policy_generator(provided_params):
def p_test_param_count(params, _2, _3, _4):
assert params.keys() == provided_params.keys(), 'Params are not matching'
return {'sigA': None}
return p_test_param_count


def create_experiments(N_simulations=3, N_sweeps=3, N_runs=3, N_timesteps=3, N_substeps=3, params={}) -> Experiment:

INITIAL_STATE = {'varA': None}
PSUBs = [{'policies': {'sigA': param_count_test_policy_generator(
params)}, 'variables': {'varA': param_count_test_suf_generator(params)}}] * N_substeps

SIM_CONFIG = config_sim(
{
"N": N_runs,
"T": range(N_timesteps),
"M": params, # Optional
}
)

exp = Experiment()
for i_sim in range(N_simulations):
exp.append_model(
sim_configs=SIM_CONFIG,
initial_state=INITIAL_STATE,
partial_state_update_blocks=PSUBs
)
return exp



def test_print(capfd):
exp = run_experiment(create_experiments(N_simulations=3, N_sweeps=3, N_runs=3, N_timesteps=3, N_substeps=3, params={'a': 0}), 'single_proc', supress_print=False)
out, err = capfd.readouterr()
assert " ___________ ____\n ________ __ ___/ / ____/ | / __ \\\n / ___/ __` / __ / / / /| | / / / /\n/ /__/ /_/ / /_/ / /___/ ___ |/ /_/ /\n\\___/\\__,_/\\__,_/\\____/_/ |_/_____/\nby cadCAD" in out
assert 'Initializing configurations' in err

exp = run_experiment(create_experiments(N_simulations=3, N_sweeps=3, N_runs=3, N_timesteps=3, N_substeps=3, params={'a': 0}), 'single_proc', supress_print=True)
out, err = capfd.readouterr()
assert out == ''
assert err == ''
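A note on the two assertions: tqdm writes its progress bars to sys.stderr by default, while the cadCAD banner, the "Execution Method" line and the timing summary are plain prints to sys.stdout, so the captured streams are checked separately. A self-contained illustration of that split, independent of cadCAD:

import contextlib
import io
from tqdm import tqdm

buf_out, buf_err = io.StringIO(), io.StringIO()
with contextlib.redirect_stdout(buf_out), contextlib.redirect_stderr(buf_err):
    print("goes to stdout")
    for _ in tqdm(range(3), desc="Initializing configurations"):
        pass  # the bar, including its desc, goes to stderr

assert "goes to stdout" in buf_out.getvalue()
assert "Initializing configurations" in buf_err.getvalue()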
