From 6280e883a6836cb7d346e004eb1d74a5e3f9dea5 Mon Sep 17 00:00:00 2001
From: Hilary James Oliver
Date: Mon, 14 Aug 2023 00:09:25 +1200
Subject: [PATCH] Fix unit and integration tests.

---
 cylc/flow/flow_mgr.py                    |  2 +-
 cylc/flow/task_events_mgr.py             |  2 ++
 tests/integration/test_data_store_mgr.py |  2 +-
 tests/integration/test_task_pool.py      |  3 ++-
 tests/integration/test_trigger.py        |  2 +-
 tests/unit/scripts/test_trigger.py       | 15 ++++++++++-----
 tests/unit/test_id_match.py              | 16 ++++++++--------
 7 files changed, 25 insertions(+), 17 deletions(-)

diff --git a/cylc/flow/flow_mgr.py b/cylc/flow/flow_mgr.py
index c80fcc0d4ba..7d0d00a38dd 100644
--- a/cylc/flow/flow_mgr.py
+++ b/cylc/flow/flow_mgr.py
@@ -35,7 +35,7 @@
 ERR_OPT_FLOW_INT = "Multiple flow options must all be integer valued"
 ERR_OPT_FLOW_META = "Metadata is only for new flows"
 ERR_OPT_FLOW_WAIT = (
-    f'Flow wait is not compatible with "{FLOW_NEW}" or "{FLOW_NONE}" flows.'
+    f"--wait is not compatible with --flow={FLOW_NEW} or --flow={FLOW_NONE}"
 )
 
 
diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py
index 46d46aae942..72aaffa31c1 100644
--- a/cylc/flow/task_events_mgr.py
+++ b/cylc/flow/task_events_mgr.py
@@ -1651,6 +1651,8 @@ def reset_bad_hosts(self):
 
     def spawn_children(self, itask, output):
         # update DB task outputs
         self.workflow_db_mgr.put_update_task_outputs(itask)
+        self.workflow_db_mgr.process_queued_ops()
+
         # spawn child-tasks
         self.spawn_func(itask, output)
diff --git a/tests/integration/test_data_store_mgr.py b/tests/integration/test_data_store_mgr.py
index d679588a977..d54904dcc6d 100644
--- a/tests/integration/test_data_store_mgr.py
+++ b/tests/integration/test_data_store_mgr.py
@@ -292,7 +292,7 @@ def test_delta_task_prerequisite(harness):
     schd.pool.reset([
         t.identity
         for t in schd.pool.get_tasks()
-    ], (TASK_STATUS_SUCCEEDED,), "flow1")
+    ], [], [(TASK_STATUS_SUCCEEDED,)], ['all'])
     assert all({
         p.satisfied
         for t in schd.data_store_mgr.updated[TASK_PROXIES].values()
diff --git a/tests/integration/test_task_pool.py b/tests/integration/test_task_pool.py
index 8459ae2d2ff..1e2e9929521 100644
--- a/tests/integration/test_task_pool.py
+++ b/tests/integration/test_task_pool.py
@@ -646,6 +646,7 @@ async def test_restart_prereqs(
 
     # Mark 1/a as succeeded and spawn 1/z
     schd.pool.get_tasks()[0].state_reset('succeeded')
+    schd.pool.spawn_on_output(schd.pool.get_tasks()[0], 'succeeded')
     assert list_tasks(schd) == expected_2
 
 
@@ -810,7 +811,7 @@ async def _test_restart_prereqs_sat():
     for itask in schd.pool.get_tasks():
         itask.state_reset('succeeded')
         schd.pool.spawn_on_output(itask, 'succeeded')
-        schd.workflow_db_mgr.put_insert_task_outputs(itask)
+        schd.workflow_db_mgr.put_update_task_outputs(itask)
         schd.pool.remove_if_complete(itask)
     schd.workflow_db_mgr.process_queued_ops()
     assert list_tasks(schd) == [
diff --git a/tests/integration/test_trigger.py b/tests/integration/test_trigger.py
index e008caa4a7c..e0d0e0eaf76 100644
--- a/tests/integration/test_trigger.py
+++ b/tests/integration/test_trigger.py
@@ -37,7 +37,7 @@ async def test_trigger_invalid(mod_one, start, log_filter, flow_strs):
     """Ensure invalid flow values are rejected."""
     async with start(mod_one) as log:
         log.clear()
-        assert mod_one.pool.force_trigger_tasks(['*'], flow_strs) == 0
+        assert mod_one.pool.force_trigger_tasks(['*'], flow_strs) is None
         assert len(log_filter(log, level=logging.WARN)) == 1
 
 
diff --git a/tests/unit/scripts/test_trigger.py b/tests/unit/scripts/test_trigger.py
index d464bda0a34..a5469fb0071 100644
--- a/tests/unit/scripts/test_trigger.py
+++ b/tests/unit/scripts/test_trigger.py
@@ -18,12 +18,17 @@
 from optparse import Values
 import pytest
-from typing import Iterable, Optional, Tuple, Type
+from typing import Optional, Tuple, Type
 
 from cylc.flow.exceptions import InputError
 from cylc.flow.option_parsers import Options
-from cylc.flow.flow_mgr import FLOW_ALL, FLOW_NEW, FLOW_NONE
-from cylc.flow.scripts.trigger import get_option_parser, _validate
+from cylc.flow.flow_mgr import (
+    FLOW_ALL,
+    FLOW_NEW,
+    FLOW_NONE,
+    validate_flow_opts
+)
+from cylc.flow.scripts.trigger import get_option_parser
 
 Opts = Options(get_option_parser())
 
 
@@ -117,7 +122,7 @@ def test_validate(
     if expected_err:
         err, msg = expected_err
         with pytest.raises(err) as exc:
-            _validate(opts)
+            validate_flow_opts(opts)
         assert msg in str(exc.value)
     else:
-        _validate(opts)
+        validate_flow_opts(opts)
diff --git a/tests/unit/test_id_match.py b/tests/unit/test_id_match.py
index d26e85b092d..f68c1458dd1 100644
--- a/tests/unit/test_id_match.py
+++ b/tests/unit/test_id_match.py
@@ -127,7 +127,7 @@ def test_filter_ids_task_mode(task_pool, ids, matched, not_matched):
         {}
     )
 
-    _matched, _not_matched = filter_ids([pool], ids)
+    _matched, _not_matched = filter_ids(pool, ids)
     assert [get_task_id(itask) for itask in _matched] == matched
     assert _not_matched == not_matched
 
@@ -188,21 +188,21 @@ def test_filter_ids_cycle_mode(task_pool, ids, matched, not_matched):
         {}
     )
 
-    _matched, _not_matched = filter_ids([pool], ids, out=IDTokens.Cycle)
+    _matched, _not_matched = filter_ids(pool, ids, out=IDTokens.Cycle)
     assert _matched == [IntegerPoint(i) for i in matched]
     assert _not_matched == not_matched
 
 
 def test_filter_ids_invalid(caplog):
     """Ensure invalid IDs are handled elegantly."""
-    matched, not_matched = filter_ids([{}], ['#'])
+    matched, not_matched = filter_ids({}, ['#'])
     assert matched == []
     assert not_matched == ['#']
     assert caplog.record_tuples == [
         ('cylc', 30, 'No active tasks matching: #'),
     ]
     caplog.clear()
-    matched, not_matched = filter_ids([{}], ['#'], warn=False)
+    matched, not_matched = filter_ids({}, ['#'], warn=False)
     assert caplog.record_tuples == []
 
 
@@ -216,7 +216,7 @@ def test_filter_ids_pattern_match_off(task_pool):
     )
 
     _matched, _not_matched = filter_ids(
-        [pool],
+        pool,
         ['1/a'],
         out=IDTokens.Task,
         pattern_match=False,
@@ -238,7 +238,7 @@ def test_filter_ids_toggle_pattern_matching(task_pool, caplog):
 
     # ensure pattern matching works
    _matched, _not_matched = filter_ids(
-        [pool],
+        pool,
         ids,
         out=IDTokens.Task,
         pattern_match=True,
@@ -249,7 +249,7 @@
     # ensure pattern matching can be disabled
     caplog.clear()
     _matched, _not_matched = filter_ids(
-        [pool],
+        pool,
         ids,
         out=IDTokens.Task,
         pattern_match=False,
@@ -285,7 +285,7 @@ def test_filter_ids_namespace_hierarchy(task_pool, ids, matched, not_matched):
     )
 
     _matched, _not_matched = filter_ids(
-        [pool],
+        pool,
         ids,
         pattern_match=False,
     )