Skip to content

Commit

Permalink
Merge pull request #25 from IDEMSInternational/respect-uuids-from-sheet
Browse files Browse the repository at this point in the history
Respect uuids from sheet
  • Loading branch information
geoo89 authored Oct 12, 2022
2 parents 46c9df2 + 1ee3f9b commit 0494684
Show file tree
Hide file tree
Showing 5 changed files with 149 additions and 45 deletions.
40 changes: 25 additions & 15 deletions parsers/creation/standard_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,6 @@ def add_exit(self, destination_uuid, condition):
# TODO: Should this be a warning rather than an error?
raise ValueError("No Response exit only exists for wait_for_response rows with non-zero timeout.")


# We have a non-trivial condition. Fill in default values if necessary
condition_type = condition.type
comparison_arguments=[condition.value]
Expand Down Expand Up @@ -104,8 +103,14 @@ def add_exit(self, destination_uuid, condition):

class Parser:

def __init__(self, rows, flow_name=None, container=None):
self.container = container or FlowContainer(flow_name=flow_name)
def __init__(self, rapidpro_container, rows, flow_name, flow_uuid=None):
'''
rapidpro_container: The parent RapidProContainer to contain the flow generated by this parser.
rows: The sheet rows generating the flow.
'''
self.rapidpro_container = rapidpro_container
self.flow_container = FlowContainer(flow_name=flow_name, uuid=flow_uuid)
self.rapidpro_container.record_flow_uuid(flow_name, self.flow_container.uuid)
row_parser = RowParser(RowData, CellParser())
self.data_rows = [row_parser.parse_row(row) for row in rows]

Expand Down Expand Up @@ -161,26 +166,31 @@ def _get_or_create_group(self, name, uuid=None):
return Group(name=name, uuid=uuid)

def get_row_node(self, row):
if row.type in ['add_to_group', 'remove_from_group', 'split_by_group'] and row.obj_id:
self.rapidpro_container.record_group_uuid(row.mainarg_groups[0], row.obj_id)

node_uuid = row.node_uuid or None
if row.type in ['send_message', 'save_value', 'add_to_group', 'remove_from_group', 'save_flow_result']:
node = BasicNode()
node = BasicNode(uuid=node_uuid)
node.update_default_exit(None)
return node
elif row.type in ['start_new_flow']:
return EnterFlowNode(row.mainarg_flow_name)
if row.obj_id:
self.rapidpro_container.record_flow_uuid(row.mainarg_flow_name, row.obj_id)
return EnterFlowNode(row.mainarg_flow_name, uuid=node_uuid)
elif row.type in ['wait_for_response']:
# TODO: Support timeout and timeout category
if row.no_response:
return SwitchRouterNode('@input.text', result_name=row.save_name, wait_timeout=int(row.no_response))
return SwitchRouterNode('@input.text', result_name=row.save_name, wait_timeout=int(row.no_response), uuid=node_uuid)
else:
return SwitchRouterNode('@input.text', result_name=row.save_name, wait_timeout=None)
return SwitchRouterNode('@input.text', result_name=row.save_name, wait_timeout=None, uuid=node_uuid)
elif row.type in ['split_by_value']:
return SwitchRouterNode(row.mainarg_expression, result_name=row.save_name, wait_timeout=None)
return SwitchRouterNode(row.mainarg_expression, result_name=row.save_name, wait_timeout=None, uuid=node_uuid)
elif row.type in ['split_by_group']:
return SwitchRouterNode('@contact.groups', result_name=row.save_name, wait_timeout=None)
return SwitchRouterNode('@contact.groups', result_name=row.save_name, wait_timeout=None, uuid=node_uuid)
elif row.type in ['split_random']:
return RandomRouterNode(result_name=row.save_name)
return RandomRouterNode(result_name=row.save_name, uuid=node_uuid)
else:
return BasicNode()
return BasicNode(uuid=node_uuid)

def get_node_name(self, row):
return row.node_uuid or row.node_name
Expand All @@ -190,7 +200,7 @@ def _add_row_edge(self, edge, destination_uuid):
from_node_group = self.row_id_to_nodegroup[edge.from_]
created_node = from_node_group.add_exit(destination_uuid, edge.condition)
if created_node:
self.container.add_node(created_node)
self.flow_container.add_node(created_node)

def _parse_goto_row(self, row):
# If there is a single destination, connect all edges to that destination.
Expand Down Expand Up @@ -235,7 +245,7 @@ def _parse_row(self, row):
# it might be cleaner to go through the list of NodeGroups at
# the end and compile the list of nodes.
# Caveat: Need to identify which is the starting node.
self.container.add_node(new_node)
self.flow_container.add_node(new_node)
self.row_id_to_nodegroup[row.row_id] = NodeGroup(new_node, row.type)
self.node_name_to_node_map[self.get_node_name(row)] = new_node

Expand All @@ -245,4 +255,4 @@ def get_flow(self):
Add the flow to a RapidProContainer and run update_global_uuids
to fill in these missing UUIDs in a consistent way.
'''
return self.container
return self.flow_container
100 changes: 86 additions & 14 deletions parsers/creation/tests/test_standard_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@

from parsers.creation.standard_parser import Parser
from tests.utils import get_dict_from_csv, find_destination_uuid, Context, find_node_by_uuid
from rapidpro.models.containers import RapidProContainer, FlowContainer
from rapidpro.models.actions import Group, AddContactGroupAction
from rapidpro.models.nodes import BasicNode

from parsers.common.rowparser import RowParser
from parsers.creation.standard_models import RowData
Expand Down Expand Up @@ -50,10 +53,10 @@ def setUp(self) -> None:
self.row_parser = RowParser(RowData, CellParser())

def test_send_message(self):
parser = Parser(rows=[], flow_name='send_message')
parser = Parser(RapidProContainer(), rows=[], flow_name='send_message')
parser.data_rows = [get_start_row()]
parser.parse()
render_output = parser.container.render()
render_output = parser.flow_container.render()

node_0 = render_output['nodes'][0]
node_0_actions = node_0['actions']
Expand All @@ -66,10 +69,10 @@ def test_send_message(self):
def test_linear(self):
data_row1 = get_start_row()
data_row2 = get_unconditional_node_from_1()
parser = Parser(rows=[], flow_name='linear')
parser = Parser(RapidProContainer(), rows=[], flow_name='linear')
parser.data_rows = [data_row1, data_row2]
parser.parse()
render_output = parser.container.render()
render_output = parser.flow_container.render()

node_0 = render_output['nodes'][0]
node_1 = render_output['nodes'][1]
Expand All @@ -84,10 +87,10 @@ def test_linear(self):
def test_only_conditional(self):
data_row1 = get_start_row()
data_row3 = get_conditional_node_from_1()
parser = Parser(rows=[], flow_name='only_conditional')
parser = Parser(RapidProContainer(), rows=[], flow_name='only_conditional')
parser.data_rows = [data_row1, data_row3]
parser.parse()
render_output = parser.container.render()
render_output = parser.flow_container.render()

self.assertEqual(len(render_output['nodes']), 3)
node_0 = render_output['nodes'][0]
Expand Down Expand Up @@ -119,10 +122,10 @@ def test_split2(self):
self.check_split(data_rows)

def check_split(self, data_rows):
parser = Parser(rows=[], flow_name='split')
parser = Parser(RapidProContainer(), rows=[], flow_name='split')
parser.data_rows = data_rows
parser.parse()
render_output = parser.container.render()
render_output = parser.flow_container.render()

node_start = render_output['nodes'][0]
node_switch = find_node_by_uuid(render_output, node_start['exits'][0]['destination_uuid'])
Expand All @@ -139,14 +142,19 @@ def check_split(self, data_rows):
self.assertIsNone(node_start.get('router'))
self.assertIsNotNone(node_switch.get('router'))


def test_no_switch_node_rows(self):
rows = get_dict_from_csv('input/no_switch_nodes.csv')
no_switch_node_rows = [self.row_parser.parse_row(row) for row in rows]
parser = Parser(rows=[], flow_name='no_switch_node')
parser = Parser(RapidProContainer(), rows=[], flow_name='no_switch_node')
parser.data_rows = no_switch_node_rows
parser.parse()
render_output = parser.container.render()
render_output = parser.flow_container.render()

# Check that node UUIDs are maintained
nodes = render_output['nodes']
actual_node_uuids = [node['uuid'] for node in nodes]
expected_node_uuids = [row['_nodeId'] for row in rows][3:] # The first 4 rows are actions joined into a single node.
self.assertEqual(expected_node_uuids, actual_node_uuids)

self.assertEqual(render_output['name'], 'no_switch_node')
self.assertEqual(render_output['type'], 'messaging')
Expand Down Expand Up @@ -230,15 +238,79 @@ def test_no_switch_node_rows(self):
def test_switch_node_rows(self):
rows = get_dict_from_csv('input/switch_nodes.csv')
switch_node_rows = [self.row_parser.parse_row(row) for row in rows]
parser = Parser(rows=[], flow_name='switch_node')
parser = Parser(RapidProContainer(), rows=[], flow_name='switch_node')
parser.data_rows = switch_node_rows
parser.parse()

render_output = parser.container.render()
render_output = parser.flow_container.render()

# Check that node UUIDs are maintained
nodes = render_output['nodes']
actual_node_uuids = [node['uuid'] for node in nodes]
expected_node_uuids = [row['_nodeId'] for row in rows]
self.assertEqual(expected_node_uuids, actual_node_uuids)

# Check that No Response category is created even if not connected
last_node = render_output['nodes'][-1]
last_node = nodes[-1]
self.assertEqual('No Response', last_node['router']['categories'][-1]['name'])

# TODO: Ideally, there should be more explicit tests here.
# At least the functionality is covered by the integration tests simulating the flow.
# print(json.dumps(render_output, indent=2))

def test_groups_and_flows(self):
        # We check that referenced flows and groups are assigned UUIDs consistently
tiny_uuid = '00000000-acec-434f-bc7c-14c584fc4bc8'
test_uuid = '8224bfe2-acec-434f-bc7c-14c584fc4bc8'
other_uuid = '12345678-acec-434f-bc7c-14c584fc4bc8'
test_group_dict = {'name': 'test group', 'uuid' : test_uuid}
other_group_dict = {'name': 'other group', 'uuid' : other_uuid}
tiny_flow_dict = {'name': 'tiny_flow', 'uuid' : tiny_uuid}

# Make a flow with a single group node (no UUIDs), and put it into new container
node = BasicNode()
node.add_action(AddContactGroupAction([Group('test group'), Group('other group')]))
tiny_flow = FlowContainer('tiny_flow', uuid=tiny_uuid)
tiny_flow.add_node(node)
container = RapidProContainer(groups=[Group('other group', other_uuid)])
container.add_flow(tiny_flow)

# Add flow from sheet into container
rows = get_dict_from_csv('input/groups_and_flows.csv')
switch_node_rows = [self.row_parser.parse_row(row) for row in rows]
parser = Parser(container, rows=[], flow_name='groups_and_flows')
parser.data_rows = switch_node_rows
parser.parse()
container.add_flow(parser.get_flow())

# Render also invokes filling in all the flow/group UUIDs
render_output = container.render()

# Ensure container groups are complete and have correct UUIDs
self.assertIn(test_group_dict, render_output['groups'])
self.assertIn(other_group_dict, render_output['groups'])
# These UUIDs are inferred from the sheet/the container groups, respectively
self.assertEqual(render_output['flows'][0]['nodes'][0]['actions'][0]['groups'], [test_group_dict, other_group_dict])

nodes = render_output['flows'][1]['nodes']
# This UUID appears only in a later occurrence of the group in the sheet
self.assertEqual(nodes[0]['actions'][0]['groups'], [test_group_dict])
# This UUID is missing from the sheet, but explicit in the flow definition
self.assertEqual(nodes[1]['actions'][0]['flow'], tiny_flow_dict)
# This UUID is explicit in the sheet
self.assertEqual(nodes[2]['actions'][0]['groups'], [test_group_dict])
# This UUID appears in a previous occurrence of the group in the sheet
self.assertEqual(nodes[3]['router']['cases'][0]['arguments'], [test_uuid, 'test group'])
# This UUID was part of the groups in the container, but not in the sheet
self.assertEqual(nodes[7]['actions'][0]['groups'], [other_group_dict])

tiny_flow.uuid = 'something else'
with self.assertRaises(ValueError):
# The enter_flow node has a different uuid than the flow
container.validate()

tiny_flow.uuid = tiny_uuid
container.flows[1].nodes[2].actions[0].groups[0].uuid = 'something else'
with self.assertRaises(ValueError):
# The group is referenced by 2 different UUIDs
container.validate()
39 changes: 26 additions & 13 deletions rapidpro/models/containers.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ def __init__(self, campaigns=None, fields=None, flows=None, groups=None, site=No
self.site = site or 'https://rapidpro.idems.international'
self.triggers = triggers or []
self.version = version
self.uuid_dict = UUIDDict()

def from_dict(data):
data_copy = copy.deepcopy(data)
Expand All @@ -28,22 +29,25 @@ def from_dict(data):
def add_flow(self, flow):
self.flows.append(flow)

def update_global_uuids(self, uuid_dict=None):
if uuid_dict is None:
uuid_dict = UUIDDict()
def record_group_uuid(self, name, uuid):
self.uuid_dict.record_group_uuid(name, uuid)

def record_flow_uuid(self, name, uuid):
self.uuid_dict.record_flow_uuid(name, uuid)

def update_global_uuids(self):
        # Prefill with existing flows and groups
for group in self.groups:
uuid_dict.record_group_uuid(group.name, group.uuid)
self.uuid_dict.record_group_uuid(group.name, group.uuid)
for flow in self.flows:
uuid_dict.record_flow_uuid(flow.name, flow.uuid)
self.uuid_dict.record_flow_uuid(flow.name, flow.uuid)

# Update group/flow UUIDs referenced within the flows
for flow in self.flows:
flow.record_global_uuids(uuid_dict)
uuid_dict.generate_missing_uuids()
flow.record_global_uuids(self.uuid_dict)
self.uuid_dict.generate_missing_uuids()
for flow in self.flows:
flow.assign_global_uuids(uuid_dict)
# TODO: Update self.groups
flow.assign_global_uuids(self.uuid_dict)

def merge(self, container):
'''Merge another RapidPro container into this one.
Expand All @@ -53,7 +57,13 @@ def merge(self, container):
'''
raise NotImplementedError

def validate(self):
self.update_global_uuids()
self.groups = self.uuid_dict.get_group_list()
# TODO: Update self.fields

def render(self):
self.validate()
return {
"campaigns": self.campaigns,
"fields": self.fields,
Expand Down Expand Up @@ -140,12 +150,12 @@ def generate_missing_uuids(self):
self.group_dict[k] = generate_new_uuid()

def record_group_uuid(self, name, uuid):
self.record_uuid(self.group_dict, name, uuid)
self._record_uuid(self.group_dict, name, uuid)

def record_flow_uuid(self, name, uuid):
self.record_uuid(self.flow_dict, name, uuid)
self._record_uuid(self.flow_dict, name, uuid)

def record_uuid(self, uuid_dict, name, uuid):
def _record_uuid(self, uuid_dict, name, uuid):
recorded_uuid = uuid_dict.get(name)
if recorded_uuid:
if uuid and uuid != recorded_uuid:
Expand All @@ -157,4 +167,7 @@ def get_group_uuid(self, name):
return self.group_dict[name]

def get_flow_uuid(self, name):
return self.flow_dict[name]
return self.flow_dict[name]

def get_group_list(self):
return [Group(name, uuid) for name, uuid in self.group_dict.items()]
9 changes: 9 additions & 0 deletions tests/input/groups_and_flows.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
"row_id","type","from","condition","condition_var","condition_type","condition_name","save_name","message_text","obj_id","_nodeId"
"1","add_to_group","start",,,,,,"test group",,"db66d0df-0906-45ca-8378-cf1d39ce0281"
"2","start_new_flow","1",,,,,,"tiny_flow",,"3ecfb56f-627b-4afe-8448-a14eaccfbe0e"
"3","remove_from_group","2;2","completed;expired",,,,,"test group","8224bfe2-acec-434f-bc7c-14c584fc4bc8","21be0fc2-f28c-4ad0-8c02-4afd63ad31ab"
"4","split_by_group","3",,,,,,"test group",,"bac467c9-e699-4211-bc70-3440414fd301"
"5","send_message","4","test group",,,,,"this is a send message node",,"769804d7-7343-4e78-97f3-7a113e8222d2"
"6","save_flow_result","4",,,,,"result name","result value",,"88c593b0-22ba-4a36-8f6a-ffe4e41818d1"
"7","start_new_flow","5",,,,,,"tiny_flow",,
"8","add_to_group","7","completed",,,,,"other group",,
6 changes: 3 additions & 3 deletions tests/test_standard_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,17 @@

from .utils import traverse_flow, Context, get_dict_from_csv
from parsers.creation.standard_parser import Parser

from rapidpro.models.containers import RapidProContainer

class TestStandardParser(unittest.TestCase):
def setUp(self) -> None:
pass

def run_example(self, filename, flow_name, context):
rows = get_dict_from_csv(filename)
parser = Parser(rows=rows, flow_name=flow_name)
parser = Parser(RapidProContainer(), rows=rows, flow_name=flow_name)
parser.parse()
output_flow = parser.container.render()
output_flow = parser.flow_container.render()
# print(json.dumps(output_flow, indent=2))

with open("tests/output/all_test_flows.json", 'r') as file:
Expand Down

0 comments on commit 0494684

Please sign in to comment.