IVS-315 Quadratic complexity in GEM003 (#337)
* Fix values unique|identical performance

* Fix GRF

* not any, not all, but all without None
rw-bsi committed Jan 16, 2025
1 parent e01ae08 commit 7de30df
Showing 4 changed files with 29 additions and 41 deletions.
2 changes: 1 addition & 1 deletion features/GEM003_Unique-representation-identifier.feature
@@ -12,4 +12,4 @@ The rule verifies that Shape Representation identifier is unique within the prod
 Given Its attribute Representations
 Given its attribute RepresentationIdentifier
 
-Then The values must be unique
+Then The values must be unique at depth 1
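The `at depth 1` suffix makes the uniqueness check apply to each instance's own list of RepresentationIdentifier values, rather than relying on a special case in the step dispatcher. A minimal sketch of the check with made-up data (the helper name `find_duplicates` is illustrative, not part of the repository):

def find_duplicates(values):
    # Set-based scan: linear time, unlike a nested pairwise comparison.
    seen = set()
    return [x for x in values if x in seen or seen.add(x)]

# One list of flat identifiers per product instance (depth 1).
per_product = [
    ["Body", "Axis"],          # all unique -> passes
    ["Body", "Body", "Axis"],  # duplicate "Body" -> fails
]
for values in per_product:
    print(find_duplicates(values) or "unique")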
2 changes: 1 addition & 1 deletion features/GRF001_Identical-coordinate-operations.feature
@@ -15,4 +15,4 @@ Currently, for GRF001, this is only permitted if (1) there is a single context o
 Given Its Attribute HasCoordinateOperation
 Given Its values excluding SourceCRS
 
-Then The values must be identical at depth 1
+Then The values must be identical at depth 2
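`at depth 2` pushes the check one nesting level deeper than GEM003's: the dispatcher descends until it holds a list whose members are flat lists, then requires those members to be equal wholesale. A hedged illustration with invented data:

def all_identical(values):
    return all(values[0] == v for v in values)

# Depth 2: per context, a list of coordinate-operation value lists,
# compared as whole lists.
per_context = [
    [["WGS84", 0.0], ["WGS84", 0.0]],  # identical -> passes
    [["WGS84", 0.0], ["WGS84", 1.0]],  # differs   -> fails
]
for ops in per_context:
    print(all_identical(ops))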
46 changes: 17 additions & 29 deletions features/steps/thens/values.py
@@ -12,9 +12,9 @@
 
 from parse_type import TypeBuilder
 from utils import misc
-register_type(unique_or_identical=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("be unique", "be identical"))))) # todo @gh remove 'be' from enum values
-register_type(value_or_type=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("value", "type"))))) # todo @gh remove 'be' from enum values
-register_type(values_or_types=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("values", "types"))))) # todo @gh remove 'be' from enum values
+register_type(unique_or_identical=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("unique", "identical")))))
+register_type(value_or_type=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("value", "type")))))
+register_type(values_or_types=TypeBuilder.make_enum(dict(map(lambda x: (x, x), ("values", "types")))))
 
 def apply_is_a(inst):
     if isinstance(inst, (list, tuple)):
@@ -59,32 +59,20 @@ def step_impl(context, inst, constraint, num):
         yield ValidationOutcome(inst=inst, expected= constraint, observed = f"Not {constraint}", severity=OutcomeSeverity.ERROR)
 
 
-@gherkin_ifc.step("The {value} must {constraint:unique_or_identical}")
-@gherkin_ifc.step("The values must {constraint:unique_or_identical}")
-@gherkin_ifc.step("The values must {constraint:unique_or_identical} at depth 1")
-def step_impl(context, inst, constraint, num=None):
-
-    within_model = getattr(context, 'within_model', False)
-
-    #to account for order-dependency of removing characters from constraint
-    while constraint.startswith('be ') or constraint.startswith('in '):
-        constraint = constraint[3:]
-
-    instances = [context.instances] if within_model else context.instances
-
-    if constraint in ('identical', 'unique'):
-        for i, values in enumerate(instances):
-            if not values:
-                continue
-            if constraint == 'identical':
-                if not all([values[0] == i for i in values]):
-                    yield ValidationOutcome(inst=inst, expected= constraint, observed = f"Not {constraint}", severity=OutcomeSeverity.ERROR)
-            if constraint == 'unique':
-                seen = set()
-                duplicates = [x for x in values if x in seen or seen.add(x)]
-                if not duplicates:
-                    continue
-                yield ValidationOutcome(inst=inst, expected= constraint, observed = f"Not {constraint}", severity=OutcomeSeverity.ERROR)
+@gherkin_ifc.step("The values must be {constraint:unique_or_identical} at depth {depth_level:d}")
+def step_impl(context, inst, constraint, depth_level=None):
+    if not inst:
+        return
+
+    if constraint == 'identical':
+        if not all([inst[0] == i for i in inst]):
+            yield ValidationOutcome(inst=inst, expected= constraint, observed = inst, severity=OutcomeSeverity.ERROR)
+
+    if constraint == 'unique':
+        seen = set()
+        duplicates = [x for x in inst if x in seen or seen.add(x)]
+        if duplicates:
+            yield ValidationOutcome(inst=inst, expected= constraint, observed = inst, severity=OutcomeSeverity.ERROR)
 
 
 def recursive_unpack_value(item):
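With `be` moved out of the enum and into the step text, the registered `parse_type` enums only need to match the bare words `unique` and `identical`. A self-contained sketch of how such an enum type matches a step string, using the `parse` library directly rather than behave's `register_type` (the step text is illustrative):

from parse import parse
from parse_type import TypeBuilder

# Same construction as in the diff: each word maps to itself.
unique_or_identical = TypeBuilder.make_enum(
    dict(map(lambda x: (x, x), ("unique", "identical")))
)

result = parse(
    "The values must be {constraint:unique_or_identical} at depth {depth_level:d}",
    "The values must be unique at depth 1",
    dict(unique_or_identical=unique_or_identical),
)
print(result["constraint"], result["depth_level"])  # unique 1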
20 changes: 10 additions & 10 deletions features/steps/validation_handling.py
@@ -1,5 +1,6 @@
 import functools
 import json
+import re
 from utils import misc
 from functools import wraps
 import ifcopenshell
@@ -148,9 +149,9 @@ def handle_given(context, fn, **kwargs):
         pass # (1) -> context.applicable is set within the function ; replace this with a simple True/False and set applicability here?
     else:
         context._push('attribute') # for attribute stacking
-        if 'at depth 1' in context.step.name:
-            #todo @gh develop a more standardize approach
-            context.instances = list(filter(None, map_given_state(context.instances, fn, context, depth=1, **kwargs)))
+        depth = next(map(int, re.findall('at depth (\d+)$', context.step.name)), 0)
+        if depth:
+            context.instances = list(filter(None, map_given_state(context.instances, fn, context, depth=depth, **kwargs)))
         else:
             context.instances = map_given_state(context.instances, fn, context, **kwargs)
 
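The regex replaces the hard-coded `'at depth 1' in context.step.name` test: any trailing `at depth N` suffix is parsed into an integer, and the depth defaults to 0 when the suffix is absent. The expression can be checked in isolation:

import re

def step_depth(step_name):
    # First trailing "at depth N" match, or 0 if the step has no suffix.
    return next(map(int, re.findall(r"at depth (\d+)$", step_name)), 0)

print(step_depth("The values must be unique at depth 1"))     # 1
print(step_depth("The values must be identical at depth 2"))  # 2
print(step_depth("The values must be identical"))             # 0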
@@ -167,9 +168,8 @@ def is_nested(val):
     def should_apply(values, depth):
         if depth == 0:
             return not is_nested(values)
-        elif depth == 1:
-            return is_nested(values) and all(not is_nested(v) for v in values)
-        return False
+        else:
+            return is_nested(values) and all(should_apply(v, depth-1) for v in values if v is not None)
 
     if should_apply(values, depth):
         return None if values is None else apply_operation(fn, values, context, **kwargs)
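This is the change the commit message summarizes as `not any, not all, but all without None`: instead of special-casing depth 1 and returning False for anything deeper, `should_apply` recurses, and `None` entries are skipped rather than allowed to veto a whole level. A standalone sketch of the recursion:

def is_nested(val):
    return isinstance(val, (list, tuple))

def should_apply(values, depth):
    if depth == 0:
        return not is_nested(values)
    return is_nested(values) and all(
        should_apply(v, depth - 1) for v in values if v is not None
    )

print(should_apply("Body", 0))                  # True: flat value
print(should_apply(["Body", None, "Axis"], 1))  # True: None entries are skipped
print(should_apply([["a"], ["b", "c"]], 2))     # True: uniformly nested twice
print(should_apply([["a"], "b"], 2))            # False: mixed nesting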
@@ -275,9 +275,8 @@ def is_nested(val):
     def should_apply(items, depth):
         if depth == 0:
             return not is_nested(items)
-        elif depth == 1:
-            return is_nested(items) and all(not is_nested(v) for v in items)
-        return False
+        else:
+            return is_nested(items) and all(should_apply(v, depth-1) for v in items if v is not None)
 
     if context.is_global_rule:
         return apply_then_operation(fn, [items], context, current_path=None, **kwargs)
@@ -293,7 +292,8 @@ def should_apply(items, depth):
     # so we take note of the outcomes that already existed. This is necessary since we accumulate
     # outcomes per feature and no longer per scenario.
     num_preexisting_outcomes = len(context.gherkin_outcomes)
-    map_then_state(instances, fn, context, depth = 1 if 'at depth 1' in context.step.name.lower() else 0, **kwargs)
+    depth = next(map(int, re.findall('at depth (\d+)$', context.step.name)), 0)
+    map_then_state(instances, fn, context, depth = depth, **kwargs)
 
     # evokes behave error
     generate_error_message(context, [gherkin_outcome for gherkin_outcome in context.gherkin_outcomes[num_preexisting_outcomes:] if gherkin_outcome.severity in [OutcomeSeverity.WARNING, OutcomeSeverity.ERROR]])
