diff --git a/docs/user_manual/components.md b/docs/user_manual/components.md index 86e0a714a..17517ccaa 100644 --- a/docs/user_manual/components.md +++ b/docs/user_manual/components.md @@ -675,13 +675,13 @@ Valid combinations of `power_sigma`, `p_sigma` and `q_sigma` are: | `power_sigma` | `p_sigma` | `q_sigma` | result | | :-----------: | :-------: | :-------: | :------: | -| x | x | x | ✔ | -| x | x | | ❌ | -| x | | x | ❌ | -| x | | | ✔ | -| | x | x | ✔ | -| | x | | ❌ | -| | | x | ❌ | +| ✔ | ✔ | ✔ | ✔ | +| ✔ | ✔ | | ❌ | +| ✔ | | ✔ | ❌ | +| ✔ | | | ✔ | +| | ✔ | ✔ | ✔ | +| | ✔ | | ❌ | +| | | ✔ | ❌ | | | | | ❌ | ```{note} diff --git a/src/power_grid_model/validation/errors.py b/src/power_grid_model/validation/errors.py index 840f1794e..17d8210cb 100644 --- a/src/power_grid_model/validation/errors.py +++ b/src/power_grid_model/validation/errors.py @@ -472,6 +472,14 @@ class FaultPhaseError(MultiFieldValidationError): _message = "The fault phase is not applicable to the corresponding fault type for {n} {objects}." +class PQSigmaPairError(MultiFieldValidationError): + """ + The combination of p_sigma and q_sigma is not valid. They should be both present or both absent. + """ + + _message = "The combination of p_sigma and q_sigma is not valid for {n} {objects}." + + class InvalidAssociatedEnumValueError(MultiFieldValidationError): """ The value is not a valid value in combination with the other specified attributes. diff --git a/src/power_grid_model/validation/rules.py b/src/power_grid_model/validation/rules.py index 6f603b4a2..c7a06967a 100644 --- a/src/power_grid_model/validation/rules.py +++ b/src/power_grid_model/validation/rules.py @@ -53,7 +53,6 @@ InvalidIdError, MissingValueError, MultiComponentNotUniqueError, - MultiFieldValidationError, NotBetweenError, NotBetweenOrAtError, NotBooleanError, @@ -63,6 +62,7 @@ NotLessOrEqualError, NotLessThanError, NotUniqueError, + PQSigmaPairError, SameValueError, TransformerClockError, TwoValuesZeroError, @@ -754,12 +754,7 @@ def all_finite(data: SingleDataset, exceptions: dict[ComponentType, list[str]] | return errors -def none_missing( - data: SingleDataset, - component: ComponentType, - fields: list[str | list[str]] | str | list[str], - index: int = 0, -) -> list[MissingValueError]: +def none_missing(data: SingleDataset, component: ComponentType, fields: str | list[str]) -> list[MissingValueError]: """ Check that for all records of a particular type of component, the values in the 'fields' columns are not NaN. Returns an empty list on success, or a list containing a single error object on failure. 
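As an aside to the `power_sigma`/`p_sigma`/`q_sigma` table above and the new `PQSigmaPairError`, the sketch below shows how a mismatched `(p_sigma, q_sigma)` pair surfaces when the rule is applied directly to a minimal dataset. It assumes `valid_p_q_sigma` is importable from `power_grid_model.validation.rules`, the same way the unit tests in this change call sibling rules; it is an illustration, not part of this change set.

```python
import numpy as np

from power_grid_model import initialize_array
from power_grid_model.validation.rules import valid_p_q_sigma

# Two sym_power_sensor records: id 1 has both sigmas, id 2 only p_sigma.
sensor = initialize_array("input", "sym_power_sensor", 2)
sensor["id"] = [1, 2]
sensor["p_sigma"] = [1.0e4, 1.0e4]
sensor["q_sigma"] = [1.0e4, np.nan]

errors = valid_p_q_sigma({"sym_power_sensor": sensor}, "sym_power_sensor")
print(errors)  # expected: a single PQSigmaPairError listing id 2
```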
@@ -777,23 +772,21 @@ def none_missing( if isinstance(fields, str): fields = [fields] for field in fields: - if isinstance(field, list): - field = field[0] nan = _nan_type(component, field) if np.isnan(nan): - invalid = np.isnan(data[component][field][index]) + invalid = np.isnan(data[component][field]) else: - invalid = np.equal(data[component][field][index], nan) + invalid = np.equal(data[component][field], nan) if invalid.any(): - if isinstance(invalid, np.ndarray): - invalid = np.any(invalid) + # handle asymmetric values + invalid = np.any(invalid, axis=tuple(range(1, invalid.ndim))) ids = data[component]["id"][invalid].flatten().tolist() errors.append(MissingValueError(component, field, ids)) return errors -def valid_p_q_sigma(data: SingleDataset, component: ComponentType) -> list[MultiFieldValidationError]: +def valid_p_q_sigma(data: SingleDataset, component: ComponentType) -> list[PQSigmaPairError]: """ Check validity of the pair `(p_sigma, q_sigma)` for 'sym_power_sensor' and 'asym_power_sensor'. @@ -802,7 +795,7 @@ def valid_p_q_sigma(data: SingleDataset, component: ComponentType) -> list[Multi component: The component of interest, in this case only 'sym_power_sensor' or 'asym_power_sensor' Returns: - A list containing zero or one MultiFieldValidationError, listing the p_sigma and q_sigma mismatch. + A list containing zero or one PQSigmaPairError, listing the p_sigma and q_sigma mismatch. Note that with asymetric power sensors, partial assignment of p_sigma and q_sigma is also considered mismatch. """ errors = [] @@ -812,16 +805,18 @@ def valid_p_q_sigma(data: SingleDataset, component: ComponentType) -> list[Multi q_nan = np.isnan(q_sigma) p_inf = np.isinf(p_sigma) q_inf = np.isinf(q_sigma) - if p_sigma.ndim > 1: # if component == 'asym_power_sensor': - p_nan = p_nan.any(axis=-1) - q_nan = q_nan.any(axis=-1) - p_inf = p_inf.any(axis=-1) - q_inf = q_inf.any(axis=-1) mis_match = p_nan != q_nan - mis_match |= np.logical_or(p_inf, q_inf) + mis_match |= np.logical_xor(p_inf, q_inf) # infinite sigmas are supported if they are both infinite + if p_sigma.ndim > 1: # if component == 'asym_power_sensor': + mis_match = mis_match.any(axis=-1) + mis_match |= np.logical_xor(p_nan.any(axis=-1), p_nan.all(axis=-1)) + mis_match |= np.logical_xor(q_nan.any(axis=-1), q_nan.all(axis=-1)) + mis_match |= np.logical_xor(p_inf.any(axis=-1), p_inf.all(axis=-1)) + mis_match |= np.logical_xor(q_inf.any(axis=-1), q_inf.all(axis=-1)) + if mis_match.any(): ids = data[component]["id"][mis_match].flatten().tolist() - errors.append(MultiFieldValidationError(component, ["p_sigma", "q_sigma"], ids)) + errors.append(PQSigmaPairError(component, ["p_sigma", "q_sigma"], ids)) return errors diff --git a/src/power_grid_model/validation/validation.py b/src/power_grid_model/validation/validation.py index 69baa895f..fd2a16255 100644 --- a/src/power_grid_model/validation/validation.py +++ b/src/power_grid_model/validation/validation.py @@ -14,7 +14,6 @@ import copy from collections.abc import Sized as ABCSized from itertools import chain -from typing import cast import numpy as np @@ -247,40 +246,26 @@ def validate_ids(update_data: SingleDataset, input_data: SingleDataset) -> list[ def _process_power_sigma_and_p_q_sigma( data: SingleDataset, sensor: ComponentType, - required_list: dict[ComponentType | str, list[str | list[str]]], ) -> None: """ Helper function to process the required list when both `p_sigma` and `q_sigma` exist and valid but `power_sigma` is missing. 
The field `power_sigma` is set to the norm of - `p_sigma` and `q_sigma`in this case. Happens only on proxy data (not the original data). + `p_sigma` and `q_sigma` in this case. Happens only on proxy data (not the original data). However, note that this value is eventually not used in the calculation. """ - - def _check_sensor_in_data(_data, _sensor): - return _sensor in _data and isinstance(_data[_sensor], np.ndarray) - - def _contains_p_q_sigma(_sensor_data): - return "p_sigma" in _sensor_data.dtype.names and "q_sigma" in _sensor_data.dtype.names - - def _process_power_sigma_in_list(_sensor_mask, _power_sigma, _p_sigma, _q_sigma): - _mask = np.logical_not(np.logical_or(np.isnan(_p_sigma), np.isnan(_q_sigma))) - if _power_sigma.ndim < _mask.ndim: - _mask = np.any(_mask, axis=tuple(range(_power_sigma.ndim, _mask.ndim))) - - for sublist, should_remove in zip(_sensor_mask, _mask): - if should_remove and "power_sigma" in sublist: - sublist = cast(list[str], sublist) - sublist.remove("power_sigma") - - if _check_sensor_in_data(data, sensor): + if sensor in data: sensor_data = data[sensor] - sensor_mask = required_list[sensor] - if _contains_p_q_sigma(sensor_data): - p_sigma = sensor_data["p_sigma"] - q_sigma = sensor_data["q_sigma"] - power_sigma = sensor_data["power_sigma"] + power_sigma = sensor_data["power_sigma"] + p_sigma = sensor_data["p_sigma"] + q_sigma = sensor_data["q_sigma"] + + # virtual patch to handle missing power_sigma + asym_axes = tuple(range(sensor_data.ndim, p_sigma.ndim)) + mask = np.logical_and(np.isnan(power_sigma), np.any(np.logical_not(np.isnan(p_sigma)), axis=asym_axes)) + power_sigma[mask] = np.nansum(p_sigma[mask], axis=asym_axes) - _process_power_sigma_in_list(sensor_mask, power_sigma, p_sigma, q_sigma) + mask = np.logical_and(np.isnan(power_sigma), np.any(np.logical_not(np.isnan(q_sigma)), axis=asym_axes)) + power_sigma[mask] = np.nansum(q_sigma[mask], axis=asym_axes) def validate_required_values( @@ -298,7 +283,7 @@ def validate_required_values( An empty list if all required data is available, or a list of MissingValueErrors. """ # Base - required: dict[ComponentType | str, list[str | list[str]]] = {"base": ["id"]} + required: dict[ComponentType | str, list[str]] = {"base": ["id"]} # Nodes required["node"] = required["base"] + ["u_rated"] @@ -382,12 +367,7 @@ def validate_required_values( required["asym_voltage_sensor"] = required["voltage_sensor"].copy() # Different requirements for individual sensors. Avoid shallow copy. for sensor_type in ("sym_power_sensor", "asym_power_sensor"): - try: - required[sensor_type] = [ - required["power_sensor"].copy() for _ in range(data[sensor_type].shape[0]) # type: ignore - ] - except KeyError: - pass + required[sensor_type] = required["power_sensor"].copy() # Faults required["fault"] = required["base"] + ["fault_object"] @@ -404,13 +384,13 @@ def validate_required_values( required["line"] += ["r0", "x0", "c0", "tan0"] required["shunt"] += ["g0", "b0"] - _process_power_sigma_and_p_q_sigma(data, ComponentType.sym_power_sensor, required) - _process_power_sigma_and_p_q_sigma(data, ComponentType.asym_power_sensor, required) + _process_power_sigma_and_p_q_sigma(data, ComponentType.sym_power_sensor) + _process_power_sigma_and_p_q_sigma(data, ComponentType.asym_power_sensor) return _validate_required_in_data(data, required) -def _validate_required_in_data(data, required): +def _validate_required_in_data(data: SingleDataset, required: dict[ComponentType | str, list[str]]): """ Checks if all required data is available. 
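To make the axis handling in `_process_power_sigma_and_p_q_sigma` above more concrete, here is a standalone NumPy sketch of the masking step for an asymmetric sensor, where `power_sigma` is one value per sensor but `p_sigma` is one value per sensor per phase. The array values and variable names are illustrative only; the real code additionally repeats the step for `q_sigma` and operates on the structured sensor array.

```python
import numpy as np

power_sigma = np.array([np.nan, np.nan, 1.0e9])  # one entry per sensor
p_sigma = np.array(
    [
        [1.0e4, 1.0e4, 1.0e4],     # sensor 0: p_sigma fully specified
        [np.nan, np.nan, np.nan],  # sensor 1: p_sigma absent
        [np.nan, np.nan, np.nan],  # sensor 2: p_sigma absent, power_sigma given
    ]
)

# Collapse the extra (phase) axis, mirroring tuple(range(sensor_data.ndim, p_sigma.ndim)).
asym_axes = tuple(range(power_sigma.ndim, p_sigma.ndim))  # -> (1,)

# Patch power_sigma only where it is missing but p_sigma carries information.
mask = np.isnan(power_sigma) & np.any(~np.isnan(p_sigma), axis=asym_axes)
power_sigma[mask] = np.nansum(p_sigma[mask], axis=asym_axes)

print(power_sigma)  # approximately [3.e+04  nan  1.e+09]: only sensor 0 received a proxy value
```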
@@ -429,25 +409,14 @@ def is_valid_component(data, component): and isinstance(data[component], ABCSized) ) - def is_nested_list(items): - return isinstance(items, list) and all(isinstance(i, list) for i in items) - - def process_nested_items(component, items, data, results): - for index, item in enumerate(sublist for sublist in items): - if index < len(data[component]): - results.append(_none_missing(data, component, item, index)) - - results = [] + results: list[MissingValueError] = [] for component in data: if is_valid_component(data, component): items = required.get(component, []) - if is_nested_list(items): - process_nested_items(component, items, data, results) - else: - results.append(_none_missing(data, component, items, 0)) + results += _none_missing(data, component, items) - return list(chain(*results)) + return results def validate_values(data: SingleDataset, calculation_type: CalculationType | None = None) -> list[ValidationError]: diff --git a/tests/unit/validation/test_rules.py b/tests/unit/validation/test_rules.py index ff22676e0..18cbd37c5 100644 --- a/tests/unit/validation/test_rules.py +++ b/tests/unit/validation/test_rules.py @@ -2,12 +2,14 @@ # # SPDX-License-Identifier: MPL-2.0 -from enum import IntEnum +from unittest import mock import numpy as np import pytest from power_grid_model import ComponentType, LoadGenType, initialize_array, power_grid_meta_data +from power_grid_model._core.dataset_definitions import ComponentTypeLike +from power_grid_model._utils import compatibility_convert_row_columnar_dataset from power_grid_model.enum import Branch3Side, BranchSide, FaultPhase, FaultType from power_grid_model.validation.errors import ( ComparisonError, @@ -15,6 +17,7 @@ InfinityError, InvalidEnumValueError, InvalidIdError, + MissingValueError, MultiComponentNotUniqueError, NotBetweenError, NotBetweenOrAtError, @@ -471,9 +474,97 @@ def test_all_finite(): assert InfinityError("bar_test", "bar", [6]) in errors -@pytest.mark.skip("No unit tests available for none_missing") def test_none_missing(): - raise NotImplementedError(f"Unit test for {none_missing}") + dfoo = [("id", "i4"), ("foo", "f8"), ("bar", "(3,)f8"), ("baz", "i4"), ("bla", "i1"), ("ok", "i1")] + dbar = [("id", "i4"), ("foobar", "f8")] + + def _mock_nan_type(component: ComponentTypeLike, field: str): + return { + "foo_test": { + "id": np.iinfo("i4").min, + "foo": np.nan, + "bar": np.nan, + "baz": np.iinfo("i4").min, + "bla": np.iinfo("i1").min, + "ok": -1, + }, + "bar_test": {"id": np.iinfo("i4").min, "foobar": np.nan}, + }[component][field] + + with mock.patch("power_grid_model.validation.rules._nan_type", _mock_nan_type): + valid = { + "foo_test": np.array( + [ + (1, 3.1, (4.2, 4.3, 4.4), 1, 6, 0), + (2, 5.2, (3.3, 3.4, 3.5), 2, 7, 0), + (3, 7.3, (8.4, 8.5, 8.6), 3, 8, 0), + ], + dtype=dfoo, + ), + "bar_test": np.array([(4, 0.4), (5, 0.5)], dtype=dbar), + } + errors = none_missing(data=valid, component="foo_test", fields=["foo", "bar", "baz"]) + assert len(errors) == 0 + + invalid = { + "foo_test": np.array( + [ + (1, np.nan, (np.nan, np.nan, np.nan), np.iinfo("i4").min, np.iinfo("i1").min, 0), + (2, np.nan, (4.2, 4.3, 4.4), 3, 7, 0), + (3, 7.3, (np.nan, np.nan, np.nan), 5, 8, 0), + (4, 8.3, (8.4, 8.5, 8.6), np.iinfo("i4").min, 9, 0), + (5, 9.3, (9.4, 9.5, 9.6), 6, np.iinfo("i1").min, 0), + (6, 10.3, (10.4, 10.5, 10.6), 7, 11, 0), + ], + dtype=dfoo, + ), + "bar_test": np.array([(4, 0.4), (5, np.nan)], dtype=dbar), + } + + errors = none_missing(data=invalid, component="foo_test", fields="foo") + assert 
len(errors) == 1 + assert errors == [MissingValueError("foo_test", "foo", [1, 2])] + + errors = none_missing(data=invalid, component="foo_test", fields="bar") + assert len(errors) == 1 + assert errors == [MissingValueError("foo_test", "bar", [1, 3])] + + errors = none_missing(data=invalid, component="foo_test", fields="baz") + assert len(errors) == 1 + assert errors == [MissingValueError("foo_test", "baz", [1, 4])] + + errors = none_missing(data=invalid, component="foo_test", fields="bla") + assert len(errors) == 1 + assert errors == [MissingValueError("foo_test", "bla", [1, 5])] + + errors = none_missing(data=invalid, component="foo_test", fields="ok") + assert len(errors) == 0 + + for fields in (("foo", "bar", "baz", "bla", "ok"), ("foo", "bar"), ()): + errors = none_missing(data=invalid, component="foo_test", fields=fields) + expected = [] + for field in fields: + expected += none_missing(data=invalid, component="foo_test", fields=field) + assert errors == expected + + assert none_missing( + data={ + "foo_test": { + "id": invalid["foo_test"]["id"], + "foo": invalid["foo_test"]["foo"], + "bar": invalid["foo_test"]["bar"], + "baz": invalid["foo_test"]["baz"], + "bla": invalid["foo_test"]["bla"], + "ok": invalid["foo_test"]["ok"], + }, + "bar_test": { + "id": invalid["bar_test"]["id"], + "foobar": invalid["bar_test"]["foobar"], + }, + }, + component="foo_test", + fields=("foo", "bar", "baz", "bla", "ok"), + ) == none_missing(data=invalid, component="foo_test", fields=("foo", "bar", "baz", "bla", "ok")) @pytest.mark.skip("No unit tests available for all_valid_clocks") diff --git a/tests/unit/validation/test_validation_functions.py b/tests/unit/validation/test_validation_functions.py index 6349106a9..3c1a00fec 100644 --- a/tests/unit/validation/test_validation_functions.py +++ b/tests/unit/validation/test_validation_functions.py @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: MPL-2.0 +import copy from itertools import product from unittest.mock import ANY, MagicMock, patch @@ -21,14 +22,15 @@ InvalidIdError, MissingValueError, MultiComponentNotUniqueError, - MultiFieldValidationError, NotUniqueError, + PQSigmaPairError, UnsupportedTransformerRegulationError, ) from power_grid_model.validation.validation import ( assert_valid_data_structure, validate_generic_power_sensor, validate_ids, + validate_input_data, validate_required_values, validate_unique_ids_across_components, validate_values, @@ -479,67 +481,96 @@ def test_validate_values__infinite_sigmas(sensor_type, parameter): [ ( "sym_power_sensor", - [[np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[np.nan, np.nan], [], []], [InvalidIdError, NotUniqueError], ), ( "sym_power_sensor", - [[0.1, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [[0.1, np.nan], [], []], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "sym_power_sensor", - [[np.nan, 0.1], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [[np.nan, 0.1], [], []], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "sym_power_sensor", - [[0.1, 0.1], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[0.1, 0.1], [], []], [InvalidIdError, NotUniqueError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]], + [[], [[np.nan, np.nan, np.nan]] * 3, [[np.nan, np.nan, np.nan]] * 3], [InvalidIdError, NotUniqueError], ), ( "asym_power_sensor", - [[np.nan, 
np.nan], [0.1, np.nan, 0.1], [np.nan, 0.1, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [ + [], + [[0.1, np.nan, 0.1], [0.1, np.nan, 0.1], [0.1, 0.1, 0.1]], + [[np.nan, 0.1, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [0.1, np.nan, np.nan], [np.nan, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [[], [[0.1, np.nan, np.nan]] * 3, [[np.nan, np.nan, np.nan]] * 3], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [np.nan, np.nan, np.nan], [0.1, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [[], [[np.nan, np.nan, np.nan]] * 3, [[0.1, np.nan, np.nan]] * 3], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [0.1, 0.1, 0.1], [np.nan, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [ + [], + [[0.1, 0.1, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [[np.nan, np.nan, 0.1], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [np.nan, np.nan, np.nan], [0.1, 0.1, 0.1]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [ + [], + [[0.1, 0.1, 0.1], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [[np.nan, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [np.nan, np.nan, np.nan], [0.1, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [ + [], + [[np.nan, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [[0.1, 0.1, 0.1], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [0.1, np.nan, np.nan], [0.1, np.nan, np.nan]], - [InvalidIdError, NotUniqueError, MultiFieldValidationError], + [ + [], + [[np.nan, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [[0.1, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], ), ( "asym_power_sensor", - [[np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [ + [], + [[0.1, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + [[0.1, np.nan, np.nan], [0.1, 0.1, 0.1], [0.1, 0.1, 0.1]], + ], + [InvalidIdError, NotUniqueError, PQSigmaPairError], + ), + ( + "asym_power_sensor", + [[], [[0.1, 0.1, 0.1]] * 3, [[0.1, 0.1, 0.1]] * 3], [InvalidIdError, NotUniqueError], ), ], @@ -573,19 +604,19 @@ def arbitrary_fill(array, sensor_type, values): ([[np.nan, np.nan], [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]], [InvalidIdError]), ( [[0.1, np.nan], [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]], - [InvalidIdError, MultiFieldValidationError], + [InvalidIdError, PQSigmaPairError], ), ( [[np.nan, np.nan], [[np.nan, 0.1, np.nan], [np.nan, np.nan, np.nan]]], - [InvalidIdError, MultiFieldValidationError], + [InvalidIdError, PQSigmaPairError], ), ( [[np.nan, np.nan], [[np.nan, 0.1, np.nan], [np.nan, 0.1, np.nan]]], - [InvalidIdError, MultiFieldValidationError], + [InvalidIdError, PQSigmaPairError], ), ( [[0.1, 0.1], [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]], - [InvalidIdError, MultiFieldValidationError], + [InvalidIdError, PQSigmaPairError], ), ([[0.1, 0.1], [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]], [InvalidIdError]), 
([[np.nan, np.nan], [[0.1, 0.1, 0.1], [0.1, 0.1, 0.1]]], [InvalidIdError]), @@ -635,8 +666,8 @@ def single_component_twice_data(): data = single_component_twice_data() all_errors = validate_values(data) for error in all_errors: - assert any(isinstance(error, error_type) for error_type in [InvalidIdError, MultiFieldValidationError]) - if isinstance(error, MultiFieldValidationError): + assert any(isinstance(error, error_type) for error_type in [InvalidIdError, PQSigmaPairError]) + if isinstance(error, PQSigmaPairError): assert error.ids[0] == 789 @@ -745,6 +776,22 @@ def test_power_sigma_or_p_q_sigma(): sym_power_sensor["p_sigma"] = [1e4, np.nan, 1e4] sym_power_sensor["q_sigma"] = [1e9, np.nan, 1e9] + # power sensor + asym_power_sensor = initialize_array("input", "asym_power_sensor", 4) + asym_power_sensor["id"] = [66, 77, 88, 99] + asym_power_sensor["measured_object"] = [2, 4, 9, 9] + asym_power_sensor["measured_terminal_type"] = [ + MeasuredTerminalType.branch_from, + MeasuredTerminalType.load, + MeasuredTerminalType.load, + MeasuredTerminalType.load, + ] + asym_power_sensor["p_measured"] = [[1e6, 1e6, 1e6], [-1e6, -1e6, -1e6], [-1e6, -1e6, -1e6], [-1e6, -1e6, -1e6]] + asym_power_sensor["q_measured"] = [[1e6, 1e6, 1e6], [-1e6, -1e6, -1e6], [-1e6, -1e6, -1e6], [-1e6, -1e6, -1e6]] + asym_power_sensor["power_sigma"] = [np.nan, 1e9, 1e9, 1e9] + asym_power_sensor["p_sigma"] = [[1e4, 1e4, 1e4], [np.nan, np.nan, np.nan], [1e4, 1e4, 1e4], [1e4, 1e4, 1e4]] + asym_power_sensor["q_sigma"] = [[1e9, 1e9, 1e9], [np.nan, np.nan, np.nan], [1e9, 1e4, 1e4], [1e9, 1e4, 1e4]] + # all input_data = { "node": node, @@ -753,10 +800,72 @@ def test_power_sigma_or_p_q_sigma(): "source": source, "sym_voltage_sensor": voltage_sensor, "sym_power_sensor": sym_power_sensor, + "asym_power_sensor": asym_power_sensor, } assert_valid_input_data(input_data=input_data, calculation_type=CalculationType.state_estimation) + np.testing.assert_array_equal(sym_power_sensor["power_sigma"], [np.nan, 1e9, 1e9]) + np.testing.assert_array_equal(sym_power_sensor["p_sigma"], [1e4, np.nan, 1e4]) + np.testing.assert_array_equal(sym_power_sensor["q_sigma"], [1e9, np.nan, 1e9]) + np.testing.assert_array_equal(asym_power_sensor["power_sigma"], [np.nan, 1e9, 1e9, 1e9]) + np.testing.assert_array_equal( + asym_power_sensor["p_sigma"], [[1e4, 1e4, 1e4], [np.nan, np.nan, np.nan], [1e4, 1e4, 1e4], [1e4, 1e4, 1e4]] + ) + np.testing.assert_array_equal( + asym_power_sensor["q_sigma"], [[1e9, 1e9, 1e9], [np.nan, np.nan, np.nan], [1e9, 1e4, 1e4], [1e9, 1e4, 1e4]] + ) + + # bad weather + bad_input_data = copy.deepcopy(input_data) + bad_sym_power_sensor = bad_input_data["sym_power_sensor"] + bad_sym_power_sensor["power_sigma"] = [np.nan, np.nan, 1e9] + bad_sym_power_sensor["p_sigma"] = [np.nan, np.nan, 1e4] + bad_sym_power_sensor["q_sigma"] = [np.nan, 1e9, np.nan] + errors = validate_input_data(input_data=bad_input_data, calculation_type=CalculationType.state_estimation) + assert len(errors) == 2 + assert errors == [ + MissingValueError("sym_power_sensor", "power_sigma", [6]), + PQSigmaPairError("sym_power_sensor", ("p_sigma", "q_sigma"), [7, 8]), + ] + + np.testing.assert_array_equal(bad_sym_power_sensor["power_sigma"], [np.nan, np.nan, 1e9]) + np.testing.assert_array_equal(bad_sym_power_sensor["p_sigma"], [np.nan, np.nan, 1e4]) + np.testing.assert_array_equal(bad_sym_power_sensor["q_sigma"], [np.nan, 1e9, np.nan]) + + # bad weather + bad_input_data = copy.deepcopy(input_data) + bad_asym_power_sensor = bad_input_data["asym_power_sensor"] + 
bad_asym_power_sensor["power_sigma"] = [np.nan, np.nan, 1e9, np.nan] + bad_asym_power_sensor["p_sigma"] = [ + [np.nan, np.nan, np.nan], + [np.nan, np.nan, np.nan], + [1e4, np.nan, np.nan], + [1e4, np.nan, np.nan], + ] + bad_asym_power_sensor["q_sigma"] = [ + [np.nan, np.nan, np.nan], + [1e9, 1e9, 1e9], + [np.nan, 1e4, 1e4], + [np.nan, 1e4, 1e4], + ] + errors = validate_input_data(input_data=bad_input_data, calculation_type=CalculationType.state_estimation) + assert len(errors) == 2 + assert errors == [ + MissingValueError("asym_power_sensor", "power_sigma", [66]), + PQSigmaPairError("asym_power_sensor", ("p_sigma", "q_sigma"), [77, 88, 99]), + ] + + np.testing.assert_array_equal(bad_asym_power_sensor["power_sigma"], [np.nan, np.nan, 1e9, np.nan]) + np.testing.assert_array_equal( + bad_asym_power_sensor["p_sigma"], + [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [1e4, np.nan, np.nan], [1e4, np.nan, np.nan]], + ) + np.testing.assert_array_equal( + bad_asym_power_sensor["q_sigma"], + [[np.nan, np.nan, np.nan], [1e9, 1e9, 1e9], [np.nan, 1e4, 1e4], [np.nan, 1e4, 1e4]], + ) + def test_all_default_values(): """
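To round off the new tests, a small sketch of the asymmetric case the docstring calls out: per-phase partial assignment of `p_sigma`/`q_sigma` now counts as a mismatch. As before, the direct call to `valid_p_q_sigma` and its import path are assumptions made for illustration; the production entry points are `validate_input_data`/`assert_valid_input_data`, as used in the tests above.

```python
import numpy as np

from power_grid_model import initialize_array
from power_grid_model.validation.rules import valid_p_q_sigma

sensor = initialize_array("input", "asym_power_sensor", 2)
sensor["id"] = [10, 11]
sensor["p_sigma"] = [[1.0e4, 1.0e4, 1.0e4], [1.0e4, np.nan, 1.0e4]]  # id 11: phase b missing
sensor["q_sigma"] = [[1.0e4, 1.0e4, 1.0e4], [1.0e4, np.nan, 1.0e4]]

errors = valid_p_q_sigma({"asym_power_sensor": sensor}, "asym_power_sensor")
print(errors)  # expected: a single PQSigmaPairError listing id 11
```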