Fix silent OOB params post-optimization failure (#236)
* Make Qty raise more specific OOB exception

* Renamed exception to error

* Added extend_bounds arg to load_best method

* Made pmap better catch and raise OOB exceptions

* Made optimizer raise error if opt params are OOB

* Linting

* Created test to ensure that OOB optimizations are caught

* Updated changelog

* Linting

* Made test more general and robust

* Allow float-esque equality

* Changed/set extend_bounds defaults

* Use pytest.raises
YonatanGideoni authored Sep 5, 2022
1 parent 2d95723 commit f1ce409
Showing 8 changed files with 93 additions and 14 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -24,6 +24,7 @@ This Changelog tracks all past changes to this project as well as details about
 - `fixed` bug where changing a gate's name didn't change its ideal gate #233
 - `fixed` bug in Instruction where ideal gates weren't properly set #229
 - `fixed` wrong direction of rotations on bloch sphere #231
+- `fixed` error is raised if optimizer gives OOB results #235

## Version `1.4` - 23 Dec 2021

16 changes: 14 additions & 2 deletions c3/c3objs.py
@@ -9,6 +9,10 @@
 import copy
 
 
+class QuantityOOBException(ValueError):
+    pass
+
+
 class C3obj:
     """
     Represents an abstract object with parameters. To be inherited from.
@@ -281,8 +285,16 @@ def _set_value(self, val) -> None:
             2 * (tf.reshape(val, self.shape) * self.pref - self.offset) / self.scale - 1
         )
 
-        if np.any(tf.math.abs(tmp) > tf.constant(1.0, tf.float64)):
-            raise ValueError(
+        const_1 = tf.constant(1.0, tf.float64)
+        if np.any(
+            tf.math.logical_and(
+                tf.math.abs(tmp) > const_1,
+                tf.math.logical_not(
+                    tf.experimental.numpy.isclose(tf.math.abs(tmp), const_1)
+                ),
+            )
+        ):
+            raise QuantityOOBException(
             f"Value {num3str(val.numpy())}{self.unit} out of bounds for quantity with "
             f"min_val: {num3str(self.get_limits()[0])}{self.unit} and "
             f"max_val: {num3str(self.get_limits()[1])}{self.unit}",
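In effect, a value is now rejected only if its normalized magnitude exceeds 1 and is not within floating-point tolerance of 1, so a parameter sitting exactly on min_val or max_val no longer raises. A minimal sketch of the intended behavior (the quantity's values and unit are made up for illustration):

from c3.c3objs import Quantity, QuantityOOBException

amp = Quantity(value=0.5, min_val=0.0, max_val=1.0, unit="V")

amp.set_value(1.0)  # exactly on the bound: passes thanks to the isclose check
try:
    amp.set_value(1.5)  # genuinely out of bounds
except QuantityOOBException:  # subclasses ValueError, so existing handlers still work
    print("caught OOB value")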
17 changes: 16 additions & 1 deletion c3/optimizers/optimalcontrol.py
@@ -2,16 +2,22 @@
 
 import os
 import shutil
+
 import tensorflow as tf
 from typing import Callable, List
 
 from c3.optimizers.optimizer import Optimizer
+from c3.parametermap import ParameterMapOOBUpdateException
 from c3.utils.utils import log_setup
 
 from c3.libraries.algorithms import algorithms
 from c3.libraries.fidelities import fidelities
 
 
+class OptResultOOBError(Exception):
+    pass
+
+
 class OptimalControl(Optimizer):
     """
     Object that deals with the open loop optimal control.
@@ -179,7 +185,16 @@ def optimize_controls(self, setup_log: bool = True) -> None:
             )
         except KeyboardInterrupt:
             pass
-        self.load_best(self.logdir + "best_point_" + self.logname)
+
+        try:
+            self.load_best(
+                self.logdir + "best_point_" + self.logname, extend_bounds=False
+            )
+        except ParameterMapOOBUpdateException as e:
+            raise OptResultOOBError(
+                "The optimization resulted in some of the parameters being out of bounds."
+            ) from e
+
         self.end_log()
 
     @tf.function
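With this change, optimize_controls reloads the best point with extend_bounds=False and converts the resulting ParameterMapOOBUpdateException into an explicit OptResultOOBError instead of silently widening the bounds. A hedged usage sketch, assuming opt is an already-configured OptimalControl instance:

from c3.optimizers.optimalcontrol import OptResultOOBError

try:
    opt.optimize_controls()
except OptResultOOBError:
    # The optimizer's best point violates the declared parameter bounds;
    # either widen the Quantity bounds in the config or opt in explicitly:
    opt.load_best(opt.logdir + "best_point_" + opt.logname, extend_bounds=True)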
7 changes: 4 additions & 3 deletions c3/optimizers/optimizer.py
@@ -102,7 +102,7 @@ def set_created_by(self, config) -> None:
         """
         self.created_by = config
 
-    def load_best(self, init_point) -> None:
+    def load_best(self, init_point, extend_bounds=False) -> None:
         """
         Load a previous parameter point to start the optimization from. Legacy wrapper.
         Method moved to Parametermap.
@@ -111,9 +111,10 @@ def load_best(self, init_point, extend_bounds=False) -> None:
         ----------
         init_point : str
             File location of the initial point
+        extend_bounds: bool
+            Whether or not to allow the loaded optimal parameters' bounds to be extended if they exceed those specified.
         """
-        self.pmap.load_values(init_point)
+        self.pmap.load_values(init_point, extend_bounds=extend_bounds)
 
     def start_log(self) -> None:
         """
18 changes: 12 additions & 6 deletions c3/parametermap.py
@@ -5,12 +5,16 @@
 import copy
 import numpy as np
 import tensorflow as tf
-from c3.c3objs import Quantity, hjson_decode, hjson_encode
+from c3.c3objs import Quantity, hjson_decode, hjson_encode, QuantityOOBException
 from c3.signal.gates import Instruction
 from typing import Union
 from tensorflow.errors import InvalidArgumentError
 
 
+class ParameterMapOOBUpdateException(Exception):
+    pass
+
+
 class ParameterMap:
     """
     Collects information about control and model parameters and provides different
@@ -66,22 +70,24 @@ def __initialize_parameters(self) -> None:
     def update_parameters(self):
         self.__initialize_parameters()
 
-    def load_values(self, init_point):
+    def load_values(self, init_point, extend_bounds=False):
         """
         Load a previous parameter point to start the optimization from.
         Parameters
         ----------
         init_point : str
             File location of the initial point
+        extend_bounds: bool
+            Whether or not to allow the loaded parameters' bounds to be extended if they exceed those specified.
         """
         with open(init_point) as init_file:
             best = hjson.load(init_file, object_pairs_hook=hjson_decode)
 
         best_opt_map = best["opt_map"]
         init_p = best["optim_status"]["params"]
-        self.set_parameters(init_p, best_opt_map, extend_bounds=True)
+
+        self.set_parameters(init_p, best_opt_map, extend_bounds=extend_bounds)
 
     def store_values(self, path: str, optim_status=None) -> None:
         """
@@ -308,9 +314,9 @@ def set_parameters(
                 raise Exception(f"C3:ERROR:{key} not defined.") from ve
             try:
                 par.set_value(values[val_indx], extend_bounds=extend_bounds)
-            except (ValueError, InvalidArgumentError) as ve:
+            except (QuantityOOBException, InvalidArgumentError) as ve:
                 try:
-                    raise Exception(
+                    raise ParameterMapOOBUpdateException(
                         f"C3:ERROR:Trying to set {key} "
                         f"to value {values[val_indx]} "
                         f"but has to be within {par.offset} .."
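load_values now defaults to extend_bounds=False, so loading a stored point whose parameters exceed the current bounds raises instead of silently extending them. A small sketch, assuming pmap is an existing ParameterMap; the log path is hypothetical:

from c3.parametermap import ParameterMapOOBUpdateException

try:
    pmap.load_values("logs/best_point_open_loop.c3log")
except ParameterMapOOBUpdateException:
    # opt back in to the old behavior of widening the bounds
    pmap.load_values("logs/best_point_open_loop.c3log", extend_bounds=True)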
1 change: 0 additions & 1 deletion test/test_model_learning.py
@@ -54,7 +54,6 @@ def test_model_learning() -> None:
             if "xy_angle" in pulse.params:
                 pulse.params["xy_angle"] *= -1
 
-
     opt = ModelLearning(**cfg, pmap=exp.pmap)
     opt.set_exp(exp)
     opt.set_created_by(OPT_CONFIG_FILE_NAME)
45 changes: 45 additions & 0 deletions test/test_optimalcontrol.py
@@ -0,0 +1,45 @@
import numpy as np
import pytest

import c3.libraries.fidelities as fidelities
from c3.optimizers.optimalcontrol import OptimalControl, OptResultOOBError
from examples.single_qubit_experiment import create_experiment


@pytest.mark.integration
def test_raises_OOB_for_bad_optimizer() -> None:
    exp = create_experiment()
    exp.set_opt_gates(["rx90p[0]"])

    opt_gates = ["rx90p[0]"]
    gateset_opt_map = [
        [
            ("rx90p[0]", "d1", "carrier", "framechange"),
        ],
        [
            ("rx90p[0]", "d1", "gauss", "amp"),
        ],
        [
            ("rx90p[0]", "d1", "gauss", "freq_offset"),
        ],
    ]

    exp.pmap.set_opt_map(gateset_opt_map)

    def bad_alg(x_init, fun, *args, **kwargs):
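        # Mock algorithm: scale every parameter far out of its declared bounds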
        res = np.array([x * -99 for x in x_init])
        fun(res)
        return res

    opt = OptimalControl(
        fid_func=fidelities.unitary_infid_set,
        fid_subspace=["Q1"],
        pmap=exp.pmap,
        algorithm=bad_alg,
    )

    exp.set_opt_gates(opt_gates)
    opt.set_exp(exp)

    with pytest.raises(OptResultOOBError):
        opt.optimize_controls()
2 changes: 1 addition & 1 deletion test/test_parameter_map.py
@@ -256,7 +256,7 @@ def test_parameter_extend() -> None:
     Test the setting in optimizer format. Parameter is out of bounds for the
     original pmap and should be extended.
     """
-    pmap.load_values("test/sample_optim_log.c3log")
+    pmap.load_values("test/sample_optim_log.c3log", extend_bounds=True)
     np.testing.assert_almost_equal(
         pmap.get_parameter(("rx90p[0]", "d1", "gauss", "freq_offset")).numpy(),
         -82997604.24565414,
