
Commit

Rename bias -> bias_factor (#46)
timothyas authored Oct 20, 2023
1 parent 72f5a92 commit 596d65b
Showing 8 changed files with 29 additions and 29 deletions.

xesn/esn.py: 18 changes (9 additions, 9 deletions)

@@ -41,7 +41,7 @@ def adjacency_factor(self):
         return self.adjacency_kwargs["factor"]

     @property
-    def bias(self):
+    def bias_factor(self):
         return self.bias_kwargs["factor"]

     def __init__(self,
@@ -50,10 +50,10 @@ def __init__(self,
             n_reservoir,
             input_factor,
             adjacency_factor,
-            connectedness,
-            bias,
+            bias_factor,
             leak_rate,
             tikhonov_parameter,
+            connectedness,
             input_kwargs=None,
             adjacency_kwargs=None,
             bias_kwargs=None):
@@ -104,20 +104,20 @@ def __init__(self,
         # Handle bias vector options
         self.bias_kwargs = {
             "distribution" : "uniform",
-            "factor"       : bias,
+            "factor"       : bias_factor,
             "random_seed"  : None
         }
         if bias_kwargs is not None:
             self.bias_kwargs.update(bias_kwargs)

-        if self.bias_kwargs["factor"] != bias:
-            raise ValueError(f"ESN.__init__: conflicting bias factor given with options 'bias' and 'bias_kwargs[''factor'']'")
+        if self.bias_kwargs["factor"] != bias_factor:
+            raise ValueError(f"ESN.__init__: conflicting bias factor given with options 'bias_factor' and 'bias_kwargs[''factor'']'")

         # Check inputs
         try:
-            assert self.bias >= 0.0
+            assert self.bias_factor >= 0.0
         except AssertionError:
-            raise ValueError(f"ESN.__init__: bias must be non-negative, got {self.bias}")
+            raise ValueError(f"ESN.__init__: bias_factor must be non-negative, got {self.bias_factor}")

         try:
             assert self.connectedness < self.n_reservoir
@@ -138,7 +138,7 @@ def __str__(self):
             f' {"n_reservoir:":<24s}{self.n_reservoir}\n'+\
             '--- \n'+\
             f' {"connectedness:":<24s}{self.connectedness}\n'+\
-            f' {"bias:":<24s}{self.bias}\n'+\
+            f' {"bias_factor:":<24s}{self.bias_factor}\n'+\
             f' {"leak_rate:":<24s}{self.leak_rate}\n'+\
             f' {"tikhonov_parameter:":<24s}{self.tikhonov_parameter}\n'+\
             '--- \n'+\
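
For orientation, a minimal sketch of how the constructor is called after this rename, assuming the package exposes the class as xesn.ESN; the numeric values are illustrative placeholders (loosely borrowed from the test defaults below), not part of this commit:

    from xesn import ESN

    esn = ESN(
        n_input=3,
        n_output=3,
        n_reservoir=100,
        input_factor=0.5,
        adjacency_factor=0.5,
        bias_factor=0.1,            # formerly "bias"
        leak_rate=0.5,
        tikhonov_parameter=1e-6,
        connectedness=5,
        # If bias_kwargs also sets "factor", it must equal bias_factor,
        # otherwise __init__ raises the ValueError shown above.
        bias_kwargs={"distribution": "uniform", "random_seed": 0},
    )
    print(esn.bias_factor)          # property renamed from ESN.bias

Note that a bias_kwargs dict without an explicit "factor" entry simply inherits bias_factor, so this example does not trigger the conflict check.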

xesn/lazyesn.py: 8 changes (4 additions, 4 deletions)

@@ -58,10 +58,10 @@ def __init__(self,
             n_reservoir,
             input_factor,
             adjacency_factor,
-            connectedness,
-            bias,
+            bias_factor,
             leak_rate,
             tikhonov_parameter,
+            connectedness,
             overlap,
             persist,
             boundary=xp.nan,
@@ -98,10 +98,10 @@ def __init__(self,
             n_reservoir=n_reservoir,
             input_factor=input_factor,
             adjacency_factor=adjacency_factor,
-            connectedness=connectedness,
-            bias=bias,
+            bias_factor=bias_factor,
             leak_rate=leak_rate,
             tikhonov_parameter=tikhonov_parameter,
+            connectedness=connectedness,
             input_kwargs=input_kwargs,
             adjacency_kwargs=adjacency_kwargs,
             bias_kwargs=bias_kwargs)

xesn/optim.py: 8 changes (4 additions, 4 deletions)

@@ -52,12 +52,12 @@ def transform(params, transformations):
         transformed_params (dict): with updated parameters based on transformations, or untouched if not specified

     Example:
-        >>> params = {"input_factor": 0.5, "adjacency_factor": 0.5, "bias": 0.5}
+        >>> params = {"input_factor": 0.5, "adjacency_factor": 0.5, "bias_factor": 0.5}
         >>> transforms = {"input_factor": "log", "adjacency_factor": "log10"}
         >>> transform(params, transforms)
         {'input_factor': -0.6931471805599453,
          'adjacency_factor': -0.3010299956639812,
-         'bias': 0.5}
+         'bias_factor': 0.5}
     """

     transformed_params = params.copy()
@@ -96,12 +96,12 @@ def inverse_transform(transformed_params, transformations):
         transformed_params (dict): with updated parameters based on transformations, or untouched if not specified

     Example:
-        >>> params = {"input_factor": -0.69, "adjacency_factor": -0.3, "bias": 0.5}
+        >>> params = {"input_factor": -0.69, "adjacency_factor": -0.3, "bias_factor": 0.5}
         >>> transforms = {"input_factor": "log", "adjacency_factor": "log10"}
         >>> inverse_transform(params, transforms)
         {'input_factor': 0.5015760690660556,
          'adjacency_factor': 0.5011872336272722,
-         'bias': 0.5}
+         'bias_factor': 0.5}
     """

     params = transformed_params.copy()
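
The docstring examples above fully describe the behavior of these functions: parameters named in the transformations dict are mapped through the given function, while anything else (here bias_factor) passes through unchanged. A minimal sketch of that behavior, not the actual xesn implementation:

    import numpy as np

    def transform_sketch(params, transformations):
        # Apply "log" / "log10" to the named parameters; leave the rest untouched.
        out = params.copy()
        for name, kind in transformations.items():
            if kind == "log":
                out[name] = np.log(out[name])
            elif kind == "log10":
                out[name] = np.log10(out[name])
        return out

    params = {"input_factor": 0.5, "adjacency_factor": 0.5, "bias_factor": 0.5}
    transforms = {"input_factor": "log", "adjacency_factor": "log10"}
    print(transform_sketch(params, transforms))
    # {'input_factor': -0.6931..., 'adjacency_factor': -0.3010..., 'bias_factor': 0.5}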

xesn/test/config-eager.yaml: 4 changes (2 additions, 2 deletions)

@@ -35,7 +35,7 @@ esn:
     is_sparse        : True
     random_seed      : 1
   #
-  bias               : 0.5
+  bias_factor        : 0.5
   bias_kwargs:
     distribution     : uniform
     normalization    : svd
@@ -58,7 +58,7 @@ macro_training:
   parameters:
     input_factor    : [1.e-2, 2.]
     adjacency_factor: [1.e-2, 2.]
-    bias            : [0. , 2.]
+    bias_factor     : [0. , 2.]
   transformations:
     input_factor    : log10
     adjacency_factor: log
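
Since these YAML keys presumably feed the ESN constructor arguments and the macro-training parameter bounds directly, an existing config that still says "bias" will no longer line up with the renamed bias_factor argument. A quick way to sanity-check an updated config, assuming the nesting shown in this file and PyYAML installed:

    import yaml  # PyYAML

    with open("xesn/test/config-eager.yaml") as f:
        config = yaml.safe_load(f)

    # Both the esn section and the macro_training parameter bounds now use bias_factor.
    assert "bias_factor" in config["esn"]
    assert "bias" not in config["esn"]
    assert "bias_factor" in config["macro_training"]["parameters"]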

xesn/test/config-lazy.yaml: 4 changes (2 additions, 2 deletions)

@@ -46,7 +46,7 @@ lazyesn:
     is_sparse        : True
     random_seed      : 1
   #
-  bias               : 0.5
+  bias_factor        : 0.5
   bias_kwargs:
     distribution     : uniform
     normalization    : svd
@@ -69,7 +69,7 @@ macro_training:
   parameters:
     input_factor    : [1.e-2, 1e2]
     adjacency_factor: [1.e-2, 2.]
-    bias            : [0. , 2.]
+    bias_factor     : [0. , 2.]

   transformations:
     input_factor    : log10

xesn/test/esn.py: 8 changes (4 additions, 4 deletions)

@@ -14,7 +14,7 @@ class TestESN:
     n_reservoir         = 100
     n_train             = 500
     connectedness       = 5
-    bias                = 0.1
+    bias_factor         = 0.1
     leak_rate           = 0.5
     tikhonov_parameter  = 1e-6

@@ -24,10 +24,10 @@ class TestESN:
     @property
     def kw(self):
         return {key: getattr(self, key) for key in [
-            "n_input", "n_output", "n_reservoir", "connectedness", "bias", "leak_rate", "tikhonov_parameter", "input_factor", "adjacency_factor"]}
+            "n_input", "n_output", "n_reservoir", "connectedness", "bias_factor", "leak_rate", "tikhonov_parameter", "input_factor", "adjacency_factor"]}

     equal_list = ("n_input", "n_output", "n_reservoir")
-    close_list = ("input_factor", "adjacency_factor", "connectedness", "bias", "leak_rate", "tikhonov_parameter")
+    close_list = ("input_factor", "adjacency_factor", "connectedness", "bias_factor", "leak_rate", "tikhonov_parameter")


 class TestInit(TestESN):
@@ -50,7 +50,7 @@ def test_basic(self):
     @pytest.mark.parametrize(
         "key, val, raises, error",
         [
-            ("bias", -1., pytest.raises, ValueError),
+            ("bias_factor", -1., pytest.raises, ValueError),
             ("connectedness", 101, pytest.raises, ValueError),
             ("connectedness", 90, pytest.warns, RuntimeWarning),
         ],
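
The parametrized case above exercises the non-negativity check that moved to bias_factor in esn.py. A standalone version of the same behavior might look like the following hypothetical test, again assuming xesn.ESN and using illustrative values for the remaining required arguments:

    import pytest
    from xesn import ESN

    def test_negative_bias_factor_rejected():
        kwargs = dict(
            n_input=3, n_output=3, n_reservoir=100,
            input_factor=0.5, adjacency_factor=0.5,
            leak_rate=0.5, tikhonov_parameter=1e-6, connectedness=5,
        )
        # A negative bias_factor should raise the ValueError added in this commit.
        with pytest.raises(ValueError):
            ESN(bias_factor=-1.0, **kwargs)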

xesn/test/lazy.py: 2 changes (1 addition, 1 deletion)

@@ -21,7 +21,7 @@ class TestLazy(TestESN):
     overlap     = {"x": 1, "time": 0}
     persist     = True
     equal_list = ("overlap", "esn_chunks", "persist", "overlap", "n_reservoir", "boundary")
-    close_list = ("input_factor", "adjacency_factor", "connectedness", "bias", "leak_rate", "tikhonov_parameter")
+    close_list = ("input_factor", "adjacency_factor", "connectedness", "bias_factor", "leak_rate", "tikhonov_parameter")

     @property
     def kw(self):

xesn/test/optim.py: 6 changes (3 additions, 3 deletions)

@@ -10,7 +10,7 @@ def transform_params():
     params = {
         "input_factor"      : 0.5,
         "adjacency_factor"  : 0.5,
-        "bias"              : 0.5}
+        "bias_factor"       : 0.5}

     transforms = {
         "input_factor"      : "log10",
@@ -38,7 +38,7 @@ def test_transform(transform_inputs, request):

     assert_allclose(np.array(ptest["input_factor"]), np.log10(np.array(params["input_factor"])))
     assert_allclose(np.array(ptest["adjacency_factor"]), np.log(np.array(params["adjacency_factor"])))
-    assert_allclose(np.array(ptest["bias"]), np.array(params["bias"]))
+    assert_allclose(np.array(ptest["bias_factor"]), np.array(params["bias_factor"]))


 @pytest.mark.parametrize(
@@ -51,7 +51,7 @@ def test_inverse_transform(transform_inputs, request):

     assert_allclose(np.array(ptest["input_factor"]), 10.** np.array(params["input_factor"]))
     assert_allclose(np.array(ptest["adjacency_factor"]), np.exp(np.array(params["adjacency_factor"])))
-    assert_allclose(np.array(ptest["bias"]), np.array(params["bias"]))
+    assert_allclose(np.array(ptest["bias_factor"]), np.array(params["bias_factor"]))

 @pytest.mark.parametrize(
     "transformer", (transform, inverse_transform)
