Configure defaults for optimal Loihi performance #126

Merged · 3 commits · Jun 11, 2019
Changes from all commits
4 changes: 4 additions & 0 deletions .nengobones.yml
@@ -409,6 +409,10 @@ setup_cfg:
test_solvers.py:test_non_compositional_solver_transform_error:
1D convolution not supported

# sparse transforms not supported
test_transforms.py:test_sparse[*:
sparse transforms not supported


docs_conf_py:
intersphinx_mapping:
6 changes: 6 additions & 0 deletions CHANGES.rst
@@ -46,6 +46,9 @@ Release history
`#209 <https://github.com/nengo/nengo-loihi/issues/209>`__)
- Nengo Loihi now requires at least NxSDK version 0.8.0.
(`#218 <https://github.com/nengo/nengo-loihi/pull/218>`__)
- The default intercept range set by ``nengo_loihi.set_defaults()`` is now
(-1, 0.5), instead of (-0.5, 0.5).
(`#126 <https://github.com/nengo/nengo-loihi/pull/126>`__)

**Fixed**

@@ -62,6 +65,9 @@ Release history
- We no longer disable the Nengo decoder cache for all models.
(`#202 <https://github.com/nengo/nengo-loihi/pull/202>`__,
`#207 <https://github.com/nengo/nengo-loihi/issues/207>`__)
- Transforms to on-chip neurons are now applied on-chip,
which avoids scaling issues and large off-chip transforms.
(`#126 <https://github.com/nengo/nengo-loihi/pull/126>`__)

0.6.0 (February 22, 2019)
=========================
73 changes: 49 additions & 24 deletions nengo_loihi/builder/connection.py
@@ -120,9 +120,44 @@ def build_host_neurons_to_chip(model, conn):

def build_host_to_chip(model, conn):
rng = np.random.RandomState(model.seeds[conn])
dim = conn.size_out
host = model.host_model(base_obj(conn.pre))

if nengo_transforms is not None:
if isinstance(conn.transform, nengo_transforms.Convolution):
raise BuildError(
"Conv2D transforms not supported for off-chip to "
"on-chip connections where `pre` is not a Neurons object.")
elif not isinstance(conn.transform, nengo_transforms.Dense):
raise BuildError(
"nengo-loihi does not yet support %r transforms "
"on host to chip connections"
% (type(conn.transform).__name__,))

# Scale the input spikes based on the radius of the target ensemble
weights = sample_transform(conn, rng=rng)

if isinstance(conn.post_obj, Ensemble):
weights = weights / conn.post_obj.radius

if nengo_transforms is None:
transform = weights
else:
# copy the Transform information, setting `init` to the sampled weights
transform = copy.copy(conn.transform)
type(transform).init.data[transform] = weights

if isinstance(conn.post_obj, Neurons):
# we don't have encoders, and the transform could have large output,
# so do it on the chip
host_transform = 1.
chip_transform = transform
dim = conn.size_mid
else:
# we have encoders on the chip, so do the transform off-chip
host_transform = transform
chip_transform = 1.
dim = conn.size_out

logger.debug("Creating ChipReceiveNode for %s", conn)
receive = ChipReceiveNode(
dim * 2,
@@ -135,6 +170,7 @@ def build_host_to_chip(model, conn):
receive2post = Connection(
receive,
conn.post,
transform=chip_transform,
synapse=model.decode_tau,
label=None if conn.label is None else "%s_chip" % conn.label,
add_to_container=False,
@@ -148,25 +184,6 @@ def build_host_to_chip(model, conn):
_inherit_seed(host, ens, model, conn)
host.build(ens)

if nengo_transforms is not None and isinstance(
conn.transform, nengo_transforms.Convolution):
raise BuildError(
"Conv2D transforms not supported for off-chip to "
"on-chip connections where `pre` is not a Neurons object.")

# Scale the input spikes based on the radius of the target ensemble
weights = sample_transform(conn, rng=rng)

if isinstance(conn.post_obj, Ensemble):
weights = weights / conn.post_obj.radius

if nengo_transforms is None:
transform = weights
else:
# copy the Transform information, setting `init` to the sampled weights
transform = copy.copy(conn.transform)
type(transform).init.data[transform] = weights

pre2ens = Connection(
conn.pre,
ens,
@@ -175,7 +192,7 @@ def build_host_to_chip(model, conn):
eval_points=conn.eval_points,
scale_eval_points=conn.scale_eval_points,
synapse=conn.synapse,
transform=transform,
transform=host_transform,
label=None if conn.label is None else "%s_enc" % conn.label,
add_to_container=False,
)
@@ -253,6 +270,13 @@ def build_chip_to_host(model, conn):


def build_host_to_learning_rule(model, conn):
if (nengo_transforms is not None
and not isinstance(conn.transform, nengo_transforms.Dense)):
raise BuildError(
"nengo-loihi does not yet support %r transforms "
"on host to chip learning rule connections"
% (type(conn.transform).__name__,))

dim = conn.size_out
host = model.host_model(base_obj(conn.pre))

@@ -397,9 +421,10 @@ def build_chip_connection(model, conn):  # noqa: C901
if isinstance(conn.transform, nengo_transforms.Convolution):
return build_conv2d_connection(model, conn)
elif not isinstance(conn.transform, nengo_transforms.Dense):
raise NotImplementedError(
"nengo-loihi does not yet support %s transforms"
% conn.transform)
raise BuildError(
"nengo-loihi does not yet support %r transforms "
"on chip to chip connections"
% (type(conn.transform).__name__,))

# Create random number generator
rng = np.random.RandomState(model.seeds[conn])
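
For readers of this diff: the new code in build_host_to_chip chooses where the connection's transform is applied. Below is a minimal sketch of that rule, not code from this PR; the helper name split_host_chip_transform and the toy model are hypothetical.

import numpy as np
import nengo

def split_host_chip_transform(conn, weights):
    # Sketch of the placement rule: a connection into a Neurons object has no
    # on-chip encoders and its weights can be large, so the transform is left
    # for the chip; a connection into an Ensemble is encoded on chip, so the
    # transform is applied off-chip instead.
    if isinstance(conn.post_obj, nengo.ensemble.Neurons):
        return 1.0, weights, conn.size_mid  # host_transform, chip_transform, dim
    return weights, 1.0, conn.size_out

with nengo.Network():
    stim = nengo.Node(np.ones(2))
    ens = nengo.Ensemble(10, 2)
    nengo.Connection(stim, ens)                    # transform applied off-chip (host)
    nengo.Connection(stim, ens.neurons,            # transform now applied on the chip
                     transform=np.ones((10, 2)))
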
2 changes: 1 addition & 1 deletion nengo_loihi/config.py
@@ -42,4 +42,4 @@ def set_defaults():

"""
nengo.Ensemble.max_rates.default = nengo.dists.Uniform(100, 120)
nengo.Ensemble.intercepts.default = nengo.dists.Uniform(-0.5, 0.5)
nengo.Ensemble.intercepts.default = nengo.dists.Uniform(-1.0, 0.5)
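
For context, a short usage sketch (not part of the PR) of the updated defaults; the expected values in the comments follow from the two config.py lines above.

import nengo
import nengo_loihi

nengo_loihi.set_defaults()

with nengo.Network():
    ens = nengo.Ensemble(50, dimensions=1)

print(ens.max_rates)   # expected: Uniform(low=100, high=120)
print(ens.intercepts)  # expected: Uniform(low=-1.0, high=0.5), previously (-0.5, 0.5)
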
51 changes: 51 additions & 0 deletions nengo_loihi/tests/test_connection.py
@@ -4,6 +4,7 @@
import numpy as np
import pytest

from nengo_loihi.compat import nengo_transforms
from nengo_loihi.config import add_params
from nengo_loihi.neurons import nengo_rates

@@ -599,3 +600,53 @@ def test_n2n_on_host(precompute, allclose, Simulator, seed_ens, seed, plt):

assert allclose(sim.data[p_pre], sim2.data[p_pre], atol=0.1)
assert allclose(sim.data[p_post], sim2.data[p_post], atol=0.1)


@pytest.mark.skipif(nengo_transforms is None,
reason="Requires new nengo.transforms")
def test_sparse_host_to_chip_error(Simulator):
with nengo.Network() as net:
stim = nengo.Node(np.ones(4))
ens = nengo.Ensemble(100, 2)
nengo.Connection(stim, ens, transform=nengo_transforms.Sparse(
shape=(2, 4), indices=[[0, 0], [1, 1]], init=[-1, -1])
)

with pytest.raises(BuildError, match="on host to chip connections"):
with Simulator(net):
pass


@pytest.mark.skipif(nengo_transforms is None,
reason="Requires new nengo.transforms")
def test_sparse_host_to_learning_rule_error(Simulator):
with nengo.Network() as net:
err = nengo.Node(np.ones(4))
pre = nengo.Ensemble(100, 2)
post = nengo.Ensemble(100, 2)
conn = nengo.Connection(pre, post, learning_rule_type=nengo.PES())
nengo.Connection(
err,
conn.learning_rule,
transform=nengo_transforms.Sparse(
shape=(2, 4), indices=[[0, 0], [1, 1]], init=[-1, -1],
)
)

with pytest.raises(BuildError, match="on host to chip learning rule"):
with Simulator(net):
pass


@pytest.mark.skipif(nengo_transforms is None,
reason="Requires new nengo.transforms")
def test_sparse_chip_to_chip_error(Simulator):
with nengo.Network() as net:
pre = nengo.Ensemble(100, 4)
post = nengo.Ensemble(100, 2)
nengo.Connection(pre, post, transform=nengo_transforms.Sparse(
shape=(2, 4), indices=[[0, 0], [1, 1]], init=[-1, -1]))

with pytest.raises(BuildError, match="on chip to chip"):
with Simulator(net):
pass
4 changes: 2 additions & 2 deletions nengo_loihi/tests/test_learning.py
@@ -114,7 +114,7 @@ def test_pes_overflow(plt, seed, Simulator):

loihi_model = Model()
# set learning_wgt_exp low to create overflow in weight values
loihi_model.pes_wgt_exp = -1
loihi_model.pes_wgt_exp = -2

with Simulator(model, model=loihi_model) as loihi_sim:
loihi_sim.run(simtime)
@@ -143,7 +143,7 @@ def test_pes_overflow(plt, seed, Simulator):
assert errors_j[i] < 0.1, ("Learning output for dim %d did not match "
"any scaled version of the target output"
% j)
assert scale[i] > 0.4, "Learning output for dim %d is too small" % j
assert scale[i] > 0.25, "Learning output for dim %d is too small" % j
assert scale[i] < 0.7, ("Learning output for dim %d is too large "
"(weights or traces not clipping as expected)"
% j)
8 changes: 4 additions & 4 deletions nengo_loihi/tests/test_simulator.py
@@ -147,8 +147,8 @@ def test_nengo_comm_channel_compare(simtype, Simulator, seed, plt, allclose):
nengo.Connection(a, b, function=lambda x: x**2,
solver=nengo.solvers.LstsqL2(weights=True))

ap = nengo.Probe(a, synapse=0.03)
bp = nengo.Probe(b, synapse=0.03)
ap = nengo.Probe(a, synapse=nengo.synapses.Alpha(0.02))
bp = nengo.Probe(b, synapse=nengo.synapses.Alpha(0.02))

with nengo.Simulator(model) as nengo_sim:
nengo_sim.run(simtime)
@@ -164,8 +164,8 @@ def test_nengo_comm_channel_compare(simtype, Simulator, seed, plt, allclose):
plt.plot(nengo_sim.trange(), nengo_sim.data[bp])
plt.plot(loihi_sim.trange(), loihi_sim.data[bp])

assert allclose(loihi_sim.data[ap], nengo_sim.data[ap], atol=0.1, rtol=0.2)
assert allclose(loihi_sim.data[bp], nengo_sim.data[bp], atol=0.1, rtol=0.2)
assert allclose(loihi_sim.data[ap], nengo_sim.data[ap], atol=0.07, xtol=3)
assert allclose(loihi_sim.data[bp], nengo_sim.data[bp], atol=0.07, xtol=6)


@pytest.mark.parametrize("precompute", (True, False))
2 changes: 2 additions & 0 deletions setup.cfg
@@ -327,6 +327,8 @@ nengo_test_unsupported =
"decoded connection optimized away"
test_solvers.py:test_non_compositional_solver_transform_error
"1D convolution not supported"
test_transforms.py:test_sparse[*
"sparse transforms not supported"

[pylint]
# note: pylint doesn't look in setup.cfg by default, need to call it with