Skip to content

Commit

Permalink
Connections to ensembles have proper synapses
Browse files Browse the repository at this point in the history
This fixes (for example) neuron->ensemble connections (of which
we currently have no test cases).

Unfortunately, this also changes the behaviour of node->ensemble
connections, which caused node_ens_ens to fail. The synapse explicitly set on
the connection (None) now overrides the default INTER_TAU synapse,
removing the filtering on the ReLU neurons that are used to turn the node
values into spikes.

To fix this, I lowered the frequency of the input signal.
  • Loading branch information
hunse authored and tbekolay committed Sep 24, 2018
1 parent 619e45b commit fd8511e
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 10 deletions.
2 changes: 1 addition & 1 deletion docs/examples/node_ens_ens.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def a_fn(x):
nengo_loihi.set_defaults()
with nengo.Network(seed=seed) as model:
u = nengo.Node(
output=nengo.processes.WhiteSignal(tend, high=5, seed=seed + 1),
output=nengo.processes.WhiteSignal(tend, high=2, seed=seed + 1),
size_out=d)
up = nengo.Probe(u, synapse=None)

Expand Down
12 changes: 9 additions & 3 deletions nengo_loihi/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,6 +282,7 @@ def build_ensemble(model, ens):
group.bias[:] = bias
model.build(ens.neuron_type, ens.neurons, group)

# set default filter just in case no other filter gets set
group.configure_filter(INTER_TAU, dt=model.dt, default=True)

if ens.noise is not None:
Expand Down Expand Up @@ -469,7 +470,7 @@ def build_connection(model, conn):
transform = get_samples(
conn.transform, conn.size_out, d=conn.size_mid, rng=rng)

tau_s = 0.0
tau_s = 0.0 # `synapse is None` gets mapped to `tau_s = 0.0`
if isinstance(conn.synapse, nengo.synapses.Lowpass):
tau_s = conn.synapse.tau
elif conn.synapse is not None:
Expand Down Expand Up @@ -526,6 +527,7 @@ def build_connection(model, conn):

mid_cx = pre_cx
mid_axon_inds = slice(None)
post_tau = tau_s
if needs_interneurons and not isinstance(conn.post_obj, Neurons):
# --- add interneurons
assert weights.ndim == 2
Expand Down Expand Up @@ -578,7 +580,9 @@ def build_connection(model, conn):
weights2 = 0.5 * gain * np.vstack([weights,
-weights] * INTER_N).T

# use tau_s for filter into interneurons, and INTER_TAU for filter out
dec_cx.configure_filter(tau_s, dt=model.dt)
post_tau = INTER_TAU

dec_syn.set_full_weights(weights2)
dec_cx.add_synapses(dec_syn)
Expand Down Expand Up @@ -632,7 +636,7 @@ def build_connection(model, conn):
ax.target = syn
mid_cx.add_axons(ax)

post_cx.configure_filter(tau_s, dt=model.dt)
post_cx.configure_filter(post_tau, dt=model.dt)

if conn.learning_rule_type is not None:
raise NotImplementedError()
Expand All @@ -654,7 +658,7 @@ def build_connection(model, conn):
ax.target = syn
mid_cx.add_axons(ax)

post_cx.configure_filter(tau_s, dt=model.dt)
post_cx.configure_filter(post_tau, dt=model.dt)

if conn.learning_rule_type is not None:
raise NotImplementedError()
Expand All @@ -667,6 +671,8 @@ def build_connection(model, conn):
mid_ax.target_inds = mid_axon_inds
mid_cx.add_axons(mid_ax)
model.objs[conn]['mid_axons'] = mid_ax

post_cx.configure_filter(post_tau, dt=model.dt)
elif isinstance(conn.post_obj, Node):
raise NotImplementedError()
else:
Expand Down
12 changes: 6 additions & 6 deletions nengo_loihi/tests/test_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,15 +100,15 @@ def test_node_ens_ens(allclose, plt):
plt.subplot(413)
plt.plot(t, a[:, 0] ** 2, c="b", label="a[0]**2")
plt.plot(t, b[:, 0], c="g", label="b[0]")
plt.ylim([-1, 1])
plt.ylim([-0.05, 1])
plt.legend(loc=0)

plt.subplot(414)
plt.plot(t, a[:, 0] ** 2, c="b", label="a[1]**2")
plt.plot(t, b[:, 0], c="g", label="b[1]")
plt.ylim([-1, 1])
plt.plot(t, a[:, 1] ** 2, c="b", label="a[1]**2")
plt.plot(t, b[:, 1], c="g", label="b[1]")
plt.ylim([-0.05, 1])
plt.legend(loc=0)

tmask = t > 0.1 # ignore transients at the beginning
assert allclose(a[tmask], np.clip(u[tmask], -1, 1), atol=0.4, rtol=0.25)
assert allclose(b[tmask], a[tmask]**2, atol=0.35, rtol=0.0)
assert allclose(a[tmask], np.clip(u[tmask], -1, 1), atol=0.1, rtol=0.1)
assert allclose(b[tmask], a[tmask]**2, atol=0.1, rtol=0.2)

0 comments on commit fd8511e

Please sign in to comment.