How to use the nengo.LIF function in nengo

To help you get started, we’ve selected a few nengo examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github hunse / nef-rbm / deep-auto.py View on Github external
# NOTE(review): fragment begins mid-method — the enclosing def, and the
# definitions of rng, M, N, m, n, n_hid, n_vis, encoders, max_rates,
# intercepts, vis_shape, rf_shape, and seed, are outside this excerpt.
# Pick a random top-left corner (i[k], j[k]) for each hidden unit's
# m-by-n receptive field within the M-by-N input image.
i = rng.randint(low=0, high=M-m+1, size=n_hid)
            j = rng.randint(low=0, high=N-n+1, size=n_hid)

            # Boolean mask: one M-by-N plane per hidden unit, True inside
            # that unit's receptive-field rectangle.
            mask = np.zeros((n_hid, M, N), dtype='bool')
            for k in xrange(n_hid):  # xrange: this file is Python 2
                mask[k, i[k]:i[k]+m, j[k]:j[k]+n] = True

            # Flatten each plane so the mask lines up with (n_hid, n_vis)
            # encoders.
            mask = mask.reshape(n_hid, n_vis)

        # Zero encoder weights outside each unit's receptive field, then
        # L2-normalize every encoder row.
        if mask is not None:
            encoders = encoders * mask
        encoders /= norm(encoders, axis=1, keepdims=True)

        # LIF parameters: 20 ms membrane time constant, 2 ms refractory period.
        self.tau_rc = 20e-3
        self.tau_ref = 2e-3
        neurons = nengo.LIF(tau_rc=self.tau_rc, tau_ref=self.tau_ref)
        # Solve for the per-neuron gain/bias realizing the requested
        # max firing rates and intercepts.
        gain, bias = neurons.gain_bias(max_rates, intercepts)

        self.vis_shape = vis_shape
        self.n_vis = n_vis
        self.n_hid = n_hid
        self.rf_shape = rf_shape
        self.seed = seed

        # Cast to Theano's configured float type before wrapping in shared
        # variables for use in the computation graph.
        dtype = theano.config.floatX
        encoders = encoders.astype(dtype)
        max_rates = max_rates.astype(dtype)
        gain = gain.astype(dtype)
        bias = bias.astype(dtype)

        self.encoders = theano.shared(encoders, name='encoders')
        self.max_rates = theano.shared(max_rates, name='max_rates')
github hunse / nef-rbm / nlif-deep-nengo.py View on Github external
# NOTE(review): script fragment — `filename`, `get_image`, `weights`,
# `biases`, and the rest of the with-block are outside this excerpt.
# Load the pickled (train, valid, test) dataset splits.
with gzip.open(filename, 'rb') as f:
    train, valid, test = pickle.load(f)

test_images, test_labels = test

# shuffle the test set with a fixed seed so runs are reproducible
rng = np.random.RandomState(92)
inds = rng.permutation(len(test_images))
test_images = test_images[inds]
test_labels = test_labels[inds]

# Distinct class labels present in the test set.
labels = np.unique(test_labels)
n_labels = labels.size

# --- create the model
# LIF with 20 ms membrane time constant and 2 ms refractory period.
neuron_type = nengo.LIF(tau_rc=0.02, tau_ref=0.002)
# alpha = 1
# beta = 1
max_rate = 63
intercept = 0
# For this max_rate/intercept pair, the solved (gain, bias) should be
# approximately (1, 1); guard against parameter drift.
assert np.allclose(neuron_type.gain_bias(max_rate, intercept), (1, 1), atol=1e-2)
amp = 1. / 65
neuron_params = dict(max_rates=max_rate, intercepts=intercept, neuron_type=neuron_type)

model = nengo.Network()
with model:
    # Input node streaming images into the network
    # (get_image is defined elsewhere in the file).
    input_images = nengo.Node(output=get_image, label='images')

    # W, b = weights[0], biases[0]
    # n = b.size
    # layer0 = nengo.Ensemble(n, 1, label='layer 0', neuron_type=neuron_type,
    #                         max_rates=max_rate*np.ones(n),
github hunse / nef-rbm / sigmoid-rbm / find_neuron_params.py View on Github external
def residual(encoders, max_rates, intercepts, eval_points, show=False):
    """Residual of fitting a sigmoid with a population of LIF neurons.

    Builds LIF tuning curves from the given encoders/max_rates/intercepts,
    solves for L2-regularized decoders approximating
    sigmoid_radius(eval_points), and computes the fit residual.

    NOTE(review): the body continues past this excerpt — the return
    statement (presumably returning r2) is not visible here.
    `N` and `sigmoid_radius` are defined elsewhere in the file.
    """
    radius = 5
    # Population of N LIF neurons with default time constants.
    neurons = nengo.LIF(N)
    gains, biases = neurons.gain_bias(max_rates, intercepts)
    # Tuning-curve activities at the projected evaluation points.
    A = neurons.rates(np.dot(eval_points, encoders.T), gains, biases)
    y = sigmoid_radius(eval_points)
    # Least-squares decoders with L2 regularization.
    d, _ = nengo.decoders.LstsqL2()(A, y)
    # Residual vector and its Euclidean norm.
    r = np.dot(A, d) - y
    r2 = np.sqrt(np.dot(r.T, r))

    if show:
        # Plot target sigmoid (dashed) vs. decoded approximation on [-1, 1].
        plt.figure(101)
        plt.clf()
        x = np.linspace(-1, 1, 501).reshape(-1, 1)
        a = neurons.rates(np.dot(x, encoders.T), gains, biases)
        y = sigmoid_radius(x)
        yhat = np.dot(a, d)
        plt.plot(x, y, 'k--')
        plt.plot(x, yhat)
github hunse / nef-rbm / auto / run_lif_nocode.py View on Github external
# NOTE(review): script fragment — `codes`, `code_means`, `test_labels`,
# `labels`, `layers`, `weights`, `biases`, and `get_image` are defined
# outside this excerpt.
# try dot product classifier: classify each code by the class mean it is
# most aligned with (after centering on the overall mean).
code_mean = codes.mean(0)
dists = np.dot(codes - code_mean, (code_means - code_mean).T)
errors = (test_labels != labels[np.argmax(dists, axis=1)])
print "ANN dot error:", errors.mean()  # print statement: Python 2 file

# Histogram of activations in each layer, one subplot per layer.
if 1:
    plt.figure(101)
    plt.clf()
    r = len(layers)
    for i, layer in enumerate(layers):
        plt.subplot(r, 1, i+1)
        plt.hist(layer.flatten(), bins=15)

# --- create the model
# LIF with 20 ms membrane time constant and 2 ms refractory period.
neuron_type = nengo.LIF(tau_rc=0.02, tau_ref=0.002)
max_rate = 63.04
intercept = 0
# Scale output spikes so a neuron at max_rate contributes ~1.
amp = 1. / max_rate
# At these parameters the solved (gain, bias) should be ~(1, 1).
assert np.allclose(neuron_type.gain_bias(max_rate, intercept), (1, 1), atol=1e-2)

dt = 1e-3
model = nengo.Network(seed=97)
with model:
    input_images = nengo.Node(output=get_image, label='images')

    # --- make nonlinear layers: one ensemble per (W, b) weight/bias pair.
    layers = []
    for i, [W, b] in enumerate(zip(weights, biases)):
        n = b.size
        layer = nengo.Ensemble(n, 1, label='layer %d' % i, neuron_type=neuron_type,
                               max_rates=max_rate*np.ones(n),
github hunse / nef-rbm / encoder-learning.py View on Github external
# NOTE(review): script fragment — `train_images`, `create_mask`, and `norm`
# are defined outside this excerpt.
# --- set up network parameters
n_vis = train_images.shape[1]
n_hid = 500
rng = np.random

# Random Gaussian weights restricted to 9x9 receptive fields on a
# 28x28 image (MNIST-sized input).
if 1:
    rf_shape = (9, 9)
    mask = create_mask(n_hid, (28, 28), rf_shape)
    weights = rng.normal(size=(n_hid, n_vis)) * mask

# Transpose to (n_vis, n_hid) and L2-normalize each column (one column
# per hidden unit).
weights = weights.T
mask = mask.T
weights /= norm(weights, axis=0, keepdims=True)

# Default LIF neuron model; gain/bias solved for a max rate of 200 Hz
# and an intercept of -0.5.
neurons = nengo.LIF()
gain, bias = neurons.gain_bias(200, -0.5)

def encode(x):
    # Hidden-layer firing rates for input x through the masked weights.
    return neurons.rates(np.dot(x, weights), gain, bias)

# --- determine initial decoders: L2-regularized least squares mapping
# hidden activities back to the inputs (autoencoder-style reconstruction).
x = train_images[:1000]
decoders, _ = nengo.decoders.LstsqL2()(encode(x), x)

# x = train_images[:1000]
# A = encode(x)
# dshape = (n_hid, n_vis)

# def func(d):
#     n = x.shape[0]
#     xhat = np.dot(A, d.reshape(dshape))
github BINDS-LAB-UMASS / bindsnet_experiments / experiments / benchmark / benchmark.py View on Github external
def Nengo(n_neurons, time):
    """Benchmark nengo: build and simulate a two-ensemble LIF network.

    Parameters
    ----------
    n_neurons : int
        Number of neurons in each of the two ensembles.
    time : number
        Simulation duration in milliseconds.

    Returns
    -------
    tuple of float
        Elapsed wall-clock times (t() - t0, t() - t1); both timers start
        before network construction.
    """
    start_total = t()
    start_inner = t()

    net = nengo.Network()
    with net:
        source = nengo.Ensemble(n_neurons, dimensions=1, neuron_type=nengo.LIF())
        target = nengo.Ensemble(n_neurons, dimensions=2, neuron_type=nengo.LIF())
        # Dense random all-to-all connection between the two populations.
        nengo.Connection(source, target,
                         transform=np.random.rand(n_neurons, n_neurons))

    # Simulator run length is given in seconds, hence the /1000 conversion.
    with nengo.Simulator(net) as sim:
        sim.run(time / 1000)

    return t() - start_total, t() - start_inner
github BINDS-LAB-UMASS / bindsnet_experiments / experiments / benchmark / benchmark.py View on Github external
def Nengo(n_neurons, time):
    """Time a nengo build + simulation of a two-ensemble LIF network.

    `time` is in milliseconds; the return value is the pair
    (t() - t0, t() - t1) of wall-clock durations in seconds, with both
    timers started before the network is constructed.
    """
    t0 = t()
    t1 = t()

    network = nengo.Network()
    with network:
        ens_in = nengo.Ensemble(n_neurons, dimensions=1, neuron_type=nengo.LIF())
        ens_out = nengo.Ensemble(n_neurons, dimensions=2, neuron_type=nengo.LIF())
        # Random dense transform connecting every input neuron to every output.
        weights = np.random.rand(n_neurons, n_neurons)
        nengo.Connection(ens_in, ens_out, transform=weights)

    sim = nengo.Simulator(network)
    with sim:
        sim.run(time / 1000)  # milliseconds -> seconds

    return t() - t0, t() - t1