# SYDE 556 / Nengo 1.x demonstration scripts for recurrent (feedback)
# connections: simple feedback functions, synaptic filtering of decoded
# spikes, neural integrators (plain and controlled), and several
# attractor networks (oscillators, Lorenz attractor).
#
# Each banner below delimits what was originally a stand-alone example
# script; `net` and `feedback` are intentionally re-bound per script.
#
# NOTE(review): `nef` is the Nengo 1.x (Jython) scripting API and
# `syde556` is a course-provided module — these scripts are assumed to
# run inside that environment; they cannot run under plain CPython
# without those modules on the path.

# --- Script 1: positive feedback (x -> x+1): deliberately unstable -------
import nef

net = nef.Network('Recurrent Test 1')
net.make('A', neurons=1000, dimensions=1)

def feedback(x):
    # Adding 1 every pass drives the represented value to saturation —
    # this demo shows what UNSTABLE recurrence looks like.
    return x[0] + 1

net.connect('A', 'A', func=feedback, pstc=0.1)
net.add_to_nengo()
net.view()

# --- Script 2: negative feedback (x -> -x): decays toward zero -----------
import nef

net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)

def feedback(x):
    return -x[0]

net.connect('A', 'A', func=feedback)
net.add_to_nengo()
net.view()

# --- Script 3: negative feedback with an external input ------------------
import nef

net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)

def feedback(x):
    return -x[0]

net.connect('A', 'A', func=feedback)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()

# --- Script 4: nonlinear feedback (x -> x^2) with an external input ------
import nef

net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)

def feedback(x):
    return x[0] * x[0]

net.connect('A', 'A', func=feedback)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()

# --- Script 5: decoding a step input through spikes, slow synapse --------
import numpy  # fix: numpy was used below but never imported
import syde556

dt = 0.001
t = numpy.arange(1000) * dt
x = numpy.zeros(1000)          # step input: 0 for 300 ms, then 1
x[300:] = 1

A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50, 100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.1)

import pylab
pylab.plot(t, x, label='$x$')
# raw string so the LaTeX backslash is kept literal (fix: '\h' is a
# deprecated non-escape in non-raw strings)
pylab.plot(t, xhat, label=r'$\hat{x}$')
pylab.legend(loc='best')
pylab.ylim(-0.2, 1.2)
pylab.show()

# --- Script 6: same decoding demo with a faster synapse (tau=0.03) -------
import numpy
import syde556

dt = 0.001
t = numpy.arange(1000) * dt
x = numpy.zeros(1000)
x[300:] = 1

A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50, 100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.03)

import pylab
pylab.plot(t, x, label='$x$')
pylab.plot(t, xhat, label=r'$\hat{x}$')
pylab.legend(loc='best')
pylab.ylim(-0.2, 1.2)
pylab.show()

# --- Script 7: compare spiking estimate with the ideal filtered input ----
import numpy
import syde556

dt = 0.001
t = numpy.arange(1000) * dt
x = numpy.zeros(1000)
x[300:] = 1

# Ideal comparison: the input convolved with the post-synaptic filter.
h = syde556.psc_filter(t - 0.5, 0.1)
fx = syde556.apply_filter(numpy.array(x)[:, None], h) * dt

A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50, 100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.1)

import pylab
pylab.plot(t, x, label='$x(t)$')
pylab.plot(t, xhat, label=r'$\hat{x}(t)$')
pylab.plot(t, fx, label='$x(t)*h(t)$')
pylab.legend(loc='lower right')
pylab.ylim(-0.2, 1.2)
pylab.show()

# --- Script 8: neural integrator -----------------------------------------
import nef

tau = 0.1
net = nef.Network('Neural Integrator')
net.make('velocity', neurons=100, dimensions=1)
net.make('position', neurons=100, dimensions=1)

def recurrent(x):
    # Identity feedback through a tau synapse implements integration.
    return x[0]

net.connect('position', 'position', func=recurrent, pstc=tau)
# Input is scaled by tau so the NEF recurrence integrates the velocity.
net.connect('velocity', 'position', transform=[[tau]])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.add_to_nengo()
net.view()

# --- Script 9: representation error x - xhat of a rate ensemble ----------
import numpy  # fix: numpy was used below but never imported
import syde556
reload(syde556)  # NOTE(review): Python-2/Jython builtin; use importlib.reload on Python 3

x = numpy.linspace(-1, 1, 100)
A = syde556.Ensemble(neurons=200, dimensions=1, seed=1, tau_rc=0.02)
decoder = A.compute_decoder(x, x, noise=0.1)
activity, xhat = A.simulate_rate(x, decoder)

import pylab
pylab.axhline(0, color='k')
pylab.plot(x, x - xhat, label=r'$x-\hat{x}$')
pylab.legend(loc='best')
pylab.xlabel('$x$')
pylab.show()

# --- Script 10: leaky integrator with centre time constant ---------------
import nef

tau = 0.1
tau_centre = 1.0
net = nef.Network('Neural Integrator')
net.make('velocity', neurons=100, dimensions=1)
net.make('position', neurons=100, dimensions=1)

def recurrent(x):
    # Feedback gain < 1 gives a controlled decay with time constant
    # tau_centre instead of perfect integration.
    return (1 - tau / tau_centre) * x[0]

net.connect('position', 'position', func=recurrent, pstc=tau)
net.connect('velocity', 'position', transform=[[tau]])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.add_to_nengo()
net.view()

# --- Script 11: controlled integrator (decay set by a second input) ------
import nef

tau = 0.1
tau_centre = 1.0
net = nef.Network('Controlled Neural Integrator')
net.make('decay', neurons=100, dimensions=1)
net.make('velocity', neurons=100, dimensions=1)
# position is 2-D: dim 0 is the integrated value, dim 1 holds the decay signal
net.make('position', neurons=200, dimensions=2)

def recurrent(x):
    # dim 0: integrate, attenuated multiplicatively by the decay in dim 1;
    # dim 1: no recurrence.
    return x[0] - x[1] * x[0], 0

net.connect('position', 'position', func=recurrent, pstc=tau)
# Flat 2x1 transforms (row-major): velocity feeds dim 0 scaled by tau,
# decay feeds dim 1 — presumably the nef API accepts the flat form; the
# 1-D integrator above uses the nested [[tau]] form. TODO confirm.
net.connect('velocity', 'position', transform=[tau, 0])
net.connect('decay', 'position', transform=[0, 1])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.make_input('input_decay', [0])
net.connect('input_decay', 'decay')
net.add_to_nengo()
net.view()

# --- Script 12: linear (harmonic) oscillator -----------------------------
import nef

net = nef.Network('Linear Oscillator')
net.make('A', neurons=100, dimensions=2)

def feedback(x):
    # Rotation plus slight expansion keeps the oscillation from decaying.
    return x[0] + x[1], -x[0] + x[1]

net.connect('A', 'A', func=feedback, pstc=0.01)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()

# --- Script 13: Lorenz attractor -----------------------------------------
tau = 0.1
sigma = 10
beta = 8.0 / 3
rho = 28

import nef

net = nef.Network('Lorenz attractor', seed=6)
net.make('A', 2000, 3, radius=60)

def feedback(x):
    # Lorenz equations in coordinates shifted so the attractor is
    # centred near the origin (x[2] stands in for z - rho).
    dx0 = -sigma * x[0] + sigma * x[1]
    dx1 = -x[0] * x[2] - x[1]
    dx2 = x[0] * x[1] - beta * (x[2] + rho) - rho
    # Euler step through the tau synapse: f(x) = tau*dx + x.
    return [dx0 * tau + x[0], dx1 * tau + x[1], dx2 * tau + x[2]]

net.connect('A', 'A', func=feedback, pstc=tau)
net.view()

# --- Script 14: square-wave oscillator -----------------------------------
import nef

net = nef.Network('Square Oscillator')
net.make('A', neurons=1000, dimensions=2)
tau = 0.02
r = 4

def feedback(x):
    # Piecewise-constant velocity field that traces out a square orbit:
    # which edge we are on decides the direction of motion.
    if abs(x[1]) > abs(x[0]):
        if x[1] > 0:
            dx = [r, 0]
        else:
            dx = [-r, 0]
    else:
        if x[0] > 0:
            dx = [0, -r]
        else:
            dx = [0, r]
    return [tau * dx[0] + x[0], tau * dx[1] + x[1]]

net.connect('A', 'A', func=feedback, pstc=tau)
net.make_input('input', [0, 0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()

# --- Script 15: oscillator driving a heart-shaped output mapping ---------
import nef

net = nef.Network('Heart Oscillator')
net.make('A', neurons=1000, dimensions=2)
tau = 0.02
r = 4

def feedback(x):
    # Plain circular oscillator at angular rate r.
    return [-tau * r * x[1] + x[0], tau * r * x[0] + x[1]]

net.connect('A', 'A', func=feedback, pstc=tau)

from math import atan2, sin, cos, sqrt

def heart(x):
    # Map the oscillator's phase onto a heart-shaped curve in polar form
    # (this local r shadows the module-level radius constant on purpose).
    theta = atan2(x[1], x[0])
    r = 2 - 2 * sin(theta) + sin(theta) * sqrt(abs(cos(theta))) / (sin(theta) + 1.4)
    return [-r * cos(theta), r * sin(theta)]

net.make('B', neurons=10, dimensions=2)
net.connect('A', 'B', func=heart, pstc=tau)
net.make_input('input', [0, 0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()

# --- Script 16: controlled oscillator (frequency set by third dim) -------
import nef

net = nef.Network('Controlled Oscillator')
net.make('A', neurons=500, dimensions=3, radius=1.7)
tau = 0.1
scale = 10

def feedback(x):
    # x[2] multiplies the rotation rate, so it controls the frequency;
    # the 1.1 gain compensates for the decay of the synaptic filter.
    return (x[1] * x[2] * scale * tau + 1.1 * x[0],
            -x[0] * x[2] * scale * tau + 1.1 * x[1],
            0)

net.connect('A', 'A', func=feedback, pstc=tau)
net.make_input('input', [0, 0, 0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()