-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy pathLGneurons.py
1310 lines (1105 loc) · 55.9 KB
/
LGneurons.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/python
# -*- coding: utf-8 -*-
interactive = False # avoid loading X dependent things
# set to False for simulations on Sango
storeGDF = True # unless overriden by run.py, keep spike rasters
import nstrand
import pandas as pd
from modelParams import *
import nest
import numpy as np
import numpy.random as rnd
import csv
from math import sqrt, cosh, exp, pi
AMPASynapseCounter = 0 # counter variable for the fast connect
import nest.topology as nesttopo
#-------------------------------------------------------------------------------
# Loads a given LG14 model parameterization
# ID must be in [0,14]
#-------------------------------------------------------------------------------
def loadLG14params(ID):
  """Loads a given LG14 model parameterization into the module-level dicts.

  ID: integer index in [0,14] selecting one row (one solution) of the
      `solutions_simple_unique.csv` file from (Lienard & Girard, 2014).

  Side effects: overwrites in place the module-level `alpha` (synaptic
  efficacy), `p` (distance) and `BGparams[...]['V_th']` (firing threshold)
  entries for every connection/nucleus found in the solution row.
  Missing or malformed columns are reported and skipped, so a partial
  solution file still allows the simulation to run.
  """
  # Load the file with the Lienard solutions.
  # `with` guarantees the file handle is closed even if parsing fails
  # (the previous version leaked the handle returned by open()).
  with open("solutions_simple_unique.csv") as solutionsFile:
    LG14SolutionsReader = csv.DictReader(solutionsFile, delimiter=';')
    LG14Solutions = [row for row in LG14SolutionsReader]
  print('### Parameterization #'+str(ID)+' from (Lienard & Girard, 2014) is used. ###')
  # connection efficacies (column names follow the ALPHA_Src_Tgt convention)
  for k,v in alpha.iteritems():
    try:
      alpha[k] = round(float(LG14Solutions[ID]['ALPHA_'+k.replace('->','_')]),0)
    # narrowed from a bare `except:`: only missing row (IndexError),
    # missing column (KeyError) or unparsable number (ValueError) are expected
    except (IndexError, KeyError, ValueError):
      print('Could not find LG14 alpha parameters for connection `'+k+'`, trying to run anyway.')
  # dendritic distance parameters
  for k,v in p.iteritems():
    try:
      p[k] = round(float(LG14Solutions[ID]['DIST_'+k.replace('->','_')]),2)
    except (IndexError, KeyError, ValueError):
      print('Could not find LG14 distance (p) parameters for connection `'+k+'`, trying to run anyway.')
  # per-nucleus firing thresholds
  for k,v in BGparams.iteritems():
    try:
      BGparams[k]['V_th'] = round(float(LG14Solutions[ID]['THETA_'+k]),1)
    except (IndexError, KeyError, ValueError):
      print('Could not find LG14 theta parameters for connection `'+k+'`, trying to run anyway.')
#-------------------------------------------------------------------------------
# Overrides the neuron threshold parameters from LG14 with those defined in parameters
# Added by Jean during his stay at OIST, should not be used anymore.
#-------------------------------------------------------------------------------
#def loadThetaFromCustomparams(params):
# for k,v in BGparams.items():
# try:
# newval = round(float(params['THETA_'+k]), 2)
# print("WARNING: overwriting LG14 value for theta in "+k+" from original value of "+str(BGparams[k]['V_th'])+" to new value: "+str(newval))
# BGparams[k]['V_th'] = newval # firing threshold
# except:
# print("INFO: keeping LG14 value for theta in "+k+" to its original value of "+str(BGparams[k]['V_th']))
# pass
#-------------------------------------------------------------------------------
# Changes the default of the iaf_psc_alpha_multisynapse neurons
# Very important because it defines the 3 types of receptors (AMPA, NMDA, GABA) that will be needed
# Has to be called after any KernelReset
#-------------------------------------------------------------------------------
def initNeurons():
  """Changes the defaults of the iaf_psc_alpha_multisynapse neuron model.

  Very important because `CommonParams` (from modelParams) defines the 3
  receptor types (AMPA, NMDA, GABA) that all later connections rely on.
  Has to be called again after any NEST kernel reset, since resets restore
  the built-in model defaults.
  """
  nest.SetDefaults("iaf_psc_alpha_multisynapse", CommonParams)
#-------------------------------------------------------------------------------
# Creates a population of neurons
# name: string naming the population, as defined in NUCLEI list
# fake: if fake is True, the neurons will be replaced by Poisson generators, firing
# at the rate indicated in the "rate" dictionary
# parrot: do we use parrot neurons or not? If not, there will be no correlations in the inputs, and a waste of computation power...
#-------------------------------------------------------------------------------
def create(name,fake=False,parrot=True):
  """Creates a (single-channel) population of neurons.

  name: string naming the population, as defined in NUCLEI list; used as key
        into the module-level nbSim/rate/BGparams/Pop/Fake dictionaries
  fake: if True, the neurons are replaced by Poisson generators firing at rate[name]
  parrot: if True (and fake), each Poisson generator is relayed by a parrot
          neuron so that all targets of one source receive correlated spikes

  Side effects: fills Pop[name] (and Fake[name] when fake and parrot) with
  the NEST GIDs of the created nodes.
  """
  if nbSim[name] == 0:
    print 'ERROR: create(): nbSim['+name+'] = 0'
    exit()
  if fake:
    if rate[name] == 0:
      # NOTE(review): only a message is printed here, execution continues
      # (unlike the nbSim check above which exits) — confirm this is intended
      print 'ERROR: create(): rate['+name+'] = 0 Hz'
    print '* '+name+'(fake):',nbSim[name],'Poisson generators with avg rate:',rate[name]
    if not parrot:
      print "/!\ /!\ /!\ /!\ \nWARNING: parrot neurons not used, no correlations in inputs\n"
      # generators used directly as the population: independent inputs
      Pop[name] = nest.Create('poisson_generator',int(nbSim[name]))
      nest.SetStatus(Pop[name],{'rate':rate[name]})
    else:
      # one generator per parrot neuron, relayed one-to-one
      Fake[name] = nest.Create('poisson_generator',int(nbSim[name]))
      nest.SetStatus(Fake[name],{'rate':rate[name]})
      Pop[name] = nest.Create('parrot_neuron',int(nbSim[name]))
      nest.Connect(pre=Fake[name],post=Pop[name],conn_spec={'rule':'one_to_one'})
  else:
    # real neurons, parameterized with the LG14-derived values
    print '* '+name+':',nbSim[name],'neurons with parameters:',BGparams[name]
    Pop[name] = nest.Create("iaf_psc_alpha_multisynapse",int(nbSim[name]),params=BGparams[name])
#-------------------------------------------------------------------------------
# Creates a popolation of neurons subdivided in Multiple Channels
#
# name: string naming the population, as defined in NUCLEI list
# nbCh: integer stating the number of channels to be created
# fake: if fake is True, the neurons will be replaced by Poisson generators, firing
# at the rate indicated in the "rate" dictionary
# parrot: do we use parrot neurons or not? If not, there will be no correlations in the inputs, and a waste of computation power...
#-------------------------------------------------------------------------------
def createMC(name,nbCh,fake=False,parrot=True):
  """Creates a population of neurons subdivided in multiple channels.

  name: string naming the population, as defined in NUCLEI list
  nbCh: integer stating the number of channels to be created
  fake: if True, the neurons are replaced by Poisson generators firing at rate[name]
  parrot: if True (and fake), parrot neurons relay the generators so inputs
          are correlated across targets

  Side effects: Pop[name] becomes a list of nbCh GID lists (one per channel);
  Fake[name] likewise when fake and parrot.
  """
  if nbSim[name] == 0:
    print 'ERROR: create(): nbSim['+name+'] = 0'
    exit()
  Pop[name]=[]
  if fake:
    Fake[name]=[]
    if rate[name] == 0:
      # NOTE(review): only a message is printed here, execution continues — confirm intended
      print 'ERROR: create(): rate['+name+'] = 0 Hz'
    print '* '+name+'(fake):',nbSim[name]*nbCh,'Poisson generators (divided in',nbCh,'channels) with avg rate:',rate[name]
    if not parrot:
      print "/!\ /!\ /!\ /!\ \nWARNING: parrot neurons not used, no correlations in inputs\n"
      # one set of generators per channel, used directly as the population
      for i in range(nbCh):
        Pop[name].append(nest.Create('poisson_generator',int(nbSim[name])))
        nest.SetStatus(Pop[name][i],{'rate':rate[name]})
    else:
      # per channel: generators relayed one-to-one by parrot neurons
      for i in range(nbCh):
        Fake[name].append(nest.Create('poisson_generator',int(nbSim[name])))
        nest.SetStatus(Fake[name][i],{'rate':rate[name]})
        Pop[name].append(nest.Create('parrot_neuron',int(nbSim[name])))
        nest.Connect(pre=Fake[name][i],post=Pop[name][i],conn_spec={'rule':'one_to_one'})
  else:
    # real neurons, one NEST population per channel
    print '* '+name+':',nbSim[name]*nbCh,'neurons (divided in',nbCh,'channels) with parameters:',BGparams[name]
    for i in range(nbCh):
      Pop[name].append(nest.Create("iaf_psc_alpha_multisynapse",int(nbSim[name]),params=BGparams[name]))
#------------------------------------------------------------------------------
# Routine to perform the fast connection using nest built-in `connect` function
# - `source` & `dest` are lists defining Nest IDs of source & target population
# - `synapse_label` is used to tag connections and be able to find them quickly
# with function `mass_mirror`, that adds NMDA on top of AMPA connections
# - `inDegree`, `receptor_type`, `weight`, `delay` are Nest connection params
#------------------------------------------------------------------------------
def mass_connect(source, dest, synapse_label, inDegree, receptor_type, weight, delay, stochastic_delays=None, verbose=False):
  """Fast connection routine using the NEST built-in `Connect` function.

  NOTE(review): this definition is shadowed by a later redefinition of
  `mass_connect` further down in this file; only the last definition is in
  effect at import time. Consider deleting one of the two.

  - `source` & `dest` are lists of NEST GIDs of source & target population
  - `synapse_label` tags the connections so `mass_mirror` can find them later
  - `inDegree` may be fractional; the integer part is realized with a
    `fixed_indegree` rule, the fractional remainder with `fixed_total_number`
  - `stochastic_delays`: if set (>0), delays are drawn from a clipped normal
    distribution with sigma = delay * stochastic_delays
  """
  def printv(text):
    if verbose:
      print(text)
  sigmaDependentInterval = True # Hugo's method
  # potential initialization of stochastic delays
  if stochastic_delays != None and delay > 0 and stochastic_delays > 0.:
    printv('Using stochastic delays in mass-connect')
    sigma = delay * stochastic_delays
    if sigmaDependentInterval:
      n = 2 # number of standard deviation to include in the distribution
      if stochastic_delays >= 1./n:
        print 'Error : stochastic_delays >= 1/n and the distribution of delays therefore includes 0 which is not possible -> Jean\'s method is used'
        sigmaDependentInterval = False
      else:
        low = delay - n*sigma
        high = delay + n*sigma
    # fallback (Jean's method): clip to a fixed +/-50% interval around the mean
    if not sigmaDependentInterval:
      low = .5*delay
      high = 1.5*delay
    delay = {'distribution': 'normal_clipped', 'low': low, 'high': high, 'mu': delay, 'sigma': sigma}
  # The first `fixed_indegree` connection ensures that all neurons in `dest`
  # are targeted by the same number of axons (an integer number)
  integer_inDegree = np.floor(inDegree)
  if integer_inDegree>0:
    printv('Adding '+str(int(integer_inDegree*len(dest)))+' connections with rule `fixed_indegree`\n')
    nest.Connect(source,
                 dest,
                 {'rule': 'fixed_indegree', 'indegree': int(integer_inDegree)},
                 {'model': 'static_synapse_lbl', 'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
  # The second `fixed_total_number` connection distributes remaining axonal
  # contacts at random (i.e. the remaining fractional part after the first step)
  float_inDegree = inDegree - integer_inDegree
  remaining_connections = np.round(float_inDegree * len(dest))
  if remaining_connections > 0:
    printv('Adding '+str(remaining_connections)+' remaining connections with rule `fixed_total_number`\n')
    nest.Connect(source,
                 dest,
                 {'rule': 'fixed_total_number', 'N': int(remaining_connections)},
                 {'model': 'static_synapse_lbl', 'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
#------------------------------------------------------------------------------
# Routine to duplicate a connection made with a specific receptor, with another
# receptor (typically to add NMDA connections to existing AMPA connections)
# - `source` & `synapse_label` should uniquely define the connections of
# interest - typically, they are the same as in the call to `mass_connect`
# - `receptor_type`, `weight`, `delay` are Nest connection params
#------------------------------------------------------------------------------
def mass_mirror(source, synapse_label, receptor_type, weight, delay, stochastic_delays, verbose=False):
  """Duplicates connections made with one receptor onto another receptor
  (typically to add NMDA connections on top of existing AMPA connections).

  NOTE(review): this definition is shadowed by a later identical redefinition
  of `mass_mirror` in this file; only the last definition is in effect.

  - `source` & `synapse_label` should uniquely identify the connections of
    interest - typically the same values passed to `mass_connect`
  - `receptor_type`, `weight`, `delay` are NEST connection parameters
  - if `stochastic_delays` is set, the mirrored connections re-use the exact
    delays already drawn for the AMPA connections
  """
  def printv(text):
    if verbose:
      print(text)
  # find all AMPA connections for the given projection type
  printv('looking for AMPA connections to mirror with NMDA...\n')
  ampa_conns = nest.GetConnections(source=source, synapse_label=synapse_label)
  # in rare cases, there may be no connections, guard against that
  if ampa_conns:
    # extract just source and target GID lists, all other information is irrelevant here
    printv('found '+str(len(ampa_conns))+' AMPA connections\n')
    if stochastic_delays != None and delay > 0:
      printv('Using stochastic delays in mass-mirror')
      # per-connection delays read back from NEST, one per mirrored synapse
      delay = np.array(nest.GetStatus(ampa_conns, keys=['delay'])).flatten()
    src, tgt, _, _, _ = zip(*ampa_conns)
    nest.Connect(src, tgt, 'one_to_one',
                 {'model': 'static_synapse_lbl',
                  'synapse_label': synapse_label, # tag with the same number (doesn't matter)
                  'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
#-------------------------------------------------------------------------------
# Creates a topological population of neurons subdivided in Multiple Channels
#
# name: string naming the population, as defined in NUCLEI list
# nbCh: integer stating the number of channels to be created
# c: distance to the center (small distance means more channels in competition)
# r: radius of each channel (leading to larger overlap and thus broader competition)
# fake: if fake is True, the neurons will be replaced by Poisson generators, firing
# at the rate indicated in the "rate" dictionary
# parrot: do we use parrot neurons or not? If not, there will be no correlations in the inputs, and a waste of computation power...
#-------------------------------------------------------------------------------
def createTopoMC(name, nbCh, layout, c=0.3, r=0.25, fake=False, parrot=True):
  """Creates a topological population of neurons subdivided in multiple channels.

  name: string naming the population, as defined in NUCLEI list
  nbCh: integer stating the number of channels to be created
  layout: 'circular' (channels are disks around a circle) or 'grid'
  c: distance to the center (small distance means more channels in competition)
  r: radius of each channel (larger overlap gives broader competition)
  fake: if True, the neurons are replaced by Poisson generators firing at rate[name]
  parrot: if True (and fake), parrot neurons relay the generators

  Side effects: fills Topo[name] with the created layer, Pop[name] with nbCh
  GID lists, Fake[name] when fake and parrot, and writes the per-neuron
  positions to 'log/topo_<name>.csv'.
  """
  if nbSim[name] == 0:
    print 'ERROR: create(): nbSim['+name+'] = 0'
    exit()
  Pop[name]=[]
  # helper function that gives the channel center
  def circular_center(nbCh, c, Ch=None):
    # equi-distant points on a circle
    if Ch == None:
      indices = np.arange(0, nbCh, dtype=float) + 0.5
    else:
      indices = np.array(Ch) + 0.5
    angles = (1. - indices/nbCh) * 2. * np.pi
    x, y = np.cos(angles)*c, np.sin(angles)*c
    ## re-project in [0,1]x[0,1]
    #x = (x + 1.) / 2.
    #y = (y + 1.) / 2.
    return {'x': x, 'y': y}
  # helper: draws sim_pts random positions inside each channel's disk
  def circular_positions(nbCh, c, r, sim_pts, Ch=None):
    if Ch == None:
      Ch = range(nbCh)
    center_xy = circular_center(nbCh, c, Ch=Ch)
    xSim = []
    ySim = []
    for i in range(len(Ch)):
      # uniform draw in polar coordinates around the channel center
      angleSim = np.random.uniform(0., 2.*np.pi, int(sim_pts))
      rSim = np.random.uniform(0., r, int(sim_pts))
      xSim = xSim + (np.cos(angleSim)*rSim + center_xy['x'][i]).tolist()
      ySim = ySim + (np.sin(angleSim)*rSim + center_xy['y'][i]).tolist()
    return (xSim, ySim)
  # helper: regular grid covering [-1,1]x[-1,1] with sim_pts*nbCh points
  def grid_positions(nbCh, sim_pts):
    n = int(sim_pts*nbCh)
    n_squared = np.ceil(np.sqrt(n))
    coord = [[x/n_squared*2.-1., y/n_squared*2.-1.] for x in np.arange(0,n_squared, dtype=float) for y in np.arange(0,n_squared, dtype=float)]
    # too many points due to square root rounding? remove at random
    if len(coord) > n:
      coord = np.array(coord)[np.sort(np.random.choice(range(len(coord)), size=n, replace=False))].tolist()
    return ([coord[i][0] for i in range(len(coord))], [coord[i][1] for i in range(len(coord))])
  # compute the neuron coordinates
  if layout == 'circular':
    positions = circular_positions(nbCh, c, r, nbSim[name])
    edge_wrap = False
  elif layout == 'grid':
    positions = grid_positions(nbCh, nbSim[name])
    edge_wrap = True
  else:
    raise KeyError('`layout` must be `circular` or `grid`.')
  if fake:
    Fake[name]=[]
    if rate[name] == 0:
      # NOTE(review): only a message is printed here, execution continues — confirm intended
      print 'ERROR: create(): rate['+name+'] = 0 Hz'
    print '* '+name+'(fake):',nbSim[name]*nbCh,'Poisson generators (divided in',nbCh,'channels) with avg rate:',rate[name]
    if not parrot:
      print "/!\ /!\ /!\ /!\ \nWARNING: parrot neurons not used, no correlations in inputs\n"
      # the layer itself is made of Poisson generators
      Topo[name] = nesttopo.CreateLayer({'positions': [[positions[0][i], positions[1][i]] for i in range(len(positions[0]))], 'elements': 'poisson_generator', 'extent': [2., 2.], 'center':[0., 0.], 'edge_wrap': edge_wrap})
      all_nodes = nest.GetNodes(Topo[name])
      # slice the flat GID list into nbCh consecutive channels
      for i in range(nbCh):
        Pop[name].append([all_nodes[0][j] for j in np.arange(int(i*nbSim[name]), int((i+1)*nbSim[name]))])
        nest.SetStatus(Pop[name][i],{'rate':rate[name]})
    else:
      # the layer is made of parrot neurons, driven one-to-one by generators
      Topo[name] = nesttopo.CreateLayer({'positions': [[positions[0][i], positions[1][i]] for i in range(len(positions[0]))], 'elements': 'parrot_neuron', 'extent': [2., 2.], 'center':[0., 0.], 'edge_wrap': edge_wrap})
      all_nodes = nest.GetNodes(Topo[name])
      for i in range(nbCh):
        Fake[name].append(nest.Create('poisson_generator',int(nbSim[name])))
        nest.SetStatus(Fake[name][i],{'rate':rate[name]})
      for i in range(nbCh):
        Pop[name].append([all_nodes[0][j] for j in np.arange(int(i*nbSim[name]), int((i+1)*nbSim[name]))])
        nest.Connect(pre=Fake[name][i],post=Pop[name][i],conn_spec={'rule':'one_to_one'})
  else:
    print '* '+name+':',nbSim[name]*nbCh,'neurons (divided in',nbCh,'channels) with parameters:',BGparams[name]
    # layer elements inherit the nucleus parameters via the model defaults
    nest.SetDefaults('iaf_psc_alpha_multisynapse', BGparams[name])
    Topo[name] = nesttopo.CreateLayer({'positions': [[positions[0][i], positions[1][i]] for i in range(len(positions[0]))], 'elements': 'iaf_psc_alpha_multisynapse', 'extent': [2., 2.], 'center':[0., 0.], 'edge_wrap': edge_wrap})
    all_nodes = nest.GetNodes(Topo[name])
    for i in range(nbCh):
      Pop[name].append([all_nodes[0][j] for j in np.arange(int(i*nbSim[name]), int((i+1)*nbSim[name]))])
  # writes the layout to a file
  dataPath='log/'
  topoPositions = 'ID, Ch, X, Y, Z\n'
  topoFile=open(dataPath+'topo_'+name+'.csv','w',1)
  ni = 0
  for i in range(nbCh):
    for j in range(int(nbSim[name])):
      topoPositions += str(Pop[name][i][j])+', '+str(i)+', '+str(positions[0][ni])+', '+str(positions[1][ni])+', '+str(0)+'\n'
      ni += 1
  topoFile.write(topoPositions)
  topoFile.close()
#------------------------------------------------------------------------------
# Routine to perform the fast connection using nest built-in `connect` function
# - `source` & `dest` are lists defining Nest IDs of source & target population
# - `synapse_label` is used to tag connections and be able to find them quickly
# with function `mass_mirror`, that adds NMDA on top of AMPA connections
# - `inDegree`, `receptor_type`, `weight`, `delay` are Nest connection params
#------------------------------------------------------------------------------
def mass_connect(source, dest, synapse_label, inDegree, receptor_type, weight, delay, stochastic_delays=None, verbose=False):
  """Fast connection routine using the NEST built-in `Connect` function.

  - `source` & `dest` are lists of NEST GIDs of source & target population
  - `synapse_label` tags the connections so `mass_mirror` can find them later
    (to add NMDA on top of AMPA connections)
  - `inDegree` may be fractional: the integer part is realized with a
    `fixed_indegree` rule (every target gets the same number of axons), the
    fractional remainder with a `fixed_total_number` rule (spread at random)
  - `stochastic_delays`: if set (>0), delays are drawn from a clipped normal
    distribution with sigma = delay * stochastic_delays
  """
  def printv(text):
    if verbose:
      print(text)
  sigmaDependentInterval = True # using Hugo's method
  # potential initialization of stochastic delays
  if stochastic_delays != None and delay > 0 and stochastic_delays > 0.:
    printv('Using stochastic delays in mass-connect')
    sigma = delay * stochastic_delays
    if sigmaDependentInterval:
      n = 2 # number of standard deviation to include in the distribution
      if stochastic_delays >= 1./n:
        print('Error : stochastic_delays >= 1/n and the distribution of delays therefore includes 0 which is not possible -> Jean\'s method is used')
        sigmaDependentInterval = False
      else:
        low = delay - n*sigma
        high = delay + n*sigma
    # BUGFIX: the previous version attached the fallback `else` to the
    # always-True `if sigmaDependentInterval:` test, so when the fallback to
    # Jean's method was triggered just above, `low`/`high` were never set and
    # the delay-dict line raised a NameError. Set them here instead (this
    # matches the earlier definition of mass_connect in this file).
    if not sigmaDependentInterval:
      low = .5*delay
      high = 1.5*delay
    delay = {'distribution': 'normal_clipped', 'low': low, 'high': high, 'mu': delay, 'sigma': sigma}
  # The first `fixed_indegree` connection ensures that all neurons in `dest`
  # are targeted by the same number of axons (an integer number)
  integer_inDegree = np.floor(inDegree)
  if integer_inDegree>0:
    printv('Adding '+str(int(integer_inDegree*len(dest)))+' connections with rule `fixed_indegree`\n')
    nest.Connect(source,
                 dest,
                 {'rule': 'fixed_indegree', 'indegree': int(integer_inDegree)},
                 {'model': 'static_synapse_lbl', 'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
  # The second `fixed_total_number` connection distributes remaining axonal
  # contacts at random (i.e. the remaining fractional part after the first step)
  float_inDegree = inDegree - integer_inDegree
  remaining_connections = np.round(float_inDegree * len(dest))
  if remaining_connections > 0:
    printv('Adding '+str(remaining_connections)+' remaining connections with rule `fixed_total_number`\n')
    nest.Connect(source,
                 dest,
                 {'rule': 'fixed_total_number', 'N': int(remaining_connections)},
                 {'model': 'static_synapse_lbl', 'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
#------------------------------------------------------------------------------
# Routine to perform the fast connection using nest built-in `connect` function
# And in the topological case
# - `sourceName` & `destName` are names of two different layers
# - `synapse_label` is used to tag connections and be able to find them quickly
# with function `mass_mirror`, that adds NMDA on top of AMPA connections
# - `inDegree`, `receptor_type`, `weight`, `delay` are Nest connection params
# - `spread` is a parameter that affects the diffusion level of the connection
#------------------------------------------------------------------------------
def mass_connect_topo(sourceName, destName, synapse_label, inDegree, receptor_type, weight, delay, spread, stochastic_delays=None, verbose=False):
  """Fast connection routine for the topological case.

  - `sourceName` & `destName` are names of two different layers (keys into
    the module-level Topo/Pop/nbSim dictionaries)
  - `synapse_label` tags connections so `mass_mirror` can find them quickly
  - `inDegree`, `receptor_type`, `weight`, `delay` are NEST connection params
  - `spread` is the radius of the circular mask, i.e. the diffusion level of
    the connection
  - `stochastic_delays`: if set, delays are drawn from a normal distribution
    clipped to [0.5*delay, 1.5*delay] with sigma = delay * stochastic_delays
  """
  def printv(text):
    if verbose:
      print(text)
  # potential initialization of stochastic delays
  if stochastic_delays != None and delay > 0:
    printv('Using stochastic delays in mass-connect')
    low = delay * 0.5
    high = delay * 1.5
    sigma = delay * stochastic_delays
    delay = {'distribution': 'normal_clipped', 'low': low, 'high': high, 'mu': delay, 'sigma': sigma}
  ## creation of the synapse model
  #nest.CopyModel('static_synapse_lbl', 'mass_connected_'+sourceName+'_'+destName, {'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
  ## creation of the topological connection dict
  #conndict = {'connection_type': 'convergent',
  #            'mask': {'circular': {'radius': spread}},
  #            'synapse_model': 'mass_connected_'+sourceName+'_'+destName
  #           }
  #nest.CopyModel('static_synapse_lbl', 'mass_connected_'+sourceName+'_'+destName, {'synapse_label': synapse_label, 'receptor_type': receptor_type, 'weight': weight, 'delay':delay})
  # NOTE(review): label and receptor are set as *defaults* of the shared
  # synapse model rather than per projection — any later call overwrites them
  nest.SetDefaults('static_synapse_lbl', {'synapse_label': synapse_label, 'receptor_type': receptor_type})
  # creation of the topological connection dict
  conndict = {'connection_type': 'convergent',
              'mask': {'circular': {'radius': spread}},
              'synapse_model': 'static_synapse_lbl', 'weights': weight, 'delays':delay,
              'allow_oversized_mask': True, 'allow_multapses': True}
  # The first call ensures that all neurons in `destName`
  # have at least `int(inDegree)` incoming connections
  integer_inDegree = np.floor(inDegree)
  if integer_inDegree>0:
    printv('Adding '+str(int(integer_inDegree*len(Pop[destName])))+' connections with rule `fixed_indegree`\n')
    integer_conndict = conndict.copy()
    integer_conndict.update({'number_of_connections': int(integer_inDegree)})
    nesttopo.ConnectLayers(Topo[sourceName], Topo[destName], integer_conndict)
  # The second call distributes the approximate number of remaining axonal
  # contacts at random (i.e. the remaining fractional part after the first step)
  # Why "approximate"? Because with pynest layers, there are only two ways to specify
  # the number of axons in a connection:
  # 1) with an integer, specified with respect to each source (alt. target) neurons
  # 2) as a probability
  # Here, we have a fractional part - not an integer number - so that leaves us option 2.
  # However, because the new axonal contacts are drawn at random, we will not have the
  # exact number of connections
  float_inDegree = inDegree - integer_inDegree
  remaining_connections = np.round(float_inDegree * len(Pop[destName]))
  if remaining_connections > 0:
    printv('Adding '+str(remaining_connections)+' remaining connections with rule `fixed_total_number`\n')
    float_conndict = conndict.copy()
    float_conndict.update({'kernel': 1. / (nbSim[sourceName] * float(remaining_connections))})
    nesttopo.ConnectLayers(Topo[sourceName], Topo[destName], float_conndict)
#------------------------------------------------------------------------------
# Routine to duplicate a connection made with a specific receptor, with another
# receptor (typically to add NMDA connections to existing AMPA connections)
# - `source` & `synapse_label` should uniquely define the connections of
# interest - typically, they are the same as in the call to `mass_connect`
# - `receptor_type`, `weight`, `delay` are Nest connection params
#------------------------------------------------------------------------------
def mass_mirror(source, synapse_label, receptor_type, weight, delay, stochastic_delays, verbose=False):
  """Duplicates connections made with one receptor onto another receptor
  (typically to add NMDA connections on top of existing AMPA connections).

  - `source` & `synapse_label` should uniquely identify the connections of
    interest - typically the same values passed to `mass_connect`
  - `receptor_type`, `weight`, `delay` are NEST connection parameters
  - if `stochastic_delays` is set and `delay` > 0, the mirrored connections
    re-use the exact per-connection delays already drawn for the originals
  """
  def printv(text):
    if verbose:
      print(text)
  # locate every tagged (AMPA) connection of this projection
  printv('looking for AMPA connections to mirror with NMDA...\n')
  ampa_conns = nest.GetConnections(source=source, synapse_label=synapse_label)
  # in rare cases there may be none at all - nothing to mirror then
  if not ampa_conns:
    return
  printv('found '+str(len(ampa_conns))+' AMPA connections\n')
  if stochastic_delays != None and delay > 0:
    printv('Using stochastic delays in mass-mirror')
    # read back the randomly drawn delays, one per mirrored synapse
    delay = np.array(nest.GetStatus(ampa_conns, keys=['delay'])).flatten()
  # only the source and target GIDs of each connection entry matter here
  mirror_src = [conn[0] for conn in ampa_conns]
  mirror_tgt = [conn[1] for conn in ampa_conns]
  mirror_syn_spec = {'model': 'static_synapse_lbl',
                     'synapse_label': synapse_label, # tag with the same number (doesn't matter)
                     'receptor_type': receptor_type, 'weight': weight, 'delay':delay}
  nest.Connect(mirror_src, mirror_tgt, 'one_to_one', mirror_syn_spec)
#-------------------------------------------------------------------------------
# Establishes a connexion between two populations, following the results of LG14
# type : a string 'ex' or 'in', defining whether it is excitatory or inhibitory
# nameTgt, nameSrc : strings naming the populations, as defined in NUCLEI list
# redundancy : value that characterizes the number of repeated axonal contacts from one neuron of Src to one neuron of Tgt (see RedundancyType for interpretation of this value)
# RedundancyType : string
# if 'inDegreeAbs': `redundancy` is the number of neurons from Src that project to a single Tgt neuron
# if 'outDegreeAbs': `redundancy` is number of axonal contacts from an individual Src neuron onto a single Tgt neuron
# if 'outDegreeCons': `redundancy` is a scaled proportion of axonal contacts between each neuron from Src onto a single Tgt neuron given arithmetical constraints, ranging from 0 (minimal number of contacts to achieve required axonal bouton counts) to 1 (maximal number of contacts with respect to population numbers)
# LCGDelays: shall we use the delays obtained by (Liénard, Cos, Girard, in prep) or not (default = True)
# gain : allows to amplify the weight normally deduced from LG14
#-------------------------------------------------------------------------------
def connect(type, nameSrc, nameTgt, redundancy, RedundancyType, LCGDelays=True, gain=1., stochastic_delays=None, verbose=False, projType=''):
  """Establishes a connection between two populations, following LG14.

  type : 'ex' (AMPA+NMDA), 'AMPA', 'NMDA', or 'in' (GABA)
  nameSrc, nameTgt : population names, as defined in NUCLEI list
  redundancy, RedundancyType : constrain the inDegree:
    'inDegreeAbs'   - `redundancy` is the number of Src neurons projecting to one Tgt neuron
    'outDegreeAbs'  - `redundancy` is the number of axonal contacts from one Src neuron onto one Tgt neuron
    'outDegreeCons' - `redundancy` is a scaled proportion in [0,1] of the feasible contact range
  LCGDelays : use the delays from (Lienard, Cos, Girard, in prep) via `tau`,
              otherwise a uniform 1 ms delay
  gain : amplifies the weight normally deduced from LG14

  Side effects: increments the global AMPASynapseCounter for 'ex' connections
  (used as the synapse label so NMDA can mirror AMPA).
  Returns the weight dict W, or None when the connection is skipped.
  """
  def printv(text):
    if verbose:
      print(text)
  printv("* connecting "+nameSrc+" -> "+nameTgt+" with "+type+" connection")
  if RedundancyType == 'inDegreeAbs':
    # inDegree is already provided in the right form
    inDegree = float(redundancy)
  elif RedundancyType == 'outDegreeAbs':
    #### fractional outDegree is expressed as a fraction of max axo-dendritic contacts
    inDegree = get_frac(1./redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], verbose=verbose)
  elif RedundancyType == 'outDegreeCons':
    #### fractional outDegree is expressed as a ratio of min/max axo-dendritic contacts
    inDegree = get_frac(redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], useMin=True, verbose=verbose)
  else:
    raise KeyError('`RedundancyType` should be one of `inDegreeAbs`, `outDegreeAbs`, or `outDegreeCons`.')
  # check if in degree acceptable (not larger than number of neurons in the source nucleus)
  if inDegree > nbSim[nameSrc]:
    printv("/!\ WARNING: required 'in degree' ("+str(inDegree)+") larger than number of neurons in the source population ("+str(nbSim[nameSrc])+"), thus reduced to the latter value")
    inDegree = nbSim[nameSrc]
  if inDegree == 0.:
    printv("/!\ WARNING: non-existent connection strength, will skip")
    return
  global AMPASynapseCounter
  # process receptor types
  if type == 'ex':
    lRecType = ['AMPA','NMDA']
    AMPASynapseCounter = AMPASynapseCounter + 1
    lbl = AMPASynapseCounter # needs to add NMDA later
  elif type == 'AMPA':
    lRecType = ['AMPA']
    lbl = 0
  elif type == 'NMDA':
    lRecType = ['NMDA']
    lbl = 0
  elif type == 'in':
    lRecType = ['GABA']
    lbl = 0
  else:
    raise KeyError('Undefined connexion type: '+type)
  W = computeW(lRecType, nameSrc, nameTgt, inDegree, gain, verbose=False)
  printv("  W="+str(W)+" and inDegree="+str(inDegree))
  #if nameSrc+'->'+nameTgt in ConnectMap:
  #  loadConnectMap = True
  #else:
  #  loadConnectMap = False
  #  ConnectMap[nameSrc+'->'+nameTgt] = []
  # determine which transmission delay to use:
  if LCGDelays:
    delay= tau[nameSrc+'->'+nameTgt]
  else:
    delay= 1.
  # wire the primary receptor; for 'ex' the label lets mass_mirror find these
  mass_connect(Pop[nameSrc], Pop[nameTgt], lbl, inDegree, recType[lRecType[0]], W[lRecType[0]], delay, stochastic_delays = stochastic_delays)
  if type == 'ex':
    # mirror the AMPA connection with similarly connected NMDA connections
    mass_mirror(Pop[nameSrc], lbl, recType['NMDA'], W['NMDA'], delay, stochastic_delays = stochastic_delays)
  return W
#-------------------------------------------------------------------------------
# Establishes a connexion between two populations, following the results of LG14, in a MultiChannel context
# type : a string 'ex' or 'in', defining whether it is excitatory or inhibitory
# nameTgt, nameSrc : strings naming the populations, as defined in NUCLEI list
# projType : type of projections. For the moment: 'focused' (only channel-to-channel connection) and
# 'diffuse' (all-to-one with uniform distribution)
# redundancy, RedundancyType : contrains the inDegree - see function `connect` for details
# LCGDelays: shall we use the delays obtained by (Liénard, Cos, Girard, in prep) or not (default = True)
# gain : allows to amplify the weight normally deduced from LG14
# source_channels : By default with `source_channels=None`, the connection is implemented using all source channels
# Specify a custom list of channels to implement connections only from these channels
# For example, calling successively `connectMC(...,projType='focused',source_channels=[0])` and then `connectMC(...,projType='diffuse',source_channels=[1])` would implement first a focused projection using only source channel 0 and then a diffuse connection using only source channel 1:
# Src channels: (0) (1)
# | / |
# Tgt channels: (0) (1)
#-------------------------------------------------------------------------------
def connectMC(type, nameSrc, nameTgt, projType, redundancy, RedundancyType, LCGDelays=True, gain=1., source_channels=None, stochastic_delays=None, verbose=False):
  """Establishes a connection between two populations in a MultiChannel context,
  following the results of LG14.

  type        : 'ex', 'AMPA', 'NMDA' or 'in' -- nature of the connection
                ('ex' creates the AMPA connection, then mirrors it with NMDA).
                NOTE: the parameter name shadows the `type` builtin; kept for caller compatibility.
  nameSrc, nameTgt : strings naming the populations, as defined in NUCLEI list
  projType    : 'focused' (channel-to-channel only) or 'diffuse' (all-to-all, uniform)
  redundancy, RedundancyType : constrain the inDegree -- see function `connect` for details
  LCGDelays   : shall we use the delays obtained by (Lienard, Cos, Girard, in prep)? (default True)
  gain        : amplifies the weight normally deduced from LG14
  source_channels : list of source channels to connect from (None = all channels)
  stochastic_delays : forwarded to mass_connect / mass_mirror
  verbose     : print diagnostic messages

  Returns the per-receptor weight dict W, or None when the computed inDegree is 0
  (in which case no connection is created).
  """
  def printv(text):
    if verbose:
      print(text)

  printv("* connecting "+nameSrc+" -> "+nameTgt+" with "+projType+" "+type+" connection")

  if source_channels is None:  # identity test: `== None` is unreliable and non-idiomatic
    # if not specified, assume that the connection originates from all channels
    source_channels = range(len(Pop[nameSrc]))

  # convert the redundancy specification into an inDegree (distinct input neuron count):
  if RedundancyType == 'inDegreeAbs':
    # inDegree is already provided in the right form
    inDegree = float(redundancy)
  elif RedundancyType == 'outDegreeAbs':
    #### fractional outDegree is expressed as a fraction of max axo-dendritic contacts
    inDegree = get_frac(1./redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], verbose=verbose)
  elif RedundancyType == 'outDegreeCons':
    #### fractional outDegree is expressed as a ratio of min/max axo-dendritic contacts
    inDegree = get_frac(redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], useMin=True, verbose=verbose)
  else:
    raise KeyError('`RedundancyType` should be one of `inDegreeAbs`, `outDegreeAbs`, or `outDegreeCons`.')

  # check if in degree acceptable (not larger than number of neurons in the source nucleus)
  # note: the "\\" below keeps the printed text identical ("/!\ ") while avoiding an
  # invalid "\ " escape sequence (SyntaxWarning in recent Python versions)
  if projType == 'focused' and inDegree > nbSim[nameSrc]:
    printv("/!\\ WARNING: required 'in degree' ("+str(inDegree)+") larger than number of neurons in individual source channels ("+str(nbSim[nameSrc])+"), thus reduced to the latter value")
    inDegree = nbSim[nameSrc]
  if projType == 'diffuse' and inDegree > nbSim[nameSrc]*len(source_channels):
    printv("/!\\ WARNING: required 'in degree' ("+str(inDegree)+") larger than number of neurons in the overall source population ("+str(nbSim[nameSrc]*len(source_channels))+"), thus reduced to the latter value")
    inDegree = nbSim[nameSrc]*len(source_channels)
  if inDegree == 0.:
    printv("/!\\ WARNING: non-existent connection strength, will skip")
    return

  global AMPASynapseCounter

  # scale the inDegree by the fraction of source channels actually used:
  inDegree = inDegree * (float(len(source_channels)) / float(len(Pop[nameSrc])))

  # prepare receptor type lists:
  if type == 'ex':
    lRecType = ['AMPA','NMDA']
    AMPASynapseCounter = AMPASynapseCounter + 1
    lbl = AMPASynapseCounter # needs to add NMDA later
  elif type == 'AMPA':
    lRecType = ['AMPA']
    lbl = 0
  elif type == 'NMDA':
    lRecType = ['NMDA']
    lbl = 0
  elif type == 'in':
    lRecType = ['GABA']
    lbl = 0
  else:
    raise KeyError('Undefined connexion type: '+type)

  # compute the global weight of the connection, for each receptor type:
  W = computeW(lRecType, nameSrc, nameTgt, inDegree, gain, verbose=False)
  printv("  W="+str(W)+" and inDegree="+str(inDegree))

  # determine which transmission delay to use:
  if LCGDelays:
    delay = tau[nameSrc+'->'+nameTgt]
  else:
    delay = 1.

  if projType == 'focused': # if projections focused, input come only from the same channel as tgtChannel
    for src_channel in source_channels: # for each relevant channel of the Source nucleus
      mass_connect(Pop[nameSrc][src_channel], Pop[nameTgt][src_channel-source_channels[0]], lbl, inDegree, recType[lRecType[0]], W[lRecType[0]], delay, stochastic_delays = stochastic_delays)
  elif projType == 'diffuse': # if projections diffused, input connections are shared among each possible input channel equally
    for src_channel in source_channels: # for each relevant channel of the Source nucleus
      for tgt_channel in range(len(Pop[nameTgt])): # for each channel of the Target nucleus
        mass_connect(Pop[nameSrc][src_channel], Pop[nameTgt][tgt_channel], lbl, inDegree/len(Pop[nameTgt]), recType[lRecType[0]], W[lRecType[0]], delay, stochastic_delays = stochastic_delays)

  if type == 'ex':
    # mirror the AMPA connection with similarly connected NMDA connections
    for src_channel in source_channels: # for each relevant channel of the Source nucleus
      mass_mirror(Pop[nameSrc][src_channel], lbl, recType['NMDA'], W['NMDA'], delay, stochastic_delays = stochastic_delays)

  return W
#-------------------------------------------------------------------------------
# Establishes a topological connection between two populations
# type : a string 'ex' or 'in', defining whether it is excitatory or inhibitory
# nameTgt, nameSrc : strings naming the populations, as defined in NUCLEI list
# projType : type of projections. For the moment: 'focused' (only channel-to-channel connection) and
# 'diffuse' (all-to-one with uniform distribution)
# redundancy, RedundancyType : constrains the inDegree - see function `connect` for details
# LCGDelays : shall we use the delays obtained by (Liénard, Cos, Girard, in prep) or not (default = True)
# gain : allows to amplify the weight normally deduced from LG14
# source_channels : By default with `source_channels=None`, the connection is implemented using all source channels
# Specify a custom list of channels to implement connections only from these channels
# For example, calling successively `connectTopoMC(...,projType='focused',source_channels=[0])` and then `connectTopoMC(...,projType='diffuse',source_channels=[1])` would implement first a focused projection using only source channel 0 and then a diffuse connection using only source channel 1:
# Src channels: (0) (1)
# | / |
# Tgt channels: (0) (1)
#-------------------------------------------------------------------------------
def connectTopoMC(type, nameSrc, nameTgt, projType, redundancy, RedundancyType, LCGDelays=True, gain=1., source_channels=None, stochastic_delays=None, spreads=None, verbose=False):
  """Establishes a topological connection between two populations.

  type        : 'ex', 'AMPA', 'NMDA' or 'in' -- nature of the connection
                ('ex' creates the AMPA connection, then mirrors it with NMDA).
                NOTE: the parameter name shadows the `type` builtin; kept for caller compatibility.
  nameSrc, nameTgt : strings naming the populations, as defined in NUCLEI list
  projType    : 'focused' (channel-to-channel only) or 'diffuse' (all-to-all, uniform)
  redundancy, RedundancyType : constrain the inDegree -- see function `connect` for details
  LCGDelays   : shall we use the delays obtained by (Lienard, Cos, Girard, in prep)? (default True)
  gain        : amplifies the weight normally deduced from LG14
  source_channels : list of source channels to connect from (None = all channels)
  stochastic_delays : forwarded to mass_connect_topo / mass_mirror
  spreads     : REQUIRED two-element sequence [focused_spread, diffuse_spread];
                the None default is not usable (see explicit check below)
  verbose     : print diagnostic messages

  Returns the per-receptor weight dict W, or None when the computed inDegree is 0
  (in which case no connection is created).
  """
  def printv(text):
    if verbose:
      print(text)

  printv("* connecting "+nameSrc+" -> "+nameTgt+" with "+projType+" "+type+" connection")

  if source_channels is None:  # identity test: `== None` is unreliable and non-idiomatic
    # if not specified, assume that the connection originates from all channels
    source_channels = range(len(Pop[nameSrc]))

  # convert the redundancy specification into an inDegree (distinct input neuron count):
  if RedundancyType == 'inDegreeAbs':
    # inDegree is already provided in the right form
    inDegree = float(redundancy)
  elif RedundancyType == 'outDegreeAbs':
    #### fractional outDegree is expressed as a fraction of max axo-dendritic contacts
    inDegree = get_frac(1./redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], verbose=verbose)
  elif RedundancyType == 'outDegreeCons':
    #### fractional outDegree is expressed as a ratio of min/max axo-dendritic contacts
    inDegree = get_frac(redundancy, nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], useMin=True, verbose=verbose)
  else:
    raise KeyError('`RedundancyType` should be one of `inDegreeAbs`, `outDegreeAbs`, or `outDegreeCons`.')

  # check if in degree acceptable (not larger than number of neurons in the source nucleus)
  # note: the "\\" below keeps the printed text identical ("/!\ ") while avoiding an
  # invalid "\ " escape sequence (SyntaxWarning in recent Python versions)
  if projType == 'focused' and inDegree > nbSim[nameSrc]:
    printv("/!\\ WARNING: required 'in degree' ("+str(inDegree)+") larger than number of neurons in individual source channels ("+str(nbSim[nameSrc])+"), thus reduced to the latter value")
    inDegree = nbSim[nameSrc]
  if projType == 'diffuse' and inDegree > nbSim[nameSrc]*len(source_channels):
    printv("/!\\ WARNING: required 'in degree' ("+str(inDegree)+") larger than number of neurons in the overall source population ("+str(nbSim[nameSrc]*len(source_channels))+"), thus reduced to the latter value")
    inDegree = nbSim[nameSrc]*len(source_channels)
  if inDegree == 0.:
    printv("/!\\ WARNING: non-existent connection strength, will skip")
    return

  global AMPASynapseCounter

  # scale the inDegree by the fraction of source channels actually used:
  inDegree = inDegree * (float(len(source_channels)) / float(len(Pop[nameSrc])))

  # prepare receptor type lists:
  if type == 'ex':
    lRecType = ['AMPA','NMDA']
    AMPASynapseCounter = AMPASynapseCounter + 1
    lbl = AMPASynapseCounter # needs to add NMDA later
  elif type == 'AMPA':
    lRecType = ['AMPA']
    lbl = 0
  elif type == 'NMDA':
    lRecType = ['NMDA']
    lbl = 0
  elif type == 'in':
    lRecType = ['GABA']
    lbl = 0
  else:
    raise KeyError('Undefined connexion type: '+type)

  # compute the global weight of the connection, for each receptor type:
  W = computeW(lRecType, nameSrc, nameTgt, inDegree, gain, verbose=False)
  printv("  W="+str(W)+" and inDegree="+str(inDegree))

  # determine which transmission delay to use:
  if LCGDelays:
    delay = tau[nameSrc+'->'+nameTgt]
  else:
    delay = 1.

  # fail fast with a clear message instead of the opaque TypeError that
  # `spreads[0]` would raise when the default spreads=None is left in place:
  if spreads is None:
    raise ValueError('`spreads` must be a two-element sequence [focused_spread, diffuse_spread]')

  if projType == 'focused': # if projections focused, input come only from the same channel as tgtChannel
    mass_connect_topo(nameSrc, nameTgt, lbl, inDegree, recType[lRecType[0]], W[lRecType[0]], delay, spread=spreads[0], stochastic_delays = stochastic_delays) # 0.5 spread for now
  elif projType == 'diffuse': # if projections diffused, input connections are shared among each possible input channel equally
    mass_connect_topo(nameSrc, nameTgt, lbl, inDegree, recType[lRecType[0]], W[lRecType[0]], delay, spread=spreads[1], stochastic_delays = stochastic_delays) # for now, arbitrary high spread

  if type == 'ex':
    # mirror the AMPA connection with similarly connected NMDA connections
    for src_channel in source_channels: # for each relevant channel of the Source nucleus
      mass_mirror(Pop[nameSrc][src_channel], lbl, recType['NMDA'], W['NMDA'], delay, stochastic_delays = stochastic_delays)

  return W
#-------------------------------------------------------------------------------
# returns the minimal & maximal numbers of distinct input neurons for one connection
#-------------------------------------------------------------------------------
def get_input_range(nameSrc, nameTgt, cntSrc, cntTgt, verbose=False):
  """Returns [nu0, nu]: the minimal and maximal numbers of distinct input
  neurons for one nameSrc -> nameTgt connection."""
  proj = nameSrc+'->'+nameTgt
  if nameSrc in ('CSN', 'PTN'):
    # for cortical inputs, `alpha` directly stores nu; the minimum is unknown
    nu = alpha[proj]
    nu0 = 0
    if verbose:
      print('\tMaximal number of distinct input neurons (nu): '+str(nu))
      print('\tMinimal number of distinct input neurons     : unknown (set to 0)')
  else:
    # base count of candidate afferents, scaled by the projection probability
    base = cntSrc / float(cntTgt) * P[proj]
    nu = base * alpha[proj]
    nu0 = base
    if verbose:
      print('\tMaximal number of distinct input neurons (nu): '+str(nu))
      print('\tMinimal number of distinct input neurons     : '+str(nu0))
  return [nu0, nu]
#-------------------------------------------------------------------------------
# computes the inDegree as a fraction of maximal possible inDegree
# FractionalOutDegree: outDegree, expressed as a fraction
#-------------------------------------------------------------------------------
def get_frac(FractionalOutDegree, nameSrc, nameTgt, cntSrc, cntTgt, useMin=False, verbose=False):
  """Computes the inDegree as a fraction of the maximal possible inDegree.

  FractionalOutDegree : outDegree, expressed as a fraction
  nameSrc, nameTgt    : population names (keys into the LG14 parameter tables)
  cntSrc, cntTgt      : neuron counts of the source and target populations
  useMin   : if False, the fraction is relative to the maximal number of
             axo-dendritic contacts; if True, it interpolates between the
             minimal and maximal numbers of contacts
  verbose  : print the conversion result

  Returns the inDegree as a float.
  """
  # single lookup of [min, max] distinct input neuron counts (was duplicated per branch)
  r = get_input_range(nameSrc, nameTgt, cntSrc, cntTgt, verbose=verbose)
  if useMin:  # idiomatic truth test instead of `== False` comparison
    # 'FractionalOutDegree' is relative to the span between the minimal and maximal number of contacts
    inDegree = (r[1] - r[0]) * FractionalOutDegree + r[0]
  else:
    # 'FractionalOutDegree' is relative to the maximal number of axo-dendritic contacts
    inDegree = r[1] * FractionalOutDegree
  if verbose:
    print('\tConverting the fractional outDegree of '+nameSrc+' -> '+nameTgt+' from '+str(FractionalOutDegree)+' to inDegree neuron count: '+str(round(inDegree, 2))+' (relative to minimal value possible? '+str(useMin)+')')
  return inDegree
#-------------------------------------------------------------------------------
# computes the weight of a connection, based on LG14 parameters
#-------------------------------------------------------------------------------
def computeW(listRecType, nameSrc, nameTgt, inDegree, gain=1., verbose=False):
  """Computes the weight of a nameSrc -> nameTgt connection for each receptor
  type in listRecType, based on LG14 parameters. Returns a dict mapping
  receptor type name to weight."""
  proj = nameSrc+'->'+nameTgt
  # maximal number of distinct input neurons for this projection:
  nu = get_input_range(nameSrc, nameTgt, neuronCounts[nameSrc], neuronCounts[nameTgt], verbose=verbose)[1]
  if verbose:
    print('\tCompare with the effective chosen inDegree   : '+str(inDegree))
  # attenuation due to the distance from the receptors to the soma of tgt:
  attenuation = cosh(LX[nameTgt]*(1-p[proj])) / cosh(LX[nameTgt])
  # one weight per requested receptor type (same operand order as before,
  # so the floating-point results are bit-identical):
  return {r: nu / float(inDegree) * attenuation * wPSP[recType[r]-1] * gain for r in listRecType}
#-------------------------------------------------------------------------------
#rnd.seed(17)
#nest.SetKernelStatus({'local_num_threads':2, "data_path": "log/", "overwrite_files":True})
#nest.SetKernelStatus({'local_num_threads':2, "data_path": "log/"})
# simulation resolution and duration
dt = 0.01 # ms
simDuration = 10000. # in ms

# Acceptable firing rate ranges (FRR) in normal and deactivation experiments
# extracted from LG14 Table 5
# FRRNormal: plausible baseline firing rate range [min, max], in Hz, per nucleus
FRRNormal = {'MSN': [0,1],
             'FSI': [7.8,14.0], # the refined constraint of 10.9 +/- 3.1 Hz was extracted from the following papers: Adler et al., 2016; Yamada et al., 2016 (summarizing date from three different experiments); and Marche and Apicella, 2017 # old values: [0,20]
             'STN': [15.2,22.8],
             'GPe': [55.7,74.5],
             'Arky': [55.7,74.5],
             'Prot': [55.7,74.5],
             'GPi': [59.1,79.5],
             }
# GPi firing rate ranges [min, max] (Hz) under receptor-deactivation conditions,
# keyed by the receptor combination left active:
FRRGPi = {'AMPA+NMDA+GABAA':[53.4,96.8],
          'NMDA':[27.2451,78.6255],
          'NMDA+AMPA':[6.811275,52.364583],
          'AMPA':[5.7327,66.0645],
          'GABAA':[44.1477,245.8935],
          }
# same as FRRGPi, for the GPe:
FRRGPe = {'AMPA':[4.2889,58.7805],
          'AMPA+GABAA':[10.0017148,137.076126],
          'NMDA':[29.5767,61.1645],
          'GABAA':[74.8051,221.4885],
          }
# antagonist-experiment ranges, indexed by target nucleus name
# (the split-GPe nuclei Arky/Prot reuse the GPe ranges):
FRRAnt = {'Arky':FRRGPe,'Prot':FRRGPe,'GPe':FRRGPe,'GPi':FRRGPi}
# imported from Chadoeuf "connexweights"
# All the parameters needed to replicate Lienard model
#
#-------------------------
# fixed parameters
# PSP amplitudes per receptor type (mV):
A_GABA=-0.25 # mV
A_AMPA= 1.
A_NMDA= 0.025
# PSP decay constants per receptor type (ms):
D_GABA=5./exp(1) # ms ; /e because Dn is peak half-time in LG14, while it is supposed to be tau_peak in NEST
D_AMPA=5./exp(1)
D_NMDA=100./exp(1)
# passive cable properties:
Ri=200.E-2 # Ohms.m
Rm=20000.E-4 # Ohms.m^2

# simulated nuclei; the GPe is optionally split into its Arky/Prot subpopulations:
if params['splitGPe']:
  NUCLEI=['MSN','FSI','STN','Arky','Prot','GPi']
else:
  NUCLEI=['MSN','FSI','STN','GPe','GPi']

# Number of neurons in the real macaque brain
# one hemisphere only, based on Hardman et al. 2002 paper, except for striatum & CM/Pf
# (CSN and PTN are external inputs with no defined count; None prevents a KeyError)
neuronCounts={'MSN': 26448.0E3,
              'FSI': 532.0E3,
              'STN': 77.0E3,
              'GPe': 251.0E3,
              'Arky': 251.0E3,
              'Prot': 251.0E3,
              'GPi': 143.0E3,
              'CMPf': 86.0E3,
              'CSN': None, 'PTN': None # prevents key error
              }
# Number of neurons that will be simulated per population
# (initialized at 0.; presumably filled in when populations are created -- not visible in this excerpt)
nbSim = {'MSN': 0.,
         'FSI': 0.,
         'STN': 0.,
         'GPe': 0.,
         'Arky': 0.,
         'Prot': 0.,
         'GPi': 0.,
         'CMPf':0.,
         'CSN': 0.,
         'PTN': 0.,}

# P(X->Y): probability that a given neuron from X projects to at least one neuron of Y
P = {'MSN->GPe': 1.,
     'MSN->Arky': 1.,
     'MSN->Prot': 1.,
     'MSN->GPi': 0.82,
     'MSN->MSN': 1.,
     'FSI->MSN': 1.,
     'FSI->FSI': 1.,
     'STN->GPe': 0.83,
     'STN->Arky': 0.83,
     'STN->Prot': 0.83,
     'STN->GPi': 0.72,
     'STN->MSN': 0.17,
     'STN->FSI': 0.17,
     'GPe->STN': 1.,
     'GPe->GPe': 0.84,
     'GPe->GPi': 0.84,
     'GPe->MSN': 0.16,
     'GPe->FSI': 0.16,
     'Arky->Arky': 0.84,
     'Arky->Prot': 0.84,
     'Arky->MSN': 0.16,
     'Arky->FSI': 0.16,
     'Prot->STN': 1.,
     'Prot->Arky': 0.84,
     'Prot->Prot': 0.84,
     'Prot->GPi': 0.84,
     'CSN->MSN': 1.,
     'CSN->FSI': 1.,
     'PTN->MSN': 1.,
     'PTN->FSI': 1.,
     'PTN->STN': 1.,
     'CMPf->STN': 1.,
     'CMPf->MSN': 1.,
     'CMPf->FSI': 1.,
     'CMPf->GPe': 1.,
     'CMPf->Arky': 1.,
     'CMPf->Prot': 1.,
     'CMPf->GPi': 1.,}

# alpha X->Y: average number of synaptic contacts made by one neuron of X to one neuron of Y, when there is a connection
# for the moment set from one specific parameterization, should be read from Jean's solution file
alpha = {'MSN->GPe': 171,
'MSN->Arky': 171,
'MSN->Prot': 171,
'MSN->GPi': 210,
'MSN->MSN': 210,
'FSI->MSN': 4362,
'FSI->FSI': 116,
'STN->GPe': 428,
'STN->Arky': 428,
'STN->Prot': 428,
'STN->GPi': 233,
'STN->MSN': 0,
'STN->FSI': 91,
'GPe->STN': 19,
'GPe->GPe': 38,
'GPe->GPi': 16,
'GPe->MSN': 0,
'GPe->FSI': 353,
'Arky->Arky': 38,
'Arky->Prot': 38,
'Arky->MSN': 0,
'Arky->FSI': 353,
'Prot->STN': 19,
'Prot->Arky': 38,
'Prot->Prot': 38,
'Prot->GPi': 16,
'CSN->MSN': 342, # here, represents directly \nu
'CSN->FSI': 250, # here, represents directly \nu
'PTN->MSN': 5, # here, represents directly \nu
'PTN->FSI': 5, # here, represents directly \nu
'PTN->STN': 259, # here, represents directly \nu
'CMPf->MSN': 4965,
'CMPf->FSI': 1053,