Publications of Nicolas Brunel

%% Papers Published   
@article{fds328550,
   Author = {Brunel, N and Nadal, J-P and Toulouse, G},
   Title = {Information capacity of a perceptron},
   Journal = {Journal of Physics A: Mathematical and General},
   Volume = {25},
   Number = {19},
   Pages = {5017-5038},
   Publisher = {IOP Publishing},
   Year = {1992},
   Month = {December},
   url = {http://dx.doi.org/10.1088/0305-4470/25/19/015},
   Abstract = {The authors study the information storage capacity of a
             simple perceptron in the error regime. For random unbiased
             patterns the geometrical analysis gives a logarithmic
             dependence for the information content in the asymptotic
             limit. In this case, the statistical physics approach, when
             used at the simplest level of replica theory, does not give
             satisfactory results. However for perceptrons with finite
             stability, the information content can be simply calculated
             with statistical physics methods in a region above the
             critical storage level, for biased as well as for unbiased
             patterns.},
   Doi = {10.1088/0305-4470/25/19/015},
   Key = {fds328550}
}

@article{fds328549,
   Author = {Amit, DJ and Brunel, N},
   Title = {Adequate input for learning in attractor neural
             networks},
   Journal = {Network: Computation in Neural Systems},
   Volume = {4},
   Number = {2},
   Pages = {177-194},
   Publisher = {Informa UK Limited},
   Year = {1993},
   Month = {January},
   url = {http://dx.doi.org/10.1088/0954-898X_4_2_003},
   Abstract = {In the context of learning in attractor neural networks
              (ANN), the authors discuss the constraints imposed by the
              requirement that the afferents arriving at the neurons in
              the attractor network from the stimulus compete
              successfully with the afferents generated by the recurrent
              activity inside the network, in a situation in which both
              sets of synaptic efficacies are weak and approximately
              equal. They simulate and analyse a two-component network:
              one component representing the stimulus, the other an ANN.
              They show that if stimuli are correlated with the receptive
              fields of neurons in the ANN, and are of sufficient
              contrast, the stimulus can provide the necessary
              information to the recurrent network to allow learning of
              new stimuli, even in the very disfavoured situation of
              synaptic predominance in the recurrent part. Stimuli which
              are insufficiently correlated with the receptive fields, or
              are of insufficient contrast, are submerged by the
              recurrent activity. © 1993 Informa UK Ltd. All rights
              reserved; reproduction in whole or part not permitted.},
   Doi = {10.1088/0954-898X_4_2_003},
   Key = {fds328549}
}

@article{fds328548,
   Author = {Brunel, N},
   Title = {Effect of synapse dilution on the memory retrieval in
             structured attractor neural networks},
   Journal = {Journal de Physique I},
   Volume = {3},
   Number = {8},
   Pages = {1693-1715},
   Publisher = {EDP Sciences},
   Year = {1993},
   Month = {August},
   url = {http://dx.doi.org/10.1051/jp1:1993210},
   Doi = {10.1051/jp1:1993210},
   Key = {fds328548}
}

@article{fds328546,
   Author = {Brunel, N and Zecchina, R},
   Title = {Response functions improving performance in analog attractor
             neural networks.},
   Journal = {Phys Rev E Stat Phys Plasmas Fluids Relat Interdiscip
             Topics},
   Volume = {49},
   Number = {3},
   Pages = {R1823-R1826},
   Year = {1994},
   Month = {March},
   url = {http://dx.doi.org/10.1103/physreve.49.r1823},
   Doi = {10.1103/physreve.49.r1823},
   Key = {fds328546}
}

@article{fds328543,
   Author = {Brunel, N},
   Title = {Dynamics of an attractor neural network converting temporal
             into spatial correlations},
   Journal = {Network: Computation in Neural Systems},
   Volume = {5},
   Number = {4},
   Pages = {449-470},
   Publisher = {Informa UK Limited},
   Year = {1994},
   Month = {November},
   url = {http://dx.doi.org/10.1088/0954-898x/5/4/003},
   Doi = {10.1088/0954-898x/5/4/003},
   Key = {fds328543}
}

@article{fds328544,
   Author = {Amit, DJ and Brunel, N and Tsodyks, MV},
   Title = {Correlations of cortical Hebbian reverberations: theory
             versus experiment.},
   Journal = {J Neurosci},
   Volume = {14},
   Number = {11 Pt 1},
   Pages = {6435-6445},
   Year = {1994},
   Month = {November},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.14-11-06435.1994},
   Abstract = {Interpreting recent single-unit recordings of delay
             activities in delayed match-to-sample experiments in
             anterior ventral temporal (AVT) cortex of monkeys in terms
             of reverberation dynamics, we present a model neural network
             of quasi-realistic elements that reproduces the empirical
             results in great detail. Information about the contiguity of
             successive stimuli in the training sequence, representing
             the fact that training is done on a set of uncorrelated
             stimuli presented in a fixed temporal sequence, is embedded
             in the synaptic structure. The model reproduces quite
             accurately the correlations between delay activity
             distributions corresponding to stimulation with the
             uncorrelated stimuli used for training. It reproduces also
             the activity distributions of spike rates on sample cells as
             a function of the stimulating pattern. It is, in our view,
             the first time that a computational phenomenon, represented
             on the neurophysiological level, is reproduced in all its
             quantitative aspects. The model is then used to make
             predictions about further features of the physiology of such
             experiments. Those include further properties of the
             correlations, features of selective cells as discriminators
             of stimuli provoking different delay activity distributions,
             and activity distributions among the neurons in a delay
             activity produced by a given pattern. The model has
             predictive implications also for the dependence of the delay
             activities on different training protocols. Finally, we
             discuss the perspectives of the interplay between such
             models and neurophysiology as well as its limitations and
             possible extensions.},
   Doi = {10.1523/JNEUROSCI.14-11-06435.1994},
   Key = {fds328544}
}

@article{fds328545,
   Author = {Brunel, N},
   Title = {Storage capacity of neural networks: Effect of the
             fluctuations of the number of active neurons per
             memory},
   Journal = {Journal of Physics A: Mathematical and General},
   Volume = {27},
   Number = {14},
   Pages = {4783-4789},
   Publisher = {IOP Publishing},
   Year = {1994},
   Month = {December},
   url = {http://dx.doi.org/10.1088/0305-4470/27/14/009},
   Abstract = {The storage capacity in an attractor neural network with
             excitatory couplings is shown to depend not only on the
             fraction of active neurons per pattern (or coding rate), but
             also on the fluctuations around this value, in the
             thermodynamical limit. The capacity is calculated in the
             case of exactly the same number of active neurons in every
             pattern. For every coding level the capacity is increased
             with respect to the case of random patterns. Results are
             supported by numerical simulations done with an exhaustive
              search algorithm, and partly resolve, in the sparse coding
              limit, the paradox of the discrepancy between the capacity
              of the Willshaw model and the optimal capacity.},
   Doi = {10.1088/0305-4470/27/14/009},
   Key = {fds328545}
}

@article{fds328540,
   Author = {Brunel, N and Amit, DJ},
   Title = {Learning internal representations in an analog attractor
             neural network},
   Journal = {INTERNATIONAL JOURNAL OF NEURAL SYSTEMS, SUPPLEMENTARY
             ISSUE, 1995},
   Pages = {19-23},
   Publisher = {WORLD SCIENTIFIC PUBL CO PTE LTD},
   Editor = {Amit, DJ and delGiudice, P and Denby, B and Rolls, ET and Treves,
             A},
   Year = {1995},
   Month = {January},
   ISBN = {981-02-2482-6},
   Key = {fds328540}
}

@article{fds328541,
   Author = {Brunel, N},
   Title = {Quantitative modeling of local Hebbian reverberations in
             primate cortex},
   Journal = {INTERNATIONAL JOURNAL OF NEURAL SYSTEMS, SUPPLEMENTARY
             ISSUE, 1995},
   Pages = {13-17},
   Publisher = {WORLD SCIENTIFIC PUBL CO PTE LTD},
   Editor = {Amit, DJ and delGiudice, P and Denby, B and Rolls, ET and Treves,
             A},
   Year = {1995},
   Month = {January},
   ISBN = {981-02-2482-6},
   Key = {fds328541}
}

@article{fds328538,
   Author = {Amit, D and Brunel, N},
   Title = {Learning internal representations in an attractor neural
             network with analogue neurons},
   Journal = {Network: Computation in Neural Systems},
   Volume = {6},
   Number = {3},
   Pages = {359-388},
   Publisher = {Informa UK Limited},
   Year = {1995},
   Month = {August},
   url = {http://dx.doi.org/10.1088/0954-898x/6/3/004},
   Doi = {10.1088/0954-898x/6/3/004},
   Key = {fds328538}
}

@article{fds328537,
   Author = {Brunel, N and Zecchina, R},
   Title = {A SIMPLE GEOMETRICAL BOUND FOR REPLICA SYMMETRY STABILITY IN
             NEURAL NETWORKS MODELS},
   Journal = {Modern Physics Letters B},
   Volume = {09},
   Number = {18},
   Pages = {1159-1164},
   Publisher = {World Scientific Pub Co Pte Lt},
   Year = {1995},
   Month = {August},
   url = {http://dx.doi.org/10.1142/s0217984995001157},
   Doi = {10.1142/s0217984995001157},
   Key = {fds328537}
}

@article{fds328536,
   Author = {Ninio, J and Brunel, N},
   Title = {Time to detect a single difference between two correlated
             images},
   Journal = {PERCEPTION},
   Volume = {25},
   Pages = {89-89},
   Publisher = {PION LTD},
   Year = {1996},
   Month = {January},
   Key = {fds328536}
}

@article{fds328535,
   Author = {Brunel, N},
   Title = {Hebbian learning of context in recurrent neural
             networks.},
   Journal = {Neural Comput},
   Volume = {8},
   Number = {8},
   Pages = {1677-1710},
   Year = {1996},
   Month = {November},
   url = {http://dx.doi.org/10.1162/neco.1996.8.8.1677},
   Abstract = {Single-electrode recordings in the inferotemporal cortex
              of monkeys during delayed visual memory tasks provide
              evidence for attractor dynamics in the observed region. The
             persistent elevated delay activities could be internal
             representations of features of the learned visual stimuli
             shown to the monkey during training. When uncorrelated
             stimuli are presented during training in a fixed sequence,
             these experiments display significant correlations between
             the internal representations. Recently a simple model of
             attractor neural network has reproduced quantitatively the
             measured correlations. An underlying assumption of the model
             is that the synaptic matrix formed during the training phase
             contains in its efficacies information about the contiguity
             of persistent stimuli in the training sequence. We present
             here a simple unsupervised learning dynamics that produces
             such a synaptic matrix if sequences of stimuli are
             repeatedly presented to the network at fixed order. The
             resulting matrix is then shown to convert temporal
             correlations during training into spatial correlations
             between attractors. The scenario is that, in the presence of
             selective delay activity, at the presentation of each
              stimulus, the activity distribution in the neural assembly
              contains information about both the current stimulus and
              the previous one (carried by the attractor). Thus the recurrent
             synaptic matrix can code not only for each of the stimuli
             presented to the network but also for their context. We
             combine the idea that for learning to be effective, synaptic
             modification should be stochastic, with the fact that
             attractors provide learnable information about two
             consecutive stimuli. We calculate explicitly the probability
             distribution of synaptic efficacies as a function of
             training protocol, that is, the order in which stimuli are
             presented to the network. We then solve for the dynamics of
             a network composed of integrate-and-fire excitatory and
             inhibitory neurons with a matrix of synaptic collaterals
             resulting from the learning dynamics. The network has a
             stable spontaneous activity, and stable delay activity
             develops after a critical learning stage. The availability
             of a learning dynamics makes possible a number of
             experimental predictions for the dependence of the delay
             activity distributions and the correlations between them, on
             the learning stage and the learning protocol. In particular
             it makes specific predictions for pair-associates delay
             experiments.},
   Doi = {10.1162/neco.1996.8.8.1677},
   Key = {fds328535}
}

@article{fds328532,
   Author = {Amit, DJ and Brunel, N},
   Title = {Model of global spontaneous activity and local structured
             activity during delay periods in the cerebral
             cortex.},
   Journal = {Cereb Cortex},
   Volume = {7},
   Number = {3},
   Pages = {237-252},
   Year = {1997},
   url = {http://dx.doi.org/10.1093/cercor/7.3.237},
   Abstract = {We investigate self-sustaining stable states (attractors) in
             networks of integrate-and-fire neurons. First, we study the
             stability of spontaneous activity in an unstructured
             network. It is shown that the stochastic background
             activity, of 1-5 spikes/s, is unstable if all neurons are
             excitatory. On the other hand, spontaneous activity becomes
             self-stabilizing in presence of local inhibition, given
             reasonable values of the parameters of the network. Second,
             in a network sustaining physiological spontaneous rates, we
             study the effect of learning in a local module, expressed in
             synaptic modifications in specific populations of synapses.
             We find that if the average synaptic potentiation (LTP) is
             too low, no stimulus specific activity manifests itself in
             the delay period. Instead, following the presentation and
             removal of any stimulus there is, in the local module, a
             delay activity in which all neurons selective (responding
             visually) to any of the stimuli presented for learning have
             rates which gradually increase with the amplitude of
             synaptic potentiation. When the average LTP increases beyond
             a critical value, specific local attractors (stable states)
             appear abruptly against the background of the global uniform
             spontaneous attractor. In this case the local module has two
             available types of collective delay activity: if the
             stimulus is unfamiliar, the activity is spontaneous; if it
             is similar to a learned stimulus, delay activity is
             selective. These new attractors reflect the synaptic
             structure developed during learning. In each of them a small
             population of neurons have elevated rates, which depend on
             the strength of LTP. The remaining neurons of the module
             have their activity at spontaneous rates. The predictions
             made in this paper could be checked by single unit
             recordings in delayed response experiments.},
   Doi = {10.1093/cercor/7.3.237},
   Key = {fds328532}
}

@article{fds328534,
   Author = {Brunel, N and Nadal, J-P},
   Title = {Optimal tuning curves for neurons spiking as a Poisson
             process.},
   Journal = {ESANN},
   Publisher = {D-Facto public},
   Editor = {Verleysen, M},
   Year = {1997},
   ISBN = {2-9600049-7-3},
   Key = {fds328534}
}

@article{fds328530,
   Author = {Amit, D and Brunel, N},
   Title = {Dynamics of a recurrent network of spiking neurons before
             and following learning},
   Journal = {Network: Computation in Neural Systems},
   Volume = {8},
   Number = {4},
   Pages = {373-404},
   Publisher = {Informa UK Limited},
   Year = {1997},
   Month = {January},
   url = {http://dx.doi.org/10.1088/0954-898X_8_4_003},
   Abstract = {Extensive simulations of large recurrent networks of
             integrate-and-fire excitatory and inhibitory neurons in
             realistic cortical conditions (before and after Hebbian
             unsupervised learning of uncorrelated stimuli) exhibit a
             rich phenomenology of stochastic neural spike dynamics and,
             in particular, coexistence between two types of stable
             states: spontaneous activity upon stimulation by an
             unlearned stimulus, and 'working memory' states strongly
             correlated with learned stimuli. Firing rates have very wide
             distributions, due to the variability in the connectivity
             from neuron to neuron. ISI histograms are exponential,
             except for small intervals. Thus the spike emission
             processes are well approximated by a Poisson process. The
             variability of the spike emission process is effectively
             controlled by the magnitude of the post-spike reset
             potential relative to the mean depolarization of the cell.
             Cross-correlations (CC) exhibit a central peak near zero
             delay, flanked by damped oscillations. The magnitude of the
             central peak in the CCs depends both on the probability that
             a spike emitted by a neuron affects another randomly chosen
             neuron and on firing rates. It increases when average rates
             decrease. Individual CCs depend very weakly on the synaptic
             interactions between the pairs of neurons. The dependence of
             individual CCs on the rates of the pair of neurons is in
             agreement with experimental data. The distribution of firing
             rates among neurons is in very good agreement with a simple
             theory, indicating that correlations between spike emission
             processes in the network are effectively small. © 1997 IOP
             Publishing Ltd.},
   Doi = {10.1088/0954-898X_8_4_003},
   Key = {fds328530}
}

@article{fds328533,
   Author = {Brunel, N},
   Title = {Cross-correlations in sparsely connected recurrent networks
             of spiking neurons},
   Journal = {Lecture Notes in Computer Science (including subseries
             Lecture Notes in Artificial Intelligence and Lecture Notes
             in Bioinformatics)},
   Volume = {1327},
   Pages = {31-36},
   Year = {1997},
   Month = {January},
   ISBN = {9783540636311},
   url = {http://dx.doi.org/10.1007/bfb0020128},
   Abstract = {We study the dynamics of sparsely connected recurrent
             networks composed of excitatory and inhibitory
             integrate-and-fire (IF) neurons firing at low rates, and in
             particular cross-correlations (CC) between spike times of
             pairs of neurons using both numerical simulations and a
             recent theory. CCs exhibit damped oscillations with a
             frequency which depends on synaptic time constants.
             Individual CCs are shown to depend weakly on synaptic
             connectivity. They depend more strongly on the firing rates
             of individual neurons.},
   Doi = {10.1007/bfb0020128},
   Key = {fds328533}
}

@article{fds328531,
   Author = {Brunel, N and Ninio, J},
   Title = {Time to detect the difference between two images presented
             side by side.},
   Journal = {Brain Res Cogn Brain Res},
   Volume = {5},
   Number = {4},
   Pages = {273-282},
   Year = {1997},
   Month = {June},
   url = {http://dx.doi.org/10.1016/s0926-6410(97)00003-7},
   Abstract = {The time to locate a difference between two artificial
             images presented side by side on a CRT screen was studied as
             a function of their complexity. The images were square
             lattices of black or white squares or quadrangles, in some
             cases delineated by a blue grid. Each pair differed at a
              single position, chosen at random. For images of size N x
              N, the median reaction time varied as cN^2, from N = 3 to
              15, with c being around 50 ms in the absence of a grid
              (i.e., when the
             quadrangles were associated into continuous shapes). For N <
             or = 9, when the lattice was made irregular, performance did
             not deteriorate, up to a rather high level of irregularity.
             Furthermore, the presence of uncorrelated distortions in the
             left and right images did not affect performance for N < or
             = 6. In the presence of a grid, the reaction times were on
             average higher by 20%. The results taken together indicate
             that the detection of differences does not proceed on a
             point-by-point basis and must be mediated by some abstract
             shape analysis, in agreement with current views on
             short-term visual memory (e.g., Phillips, W.A., On the
             distinction between sensory storage and short-term visual
             memory, Percept. Psychophys., 16 (1974) 283-290 [13]). In
             complementary experiments, the subjects had to judge whether
             two images presented side by side were the same or
             different, with N varying from 1 to 5. For N < 3, the same
             and the different responses were similar in all their
             statistical aspects. For N > or = 4, the "same" responses
             took a significantly larger time than the "different"
             responses and were accompanied by a significant increase in
             errors. The qualitative change from N = 3 to N = 4 is
             interpreted as a shift from a "single inspection" analysis
             to an obligatory scanning procedure. On the whole, we
             suggest that visual information in our simultaneous
             comparison task is extracted by chunks of about 12 +/- 3
             bits, and that the visual processing and matching tasks take
             about 50 ms per couple of quadrangles. In Section 4, we
             compare these values to the values obtained through other
             experimental paradigms.},
   Doi = {10.1016/s0926-6410(97)00003-7},
   Key = {fds328531}
}
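%% Note: a quick worked example of the reaction-time law reported in the
%% abstract above (RT ~ c*N^2 with c about 50 ms, values from the paper):
%%   RT(3)  ~ 50 ms x 9   ~ 0.45 s
%%   RT(15) ~ 50 ms x 225 ~ 11 s
%% so scaling up the lattice side by a factor k multiplies the median
%% search time by roughly k^2.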

@article{fds328528,
   Author = {Brunel, N and Nadal, JP},
   Title = {Modeling memory: what do we learn from attractor neural
             networks?},
   Journal = {C R Acad Sci III},
   Volume = {321},
   Number = {2-3},
   Pages = {249-252},
   Year = {1998},
   url = {http://dx.doi.org/10.1016/s0764-4469(97)89830-7},
   Abstract = {In this paper we summarize some of the main contributions of
             models of recurrent neural networks with associative memory
             properties. We compare the behavior of these attractor
             neural networks with empirical data from both physiology and
             psychology. This type of network could be used in models
             with more complex functions.},
   Doi = {10.1016/s0764-4469(97)89830-7},
   Key = {fds328528}
}

@article{fds328529,
   Author = {Brunel, N and Trullier, O},
   Title = {Plasticity of directional place fields in a model of rodent
             CA3.},
   Journal = {Hippocampus},
   Volume = {8},
   Number = {6},
   Pages = {651-665},
   Year = {1998},
   url = {http://dx.doi.org/10.1002/(SICI)1098-1063(1998)8:6<651::AID-HIPO8>3.0.CO;2-L},
   Abstract = {We propose a computational model of the CA3 region of the
             rat hippocampus that is able to reproduce the available
             experimental data concerning the dependence of directional
             selectivity of the place cell discharge on the environment
             and on the spatial task. The main feature of our model is a
             continuous, unsupervised Hebbian learning dynamics of
             recurrent connections, which is driven by the neuronal
             activities imposed upon the network by the
             environment-dependent external input. In our simulations,
             the environment and the movements of the rat are chosen to
             mimic those commonly observed in neurophysiological
             experiments. The environment is represented as local views
             that depend on both the position and the heading direction
             of the rat. We hypothesize that place cells are
             intrinsically directional, that is, they respond to local
             views. We show that the synaptic dynamics in the recurrent
             neural network rapidly modify the discharge correlates of
             the place cells: Cells tend to become omnidirectional place
             cells in open fields, while their directionality tends to
             get stronger in radial-arm mazes. We also find that the
             synaptic learning mechanisms account for other properties of
             place cell activity, such as an increase in the place cell
             peak firing rates as well as clustering of place fields
             during exploration. Our model makes several experimental
             predictions that can be tested using current
             techniques.},
   Doi = {10.1002/(SICI)1098-1063(1998)8:6<651::AID-HIPO8>3.0.CO;2-L},
   Key = {fds328529}
}

@article{fds328527,
   Author = {Brunel, N and Carusi, F and Fusi, S},
   Title = {Slow stochastic Hebbian learning of classes of stimuli in a
             recurrent neural network.},
   Journal = {Network},
   Volume = {9},
   Number = {1},
   Pages = {123-152},
   Year = {1998},
   Month = {February},
   url = {http://dx.doi.org/10.1088/0954-898x/9/1/007},
   Abstract = {We study unsupervised Hebbian learning in a recurrent
             network in which synapses have a finite number of stable
             states. Stimuli received by the network are drawn at random
             at each presentation from a set of classes. Each class is
             defined as a cluster in stimulus space, centred on the class
             prototype. The presentation protocol is chosen to mimic the
             protocols of visual memory experiments in which a set of
             stimuli is presented repeatedly in a random way. The
             statistics of the input stream may be stationary, or
             changing. Each stimulus induces, in a stochastic way,
             transitions between stable synaptic states. Learning
             dynamics is studied analytically in the slow learning limit,
             in which a given stimulus has to be presented many times
             before it is memorized, i.e. before synaptic modifications
             enable a pattern of activity correlated with the stimulus to
             become an attractor of the recurrent network. We show that
             in this limit the synaptic matrix becomes more correlated
             with the class prototypes than with any of the instances of
             the class. We also show that the number of classes that can
             be learned increases sharply when the coding level
             decreases, and determine the speeds of learning and
             forgetting of classes in the case of changes in the
             statistics of the input stream.},
   Doi = {10.1088/0954-898x/9/1/007},
   Key = {fds328527}
}
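%% Note: a minimal Python sketch of slow stochastic Hebbian learning with
%% binary synapses, in the spirit of the model above. All names and
%% parameter values (q_pot, q_dep, the coding level f, and the specific
%% potentiation/depression rule) are illustrative assumptions, not the
%% paper's exact model.

import numpy as np

rng = np.random.default_rng(0)
N = 200                      # neurons
f = 0.2                      # coding level (fraction of active neurons)
q_pot, q_dep = 0.05, 0.02    # transition probabilities (slow-learning limit)

# One class prototype; stimuli are noisy instances drawn around it
prototype = (rng.random(N) < f).astype(int)

def instance(proto, flip=0.05):
    # flip each unit with small probability to get an instance of the class
    noise = rng.random(proto.size) < flip
    return np.where(noise, 1 - proto, proto)

J = np.zeros((N, N), dtype=int)       # binary synapses, states {0, 1}
for _ in range(500):                  # repeated random presentations
    xi = instance(prototype)
    both = np.outer(xi, xi).astype(bool)          # pre and post both active
    J[both & (rng.random((N, N)) < q_pot)] = 1    # stochastic potentiation
    dep = np.outer(xi, 1 - xi).astype(bool)       # post active, pre inactive
    J[dep & (rng.random((N, N)) < q_dep)] = 0     # stochastic depression

# In the slow-learning limit the synaptic matrix should correlate more with
# the class prototype than with any single instance, as shown analytically.
c = np.corrcoef(J.ravel(), np.outer(prototype, prototype).ravel())[0, 1]
print(f"correlation of J with the prototype outer product: {c:.2f}")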

@article{fds328526,
   Author = {Nadal, JP and Brunel, N and Parga, N},
   Title = {Nonlinear feedforward networks with stochastic outputs:
             infomax implies redundancy reduction.},
   Journal = {Network},
   Volume = {9},
   Number = {2},
   Pages = {207-217},
   Year = {1998},
   Month = {May},
   url = {http://dx.doi.org/10.1088/0954-898x/9/2/004},
   Abstract = {We prove that maximization of mutual information between the
             output and the input of a feedforward neural network leads
             to full redundancy reduction under the following sufficient
             conditions: (i) the input signal is a (possibly nonlinear)
             invertible mixture of independent components; (ii) there is
             no input noise; (iii) the activity of each output neuron is
             a (possibly) stochastic variable with a probability
             distribution depending on the stimulus through a
             deterministic function of the inputs (where both the
             probability distributions and the functions can be different
             from neuron to neuron); (iv) optimization of the mutual
             information is performed over all these deterministic
             functions. This result extends that obtained by Nadal and
             Parga (1994) who considered the case of deterministic
             outputs.},
   Doi = {10.1088/0954-898x/9/2/004},
   Key = {fds328526}
}

@article{fds328525,
   Author = {Brunel, N and Nadal, JP},
   Title = {Mutual information, Fisher information, and population
             coding.},
   Journal = {Neural Comput},
   Volume = {10},
   Number = {7},
   Pages = {1731-1757},
   Year = {1998},
   Month = {October},
   url = {http://dx.doi.org/10.1162/089976698300017115},
   Abstract = {In the context of parameter estimation and model selection,
             it is only quite recently that a direct link between the
             Fisher information and information-theoretic quantities has
             been exhibited. We give an interpretation of this link
             within the standard framework of information theory. We show
             that in the context of population coding, the mutual
             information between the activity of a large array of neurons
             and a stimulus to which the neurons are tuned is naturally
             related to the Fisher information. In the light of this
             result, we consider the optimization of the tuning curves
             parameters in the case of neurons responding to a stimulus
             represented by an angular variable.},
   Doi = {10.1162/089976698300017115},
   Key = {fds328525}
}
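%% Note: the large-population relation this paper establishes, sketched in
%% LaTeX (hedged; the regularity conditions and exact derivation are in the
%% paper). With p(theta) the stimulus density and I_F the Fisher
%% information, the mutual information between stimulus and activity
%% approaches

\[ I[\theta, r] \simeq H(\theta)
     - \frac{1}{2} \int d\theta \, p(\theta) \,
       \ln \frac{2\pi e}{I_F(\theta)} \]

%% (natural logs, information in nats), so for large arrays maximizing the
%% mutual information reduces to maximizing the Fisher information.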

@article{fds328524,
   Author = {Brunel, N and Sergi, S},
   Title = {Firing frequency of leaky integrate-and-fire neurons with
             synaptic current dynamics.},
   Journal = {J Theor Biol},
   Volume = {195},
   Number = {1},
   Pages = {87-95},
   Year = {1998},
   Month = {November},
   url = {http://dx.doi.org/10.1006/jtbi.1998.0782},
   Abstract = {We consider a model of an integrate-and-fire neuron with
             synaptic current dynamics, in which the synaptic time
             constant tau' is much smaller than the membrane time
             constant tau. We calculate analytically the firing frequency
             of such a neuron for inputs described by a random Gaussian
              process. We find that the first-order correction to the
              frequency due to tau' is proportional to the square root of
              the ratio between these time constants, sqrt(tau'/tau).
              This implies that the correction is important even when the
              synaptic time constant is small compared with that of the
              potential. The frequency of a neuron with tau'>0 can be
              reduced to that of the basic IF neuron (corresponding to
              tau'=0) using an "effective" threshold which has a linear
              dependence on sqrt(tau'/tau). Numerical simulations show
              very good agreement with the analytical result, and permit
              an extrapolation of the "effective" threshold to higher
              orders in sqrt(tau'/tau). The obtained frequency agrees
              with simulation data for a wide range of
              parameters.},
   Doi = {10.1006/jtbi.1998.0782},
   Key = {fds328524}
}
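%% Note: the "effective threshold" result described above, written out as a
%% hedged LaTeX sketch (the exact prefactor is computed in the paper; alpha
%% below simply names a constant of order one):

\[ \theta_{\mathrm{eff}} = \theta + \alpha \, \sigma \, \sqrt{\tau'/\tau} \]

%% where sigma is the amplitude of the input fluctuations; substituting
%% theta_eff for theta in the standard integrate-and-fire rate formula
%% captures the first-order effect of the synaptic time constant.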

@article{fds328522,
   Author = {Brunel, N and Hakim, V},
   Title = {Fast global oscillations in networks of integrate-and-fire
             neurons with low firing rates.},
   Journal = {Neural Comput},
   Volume = {11},
   Number = {7},
   Pages = {1621-1671},
   Year = {1999},
   Month = {October},
   url = {http://dx.doi.org/10.1162/089976699300016179},
   Abstract = {We study analytically the dynamics of a network of sparsely
             connected inhibitory integrate-and-fire neurons in a regime
             where individual neurons emit spikes irregularly and at a
              low rate. In the limit where the number of neurons tends to
              infinity, the network exhibits a sharp transition between a
             stationary and an oscillatory global activity regime where
             neurons are weakly synchronized. The activity becomes
             oscillatory when the inhibitory feedback is strong enough.
             The period of the global oscillation is found to be mainly
             controlled by synaptic times but depends also on the
             characteristics of the external input. In large but finite
             networks, the analysis shows that global oscillations of
             finite coherence time generically exist both above and below
             the critical inhibition threshold. Their characteristics are
              determined as functions of system parameters in these two
             different regions. The results are found to be in good
             agreement with numerical simulations.},
   Doi = {10.1162/089976699300016179},
   Key = {fds328522}
}
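%% Note: mean-field analyses of this kind rest on the classical white-noise
%% first-passage ("Siegert") formula for the stationary rate of a leaky
%% integrate-and-fire neuron; a standard form (not copied from the paper)
%% is

\[ \frac{1}{\nu} = \tau_{rp} + \tau_m \sqrt{\pi}
     \int_{(V_r-\mu)/\sigma}^{(\theta-\mu)/\sigma}
     e^{u^2} \left(1 + \operatorname{erf} u\right) du \]

%% with mu and sigma the mean and standard deviation of the input, theta
%% the threshold, V_r the reset potential, and tau_rp the refractory
%% period.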

@article{fds328518,
   Author = {Brunel, N},
   Title = {Dynamics of networks of randomly connected excitatory and
             inhibitory spiking neurons.},
   Journal = {J Physiol Paris},
   Volume = {94},
   Number = {5-6},
   Pages = {445-463},
   Year = {2000},
   url = {http://dx.doi.org/10.1016/s0928-4257(00)01084-6},
   Abstract = {Recent advances in the understanding of the dynamics of
             populations of spiking neurones are reviewed. These studies
             shed light on how a population of neurones can follow
             arbitrary variations in input stimuli, how the dynamics of
             the population depends on the type of noise, and how
             recurrent connections influence the dynamics. The importance
             of inhibitory feedback for the generation of irregularity in
             single cell behaviour is emphasized. Examples of computation
             that recurrent networks with excitatory and inhibitory cells
             can perform are then discussed. Maintenance of a network
             state as an attractor of the system is discussed as a model
             for working memory function, in both object and spatial
             modalities. These models can be used to interpret and make
             predictions about electrophysiological data in the awake
             monkey.},
   Doi = {10.1016/s0928-4257(00)01084-6},
   Key = {fds328518}
}

@article{fds328520,
   Author = {Brunel, N},
   Title = {Dynamics of sparsely connected networks of excitatory and
             inhibitory spiking neurons.},
   Journal = {J Comput Neurosci},
   Volume = {8},
   Number = {3},
   Pages = {183-208},
   Year = {2000},
   url = {http://dx.doi.org/10.1023/a:1008925309027},
   Abstract = {The dynamics of networks of sparsely connected excitatory
             and inhibitory integrate-and-fire neurons are studied
             analytically. The analysis reveals a rich repertoire of
             states, including synchronous states in which neurons fire
             regularly; asynchronous states with stationary global
             activity and very irregular individual cell activity; and
             states in which the global activity oscillates but
             individual cells fire irregularly, typically at rates lower
             than the global oscillation frequency. The network can
             switch between these states, provided the external
             frequency, or the balance between excitation and inhibition,
             is varied. Two types of network oscillations are observed.
             In the fast oscillatory state, the network frequency is
             almost fully controlled by the synaptic time scale. In the
             slow oscillatory state, the network frequency depends mostly
             on the membrane time constant. Finite size effects in the
             asynchronous state are also discussed.},
   Doi = {10.1023/a:1008925309027},
   Key = {fds328520}
}
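%% Note: a minimal discrete-time Python sketch of a sparsely connected
%% excitatory-inhibitory integrate-and-fire network of the kind analyzed
%% above. Parameter values and the simplified delta-synapse,
%% one-step-delay update are illustrative assumptions, not the paper's
%% exact scheme.

import numpy as np

rng = np.random.default_rng(1)

NE, NI = 1000, 250          # excitatory / inhibitory populations
N = NE + NI
eps = 0.1                   # connection probability
g = 5.0                     # relative strength of inhibition
Jw = 0.1                    # EPSP amplitude (mV)
theta, Vr = 20.0, 10.0      # threshold / reset (mV)
tau, t_rp = 20.0, 2.0       # membrane time constant / refractory period (ms)
dt = 0.1                    # time step (ms)

CE = int(eps * NE)
nu_thr = theta / (Jw * CE * tau)     # rate needed to reach threshold (kHz)
ext_rate = 2.0 * nu_thr * CE         # total external Poisson rate per neuron (kHz)

# Sparse random weights: excitatory columns +J, inhibitory columns -g*J
W = (rng.random((N, N)) < eps).astype(float)
W[:, :NE] *= Jw
W[:, NE:] *= -g * Jw

V = np.zeros(N)
refr = np.zeros(N)
last_spikes = np.zeros(N)
rates = []

for step in range(5000):
    ext = rng.poisson(ext_rate * dt, N) * Jw      # external excitatory drive
    V += dt / tau * (-V) + W @ last_spikes + ext  # leak + recurrent + external
    V[refr > 0] = Vr                              # clamp refractory neurons
    refr = np.maximum(refr - dt, 0.0)
    fired = V >= theta
    V[fired] = Vr
    refr[fired] = t_rp
    last_spikes = fired.astype(float)
    rates.append(fired.mean() / dt)               # population rate (kHz)

print(f"mean population rate: {1000 * np.mean(rates[1000:]):.1f} Hz")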

@article{fds328521,
   Author = {Brunel, N and Wang, XJ},
   Title = {Fast network oscillations with intermittent principal cell
             firing in a model of a recurrent excitatory-inhibitory
             circuit},
   Journal = {EUROPEAN JOURNAL OF NEUROSCIENCE},
   Volume = {12},
   Pages = {79-79},
   Publisher = {BLACKWELL SCIENCE LTD},
   Year = {2000},
   Month = {January},
   Key = {fds328521}
}

@article{fds328519,
   Author = {Brunel, N},
   Title = {Phase diagrams of sparsely connected networks of excitatory
             and inhibitory spiking neurons},
   Journal = {Neurocomputing},
   Volume = {32-33},
   Pages = {307-312},
   Publisher = {Elsevier BV},
   Year = {2000},
   Month = {June},
   url = {http://dx.doi.org/10.1016/S0925-2312(00)00179-X},
   Abstract = {The dynamics of networks of sparsely connected excitatory
             and inhibitory integrate-and-fire neurons is studied
              analytically. The 'phase diagrams' of such systems include:
              synchronous states in which neurons fire regularly;
              asynchronous states with stationary global activity and very
              irregular individual cell activity; synchronous states in
              which the global activity oscillates but individual cells
              fire irregularly, typically at frequencies lower than the
              global oscillation frequency. The network can switch between
              these states, provided the external frequency, or the
              balance between excitation and inhibition, is varied. ©
              2000 Published by Elsevier Science B.V. All rights
              reserved.},
   Doi = {10.1016/S0925-2312(00)00179-X},
   Key = {fds328519}
}

@article{fds328517,
   Author = {Compte, A and Brunel, N and Goldman-Rakic, PS and Wang,
             XJ},
   Title = {Synaptic mechanisms and network dynamics underlying spatial
             working memory in a cortical network model.},
   Journal = {Cereb Cortex},
   Volume = {10},
   Number = {9},
   Pages = {910-923},
   Year = {2000},
   Month = {September},
   url = {http://dx.doi.org/10.1093/cercor/10.9.910},
   Abstract = {Single-neuron recordings from behaving primates have
             established a link between working memory processes and
             information-specific neuronal persistent activity in the
             prefrontal cortex. Using a network model endowed with a
             columnar architecture and based on the physiological
             properties of cortical neurons and synapses, we have
             examined the synaptic mechanisms of selective persistent
             activity underlying spatial working memory in the prefrontal
             cortex. Our model reproduces the phenomenology of the
             oculomotor delayed-response experiment of Funahashi et al.
             (S. Funahashi, C.J. Bruce and P.S. Goldman-Rakic, Mnemonic
             coding of visual space in the monkey's dorsolateral
             prefrontal cortex. J Neurophysiol 61:331-349, 1989). To
             observe stable spontaneous and persistent activity, we find
             that recurrent synaptic excitation should be primarily
             mediated by NMDA receptors, and that overall recurrent
             synaptic interactions should be dominated by inhibition.
             Isodirectional tuning of adjacent pyramidal cells and
             interneurons can be accounted for by a structured
             pyramid-to-interneuron connectivity. Robust memory storage
             against random drift of the tuned persistent activity and
             against distractors (intervening stimuli during the delay
             period) may be enhanced by neuromodulation of recurrent
             synapses. Experimentally testable predictions concerning the
             neural basis of working memory are discussed.},
   Doi = {10.1093/cercor/10.9.910},
   Key = {fds328517}
}

@article{fds328516,
   Author = {Brunel, N},
   Title = {Persistent activity and the single-cell frequency-current
             curve in a cortical network model.},
   Journal = {Network},
   Volume = {11},
   Number = {4},
   Pages = {261-280},
   Year = {2000},
   Month = {November},
   url = {http://dx.doi.org/10.1088/0954-898x/11/4/302},
   Abstract = {Neurophysiological experiments indicate that working memory
             of an object is maintained by the persistent activity of
             cells in the prefrontal cortex and infero-temporal cortex of
             the monkey. This paper considers a cortical network model in
             which this persistent activity appears due to recurrent
             synaptic interactions. The conditions under which the
             magnitude of spontaneous and persistent activity are close
             to one another (as is found empirically) are investigated
             using a simplified mean-field description in which firing
             rates in these states are given by the intersections of a
             straight line with the f-I curve of a single pyramidal cell.
             The present analysis relates a network phenomenon -
             persistent activity in a 'working memory' state - to
             single-cell data which are accessible to experiment. It
             predicts that, in networks of the cerebral cortex in which
             persistent activity phenomena are observed, average synaptic
             inputs in both spontaneous and persistent activity should
             bring the cells close to firing threshold. Cells should be
             slightly sub-threshold in spontaneous activity, and slightly
             supra-threshold in persistent activity. The results are
             shown to be robust to the inclusion of inhomogeneities that
             produce wide distributions of firing rates, in both
             spontaneous and working memory states.},
   Doi = {10.1088/0954-898x/11/4/302},
   Key = {fds328516}
}
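%% Note: the graphical construction described above, as a hedged sketch
%% (notation illustrative). In the simplified mean-field description, the
%% firing rate nu in a spontaneous or persistent state solves the
%% self-consistency condition

\[ \nu = \phi\!\left(I_{\mathrm{ext}} + w \, \nu\right) \]

%% where phi is the single-cell f-I curve and w the effective recurrent
%% coupling; the network states sit at the intersections of the straight
%% line I = I_ext + w*nu with the f-I curve.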

@article{fds328514,
   Author = {Brunel, N and Wang, XJ},
   Title = {Effects of neuromodulation in a cortical network model of
             object working memory dominated by recurrent
             inhibition.},
   Journal = {J Comput Neurosci},
   Volume = {11},
   Number = {1},
   Pages = {63-85},
   Year = {2001},
   url = {http://dx.doi.org/10.1023/a:1011204814320},
   Abstract = {Experimental evidence suggests that the maintenance of an
             item in working memory is achieved through persistent
             activity in selective neural assemblies of the cortex. To
             understand the mechanisms underlying this phenomenon, it is
             essential to investigate how persistent activity is affected
             by external inputs or neuromodulation. We have addressed
             these questions using a recurrent network model of object
             working memory. Recurrence is dominated by inhibition,
             although persistent activity is generated through recurrent
             excitation in small subsets of excitatory neurons. Our main
             findings are as follows. (1) Because of the strong feedback
             inhibition, persistent activity shows an inverted U shape as
             a function of increased external drive to the network. (2) A
             transient external excitation can switch off a network from
             a selective persistent state to its spontaneous state. (3)
             The maintenance of the sample stimulus in working memory is
             not affected by intervening stimuli (distractors) during the
             delay period, provided the stimulation intensity is not
             large. On the other hand, if stimulation intensity is large
             enough, distractors disrupt sample-related persistent
             activity, and the network is able to maintain a memory only
             of the last shown stimulus. (4) A concerted modulation of
             GABA(A) and NMDA conductances leads to a decrease of
             spontaneous activity but an increase of persistent activity;
             the enhanced signal-to-noise ratio is shown to increase the
             resistance of the network to distractors. (5) Two mechanisms
             are identified that produce an inverted U shaped dependence
             of persistent activity on modulation. The present study
             therefore points to several mechanisms that enhance the
             signal-to-noise ratio in working memory states. These
             mechanisms could be implemented in the prefrontal cortex by
             dopaminergic projections from the midbrain.},
   Doi = {10.1023/a:1011204814320},
   Key = {fds328514}
}

@article{fds328515,
   Author = {Brunel, N and Chance, FS and Fourcaud, N and Abbott,
             LF},
   Title = {Effects of synaptic noise and filtering on the frequency
             response of spiking neurons.},
   Journal = {Phys Rev Lett},
   Volume = {86},
   Number = {10},
   Pages = {2186-2189},
   Year = {2001},
   Month = {March},
   url = {http://dx.doi.org/10.1103/PhysRevLett.86.2186},
   Abstract = {Noise can have a significant impact on the response dynamics
             of a nonlinear system. For neurons, the primary source of
             noise comes from background synaptic input activity. If this
              is approximated as white noise, the amplitude of the
              modulation of the firing rate in response to an input
              current oscillating at frequency omega decreases as
              1/sqrt(omega) and lags the input by 45 degrees in phase.
              However, if filtering due to realistic synaptic dynamics is
              included, the firing rate is modulated by a finite amount
              even in the limit where omega tends to infinity, and the
              phase lag is eliminated. Thus, through its effect on noise inputs,
             realistic synaptic dynamics can ensure unlagged neuronal
             responses to high-frequency inputs.},
   Doi = {10.1103/PhysRevLett.86.2186},
   Key = {fds328515}
}
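%% Note: the two limiting behaviors stated in the abstract, written as a
%% hedged LaTeX sketch (proportionalities only; the prefactors are in the
%% paper). For white-noise input the firing-rate modulation obeys

\[ |\hat{\nu}(\omega)| \propto \frac{1}{\sqrt{\omega}}, \qquad
   \arg \hat{\nu}(\omega) \to -45^{\circ}
   \quad (\omega \to \infty) \]

%% whereas with synaptic filtering (colored noise) |\hat{\nu}(\omega)|
%% tends to a nonzero constant with vanishing phase lag.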

@article{fds328513,
   Author = {Fourcaud, N and Brunel, N},
   Title = {Dynamics of the firing probability of noisy
             integrate-and-fire neurons.},
   Journal = {Neural Comput},
   Volume = {14},
   Number = {9},
   Pages = {2057-2110},
   Year = {2002},
   Month = {September},
   url = {http://dx.doi.org/10.1162/089976602320264015},
   Abstract = {Cortical neurons in vivo undergo a continuous bombardment
             due to synaptic activity, which acts as a major source of
             noise. Here, we investigate the effects of the noise
             filtering by synapses with various levels of realism on
             integrate-and-fire neuron dynamics. The noise input is
             modeled by white (for instantaneous synapses) or colored
             (for synapses with a finite relaxation time) noise.
             Analytical results for the modulation of firing probability
             in response to an oscillatory input current are obtained by
             expanding a Fokker-Planck equation for small parameters of
             the problem - when both the amplitude of the modulation is
             small compared to the background firing rate and the
             synaptic time constant is small compared to the membrane
             time constant. We report here the detailed calculations
             showing that if a synaptic decay time constant is included
             in the synaptic current model, the firing-rate modulation of
             the neuron due to an oscillatory input remains finite in the
             high-frequency limit with no phase lag. In addition, we
             characterize the low-frequency behavior and the behavior of
             the high-frequency limit for intermediate decay times. We
             also characterize the effects of introducing a rise time to
             the synaptic currents and the presence of several synaptic
             receptors with different kinetics. In both cases, we
             determine, using numerical simulations, an effective decay
             time constant that describes the neuronal response
             completely.},
   Doi = {10.1162/089976602320264015},
   Key = {fds328513}
}
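%% Note: the expansion described above starts from the standard
%% Fokker-Planck (diffusion) equation for the membrane-potential density
%% P(V, t) of a leaky integrate-and-fire neuron driven by white noise; a
%% common form (notation illustrative) is

\[ \tau_m \frac{\partial P}{\partial t} =
     \frac{\partial}{\partial V}\left[(V - \mu)P\right]
     + \frac{\sigma^2}{2} \frac{\partial^2 P}{\partial V^2} \]

%% with an absorbing boundary at threshold and reinjection at reset;
%% colored noise adds the synaptic variable and the small parameter
%% sqrt(tau_s/tau_m) used in the paper's perturbative treatment.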

@article{fds328509,
   Author = {Brunel, N and Frégnac, Y and Meunier, C and Nadal,
             J-P},
   Title = {Neuroscience and computation.},
   Journal = {J Physiol Paris},
   Volume = {97},
   Number = {4-6},
   Pages = {387-390},
   Year = {2003},
   url = {http://dx.doi.org/10.1016/j.jphysparis.2004.02.001},
   Doi = {10.1016/j.jphysparis.2004.02.001},
   Key = {fds328509}
}

@article{fds328511,
   Author = {Brunel, N and Hakim, V and Richardson, MJE},
   Title = {Firing-rate resonance in a generalized integrate-and-fire
             neuron with subthreshold resonance.},
   Journal = {Phys Rev E Stat Nonlin Soft Matter Phys},
   Volume = {67},
   Number = {5 Pt 1},
   Pages = {051916},
   Year = {2003},
   Month = {May},
   url = {http://dx.doi.org/10.1103/PhysRevE.67.051916},
   Abstract = {Neurons that exhibit a peak at finite frequency in their
             membrane potential response to oscillatory inputs are
             widespread in the nervous system. However, the influence of
             this subthreshold resonance on spiking properties has not
             yet been thoroughly analyzed. To this end, generalized
             integrate-and-fire models are introduced that reproduce at
             the linear level the subthreshold behavior of any given
             conductance-based model. A detailed analysis is presented of
             the simplest resonant model of this kind that has two
             variables: the membrane potential and a supplementary
             voltage-gated resonant variable. The firing-rate modulation
             created by a noisy weak oscillatory drive, mimicking an in
             vivo environment, is computed numerically and analytically
             when the dynamics of the resonant variable is slow compared
             to that of the membrane potential. The results show that the
             firing-rate modulation is shaped by the subthreshold
             resonance. For weak noise, the firing-rate modulation has a
             minimum near the preferred subthreshold frequency. For
             higher noise, such as that prevailing in vivo, the
             firing-rate modulation peaks near the preferred subthreshold
             frequency.},
   Doi = {10.1103/PhysRevE.67.051916},
   Key = {fds328511}
}

@article{fds328512,
   Author = {Richardson, MJE and Brunel, N and Hakim, V},
   Title = {From subthreshold to firing-rate resonance.},
   Journal = {J Neurophysiol},
   Volume = {89},
   Number = {5},
   Pages = {2538-2554},
   Year = {2003},
   Month = {May},
   url = {http://dx.doi.org/10.1152/jn.00955.2002},
   Abstract = {Many types of neurons exhibit subthreshold resonance.
             However, little is known about whether this frequency
             preference influences spike emission. Here, the link between
             subthreshold resonance and firing rate is examined in the
             framework of conductance-based models. A classification of
             the subthreshold properties of a general class of neurons is
             first provided. In particular, a class of neurons is
             identified in which the input impedance exhibits a
             suppression at a nonzero low frequency as well as a peak at
             higher frequency. The analysis is then extended to the
             effect of subthreshold resonance on the dynamics of the
             firing rate. The considered input current comprises a
             background noise term, mimicking the massive synaptic
             bombardment in vivo. Of interest is the modulatory effect an
             additional weak oscillating current has on the instantaneous
             firing rate. When the noise is weak and firing regular, the
             frequency most preferentially modulated is the firing rate
             itself. Conversely, when the noise is strong and firing
             irregular, the modulation is strongest at the subthreshold
             resonance frequency. These results are demonstrated for two
             specific conductance-based models and for a generalization
             of the integrate-and-fire model that captures subthreshold
             resonance. They suggest that resonant neurons are able to
             communicate their frequency preference to postsynaptic
             targets when the level of noise is comparable to that
             prevailing in vivo.},
   Doi = {10.1152/jn.00955.2002},
   Key = {fds328512}
}
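%% Note: one common way to write the two-variable generalized
%% integrate-and-fire model mentioned above (a hedged sketch; the papers'
%% exact parametrization may differ):

\[ \tau_m \frac{dV}{dt} = -V - g \, w + I(t), \qquad
   \tau_w \frac{dw}{dt} = V - w \]

%% with a spike emitted and V reset when V reaches threshold. For
%% sufficiently strong coupling g and slow tau_w, the subthreshold
%% impedance peaks at a nonzero frequency, producing the resonance whose
%% effect on the firing rate is analyzed above.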

@article{fds328510,
   Author = {Brunel, N and Wang, X-J},
   Title = {What determines the frequency of fast network oscillations
             with irregular neural discharges? I. Synaptic dynamics and
             excitation-inhibition balance.},
   Journal = {J Neurophysiol},
   Volume = {90},
   Number = {1},
   Pages = {415-430},
   Year = {2003},
   Month = {July},
   url = {http://dx.doi.org/10.1152/jn.01095.2002},
   Abstract = {When the local field potential of a cortical network
              displays coherent fast oscillations (approximately 40-Hz
              gamma or approximately 200-Hz sharp-wave ripples), the spike
             trains of constituent neurons are typically irregular and
             sparse. The dichotomy between rhythmic local field and
             stochastic spike trains presents a challenge to the theory
             of brain rhythms in the framework of coupled oscillators.
             Previous studies have shown that when noise is large and
             recurrent inhibition is strong, a coherent network rhythm
             can be generated while single neurons fire intermittently at
             low rates compared to the frequency of the oscillation.
             However, these studies used too simplified synaptic kinetics
             to allow quantitative predictions of the population rhythmic
             frequency. Here we show how to derive quantitatively the
             coherent oscillation frequency for a randomly connected
             network of leaky integrate-and-fire neurons with realistic
             synaptic parameters. In a noise-dominated interneuronal
             network, the oscillation frequency depends much more on the
             shortest synaptic time constants (delay and rise time) than
             on the longer synaptic decay time, and approximately 200-Hz
             frequency can be realized with synaptic time constants taken
             from slice data. In a network composed of both interneurons
             and excitatory cells, the rhythmogenesis is a compromise
             between two scenarios: the fast purely interneuronal
             mechanism, and the slower feedback mechanism (relying on the
             excitatory-inhibitory loop). The properties of the rhythm
             are determined essentially by the ratio of time scales of
             excitatory and inhibitory currents and by the balance
             between the mean recurrent excitation and inhibition. Faster
             excitation than inhibition, or a higher excitation/inhibition
             ratio, favors the feedback loop and a much slower
             oscillation (typically in the gamma range).},
   Doi = {10.1152/jn.01095.2002},
   Key = {fds328510}
}
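
A minimal simulation sketch of the regime analyzed in the entry above:
a sparse network of inhibitory leaky integrate-and-fire neurons with a
synaptic delay, in which a fast population rhythm coexists with
irregular, low-rate single-cell firing. All parameter values below are
illustrative assumptions (in Python), not the paper's fitted synaptic
constants.

    import numpy as np

    rng = np.random.default_rng(0)
    N, C = 1000, 100             # neurons; synapses received per neuron (avg)
    J, delay = 0.2, 2.0          # IPSP amplitude (mV), synaptic delay (ms)
    tau_m, V_th, V_re = 10.0, 20.0, 10.0  # membrane constant, threshold, reset
    mu, sigma = 24.0, 1.0        # external drive: mean and noise (mV)
    dt, T = 0.1, 500.0           # integration step and duration (ms)
    d_steps = int(delay / dt)

    W = (rng.random((N, N)) < C / N).astype(float)  # random sparse connectivity
    V = rng.uniform(0.0, V_th, N)
    buffer = np.zeros((d_steps, N))                 # spikes emitted 'delay' ago
    rates = []

    for t in range(int(T / dt)):
        arrived = buffer[t % d_steps]               # spikes from d_steps ago
        V += dt / tau_m * (mu - V) - J * (W @ arrived)  # leak, drive, inhibition
        V += sigma * np.sqrt(dt / tau_m) * rng.standard_normal(N)
        spikes = V >= V_th
        V[spikes] = V_re
        buffer[t % d_steps] = spikes
        rates.append(spikes.mean() / dt * 1e3)      # population rate (Hz)

    # The spectrum of 'rates' peaks at the network frequency (set mainly by
    # the shortest synaptic times), far above the mean single-neuron rate.
    print("mean single-neuron rate:", round(float(np.mean(rates)), 1), "Hz")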

@article{fds328507,
   Author = {Mongillo, G and Amit, DJ and Brunel, N},
   Title = {Retrospective and prospective persistent activity induced by
             Hebbian learning in a recurrent cortical
             network.},
   Journal = {Eur J Neurosci},
   Volume = {18},
   Number = {7},
   Pages = {2011-2024},
   Year = {2003},
   Month = {October},
   url = {http://dx.doi.org/10.1046/j.1460-9568.2003.02908.x},
   Abstract = {Recordings from cells in the associative cortex of monkeys
             performing visual working memory tasks link persistent
             neuronal activity, long-term memory and associative memory.
             In particular, delayed pair-associate tasks have revealed
             neuronal correlates of long-term memory of associations
             between stimuli. Here, a recurrent cortical network model
             with Hebbian plastic synapses is subjected to the
             pair-associate protocol. In a first stage, learning leads to
             the appearance of delay activity, representing individual
             images ('retrospective' activity). As learning proceeds, the
             same learning mechanism uses retrospective delay activity
             together with choice stimulus activity to potentiate
             synapses connecting neural populations representing
             associated images. As a result, the neural population
             corresponding to the pair-associate of the image presented
             is activated prior to its visual stimulation ('prospective'
             activity). The probability of appearance of prospective
             activity is governed by the strength of the inter-population
             connections, which in turn depends on the frequency of
             pairings during training. The time course of the transitions
             from retrospective to prospective activity during the delay
             period is found to depend on the fraction of slow,
             N-methyl-d-aspartate-like receptors at excitatory synapses.
             For fast recurrent excitation, transitions are abrupt; slow
             recurrent excitation renders transitions gradual. Both
             scenarios lead to a gradual rise of delay activity when
             averaged over many trials, because of the stochastic nature
             of the transitions. The model reproduces most of the
             neuro-physiological data obtained during such tasks, makes
             experimentally testable predictions and demonstrates how
             persistent activity (working memory) brings about the
             learning of long-term associations.},
   Doi = {10.1046/j.1460-9568.2003.02908.x},
   Key = {fds328507}
}

@article{fds328508,
   Author = {Brunel, N and Latham, PE},
   Title = {Firing rate of the noisy quadratic integrate-and-fire
             neuron.},
   Journal = {Neural Comput},
   Volume = {15},
   Number = {10},
   Pages = {2281-2306},
   Year = {2003},
   Month = {October},
   url = {http://dx.doi.org/10.1162/089976603322362365},
   Abstract = {We calculate the firing rate of the quadratic
             integrate-and-fire neuron in response to a colored noise
             input current. Such an input current is a good approximation
             to the noise due to the random bombardment of spikes, with
             the correlation time of the noise corresponding to the decay
             time of the synapses. The key parameter that determines the
             firing rate is the ratio of the correlation time of the
             colored noise, tau(s), to the neuronal time constant,
             tau(m). We calculate the firing rate exactly in two limits:
             when the ratio, tau(s)/tau(m), goes to zero (white noise)
             and when it goes to infinity. The correction to the short
correlation time limit is O(tau(s)/tau(m)), which is
             qualitatively different from that of the leaky
             integrate-and-fire neuron, where the correction is
             O(sqrt(tau(s)/tau(m))).
             The difference is due to the different boundary conditions
             of the probability density function of the membrane
             potential of the neuron at firing threshold. The correction
             to the long correlation time limit is O(tau(m)/tau(s)). By
             combining the short and long correlation time limits, we
             derive an expression that provides a good approximation to
             the firing rate over the whole range of tau(s)/tau(m) in the
suprathreshold regime, that is, in a regime in which the
             average current is sufficient to make the cell fire. In the
             subthreshold regime, the expression breaks down somewhat
             when tau(s) becomes large compared to tau(m).},
   Doi = {10.1162/089976603322362365},
   Key = {fds328508}
}
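
For illustration, a minimal sketch of the setup above: a quadratic
integrate-and-fire neuron driven by colored (Ornstein-Uhlenbeck) noise
with correlation time tau(s). The divergence to infinity at a spike is
truncated numerically; all parameter values are assumptions.

    import numpy as np

    tau_m, tau_s = 10.0, 2.0        # membrane and noise correlation time (ms)
    mu, sigma = 0.5, 1.0            # mean drive and noise amplitude
    V_spike, V_reset = 10.0, -10.0  # numerical stand-ins for +/- infinity
    dt, T = 0.05, 2e4               # time step and duration (ms)

    rng = np.random.default_rng(1)
    V, eta, n_spikes = 0.0, 0.0, 0
    for _ in range(int(T / dt)):
        # OU noise with correlation time tau_s and stationary std sigma
        eta += -eta * dt / tau_s \
               + sigma * np.sqrt(2 * dt / tau_s) * rng.standard_normal()
        # QIF dynamics: tau_m dV/dt = V^2 + mu + eta
        V += dt / tau_m * (V * V + mu + eta)
        if V >= V_spike:
            V = V_reset
            n_spikes += 1

    print("firing rate ~", round(1e3 * n_spikes / T, 1), "Hz")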

@article{fds328506,
   Author = {Brunel, N},
   Title = {Dynamics and plasticity of stimulus-selective persistent
             activity in cortical network models.},
   Journal = {Cereb Cortex},
   Volume = {13},
   Number = {11},
   Pages = {1151-1161},
   Year = {2003},
   Month = {November},
   url = {http://dx.doi.org/10.1093/cercor/bhg096},
   Abstract = {Persistent neuronal activity is widespread in many areas of
             the cerebral cortex of monkeys performing cognitive tasks
             with a working memory component. Modeling studies have
             helped understanding of the conditions under which
             persistent activity can be sustained in cortical circuits.
             Here, we first review several basic models of persistent
             activity, including bistable models with excitation only and
             multistable models for working memory of a discrete set of
             pictures or objects with structured excitation and global
             inhibition. In many experiments, persistent activity has
             been shown to be subject to changes due to associative
             learning. In cortical network models, Hebbian learning
             shapes the synaptic structure and, in turn, the properties
             of persistent activity when pictures are associated together
             in the course of a task. It is shown how the theoretical
             models can reproduce basic experimental findings of
             neurophysiological recordings from inferior temporal and
             perirhinal cortices obtained using the following
             experimental protocols: (i) the pair-associate task; (ii)
             the pair-associate task with color switch; and (iii) the
             delay match to sample task with a fixed sequence of
             samples.},
   Doi = {10.1093/cercor/bhg096},
   Key = {fds328506}
}

@article{fds328505,
   Author = {Fourcaud-Trocmé, N and Hansel, D and van Vreeswijk, C and Brunel,
             N},
   Title = {How spike generation mechanisms determine the neuronal
             response to fluctuating inputs.},
   Journal = {J Neurosci},
   Volume = {23},
   Number = {37},
   Pages = {11628-11640},
   Year = {2003},
   Month = {December},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.23-37-11628.2003},
   Abstract = {This study examines the ability of neurons to track
             temporally varying inputs, namely by investigating how the
             instantaneous firing rate of a neuron is modulated by a
noisy input with a small sinusoidal component at frequency
             f. Using numerical simulations of conductance-based
             neurons and analytical calculations of one-variable
             nonlinear integrate-and-fire neurons, we characterized the
             dependence of this modulation on f. For sufficiently high
             noise, the neuron acts as a low-pass filter. The modulation
             amplitude is approximately constant for frequencies up to a
             cutoff frequency, fc, after which it decays. The cutoff
             frequency increases almost linearly with the firing rate.
For higher frequencies, the modulation amplitude decays as
             C/f^alpha, where the power alpha depends on the spike
             initiation mechanism. For conductance-based models, alpha =
             1, and the prefactor C depends solely on the average firing
             rate and a spike "slope factor," which determines the
             sharpness of the spike initiation. These results are
             attributable to the fact that near threshold, the sodium
             activation variable can be approximated by an exponential
             function. Using this feature, we propose a simplified
             one-variable model, the "exponential integrate-and-fire
             neuron," as an approximation of a conductance-based model.
             We show that this model reproduces the dynamics of a simple
             conductance-based model extremely well. Our study shows how
             an intrinsic neuronal property (the characteristics of fast
             sodium channels) determines the speed with which neurons can
             track changes in input.},
   Doi = {10.1523/JNEUROSCI.23-37-11628.2003},
   Key = {fds328505}
}
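
A minimal sketch of the exponential integrate-and-fire (EIF) model
proposed in the entry above, in which spike initiation is captured by
an exponential term with slope factor Delta_T; parameter values here
are illustrative assumptions, not fits to a conductance-based model.

    import numpy as np

    tau_m, E_L = 10.0, -65.0    # membrane time constant (ms), rest (mV)
    V_T, Delta_T = -50.0, 2.0   # soft threshold, spike slope factor (mV)
    V_cut, V_re = -30.0, -60.0  # numerical spike cutoff and reset (mV)
    mu, sigma = 18.0, 3.0       # mean and fluctuation of the input (mV)
    dt, T = 0.05, 2e4           # time step and duration (ms)

    rng = np.random.default_rng(2)
    V, n_spikes = E_L, 0
    for _ in range(int(T / dt)):
        # tau_m dV/dt = -(V - E_L) + Delta_T exp((V - V_T)/Delta_T) + I(t)
        V += dt / tau_m * (-(V - E_L)
                           + Delta_T * np.exp((V - V_T) / Delta_T) + mu)
        V += sigma * np.sqrt(dt / tau_m) * rng.standard_normal()
        if V >= V_cut:          # the exponential term diverges: a spike
            V, n_spikes = V_re, n_spikes + 1

    print("firing rate ~", round(1e3 * n_spikes / T, 1), "Hz")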

@article{fds328504,
   Author = {Brunel, N and Hakim, V and Isope, P and Nadal, J-P and Barbour,
             B},
   Title = {Optimal information storage and the distribution of synaptic
             weights: perceptron versus Purkinje cell.},
   Journal = {Neuron},
   Volume = {43},
   Number = {5},
   Pages = {745-757},
   Year = {2004},
   Month = {September},
   url = {http://dx.doi.org/10.1016/j.neuron.2004.08.023},
   Abstract = {It is widely believed that synaptic modifications underlie
             learning and memory. However, few studies have examined what
             can be deduced about the learning process from the
             distribution of synaptic weights. We analyze the perceptron,
             a prototypical feedforward neural network, and obtain the
             optimal synaptic weight distribution for a perceptron with
             excitatory synapses. It contains more than 50% silent
             synapses, and this fraction increases with storage
             reliability: silent synapses are therefore a necessary
             byproduct of optimizing learning and reliability. Exploiting
             the classical analogy between the perceptron and the
             cerebellar Purkinje cell, we fitted the optimal weight
             distribution to that measured for granule cell-Purkinje cell
             synapses. The two distributions agreed well, suggesting that
             the Purkinje cell can learn up to 5 kilobytes of
             information, in the form of 40,000 input-output
             associations.},
   Doi = {10.1016/j.neuron.2004.08.023},
   Key = {fds328504}
}
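
As a toy illustration of the mechanism above, the sketch below trains
a perceptron whose weights are constrained to be non-negative
(excitatory) on random associations; weights pinned at zero play the
role of silent synapses. Sizes, threshold, margin and learning rate
are assumptions, not the paper's optimal-distribution calculation.

    import numpy as np

    rng = np.random.default_rng(3)
    N, P = 400, 120                  # synapses, associations to store
    kappa, lr = 5.0, 0.05            # robustness margin, learning rate
    theta = 0.25 * N                 # fixed firing threshold (assumed)
    X = rng.choice([0.0, 1.0], size=(P, N))  # presynaptic patterns
    y = rng.choice([-1.0, 1.0], size=P)      # desired outputs
    w = rng.random(N)

    for epoch in range(1000):
        errors = 0
        for m in rng.permutation(P):
            if y[m] * (w @ X[m] - theta) < kappa:  # wrong or not robust
                w += lr * y[m] * X[m]              # perceptron update
                np.maximum(w, 0.0, out=w)          # excitatory constraint
                errors += 1
        if errors == 0:
            break

    print("fraction of silent (zero-weight) synapses:",
          round(float(np.mean(w == 0.0)), 2))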

@article{fds328503,
   Author = {Boucheny, C and Brunel, N and Arleo, A},
   Title = {A continuous attractor network model without recurrent
             excitation: maintenance and integration in the head
             direction cell system.},
   Journal = {J Comput Neurosci},
   Volume = {18},
   Number = {2},
   Pages = {205-227},
   Year = {2005},
   url = {http://dx.doi.org/10.1007/s10827-005-6559-y},
   Abstract = {Motivated by experimental observations of the head direction
             system, we study a three population network model that
             operates as a continuous attractor network. This network is
able to store in short-term memory an angular variable
             (the head direction) as a spatial profile of activity across
             neurons in the absence of selective external inputs, and to
             accurately update this variable on the basis of angular
             velocity inputs. The network is composed of one excitatory
             population and two inhibitory populations, with
             inter-connections between populations but no connections
within a given population. In particular,
             there are no excitatory-to-excitatory connections. Angular
             velocity signals are represented as inputs in one inhibitory
             population (clockwise turns) or the other (counterclockwise
             turns). The system is studied using a combination of
             analytical and numerical methods. Analysis of a simplified
             model composed of threshold-linear neurons gives the
             conditions on the connectivity for (i) the emergence of the
             spatially selective profile, (ii) reliable integration of
             angular velocity inputs, and (iii) the range of angular
             velocities that can be accurately integrated by the model.
             Numerical simulations allow us to study the proposed
             scenario in a large network of spiking neurons and compare
             their dynamics with that of head direction cells recorded in
             the rat limbic system. In particular, we show that the
             directional representation encoded by the attractor network
             can be rapidly updated by external cues, consistent with the
             very short update latencies observed experimentally by
             Zugaro et al. (2003) in thalamic head direction
             cells.},
   Doi = {10.1007/s10827-005-6559-y},
   Key = {fds328503}
}

@article{fds328499,
   Author = {Brunel, N},
Title = {Course 10: Network models of memory},
   Volume = {80},
   Number = {C},
   Pages = {407-476},
   Publisher = {Elsevier},
   Year = {2005},
   Month = {January},
   url = {http://dx.doi.org/10.1016/S0924-8099(05)80016-2},
   Doi = {10.1016/S0924-8099(05)80016-2},
   Key = {fds328499}
}

@article{fds328502,
   Author = {Fourcaud-Trocmé, N and Brunel, N},
   Title = {Dynamics of the instantaneous firing rate in response to
             changes in input statistics.},
   Journal = {J Comput Neurosci},
   Volume = {18},
   Number = {3},
   Pages = {311-321},
   Year = {2005},
   Month = {June},
   url = {http://dx.doi.org/10.1007/s10827-005-0337-8},
   Abstract = {We review and extend recent results on the instantaneous
             firing rate dynamics of simplified models of spiking neurons
             in response to noisy current inputs. It has been shown
             recently that the response of the instantaneous firing rate
to small amplitude oscillations in the mean inputs depends,
             in the limit of large frequency f, on the spike initiation
             dynamics. A particular simplified model, the exponential
             integrate-and-fire (EIF) model, has a response that decays
             as 1/f in the large frequency limit and describes very well
             the response of conductance-based models with a
             Hodgkin-Huxley type fast sodium current. Here, we show that
             the response of the EIF instantaneous firing rate also
             decays as 1/f in the case of an oscillation in the variance
             of the inputs for both white and colored noise. We then
             compute the initial transient response of the firing rate of
             the EIF model to a step change in its mean inputs and/or in
             the variance of its inputs. We show that in both cases the
             response speed is proportional to the neuron stationary
             firing rate and inversely proportional to a 'spike slope
             factor' Delta(T) that controls the sharpness of spike
             initiation: as 1/Delta(T) for a step change in mean inputs,
and as 1/Delta(T)^2 for a step change in the variance in
             the inputs.},
   Doi = {10.1007/s10827-005-0337-8},
   Key = {fds328502}
}

@article{fds328501,
   Author = {Roxin, A and Brunel, N and Hansel, D},
   Title = {Role of delays in shaping spatiotemporal dynamics of
             neuronal activity in large networks.},
   Journal = {Phys Rev Lett},
   Volume = {94},
   Number = {23},
   Pages = {238103},
   Year = {2005},
   Month = {June},
   url = {http://dx.doi.org/10.1103/PhysRevLett.94.238103},
   Abstract = {We study the effect of delays on the dynamics of large
             networks of neurons. We show that delays give rise to a
             wealth of bifurcations and to a rich phase diagram, which
             includes oscillatory bumps, traveling waves, lurching waves,
             standing waves arising via a period-doubling bifurcation,
             aperiodic regimes, and regimes of multistability. We study
             the existence and the stability of the various dynamical
             patterns analytically and numerically in a simplified rate
             model as a function of the interaction parameters. The
             results derived in that framework allow us to understand the
             origin of the diversity of dynamical states observed in
             large networks of spiking neurons.},
   Doi = {10.1103/PhysRevLett.94.238103},
   Key = {fds328501}
}
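
A minimal sketch of the kind of model analyzed above: threshold-linear
rate units on a ring, coupled through uniform plus cosine-modulated
connectivity acting after a fixed delay. Parameter values are
illustrative assumptions; different choices of (J0, J1, D) land in
different regions of the phase diagram.

    import numpy as np

    n, tau, D = 128, 1.0, 0.5        # units, time constant, delay
    J0, J1, I0 = -10.0, 11.0, 2.0    # uniform coupling, modulation, drive
    dt = 0.01
    d_steps = int(D / dt)

    x = np.arange(n) * 2 * np.pi / n
    J = (J0 + J1 * np.cos(x[:, None] - x[None, :])) / n
    phi = lambda u: np.maximum(u, 0.0)   # threshold-linear transfer

    rng = np.random.default_rng(4)
    r = 0.1 + 0.01 * rng.random(n)       # small random initial condition
    history = np.tile(r, (d_steps, 1))   # activity over the last delay D

    for t in range(20000):
        r_delayed = history[t % d_steps].copy()  # profile a delay D ago
        history[t % d_steps] = r
        r = r + dt / tau * (-r + phi(I0 + J @ r_delayed))

    # Depending on (J0, J1, D) the network settles into homogeneous
    # oscillations, a standing bump, traveling waves, standing waves, etc.
    print("final profile min/max:",
          round(float(r.min()), 3), round(float(r.max()), 3))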

@article{fds328500,
   Author = {Geisler, C and Brunel, N and Wang, X-J},
   Title = {Contributions of intrinsic membrane dynamics to fast network
             oscillations with irregular neuronal discharges.},
   Journal = {J Neurophysiol},
   Volume = {94},
   Number = {6},
   Pages = {4344-4361},
   Year = {2005},
   Month = {December},
   url = {http://dx.doi.org/10.1152/jn.00510.2004},
   Abstract = {During fast oscillations in the local field potential
             (40-100 Hz gamma, 100-200 Hz sharp-wave ripples) single
             cortical neurons typically fire irregularly at rates that
             are much lower than the oscillation frequency. Recent
             computational studies have provided a mathematical
             description of such fast oscillations, using the leaky
             integrate-and-fire (LIF) neuron model. Here, we extend this
             theoretical framework to populations of more realistic
             Hodgkin-Huxley-type conductance-based neurons. In a noisy
             network of GABAergic neurons that are connected randomly and
             sparsely by chemical synapses, coherent oscillations emerge
             with a frequency that depends sensitively on the single
             cell's membrane dynamics. The population frequency can be
             predicted analytically from the synaptic time constants and
             the preferred phase of discharge during the oscillatory
             cycle of a single cell subjected to noisy sinusoidal input.
             The latter depends significantly on the single cell's
             membrane properties and can be understood in the context of
             the simplified exponential integrate-and-fire (EIF) neuron.
             We find that 200-Hz oscillations can be generated, provided
             the effective input conductance of single cells is large, so
             that the single neuron's phase shift is sufficiently small.
             In a two-population network of excitatory pyramidal cells
             and inhibitory neurons, recurrent excitation can either
             decrease or increase the population rhythmic frequency,
             depending on whether in a neuron the excitatory synaptic
             current follows or precedes the inhibitory synaptic current
             in an oscillatory cycle. Detailed single-cell properties
             have a substantial impact on population oscillations, even
             though rhythmicity does not originate from pacemaker neurons
             and is an emergent network phenomenon.},
   Doi = {10.1152/jn.00510.2004},
   Key = {fds328500}
}

@article{fds328497,
   Author = {Brunel, N and Hansel, D},
   Title = {How noise affects the synchronization properties of
             recurrent networks of inhibitory neurons.},
   Journal = {Neural Comput},
   Volume = {18},
   Number = {5},
   Pages = {1066-1110},
   Year = {2006},
   Month = {May},
   url = {http://dx.doi.org/10.1162/089976606776241048},
   Abstract = {GABAergic interneurons play a major role in the emergence of
             various types of synchronous oscillatory patterns of
             activity in the central nervous system. Motivated by these
             experimental facts, modeling studies have investigated
             mechanisms for the emergence of coherent activity in
             networks of inhibitory neurons. However, most of these
studies have focused either on the case in which the noise
             in the network is absent or weak, or on the opposite
             situation in which it is strong. Hence, a full picture of how noise affects the
             dynamics of such systems is still lacking. The aim of this
             letter is to provide a more comprehensive understanding of
             the mechanisms by which the asynchronous states in large,
             fully connected networks of inhibitory neurons are
             destabilized as a function of the noise level. Three types
             of single neuron models are considered: the leaky
             integrate-and-fire (LIF) model, the exponential
integrate-and-fire (EIF) model, and conductance-based models
             involving sodium and potassium Hodgkin-Huxley (HH) currents.
             We show that in all models, the instabilities of the
             asynchronous state can be classified in two classes. The
             first one consists of clustering instabilities, which exist
             in a restricted range of noise. These instabilities lead to
             synchronous patterns in which the population of neurons is
             broken into clusters of synchronously firing neurons. The
             irregularity of the firing patterns of the neurons is weak.
             The second class of instabilities, termed oscillatory firing
             rate instabilities, exists at any value of noise. They lead
to cluster states at low noise. As the noise is increased,
             the instability occurs at larger coupling, and the pattern
             of firing that emerges becomes more irregular. In the regime
             of high noise and strong coupling, these instabilities lead
             to stochastic oscillations in which neurons fire in an
             approximately Poisson way with a common instantaneous
             probability of firing that oscillates in
             time.},
   Doi = {10.1162/089976606776241048},
   Key = {fds328497}
}

@article{fds328498,
   Author = {Roxin, A and Brunel, N and Hansel, D},
   Title = {Rate models with delays and the dynamics of large networks
             of spiking neurons},
   Journal = {Progress of Theoretical Physics Supplement},
   Volume = {161},
   Pages = {68-85},
   Publisher = {Oxford University Press (OUP)},
   Year = {2006},
   Month = {June},
   url = {http://dx.doi.org/10.1143/PTPS.161.68},
   Abstract = {We investigate the dynamics of a one-dimensional network of
             spiking neurons with spatially modulated excitatory and
             inhibitory interactions through extensive numerical
             simulations. We find that the network displays a rich
             repertoire of dynamical states as a function of the
             interaction parameters, including homogeneous oscillations,
             oscillatory bumps, traveling waves, lurching waves, standing
             waves, quasi-periodic and chaotic states as well as regimes
             of multistability. Combining analytical calculations and
             simulations we show that similar dynamics are found in a
             reduced rate model provided that the interactions are
             delayed.},
   Doi = {10.1143/PTPS.161.68},
   Key = {fds328498}
}

@article{fds328495,
   Author = {Baldassi, C and Braunstein, A and Brunel, N and Zecchina,
             R},
   Title = {Efficient supervised learning in networks with binary
             synapses.},
   Journal = {Proc. Natl. Acad. Sci. USA},
   Volume = {104},
   Number = {26},
   Pages = {11079-11084},
   Year = {2007},
   url = {http://dx.doi.org/10.1073/pnas.0700324104},
   Abstract = {Recent experimental studies indicate that synaptic changes
             induced by neuronal activity are discrete jumps between a
             small number of stable states. Learning in systems with
             discrete synapses is known to be a computationally hard
             problem. Here, we study a neurobiologically plausible
             on-line learning algorithm that derives from belief
             propagation algorithms. We show that it performs remarkably
             well in a model neuron with binary synapses, and a finite
             number of "hidden" states per synapse, that has to learn a
             random classification task. Such a system is able to learn a
             number of associations close to the theoretical limit in
             time that is sublinear in system size. This is to our
knowledge the first on-line algorithm able to efficiently
             achieve a finite number of patterns learned per
             binary synapse. Furthermore, we show that performance is
             optimal for a finite number of hidden states that becomes
             very small for sparse coding. The algorithm is similar to
             the standard "perceptron" learning algorithm, with an
             additional rule for synaptic transitions that occur only if
             a currently presented pattern is "barely correct." In this
             case, the synaptic changes are metaplastic only (change in
             hidden states and not in actual synaptic state), stabilizing
             the synapse in its current state. Finally, we show that a
             system with two visible states and K hidden states is much
             more robust to noise than a system with K visible states. We
             suggest that this rule is sufficiently simple to be easily
             implemented by neurobiological systems or in
             hardware.},
   Doi = {10.1073/pnas.0700324104},
   Key = {fds328495}
}
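
A simplified caricature of on-line learning with binary synapses and
hidden states, loosely inspired by the entry above. This is a
clipped-perceptron sketch, not the belief-propagation-derived rule of
the paper; the sizes, number of hidden states and "barely correct"
threshold are all assumptions.

    import numpy as np

    rng = np.random.default_rng(5)
    N, P, K = 1001, 300, 4            # synapses, patterns, hidden depth
    theta_m = 20                      # "barely correct" threshold (assumed)
    X = rng.choice([-1, 1], size=(P, N))
    y = rng.choice([-1, 1], size=P)
    h = rng.integers(0, 2 * K, size=N)  # hidden states 0..2K-1

    for epoch in range(200):
        errors = 0
        for m in rng.permutation(P):
            w = np.where(h >= K, 1, -1)         # visible binary weights
            s = int(y[m] * (w @ X[m]))
            if s <= 0:                          # error: push hidden states
                h = np.clip(h + y[m] * X[m], 0, 2 * K - 1)
                errors += 1
            elif s <= theta_m:                  # barely correct: harden only
                agree = w * X[m] * y[m] == 1    # synapses that voted right
                h = np.clip(h + agree * w, 0, 2 * K - 1)
        if errors == 0:
            break

    w = np.where(h >= K, 1, -1)
    print("training accuracy:",
          round(float(np.mean(np.sign(X @ w) == y)), 3))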

@article{fds328496,
   Author = {Barbieri, F and Brunel, N},
   Title = {Irregular persistent activity induced by synaptic excitatory
             feedback.},
   Journal = {Front Comput Neurosci},
   Volume = {1},
   Pages = {5},
   Year = {2007},
   url = {http://dx.doi.org/10.3389/neuro.10.005.2007},
   Abstract = {Neurophysiological experiments on monkeys have reported
             highly irregular persistent activity during the performance
             of an oculomotor delayed-response task. These experiments
             show that during the delay period the coefficient of
             variation (CV) of interspike intervals (ISI) of prefrontal
             neurons is above 1, on average, and larger than during the
             fixation period. In the present paper, we show that this
             feature can be reproduced in a network in which persistent
             activity is induced by excitatory feedback, provided that
(i) the post-spike reset is close enough to threshold, and (ii)
             synaptic efficacies are a non-linear function of the
             pre-synaptic firing rate. Non-linearity between pre-synaptic
             rate and effective synaptic strength is implemented by a
             standard short-term depression mechanism (STD). First, we
             consider the simplest possible network with excitatory
             feedback: a fully connected homogeneous network of
             excitatory leaky integrate-and-fire neurons, using both
             numerical simulations and analytical techniques. The results
             are then confirmed in a network with selective excitatory
neurons and inhibition. In both cases there is a large
             range of values of the synaptic efficacies for which the
firing statistics of single cells are similar to
             experimental data.},
   Doi = {10.3389/neuro.10.005.2007},
   Key = {fds328496}
}

@article{fds328494,
   Author = {Graupner, M and Brunel, N},
   Title = {STDP in a bistable synapse model based on CaMKII and
             associated signaling pathways.},
   Journal = {PLoS Comput Biol},
   Volume = {3},
   Number = {11},
   Pages = {e221},
   Year = {2007},
   Month = {November},
   url = {http://dx.doi.org/10.1371/journal.pcbi.0030221},
   Abstract = {The calcium/calmodulin-dependent protein kinase II (CaMKII)
             plays a key role in the induction of long-term postsynaptic
             modifications following calcium entry. Experiments suggest
             that these long-term synaptic changes are all-or-none
             switch-like events between discrete states. The biochemical
             network involving CaMKII and its regulating protein
             signaling cascade has been hypothesized to durably maintain
             the evoked synaptic state in the form of a bistable switch.
             However, it is still unclear whether experimental LTP/LTD
             protocols lead to corresponding transitions between the two
             states in realistic models of such a network. We present a
             detailed biochemical model of the CaMKII autophosphorylation
             and the protein signaling cascade governing the CaMKII
             dephosphorylation. As previously shown, two stable states of
             the CaMKII phosphorylation level exist at resting
             intracellular calcium concentration, and high calcium
             transients can switch the system from the weakly
             phosphorylated (DOWN) to the highly phosphorylated (UP)
state of the CaMKII (similar to an LTP event). We show here
             that increased CaMKII dephosphorylation activity at
             intermediate Ca(2+) concentrations can lead to switching
from the UP to the DOWN state (similar to an LTD event). This
             can be achieved if protein phosphatase activity promoting
             CaMKII dephosphorylation activates at lower Ca(2+) levels
             than kinase activity. Finally, it is shown that the CaMKII
             system can qualitatively reproduce results of plasticity
             outcomes in response to spike-timing dependent plasticity
             (STDP) and presynaptic stimulation protocols. This shows
             that the CaMKII protein network can account for both
             induction, through LTP/LTD-like transitions, and storage,
             due to its bistability, of synaptic changes.},
   Doi = {10.1371/journal.pcbi.0030221},
   Key = {fds328494}
}
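
A toy sketch of the bistable-switch idea above: a scalar
"phosphorylation level" x with stable DOWN (near 0) and UP (near 1)
states, plus calcium-activated kinase and phosphatase terms in which
the phosphatase activates at lower calcium. A high-calcium transient
then switches DOWN to UP (LTP-like) and an intermediate-calcium
transient switches UP to DOWN (LTD-like). This is a caricature with
assumed rates, not the paper's detailed biochemical network.

    def dxdt(x, ca):
        kinase = 3.0 * ca**2 / (ca**2 + 4.0**2)       # high-Ca activation
        phosphatase = 1.5 * ca**2 / (ca**2 + 1.0**2)  # lower-Ca activation
        bistable = 6.0 * x * (1.0 - x) * (x - 0.5)    # double well at 0 and 1
        return bistable + kinase * (1.0 - x) - phosphatase * x

    dt = 0.01
    for ca_pulse, x0, label in [(10.0, 0.1, "high-Ca pulse (LTP-like)"),
                                (2.0, 0.9, "intermediate-Ca pulse (LTD-like)")]:
        x = x0
        for t in range(200000):
            ca = ca_pulse if t < 50000 else 0.05  # transient, then resting Ca
            x += dt * dxdt(x, ca)
        print(label, "-> final x =", round(x, 2))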

@article{fds328492,
   Author = {Barbour, B and Brunel, N and Hakim, V and Nadal, J-P},
   Title = {What can we learn from synaptic weight distributions?},
   Journal = {Trends Neurosci},
   Volume = {30},
   Number = {12},
   Pages = {622-629},
   Year = {2007},
   Month = {December},
   url = {http://dx.doi.org/10.1016/j.tins.2007.09.005},
   Abstract = {Much research effort into synaptic plasticity has been
             motivated by the idea that modifications of synaptic weights
             (or strengths or efficacies) underlie learning and memory.
             Here, we examine the possibility of exploiting the
             statistics of experimentally measured synaptic weights to
             deduce information about the learning process. Analysing
             distributions of synaptic weights requires a theoretical
             framework to interpret the experimental measurements, but
             the results can be unexpectedly powerful, yielding strong
             constraints on possible learning theories as well as
             information that is difficult to obtain by other means, such
             as the information storage capacity of a cell. We review the
             available experimental and theoretical techniques as well as
             important open issues.},
   Doi = {10.1016/j.tins.2007.09.005},
   Key = {fds328492}
}

@article{fds328493,
   Author = {Brunel, N and van Rossum, MCW},
   Title = {Lapicque's 1907 paper: from frogs to integrate-and-fire.},
   Journal = {Biol Cybern},
   Volume = {97},
   Number = {5-6},
   Pages = {337-339},
   Year = {2007},
   Month = {December},
   url = {http://dx.doi.org/10.1007/s00422-007-0190-0},
   Abstract = {Exactly 100 years ago, Louis Lapicque published a paper on
             the excitability of nerves that is often cited in the
             context of integrate-and-fire neurons. We discuss Lapicque's
             contributions along with a translation of the original
             publication.},
   Doi = {10.1007/s00422-007-0190-0},
   Key = {fds328493}
}

@article{fds328491,
   Author = {Battaglia, D and Brunel, N and Hansel, D},
   Title = {Temporal decorrelation of collective oscillations in neural
             networks with local inhibition and long-range
             excitation.},
   Journal = {Phys Rev Lett},
   Volume = {99},
   Number = {23},
   Pages = {238106},
   Year = {2007},
   Month = {December},
   url = {http://dx.doi.org/10.1103/PhysRevLett.99.238106},
   Abstract = {We consider two neuronal networks coupled by long-range
             excitatory interactions. Oscillations in the gamma frequency
             band are generated within each network by local inhibition.
             When long-range excitation is weak, these oscillations phase
             lock with a phase shift dependent on the strength of local
             inhibition. Increasing the strength of long-range excitation
             induces a transition to chaos via period doubling or
             quasiperiodic scenarios. In the chaotic regime, oscillatory
             activity undergoes fast temporal decorrelation. The
             generality of these dynamical properties is assessed in
             firing-rate models as well as in large networks of
             conductance-based neurons.},
   Doi = {10.1103/PhysRevLett.99.238106},
   Key = {fds328491}
}

@article{fds328490,
   Author = {Brunel, N},
   Title = {Daniel Amit (1938-2007).},
   Journal = {Network},
   Volume = {19},
   Number = {1},
   Pages = {3-8},
   Year = {2008},
   url = {http://dx.doi.org/10.1080/09548980801915391},
   Doi = {10.1080/09548980801915391},
   Key = {fds328490}
}

@article{fds328489,
   Author = {Brunel, N and Hakim, V},
   Title = {Sparsely synchronized neuronal oscillations.},
   Journal = {Chaos},
   Volume = {18},
   Number = {1},
   Pages = {015113},
   Year = {2008},
   Month = {March},
   url = {http://dx.doi.org/10.1063/1.2779858},
   Abstract = {We discuss here the properties of fast global oscillations
             that emerge in networks of neurons firing irregularly at a
             low rate. We first provide a simple introduction to these
             sparsely synchronized oscillations, then show how they can
             be studied analytically in the simple setting of rate models
             and leaky integrate-and-fire neurons, and finally describe
             how various neurophysiological features can be incorporated
in this framework. We end with a comparison of experimental
             data and theoretical results.},
   Doi = {10.1063/1.2779858},
   Key = {fds328489}
}

@article{fds328488,
   Author = {de Solages, C and Szapiro, G and Brunel, N and Hakim, V and Isope, P and Buisseret, P and Rousseau, C and Barbour, B and Léna,
             C},
   Title = {High-frequency organization and synchrony of activity in the
             purkinje cell layer of the cerebellum.},
   Journal = {Neuron},
   Volume = {58},
   Number = {5},
   Pages = {775-788},
   Year = {2008},
   Month = {June},
   url = {http://dx.doi.org/10.1016/j.neuron.2008.05.008},
   Abstract = {The cerebellum controls complex, coordinated, and rapid
             movements, a function requiring precise timing abilities.
             However, the network mechanisms that underlie the temporal
             organization of activity in the cerebellum are largely
             unexplored, because in vivo recordings have usually targeted
             single units. Here, we use tetrode and multisite recordings
             to demonstrate that Purkinje cell activity is synchronized
             by a high-frequency (approximately 200 Hz) population
             oscillation. We combine pharmacological experiments and
             modeling to show how the recurrent inhibitory connections
             between Purkinje cells are sufficient to generate these
             oscillations. A key feature of these oscillations is a fixed
             population frequency that is independent of the firing rates
             of the individual cells. Convergence in the deep cerebellar
             nuclei of Purkinje cell activity, synchronized by these
oscillations, likely organizes the cerebellar output
             temporally.},
   Doi = {10.1016/j.neuron.2008.05.008},
   Key = {fds328488}
}

@article{fds328487,
   Author = {Barbieri, F and Brunel, N},
   Title = {Can attractor network models account for the statistics of
             firing during persistent activity in prefrontal
             cortex?},
   Journal = {Front Neurosci},
   Volume = {2},
   Number = {1},
   Pages = {114-122},
   Year = {2008},
   Month = {July},
   url = {http://dx.doi.org/10.3389/neuro.01.003.2008},
   Abstract = {Persistent activity observed in neurophysiological
             experiments in monkeys is thought to be the neuronal
             correlate of working memory. Over the last decade, network
             modellers have strived to reproduce the main features of
             these experiments. In particular, attractor network models
have been proposed in which a non-selective attractor state
             with low background activity coexists with selective
             attractor states in which sub-groups of
             neurons fire at rates which are higher (but not much higher)
             than background rates. A recent detailed statistical
             analysis of the data seems however to challenge such
             attractor models: the data indicates that firing during
             persistent activity is highly irregular (with an average CV
             larger than 1), while models predict a more regular firing
             process (CV smaller than 1). We discuss here recent
proposals that make it possible to reproduce this feature of
             the experiments.},
   Doi = {10.3389/neuro.01.003.2008},
   Key = {fds328487}
}

@article{fds328486,
   Author = {Roxin, A and Hakim, V and Brunel, N},
   Title = {The statistics of repeating patterns of cortical activity
             can be reproduced by a model network of stochastic binary
             neurons.},
   Journal = {J Neurosci},
   Volume = {28},
   Number = {42},
   Pages = {10734-10745},
   Year = {2008},
   Month = {October},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.1016-08.2008},
   Abstract = {Calcium imaging of the spontaneous activity in cortical
             slices has revealed repeating spatiotemporal patterns of
             transitions between so-called down states and up states
             (Ikegaya et al., 2004). Here we fit a model network of
             stochastic binary neurons to data from these experiments,
             and in doing so reproduce the distributions of such
             patterns. We use two versions of this model: (1) an
             unconnected network in which neurons are activated as
             independent Poisson processes; and (2) a network with an
             interaction matrix, estimated from the data, representing
             effective interactions between the neurons. The unconnected
             model (model 1) is sufficient to account for the statistics
             of repeating patterns in 11 of the 15 datasets studied.
             Model 2, with interactions between neurons, is required to
             account for pattern statistics of the remaining four. Three
             of these four datasets are the ones that contain the largest
             number of transitions, suggesting that long datasets are in
             general necessary to render interactions statistically
             visible. We then study the topology of the matrix of
             interactions estimated for these four datasets. For three of
             the four datasets, we find sparse matrices with long-tailed
             degree distributions and an overrepresentation of certain
             network motifs. The remaining dataset exhibits a strongly
             interconnected, spatially localized subgroup of neurons. In
             all cases, we find that interactions between neurons
             facilitate the generation of long patterns that do not
             repeat exactly.},
   Doi = {10.1523/JNEUROSCI.1016-08.2008},
   Key = {fds328486}
}

@article{fds328485,
   Author = {Mazzoni, A and Panzeri, S and Logothetis, NK and Brunel,
             N},
   Title = {Encoding of naturalistic stimuli by local field potential
             spectra in networks of excitatory and inhibitory
             neurons.},
   Journal = {PLoS Comput Biol},
   Volume = {4},
   Number = {12},
   Pages = {e1000239},
   Year = {2008},
   Month = {December},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1000239},
   Abstract = {Recordings of local field potentials (LFPs) reveal that the
             sensory cortex displays rhythmic activity and fluctuations
             over a wide range of frequencies and amplitudes. Yet, the
             role of this kind of activity in encoding sensory
             information remains largely unknown. To understand the rules
             of translation between the structure of sensory stimuli and
             the fluctuations of cortical responses, we simulated a
             sparsely connected network of excitatory and inhibitory
             neurons modeling a local cortical population, and we
             determined how the LFPs generated by the network encode
             information about input stimuli. We first considered simple
             static and periodic stimuli and then naturalistic input
             stimuli based on electrophysiological recordings from the
             thalamus of anesthetized monkeys watching natural movie
             scenes. We found that the simulated network produced
             stimulus-related LFP changes that were in striking agreement
             with the LFPs obtained from the primary visual cortex.
             Moreover, our results demonstrate that the network encoded
             static input spike rates into gamma-range oscillations
             generated by inhibitory-excitatory neural interactions and
             encoded slow dynamic features of the input into slow LFP
             fluctuations mediated by stimulus-neural interactions. The
             model cortical network processed dynamic stimuli with
             naturalistic temporal structure by using low and high
             response frequencies as independent communication channels,
             again in agreement with recent reports from visual cortex
             responses to naturalistic movies. One potential function of
             this frequency decomposition into independent information
             channels operated by the cortical network may be that of
             enhancing the capacity of the cortical column to encode our
             complex sensory environment.},
   Doi = {10.1371/journal.pcbi.1000239},
   Key = {fds328485}
}

@article{fds328484,
   Author = {Brunel, N and Hakim, V},
   Title = {Neuronal Dynamics},
   Pages = {6099-6116},
   Booktitle = {Encyclopedia of Complexity and Systems Science},
   Publisher = {Springer New York},
   Editor = {Meyers, RA},
   Year = {2009},
   ISBN = {9780387758886},
   url = {http://dx.doi.org/10.1007/978-0-387-30440-3_359},
   Doi = {10.1007/978-0-387-30440-3_359},
   Key = {fds328484}
}

@article{fds366925,
   Author = {Brunel, N},
   Title = {Modeling Point Neurons: From Hodgkin-Huxley to
             Integrate-and-Fire},
   Pages = {161-185},
Booktitle = {Computational Modeling Methods for Neuroscientists},
   Year = {2009},
   Key = {fds366925}
}

@article{fds328482,
   Author = {Dugué, GP and Brunel, N and Hakim, V and Schwartz, E and Chat, M and Lévesque, M and Courtemanche, R and Léna, C and Dieudonné,
             S},
   Title = {Electrical coupling mediates tunable low-frequency
             oscillations and resonance in the cerebellar Golgi cell
             network.},
   Journal = {Neuron},
   Volume = {61},
   Number = {1},
   Pages = {126-139},
   Year = {2009},
   Month = {January},
   url = {http://dx.doi.org/10.1016/j.neuron.2008.11.028},
   Abstract = {Tonic motor control involves oscillatory synchronization of
             activity at low frequency (5-30 Hz) throughout the
             sensorimotor system, including cerebellar areas. We
             investigated the mechanisms underpinning cerebellar
             oscillations. We found that Golgi interneurons, which gate
             information transfer in the cerebellar cortex input layer,
             are extensively coupled through electrical synapses. When
             depolarized in vitro, these neurons displayed low-frequency
             oscillatory synchronization, imposing rhythmic inhibition
             onto granule cells. Combining experiments and modeling, we
             show that electrical transmission of the spike
             afterhyperpolarization is the essential component for
             oscillatory population synchronization. Rhythmic firing
             arises in spite of strong heterogeneities, is frequency
             tuned by the mean excitatory input to Golgi cells, and
             displays pronounced resonance when the modeled network is
             driven by oscillating inputs. In vivo, unitary Golgi cell
             activity was found to synchronize with low-frequency LFP
             oscillations occurring during quiet waking. These results
             suggest a major role for Golgi cells in coordinating
             cerebellar sensorimotor integration during oscillatory
             interactions.},
   Doi = {10.1016/j.neuron.2008.11.028},
   Key = {fds328482}
}

@article{fds328481,
   Author = {Zillmer, R and Brunel, N and Hansel, D},
   Title = {Very long transients, irregular firing, and chaotic dynamics
             in networks of randomly connected inhibitory
             integrate-and-fire neurons.},
   Journal = {Phys Rev E Stat Nonlin Soft Matter Phys},
   Volume = {79},
   Number = {3 Pt 1},
   Pages = {031909},
   Year = {2009},
   Month = {March},
   url = {http://dx.doi.org/10.1103/PhysRevE.79.031909},
   Abstract = {We present results of an extensive numerical study of the
             dynamics of networks of integrate-and-fire neurons connected
             randomly through inhibitory interactions. We first consider
             delayed interactions with infinitely fast rise and decay.
             Depending on the parameters, the network displays transients
             which are short or exponentially long in the network size.
             At the end of these transients, the dynamics settle on a
             periodic attractor. If the number of connections per neuron
is large (approximately 1000), this attractor is a cluster
             state with a short period. In contrast, if the number of
             connections per neuron is small (approximately 100), the
             attractor has complex dynamics and a very long period. During
             the long transients the neurons fire in a highly irregular
             manner. They can be viewed as quasistationary states in
             which, depending on the coupling strength, the pattern of
             activity is asynchronous or displays population
             oscillations. In the first case, the average firing rates
             and the variability of the single-neuron activity are well
             described by a mean-field theory valid in the thermodynamic
             limit. Bifurcations of the long transient dynamics from
             asynchronous to synchronous activity are also well predicted
             by this theory. The transient dynamics display features
             reminiscent of stable chaos. In particular, despite being
             linearly stable, the trajectories of the transient dynamics
are destabilized by finite perturbations as small as
             O(1/N). We further show that stable chaos is also observed for
             postsynaptic currents with finite decay time. However, we
report that in this type of network, chaotic dynamics
             characterized by positive Lyapunov exponents can also be
             observed. We show in fact that chaos occurs when the decay
             time of the synaptic currents is long compared to the
             synaptic delay, provided that the network is sufficiently
             large.},
   Doi = {10.1103/PhysRevE.79.031909},
   Key = {fds328481}
}

@article{fds328480,
   Author = {Ostojic, S and Brunel, N and Hakim, V},
   Title = {Synchronization properties of networks of electrically
             coupled neurons in the presence of noise and
             heterogeneities.},
   Journal = {J Comput Neurosci},
   Volume = {26},
   Number = {3},
   Pages = {369-392},
   Year = {2009},
   Month = {June},
   url = {http://dx.doi.org/10.1007/s10827-008-0117-3},
   Abstract = {We investigate how synchrony can be generated or induced in
             networks of electrically coupled integrate-and-fire neurons
             subject to noisy and heterogeneous inputs. Using analytical
             tools, we find that in a network under constant external
             inputs, synchrony can appear via a Hopf bifurcation from the
             asynchronous state to an oscillatory state. In a homogeneous
network, in the oscillatory state all neurons fire in
             synchrony, while in a heterogeneous network synchrony is
             looser, many neurons skipping cycles of the oscillation. If
             the transmission of action potentials via the electrical
             synapses is effectively excitatory, the Hopf bifurcation is
             supercritical, while effectively inhibitory transmission due
             to pronounced hyperpolarization leads to a subcritical
             bifurcation. In the latter case, the network exhibits
             bistability between an asynchronous state and an oscillatory
             state where all the neurons fire in synchrony. Finally we
             show that for time-varying external inputs, electrical
             coupling enhances the synchronization in an asynchronous
             network via a resonance at the firing-rate
             frequency.},
   Doi = {10.1007/s10827-008-0117-3},
   Key = {fds328480}
}

@article{fds328479,
   Author = {Ostojic, S and Brunel, N and Hakim, V},
   Title = {How connectivity, background activity, and synaptic
             properties shape the cross-correlation between spike
             trains.},
   Journal = {J Neurosci},
   Volume = {29},
   Number = {33},
   Pages = {10234-10253},
   Year = {2009},
   Month = {August},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.1275-09.2009},
   Abstract = {Functional interactions between neurons in vivo are often
             quantified by cross-correlation functions (CCFs) between
             their spike trains. It is therefore essential to understand
             quantitatively how CCFs are shaped by different factors,
             such as connectivity, synaptic parameters, and background
             activity. Here, we study the CCF between two neurons using
             analytical calculations and numerical simulations. We
             quantify the role of synaptic parameters, such as peak
             conductance, decay time, and reversal potential, and analyze
             how various patterns of connectivity influence CCF shapes.
             In particular, we find that the symmetry of the CCF
             distinguishes in general, but not always, the case of shared
             inputs between two neurons from the case in which they are
             directly synaptically connected. We systematically examine
             the influence of background synaptic inputs from the
             surrounding network that set the baseline firing statistics
             of the neurons and modulate their response properties. We
             find that variations in the background noise modify the
             amplitude of the cross-correlation function as strongly as
             variations of synaptic strength. In particular, we show that
             the postsynaptic neuron spiking regularity has a pronounced
             influence on CCF amplitude. This suggests an efficient and
             flexible mechanism for modulating functional
             interactions.},
   Doi = {10.1523/JNEUROSCI.1275-09.2009},
   Key = {fds328479}
}
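
For concreteness, a small sketch of the quantity studied above: a
binned spike-train cross-correlogram, computed for two Poisson trains
in which the second neuron fires with raised probability 5 ms after
the first, a caricature of a direct synaptic connection. Rates,
transmission probability and window are illustrative assumptions.

    import numpy as np

    rng = np.random.default_rng(7)
    dt, T, rate = 1.0, 1e6, 0.005     # bin (ms), duration (ms), spikes/ms
    n = int(T / dt)
    s1 = rng.random(n) < rate * dt    # "presynaptic" Poisson train
    s2 = rng.random(n) < rate * dt    # "postsynaptic" background train
    lag = int(5 / dt)                 # 5 ms latency
    s2[lag:] |= s1[:-lag] & (rng.random(n - lag) < 0.3)  # 30% transmission
    s1, s2 = s1.astype(float), s2.astype(float)

    def ccf_at(l):
        """Normalized cross-correlation <s1(t) s2(t+l)> / (<s1><s2>)."""
        if l >= 0:
            c = np.mean(s1[: n - l] * s2[l:])
        else:
            c = np.mean(s1[-l:] * s2[: n + l])
        return c / (s1.mean() * s2.mean())   # 1 = independent trains

    lags = np.arange(-20, 21)
    ccf = np.array([ccf_at(int(l)) for l in lags])
    peak = int(np.argmax(ccf))
    print("CCF peak at lag", lags[peak] * dt, "ms, height",
          round(float(ccf[peak]), 1))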

@article{fds328483,
   Author = {Graupner, M and Brunel, N},
Title = {A bistable synaptic model with transitions between states
             induced by calcium dynamics: theory vs experiment},
   Journal = {BMC Neuroscience},
   Volume = {10},
   Number = {S1},
   Pages = {O15-O15},
   Publisher = {Springer Science and Business Media LLC},
   Year = {2009},
   Month = {September},
   url = {http://dx.doi.org/10.1186/1471-2202-10-s1-o15},
   Doi = {10.1186/1471-2202-10-s1-o15},
   Key = {fds328483}
}

@article{fds328478,
   Author = {Brunel, N and Lavigne, F},
   Title = {Semantic priming in a cortical network model.},
   Journal = {J Cogn Neurosci},
   Volume = {21},
   Number = {12},
   Pages = {2300-2319},
   Year = {2009},
   Month = {December},
   url = {http://dx.doi.org/10.1162/jocn.2008.21156},
   Abstract = {Contextual recall in humans relies on the semantic
             relationships between items stored in memory. These
             relationships can be probed by priming experiments. Such
             experiments have revealed a rich phenomenology on how
             reaction times depend on various factors such as strength
             and nature of associations, time intervals between stimulus
             presentations, and so forth. Experimental protocols on
             humans present striking similarities with pair association
             task experiments in monkeys. Electrophysiological recordings
             of cortical neurons in such tasks have found two types of
             task-related activity, "retrospective" (related to a
             previously shown stimulus), and "prospective" (related to a
             stimulus that the monkey expects to appear, due to learned
             association between both stimuli). Mathematical models of
             cortical networks allow theorists to understand the link
             between the physiology of single neurons and synapses, and
             network behavior giving rise to retrospective and/or
             prospective activity. Here, we show that this type of
             network model can account for a large variety of priming
             effects. Furthermore, the model allows us to interpret
             semantic priming differences between the two hemispheres as
             depending on a single association strength
             parameter.},
   Doi = {10.1162/jocn.2008.21156},
   Key = {fds328478}
}

@article{fds328477,
   Author = {Graupner, M and Brunel, N},
   Title = {Mechanisms of induction and maintenance of spike-timing
             dependent plasticity in biophysical synapse
             models.},
   Journal = {Front Comput Neurosci},
   Volume = {4},
   Year = {2010},
   url = {http://dx.doi.org/10.3389/fncom.2010.00136},
   Abstract = {We review biophysical models of synaptic plasticity, with a
             focus on spike-timing dependent plasticity (STDP). The
             common property of the discussed models is that synaptic
             changes depend on the dynamics of the intracellular calcium
             concentration, which itself depends on pre- and postsynaptic
             activity. We start by discussing simple models in which
             plasticity changes are based directly on calcium amplitude
             and dynamics. We then consider models in which dynamic
             intracellular signaling cascades form the link between the
             calcium dynamics and the plasticity changes. Both mechanisms
             of induction of STDP (through the ability of
             pre/postsynaptic spikes to evoke changes in the state of the
             synapse) and of maintenance of the evoked changes (through
             bistability) are discussed.},
   Doi = {10.3389/fncom.2010.00136},
   Key = {fds328477}
}
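
As a concrete example of the simplest model class reviewed above, the
sketch below implements a calcium-threshold rule: a calcium trace
jumps at pre- and postsynaptic spikes, and the weight potentiates
while calcium exceeds a high threshold theta_p and depresses while it
lies between theta_d and theta_p. All amplitudes, thresholds and
rates are illustrative assumptions.

    import numpy as np

    tau_ca, c_pre, c_post = 20.0, 1.0, 2.0  # Ca decay (ms), jump sizes
    theta_d, theta_p = 1.0, 2.1             # depression / potentiation thresholds
    gamma_d, gamma_p = 0.005, 0.025         # depression / potentiation rates
    dt = 0.1

    def weight_after_pairing(delta_t, n_pairs=10, period=500.0):
        """Weight after pairs in which post follows pre by delta_t (ms)."""
        T = n_pairs * period + 200.0
        pre = np.arange(100.0, n_pairs * period, period)
        pre_bins = set((pre / dt).round().astype(int))
        post_bins = set(((pre + delta_t) / dt).round().astype(int))
        w, ca = 1.0, 0.0
        for i in range(int(T / dt)):
            ca *= 1.0 - dt / tau_ca            # exponential Ca decay
            if i in pre_bins:
                ca += c_pre
            if i in post_bins:
                ca += c_post
            if ca > theta_p:                   # soft-bounded LTP
                w += gamma_p * dt * (2.0 - w)
            elif ca > theta_d:                 # soft-bounded LTD
                w -= gamma_d * dt * w
        return w

    for delta_t in (-20.0, -10.0, 10.0, 20.0):
        print("delta_t =", delta_t, "ms -> w =",
              round(weight_after_pairing(delta_t), 3))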

@article{fds328476,
   Author = {Panzeri, S and Brunel, N and Logothetis, NK and Kayser,
             C},
   Title = {Sensory neural codes using multiplexed temporal
             scales.},
   Journal = {Trends Neurosci},
   Volume = {33},
   Number = {3},
   Pages = {111-120},
   Year = {2010},
   Month = {March},
   url = {http://dx.doi.org/10.1016/j.tins.2009.12.001},
   Abstract = {Determining how neuronal activity represents sensory
             information is central for understanding perception. Recent
             work shows that neural responses at different timescales can
             encode different stimulus attributes, resulting in a
             temporal multiplexing of sensory information. Multiplexing
             increases the encoding capacity of neural responses, enables
             disambiguation of stimuli that cannot be discriminated at a
             single response timescale, and makes sensory representations
             robust to variability in the sensory world.
             Thus, as we discuss here, temporal multiplexing could be a
             key strategy used by the brain to form an information-rich
             and stable representation of the environment.},
   Doi = {10.1016/j.tins.2009.12.001},
   Key = {fds328476}
}

@article{fds328475,
   Author = {Mazzoni, A and Whittingstall, K and Brunel, N and Logothetis, NK and Panzeri, S},
   Title = {Understanding the relationships between spike rate and
             delta/gamma frequency bands of LFPs and EEGs using a local
             cortical network model.},
   Journal = {Neuroimage},
   Volume = {52},
   Number = {3},
   Pages = {956-972},
   Year = {2010},
   Month = {September},
   url = {http://dx.doi.org/10.1016/j.neuroimage.2009.12.040},
   Abstract = {Despite the widespread use of EEGs to measure the
             large-scale dynamics of the human brain, little is known about
             how the dynamics of EEGs relates to that of the underlying
             spike rates of cortical neurons. However, progress was made
             by recent neurophysiological experiments reporting that EEG
             delta-band phase and gamma-band amplitude reliably predict
             some complementary aspects of the time course of spikes of
             visual cortical neurons. To elucidate the mechanisms behind
             these findings, here we hypothesize that the EEG delta phase
             reflects shifts of local cortical excitability arising from
             slow fluctuations in the network input due to entrainment to
             sensory stimuli or to fluctuations in ongoing activity, and
             that the resulting local excitability fluctuations modulate
             both the spike rate and the engagement of
             excitatory-inhibitory loops producing gamma-band
             oscillations. We quantitatively tested these hypotheses by
             simulating a recurrent network of excitatory and inhibitory
             neurons stimulated with dynamic inputs presenting temporal
             regularities similar to that of thalamic responses during
             naturalistic visual stimulation and during spontaneous
             activity. The network model reproduced in detail the
             experimental relationships between spike rate and EEGs, and
             suggested that the complementariness of the prediction of
             spike rates obtained from EEG delta phase or gamma amplitude
             arises from nonlinearities in the engagement of
             excitatory-inhibitory loops and from temporal modulations in
             the amplitude of the network input, which respectively limit
             the predictability of spike rates from gamma amplitude or
             delta phase alone. The model suggested also ways to improve
             and extend current algorithms for online prediction of spike
             rates from EEGs.},
   Doi = {10.1016/j.neuroimage.2009.12.040},
   Key = {fds328475}
}
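
%% The delta-phase / gamma-amplitude analysis described above can be
%% illustrated in a few lines. A minimal sketch, not code from the paper:
%% the toy signal, band edges (1-4 Hz delta, 50-100 Hz gamma) and sampling
%% rate are illustrative assumptions.

import numpy as np
from scipy.signal import butter, sosfiltfilt, hilbert

fs = 1000.0                                   # sampling rate (Hz), assumed
t = np.arange(0, 10, 1 / fs)
# Toy "EEG": a 2 Hz component whose phase modulates 60 Hz power.
slow = np.sin(2 * np.pi * 2 * t)
eeg = slow + 0.5 * (1 + slow) * np.sin(2 * np.pi * 60 * t) \
      + 0.1 * np.random.randn(t.size)

def bandpass(sig, lo, hi, fs, order=4):
    sos = butter(order, [lo, hi], btype="band", fs=fs, output="sos")
    return sosfiltfilt(sos, sig)

delta_phase = np.angle(hilbert(bandpass(eeg, 1, 4, fs)))    # delta phase
gamma_amp = np.abs(hilbert(bandpass(eeg, 50, 100, fs)))     # gamma envelope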

@article{fds328472,
   Author = {Ledoux, E and Brunel, N},
   Title = {Dynamics of networks of excitatory and inhibitory neurons in
             response to time-dependent inputs.},
   Journal = {Front Comput Neurosci},
   Volume = {5},
   Pages = {25},
   Year = {2011},
   url = {http://dx.doi.org/10.3389/fncom.2011.00025},
   Abstract = {We investigate the dynamics of recurrent networks of
             excitatory (E) and inhibitory (I) neurons in the presence of
             time-dependent inputs. The dynamics is characterized by the
             network dynamical transfer function, i.e., how the
             population firing rate is modulated by sinusoidal inputs at
             arbitrary frequencies. Two types of networks are studied and
             compared: (i) a Wilson-Cowan type firing rate model; and
             (ii) a fully connected network of leaky integrate-and-fire
             (LIF) neurons, in a strong noise regime. We first
             characterize the region of stability of the "asynchronous
             state" (a state in which population activity is constant in
             time when external inputs are constant) in the space of
             parameters characterizing the connectivity of the network.
             We then systematically characterize the qualitative
             behaviors of the dynamical transfer function, as a function
             of the connectivity. We find that the transfer function can
             be either low-pass, or with a single or double resonance,
             depending on the connection strengths and synaptic time
             constants. Resonances appear when the system is close to
             Hopf bifurcations, which can be induced by two separate
             mechanisms: the I-I connectivity and the E-I connectivity.
             Double resonances can appear when excitatory delays are
             larger than inhibitory delays, due to the fact that two
             distinct instabilities exist with a finite gap between the
             corresponding frequencies. In networks of LIF neurons,
             changes in external inputs and external noise are shown to
             be able to change qualitatively the network transfer
             function. Firing rate models are shown to exhibit the same
             diversity of transfer functions as the LIF network, provided
             delays are present. They can also exhibit input-dependent
             changes of the transfer function, provided a suitable static
             non-linearity is incorporated.},
   Doi = {10.3389/fncom.2011.00025},
   Key = {fds328472}
}
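
%% The qualitative behaviors of the dynamical transfer function discussed
%% above can be explored with a linearized, delayed two-population rate
%% model: (1 + i w tau_a) r_a = sum_b W_ab exp(-i w d_b) r_b + I_a. A
%% minimal sketch; all parameter values below are illustrative, not the
%% paper's.

import numpy as np

tau = np.array([10e-3, 10e-3])     # time constants (s): E, I populations
d = np.array([2e-3, 1e-3])         # synaptic delays (s): from E, from I
W = np.array([[1.5, -2.0],         # w_EE, w_EI (inhibition carries the sign)
              [2.5, -1.0]])        # w_IE, w_II

def transfer(f):
    """Modulation of r_E by a sinusoidal input to E at frequency f (Hz)."""
    w = 2j * np.pi * f
    A = np.diag(1 + w * tau) - W * np.exp(-w * d)[None, :]
    return np.linalg.solve(A, np.array([1.0, 0.0]))[0]

freqs = np.logspace(0, 3, 400)
gain = np.abs([transfer(f) for f in freqs])   # peaks indicate resonances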

@article{fds328473,
   Author = {Mazzoni, A and Brunel, N and Cavallari, S and Logothetis, NK and Panzeri, S},
   Title = {Cortical dynamics during naturalistic sensory stimulations:
             experiments and models.},
   Journal = {J Physiol Paris},
   Volume = {105},
   Number = {1-3},
   Pages = {2-15},
   Year = {2011},
   url = {http://dx.doi.org/10.1016/j.jphysparis.2011.07.014},
   Abstract = {We report the results of our experimental and theoretical
             investigations of the neural response dynamics in primary
             visual cortex (V1) during naturalistic visual stimulation.
             We recorded Local Field Potentials (LFPs) and spiking
             activity from V1 of anaesthetized macaques during binocular
             presentation of Hollywood color movies. We analyzed these
             recordings with information theoretic methods, and found
             that visual information was encoded mainly by two bands of
             LFP responses: the network fluctuations measured by the
             phase and power of low-frequency (less than 12 Hz) LFPs; and
             fast gamma-range (50-100 Hz) oscillations. Both the power
             and phase of low frequency LFPs carried information largely
             complementary to that carried by spikes, whereas gamma range
             oscillations carried information largely redundant to that
             of spikes. To interpret these results within a quantitative
             theoretical framework, we then simulated a sparsely
             connected recurrent network of excitatory and inhibitory
             neurons receiving slowly varying naturalistic inputs, and we
             determined how the LFPs generated by the network encoded
             information about the inputs. We found that this simulated
             recurrent network reproduced well the experimentally
             observed dependency of LFP information upon frequency. This
             network encoded the overall strength of the input into the
             power of gamma-range oscillations generated by
             inhibitory-excitatory neural interactions, and encoded slow
             variations in the input by entraining the network LFP at the
             corresponding frequency. This dynamical behavior accounted
             quantitatively for the independent information carried by
             high and low frequency LFPs, and for the experimentally
             observed cross-frequency coupling between phase of slow LFPs
             and the power of gamma LFPs. We also present new results
             showing that the model's dynamics accounted for the extra
             visual information that the phase of spike firing relative
             to low-frequency LFPs carries beyond that carried by spike
             rates.
             Overall, our results suggest biological mechanisms by which
             cortex can multiplex information about naturalistic sensory
             environments.},
   Doi = {10.1016/j.jphysparis.2011.07.014},
   Key = {fds328473}
}

@article{fds328474,
   Author = {Hamaguchi, K and Riehle, A and Brunel, N},
   Title = {Estimating network parameters from combined dynamics of
             firing rate and irregularity of single neurons.},
   Journal = {J Neurophysiol},
   Volume = {105},
   Number = {1},
   Pages = {487-500},
   Year = {2011},
   Month = {January},
   url = {http://dx.doi.org/10.1152/jn.00858.2009},
   Abstract = {High firing irregularity is a hallmark of cortical neurons
             in vivo, and modeling studies suggest a balance of
             excitation and inhibition is necessary to explain this high
             irregularity. Such a balance must be generated, at least
             partly, from local interconnected networks of excitatory and
             inhibitory neurons, but the details of the local network
             structure are largely unknown. The dynamics of the neural
             activity depends on the local network structure; this in
             turn suggests the possibility of estimating network
             structure from the dynamics of the firing statistics. Here
             we report a new method to estimate properties of the local
             cortical network from the instantaneous firing rate and
             irregularity (CV(2)) under the assumption that recorded
             neurons are a part of a randomly connected sparse network.
             The firing irregularity, measured in monkey motor cortex,
             exhibits two features: many neurons show relatively stable
             firing irregularity in time and across different task
             conditions, and the time-averaged CV(2) is widely distributed
             from quasi-regular to irregular (CV(2) = 0.3-1.0). For each
             recorded neuron, we estimate the three parameters of a local
             network [balance of local excitation-inhibition, number of
             recurrent connections per neuron, and excitatory
             postsynaptic potential (EPSP) size] that best describe the
             dynamics of the measured firing rates and irregularities.
             Our analysis shows that optimal parameter sets form a
             two-dimensional manifold in the three-dimensional parameter
             space that is confined for most of the neurons to the
             inhibition-dominated region. High irregularity neurons tend
             to be more strongly connected to the local network, either
             in terms of larger EPSP and inhibitory PSP size or larger
             number of recurrent connections, compared with the low
             irregularity neurons, for a given excitatory/inhibitory
             balance. Incorporating either synaptic short-term depression
             or conductance-based synapses leads many low CV(2) neurons
             to move to the excitation-dominated region as well as to an
             increase of EPSP size.},
   Doi = {10.1152/jn.00858.2009},
   Key = {fds328474}
}
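
%% Assuming CV(2) above denotes the standard local irregularity measure
%% CV_2 of Holt et al. (1996), it can be computed from spike times as
%% below; the example trains are illustrative.

import numpy as np

def cv2(spike_times):
    """Mean of 2|ISI_{i+1} - ISI_i| / (ISI_{i+1} + ISI_i) over the train."""
    isi = np.diff(np.sort(np.asarray(spike_times)))
    return np.mean(2 * np.abs(isi[1:] - isi[:-1]) / (isi[1:] + isi[:-1]))

rng = np.random.default_rng(0)
print(cv2(np.cumsum(rng.exponential(0.1, 1000))))   # Poisson train: ~1
print(cv2(np.arange(0, 100, 0.1)))                  # regular train: 0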

@article{fds328471,
   Author = {Ostojic, S and Brunel, N},
   Title = {From spiking neuron models to linear-nonlinear
             models.},
   Journal = {PLoS Comput Biol},
   Volume = {7},
   Number = {1},
   Pages = {e1001056},
   Year = {2011},
   Month = {January},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1001056},
   Abstract = {Neurons transform time-varying inputs into action potentials
             emitted stochastically at a time dependent rate. The mapping
             from current input to output firing rate is often
             represented with the help of phenomenological models such as
             the linear-nonlinear (LN) cascade, in which the output
             firing rate is estimated by applying to the input
             successively a linear temporal filter and a static
             non-linear transformation. These simplified models leave out
             the biophysical details of action potential generation. It
             is not a priori clear to what extent the input-output
             mapping of biophysically more realistic, spiking neuron
             models can be reduced to a simple linear-nonlinear cascade.
             Here we investigate this question for the leaky
             integrate-and-fire (LIF), exponential integrate-and-fire
             (EIF) and conductance-based Wang-Buzsáki models in the presence
             of background synaptic activity. We exploit available
             analytic results for these models to determine the
             corresponding linear filter and static non-linearity in a
             parameter-free form. We show that the obtained functions are
             identical to the linear filter and static non-linearity
             determined using standard reverse correlation analysis. We
             then quantitatively compare the output of the corresponding
             linear-nonlinear cascade with numerical simulations of
             spiking neurons, systematically varying the parameters of
             input signal and background noise. We find that the LN
             cascade provides accurate estimates of the firing rates of
             spiking neurons in most of parameter space. For the EIF and
             Wang-Buzsáki models, we show that the LN cascade can be
             reduced to a firing rate model, the timescale of which we
             determine analytically. Finally we introduce an adaptive
             timescale rate model in which the timescale of the linear
             filter depends on the instantaneous firing rate. This model
             leads to highly accurate estimates of instantaneous firing
             rates.},
   Doi = {10.1371/journal.pcbi.1001056},
   Key = {fds328471}
}
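
%% The LN cascade itself is simple to state in code: convolve the input
%% with a linear filter, then apply a static nonlinearity. The exponential
%% filter and threshold-linear nonlinearity below are generic placeholders;
%% the paper derives model-specific, parameter-free forms analytically.

import numpy as np

dt, tau = 1e-3, 20e-3
t = np.arange(0, 2, dt)
I = 0.5 + 0.2 * np.sin(2 * np.pi * 5 * t) + 0.05 * np.random.randn(t.size)

kernel = np.exp(-np.arange(0, 10 * tau, dt) / tau)
kernel /= kernel.sum()                        # normalized causal filter

filtered = np.convolve(I, kernel)[: t.size]   # linear stage
rate = 40.0 * np.maximum(filtered, 0.0)       # static nonlinearity (Hz)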

@article{fds328470,
   Author = {Roxin, A and Brunel, N and Hansel, D and Mongillo, G and van Vreeswijk,
             C},
   Title = {On the distribution of firing rates in networks of cortical
             neurons.},
   Journal = {J Neurosci},
   Volume = {31},
   Number = {45},
   Pages = {16217-16226},
   Year = {2011},
   Month = {November},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.1677-11.2011},
   Abstract = {The distribution of in vivo average firing rates within
             local cortical networks has been reported to be highly
             skewed and long tailed. The distribution of average
             single-cell inputs, conversely, is expected to be Gaussian
             by the central limit theorem. This raises the issue of how a
             skewed distribution of firing rates might result from a
             symmetric distribution of inputs. We argue that skewed rate
             distributions are a signature of the nonlinearity of the in
             vivo f-I curve. During in vivo conditions, ongoing synaptic
             activity produces significant fluctuations in the membrane
             potential of neurons, resulting in an expansive nonlinearity
             of the f-I curve for low and moderate inputs. Here, we
             investigate the effects of single-cell and network
             parameters on the shape of the f-I curve and, by extension,
             on the distribution of firing rates in randomly connected
             networks.},
   Doi = {10.1523/JNEUROSCI.1677-11.2011},
   Key = {fds328470}
}
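
%% The core argument, that a symmetric input distribution passed through
%% an expansive f-I curve yields a skewed rate distribution, fits in a few
%% lines. The power-law nonlinearity is one common choice, not the paper's
%% fitted in vivo curve.

import numpy as np
from scipy.stats import skew

rng = np.random.default_rng(1)
inputs = rng.normal(0.0, 1.0, 100_000)      # Gaussian single-cell inputs
rates = np.maximum(inputs, 0.0) ** 2.5      # expansive f-I nonlinearity
print(skew(inputs), skew(rates))            # ~0 vs. strongly positive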

@article{fds328469,
   Author = {Clopath, C and Nadal, J-P and Brunel, N},
   Title = {Storage of correlated patterns in standard and bistable
             Purkinje cell models.},
   Journal = {PLoS Comput Biol},
   Volume = {8},
   Number = {4},
   Pages = {e1002448},
   Year = {2012},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1002448},
   Abstract = {The cerebellum has long been considered to undergo
             supervised learning, with climbing fibers acting as a
             'teaching' or 'error' signal. Purkinje cells (PCs), the sole
             output of the cerebellar cortex, have been considered as
             analogs of perceptrons storing input/output associations. In
             support of this hypothesis, a recent study found that the
             distribution of synaptic weights of a perceptron at maximal
             capacity is in striking agreement with experimental data in
             adult rats. However, the calculation was performed using
             random uncorrelated inputs and outputs. This is a clearly
             unrealistic assumption since sensory inputs and motor
             outputs carry a substantial degree of temporal correlations.
             In this paper, we consider a binary output neuron with a
             large number of inputs, which is required to store
             associations between temporally correlated sequences of
             binary inputs and outputs, modelled as Markov chains.
             Storage capacity is found to increase with both input and
             output correlations, and diverges in the limit where both go
             to unity. We also investigate the capacity of a bistable
             output unit, since PCs have been shown to be bistable in
             some experimental conditions. Bistability is shown to
             enhance storage capacity whenever the output correlation is
             stronger than the input correlation. Distribution of
             synaptic weights at maximal capacity is shown to be
             independent of correlations, and is also unaffected by the
             presence of bistability.},
   Doi = {10.1371/journal.pcbi.1002448},
   Key = {fds328469}
}
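
%% Temporally correlated binary sequences of the kind used above can be
%% generated as two-state Markov chains with stationary coding level f
%% and one-step correlation c. A minimal sketch with illustrative values.

import numpy as np

def markov_binary(T, f, c, rng):
    p11 = f + c * (1 - f)        # P(x_t = 1 | x_{t-1} = 1)
    p01 = f * (1 - c)            # P(x_t = 1 | x_{t-1} = 0)
    x = np.empty(T, dtype=int)
    x[0] = rng.random() < f
    for i in range(1, T):
        x[i] = rng.random() < (p11 if x[i - 1] else p01)
    return x

seq = markov_binary(10_000, f=0.3, c=0.6, rng=np.random.default_rng(5))
print(seq.mean())                # ~0.3, the stationary coding level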

@article{fds328468,
   Author = {Graupner, M and Brunel, N},
   Title = {Calcium-based plasticity model explains sensitivity of
             synaptic changes to spike pattern, rate, and dendritic
             location.},
   Journal = {Proc Natl Acad Sci U S A},
   Volume = {109},
   Number = {10},
   Pages = {3991-3996},
   Year = {2012},
   Month = {March},
   url = {http://dx.doi.org/10.1073/pnas.1109359109},
   Abstract = {Multiple stimulation protocols have been found to be
             effective in changing synaptic efficacy by inducing
             long-term potentiation or depression. In many of those
             protocols, increases in postsynaptic calcium concentration
             have been shown to play a crucial role. However, it is still
             unclear whether and how the dynamics of the postsynaptic
             calcium alone determine the outcome of synaptic plasticity.
             Here, we propose a calcium-based model of a synapse in which
             potentiation and depression are activated above calcium
             thresholds. We show that this model gives rise to a large
             diversity of spike timing-dependent plasticity curves, most
             of which have been observed experimentally in different
             systems. It accounts quantitatively for plasticity outcomes
             evoked by protocols involving patterns with variable spike
             timing and firing rate in hippocampus and neocortex.
             Furthermore, it allows us to predict that differences in
             plasticity outcomes in different studies are due to
             differences in parameters defining the calcium dynamics. The
             model provides a mechanistic understanding of how various
             stimulation protocols provoke specific synaptic changes
             through the dynamics of calcium concentration and thresholds
             that implement, in a simplified fashion, the protein signaling
             cascades leading to long-term potentiation and long-term
             depression. The combination of biophysical realism and
             analytical tractability makes it the ideal candidate to
             study plasticity at the synapse, neuron, and network
             levels.},
   Doi = {10.1073/pnas.1109359109},
   Key = {fds328468}
}
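
%% A minimal Euler sketch in the spirit of the calcium-based model above:
%% pre- and postsynaptic spikes each add a calcium transient, and the
%% efficacy rho is potentiated (depressed) while calcium exceeds theta_p
%% (theta_d). Values are illustrative, not the paper's fitted parameters,
%% and the model's noise and bistability terms are omitted.

import numpy as np

dt, T = 1e-4, 1.0                                  # s
tau_ca, c_pre, c_post = 20e-3, 1.0, 2.0            # calcium dynamics
theta_d, theta_p = 1.0, 1.3                        # depression/potentiation
gamma_d, gamma_p, tau_rho = 200.0, 320.0, 150.0

pre = np.zeros(int(T / dt)); post = np.zeros_like(pre)
pre[::1000] = 1                                    # 10 Hz presynaptic spikes
post[50::1000] = 1                                 # post spikes 5 ms later

ca, rho = 0.0, 0.5
for i in range(pre.size):
    ca += -ca * dt / tau_ca + c_pre * pre[i] + c_post * post[i]
    drho = gamma_p * (1 - rho) * (ca > theta_p) - gamma_d * rho * (ca > theta_d)
    rho += drho * dt / tau_rho
print(rho)     # > 0.5: net potentiation for this pre-before-post pairing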

@article{fds328466,
   Author = {Brunel, N and Hakim, V},
   Title = {Fokker-Planck Equation},
   Pages = {1-6},
   Booktitle = {Encyclopedia of Computational Neuroscience},
   Publisher = {Springer New York},
   Editor = {Jaeger, D and Jung, R},
   Year = {2013},
   ISBN = {9781461466741},
   url = {http://dx.doi.org/10.1007/978-1-4614-7320-6_60-1},
   Doi = {10.1007/978-1-4614-7320-6_60-1},
   Key = {fds328466}
}
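
%% For the leaky integrate-and-fire neuron with white-noise input, the
%% stationary Fokker-Planck equation yields the classic mean
%% first-passage-time firing rate,
%% nu = 1 / (t_ref + tau*sqrt(pi) * Int_{(Vr-mu)/sigma}^{(Vth-mu)/sigma}
%% exp(u^2) (1 + erf(u)) du), evaluated numerically below with
%% illustrative parameters.

import numpy as np
from scipy.special import erf
from scipy.integrate import quad

def lif_rate(mu, sigma, tau=20e-3, t_ref=2e-3, v_th=20.0, v_r=10.0):
    integrand = lambda u: np.exp(u ** 2) * (1 + erf(u))
    lo, hi = (v_r - mu) / sigma, (v_th - mu) / sigma
    integral, _ = quad(integrand, lo, hi)
    return 1.0 / (t_ref + tau * np.sqrt(np.pi) * integral)

print(lif_rate(mu=15.0, sigma=5.0))    # stationary rate in Hz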

@article{fds328467,
   Author = {Clopath, C and Brunel, N},
   Title = {Optimal properties of analog perceptrons with excitatory
             weights.},
   Journal = {PLoS Comput Biol},
   Volume = {9},
   Number = {2},
   Pages = {e1002919},
   Year = {2013},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1002919},
   Abstract = {The cerebellum is a brain structure which has been
             traditionally devoted to supervised learning. According to
             this theory, plasticity at the Parallel Fiber (PF) to
             Purkinje Cell (PC) synapses is guided by the Climbing fibers
             (CF), which encode an 'error signal'. Purkinje cells have
             thus been modeled as perceptrons, learning input/output
             binary associations. At maximal capacity, a perceptron with
             excitatory weights expresses a large fraction of zero-weight
             synapses, in agreement with experimental findings. However,
             numerous experiments indicate that the firing rate of
             Purkinje cells varies in an analog, not binary, manner. In
             this paper, we study the perceptron with analog inputs and
             outputs. We show that the optimal input has a sparse binary
             distribution, in good agreement with the burst firing of the
             Granule cells. In addition, we show that the weight
             distribution consists of a large fraction of silent
             synapses, as in previously studied binary perceptron models,
             and as seen experimentally.},
   Doi = {10.1371/journal.pcbi.1002919},
   Key = {fds328467}
}

@article{fds356866,
   Author = {Brunel, N and Hakim, V},
   Title = {Population Density Models},
   Pages = {1-24},
   Booktitle = {Encyclopedia of Computational Neuroscience},
   Publisher = {Springer New York},
   Year = {2013},
   url = {http://dx.doi.org/10.1007/978-1-4614-7320-6_74-1},
   Doi = {10.1007/978-1-4614-7320-6_74-1},
   Key = {fds356866}
}

@article{fds339267,
   Author = {Brunel, N},
   Title = {Dynamics of neural networks},
   Pages = {489-512},
   Booktitle = {Principles of Neural Coding},
   Publisher = {CRC Press},
   Year = {2013},
   Month = {January},
   ISBN = {9781439853313},
   url = {http://dx.doi.org/10.1201/b14756},
   Abstract = {© 2013 by Taylor & Francis Group, LLC. Animals are
             constantly subjected to a bombardment of information through
             their sensory systems. This information is transmitted to
             the central nervous system (CNS) in the form of spike
             trains. Traditional views of how this information is
             processed by the CNS consist of a series of networks (or
             layers) of neurons, connected in a predominantly feedforward
             manner. However, neurons in any cortical network receive
             their inputs not only from the previous layers (i.e., LGN
             for V1 neurons, V1 for V2 neurons, etc.), but also from
             nearby neurons that are part of the same network
             (“lateral” or “recurrent” connections), and from
             neurons in higher areas in the feedforward hierarchy
             (“top-down” connections). In fact, anatomy shows that
             feedforward inputs are typically a small minority of the
             inputs received by a cortical neuron (Binzegger et al.
             2004). Therefore, to understand how networks of neurons in
             the CNS transmit the information that they receive, it is
             not enough to understand the input/output transformation at
             the single neuron level. In addition, one has to understand
             how the dynamics of networks of neurons shape the response
             of the population as a whole to dynamic inputs.},
   Doi = {10.1201/b14756},
   Key = {fds339267}
}

@article{fds328464,
   Author = {Hertäg, L and Durstewitz, D and Brunel, N},
   Title = {Analytical approximations of the firing rate of an adaptive
             exponential integrate-and-fire neuron in the presence of
             synaptic noise.},
   Journal = {Front Comput Neurosci},
   Volume = {8},
   Pages = {116},
   Year = {2014},
   url = {http://dx.doi.org/10.3389/fncom.2014.00116},
   Abstract = {Computational models offer a unique tool for understanding
             the network-dynamical mechanisms which mediate between
             physiological and biophysical properties, and behavioral
             function. A traditional challenge in computational
             neuroscience is, however, that simple neuronal models which
             can be studied analytically fail to reproduce the diversity
             of electrophysiological behaviors seen in real neurons,
             while detailed neuronal models which do reproduce such
             diversity are intractable analytically and computationally
             expensive. A number of intermediate models have been
             proposed whose aim is to capture the diversity of firing
             behaviors and spike times of real neurons while entailing
             the simplest possible mathematical description. One such
             model is the exponential integrate-and-fire neuron with
             spike rate adaptation (aEIF), which consists of two
             differential equations for the membrane potential (V) and an
             adaptation current (w). Despite its simplicity, it can
             reproduce a wide variety of physiologically observed spiking
             patterns, can be fit to physiological recordings
             quantitatively, and, once fitted, is able to predict spike
             times on traces not used for model fitting. Here we compute
             the steady-state firing rate of aEIF in the presence of
             Gaussian synaptic noise, using two approaches. The first
             approach is based on the 2-dimensional Fokker-Planck
             equation that describes the (V,w)-probability distribution,
             which is solved using an expansion in the ratio between the
             time constants of the two variables. The second is based on
             the firing rate of the EIF model, which is averaged over the
             distribution of the w variable. These analytically derived
             closed-form expressions were tested on simulations from a
             large variety of model cells quantitatively fitted to in
             vitro electrophysiological recordings from pyramidal cells
             and interneurons. Theoretical predictions closely agreed
             with the firing rate of the simulated cells fed with
             in-vivo-like synaptic noise.},
   Doi = {10.3389/fncom.2014.00116},
   Key = {fds328464}
}
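
%% The two aEIF equations quoted above are straightforward to integrate
%% with forward Euler and Gaussian input noise. A minimal sketch; the
%% parameter values are illustrative, not fits to recordings.

import numpy as np

C, gL, EL, VT, DT = 200.0, 10.0, -70.0, -50.0, 2.0    # pF, nS, mV, mV, mV
a, b, tau_w, Vr, Vcut = 2.0, 60.0, 120.0, -58.0, 0.0  # nS, pA, ms, mV, mV
I, sigma, dt, T = 500.0, 100.0, 0.05, 1000.0          # pA, pA, ms, ms

rng = np.random.default_rng(2)
V, w, spikes = EL, 0.0, []
for step in range(int(T / dt)):
    dV = (-gL * (V - EL) + gL * DT * np.exp((V - VT) / DT) - w + I) / C
    V += dV * dt + (sigma / C) * np.sqrt(dt) * rng.standard_normal()
    if V >= Vcut:                          # spike: reset V, increment w
        spikes.append(step * dt)
        V, w = Vr, w + b
    w += (a * (V - EL) - w) * dt / tau_w   # adaptation current
print(len(spikes) / (T / 1000.0), "Hz")    # steady-state firing rate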

@article{fds328463,
   Author = {Tartaglia, EM and Mongillo, G and Brunel, N},
   Title = {On the relationship between persistent delay activity,
             repetition enhancement and priming.},
   Journal = {Front Psychol},
   Volume = {5},
   Pages = {1590},
   Year = {2014},
   url = {http://dx.doi.org/10.3389/fpsyg.2014.01590},
   Abstract = {Human efficiency in processing incoming stimuli (in terms of
             speed and/or accuracy) is typically enhanced by previous
             exposure to the same, or closely related stimuli-a
             phenomenon referred to as priming. In spite of the large
             body of knowledge accumulated in behavioral studies about
             the conditions conducive to priming, and its relationship
             with other forms of memory, the underlying neuronal
             correlates of priming are still under debate. The idea has
             repeatedly been advanced that a major neuronal mechanism
             supporting behaviorally-expressed priming is repetition
             suppression, a widespread reduction of spiking activity upon
             stimulus repetition which has been routinely exposed by
             single-unit recordings in non-human primates performing
             delayed-response, as well as passive fixation tasks. This
             proposal is mainly motivated by the observation that, in
             human fMRI studies, priming is associated with a significant
             reduction of the BOLD signal (widely interpreted as a proxy
             of the level of spiking activity) upon stimulus repetition.
             Here, we critically re-examine a large part of the
             electrophysiological literature on repetition suppression in
             non-human primates and find that repetition suppression is
             systematically accompanied by stimulus-selective delay
             period activity, together with repetition enhancement, an
             increase of spiking activity upon stimulus repetition in
             small neuronal populations. We argue that repetition
             enhancement constitutes a more viable candidate for a
             putative neuronal substrate of priming, and propose a
             minimal framework that links together, mechanistically and
             functionally, repetition suppression, stimulus-selective
             delay activity and repetition enhancement.},
   Doi = {10.3389/fpsyg.2014.01590},
   Key = {fds328463}
}

@article{fds356865,
   Author = {Brunel, N and Hakim, V},
   Title = {Fokker-Planck Equation},
   Pages = {1-5},
   Booktitle = {Encyclopedia of Computational Neuroscience},
   Publisher = {Springer New York},
   Year = {2014},
   ISBN = {9781461466741},
   url = {http://dx.doi.org/10.1007/978-1-4614-7320-6_60-2},
   Doi = {10.1007/978-1-4614-7320-6_60-2},
   Key = {fds356865}
}

@article{fds328462,
   Author = {Brunel, N and Hakim, V and Richardson, MJE},
   Title = {Single neuron dynamics and computation.},
   Journal = {Curr Opin Neurobiol},
   Volume = {25},
   Pages = {149-155},
   Year = {2014},
   Month = {April},
   url = {http://dx.doi.org/10.1016/j.conb.2014.01.005},
   Abstract = {At the single neuron level, information processing involves
             the transformation of input spike trains into an appropriate
             output spike train. Building upon the classical view of a
             neuron as a threshold device, models have been developed in
             recent years that take into account the diverse
             electrophysiological make-up of neurons and accurately
             describe their input-output relations. Here, we review these
             recent advances and survey the computational roles that they
             have uncovered for various electrophysiological properties,
             for dendritic arbor anatomy as well as for short-term
             synaptic plasticity.},
   Doi = {10.1016/j.conb.2014.01.005},
   Key = {fds328462}
}

@article{fds328461,
   Author = {Clopath, C and Badura, A and De Zeeuw, CI and Brunel,
             N},
   Title = {A cerebellar learning model of vestibulo-ocular reflex
             adaptation in wild-type and mutant mice.},
   Journal = {J Neurosci},
   Volume = {34},
   Number = {21},
   Pages = {7203-7215},
   Year = {2014},
   Month = {May},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.2791-13.2014},
   Abstract = {Mechanisms of cerebellar motor learning are still poorly
             understood. The standard Marr-Albus-Ito theory posits that
             learning involves plasticity at the parallel fiber to
             Purkinje cell synapses under control of the climbing fiber
             input, which provides an error signal as in classical
             supervised learning paradigms. However, a growing body of
             evidence challenges this theory, in that additional sites of
             plasticity appear to contribute to motor adaptation. Here,
             we consider phase-reversal training of the vestibulo-ocular
             reflex (VOR), a simple form of motor learning for which a
             large body of experimental data is available in wild-type
             and mutant mice, in which the excitability of granule cells
             or inhibition of Purkinje cells was affected in a
             cell-specific fashion. We present novel electrophysiological
             recordings of Purkinje cell activity measured in naive
             wild-type mice subjected to this VOR adaptation task. We
             then introduce a minimal model that consists of learning at
             the parallel fibers to Purkinje cells with the help of the
             climbing fibers. Although the minimal model reproduces the
             behavior of the wild-type animals and is analytically
             tractable, it fails to reproduce the behavior of mutant
             mice and the electrophysiology data. Therefore, we build a
             detailed model involving plasticity at the parallel fibers
             to Purkinje cells' synapse guided by climbing fibers,
             feedforward inhibition of Purkinje cells, and plasticity at
             the mossy fiber to vestibular nuclei neuron synapse. The
             detailed model reproduces both the behavioral and
             electrophysiological data of both the wild-type and mutant
             mice and allows for experimentally testable
             predictions.},
   Doi = {10.1523/JNEUROSCI.2791-13.2014},
   Key = {fds328461}
}

@article{fds328460,
   Author = {Dubreuil, AM and Amit, Y and Brunel, N},
   Title = {Memory capacity of networks with stochastic binary
             synapses.},
   Journal = {PLoS Comput Biol},
   Volume = {10},
   Number = {8},
   Pages = {e1003727},
   Year = {2014},
   Month = {August},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1003727},
   Abstract = {In standard attractor neural network models, specific
             patterns of activity are stored in the synaptic matrix, so
             that they become fixed point attractors of the network
             dynamics. The storage capacity of such networks has been
             quantified in two ways: the maximal number of patterns that
             can be stored, and the stored information measured in bits
             per synapse. In this paper, we compute both quantities in
             fully connected networks of N binary neurons with binary
             synapses, storing patterns with coding level f, in the large
             N and sparse coding limits (N → ∞, f → 0). We also derive finite-size
             corrections that accurately reproduce the results of
             simulations in networks of tens of thousands of neurons.
             These methods are applied to three different scenarios: (1)
             the classic Willshaw model, (2) networks with stochastic
             learning in which patterns are shown only once (one shot
             learning), (3) networks with stochastic learning in which
             patterns are shown multiple times. The storage capacities
             are optimized over network parameters, which allows us to
             compare the performance of the different models. We show
             that finite-size effects strongly reduce the capacity, even
             for networks of realistic sizes. We discuss the implications
             of these results for memory storage in the hippocampus and
             cerebral cortex.},
   Doi = {10.1371/journal.pcbi.1003727},
   Key = {fds328460}
}
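
%% The Willshaw rule referenced above (scenario 1) is compact enough to
%% state directly: a binary synapse is switched on if pre and post are
%% ever coactive in a stored pattern, and retrieval thresholds the
%% resulting input. Network size, coding level and cue damage below are
%% illustrative.

import numpy as np

rng = np.random.default_rng(3)
N, f, P = 1000, 0.02, 50                 # neurons, coding level, patterns
patterns = (rng.random((P, N)) < f).astype(np.uint8)

J = np.zeros((N, N), dtype=np.uint8)
for xi in patterns:                      # clipped Hebbian (OR) storage
    J |= np.outer(xi, xi)

cue = patterns[0].copy()
cue[np.flatnonzero(cue)[:5]] = 0         # degrade the retrieval cue
h = J.astype(np.int32) @ cue             # synaptic input to each unit
retrieved = (h >= 0.8 * cue.sum()).astype(np.uint8)
print((retrieved == patterns[0]).mean()) # overlap with the stored pattern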

@article{fds328458,
   Author = {Higgins, D and Graupner, M and Brunel, N},
   Title = {Memory maintenance in synapses with calcium-based plasticity
             in the presence of background activity.},
   Journal = {PLoS Comput Biol},
   Volume = {10},
   Number = {10},
   Pages = {e1003834},
   Year = {2014},
   Month = {October},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1003834},
   Abstract = {Most models of learning and memory assume that memories are
             maintained in neuronal circuits by persistent synaptic
             modifications induced by specific patterns of pre- and
             postsynaptic activity. For this scenario to be viable,
             synaptic modifications must survive the ubiquitous ongoing
             activity present in neural circuits in vivo. In this paper,
             we investigate the time scales of memory maintenance in a
             calcium-based synaptic plasticity model that has been shown
             recently to be able to fit different experimental data-sets
             from hippocampal and neocortical preparations. We find that
             in the presence of background activity on the order of 1 Hz,
             parameters that fit layer 5 neocortical pyramidal cell data lead
             to a very fast decay of synaptic efficacy, with time scales
             of minutes. We then identify two ways in which this memory
             time scale can be extended: (i) the extracellular calcium
             concentration in the experiments used to fit the model is
             larger than estimated concentrations in vivo. Lowering
             extracellular calcium concentration to in vivo levels leads
             to an increase in memory time scales of several orders of
             magnitude; (ii) adding a bistability mechanism so that each
             synapse has two stable states at sufficiently low background
             activity leads to a further boost in memory time scale,
             since memory decay is no longer described by an exponential
             decay from an initial state, but by an escape from a
             potential well. We argue that both features are expected to
             be present in synapses in vivo. These results are obtained
             first in a single synapse connecting two independent Poisson
             neurons, and then in simulations of a large network of
             excitatory and inhibitory integrate-and-fire neurons. Our
             results emphasise the need for studying plasticity at
             physiological extracellular calcium concentration, and
             highlight the role of synaptic bi- or multistability in the
             stability of learned synaptic structures.},
   Doi = {10.1371/journal.pcbi.1003834},
   Key = {fds328458}
}

@article{fds328459,
   Author = {Barbieri, F and Mazzoni, A and Logothetis, NK and Panzeri, S and Brunel,
             N},
   Title = {Stimulus dependence of local field potential spectra:
             experiment versus theory.},
   Journal = {J Neurosci},
   Volume = {34},
   Number = {44},
   Pages = {14589-14605},
   Year = {2014},
   Month = {October},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.5365-13.2014},
   Abstract = {The local field potential (LFP) captures different neural
             processes, including integrative synaptic dynamics that
             cannot be observed by measuring only the spiking activity of
             small populations. Therefore, investigating how LFP power is
             modulated by external stimuli can offer important insights
             into sensory neural representations. However, gaining such
             insight requires developing data-driven computational models
             that can identify and disambiguate the neural contributions
             to the LFP. Here, we investigated how networks of excitatory
             and inhibitory integrate-and-fire neurons responding to
             time-dependent inputs can be used to interpret sensory
             modulations of LFP spectra. We computed analytically from
             such models the LFP spectra and the information that they
             convey about input and used these analytical expressions to
             fit the model to LFPs recorded in V1 of anesthetized
             macaques (Macaca mulatta) during the presentation of color
             movies. Our expressions explain 60%-98% of the variance of
             the LFP spectrum shape and its dependency upon movie scenes,
             and we achieved this with realistic values for the best-fit
             parameters. In particular, synaptic best-fit parameters were
             compatible with experimental measurements and the
             predictions of firing rates, based only on the fit of LFP
             data, correlated with the multiunit spike rate recorded from
             the same location. Moreover, the parameters characterizing
             the input to the network across different movie scenes
             correlated with cross-scene changes of several image
             features. Our findings suggest that analytical descriptions
             of spiking neuron networks may become a crucial tool for the
             interpretation of field recordings.},
   Doi = {10.1523/JNEUROSCI.5365-13.2014},
   Key = {fds328459}
}

@article{fds328465,
   Author = {Brunel, N and Hakim, V},
   Title = {Population Density Model},
   Pages = {2447-2465},
   Booktitle = {Encyclopedia of Computational Neuroscience},
   Publisher = {Springer New York},
   Editor = {Jaeger, D and Jung, R},
   Year = {2015},
   ISBN = {9781461466741},
   url = {http://dx.doi.org/10.1007/978-1-4614-6675-8_74},
   Doi = {10.1007/978-1-4614-6675-8_74},
   Key = {fds328465}
}

@article{fds328457,
   Author = {Tartaglia, EM and Brunel, N and Mongillo, G},
   Title = {Modulation of network excitability by persistent activity:
             how working memory affects the response to incoming
             stimuli.},
   Journal = {PLoS Comput Biol},
   Volume = {11},
   Number = {2},
   Pages = {e1004059},
   Year = {2015},
   Month = {February},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1004059},
   Abstract = {Persistent activity and match effects are widely regarded as
             neuronal correlates of short-term storage and manipulation
             of information, with the former serving active maintenance
             and the latter supporting the comparison between memory
             contents and incoming sensory information. The mechanistic
             and functional relationship between these two basic
             neurophysiological signatures of working memory remains
             elusive. We propose that match signals are generated as a
             result of transient changes in local network excitability
             brought about by persistent activity. More active neurons
             will be more excitable, and thus more responsive to external
             inputs. Accordingly, network responses are jointly
             determined by the incoming stimulus and the ongoing pattern
             of persistent activity. Using a spiking model network, we
             show that this mechanism is able to reproduce most of the
             experimental phenomenology of match effects as exposed by
             single-cell recordings during delayed-response tasks. The
             model provides a unified, parsimonious mechanistic account
             of the main neuronal correlates of working memory, makes
             several experimentally testable predictions, and
             demonstrates a new functional role for persistent
             activity.},
   Doi = {10.1371/journal.pcbi.1004059},
   Key = {fds328457}
}

@article{fds328456,
   Author = {Ostojic, S and Szapiro, G and Schwartz, E and Barbour, B and Brunel, N and Hakim, V},
   Title = {Neuronal morphology generates high-frequency firing
             resonance.},
   Journal = {J Neurosci},
   Volume = {35},
   Number = {18},
   Pages = {7056-7068},
   Year = {2015},
   Month = {May},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.3924-14.2015},
   Abstract = {The attenuation of neuronal voltage responses to
             high-frequency current inputs by the membrane capacitance is
             believed to limit single-cell bandwidth. However, neuronal
             populations subject to stochastic fluctuations can follow
             inputs beyond this limit. We investigated this apparent
             paradox theoretically and experimentally using Purkinje
             cells in the cerebellum, a motor structure that benefits
             from rapid information transfer. We analyzed the modulation
             of firing in response to the somatic injection of sinusoidal
             currents. Computational modeling suggested that, instead of
             decreasing with frequency, modulation amplitude can increase
             up to high frequencies because of cellular morphology.
             Electrophysiological measurements in adult rat slices
             confirmed this prediction and displayed a marked resonance
             at 200 Hz. We elucidated the underlying mechanism, showing
             that the two-compartment morphology of the Purkinje cell,
             interacting with a simple spiking mechanism and dendritic
             fluctuations, is sufficient to create high-frequency signal
             amplification. This mechanism, which we term
             morphology-induced resonance, is selective for somatic
             inputs, which in the Purkinje cell are exclusively
             inhibitory. The resonance sensitizes Purkinje cells in the
             frequency range of population oscillations observed in
             vivo.},
   Doi = {10.1523/JNEUROSCI.3924-14.2015},
   Key = {fds328456}
}

@article{fds328455,
   Author = {Alemi, A and Baldassi, C and Brunel, N and Zecchina,
             R},
   Title = {A Three-Threshold Learning Rule Approaches the Maximal
             Capacity of Recurrent Neural Networks.},
   Journal = {PLoS Comput Biol},
   Volume = {11},
   Number = {8},
   Pages = {e1004439},
   Year = {2015},
   Month = {August},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1004439},
   Abstract = {Understanding the theoretical foundations of how memories
             are encoded and retrieved in neural populations is a central
             challenge in neuroscience. A popular theoretical scenario
             for modeling memory function is the attractor neural network
             scenario, whose prototype is the Hopfield model. The model's
             simplicity and the locality of the synaptic update rules
             come at the cost of a poor storage capacity, compared with
             the capacity achieved with perceptron learning algorithms.
             Here, by transforming the perceptron learning rule, we
             present an online learning rule for a recurrent neural
             network that achieves near-maximal storage capacity without
             an explicit supervisory error signal, relying only upon
             locally accessible information. The fully-connected network
             consists of excitatory binary neurons with plastic recurrent
             connections and non-plastic inhibitory feedback stabilizing
             the network dynamics; the memory patterns to be memorized
             are presented online as strong afferent currents, producing
             a bimodal distribution for the neuron synaptic inputs.
             Synapses corresponding to active inputs are modified as a
             function of the value of the local fields with respect to
             three thresholds. Above the highest threshold, and below the
             lowest threshold, no plasticity occurs. In between these two
             thresholds, potentiation/depression occurs when the local
             field is above/below an intermediate threshold. We simulated
             and analyzed a network of binary neurons implementing this
             rule and measured its storage capacity for different sizes
             of the basins of attraction. The storage capacity obtained
             through numerical simulations is shown to be close to the
             value predicted by analytical calculations. We also measured
             the dependence of capacity on the strength of external
             inputs. Finally, we quantified the statistics of the
             resulting synaptic connectivity matrix, and found that both
             the fraction of zero weight synapses and the degree of
             symmetry of the weight matrix increase with the number of
             stored patterns.},
   Doi = {10.1371/journal.pcbi.1004439},
   Key = {fds328455}
}
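
%% A schematic reading of the three-threshold rule described above, for
%% one neuron's incoming excitatory weights: no change when the local
%% field h is above theta_top or below theta_bottom, potentiation or
%% depression of active-input synapses otherwise, according to whether h
%% exceeds theta_mid. Threshold and learning-rate values are illustrative.

import numpy as np

def three_threshold_update(w, x, h, lr=0.01,
                           th_bottom=-1.0, th_mid=0.0, th_top=1.0):
    """Update weights w for binary input pattern x given local field h."""
    if th_bottom < h < th_top:           # otherwise: no plasticity
        w = w + (lr if h > th_mid else -lr) * x
    return np.clip(w, 0.0, None)         # keep synapses excitatory

rng = np.random.default_rng(4)
w = np.abs(rng.normal(0.0, 0.1, 100))
x = (rng.random(100) < 0.3).astype(float)
w = three_threshold_update(w, x, h=0.4)  # h in the plastic range: potentiate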

@article{fds328454,
   Author = {Lim, S and McKee, JL and Woloszyn, L and Amit, Y and Freedman, DJ and Sheinberg, DL and Brunel, N},
   Title = {Inferring learning rules from distributions of firing rates
             in cortical neurons.},
   Journal = {Nat Neurosci},
   Volume = {18},
   Number = {12},
   Pages = {1804-1810},
   Year = {2015},
   Month = {December},
   url = {http://dx.doi.org/10.1038/nn.4158},
   Abstract = {Information about external stimuli is thought to be stored
             in cortical circuits through experience-dependent
             modifications of synaptic connectivity. These modifications
             of network connectivity should lead to changes in neuronal
             activity as a particular stimulus is repeatedly encountered.
             Here we ask what plasticity rules are consistent with the
             differences in the statistics of the visual response to
             novel and familiar stimuli in inferior temporal cortex, an
             area underlying visual object recognition. We introduce a
             method that allows one to infer the dependence of the
             presumptive learning rule on postsynaptic firing rate, and
             we show that the inferred learning rule exhibits depression
             for low postsynaptic rates and potentiation for high rates.
             The threshold separating depression from potentiation is
             strongly correlated with both mean and s.d. of the firing
             rate distribution. Finally, we show that network models
             implementing a rule extracted from data show stable learning
             dynamics and lead to sparser representations of
             stimuli.},
   Doi = {10.1038/nn.4158},
   Key = {fds328454}
}

@article{fds328453,
   Author = {De Pittà, M and Brunel, N},
   Title = {Modulation of Synaptic Plasticity by Glutamatergic
             Gliotransmission: A Modeling Study.},
   Journal = {Neural Plast},
   Volume = {2016},
   Pages = {7607924},
   Year = {2016},
   url = {http://dx.doi.org/10.1155/2016/7607924},
   Abstract = {Glutamatergic gliotransmission, that is, the release of
             glutamate from perisynaptic astrocyte processes in an
             activity-dependent manner, has emerged as a potentially
             crucial signaling pathway for regulation of synaptic
             plasticity, yet its modes of expression and function in vivo
             remain unclear. Here, we focus on two experimentally
             well-identified gliotransmitter pathways, (i) modulations of
             synaptic release and (ii) postsynaptic slow inward currents
             mediated by glutamate released from astrocytes, and
             investigate their possible functional relevance on synaptic
             plasticity in a biophysical model of an astrocyte-regulated
             synapse. Our model predicts that both pathways could
             profoundly affect both short- and long-term plasticity. In
             particular, activity-dependent glutamate release from
             astrocytes could dramatically change spike-timing-dependent
             plasticity, turning potentiation into depression (and vice
             versa) for the same induction protocol.},
   Doi = {10.1155/2016/7607924},
   Key = {fds328453}
}

@article{fds366924,
   Author = {Brunel, N},
   Title = {Basic Neuron and Network Models},
   Pages = {73-99},
   Booktitle = {From Neuron to Cognition via Computational
              Neuroscience},
   Year = {2016},
   Key = {fds366924}
}

@article{fds328451,
   Author = {Dubreuil, AM and Brunel, N},
   Title = {Storing structured sparse memories in a multi-modular
             cortical network model.},
   Journal = {J Comput Neurosci},
   Volume = {40},
   Number = {2},
   Pages = {157-175},
   Year = {2016},
   Month = {April},
   url = {http://dx.doi.org/10.1007/s10827-016-0590-z},
   Abstract = {We study the memory performance of a class of modular
             attractor neural networks, where modules are potentially
             fully-connected networks connected to each other via diluted
             long-range connections. On this anatomical architecture we
             store memory patterns of activity using a Willshaw-type
             learning rule. P patterns are split into categories, such that
             patterns of the same category activate the same set of
             modules. We first compute the maximal storage capacity of
             these networks. We then investigate their error-correction
             properties through an exhaustive exploration of parameter
             space, and identify regions where the networks behave as an
             associative memory device. The crucial parameters that
             control the retrieval abilities of the network are (1) the
             ratio between the number of synaptic contacts of long- and
             short-range origins, (2) the number of categories in which a
             module is activated, and (3) the amount of local inhibition.
             We discuss the relationship between our model and networks
             of cortical patches that have been observed in different
             cortical areas.},
   Doi = {10.1007/s10827-016-0590-z},
   Key = {fds328451}
}

@article{fds328452,
   Author = {Bouvier, G and Higgins, D and Spolidoro, M and Carrel, D and Mathieu, B and Léna, C and Dieudonné, S and Barbour, B and Brunel, N and Casado,
             M},
   Title = {Burst-Dependent Bidirectional Plasticity in the Cerebellum
             Is Driven by Presynaptic NMDA Receptors.},
   Journal = {Cell Rep},
   Volume = {15},
   Number = {1},
   Pages = {104-116},
   Year = {2016},
   Month = {April},
   url = {http://dx.doi.org/10.1016/j.celrep.2016.03.004},
   Abstract = {Numerous studies have shown that cerebellar function is
             related to the plasticity at the synapses between parallel
             fibers and Purkinje cells. How specific input patterns
             determine plasticity outcomes, as well as the biophysics
             underlying plasticity of these synapses, remain unclear.
             Here, we characterize the patterns of activity that lead to
             postsynaptically expressed LTP using both in vivo and in
             vitro experiments. Similar to the requirements of LTD, we
             find that high-frequency bursts are necessary to trigger LTP
             and that this burst-dependent plasticity depends on
             presynaptic NMDA receptors and nitric oxide (NO) signaling.
             We provide direct evidence for calcium entry through
             presynaptic NMDA receptors in a subpopulation of parallel
             fiber varicosities. Finally, we develop and experimentally
             verify a mechanistic plasticity model based on NO and
             calcium signaling. The model reproduces plasticity outcomes
             from data and predicts the effect of arbitrary patterns of
             synaptic inputs on Purkinje cells, thereby providing a
             unified description of plasticity.},
   Doi = {10.1016/j.celrep.2016.03.004},
   Key = {fds328452}
}

@article{fds328449,
   Author = {Brunel, N},
   Title = {Is cortical connectivity optimized for storing
             information?},
   Journal = {Nat Neurosci},
   Volume = {19},
   Number = {5},
   Pages = {749-755},
   Year = {2016},
   Month = {May},
   url = {http://dx.doi.org/10.1038/nn.4286},
   Abstract = {Cortical networks are thought to be shaped by
             experience-dependent synaptic plasticity. Theoretical
             studies have shown that synaptic plasticity allows a network
             to store a memory of patterns of activity such that they
             become attractors of the dynamics of the network. Here we
             study the properties of the excitatory synaptic connectivity
             in a network that maximizes the number of stored patterns of
             activity in a robust fashion. We show that the resulting
             synaptic connectivity matrix has the following properties:
             it is sparse, with a large fraction of zero synaptic weights
             ('potential' synapses); bidirectionally coupled pairs of
             neurons are over-represented in comparison to a random
             network; and bidirectionally connected pairs have stronger
             synapses on average than unidirectionally connected pairs.
             All these features reproduce quantitatively available data
             on connectivity in cortex. This suggests synaptic
             connectivity in cortex is optimized to store a large number
             of attractor states in a robust fashion.},
   Doi = {10.1038/nn.4286},
   Key = {fds328449}
}

@article{fds328450,
   Author = {De Pittà, M and Brunel, N and Volterra, A},
   Title = {Astrocytes: Orchestrating synaptic plasticity?},
   Journal = {Neuroscience},
   Volume = {323},
   Pages = {43-61},
   Year = {2016},
   Month = {May},
   url = {http://dx.doi.org/10.1016/j.neuroscience.2015.04.001},
   Abstract = {Synaptic plasticity is the capacity of a preexisting
             connection between two neurons to change in strength as a
             function of neural activity. Because synaptic plasticity is
             the major candidate mechanism for learning and memory, the
             elucidation of its constituting mechanisms is of crucial
             importance in many aspects of normal and pathological brain
             function. In particular, a prominent aspect that remains
             debated is how the plasticity mechanisms, which encompass a
             broad spectrum of temporal and spatial scales, come to play
             together in a concerted fashion. Here we review and discuss
             evidence that points to a possible non-neuronal, glial
             candidate for such orchestration: the regulation of synaptic
             plasticity by astrocytes.},
   Doi = {10.1016/j.neuroscience.2015.04.001},
   Key = {fds328450}
}

@article{fds328448,
   Author = {Zampini, V and Liu, JK and Diana, MA and Maldonado, PP and Brunel, N and Dieudonné, S},
   Title = {Mechanisms and functional roles of glutamatergic synapse
             diversity in a cerebellar circuit.},
   Journal = {Elife},
   Volume = {5},
   Year = {2016},
   Month = {September},
   url = {http://dx.doi.org/10.7554/eLife.15872},
   Abstract = {Synaptic currents display a large degree of heterogeneity of
             their temporal characteristics, but the functional role of
             such heterogeneities remains unknown. We investigated, in rat
             cerebellar slices synaptic currents in Unipolar Brush Cells
             (UBCs), which generate intrinsic mossy fibers relaying
             vestibular inputs to the cerebellar cortex. We show that
             UBCs respond to sinusoidal modulations of their sensory
             input with heterogeneous amplitudes and phase shifts.
             Experiments and modeling indicate that this variability
             results both from the kinetics of synaptic glutamate
             transients and from the diversity of postsynaptic receptors.
             While phase inversion is produced by an mGluR2-activated
             outward conductance in OFF UBCs, the phase delay of ON UBCs
             is caused by a late rebound current resulting from AMPAR
             recovery from desensitization. Granular layer network
             modeling indicates that phase dispersion of UBC responses
             generates diverse phase coding in the granule cell
             population, allowing climbing-fiber-driven Purkinje cell
             learning at arbitrary phases of the vestibular
             input.},
   Doi = {10.7554/eLife.15872},
   Key = {fds328448}
}

@article{fds328447,
   Author = {Titley, HK and Brunel, N and Hansel, C},
   Title = {Toward a Neurocentric View of Learning.},
   Journal = {Neuron},
   Volume = {95},
   Number = {1},
   Pages = {19-32},
   Year = {2017},
   Month = {July},
   url = {http://dx.doi.org/10.1016/j.neuron.2017.05.021},
   Abstract = {Synaptic plasticity (e.g., long-term potentiation [LTP]) is
             considered the cellular correlate of learning. Recent
             optogenetic studies on memory engram formation assign a
             critical role in learning to suprathreshold activation of
             neurons and their integration into active engrams ("engram
             cells"). Here we review evidence that ensemble integration
             may result from LTP but also from cell-autonomous changes in
             membrane excitability. We propose that synaptic plasticity
             determines synaptic connectivity maps, whereas intrinsic
             plasticity-possibly separated in time-amplifies neuronal
             responsiveness and acutely drives engram integration. Our
             proposal marks a move away from an exclusively
             synaptocentric toward a non-exclusive, neurocentric view of
             learning.},
   Doi = {10.1016/j.neuron.2017.05.021},
   Key = {fds328447}
}

@article{fds328910,
   Author = {Tartaglia, EM and Brunel, N},
   Title = {Bistability and up/down state alternations in
             inhibition-dominated randomly connected networks of LIF
             neurons.},
   Journal = {Sci Rep},
   Volume = {7},
   Number = {1},
   Pages = {11916},
   Year = {2017},
   Month = {September},
   url = {http://dx.doi.org/10.1038/s41598-017-12033-y},
   Abstract = {Electrophysiological recordings in cortex in vivo have
             revealed a rich variety of dynamical regimes ranging from
             irregular asynchronous states to a diversity of synchronized
             states, depending on species, anesthesia, and external
             stimulation. The average population firing rate in these
             states is typically low. We study analytically and
             numerically a network of sparsely connected excitatory and
             inhibitory integrate-and-fire neurons in the
             inhibition-dominated, low firing rate regime. For
             sufficiently high values of the external input, the network
             exhibits an asynchronous low firing frequency state (L).
             Depending on synaptic time constants, we show that two
             scenarios may occur when external inputs are decreased: (1)
             the L state can destabilize through a Hopf bifurcation as the
             external input is decreased, leading to synchronized
             oscillations spanning δ to β frequencies; (2) the
             network can reach a bistable region, between the low firing
             frequency network state (L) and a quiescent one (Q). Adding
             an adaptation current to excitatory neurons leads to
             spontaneous alternations between L and Q states, similar to
             experimental observations on UP and DOWN states
             alternations.},
   Doi = {10.1038/s41598-017-12033-y},
   Key = {fds328910}
}
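
%% Illustration: a minimal sparsely connected excitatory-inhibitory LIF
%% network in the inhibition-dominated regime studied above. All parameter
%% values are illustrative assumptions, not the paper's; synaptic delays,
%% adaptation, and the bistability analysis are omitted.

import numpy as np

rng = np.random.default_rng(1)
NE, NI = 800, 200                     # excitatory / inhibitory neurons
N = NE + NI
eps, J, g = 0.1, 0.2, 6.0             # connection prob., EPSP (mV), inhibition dominance
tau, theta, Vr = 20.0, 20.0, 10.0     # membrane tau (ms), threshold, reset (mV)
mu_ext = 24.0                         # constant external drive (mV), assumed
dt, T = 0.1, 200.0                    # time step and duration (ms)

# sparse random connectivity; column j holds the outputs of neuron j
C = rng.random((N, N)) < eps
Jmat = np.where(np.arange(N)[None, :] < NE, J, -g * J) * C

V = rng.uniform(Vr, theta, N)
n_spikes = 0
for step in range(int(T / dt)):
    V += dt / tau * (-V + mu_ext)                 # leaky integration
    fired = V >= theta
    if fired.any():
        n_spikes += fired.sum()
        V[fired] = Vr
        V += Jmat[:, fired].sum(axis=1)           # delta-pulse recurrent input

print(f"mean rate: {n_spikes / N / (T / 1000.0):.1f} Hz")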

@article{fds339215,
   Author = {Martí, D and Brunel, N and Ostojic, S},
   Title = {Correlations between synapses in pairs of neurons slow down
             dynamics in randomly connected neural networks.},
   Journal = {Phys Rev E},
   Volume = {97},
   Number = {6-1},
   Pages = {062314},
   Year = {2018},
   Month = {June},
   url = {http://dx.doi.org/10.1103/PhysRevE.97.062314},
   Abstract = {Networks of randomly connected neurons are among the most
             popular models in theoretical neuroscience. The connectivity
             between neurons in the cortex is, however, not fully random,
             the simplest and most prominent deviation from randomness
             found in experimental data being the overrepresentation of
             bidirectional connections among pyramidal cells. Using
             numerical and analytical methods, we investigate the effects
             of partially symmetric connectivity on the dynamics in
             networks of rate units. We consider the two dynamical
             regimes exhibited by random neural networks: the
             weak-coupling regime, where the firing activity decays to a
             single fixed point unless the network is stimulated, and the
             strong-coupling or chaotic regime, characterized by
             internally generated fluctuating firing rates. In the
             weak-coupling regime, we compute analytically, for an
             arbitrary degree of symmetry, the autocorrelation of network
             activity in the presence of external noise. In the chaotic
             regime, we perform simulations to determine the timescale of
             the intrinsic fluctuations. In both cases, symmetry
             increases the characteristic asymptotic decay time of the
             autocorrelation function and therefore slows down the
             dynamics in the network.},
   Doi = {10.1103/PhysRevE.97.062314},
   Key = {fds339215}
}
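
%% Illustration: the effect described above can be probed numerically by
%% interpolating between asymmetric and symmetric Gaussian connectivity.
%% A minimal sketch in the weak-coupling (stable) regime; the size, gain,
%% noise level, and symmetry parameter eta are assumptions.

import numpy as np

rng = np.random.default_rng(2)
N, g = 300, 0.4                     # network size, coupling gain (assumed)
dt, T, tau, sigma = 0.1, 600.0, 1.0, 0.1

def autocorrelation(eta, lags=(0, 20, 80)):
    """Normalized autocorrelation of noise-driven linear rate dynamics."""
    A = rng.standard_normal((N, N))
    # eta = 0: fully asymmetric matrix; eta = 1: fully symmetric
    J = g * (A + eta * A.T) / np.sqrt(N * (1 + eta ** 2))
    x = np.zeros(N)
    traj = []
    for _ in range(int(T / dt)):
        x += dt / tau * (-x + J.dot(x)) + sigma * np.sqrt(dt) * rng.standard_normal(N)
        traj.append(x.copy())
    traj = np.array(traj[1000:])    # discard the transient
    var = np.mean(np.sum(traj * traj, axis=1))
    return [np.mean(np.sum(traj[:len(traj) - l] * traj[l:], axis=1)) / var
            for l in lags]

for eta in (0.0, 0.8):              # symmetry slows the autocorrelation decay
    print(f"eta = {eta}:", np.round(autocorrelation(eta), 3))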

@article{fds336918,
   Author = {Pereira, U and Brunel, N},
   Title = {Attractor Dynamics in Networks with Learning Rules Inferred
             from In Vivo Data.},
   Journal = {Neuron},
   Volume = {99},
   Number = {1},
   Pages = {227-238.e4},
   Year = {2018},
   Month = {July},
   url = {http://dx.doi.org/10.1016/j.neuron.2018.05.038},
   Abstract = {The attractor neural network scenario is a popular scenario
             for memory storage in the association cortex, but there is
             still a large gap between models based on this scenario and
             experimental data. We study a recurrent network model in
             which both learning rules and distribution of stored
             patterns are inferred from distributions of visual responses
             for novel and familiar images in the inferior temporal
             cortex (ITC). Unlike classical attractor neural network
             models, our model exhibits graded activity in retrieval
             states, with distributions of firing rates that are close to
             lognormal. Inferred learning rules are close to maximizing
             the number of stored patterns within a family of
             unsupervised Hebbian learning rules, suggesting that
             learning rules in ITC are optimized to store a large number
             of attractor states. Finally, we show that there exist two
             types of retrieval states: one in which firing rates are
             constant in time and another in which firing rates fluctuate
             chaotically.},
   Doi = {10.1016/j.neuron.2018.05.038},
   Key = {fds336918}
}

@article{fds339859,
   Author = {Bouvier, G and Aljadeff, J and Clopath, C and Bimbard, C and Ranft, J and Blot, A and Nadal, J-P and Brunel, N and Hakim, V and Barbour,
             B},
   Title = {Cerebellar learning using perturbations.},
   Journal = {Elife},
   Volume = {7},
   Pages = {e31599},
   Year = {2018},
   Month = {November},
   url = {http://dx.doi.org/10.7554/eLife.31599},
   Abstract = {The cerebellum aids the learning of fast, coordinated
             movements. According to current consensus, erroneously
             active parallel fibre synapses are depressed by complex
             spikes signalling movement errors. However, this theory
             cannot solve the credit assignment problem of processing a
             global movement evaluation into multiple cell-specific error
             signals. We identify a possible implementation of an
             algorithm solving this problem, whereby spontaneous complex
             spikes perturb ongoing movements, create eligibility traces
             and signal error changes guiding plasticity. Error changes
             are extracted by adaptively cancelling the average error.
             This framework, stochastic gradient descent with estimated
             global errors (SGDEGE), predicts synaptic plasticity rules
             that apparently contradict the current consensus but were
             supported by plasticity experiments in slices from mice
             under conditions designed to be physiological, highlighting
             the sensitivity of plasticity studies to experimental
             conditions. We analyse the algorithm's convergence and
             capacity. Finally, we suggest SGDEGE may also operate in the
             basal ganglia.},
   Doi = {10.7554/eLife.31599},
   Key = {fds339859}
}

@article{fds348568,
   Author = {Pereira, U and Brunel, N},
   Title = {Unsupervised Learning of Persistent and Sequential
             Activity.},
   Journal = {Front Comput Neurosci},
   Volume = {13},
   Pages = {97},
   Year = {2019},
   url = {http://dx.doi.org/10.3389/fncom.2019.00097},
   Abstract = {Two strikingly distinct types of activity have been observed
             in various brain structures during delay periods of delayed
             response tasks: Persistent activity (PA), in which a
             sub-population of neurons maintains an elevated firing rate
             throughout an entire delay period; and Sequential activity
             (SA), in which sub-populations of neurons are activated
             sequentially in time. It has been hypothesized that both
             types of dynamics can be "learned" by the relevant networks
             from the statistics of their inputs, thanks to mechanisms of
             synaptic plasticity. However, the necessary conditions for a
             synaptic plasticity rule and input statistics to learn these
             two types of dynamics in a stable fashion are still unclear.
             In particular, it is unclear whether a single learning rule
             is able to learn both types of activity patterns, depending
             on the statistics of the inputs driving the network. Here,
             we first characterize the complete bifurcation diagram of a
             firing rate model of multiple excitatory populations with an
             inhibitory mechanism, as a function of the parameters
             characterizing its connectivity. We then investigate how an
             unsupervised temporally asymmetric Hebbian plasticity rule
             shapes the dynamics of the network. Consistent with previous
             studies, we find that for stable learning of PA and SA, an
             additional stabilization mechanism is necessary. We show
             that a generalized version of the standard multiplicative
             homeostatic plasticity (Renart et al., 2003; Toyoizumi et
             al., 2014) stabilizes learning by effectively masking
             excitatory connections during stimulation and unmasking
             those connections during retrieval. Using the bifurcation
             diagram derived for fixed connectivity, we study
             analytically the temporal evolution and the steady state of
             the learned recurrent architecture as a function of
             parameters characterizing the external inputs. Slowly changing
             stimuli lead to PA, while rapidly changing stimuli lead to SA.
             Our network model shows how a network with plastic synapses
             can stably and flexibly learn PA and SA in an unsupervised
             manner.},
   Doi = {10.3389/fncom.2019.00097},
   Key = {fds348568}
}

@article{fds341757,
   Author = {Vaz, AP and Inati, SK and Brunel, N and Zaghloul,
             KA},
   Title = {Coupled ripple oscillations between the medial temporal lobe
             and neocortex retrieve human memory.},
   Journal = {Science},
   Volume = {363},
   Number = {6430},
   Pages = {975-978},
   Publisher = {American Association for the Advancement of Science
             (AAAS)},
   Year = {2019},
   Month = {March},
   url = {http://dx.doi.org/10.1126/science.aau8956},
   Abstract = {Episodic memory retrieval relies on the recovery of neural
             representations of waking experience. This process is
             thought to involve a communication dynamic between the
             medial temporal lobe memory system and the neocortex. How
             this occurs is largely unknown, however, especially as it
             pertains to awake human memory retrieval. Using intracranial
             electroencephalographic recordings, we found that ripple
             oscillations were dynamically coupled between the human
             medial temporal lobe (MTL) and temporal association cortex.
             Coupled ripples were more pronounced during successful
             verbal memory retrieval and recovered the cortical neural
             representations of remembered items. Together, these data
             provide direct evidence that coupled ripples between the MTL
             and association cortex may underlie successful memory
             retrieval in the human brain.},
   Doi = {10.1126/science.aau8956},
   Key = {fds341757}
}

@article{fds361409,
   Author = {Oleskiw, TD and Bair, W and Shea-Brown, E and Brunel,
             N},
   Title = {Firing rate of the leaky integrate-and-fire neuron with
             stochastic conductance-based synaptic inputs with short
             decay times},
   Year = {2020},
   Month = {February},
   Abstract = {We compute the firing rate of a leaky integrate-and-fire
             (LIF) neuron with stochastic conductance-based inputs in the
             limit when synaptic decay times are much shorter than the
             membrane time constant. A comparison of our analytical
             results to numeric simulations is presented for a range of
             biophysically-realistic parameters.},
   Key = {fds361409}
}

@article{fds349025,
   Author = {Fore, TR and Taylor, BN and Brunel, N and Hull, C},
   Title = {Acetylcholine Modulates Cerebellar Granule Cell Spiking by
             Regulating the Balance of Synaptic Excitation and
             Inhibition.},
   Journal = {J Neurosci},
   Volume = {40},
   Number = {14},
   Pages = {2882-2894},
   Year = {2020},
   Month = {April},
   url = {http://dx.doi.org/10.1523/JNEUROSCI.2148-19.2020},
   Abstract = {Sensorimotor integration in the cerebellum is essential for
             refining motor output, and the first stage of this
             processing occurs in the granule cell layer. Recent evidence
             suggests that granule cell layer synaptic integration can be
             contextually modified, although the circuit mechanisms that
             could mediate such modulation remain largely unknown. Here
             we investigate the role of ACh in regulating granule cell
             layer synaptic integration in male rats and mice of both
             sexes. We find that Golgi cells, interneurons that provide
             the sole source of inhibition to the granule cell layer,
             express both nicotinic and muscarinic cholinergic receptors.
             While acute ACh application can modestly depolarize some
             Golgi cells, the net effect of longer, optogenetically
             induced ACh release is to strongly hyperpolarize Golgi
             cells. Golgi cell hyperpolarization by ACh leads to a
             significant reduction in both tonic and evoked granule cell
             synaptic inhibition. ACh also reduces glutamate release from
             mossy fibers by acting on presynaptic muscarinic receptors.
             Surprisingly, despite these consistent effects on Golgi
             cells and mossy fibers, ACh can either increase or decrease
             the spike probability of granule cells as measured by
             noninvasive cell-attached recordings. By constructing an
             integrate-and-fire model of granule cell layer population
             activity, we find that the direction of spike rate
             modulation can be accounted for predominately by the initial
             balance of excitation and inhibition onto individual granule
             cells. Together, these experiments demonstrate that ACh can
             modulate population-level granule cell responses by altering
             the ratios of excitation and inhibition at the first stage
             of cerebellar processing.SIGNIFICANCE STATEMENT The
             cerebellum plays a key role in motor control and motor
             learning. While it is known that behavioral context can
             modify motor learning, the circuit basis of such modulation
             has remained unclear. Here we find that a key
             neuromodulator, ACh, can alter the balance of excitation and
             inhibition at the first stage of cerebellar processing.
             These results suggest that ACh could play a key role in
             altering cerebellar learning by modifying how sensorimotor
             input is represented at the input layer of the
             cerebellum.},
   Doi = {10.1523/JNEUROSCI.2148-19.2020},
   Key = {fds349025}
}

@article{fds350568,
   Author = {Sanzeni, A and Akitake, B and Goldbach, HC and Leedy, CE and Brunel, N and Histed, MH},
   Title = {Inhibition stabilization is a widespread property of
             cortical networks.},
   Journal = {Elife},
   Volume = {9},
   Year = {2020},
   Month = {June},
   url = {http://dx.doi.org/10.7554/eLife.54875},
   Abstract = {Many cortical network models use recurrent coupling strong
             enough to require inhibition for stabilization. Yet it has
             been experimentally unclear whether inhibition-stabilized
             network (ISN) models describe cortical function well across
             areas and states. Here, we test several ISN predictions,
             including the counterintuitive (paradoxical) suppression of
             inhibitory firing in response to optogenetic inhibitory
             stimulation. We find clear evidence for ISN operation in
             mouse visual, somatosensory, and motor cortex. Simple
             two-population ISN models describe the data well and let us
             quantify coupling strength. Although some models predict a
             non-ISN to ISN transition with increasingly strong sensory
             stimuli, we find ISN effects without sensory stimulation and
             even during light anesthesia. Additionally, average
             paradoxical effects result only with transgenic, not viral,
             opsin expression in parvalbumin (PV)-positive neurons;
             theory and expression data show this is consistent with ISN
             operation. Taken together, these results show strong
             coupling and inhibition stabilization are common features of
             the cortex.},
   Doi = {10.7554/eLife.54875},
   Key = {fds350568}
}
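
%% Illustration: the paradoxical effect tested above already appears in the
%% simplest two-population rate model, provided the excitatory subnetwork is
%% unstable on its own (Wee > 1). The weights, inputs, and time constants
%% below are illustrative assumptions.

Wee, Wei, Wie, Wii = 2.5, 2.0, 3.0, 1.5   # connection strengths (assumed)
tauE, tauI, dt = 10.0, 5.0, 0.1           # time constants (ms) and step

def steady_rates(extra_drive_to_I, T=500.0):
    """Integrate the two-population rate model to its fixed point."""
    rE = rI = 1.0
    for _ in range(int(T / dt)):
        rE += dt / tauE * (-rE + max(Wee * rE - Wei * rI + 2.0, 0.0))
        rI += dt / tauI * (-rI + max(Wie * rE - Wii * rI + 1.0 + extra_drive_to_I, 0.0))
    return rE, rI

rE0, rI0 = steady_rates(0.0)
rE1, rI1 = steady_rates(1.0)   # stimulate the inhibitory population
print(f"baseline:      E = {rE0:.2f}, I = {rI0:.2f}")
print(f"I stimulated:  E = {rE1:.2f}, I = {rI1:.2f}  (I decreases: paradoxical)")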

@article{fds352530,
   Author = {Sanzeni, A and Histed, MH and Brunel, N},
   Title = {Response nonlinearities in networks of spiking
             neurons.},
   Journal = {PLoS Comput Biol},
   Volume = {16},
   Number = {9},
   Pages = {e1008165},
   Year = {2020},
   Month = {September},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1008165},
   Abstract = {Combining information from multiple sources is a fundamental
             operation performed by networks of neurons in the brain,
             whose general principles are still largely unknown.
             Experimental evidence suggests that combination of inputs in
             cortex relies on nonlinear summation. Such nonlinearities
             are thought to be fundamental to perform complex
             computations. However, these non-linearities are
             inconsistent with the balanced-state model, one of the most
             popular models of cortical dynamics, which predicts networks
             have a linear response. This linearity is obtained in the
             limit of very large recurrent coupling strength. We
             investigate the stationary response of networks of spiking
             neurons as a function of coupling strength. We show that,
             while a linear transfer function emerges at strong coupling,
             nonlinearities are prominent at finite coupling, both at
             response onset and close to saturation. We derive a general
             framework to classify nonlinear responses in these networks
             and discuss which of them can be captured by rate models.
             This framework could help to understand the diversity of
             non-linearities observed in cortical networks.},
   Doi = {10.1371/journal.pcbi.1008165},
   Key = {fds352530}
}

@article{fds361500,
   Author = {Sanzeni, A and Histed, MH and Brunel, N},
   Title = {Emergence of irregular activity in networks of strongly
             coupled conductance-based neurons},
   Year = {2020},
   Month = {September},
   Abstract = {Cortical neurons are characterized by irregular firing and a
             broad distribution of rates. The balanced state model
             explains these observations with a cancellation of mean
             excitatory and inhibitory currents, which makes fluctuations
             drive firing. In networks of neurons with current-based
             synapses, the balanced state emerges dynamically if coupling
             is strong, i.e. if the mean number of synapses per neuron
             $K$ is large and synaptic efficacy is of order $1/\sqrt{K}$.
             When synapses are conductance-based, current fluctuations
             are suppressed when coupling is strong, questioning the
             applicability of the balanced state idea to biological
             neural networks. We analyze networks of strongly coupled
             conductance-based neurons and show that asynchronous
             irregular activity and broad distributions of rates emerge
             if synapses are of order $1/\log(K)$. In such networks,
             unlike in the standard balanced state model, current
             fluctuations are small and firing is maintained by a
             drift-diffusion balance. This balance emerges dynamically,
             without fine tuning, if inputs are smaller than a critical
             value, which depends on synaptic time constants and coupling
             strength, and is significantly more robust to connection
             heterogeneities than the classical balanced state model. Our
             analysis makes experimentally testable predictions of how
             the network response properties should evolve as input
             increases.},
   Key = {fds361500}
}

@article{fds353292,
   Author = {Gillett, M and Pereira, U and Brunel, N},
   Title = {Characteristics of sequential activity in networks with
             temporally asymmetric Hebbian learning.},
   Journal = {Proc Natl Acad Sci U S A},
   Volume = {117},
   Number = {47},
   Pages = {29948-29958},
   Year = {2020},
   Month = {November},
   url = {http://dx.doi.org/10.1073/pnas.1918674117},
   Abstract = {Sequential activity has been observed in multiple neuronal
             circuits across species, neural structures, and behaviors.
             It has been hypothesized that sequences could arise from
             learning processes. However, it is still unclear whether
             biologically plausible synaptic plasticity rules can
             organize neuronal activity to form sequences whose
             statistics match experimental observations. Here, we
             investigate temporally asymmetric Hebbian rules in sparsely
             connected recurrent rate networks and develop a theory of
             the transient sequential activity observed after learning.
             These rules transform a sequence of random input patterns
             into synaptic weight updates. After learning, recalled
             sequential activity is reflected in the transient
             correlation of network activity with each of the stored
             input patterns. Using mean-field theory, we derive a
             low-dimensional description of the network dynamics and
             compute the storage capacity of these networks. Multiple
             temporal characteristics of the recalled sequential activity
             are consistent with experimental observations. We find that
             the degree of sparseness of the recalled sequences can be
             controlled by nonlinearities in the learning rule.
             Furthermore, sequences maintain robust decoding, but display
             highly labile dynamics, when synaptic connectivity is
             continuously modified due to noise or storage of other
             patterns, similar to recent observations in hippocampus and
             parietal cortex. Finally, we demonstrate that our results
             also hold in recurrent networks of spiking neurons with
             separate excitatory and inhibitory populations.},
   Doi = {10.1073/pnas.1918674117},
   Key = {fds353292}
}
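
%% Illustration: the core of the temporally asymmetric Hebbian rule studied
%% above, in a dense rate network. The paper treats sparsely connected
%% networks and nonlinear rules; this is a stripped-down sketch with assumed
%% sizes and an assumed learning-rule amplitude.

import numpy as np

rng = np.random.default_rng(3)
N, P = 1000, 16                          # neurons, sequence length
xi = rng.standard_normal((P, N))         # sequence of random input patterns

# Temporally asymmetric Hebbian rule: pattern mu is wired to pattern mu+1,
# so the recurrent dynamics push activity along the stored sequence
A = 2.0                                  # amplitude (assumed); A > 1 sustains recall
J = A * sum(np.outer(xi[m + 1], xi[m]) for m in range(P - 1)) / N

dt, tau = 0.1, 1.0
r = np.tanh(xi[0])                       # cue the network with the first pattern
for step in range(1, 201):
    r += dt / tau * (-r + np.tanh(J.dot(r)))
    if step % 40 == 0:
        overlaps = xi.dot(r) / N         # correlation with each stored pattern
        print(f"t = {step * dt:4.1f} tau: strongest overlap with pattern "
              f"{overlaps.argmax()} ({overlaps.max():.2f})")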

@article{fds354283,
   Author = {Inglebert, Y and Aljadeff, J and Brunel, N and Debanne,
             D},
   Title = {Synaptic plasticity rules with physiological calcium
             levels.},
   Journal = {Proc Natl Acad Sci U S A},
   Volume = {117},
   Number = {52},
   Pages = {33639-33648},
   Year = {2020},
   Month = {December},
   url = {http://dx.doi.org/10.1073/pnas.2013663117},
   Abstract = {Spike-timing-dependent plasticity (STDP) is considered as a
             primary mechanism underlying formation of new memories
             during learning. Despite the growing interest in
             activity-dependent plasticity, it is still unclear whether
             synaptic plasticity rules inferred from in vitro experiments
             are correct in physiological conditions. The abnormally high
             calcium concentration used in in vitro studies of STDP
             suggests that in vivo plasticity rules may differ
             significantly from in vitro experiments, especially since
             STDP depends strongly on calcium for induction. We therefore
             studied here the influence of extracellular calcium on
             synaptic plasticity. Using a combination of experimental
             (patch-clamp recording and Ca2+ imaging at CA3-CA1 synapses)
             and theoretical approaches, we show here that the classic
             STDP rule in which pairs of single pre- and postsynaptic
             action potentials induce synaptic modifications is not valid
             in the physiological Ca2+ range. Rather, we found that these
             pairs of single stimuli are unable to induce any synaptic
             modification in 1.3 and 1.5 mM calcium and lead to
             depression in 1.8 mM. Plasticity can only be recovered when
             bursts of postsynaptic spikes are used, or when neurons fire
             at sufficiently high frequency. In conclusion, the STDP rule
             is profoundly altered in physiological Ca2+, but specific
             activity regimes restore a classical STDP
             profile.},
   Doi = {10.1073/pnas.2013663117},
   Key = {fds354283}
}
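
%% Illustration: the theoretical approach above builds on calcium-threshold
%% plasticity models in the spirit of Graupner & Brunel (PNAS 2012), in which
%% depression and potentiation are driven by the time calcium spends above
%% two thresholds. A toy sketch; all parameters are illustrative assumptions,
%% and scaling down C_pre/C_post mimics lower extracellular calcium.

import numpy as np

C_pre, C_post = 0.6, 1.2         # calcium jumps per pre-/postsynaptic spike
tau_ca = 20.0                    # calcium decay time constant (ms)
theta_d, theta_p = 1.0, 1.5      # depression / potentiation thresholds

def time_above_thresholds(lag, pairs=10, interval=200.0, step=0.1):
    """Time calcium spends above each threshold for spike pairs at a given lag."""
    pre = 100.0 + np.arange(pairs) * interval
    post = pre + lag
    events = sorted([(t, C_pre) for t in pre] + [(t, C_post) for t in post])
    ca, t, i = 0.0, 0.0, 0
    above_d = above_p = 0.0
    t_end = pre[-1] + interval
    while t < t_end:
        while i < len(events) and events[i][0] <= t:
            ca += events[i][1]
            i += 1
        above_d += step * (ca > theta_d)
        above_p += step * (ca > theta_p)
        ca -= step * ca / tau_ca
        t += step
    return above_d, above_p

for lag in (-20.0, +10.0):       # post-before-pre vs pre-before-post
    d, p = time_above_thresholds(lag)
    print(f"lag {lag:+5.1f} ms: above theta_d {d:5.1f} ms, above theta_p {p:4.1f} ms")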

@article{fds361689,
   Author = {Goldt, S and Krzakala, F and Zdeborová, L and Brunel,
             N},
   Title = {Bayesian reconstruction of memories stored in neural
             networks from their connectivity},
   Journal = {PLoS Comput Biol, 19(1): e1010813, 2023},
   Year = {2021},
   Month = {May},
   Abstract = {The advent of comprehensive synaptic wiring diagrams of
             large neural circuits has created the field of connectomics
             and given rise to a number of open research questions. One
             such question is whether it is possible to reconstruct the
             information stored in a recurrent network of neurons, given
             its synaptic connectivity matrix. Here, we address this
             question by determining when solving such an inference
             problem is theoretically possible in specific attractor
             network models and by providing a practical algorithm to do
             so. The algorithm builds on ideas from statistical physics
             to perform approximate Bayesian inference and is amenable to
             exact analysis. We study its performance on three different
             models, compare the algorithm to standard algorithms such as
             PCA, and explore the limitations of reconstructing stored
             patterns from synaptic connectivity.},
   Key = {fds361689}
}

@article{fds357513,
   Author = {Aljadeff, J and Gillett, M and Pereira Obilinovic, U and Brunel,
             N},
   Title = {From synapse to network: models of information storage and
             retrieval in neural circuits.},
   Journal = {Curr Opin Neurobiol},
   Volume = {70},
   Pages = {24-33},
   Year = {2021},
   Month = {October},
   url = {http://dx.doi.org/10.1016/j.conb.2021.05.005},
   Abstract = {The mechanisms of information storage and retrieval in brain
             circuits are still the subject of debate. It is widely
             believed that information is stored at least in part through
             changes in synaptic connectivity in networks that encode
             this information and that these changes lead in turn to
             modifications of network dynamics, such that the stored
             information can be retrieved at a later time. Here, we
             review recent progress in deriving synaptic plasticity rules
             from experimental data and in understanding how plasticity
             rules affect the dynamics of recurrent networks. We show
             that the dynamics generated by such networks exhibit a large
             degree of diversity, depending on parameters, similar to
             experimental observations in vivo during delayed response
             tasks.},
   Doi = {10.1016/j.conb.2021.05.005},
   Key = {fds357513}
}

@article{fds361499,
   Author = {Pereira-Obilinovic, U and Aljadeff, J and Brunel,
             N},
   Title = {Forgetting leads to chaos in attractor networks},
   Year = {2021},
   Month = {November},
   Abstract = {Attractor networks are an influential theory for memory
             storage in brain systems. This theory has recently been
             challenged by the observation of strong temporal variability
             in neuronal recordings during memory tasks. In this work, we
             study a sparsely connected attractor network where memories
             are learned according to a Hebbian synaptic plasticity rule.
             After recapitulating known results for the continuous,
             sparsely connected Hopfield model, we investigate a model in
             which new memories are learned continuously and old memories
             are forgotten, using an online synaptic plasticity rule. We
             show that for a forgetting time scale that optimizes storage
             capacity, the qualitative features of the network's memory
             retrieval dynamics are age-dependent: most recent memories
             are retrieved as fixed-point attractors while older memories
             are retrieved as chaotic attractors characterized by strong
             heterogeneity and temporal fluctuations. Therefore,
             fixed-point and chaotic attractors co-exist in the network
             phase space. The network presents a continuum of
             statistically distinguishable memory states, where chaotic
             fluctuations appear abruptly above a critical age and then
             increase gradually until the memory disappears. We develop a
             dynamical mean field theory (DMFT) to analyze the
             age-dependent dynamics and compare the theory with
             simulations of large networks. Our numerical simulations
             show that a high-degree of sparsity is necessary for the
             DMFT to accurately predict the network capacity. Finally,
             our theory provides specific predictions for delay response
             tasks with aging memoranda. Our theory of attractor networks
             that continuously learn new information at the price of
             forgetting old memories can account for the observed
             diversity of retrieval states in the cortex, and in
             particular the strong temporal fluctuations of cortical
             activity.},
   Key = {fds361499}
}

@article{fds361498,
   Author = {Feng, Y and Brunel, N},
   Title = {Storage capacity of networks with discrete synapses and
             sparsely encoded memories},
   Year = {2021},
   Month = {December},
   Abstract = {Attractor neural networks (ANNs) are one of the leading
             theoretical frameworks for the formation and retrieval of
             memories in networks of biological neurons. In this
             framework, a pattern imposed by external inputs to the
             network is said to be learned when this pattern becomes a
             fixed point attractor of the network dynamics. The storage
             capacity is the maximum number of patterns that can be
             learned by the network. In this paper, we study the storage
             capacity of fully-connected and sparsely-connected networks
             with a binarized Hebbian rule, for arbitrary coding levels.
             Our results show that a network with discrete synapses has a
             similar storage capacity as the model with continuous
             synapses, and that this capacity tends asymptotically
             towards the optimal capacity, in the space of all possible
             binary connectivity matrices, in the sparse coding limit. We
             also derive finite coding level corrections for the
             asymptotic solution in the sparse coding limit. The result
             indicates that the capacity of networks with Hebbian learning
             rules converges to the optimal capacity extremely slowly
             when the coding level becomes small. Our results also show
             that in networks with sparse binary connectivity matrices,
             the information capacity per synapse is larger than in the
             fully connected case, and thus such networks store
             information more efficiently.},
   Key = {fds361498}
}

@article{fds363004,
   Author = {Sanzeni, A and Histed, MH and Brunel, N},
   Title = {Emergence of Irregular Activity in Networks of Strongly
             Coupled Conductance-Based Neurons.},
   Journal = {Phys Rev X},
   Volume = {12},
   Number = {1},
   Year = {2022},
   url = {http://dx.doi.org/10.1103/physrevx.12.011044},
   Abstract = {Cortical neurons are characterized by irregular firing and a
             broad distribution of rates. The balanced state model
             explains these observations with a cancellation of mean
             excitatory and inhibitory currents, which makes fluctuations
             drive firing. In networks of neurons with current-based
             synapses, the balanced state emerges dynamically if coupling
             is strong, i.e., if the mean number of synapses per neuron K
             is large and synaptic efficacy is of the order of $1/\sqrt{K}$.
             When synapses are conductance-based, current fluctuations
             are suppressed when coupling is strong, questioning the
             applicability of the balanced state idea to biological
             neural networks. We analyze networks of strongly coupled
             conductance-based neurons and show that asynchronous
             irregular activity and broad distributions of rates emerge
             if synaptic efficacy is of the order of $1/\log(K)$. In such
             networks, unlike in the standard balanced state model,
             current fluctuations are small and firing is maintained by a
             drift-diffusion balance. This balance emerges dynamically,
             without fine-tuning, if inputs are smaller than a critical
             value, which depends on synaptic time constants and coupling
             strength, and is significantly more robust to connection
             heterogeneities than the classical balanced state model. Our
             analysis makes experimentally testable predictions of how
             the network response properties should evolve as input
             increases.},
   Doi = {10.1103/physrevx.12.011044},
   Key = {fds363004}
}

@article{fds363900,
   Author = {Feng, Y and Brunel, N},
   Title = {Storage capacity of networks with discrete synapses and
             sparsely encoded memories.},
   Journal = {Phys Rev E},
   Volume = {105},
   Number = {5-1},
   Pages = {054408},
   Year = {2022},
   Month = {May},
   url = {http://dx.doi.org/10.1103/PhysRevE.105.054408},
   Abstract = {Attractor neural networks are one of the leading theoretical
             frameworks for the formation and retrieval of memories in
             networks of biological neurons. In this framework, a pattern
             imposed by external inputs to the network is said to be
             learned when this pattern becomes a fixed point attractor of
             the network dynamics. The storage capacity is the maximum
             number of patterns that can be learned by the network. In
             this paper, we study the storage capacity of fully connected
             and sparsely connected networks with a binarized Hebbian
             rule, for arbitrary coding levels. Our results show that a
             network with discrete synapses has a similar storage
             capacity as the model with continuous synapses, and that
             this capacity tends asymptotically towards the optimal
             capacity, in the space of all possible binary connectivity
             matrices, in the sparse coding limit. We also derive finite
             coding level corrections for the asymptotic solution in the
             sparse coding limit. The result indicates the capacity of
             networks with Hebbian learning rules converges to the
             optimal capacity extremely slowly when the coding level
             becomes small. Our results also show that in networks with
             sparse binary connectivity matrices, the information
             capacity per synapse is larger than in the fully connected
             case, and thus such networks store information more
             efficiently.},
   Doi = {10.1103/PhysRevE.105.054408},
   Key = {fds363900}
}
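
%% Illustration: a one-step retrieval test for a binarized (sign-clipped)
%% covariance Hebbian rule of the kind analyzed above. The network size,
%% coding level, thresholding, and the one-step overlap criterion are all
%% simplifying assumptions made for this sketch.

import numpy as np

rng = np.random.default_rng(4)
N, f = 1000, 0.05                          # neurons, coding level

def one_step_overlap(P):
    """Average one-step retrieval overlap for P sparse patterns."""
    xi = (rng.random((P, N)) < f).astype(float)
    # binarized Hebbian rule: keep only the sign of the summed covariance
    W = ((xi - f).T.dot(xi - f) > 0).astype(float)
    np.fill_diagonal(W, 0)
    H = W.dot(xi.T)                        # inputs to all neurons, all patterns
    theta = np.percentile(H, 100 * (1 - f), axis=0)   # keeps ~f*N neurons active
    out = (H > theta).astype(float)
    return np.mean((out * xi.T).sum(axis=0) / xi.sum(axis=1))

for P in (50, 400, 3200):                  # overlap degrades as the loading grows
    print(P, round(float(one_step_overlap(P)), 3))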

@article{fds363743,
   Author = {Abed Zadeh, A and Turner, BD and Calakos, N and Brunel,
             N},
   Title = {Non-monotonic effects of GABAergic synaptic inputs on
             neuronal firing.},
   Journal = {PLoS Comput Biol},
   Volume = {18},
   Number = {6},
   Pages = {e1010226},
   Year = {2022},
   Month = {June},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1010226},
   Abstract = {GABA is generally known as the principal inhibitory
             neurotransmitter in the nervous system, usually acting by
             hyperpolarizing membrane potential. However, GABAergic
             currents sometimes exhibit non-inhibitory effects, depending
             on the brain region, developmental stage or pathological
             condition. Here, we investigate the diverse effects of GABA
             on the firing rate of several single neuron models, using
             both analytical calculations and numerical simulations. We
             find that GABAergic synaptic conductance and output firing
             rate exhibit three qualitatively different regimes as a
             function of GABA reversal potential, EGABA: monotonically
             decreasing for sufficiently low EGABA (inhibitory);
             monotonically increasing for EGABA above firing threshold
             (excitatory); and a non-monotonic region for intermediate
             values of EGABA. In the non-monotonic regime, small GABA
             conductances have an excitatory effect while large GABA
             conductances show an inhibitory effect. We provide a phase
             diagram of different GABAergic effects as a function of GABA
             reversal potential and glutamate conductance. We find that
             noisy inputs increase the range of EGABA for which the
             non-monotonic effect can be observed. We also construct a
             micro-circuit model of striatum to explain observed effects
             of GABAergic fast spiking interneurons on spiny projection
             neurons, including non-monotonicity, as well as the
             heterogeneity of the effects. Our work provides a
             mechanistic explanation of paradoxical effects of GABAergic
             synaptic inputs, with implications for understanding the
             effects of GABA in neural computation and
             development.},
   Doi = {10.1371/journal.pcbi.1010226},
   Key = {fds363743}
}
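
%% Illustration: the non-monotonic regime described above can be reproduced
%% with a single conductance-based LIF neuron when EGABA lies between the
%% reset and the spike threshold: a small GABA conductance speeds up the
%% approach to threshold, a large one pulls the neuron below it. All
%% parameter values below are illustrative assumptions.

Vl, Ve, Vth, Vr = -70.0, 0.0, -50.0, -60.0   # mV: leak, glut. reversal, threshold, reset
gl, tau_m = 10.0, 20.0                       # leak conductance (nS), membrane tau (ms)
dt, T = 0.05, 2000.0                         # integration step and duration (ms)

def firing_rate(g_gaba, E_gaba=-52.0, g_glu=8.0):
    """Steady firing rate (Hz) with constant synaptic conductances."""
    V, n = Vl, 0
    for _ in range(int(T / dt)):
        I = gl * (Vl - V) + g_glu * (Ve - V) + g_gaba * (E_gaba - V)
        V += dt * I / (gl * tau_m)           # membrane capacitance C = gl * tau_m
        if V >= Vth:
            V, n = Vr, n + 1
    return n / (T / 1000.0)

for g in (0.0, 10.0, 60.0, 120.0):           # rate rises, then collapses to zero
    print(f"gGABA = {g:6.1f} nS -> {firing_rate(g):6.1f} Hz")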

@article{fds367466,
   Author = {De Pittà, M and Brunel, N},
   Title = {Multiple forms of working memory emerge from
             synapse-astrocyte interactions in a neuron-glia network
             model.},
   Journal = {Proc Natl Acad Sci U S A},
   Volume = {119},
   Number = {43},
   Pages = {e2207912119},
   Year = {2022},
   Month = {October},
   url = {http://dx.doi.org/10.1073/pnas.2207912119},
   Abstract = {Persistent activity in populations of neurons, time-varying
             activity across a neural population, or activity-silent
             mechanisms carried out by hidden internal states of the
             neural population have been proposed as different mechanisms
             of working memory (WM). Whether these mechanisms could be
             mutually exclusive or occur in the same neuronal circuit
             remains, however, elusive, and so do their biophysical
             underpinnings. While WM is traditionally regarded to depend
             purely on neuronal mechanisms, cortical networks also
             include astrocytes that can modulate neural activity. We
             propose and investigate a network model that includes both
             neurons and glia and show that glia-synapse interactions can
             lead to multiple stable states of synaptic transmission.
             Depending on parameters, these interactions can lead in turn
             to distinct patterns of network activity that can serve as
             substrates for WM.},
   Doi = {10.1073/pnas.2207912119},
   Key = {fds367466}
}

@article{fds369113,
   Author = {Goldt, S and Krzakala, F and Zdeborová, L and Brunel,
             N},
   Title = {Bayesian reconstruction of memories stored in neural
             networks from their connectivity.},
   Journal = {PLoS Comput Biol},
   Volume = {19},
   Number = {1},
   Pages = {e1010813},
   Year = {2023},
   Month = {January},
   url = {https://arxiv.org/abs/2105.07416},
   Abstract = {The advent of comprehensive synaptic wiring diagrams of
             large neural circuits has created the field of connectomics
             and given rise to a number of open research questions. One
             such question is whether it is possible to reconstruct the
             information stored in a recurrent network of neurons, given
             its synaptic connectivity matrix. Here, we address this
             question by determining when solving such an inference
             problem is theoretically possible in specific attractor
             network models and by providing a practical algorithm to do
             so. The algorithm builds on ideas from statistical physics
             to perform approximate Bayesian inference and is amenable to
             exact analysis. We study its performance on three different
             models, compare the algorithm to standard algorithms such as
             PCA, and explore the limitations of reconstructing stored
             patterns from synaptic connectivity.},
   Doi = {10.1371/journal.pcbi.1010813},
   Key = {fds369113}
}
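
%% Illustration: the simplest baseline the paper compares against is PCA of
%% the connectivity matrix. For a Hopfield-type matrix, the leading
%% eigenvectors already approximately span the stored patterns (up to sign).
%% The sizes and noise level are assumptions; the paper's approximate
%% Bayesian message-passing algorithm is not reproduced here.

import numpy as np

rng = np.random.default_rng(5)
N, P = 1000, 5                        # neurons, stored patterns
xi = rng.choice([-1.0, 1.0], size=(P, N))

# Hopfield-type connectivity built from the patterns, plus synaptic noise
J = xi.T.dot(xi) / N + 0.1 * rng.standard_normal((N, N)) / np.sqrt(N)
J = (J + J.T) / 2.0                   # symmetrize
np.fill_diagonal(J, 0)

# PCA baseline: the top-P eigenvectors of J span the stored patterns
w, v = np.linalg.eigh(J)
top = v[:, -P:]                       # eigenvectors with the largest eigenvalues

# fraction of each pattern's norm inside the top-P subspace (near 1 = recovered)
proj = (np.linalg.norm(top.T.dot(xi.T), axis=0) ** 2) / N
print(np.round(proj, 3))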

@article{fds369744,
   Author = {Pereira-Obilinovic, U and Aljadeff, J and Brunel,
             N},
   Title = {Forgetting Leads to Chaos in Attractor Networks},
   Journal = {Physical Review X},
   Volume = {13},
   Number = {1},
   Year = {2023},
   Month = {January},
   url = {https://arxiv.org/abs/2112.00119},
   Abstract = {Attractor networks are an influential theory for memory
             storage in brain systems. This theory has recently been
             challenged by the observation of strong temporal variability
             in neuronal recordings during memory tasks. In this work, we
             study a sparsely connected attractor network where memories
             are learned according to a Hebbian synaptic plasticity rule.
             After recapitulating known results for the continuous,
             sparsely connected Hopfield model, we investigate a model in
             which new memories are learned continuously and old memories
             are forgotten, using an online synaptic plasticity rule. We
             show that for a forgetting timescale that optimizes storage
             capacity, the qualitative features of the network's memory
             retrieval dynamics are age dependent: most recent memories
             are retrieved as fixed-point attractors while older memories
             are retrieved as chaotic attractors characterized by strong
             heterogeneity and temporal fluctuations. Therefore,
             fixed-point and chaotic attractors coexist in the network
             phase space. The network presents a continuum of
             statistically distinguishable memory states, where chaotic
             fluctuations appear abruptly above a critical age and then
             increase gradually until the memory disappears. We develop a
             dynamical mean field theory to analyze the age-dependent
             dynamics and compare the theory with simulations of large
             networks. We compute the optimal forgetting timescale for
             which the number of stored memories is maximized. We found
             that the maximum age at which memories can be retrieved is
             given by an instability at which old memories destabilize
             and the network converges instead to a more recent one. Our
             numerical simulations show that a high degree of sparsity is
             necessary for the dynamical mean field theory to accurately
             predict the network capacity. To test the robustness and
             biological plausibility of our results, we study numerically
             the dynamics of a network with learning rules and transfer
             function inferred from in vivo data in the online learning
             scenario. We found that all aspects of the network's
             dynamics characterized analytically in the simpler model
             also hold in this model. These results are highly robust to
             noise. Finally, our theory provides specific predictions for
             delay response tasks with aging memoranda. In particular, it
             predicts a higher degree of temporal fluctuations in
             retrieval states associated with older memories, and it also
             predicts fluctuations should be faster in older memories.
             Overall, our theory of attractor networks that continuously
             learn new information at the price of forgetting old
             memories can account for the observed diversity of retrieval
             states in the cortex, and in particular, the strong temporal
             fluctuations of cortical activity.},
   Doi = {10.1103/PhysRevX.13.011009},
   Key = {fds369744}
}
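
%% Illustration: the online learning scenario above can be caricatured by a
%% Hopfield network with an exponentially forgetting Hebbian rule, in which
%% recent memories are retrievable and old ones fade. The forgetting factor,
%% sizes, and binary sign dynamics are simplifying assumptions; the paper
%% analyzes rate networks, where old memories become chaotic attractors, a
%% regime not captured by this sketch.

import numpy as np

rng = np.random.default_rng(6)
N, P, lam = 1500, 60, 0.97        # neurons, memories, forgetting factor (assumed)
xi = rng.choice([-1.0, 1.0], size=(P, N))

# Online palimpsest rule: each new memory is written while old ones decay
J = np.zeros((N, N))
for mu in range(P):
    J = lam * J + np.outer(xi[mu], xi[mu]) / N
np.fill_diagonal(J, 0)

def retrieval_overlap(mu, steps=30):
    s = np.where(rng.random(N) < 0.9, xi[mu], -xi[mu])   # noisy cue
    for _ in range(steps):
        s = np.sign(J.dot(s) + 1e-12)                    # avoid sign(0)
    return s.dot(xi[mu]) / N

for age in (0, 10, 30, 59):       # age 0 = most recently stored memory
    print(f"memory age {age:2d}: overlap {retrieval_overlap(P - 1 - age):+.2f}")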

@article{fds373502,
   Author = {Brunel, N and Monasson, R and Sompolinsky, H and van Hemmen,
             JL},
   Title = {From the Statistical Physics of Disordered Systems to
             Neuroscience},
   Pages = {499-521},
   Booktitle = {Spin Glass Theory and Far Beyond: Replica Symmetry Breaking
             after 40 Years},
   Year = {2023},
   Month = {January},
   ISBN = {9789811273919},
   url = {http://dx.doi.org/10.1142/9789811273926_0025},
   Abstract = {This chapter studies the bridges and differences between the
             statistical physics of disordered systems, as developed
             notably in the context of spin glass theory, and problems in
             neuroscience. In a first contribution (Sec. 25.1), Nicolas
             Brunel, Rémi Monasson and Haim Sompolinsky first recall the
             main lines of the statistical physics approach to neural
             networks models as developed in the 1980s and 1990s. They
             then survey more recent developments at the interface
             between statistical physics and neuroscience, including the
             inference of synaptic plasticity rules and the statistics of
             synaptic connectivity. Finally they present the Tempotron
             model for learning temporal patterns. In a second
             contribution (Sec. 25.2), Leo van Hemmen discusses the
             difference between real spin glasses, neuronal networks (of
             real biological neurons) and neural networks (of artificial
             neurons); with illustrations ranging from site-disorder
             models of spin glasses to temporal coding in neuronal
             networks and unlearning.},
   Doi = {10.1142/9789811273926_0025},
   Key = {fds373502}
}

@article{fds370618,
   Author = {Bachschmid-Romano, L and Hatsopoulos, NG and Brunel,
             N},
   Title = {Interplay between external inputs and recurrent dynamics
             during movement preparation and execution in a network model
             of motor cortex.},
   Journal = {Elife},
   Volume = {12},
   Year = {2023},
   Month = {May},
   url = {https://www.biorxiv.org/content/10.1101/2022.02.19.481140v1},
   Abstract = {The primary motor cortex has been shown to coordinate
             movement preparation and execution through computations in
             approximately orthogonal subspaces. The underlying network
             mechanisms, and the roles played by external and recurrent
             connectivity, are central open questions that need to be
             answered to understand the neural substrates of motor
             control. We develop a recurrent neural network model that
             recapitulates the temporal evolution of neuronal activity
             recorded from the primary motor cortex of a macaque monkey
             during an instructed delayed-reach task. In particular, it
             reproduces the observed dynamic patterns of covariation
             between neural activity and the direction of motion. We
             explore the hypothesis that the observed dynamics emerges
             from a synaptic connectivity structure that depends on the
             preferred directions of neurons in both preparatory and
             movement-related epochs, and we constrain the strength of
             both synaptic connectivity and external input parameters
             from data. While the model can reproduce neural activity for
             multiple combinations of the feedforward and recurrent
             connections, the solution that requires minimum external
             inputs is one where the observed patterns of covariance are
             shaped by external inputs during movement preparation, while
             they are dominated by strong direction-specific recurrent
             connectivity during movement execution. Our model also
             demonstrates that the way in which single-neuron tuning
             properties change over time can explain the level of
             orthogonality of preparatory and movement-related
             subspaces.},
   Doi = {10.7554/eLife.77690},
   Key = {fds370618}
}

@article{fds373674,
   Author = {Sanzeni, A and Palmigiano, A and Nguyen, TH and Luo, J and Nassi, JJ and Reynolds, JH and Histed, MH and Miller, KD and Brunel,
             N},
   Title = {Mechanisms underlying reshuffling of visual responses by
             optogenetic stimulation in mice and monkeys.},
   Journal = {Neuron},
   Volume = {111},
   Number = {24},
   Pages = {4102-4115.e9},
   Year = {2023},
   Month = {December},
   url = {https://www.biorxiv.org/content/10.1101/2022.07.13.499597v1},
   Abstract = {The ability to optogenetically perturb neural circuits opens
             an unprecedented window into mechanisms governing circuit
             function. We analyzed and theoretically modeled neuronal
             responses to visual and optogenetic inputs in mouse and
             monkey V1. In both species, optogenetic stimulation of
             excitatory neurons strongly modulated the activity of single
             neurons yet had weak or no effects on the distribution of
             firing rates across the population. Thus, the optogenetic
             inputs reshuffled firing rates across the network. Key
             statistics of mouse and monkey responses lay on a continuum,
             with mice/monkeys occupying the low-/high-rate regions,
             respectively. We show that neuronal reshuffling emerges
             generically in randomly connected excitatory/inhibitory
             networks, provided the coupling strength (combination of
             recurrent coupling and external input) is strong enough that
             powerful inhibitory feedback cancels the mean optogenetic
             input. A more realistic model, distinguishing tuned visual
             vs. untuned optogenetic input in a structured network,
             reduces the coupling strength needed to explain
             reshuffling.},
   Doi = {10.1016/j.neuron.2023.09.018},
   Key = {fds373674}
}

@article{fds375962,
   Author = {Feng, Y and Brunel, N},
   Title = {Attractor neural networks with double well
             synapses.},
   Journal = {PLoS Computational Biology},
   Volume = {20},
   Number = {2},
   Pages = {e1011354},
   Year = {2024},
   Month = {February},
   url = {http://dx.doi.org/10.1371/journal.pcbi.1011354},
   Abstract = {It is widely believed that memory storage depends on
             activity-dependent synaptic modifications. Classical studies
             of learning and memory in neural networks describe synaptic
             efficacy either as continuous or discrete. However, recent
             results suggest an intermediate scenario in which synaptic
             efficacy can be described by a continuous variable, but
             whose distribution is peaked around a small set of discrete
             values. Motivated by these results, we explored a model in
             which each synapse is described by a continuous variable
             that evolves in a potential with multiple minima. External
             inputs to the network can switch synapses from one potential
             well to another. Our analytical and numerical results show
             that this model can interpolate between models with discrete
             synapses, which correspond to the deep potential limit, and
             models in which synapses evolve in a single quadratic
             potential. We find that the storage capacity of the network
             with double well synapses exhibits a power-law dependence on
             the network size, rather than the logarithmic dependence
             observed in models with single well synapses. In addition,
             synapses with deeper potential wells lead to more robust
             information storage in the presence of noise. When memories
             are sparsely encoded, the scaling of the capacity with
             network size is similar to previously studied network models
             in the sparse coding limit.},
   Doi = {10.1371/journal.pcbi.1011354},
   Key = {fds375962}
}


%% Papers Submitted   
@article{fds360702,
   Author = {Sanzeni, A and Histed, M and Brunel, N},
   Title = {Emergence of irregular states in networks with
             conductance-based synapses},
   Journal = {Physical Review X},
   Year = {2021},
   Key = {fds360702}
}


%% Preprints   
@article{fds374524,
   Author = {Li, Y and An, X and Qian, Y and Xu, XH and Zhao, S and Mohan, H and Bachschmid-Romano, L and Brunel, N and Whishaw, IQ and Huang,
             ZJ},
   Title = {Cortical network and projection neuron types that articulate
             serial order in a skilled motor behavior.},
   Year = {2023},
   Month = {October},
   url = {http://dx.doi.org/10.1101/2023.10.25.563871},
   Doi = {10.1101/2023.10.25.563871},
   Key = {fds374524}
}