@misc{9752,
  abstract = {Redundancies and correlations in the responses of sensory neurons may seem to waste neural resources, but they can also carry cues about structured stimuli and may help the brain to correct for response errors. To investigate the effect of stimulus structure on redundancy in retina, we measured simultaneous responses from populations of retinal ganglion cells presented with natural and artificial stimuli that varied greatly in correlation structure; these stimuli and recordings are publicly available online. Responding to spatio-temporally structured stimuli such as natural movies, pairs of ganglion cells were modestly more correlated than in response to white noise checkerboards, but they were much less correlated than predicted by a non-adapting functional model of retinal response. Meanwhile, responding to stimuli with purely spatial correlations, pairs of ganglion cells showed increased correlations consistent with a static, non-adapting receptive field and nonlinearity. We found that in response to spatio-temporally correlated stimuli, ganglion cells had faster temporal kernels and tended to have stronger surrounds. These properties of individual cells, along with gain changes that opposed changes in effective contrast at the ganglion cell input, largely explained the pattern of pairwise correlations across stimuli where receptive field measurements were possible.},
  author = {Simmons, Kristina and Prentice, Jason and Tkačik, Gašper and Homann, Jan and Yee, Heather and Palmer, Stephanie and Nelson, Philip and Balasubramanian, Vijay},
  publisher = {Dryad},
  title = {{Data from: Transformation of stimulus correlations by the retina}},
  doi = {10.5061/dryad.246qg},
  year = {2014},
}

@article{2257,
  abstract = {Maximum entropy models are the least structured probability distributions that exactly reproduce a chosen set of statistics measured in an interacting network. Here we use this principle to construct probabilistic models which describe the correlated spiking activity of populations of up to 120 neurons in the salamander retina as it responds to natural movies. Already in groups as small as 10 neurons, interactions between spikes can no longer be regarded as small perturbations in an otherwise independent system; for 40 or more neurons pairwise interactions need to be supplemented by a global interaction that controls the distribution of synchrony in the population. Here we show that such “K-pairwise” models—being systematic extensions of the previously used pairwise Ising models—provide an excellent account of the data.
We explore the properties of the neural vocabulary by: 1) estimating its entropy, which constrains the population's capacity to represent visual information; 2) classifying activity patterns into a small set of metastable collective modes; 3) showing that the neural codeword ensembles are extremely inhomogeneous; 4) demonstrating that the state of individual neurons is highly predictable from the rest of the population, allowing the capacity for error correction.},
  author = {Tkacik, Gasper and Marre, Olivier and Amodei, Dario and Schneidman, Elad and Bialek, William and Berry, Michael},
  issn = {1553-734X},
  journal = {PLoS Computational Biology},
  number = {1},
  publisher = {Public Library of Science},
  title = {{Searching for collective behavior in a large network of sensory neurons}},
  doi = {10.1371/journal.pcbi.1003408},
  volume = {10},
  year = {2014},
}

@inbook{2413,
  abstract = {Progress in understanding the global brain dynamics has remained slow to date in large part because of the highly multiscale nature of brain activity. Indeed, normal brain dynamics is characterized by complex interactions between multiple levels: from the microscopic scale of single neurons to the mesoscopic level of local groups of neurons, and finally to the macroscopic level of the whole brain. Among the most difficult tasks are those of identifying which scales are significant for a given particular function and describing how the scales affect each other. It is important to realize that the scales of time and space are linked together, or even intertwined, and that causal inference is far more ambiguous between than within levels. We approach this problem from the perspective of our recent work on simultaneous recording from micro- and macroelectrodes in the human brain. We propose a physiological description of these multilevel interactions, based on phase–amplitude coupling of neuronal oscillations that operate at multiple frequencies and on different spatial scales. Specifically, the amplitude of the oscillations on a particular spatial scale is modulated by phasic variations in neuronal excitability induced by lower frequency oscillations that emerge on a larger spatial scale. Following this general principle, it is possible to scale up or scale down the multiscale brain dynamics. It is expected that large-scale network oscillations in the low-frequency range, mediating downward effects, may play an important role in attention and consciousness.},
  author = {Valderrama, Mario and Botella Soler, Vicente and Le Van Quyen, Michel},
  booktitle = {Multiscale Analysis and Nonlinear Dynamics: From Genes to the Brain},
  editor = {Pesenson, Misha Meyer},
  isbn = {9783527411986},
  publisher = {Wiley-VCH},
  title = {{Neuronal oscillations scale up and scale down the brain dynamics}},
  doi = {10.1002/9783527671632.ch08},
  year = {2013},
}

@article{2818,
  abstract = {Models of neural responses to stimuli with complex spatiotemporal correlation structure often assume that neurons are selective for only a small number of linear projections of a potentially high-dimensional input. In this review, we explore recent modeling approaches where the neural response depends on the quadratic form of the input rather than on its linear projection, that is, the neuron is sensitive to the local covariance structure of the signal preceding the spike.
To infer this quadratic dependence in the presence of arbitrary (e.g., naturalistic) stimulus distribution, we review several inference methods, focusing in particular on two information theory–based approaches (maximization of stimulus energy and of noise entropy) and two likelihood-based approaches (Bayesian spike-triggered covariance and extensions of generalized linear models). We analyze the formal relationship between the likelihood-based and information-based approaches to demonstrate how they lead to consistent inference. We demonstrate the practical feasibility of these procedures by using model neurons responding to a flickering variance stimulus.},
  author = {Rajan, Kanaka and Marre, Olivier and Tkacik, Gasper},
  journal = {Neural Computation},
  number = {7},
  pages = {1661 -- 1692},
  publisher = {MIT Press},
  title = {{Learning quadratic receptive fields from neural responses to natural stimuli}},
  doi = {10.1162/NECO_a_00463},
  volume = {25},
  year = {2013},
}

@article{2850,
  abstract = {Recent work emphasizes that the maximum entropy principle provides a bridge between statistical mechanics models for collective behavior in neural networks and experiments on networks of real neurons. Most of this work has focused on capturing the measured correlations among pairs of neurons. Here we suggest an alternative, constructing models that are consistent with the distribution of global network activity, i.e. the probability that K out of N cells in the network generate action potentials in the same small time bin. The inverse problem that we need to solve in constructing the model is analytically tractable, and provides a natural 'thermodynamics' for the network in the limit of large N. We analyze the responses of neurons in a small patch of the retina to naturalistic stimuli, and find that the implied thermodynamics is very close to an unusual critical point, in which the entropy (in proper units) is exactly equal to the energy.},
  author = {Tkacik, Gasper and Marre, Olivier and Mora, Thierry and Amodei, Dario and Berry, Michael and Bialek, William},
  journal = {Journal of Statistical Mechanics: Theory and Experiment},
  number = {3},
  publisher = {IOP Publishing Ltd.},
  title = {{The simplest maximum entropy model for collective behavior in a neural network}},
  doi = {10.1088/1742-5468/2013/03/P03011},
  volume = {2013},
  year = {2013},
}

@article{2851,
  abstract = {The number of possible activity patterns in a population of neurons grows exponentially with the size of the population. Typical experiments explore only a tiny fraction of the large space of possible activity patterns in the case of populations with more than 10 or 20 neurons. It is thus impossible, in this undersampled regime, to estimate the probabilities with which most of the activity patterns occur. As a result, the corresponding entropy - which is a measure of the computational power of the neural population - cannot be estimated directly. We propose a simple scheme for estimating the entropy in the undersampled regime, which bounds its value from both below and above. The lower bound is the usual 'naive' entropy of the experimental frequencies. The upper bound results from a hybrid approximation of the entropy which makes use of the naive estimate, a maximum entropy fit, and a coverage adjustment. We apply our simple scheme to artificial data, in order to check their accuracy; we also compare its performance to those of several previously defined entropy estimators.
We then apply it to actual measurements of neural activity in populations with up to 100 cells. Finally, we discuss the similarities and differences between the proposed simple estimation scheme and various earlier methods.},
  author = {Berry, Michael and Tkacik, Gasper and Dubuis, Julien and Marre, Olivier and Da Silveira, Ravá},
  journal = {Journal of Statistical Mechanics: Theory and Experiment},
  number = {3},
  publisher = {IOP Publishing Ltd.},
  title = {{A simple method for estimating the entropy of neural activity}},
  doi = {10.1088/1742-5468/2013/03/P03015},
  volume = {2013},
  year = {2013},
}

@article{2863,
  abstract = {Neural populations encode information about their stimulus in a collective fashion, by joint activity patterns of spiking and silence. A full account of this mapping from stimulus to neural activity is given by the conditional probability distribution over neural codewords given the sensory input. For large populations, direct sampling of these distributions is impossible, and so we must rely on constructing appropriate models. We show here that in a population of 100 retinal ganglion cells in the salamander retina responding to temporal white-noise stimuli, dependencies between cells play an important encoding role. We introduce the stimulus-dependent maximum entropy (SDME) model—a minimal extension of the canonical linear-nonlinear model of a single neuron, to a pairwise-coupled neural population. We find that the SDME model gives a more accurate account of single cell responses and in particular significantly outperforms uncoupled models in reproducing the distributions of population codewords emitted in response to a stimulus. We show how the SDME model, in conjunction with static maximum entropy models of population vocabulary, can be used to estimate information-theoretic quantities like average surprise and information transmission in a neural population.},
  author = {Granot Atedgi, Einat and Tkacik, Gasper and Segev, Ronen and Schneidman, Elad},
  journal = {PLoS Computational Biology},
  number = {3},
  publisher = {Public Library of Science},
  title = {{Stimulus-dependent maximum entropy models of neural population codes}},
  doi = {10.1371/journal.pcbi.1002922},
  volume = {9},
  year = {2013},
}

@article{2861,
  abstract = {We consider a two-parameter family of piecewise linear maps in which the moduli of the two slopes take different values. We provide numerical evidence of the existence of some parameter regions in which the Lyapunov exponent and the topological entropy remain constant. Analytical proof of this phenomenon is also given for certain cases. Surprisingly, however, the systems with that property are not conjugate, as we prove by using kneading theory.},
  author = {Botella Soler, Vicente and Oteo, José and Ros, Javier and Glendinning, Paul},
  journal = {Journal of Physics A: Mathematical and Theoretical},
  number = {12},
  publisher = {IOP Publishing Ltd.},
  title = {{Lyapunov exponent and topological entropy plateaus in piecewise linear maps}},
  doi = {10.1088/1751-8113/46/12/125101},
  volume = {46},
  year = {2013},
}

@article{2913,
  abstract = {The ability of an organism to distinguish between various stimuli is limited by the structure and noise in the population code of its sensory neurons. Here we infer a distance measure on the stimulus space directly from the recorded activity of 100 neurons in the salamander retina.
In contrast to previously used measures of stimulus similarity, this "neural metric" tells us how distinguishable a pair of stimulus clips is to the retina, based on the similarity between the induced distributions of population responses. We show that the retinal distance strongly deviates from Euclidean, or any static metric, yet has a simple structure: we identify the stimulus features that the neural population is jointly sensitive to, and show the support-vector-machine-like kernel function relating the stimulus and neural response spaces. We show that the non-Euclidean nature of the retinal distance has important consequences for neural decoding.},
  author = {Tkacik, Gasper and Granot Atedgi, Einat and Segev, Ronen and Schneidman, Elad},
  journal = {Physical Review Letters},
  number = {5},
  publisher = {American Physical Society},
  title = {{Retinal metric: a stimulus distance measure derived from population neural responses}},
  doi = {10.1103/PhysRevLett.110.058104},
  volume = {110},
  year = {2013},
}

@article{3261,
  abstract = {Cells in a developing embryo have no direct way of "measuring" their physical position. Through a variety of processes, however, the expression levels of multiple genes come to be correlated with position, and these expression levels thus form a code for "positional information." We show how to measure this information, in bits, using the gap genes in the Drosophila embryo as an example. Individual genes carry nearly two bits of information, twice as much as expected if the expression patterns consisted only of on/off domains separated by sharp boundaries. Taken together, four gap genes carry enough information to define a cell's location with an error bar of ~1% along the anterior-posterior axis of the embryo. This precision is nearly enough for each cell to have a unique identity, which is the maximum information the system can use, and is nearly constant along the length of the embryo. We argue that this constancy is a signature of optimality in the transmission of information from primary morphogen inputs to the output of the gap gene network.},
  author = {Dubuis, Julien and Tkacik, Gasper and Wieschaus, Eric and Gregor, Thomas and Bialek, William},
  journal = {Proceedings of the National Academy of Sciences},
  number = {41},
  pages = {16301 -- 16308},
  publisher = {National Academy of Sciences},
  title = {{Positional information, in bits}},
  doi = {10.1073/pnas.1315642110},
  volume = {110},
  year = {2013},
}

@article{499,
  abstract = {Exposure of an isogenic bacterial population to a cidal antibiotic typically fails to eliminate a small fraction of refractory cells. Historically, fractional killing has been attributed to infrequently dividing or nondividing "persisters." Using microfluidic cultures and time-lapse microscopy, we found that Mycobacterium smegmatis persists by dividing in the presence of the drug isoniazid (INH). Although persistence in these studies was characterized by stable numbers of cells, this apparent stability was actually a dynamic state of balanced division and death. Single cells expressed catalase-peroxidase (KatG), which activates INH, in stochastic pulses that were negatively correlated with cell survival. These behaviors may reflect epigenetic effects, because KatG pulsing and death were correlated between sibling cells.
Selection of lineages characterized by infrequent KatG pulsing could allow nonresponsive adaptation during prolonged drug exposure.},
  author = {Wakamoto, Yuichi and Dhar, Neeraj and Chait, Remy P and Schneider, Katrin and Signorino Gelo, François and Leibler, Stanislas and McKinney, John},
  journal = {Science},
  number = {6115},
  pages = {91 -- 95},
  publisher = {American Association for the Advancement of Science},
  title = {{Dynamic persistence of antibiotic-stressed mycobacteria}},
  doi = {10.1126/science.1229858},
  volume = {339},
  year = {2013},
}

@article{2277,
  abstract = {Redundancies and correlations in the responses of sensory neurons may seem to waste neural resources, but they can also carry cues about structured stimuli and may help the brain to correct for response errors. To investigate the effect of stimulus structure on redundancy in retina, we measured simultaneous responses from populations of retinal ganglion cells presented with natural and artificial stimuli that varied greatly in correlation structure; these stimuli and recordings are publicly available online. Responding to spatio-temporally structured stimuli such as natural movies, pairs of ganglion cells were modestly more correlated than in response to white noise checkerboards, but they were much less correlated than predicted by a non-adapting functional model of retinal response. Meanwhile, responding to stimuli with purely spatial correlations, pairs of ganglion cells showed increased correlations consistent with a static, non-adapting receptive field and nonlinearity. We found that in response to spatio-temporally correlated stimuli, ganglion cells had faster temporal kernels and tended to have stronger surrounds. These properties of individual cells, along with gain changes that opposed changes in effective contrast at the ganglion cell input, largely explained the pattern of pairwise correlations across stimuli where receptive field measurements were possible.},
  author = {Simmons, Kristina and Prentice, Jason and Tkacik, Gasper and Homann, Jan and Yee, Heather and Palmer, Stephanie and Nelson, Philip and Balasubramanian, Vijay},
  journal = {PLoS Computational Biology},
  number = {12},
  publisher = {Public Library of Science},
  title = {{Transformation of stimulus correlations by the retina}},
  doi = {10.1371/journal.pcbi.1003344},
  volume = {9},
  year = {2013},
}

@article{2914,
  abstract = {The scale invariance of natural images suggests an analogy to the statistical mechanics of physical systems at a critical point. Here we examine the distribution of pixels in small image patches and show how to construct the corresponding thermodynamics. We find evidence for criticality in a diverging specific heat, which corresponds to large fluctuations in how "surprising" we find individual images, and in the quantitative form of the entropy vs energy.
We identify special image configurations as local energy minima and show that average patches within each basin are interpretable as lines and edges in all orientations.},
  author = {Stephens, Greg and Mora, Thierry and Tkacik, Gasper and Bialek, William},
  journal = {Physical Review Letters},
  number = {1},
  publisher = {American Physical Society},
  title = {{Statistical thermodynamics of natural images}},
  doi = {10.1103/PhysRevLett.110.018701},
  volume = {110},
  year = {2013},
}

@article{3262,
  abstract = {Living cells must control the reading out or "expression" of information encoded in their genomes, and this regulation often is mediated by transcription factors--proteins that bind to DNA and either enhance or repress the expression of nearby genes. But the expression of transcription factor proteins is itself regulated, and many transcription factors regulate their own expression in addition to responding to other input signals. Here we analyze the simplest of such self-regulatory circuits, asking how parameters can be chosen to optimize information transmission from inputs to outputs in the steady state. Some nonzero level of self-regulation is almost always optimal, with self-activation dominant when transcription factor concentrations are low and self-repression dominant when concentrations are high. In steady state the optimal self-activation is never strong enough to induce bistability, although there is a limit in which the optimal parameters are very close to the critical point.},
  author = {Tkacik, Gasper and Walczak, Aleksandra and Bialek, William},
  journal = {Physical Review E: Statistical, Nonlinear, and Soft Matter Physics},
  number = {4},
  publisher = {American Physical Society},
  title = {{Optimizing information flow in small genetic networks. III. A self-interacting gene}},
  doi = {10.1103/PhysRevE.85.041903},
  volume = {85},
  year = {2012},
}

@article{3274,
  abstract = {A boundary element model of a tunnel running through horizontally layered soil with anisotropic material properties is presented. Since there is no analytical fundamental solution for wave propagation inside a layered orthotropic medium in 3D, the fundamental displacements and stresses have to be calculated numerically. In our model this is done in the Fourier domain with respect to space and time. The assumption of a straight tunnel with infinite extension in the $x$ direction makes it possible to decouple the system for every wave number $k_x$, leading to a 2.5D problem, which is suited for parallel computation. The special form of the fundamental solution, resulting from our Fourier ansatz, and the fact that the calculation of the boundary integral equation is performed in the Fourier domain enhance the stability and efficiency of the numerical calculations.},
  author = {Rieckh, Georg and Kreuzer, Wolfgang and Waubke, Holger and Balazs, Peter},
  journal = {Engineering Analysis with Boundary Elements},
  number = {6},
  pages = {960 -- 967},
  publisher = {Elsevier},
  title = {{A 2.5D-Fourier-BEM model for vibrations in a tunnel running through layered anisotropic soil}},
  doi = {10.1016/j.enganabound.2011.12.014},
  volume = {36},
  year = {2012},
}

@article{3374,
  abstract = {Genetic regulatory networks enable cells to respond to changes in internal and external conditions by dynamically coordinating their gene expression profiles.
Our ability to make quantitative measurements in these biochemical circuits has deepened our understanding of what kinds of computations genetic regulatory networks can perform, and with what reliability. These advances have motivated researchers to look for connections between the architecture and function of genetic regulatory networks. Transmitting information between a network's inputs and outputs has been proposed as one such possible measure of function, relevant in certain biological contexts. Here we summarize recent developments in the application of information theory to gene regulatory networks. We first review basic concepts in information theory necessary for understanding recent work. We then discuss the functional complexity of gene regulation, which arises from the molecular nature of the regulatory interactions. We end by reviewing some experiments that support the view that genetic networks responsible for early development of multicellular organisms might be maximizing transmitted 'positional information'.},
  author = {Tkacik, Gasper and Walczak, Aleksandra},
  journal = {Journal of Physics: Condensed Matter},
  number = {15},
  publisher = {IOP Publishing Ltd.},
  title = {{Information transmission in genetic regulatory networks: a review}},
  doi = {10.1088/0953-8984/23/15/153102},
  volume = {23},
  year = {2011},
}

@article{3384,
  abstract = {Here we introduce a database of calibrated natural images publicly available through an easy-to-use web interface. Using a Nikon D70 digital SLR camera, we acquired about six-megapixel images of the Okavango Delta of Botswana, a tropical savanna habitat similar to where the human eye is thought to have evolved. Some sequences of images were captured unsystematically while following a baboon troop, while others were designed to vary a single parameter such as aperture, object distance, time of day or position on the horizon. Images are available in the raw RGB format and in grayscale. Images are also available in units relevant to the physiology of human cone photoreceptors, where pixel values represent the expected number of photoisomerizations per second for cones sensitive to long (L), medium (M) and short (S) wavelengths. This database is distributed under a Creative Commons Attribution-Noncommercial Unported license to facilitate research in computer vision, psychophysics of perception, and visual neuroscience.},
  author = {Tkacik, Gasper and Garrigan, Patrick and Ratliff, Charles and Milcinski, Grega and Klein, Jennifer and Seyfarth, Lucia and Sterling, Peter and Brainard, David and Balasubramanian, Vijay},
  journal = {PLoS ONE},
  number = {6},
  publisher = {Public Library of Science},
  title = {{Natural images from the birthplace of the human eye}},
  doi = {10.1371/journal.pone.0020409},
  volume = {6},
  year = {2011},
}