[{"title":"Interplay between syllable duration and pitch during whistle matching in wild nightingales","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"month":"01","department":[{"_id":"GradSch"},{"_id":"TiVo"}],"date_updated":"2026-01-20T07:33:32Z","publication_status":"epub_ahead","doi":"10.1016/j.cub.2025.12.025","publication_identifier":{"issn":["0960-9822"],"eissn":["1879-0445"]},"publisher":"Elsevier","article_processing_charge":"Yes (in subscription journal)","citation":{"ama":"Calderon Garcia JS, Costalunga G, Vogels TP, Vallentin D. Interplay between syllable duration and pitch during whistle matching in wild nightingales. <i>Current Biology</i>. 2026. doi:<a href=\"https://doi.org/10.1016/j.cub.2025.12.025\">10.1016/j.cub.2025.12.025</a>","chicago":"Calderon Garcia, Juan Sebastian, Giacomo Costalunga, Tim P Vogels, and Daniela Vallentin. “Interplay between Syllable Duration and Pitch during Whistle Matching in Wild Nightingales.” <i>Current Biology</i>. Elsevier, 2026. <a href=\"https://doi.org/10.1016/j.cub.2025.12.025\">https://doi.org/10.1016/j.cub.2025.12.025</a>.","short":"J.S. Calderon Garcia, G. Costalunga, T.P. Vogels, D. Vallentin, Current Biology (2026).","apa":"Calderon Garcia, J. S., Costalunga, G., Vogels, T. P., &#38; Vallentin, D. (2026). Interplay between syllable duration and pitch during whistle matching in wild nightingales. <i>Current Biology</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.cub.2025.12.025\">https://doi.org/10.1016/j.cub.2025.12.025</a>","ieee":"J. S. Calderon Garcia, G. Costalunga, T. P. Vogels, and D. Vallentin, “Interplay between syllable duration and pitch during whistle matching in wild nightingales,” <i>Current Biology</i>. Elsevier, 2026.","mla":"Calderon Garcia, Juan Sebastian, et al. 
“Interplay between Syllable Duration and Pitch during Whistle Matching in Wild Nightingales.” <i>Current Biology</i>, Elsevier, 2026, doi:<a href=\"https://doi.org/10.1016/j.cub.2025.12.025\">10.1016/j.cub.2025.12.025</a>.","ista":"Calderon Garcia JS, Costalunga G, Vogels TP, Vallentin D. 2026. Interplay between syllable duration and pitch during whistle matching in wild nightingales. Current Biology."},"date_published":"2026-01-12T00:00:00Z","year":"2026","OA_type":"hybrid","main_file_link":[{"url":"https://doi.org/10.1016/j.cub.2025.12.025","open_access":"1"}],"abstract":[{"lang":"eng","text":"During complex vocal interactions, different features of acoustic stimuli are integrated to produce appropriate vocal responses,1 such as copying sounds during vocal matching behavior in some animals.2,3,4,5,6,7,8,9,10,11,12 However, little is known about the interplay and possible trade-offs between the different temporal and spectral acoustic features during these vocal exchanges.2,13,14 Nightingales can flexibly match the pitch of their tonal “whistle songs” in real time during counter-singing duels.15,16 Here, we show that the syllable duration of whistle playbacks could alter the song responses of wild nightingales, causing their whistle duration distribution to shift toward the presented stimulus duration. When exposed to whistle playbacks featuring unnatural combinations of pitch and duration, nightingales demonstrate a flexible trade-off between pitch matching and temporal imitation, yet they are constrained by their vocal repertoire. They selectively adapted their vocal responses to approximate these novel stimuli, aligning them with their natural whistle repertoire. We developed a computational model of nightingale whistle-matching behavior that revealed a hierarchical organization of acoustic feature production. 
During whistle matching, the feature integration process is constrained by the duration of syllables, and pitch matching follows within this temporal framework, forcing a trade-off between the two features. Our findings reveal a complex interplay between the spectral and temporal domains that shapes song-matching behavior."}],"article_type":"original","project":[{"call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","grant_number":"819603"}],"quality_controlled":"1","oa_version":"Published Version","day":"12","ec_funded":1,"type":"journal_article","acknowledgement":"We would like to thank J. Benichov and N. Hein for their help with fieldwork; M. Ramadas for helping with the segmentation analysis; T. Eliav, C. Chintaluri, G. Tkacik, and A. Navas for providing helpful comments to the project and manuscript; and A. Costalunga for the drawings of nightingales. 
Funding sources: The Joachim Herz Stiftung Add-on Fellowships for Interdisciplinary Life Science, awarded to G.C.; the ERC Consolidator Grant 819603 SYNAPSEEK, awarded to T.P.V.; and DFG Research Unit 5768–532521431, DFG Research Grant-547921981, DFG SFB 1315–327654276, and the ERC Starting Grant 757459 MIDNIGHT, awarded to D.V.","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","author":[{"id":"1271b54b-dbcd-11ea-9d1d-d92da838fe2c","full_name":"Calderon Garcia, Juan Sebastian","last_name":"Calderon Garcia","first_name":"Juan Sebastian"},{"full_name":"Costalunga, Giacomo","last_name":"Costalunga","first_name":"Giacomo"},{"last_name":"Vogels","full_name":"Vogels, Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","orcid":"0000-0003-3295-6181","first_name":"Tim P"},{"first_name":"Daniela","last_name":"Vallentin","full_name":"Vallentin, Daniela"}],"OA_place":"publisher","_id":"20986","oa":1,"ddc":["570","577"],"has_accepted_license":"1","language":[{"iso":"eng"}],"PlanS_conform":"1","publication":"Current Biology","scopus_import":"1","status":"public","date_created":"2026-01-14T12:00:29Z"},{"project":[{"grant_number":"819603","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"ec_funded":1,"oa_version":"Published Version","day":"01","volume":25,"OA_type":"gold","article_type":"original","abstract":[{"lang":"eng","text":"Whether or not the neuron emits a spike in response to stimulation by an excitatory current pulse is determined by a strength-duration curve (SDC) for the pulse parameters. The SDC is a dependence of the minimal pulse amplitude required to elicit the spiking response on either the pulse duration or its decay time. Excitatory neurons affect the others through pulses of excitatory postsynaptic current. 
A simple yet plausible approximation for the time course of such a pulse is the alpha function, with linear rise at the start and exponential decay at the end. However, an exact analytical SDC for this case is hitherto not known, even for the leaky integrate-and-fire (LIF) neuron, the simplest spiking neuron model used in practice. We have obtained general SDC equations for the LIF neuron. Using the Lambert W function — a widely-implemented special function, we have found the exact analytical SDC for the spiking response of the LIF neuron stimulated by an excitatory current pulse in the form of the alpha function. To compare results in a unified way, we have also derived the analytical SDCs for (i) rectangular pulse, (ii) ascending ramp pulse, and (iii) instantly rising and exponentially decaying pulse. In the limit of no leakage, we show that the SDC is reduced to the classical hyperbola for all considered cases."}],"publisher":"Elsevier","article_processing_charge":"Yes","publication_identifier":{"eissn":["2590-0374"]},"doi":"10.1016/j.rinam.2025.100548","publication_status":"published","year":"2025","date_published":"2025-02-01T00:00:00Z","citation":{"ista":"Paraskevov A. 2025. Analytical strength-duration curve for the spiking response of the LIF neuron to an alpha-function-shaped excitatory current pulse. Results in Applied Mathematics. 25, 100548.","mla":"Paraskevov, Alexander. “Analytical Strength-Duration Curve for the Spiking Response of the LIF Neuron to an Alpha-Function-Shaped Excitatory Current Pulse.” <i>Results in Applied Mathematics</i>, vol. 25, 100548, Elsevier, 2025, doi:<a href=\"https://doi.org/10.1016/j.rinam.2025.100548\">10.1016/j.rinam.2025.100548</a>.","ieee":"A. Paraskevov, “Analytical strength-duration curve for the spiking response of the LIF neuron to an alpha-function-shaped excitatory current pulse,” <i>Results in Applied Mathematics</i>, vol. 25. Elsevier, 2025.","apa":"Paraskevov, A. (2025). 
Analytical strength-duration curve for the spiking response of the LIF neuron to an alpha-function-shaped excitatory current pulse. <i>Results in Applied Mathematics</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.rinam.2025.100548\">https://doi.org/10.1016/j.rinam.2025.100548</a>","short":"A. Paraskevov, Results in Applied Mathematics 25 (2025).","chicago":"Paraskevov, Alexander. “Analytical Strength-Duration Curve for the Spiking Response of the LIF Neuron to an Alpha-Function-Shaped Excitatory Current Pulse.” <i>Results in Applied Mathematics</i>. Elsevier, 2025. <a href=\"https://doi.org/10.1016/j.rinam.2025.100548\">https://doi.org/10.1016/j.rinam.2025.100548</a>.","ama":"Paraskevov A. Analytical strength-duration curve for the spiking response of the LIF neuron to an alpha-function-shaped excitatory current pulse. <i>Results in Applied Mathematics</i>. 2025;25. doi:<a href=\"https://doi.org/10.1016/j.rinam.2025.100548\">10.1016/j.rinam.2025.100548</a>"},"article_number":"100548","title":"Analytical strength-duration curve for the spiking response of the LIF neuron to an alpha-function-shaped excitatory current pulse","date_updated":"2025-04-14T07:54:31Z","department":[{"_id":"TiVo"}],"month":"02","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"publication":"Results in Applied Mathematics","language":[{"iso":"eng"}],"has_accepted_license":"1","date_created":"2025-02-23T23:01:55Z","status":"public","scopus_import":"1","intvolume":"        
25","ddc":["570","510"],"file":[{"access_level":"open_access","success":1,"file_size":853322,"relation":"main_file","date_created":"2025-02-24T13:18:47Z","checksum":"58fd02e951857859f39d06661a27bcc9","file_id":"19083","creator":"dernst","file_name":"2025_ResultsApplMath_Paraskevov.pdf","date_updated":"2025-02-24T13:18:47Z","content_type":"application/pdf"}],"DOAJ_listed":"1","_id":"19068","OA_place":"publisher","related_material":{"link":[{"relation":"software","url":"https://doi.org/10.6084/m9.figshare.24081849"}]},"oa":1,"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","author":[{"last_name":"Paraskevov","id":"d05e3c56-9262-11ed-9231-be692464e5ac","full_name":"Paraskevov, Alexander","first_name":"Alexander"}],"acknowledgement":"The author thanks T.S. Zemskova and N.D. Efimova for verifying some of the results. This work was supported by a European Research Council Consolidator Grant (SYNAPSEEK, 819603, to Tim P. Vogels).\r\nThe Supplementary Material for this article contains (i) the data for graphs in Figure 1 and (ii) ready-to-use MATLAB codes for reproducing the data. It is available online at https://doi.org/10.6084/m9.figshare.24081849.","type":"journal_article","corr_author":"1","file_date_updated":"2025-02-24T13:18:47Z"},{"quality_controlled":"1","external_id":{"pmid":["39217864"],"isi":["001316474600001"]},"project":[{"call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","grant_number":"819603"}],"day":"01","volume":180,"oa_version":"Published Version","ec_funded":1,"OA_type":"hybrid","abstract":[{"text":"Thin pancake-like neuronal networks cultured on top of a planar microelectrode array have been extensively tried out in neuroengineering, as a substrate for the mobile robot’s control unit, i.e., as a cyborg’s brain. Most of these attempts failed due to intricate self-organizing dynamics in the neuronal systems. 
In particular, the networks may exhibit an emergent spatial map of steady nucleation sites (“n-sites”) of spontaneous population spikes. Being unpredictable and independent of the surface electrode locations, the n-sites drastically change local ability of the network to generate spikes. Here, using a spiking neuronal network model with generative spatially-embedded connectome, we systematically show in simulations that the number, location, and relative activity of spontaneously formed n-sites (“the vitals”) crucially depend on the samplings of three distributions: (1) the network distribution of neuronal excitability, (2) the distribution of connections between neurons of the network, and (3) the distribution of maximal amplitudes of a single synaptic current pulse. Moreover, blocking the dynamics of a small fraction (about 4%) of non-pacemaker neurons having the highest excitability was enough to completely suppress the occurrence of population spikes and their n-sites. This key result is explained theoretically. Remarkably, the n-sites occur taking into account only short-term synaptic plasticity, i.e., without a Hebbian-type plasticity. As the spiking network model used in this study is strictly deterministic, all simulation results can be accurately reproduced. The model, which has already demonstrated a very high richness-to-complexity ratio, can also be directly extended into the three-dimensional case, e.g., for targeting peculiarities of spiking dynamics in cerebral (or brain) organoids. 
We recommend the model as an excellent illustrative tool for teaching network-level computational neuroscience, complementing a few benchmark models.","lang":"eng"}],"article_type":"original","publication_status":"published","article_processing_charge":"Yes (via OA deal)","publisher":"Elsevier","publication_identifier":{"eissn":["1879-2782"],"issn":["0893-6080"]},"doi":"10.1016/j.neunet.2024.106589","year":"2024","date_published":"2024-12-01T00:00:00Z","citation":{"ista":"Zendrikov D, Paraskevov A. 2024. The vitals for steady nucleation maps of spontaneous spiking coherence in autonomous two-dimensional neuronal networks. Neural Networks. 180, 106589.","ieee":"D. Zendrikov and A. Paraskevov, “The vitals for steady nucleation maps of spontaneous spiking coherence in autonomous two-dimensional neuronal networks,” <i>Neural Networks</i>, vol. 180. Elsevier, 2024.","mla":"Zendrikov, Dmitrii, and Alexander Paraskevov. “The Vitals for Steady Nucleation Maps of Spontaneous Spiking Coherence in Autonomous Two-Dimensional Neuronal Networks.” <i>Neural Networks</i>, vol. 180, 106589, Elsevier, 2024, doi:<a href=\"https://doi.org/10.1016/j.neunet.2024.106589\">10.1016/j.neunet.2024.106589</a>.","short":"D. Zendrikov, A. Paraskevov, Neural Networks 180 (2024).","apa":"Zendrikov, D., &#38; Paraskevov, A. (2024). The vitals for steady nucleation maps of spontaneous spiking coherence in autonomous two-dimensional neuronal networks. <i>Neural Networks</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.neunet.2024.106589\">https://doi.org/10.1016/j.neunet.2024.106589</a>","ama":"Zendrikov D, Paraskevov A. The vitals for steady nucleation maps of spontaneous spiking coherence in autonomous two-dimensional neuronal networks. <i>Neural Networks</i>. 2024;180. doi:<a href=\"https://doi.org/10.1016/j.neunet.2024.106589\">10.1016/j.neunet.2024.106589</a>","chicago":"Zendrikov, Dmitrii, and Alexander Paraskevov. 
“The Vitals for Steady Nucleation Maps of Spontaneous Spiking Coherence in Autonomous Two-Dimensional Neuronal Networks.” <i>Neural Networks</i>. Elsevier, 2024. <a href=\"https://doi.org/10.1016/j.neunet.2024.106589\">https://doi.org/10.1016/j.neunet.2024.106589</a>."},"title":"The vitals for steady nucleation maps of spontaneous spiking coherence in autonomous two-dimensional neuronal networks","isi":1,"article_number":"106589","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"date_updated":"2025-09-08T09:12:20Z","month":"12","department":[{"_id":"TiVo"}],"has_accepted_license":"1","publication":"Neural Networks","language":[{"iso":"eng"}],"status":"public","date_created":"2024-09-08T22:01:10Z","scopus_import":"1","pmid":1,"intvolume":"       180","file":[{"access_level":"open_access","success":1,"relation":"main_file","file_size":6162281,"date_created":"2025-01-13T08:26:08Z","checksum":"6a194323234e01d4ae725f674529cdb1","creator":"dernst","file_id":"18825","file_name":"2024_NeuralNetworks_Zendrikov.pdf","date_updated":"2025-01-13T08:26:08Z","content_type":"application/pdf"}],"ddc":["570"],"_id":"17886","OA_place":"publisher","oa":1,"user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","acknowledgement":"A.P. is grateful to Chaitanya Chintaluri, Douglas Feitosa Tomé, and Tim P. Vogels for useful discussions. This work was supported by a European Research Council Consolidator Grant (SYNAPSEEK, 819603, to Tim P. 
Vogels).","author":[{"first_name":"Dmitrii","last_name":"Zendrikov","full_name":"Zendrikov, Dmitrii"},{"id":"d05e3c56-9262-11ed-9231-be692464e5ac","full_name":"Paraskevov, Alexander","last_name":"Paraskevov","first_name":"Alexander"}],"type":"journal_article","file_date_updated":"2025-01-13T08:26:08Z","corr_author":"1"},{"OA_type":"hybrid","article_type":"original","abstract":[{"lang":"eng","text":"De novo heterozygous variants in KCNC2 encoding the voltage-gated potassium (K+) channel subunit Kv3.2 are a recently described cause of developmental and epileptic encephalopathy (DEE). A de novo variant in KCNC2 c.374G > A (p.Cys125Tyr) was identified via exome sequencing in a patient with DEE. Relative to wild-type Kv3.2, Kv3.2-p.Cys125Tyr induces K+ currents exhibiting a large hyperpolarizing shift in the voltage dependence of activation, accelerated activation, and delayed deactivation consistent with a relative stabilization of the open conformation, along with increased current density. Leveraging the cryogenic electron microscopy (cryo-EM) structure of Kv3.1, molecular dynamic simulations suggest that a strong π-π stacking interaction between the variant Tyr125 and Tyr156 in the α-6 helix of the T1 domain promotes a relative stabilization of the open conformation of the channel, which underlies the observed gain of function. 
A multicompartment computational model of a Kv3-expressing parvalbumin-positive cerebral cortex fast-spiking γ-aminobutyric acidergic (GABAergic) interneuron (PV-IN) demonstrates how the Kv3.2-Cys125Tyr variant impairs neuronal excitability and dysregulates inhibition in cerebral cortex circuits to explain the resulting epilepsy."}],"project":[{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"quality_controlled":"1","external_id":{"pmid":["38194456"],"isi":["001167401000001"]},"ec_funded":1,"day":"16","oa_version":"Published Version","volume":121,"article_number":"e2307776121","isi":1,"title":"A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction","department":[{"_id":"TiVo"}],"month":"01","date_updated":"2025-09-04T11:47:47Z","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode","image":"/images/cc_by_nc_nd.png","short":"CC BY-NC-ND (4.0)","name":"Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)"},"license":"https://creativecommons.org/licenses/by-nc-nd/4.0/","doi":"10.1073/pnas.2307776121","article_processing_charge":"Yes (in subscription journal)","publication_identifier":{"eissn":["1091-6490"]},"publisher":"National Academy of Sciences","publication_status":"published","citation":{"ista":"Clatot J, Currin C, Liang Q, Pipatpolkai T, Massey SL, Helbig I, Delemotte L, Vogels TP, Covarrubias M, Goldberg EM. 2024. A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. Proceedings of the National Academy of Sciences of the United States of America. 121(3), e2307776121.","mla":"Clatot, Jerome, et al. 
“A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 121, no. 3, e2307776121, National Academy of Sciences, 2024, doi:<a href=\"https://doi.org/10.1073/pnas.2307776121\">10.1073/pnas.2307776121</a>.","ieee":"J. Clatot <i>et al.</i>, “A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction,” <i>Proceedings of the National Academy of Sciences of the United States of America</i>, vol. 121, no. 3. National Academy of Sciences, 2024.","apa":"Clatot, J., Currin, C., Liang, Q., Pipatpolkai, T., Massey, S. L., Helbig, I., … Goldberg, E. M. (2024). A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. National Academy of Sciences. <a href=\"https://doi.org/10.1073/pnas.2307776121\">https://doi.org/10.1073/pnas.2307776121</a>","short":"J. Clatot, C. Currin, Q. Liang, T. Pipatpolkai, S.L. Massey, I. Helbig, L. Delemotte, T.P. Vogels, M. Covarrubias, E.M. Goldberg, Proceedings of the National Academy of Sciences of the United States of America 121 (2024).","chicago":"Clatot, Jerome, Christopher Currin, Qiansheng Liang, Tanadet Pipatpolkai, Shavonne L. Massey, Ingo Helbig, Lucie Delemotte, Tim P Vogels, Manuel Covarrubias, and Ethan M. Goldberg. “A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” <i>Proceedings of the National Academy of Sciences of the United States of America</i>. National Academy of Sciences, 2024. <a href=\"https://doi.org/10.1073/pnas.2307776121\">https://doi.org/10.1073/pnas.2307776121</a>.","ama":"Clatot J, Currin C, Liang Q, et al. 
A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. <i>Proceedings of the National Academy of Sciences of the United States of America</i>. 2024;121(3). doi:<a href=\"https://doi.org/10.1073/pnas.2307776121\">10.1073/pnas.2307776121</a>"},"year":"2024","date_published":"2024-01-16T00:00:00Z","pmid":1,"intvolume":"       121","ddc":["570"],"file":[{"content_type":"application/pdf","file_name":"2024_PNAS_Clatot.pdf","date_updated":"2025-04-23T13:51:16Z","file_id":"19613","creator":"dernst","checksum":"f498c643be81895dd3a69ee90115a782","date_created":"2025-04-23T13:51:16Z","relation":"main_file","file_size":3060109,"success":1,"access_level":"open_access"}],"language":[{"iso":"eng"}],"publication":"Proceedings of the National Academy of Sciences of the United States of America","has_accepted_license":"1","status":"public","date_created":"2024-01-21T23:00:56Z","scopus_import":"1","type":"journal_article","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","acknowledgement":"This work was supported by an ERC Consolidator Grant (SYNAPSEEK) to T.P.V., the NOMIS Foundation through the NOMIS Fellowships program at IST Austria to C.B.C., a Jefferson Synaptic Biology Center Pilot Project Grant to M.C., NIH NINDS U54 NS108874 (PI, Alfred L. George), and NIH NINDS R01 NS122887 to E.M.G. The computations were enabled by resources provided by the Swedish National Infrastructure for Computing (SNIC) at the PDC Center for High-Performance Computing, KTH Royal Institute of Technology, partially funded by the Swedish Research Council through grant agreement no. 2018-05973. 
We thank Akshay Sridhar for the fruitful discussion of the project.","author":[{"first_name":"Jerome","full_name":"Clatot, Jerome","last_name":"Clatot"},{"full_name":"Currin, Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","last_name":"Currin","orcid":"0000-0002-4809-5059","first_name":"Christopher"},{"full_name":"Liang, Qiansheng","last_name":"Liang","first_name":"Qiansheng"},{"first_name":"Tanadet","full_name":"Pipatpolkai, Tanadet","last_name":"Pipatpolkai"},{"full_name":"Massey, Shavonne L.","last_name":"Massey","first_name":"Shavonne L."},{"first_name":"Ingo","last_name":"Helbig","full_name":"Helbig, Ingo"},{"last_name":"Delemotte","full_name":"Delemotte, Lucie","first_name":"Lucie"},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","first_name":"Tim P"},{"first_name":"Manuel","full_name":"Covarrubias, Manuel","last_name":"Covarrubias"},{"first_name":"Ethan M.","last_name":"Goldberg","full_name":"Goldberg, Ethan M."}],"issue":"3","file_date_updated":"2025-04-23T13:51:16Z","OA_place":"publisher","_id":"14841","related_material":{"link":[{"relation":"software","url":"https://github.com/ChrisCurrin/pv-kcnc2 "}]},"oa":1},{"quality_controlled":"1","external_id":{"pmid":["38509348 "],"isi":["001190081400001"]},"project":[{"_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"oa_version":"Published Version","day":"01","volume":27,"ec_funded":1,"OA_type":"hybrid","abstract":[{"lang":"eng","text":"The brain’s functionality is developed and maintained through synaptic plasticity. As synapses undergo plasticity, they also affect each other. The nature of such ‘co-dependency’ is difficult to disentangle experimentally, because multiple synapses must be monitored simultaneously. 
To help understand the experimentally observed phenomena, we introduce a framework that formalizes synaptic co-dependency between different connection types. The resulting model explains how inhibition can gate excitatory plasticity while neighboring excitatory–excitatory interactions determine the strength of long-term potentiation. Furthermore, we show how the interplay between excitatory and inhibitory synapses can account for the quick rise and long-term stability of a variety of synaptic weight profiles, such as orientation tuning and dendritic clustering of co-active synapses. In recurrent neuronal networks, co-dependent plasticity produces rich and stable motor cortex-like dynamics with high input sensitivity. Our results suggest an essential role for the neighborly synaptic interaction during learning, connecting micro-level physiology with network-wide phenomena."}],"article_type":"original","publication_status":"published","publication_identifier":{"eissn":["1546-1726"],"issn":["1097-6256"]},"publisher":"Springer Nature","article_processing_charge":"Yes (via OA deal)","doi":"10.1038/s41593-024-01597-4","date_published":"2024-05-01T00:00:00Z","year":"2024","citation":{"short":"E.J. Agnes, T.P. Vogels, Nature Neuroscience 27 (2024) 964–974.","apa":"Agnes, E. J., &#38; Vogels, T. P. (2024). Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. <i>Nature Neuroscience</i>. Springer Nature. <a href=\"https://doi.org/10.1038/s41593-024-01597-4\">https://doi.org/10.1038/s41593-024-01597-4</a>","ama":"Agnes EJ, Vogels TP. Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. <i>Nature Neuroscience</i>. 2024;27:964-974. doi:<a href=\"https://doi.org/10.1038/s41593-024-01597-4\">10.1038/s41593-024-01597-4</a>","chicago":"Agnes, Everton J., and Tim P Vogels. 
“Co-Dependent Excitatory and Inhibitory Plasticity Accounts for Quick, Stable and Long-Lasting Memories in Biological Networks.” <i>Nature Neuroscience</i>. Springer Nature, 2024. <a href=\"https://doi.org/10.1038/s41593-024-01597-4\">https://doi.org/10.1038/s41593-024-01597-4</a>.","ista":"Agnes EJ, Vogels TP. 2024. Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. Nature Neuroscience. 27, 964–974.","ieee":"E. J. Agnes and T. P. Vogels, “Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks,” <i>Nature Neuroscience</i>, vol. 27. Springer Nature, pp. 964–974, 2024.","mla":"Agnes, Everton J., and Tim P. Vogels. “Co-Dependent Excitatory and Inhibitory Plasticity Accounts for Quick, Stable and Long-Lasting Memories in Biological Networks.” <i>Nature Neuroscience</i>, vol. 27, Springer Nature, 2024, pp. 964–74, doi:<a href=\"https://doi.org/10.1038/s41593-024-01597-4\">10.1038/s41593-024-01597-4</a>."},"title":"Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks","isi":1,"tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"date_updated":"2025-09-04T13:06:06Z","month":"05","department":[{"_id":"TiVo"}],"has_accepted_license":"1","publication":"Nature Neuroscience","language":[{"iso":"eng"}],"status":"public","date_created":"2024-03-24T23:01:00Z","scopus_import":"1","intvolume":"        
27","pmid":1,"page":"964-974","file":[{"content_type":"application/pdf","date_updated":"2025-06-25T08:45:32Z","file_name":"2025_NatureNeuroscience_Agnes.pdf","creator":"dernst","file_id":"19902","checksum":"dfca68a24749575b912b3a78a7de4516","date_created":"2025-06-25T08:45:32Z","relation":"main_file","file_size":10508018,"success":1,"access_level":"open_access"}],"ddc":["570"],"_id":"15171","OA_place":"publisher","oa":1,"user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","author":[{"last_name":"Agnes","full_name":"Agnes, Everton J.","first_name":"Everton J."},{"orcid":"0000-0003-3295-6181","first_name":"Tim P","last_name":"Vogels","full_name":"Vogels, Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"acknowledgement":"We thank C. Currin, B. Podlaski and the members of the Vogels group for fruitful discussions. E.J.A. and T.P.V. were supported by a Research Project Grant from the Leverhulme Trust (RPG-2016-446; TPV), a Sir Henry Dale Fellowship from the Wellcome Trust and the Royal Society (WT100000; T.P.V.), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z; T.P.V.) and a European Research Council Consolidator Grant (SYNAPSEEK, 819603; T.P.V.). For the purpose of open access, the authors have applied a CC BY public copyright license to any author accepted manuscript version arising from this submission. 
Open access funding provided by University of Basel.","type":"journal_article","file_date_updated":"2025-06-25T08:45:32Z"},{"page":"148","ddc":["610"],"file":[{"checksum":"7f636555eae7803323df287672fd13ed","date_created":"2023-10-12T14:53:50Z","file_size":30599717,"relation":"main_file","access_level":"open_access","content_type":"application/pdf","embargo":"2024-10-12","file_name":"Confavreux_Thesis_2A.pdf","date_updated":"2024-10-13T22:30:04Z","file_id":"14424","creator":"cchlebak"},{"file_id":"14440","creator":"cchlebak","embargo_to":"open_access","content_type":"application/x-zip-compressed","date_updated":"2024-10-13T22:30:04Z","file_name":"Confavreux Thesis.zip","relation":"source_file","file_size":68406739,"access_level":"closed","checksum":"725e85946db92290a4583a0de9779e1b","date_created":"2023-10-18T07:38:34Z"}],"language":[{"iso":"eng"}],"has_accepted_license":"1","status":"public","date_created":"2023-10-12T14:13:25Z","supervisor":[{"orcid":"0000-0003-3295-6181","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","last_name":"Vogels"}],"author":[{"last_name":"Confavreux","full_name":"Confavreux, Basile J","id":"C7610134-B532-11EA-BD9F-F5753DDC885E","first_name":"Basile J"}],"user_id":"ba8df636-2132-11f1-aed0-ed93e2281fdd","type":"dissertation","corr_author":"1","file_date_updated":"2024-10-13T22:30:04Z","_id":"14422","OA_place":"publisher","related_material":{"record":[{"relation":"part_of_dissertation","status":"public","id":"9633"}]},"oa":1,"abstract":[{"lang":"eng","text":"Animals exhibit a remarkable ability to learn and remember new behaviors, skills, and associations throughout their lifetime. These capabilities are made possible thanks to a variety of\r\nchanges in the brain throughout adulthood, regrouped under the term \"plasticity\". 
Some cells\r\nin the brain —neurons— and specifically changes in the connections between neurons, the\r\nsynapses, were shown to be crucial for the formation, selection, and consolidation of memories\r\nfrom past experiences. These ongoing changes of synapses across time are called synaptic\r\nplasticity. Understanding how a myriad of biochemical processes operating at individual\r\nsynapses can somehow work in concert to give rise to meaningful changes in behavior is a\r\nfascinating problem and an active area of research.\r\nHowever, the experimental search for the precise plasticity mechanisms at play in the brain\r\nis daunting, as it is difficult to control and observe synapses during learning. Theoretical\r\napproaches have thus been the default method to probe the plasticity-behavior connection. Such\r\nstudies attempt to extract unifying principles across synapses and model all observed synaptic\r\nchanges using plasticity rules: equations that govern the evolution of synaptic strengths across\r\ntime in neuronal network models. These rules can use many relevant quantities to determine\r\nthe magnitude of synaptic changes, such as the precise timings of pre- and postsynaptic\r\naction potentials, the recent neuronal activity levels, the state of neighboring synapses, etc.\r\nHowever, analytical studies rely heavily on human intuition and are forced to make simplifying\r\nassumptions about plasticity rules.\r\nIn this thesis, we aim to assist and augment human intuition in this search for plasticity rules.\r\nWe explore whether a numerical approach could automatically discover the plasticity rules\r\nthat elicit desired behaviors in large networks of interconnected neurons. This approach is\r\ndubbed meta-learning synaptic plasticity: learning plasticity rules which themselves will make\r\nneuronal networks learn how to solve a desired task. 
We first write all the potential plasticity\r\nmechanisms to consider using a single expression with adjustable parameters. We then optimize\r\nthese plasticity parameters using evolutionary strategies or Bayesian inference on tasks known\r\nto involve synaptic plasticity, such as familiarity detection and network stabilization.\r\nWe show that these automated approaches are powerful tools, able to complement established\r\nanalytical methods. By comprehensively screening plasticity rules at all synapse types in\r\nrealistic, spiking neuronal network models, we discover entire sets of degenerate plausible\r\nplasticity rules that reliably elicit memory-related behaviors. Our approaches allow for more\r\nrobust experimental predictions, by abstracting out the idiosyncrasies of individual plasticity\r\nrules, and provide fresh insights on synaptic plasticity in spiking network models.\r\n"}],"alternative_title":["ISTA Thesis"],"project":[{"_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","grant_number":"819603"}],"ec_funded":1,"day":"12","oa_version":"Published Version","title":"Synapseek: Meta-learning synaptic plasticity rules","date_updated":"2026-04-07T13:53:13Z","month":"10","department":[{"_id":"GradSch"},{"_id":"TiVo"}],"license":"https://creativecommons.org/licenses/by-nc-sa/4.0/","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode","image":"/images/cc_by_nc_sa.png","name":"Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)","short":"CC BY-NC-SA (4.0)"},"publication_identifier":{"issn":["2663-337X"]},"publisher":"Institute of Science and Technology 
Austria","article_processing_charge":"No","doi":"10.15479/at:ista:14422","publication_status":"published","degree_awarded":"PhD","date_published":"2023-10-12T00:00:00Z","year":"2023","citation":{"short":"B.J. Confavreux, Synapseek: Meta-Learning Synaptic Plasticity Rules, Institute of Science and Technology Austria, 2023.","apa":"Confavreux, B. J. (2023). <i>Synapseek: Meta-learning synaptic plasticity rules</i>. Institute of Science and Technology Austria. <a href=\"https://doi.org/10.15479/at:ista:14422\">https://doi.org/10.15479/at:ista:14422</a>","ama":"Confavreux BJ. Synapseek: Meta-learning synaptic plasticity rules. 2023. doi:<a href=\"https://doi.org/10.15479/at:ista:14422\">10.15479/at:ista:14422</a>","chicago":"Confavreux, Basile J. “Synapseek: Meta-Learning Synaptic Plasticity Rules.” Institute of Science and Technology Austria, 2023. <a href=\"https://doi.org/10.15479/at:ista:14422\">https://doi.org/10.15479/at:ista:14422</a>.","ista":"Confavreux BJ. 2023. Synapseek: Meta-learning synaptic plasticity rules. Institute of Science and Technology Austria.","ieee":"B. J. Confavreux, “Synapseek: Meta-learning synaptic plasticity rules,” Institute of Science and Technology Austria, 2023.","mla":"Confavreux, Basile J. <i>Synapseek: Meta-Learning Synaptic Plasticity Rules</i>. Institute of Science and Technology Austria, 2023, doi:<a href=\"https://doi.org/10.15479/at:ista:14422\">10.15479/at:ista:14422</a>."}},{"file_date_updated":"2022-09-05T08:55:11Z","type":"journal_article","acknowledgement":"We would like to thank the Vogels Lab for feedback on an earlier version of this manuscript. D.W.J. was supported by a Marshall Scholarship and a Clarendon Scholarship. R.P.C. and T.P.V. 
were supported by a Wellcome Trust and Royal Society Sir Henry Dale Fellowship (WT 100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant (SYNAPSEEK).","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","author":[{"first_name":"David W.","full_name":"Jia, David W.","last_name":"Jia"},{"orcid":"0000-0003-3295-6181","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","last_name":"Vogels"},{"first_name":"Rui Ponte","last_name":"Costa","full_name":"Costa, Rui Ponte"}],"oa":1,"_id":"12009","file":[{"success":1,"access_level":"open_access","file_size":2491191,"relation":"main_file","date_created":"2022-09-05T08:55:11Z","checksum":"3ec724c4f6d3440028c217305e32915f","file_id":"12022","creator":"dernst","file_name":"2022_CommBiology_Jia.pdf","date_updated":"2022-09-05T08:55:11Z","content_type":"application/pdf"}],"ddc":["570"],"intvolume":"         5","scopus_import":"1","status":"public","date_created":"2022-09-04T22:02:02Z","has_accepted_license":"1","language":[{"iso":"eng"}],"publication":"Communications biology","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"department":[{"_id":"TiVo"}],"month":"08","date_updated":"2025-04-14T09:44:14Z","title":"Developmental depression-to-facilitation shift controls excitation-inhibition balance","article_number":"873","isi":1,"citation":{"short":"D.W. Jia, T.P. Vogels, R.P. Costa, Communications Biology 5 (2022).","apa":"Jia, D. W., Vogels, T. P., &#38; Costa, R. P. (2022). Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications Biology</i>. Springer Nature. <a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>","ama":"Jia DW, Vogels TP, Costa RP. 
Developmental depression-to-facilitation shift controls excitation-inhibition balance. <i>Communications biology</i>. 2022;5. doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>","chicago":"Jia, David W., Tim P Vogels, and Rui Ponte Costa. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>. Springer Nature, 2022. <a href=\"https://doi.org/10.1038/s42003-022-03801-2\">https://doi.org/10.1038/s42003-022-03801-2</a>.","ista":"Jia DW, Vogels TP, Costa RP. 2022. Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications biology. 5, 873.","ieee":"D. W. Jia, T. P. Vogels, and R. P. Costa, “Developmental depression-to-facilitation shift controls excitation-inhibition balance,” <i>Communications biology</i>, vol. 5. Springer Nature, 2022.","mla":"Jia, David W., et al. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” <i>Communications Biology</i>, vol. 5, 873, Springer Nature, 2022, doi:<a href=\"https://doi.org/10.1038/s42003-022-03801-2\">10.1038/s42003-022-03801-2</a>."},"year":"2022","date_published":"2022-08-25T00:00:00Z","publication_status":"published","doi":"10.1038/s42003-022-03801-2","publication_identifier":{"eissn":["2399-3642"]},"publisher":"Springer Nature","article_processing_charge":"No","abstract":[{"lang":"eng","text":"Changes in the short-term dynamics of excitatory synapses over development have been observed throughout cortex, but their purpose and consequences remain unclear. Here, we propose that developmental changes in synaptic dynamics buffer the effect of slow inhibitory long-term plasticity, allowing for continuously stable neural activity. Using computational modeling we demonstrate that early in development excitatory short-term depression quickly stabilises neural activity, even in the face of strong, unbalanced excitation. 
We introduce a model of the commonly observed developmental shift from depression to facilitation and show that neural activity remains stable throughout development, while inhibitory synaptic plasticity slowly balances excitation, consistent with experimental observations. Our model predicts changes in the input responses from phasic to phasic-and-tonic and more precise spike timings. We also observe a gradual emergence of short-lasting memory traces governed by short-term plasticity development. We conclude that the developmental depression-to-facilitation shift may control excitation-inhibition balance throughout development with important functional consequences."}],"article_type":"original","day":"25","volume":5,"oa_version":"Published Version","ec_funded":1,"project":[{"grant_number":"214316/Z/18/Z","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks."},{"grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020"}],"external_id":{"isi":["000844814800007"]},"quality_controlled":"1"},{"project":[{"grant_number":"819603","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."}],"quality_controlled":"1","volume":199,"oa_version":"Published Version","day":"01","ec_funded":1,"abstract":[{"lang":"eng","text":"Brains are thought to engage in predictive learning - learning to predict upcoming stimuli - to construct an internal model of their environment. This is especially notable for spatial navigation, as first described by Tolman’s latent learning tasks. However, predictive learning has also been observed in sensory cortex, in settings unrelated to spatial navigation. 
Apart from normative frameworks such as active inference or efficient coding, what could be the utility of learning to predict the patterns of occurrence of correlated stimuli? Here we show that prediction, and thereby the construction of an internal model of sequential stimuli, can bootstrap the learning process of a working memory task in a recurrent neural network. We implemented predictive learning alongside working memory match-tasks, and networks emerged to solve the prediction task first by encoding information across time to predict upcoming stimuli, and then eavesdropped on this solution to solve the matching task. Eavesdropping was most beneficial when neural resources were limited. Hence, predictive learning acts as a general neural mechanism to learn to store sensory information that can later be essential for working memory tasks."}],"publication_status":"published","publisher":"ML Research Press","publication_identifier":{"eissn":["2640-3498"]},"article_processing_charge":"No","citation":{"short":"T.L. Van Der Plas, T.P. Vogels, S.G. Manohar, in:, Proceedings of Machine Learning Research, ML Research Press, 2022, pp. 518–531.","apa":"Van Der Plas, T. L., Vogels, T. P., &#38; Manohar, S. G. (2022). Predictive learning enables neural networks to learn complex working memory tasks. In <i>Proceedings of Machine Learning Research</i> (Vol. 199, pp. 518–531). ML Research Press.","ama":"Van Der Plas TL, Vogels TP, Manohar SG. Predictive learning enables neural networks to learn complex working memory tasks. In: <i>Proceedings of Machine Learning Research</i>. Vol 199. ML Research Press; 2022:518-531.","chicago":"Van Der Plas, Thijs L., Tim P Vogels, and Sanjay G. Manohar. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” In <i>Proceedings of Machine Learning Research</i>, 199:518–31. ML Research Press, 2022.","ista":"Van Der Plas TL, Vogels TP, Manohar SG. 2022. 
Predictive learning enables neural networks to learn complex working memory tasks. Proceedings of Machine Learning Research. vol. 199, 518–531.","ieee":"T. L. Van Der Plas, T. P. Vogels, and S. G. Manohar, “Predictive learning enables neural networks to learn complex working memory tasks,” in <i>Proceedings of Machine Learning Research</i>, 2022, vol. 199, pp. 518–531.","mla":"Van Der Plas, Thijs L., et al. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” <i>Proceedings of Machine Learning Research</i>, vol. 199, ML Research Press, 2022, pp. 518–31."},"date_published":"2022-12-01T00:00:00Z","year":"2022","title":"Predictive learning enables neural networks to learn complex working memory tasks","department":[{"_id":"TiVo"}],"month":"12","date_updated":"2025-04-14T07:54:31Z","has_accepted_license":"1","language":[{"iso":"eng"}],"publication":"Proceedings of Machine Learning Research","date_created":"2023-07-16T22:01:12Z","scopus_import":"1","status":"public","page":"518-531","intvolume":"       199","file":[{"content_type":"application/pdf","file_name":"2022_PMLR_vanderPlas.pdf","date_updated":"2023-07-18T06:32:38Z","creator":"dernst","file_id":"13243","checksum":"7530a93ef42e10b4db1e5e4b69796e93","date_created":"2023-07-18T06:32:38Z","file_size":585135,"relation":"main_file","access_level":"open_access","success":1}],"ddc":["000"],"_id":"13239","oa":1,"type":"conference","author":[{"last_name":"Van Der Plas","full_name":"Van Der Plas, Thijs L.","first_name":"Thijs L."},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","first_name":"Tim P","orcid":"0000-0003-3295-6181"},{"full_name":"Manohar, Sanjay G.","last_name":"Manohar","first_name":"Sanjay G."}],"acknowledgement":"The authors would like to thank members of the Vogels lab and Manohar lab, as well as Adam Packer, Andrew Saxe, Stefano Sarao Mannelli and Jacob Bakermans for fruitful discussions and comments on earlier versions of the 
manuscript.\r\nTLvdP was supported by funding from the Biotechnology and Biological Sciences Research Council (BBSRC) [grant number BB/M011224/1]. TPV was supported by an ERC Consolidator Grant (SYNAPSEEK). SGM was funded by a MRC Clinician Scientist Fellowship MR/P00878X and Leverhulme Grant RPG-2018-310.","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","file_date_updated":"2023-07-18T06:32:38Z"},{"date_created":"2022-04-10T22:01:39Z","scopus_import":"1","status":"public","language":[{"iso":"eng"}],"publication":"Cell Reports","has_accepted_license":"1","ddc":["570"],"file":[{"creator":"dernst","file_id":"11172","file_name":"2022_CellReports_Kaneko.pdf","date_updated":"2022-04-15T11:00:58Z","content_type":"application/pdf","success":1,"access_level":"open_access","relation":"main_file","file_size":4774216,"date_created":"2022-04-15T11:00:58Z","checksum":"49105c6c27c9af0f37f50a8bbb4d380d"}],"pmid":1,"intvolume":"        38","oa":1,"_id":"11143","issue":"13","file_date_updated":"2022-04-15T11:00:58Z","type":"journal_article","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","acknowledgement":"We would like to thank Bernardo Rudy, Joanna Mattis, and Laura Mcgarry for comments on a previous version of the manuscript; Xiaohong Zhang for expert technical support and mouse colony maintenance; Melody Cheng for assistance with generation of the graphical abstract; and Jennifer Kearney for the gift of Scn1a+/− mice. This work was supported by the National Institute of Neurological Disorders and Stroke of the National Institutes of Health under F31NS111803 (to K.M.G.) and K08NS097633 and R01NS110869 (to E.M.G.), the Dravet Syndrome Foundation (to A.S.), an ERC Consolidator Grant (SYNAPSEEK) (to T.P.V.), and the NOMIS Foundation through the NOMIS Fellowships program at IST Austria (to C.C.). 
The graphical abstract was prepared using BioRender software (BioRender.com).","author":[{"last_name":"Kaneko","full_name":"Kaneko, Keisuke","first_name":"Keisuke"},{"last_name":"Currin","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","full_name":"Currin, Christopher","orcid":"0000-0002-4809-5059","first_name":"Christopher"},{"first_name":"Kevin M.","full_name":"Goff, Kevin M.","last_name":"Goff"},{"first_name":"Eric R.","last_name":"Wengert","full_name":"Wengert, Eric R."},{"full_name":"Somarowthu, Ala","last_name":"Somarowthu","first_name":"Ala"},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","first_name":"Tim P"},{"full_name":"Goldberg, Ethan M.","last_name":"Goldberg","first_name":"Ethan M."}],"ec_funded":1,"oa_version":"Published Version","day":"29","volume":38,"project":[{"name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","grant_number":"819603"},{"_id":"9B861AAC-BA93-11EA-9121-9846C619BF3A","name":"NOMIS Fellowship Program"}],"external_id":{"isi":["000779794000001"],"pmid":["35354025"]},"quality_controlled":"1","article_type":"original","abstract":[{"lang":"eng","text":"Dravet syndrome is a neurodevelopmental disorder characterized by epilepsy, intellectual disability, and sudden death due to pathogenic variants in SCN1A with loss of function of the sodium channel subunit Nav1.1. Nav1.1-expressing parvalbumin GABAergic interneurons (PV-INs) from young Scn1a+/− mice show impaired action potential generation. An approach assessing PV-IN function in the same mice at two time points shows impaired spike generation in all Scn1a+/− mice at postnatal days (P) 16–21, whether deceased prior or surviving to P35, with normalization by P35 in surviving mice. 
However, PV-IN synaptic transmission is dysfunctional in young Scn1a+/− mice that did not survive and in Scn1a+/− mice ≥ P35. Modeling confirms that PV-IN axonal propagation is more sensitive to decreased sodium conductance than spike generation. These results demonstrate dynamic dysfunction in Dravet syndrome: combined abnormalities of PV-IN spike generation and propagation drives early disease severity, while ongoing dysfunction of synaptic transmission contributes to chronic pathology."}],"citation":{"ieee":"K. Kaneko <i>et al.</i>, “Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome,” <i>Cell Reports</i>, vol. 38, no. 13. Elsevier, 2022.","mla":"Kaneko, Keisuke, et al. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” <i>Cell Reports</i>, vol. 38, no. 13, 110580, Elsevier, 2022, doi:<a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">10.1016/j.celrep.2022.110580</a>.","ista":"Kaneko K, Currin C, Goff KM, Wengert ER, Somarowthu A, Vogels TP, Goldberg EM. 2022. Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. Cell Reports. 38(13), 110580.","ama":"Kaneko K, Currin C, Goff KM, et al. Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. <i>Cell Reports</i>. 2022;38(13). doi:<a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">10.1016/j.celrep.2022.110580</a>","chicago":"Kaneko, Keisuke, Christopher Currin, Kevin M. Goff, Eric R. Wengert, Ala Somarowthu, Tim P Vogels, and Ethan M. Goldberg. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” <i>Cell Reports</i>. Elsevier, 2022. 
<a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">https://doi.org/10.1016/j.celrep.2022.110580</a>.","short":"K. Kaneko, C. Currin, K.M. Goff, E.R. Wengert, A. Somarowthu, T.P. Vogels, E.M. Goldberg, Cell Reports 38 (2022).","apa":"Kaneko, K., Currin, C., Goff, K. M., Wengert, E. R., Somarowthu, A., Vogels, T. P., &#38; Goldberg, E. M. (2022). Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. <i>Cell Reports</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.celrep.2022.110580\">https://doi.org/10.1016/j.celrep.2022.110580</a>"},"year":"2022","date_published":"2022-03-29T00:00:00Z","doi":"10.1016/j.celrep.2022.110580","article_processing_charge":"No","publisher":"Elsevier","publication_identifier":{"eissn":["2211-1247"]},"publication_status":"published","department":[{"_id":"TiVo"}],"month":"03","date_updated":"2025-06-11T14:00:11Z","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode","image":"/images/cc_by_nc_nd.png","short":"CC BY-NC-ND (4.0)","name":"Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)"},"article_number":"110580","isi":1,"title":"Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome"},{"oa":1,"_id":"8253","corr_author":"1","issue":"4","file_date_updated":"2022-04-08T06:05:39Z","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","author":[{"last_name":"Zenke","full_name":"Zenke, Friedemann","orcid":"0000-0003-1883-644X","first_name":"Friedemann"},{"first_name":"Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","full_name":"Vogels, Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"acknowledgement":"F.Z. was supported by the Wellcome Trust (110124/Z/15/Z) and the Novartis Research Foundation. T.P.V. 
was supported by a Wellcome Trust Sir Henry Dale Research fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant SYNAPSEEK.","type":"journal_article","status":"public","date_created":"2020-08-12T12:08:24Z","scopus_import":"1","publication":"Neural Computation","language":[{"iso":"eng"}],"has_accepted_license":"1","ddc":["000","570"],"file":[{"content_type":"application/pdf","date_updated":"2022-04-08T06:05:39Z","file_name":"2021_NeuralComputation_Zenke.pdf","file_id":"11131","creator":"dernst","checksum":"eac5a51c24c8989ae7cf9ae32ec3bc95","date_created":"2022-04-08T06:05:39Z","file_size":1611614,"relation":"main_file","access_level":"open_access","success":1}],"page":"899-925","pmid":1,"intvolume":"        33","year":"2021","date_published":"2021-03-01T00:00:00Z","citation":{"ieee":"F. Zenke and T. P. Vogels, “The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks,” <i>Neural Computation</i>, vol. 33, no. 4. MIT Press, pp. 899–925, 2021.","mla":"Zenke, Friedemann, and Tim P. Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>, vol. 33, no. 4, MIT Press, 2021, pp. 899–925, doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>.","ista":"Zenke F, Vogels TP. 2021. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. Neural Computation. 33(4), 899–925.","ama":"Zenke F, Vogels TP. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. 2021;33(4):899-925. doi:<a href=\"https://doi.org/10.1162/neco_a_01367\">10.1162/neco_a_01367</a>","chicago":"Zenke, Friedemann, and Tim P Vogels. 
“The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” <i>Neural Computation</i>. MIT Press, 2021. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>.","short":"F. Zenke, T.P. Vogels, Neural Computation 33 (2021) 899–925.","apa":"Zenke, F., &#38; Vogels, T. P. (2021). The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. <i>Neural Computation</i>. MIT Press. <a href=\"https://doi.org/10.1162/neco_a_01367\">https://doi.org/10.1162/neco_a_01367</a>"},"publication_identifier":{"eissn":["1530-888X"],"issn":["0899-7667"]},"article_processing_charge":"No","publisher":"MIT Press","doi":"10.1162/neco_a_01367","publication_status":"published","date_updated":"2025-04-14T09:44:14Z","month":"03","department":[{"_id":"TiVo"}],"tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"isi":1,"title":"The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks","ec_funded":1,"day":"01","volume":33,"oa_version":"Published Version","external_id":{"isi":["000663433900003"],"pmid":["33513328"]},"quality_controlled":"1","project":[{"grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning."},{"_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","grant_number":"214316/Z/18/Z"}],"article_type":"original","abstract":[{"text":"Brains process information in spiking neural networks. Their intricate connections shape the diverse functions these networks perform. 
In comparison, the functional capabilities of models of spiking networks are still rudimentary. This shortcoming is mainly due to the lack of insight and practical algorithms to construct the necessary connectivity. Any such algorithm typically attempts to build networks by iteratively reducing the error compared to a desired output. But assigning credit to hidden units in multi-layered spiking networks has remained challenging due to the non-differentiable nonlinearity of spikes. To avoid this issue, one can employ surrogate gradients to discover the required connectivity in spiking network models. However, the choice of a surrogate is not unique, raising the question of how its implementation influences the effectiveness of the method. Here, we use numerical simulations to systematically study how essential design parameters of surrogate gradients impact learning performance on a range of classification problems. We show that surrogate gradient learning is robust to different shapes of underlying surrogate derivatives, but the choice of the derivative’s scale can substantially affect learning performance. When we combine surrogate gradients with a suitable activity regularization technique, robust information processing can be achieved in spiking networks even at the sparse activity limit. 
Our study provides a systematic account of the remarkable robustness of surrogate gradient learning and serves as a practical guide to model functional spiking neural networks.","lang":"eng"}]},{"title":"Training deep neural density estimators to identify mechanistic models of neural dynamics","article_number":"e56261","isi":1,"tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"date_updated":"2025-04-14T07:54:31Z","department":[{"_id":"TiVo"}],"month":"09","publication_status":"published","publisher":"eLife Sciences Publications","publication_identifier":{"eissn":["2050-084X"]},"article_processing_charge":"No","doi":"10.7554/eLife.56261","date_published":"2020-09-17T00:00:00Z","year":"2020","citation":{"ama":"Gonçalves PJ, Lueckmann J-M, Deistler M, et al. Training deep neural density estimators to identify mechanistic models of neural dynamics. <i>eLife</i>. 2020;9. doi:<a href=\"https://doi.org/10.7554/eLife.56261\">10.7554/eLife.56261</a>","chicago":"Gonçalves, Pedro J., Jan-Matthis Lueckmann, Michael Deistler, Marcel Nonnenmacher, Kaan Öcal, Giacomo Bassetto, Chaitanya Chintaluri, et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” <i>ELife</i>. eLife Sciences Publications, 2020. <a href=\"https://doi.org/10.7554/eLife.56261\">https://doi.org/10.7554/eLife.56261</a>.","short":"P.J. Gonçalves, J.-M. Lueckmann, M. Deistler, M. Nonnenmacher, K. Öcal, G. Bassetto, C. Chintaluri, W.F. Podlaski, S.A. Haddad, T.P. Vogels, D.S. Greenberg, J.H. Macke, ELife 9 (2020).","apa":"Gonçalves, P. J., Lueckmann, J.-M., Deistler, M., Nonnenmacher, M., Öcal, K., Bassetto, G., … Macke, J. H. (2020). Training deep neural density estimators to identify mechanistic models of neural dynamics. <i>ELife</i>. eLife Sciences Publications. 
<a href=\"https://doi.org/10.7554/eLife.56261\">https://doi.org/10.7554/eLife.56261</a>","ieee":"P. J. Gonçalves <i>et al.</i>, “Training deep neural density estimators to identify mechanistic models of neural dynamics,” <i>eLife</i>, vol. 9. eLife Sciences Publications, 2020.","mla":"Gonçalves, Pedro J., et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” <i>ELife</i>, vol. 9, e56261, eLife Sciences Publications, 2020, doi:<a href=\"https://doi.org/10.7554/eLife.56261\">10.7554/eLife.56261</a>.","ista":"Gonçalves PJ, Lueckmann J-M, Deistler M, Nonnenmacher M, Öcal K, Bassetto G, Chintaluri C, Podlaski WF, Haddad SA, Vogels TP, Greenberg DS, Macke JH. 2020. Training deep neural density estimators to identify mechanistic models of neural dynamics. eLife. 9, e56261."},"abstract":[{"lang":"eng","text":"Mechanistic modeling in neuroscience aims to explain observed phenomena in terms of underlying causes. However, determining which model parameters agree with complex and stochastic neural data presents a significant challenge. We address this challenge with a machine learning tool which uses deep neural density estimators—trained using model simulations—to carry out Bayesian inference and retrieve the full space of parameters compatible with raw data or selected data features. Our method is scalable in parameters and data features and can rapidly analyze new data after initial training. We demonstrate the power and flexibility of our approach on receptive fields, ion channels, and Hodgkin–Huxley models. We also characterize the space of circuit configurations giving rise to rhythmic activity in the crustacean stomatogastric ganglion, and use these results to derive hypotheses for underlying compensation mechanisms. 
Our approach will help close the gap between data-driven and theory-driven models of neural dynamics."}],"article_type":"original","external_id":{"isi":["000584989400001"],"pmid":["32940606"]},"quality_controlled":"1","project":[{"name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","grant_number":"819603"}],"oa_version":"Published Version","day":"17","volume":9,"ec_funded":1,"author":[{"first_name":"Pedro J.","orcid":"0000-0002-6987-4836","last_name":"Gonçalves","full_name":"Gonçalves, Pedro J."},{"orcid":"0000-0003-4320-4663","first_name":"Jan-Matthis","full_name":"Lueckmann, Jan-Matthis","last_name":"Lueckmann"},{"orcid":"0000-0002-3573-0404","first_name":"Michael","last_name":"Deistler","full_name":"Deistler, Michael"},{"last_name":"Nonnenmacher","full_name":"Nonnenmacher, Marcel","orcid":"0000-0001-6044-6627","first_name":"Marcel"},{"orcid":"0000-0002-8528-6858","first_name":"Kaan","full_name":"Öcal, Kaan","last_name":"Öcal"},{"full_name":"Bassetto, Giacomo","last_name":"Bassetto","first_name":"Giacomo"},{"first_name":"Chaitanya","orcid":"0000-0003-4252-1608","last_name":"Chintaluri","id":"BA06AFEE-A4BA-11EA-AE5C-14673DDC885E","full_name":"Chintaluri, Chaitanya"},{"full_name":"Podlaski, William F.","last_name":"Podlaski","orcid":"0000-0001-6619-7502","first_name":"William F."},{"last_name":"Haddad","full_name":"Haddad, Sara A.","orcid":"0000-0003-0807-0823","first_name":"Sara A."},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","first_name":"Tim P"},{"first_name":"David S.","full_name":"Greenberg, David S.","last_name":"Greenberg"},{"full_name":"Macke, Jakob H.","last_name":"Macke","orcid":"0000-0001-5154-8912","first_name":"Jakob H."}],"acknowledgement":"We thank Mahmood S Hoseini and Michael Stryker for sharing their data for Figure 2, and 
Philipp Berens, Sean Bittner, Jan Boelts, John Cunningham, Richard Gao, Scott Linderman, Eve Marder, Iain Murray, George Papamakarios, Astrid Prinz, Auguste Schulz and Srinivas Turaga for discussions and/or comments on the manuscript. This work was supported by the German Research Foundation (DFG) through SFB 1233 ‘Robust Vision’, (276693517), SFB 1089 ‘Synaptic Microcircuits’, SPP 2041 ‘Computational Connectomics’ and Germany's Excellence Strategy – EXC-Number 2064/1 – Project number 390727645 and the German Federal Ministry of Education and Research (BMBF, project ‘ADIMEM’, FKZ 01IS18052 A-D) to JHM, a Sir Henry Dale Fellowship by the Wellcome Trust and the Royal Society (WT100000; WFP and TPV), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z; TPV), a ERC Consolidator Grant (SYNAPSEEK; WPF and CC), and a UK Research and Innovation, Biotechnology and Biological Sciences Research Council (CC, UKRI-BBSRC BB/N019512/1). We gratefully acknowledge the Leibniz Supercomputing Centre for funding this project by providing computing time on its Linux-Cluster.","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","type":"journal_article","file_date_updated":"2020-10-27T11:37:32Z","_id":"8127","oa":1,"intvolume":"         9","pmid":1,"file":[{"access_level":"open_access","success":1,"relation":"main_file","file_size":17355867,"date_created":"2020-10-27T11:37:32Z","checksum":"c4300ddcd93ed03fc9c6cdf1f77890be","creator":"cziletti","file_id":"8709","date_updated":"2020-10-27T11:37:32Z","file_name":"2020_eLife_Gonçalves.pdf","content_type":"application/pdf"}],"ddc":["570"],"has_accepted_license":"1","publication":"eLife","language":[{"iso":"eng"}],"status":"public","scopus_import":"1","date_created":"2020-07-16T12:26:04Z"},{"title":"A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural 
network","department":[{"_id":"TiVo"}],"month":"12","date_updated":"2026-04-22T22:30:22Z","publication_status":"published","publication_identifier":{"issn":["1049-5258"]},"article_processing_charge":"No","citation":{"short":"B.J. Confavreux, F. Zenke, E.J. Agnes, T. Lillicrap, T.P. Vogels, in:, Advances in Neural Information Processing Systems, 2020, pp. 16398–16408.","apa":"Confavreux, B. J., Zenke, F., Agnes, E. J., Lillicrap, T., &#38; Vogels, T. P. (2020). A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In <i>Advances in Neural Information Processing Systems</i> (Vol. 33, pp. 16398–16408). Vancouver, Canada.","ama":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In: <i>Advances in Neural Information Processing Systems</i>. Vol 33. ; 2020:16398-16408.","chicago":"Confavreux, Basile J, Friedemann Zenke, Everton J. Agnes, Timothy Lillicrap, and Tim P Vogels. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” In <i>Advances in Neural Information Processing Systems</i>, 33:16398–408, 2020.","ista":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. 2020. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. Advances in Neural Information Processing Systems. NeurIPS: Conference on Neural Information Processing Systems vol. 33, 16398–16408.","ieee":"B. J. Confavreux, F. Zenke, E. J. Agnes, T. Lillicrap, and T. P. Vogels, “A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network,” in <i>Advances in Neural Information Processing Systems</i>, Vancouver, Canada, 2020, vol. 33, pp. 16398–16408.","mla":"Confavreux, Basile J., et al. 
“A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” <i>Advances in Neural Information Processing Systems</i>, vol. 33, 2020, pp. 16398–408."},"date_published":"2020-12-06T00:00:00Z","year":"2020","conference":{"end_date":"2020-12-12","name":"NeurIPS: Conference on Neural Information Processing Systems","start_date":"2020-12-06","location":"Vancouver, Canada"},"abstract":[{"lang":"eng","text":"The search for biologically faithful synaptic plasticity rules has resulted in a large body of models. They are usually inspired by – and fitted to – experimental data, but they rarely produce neural dynamics that serve complex functions. These failures suggest that current plasticity models are still under-constrained by existing data. Here, we present an alternative approach that uses meta-learning to discover plausible synaptic plasticity rules. Instead of experimental data, the rules are constrained by the functions they implement and the structure they are meant to produce. Briefly, we parameterize synaptic plasticity rules by a Volterra expansion and then use supervised learning methods (gradient descent or evolutionary strategies) to minimize a problem-dependent loss function that quantifies how effectively a candidate plasticity rule transforms an initially random network into one with the desired function. We first validate our approach by re-discovering previously described plasticity rules, starting at the single-neuron level and “Oja’s rule”, a simple Hebbian plasticity rule that captures the direction of most variability of inputs to a neuron (i.e., the first principal component). We expand the problem to the network level and ask the framework to find Oja’s rule together with an anti-Hebbian rule such that an initially random two-layer firing-rate network will recover several principal components of the input space after learning. 
Next, we move to networks of integrate-and-fire neurons with plastic inhibitory afferents. We train for rules that achieve a target firing rate by countering tuned excitation. Our algorithm discovers a specific subset of the manifold of rules that can solve this task. Our work is a proof of principle of an automated and unbiased approach to unveil synaptic plasticity rules that obey biological constraints and can solve complex functions."}],"main_file_link":[{"open_access":"1","url":"https://proceedings.neurips.cc/paper/2020/hash/bdbd5ebfde4934142c8a88e7a3796cd5-Abstract.html"}],"project":[{"grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020"},{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"}],"quality_controlled":"1","volume":33,"day":"06","oa_version":"Published Version","ec_funded":1,"type":"conference","author":[{"full_name":"Confavreux, Basile J","id":"C7610134-B532-11EA-BD9F-F5753DDC885E","last_name":"Confavreux","first_name":"Basile J"},{"last_name":"Zenke","full_name":"Zenke, Friedemann","first_name":"Friedemann"},{"first_name":"Everton J.","last_name":"Agnes","full_name":"Agnes, Everton J."},{"first_name":"Timothy","last_name":"Lillicrap","full_name":"Lillicrap, Timothy"},{"last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","first_name":"Tim P"}],"acknowledgement":"We would like to thank Chaitanya Chintaluri, Georgia Christodoulou, Bill Podlaski and Merima Šabanovic for useful discussions and comments. 
This work was supported by a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), a BBSRC grant (BB/N019512/1), an ERC consolidator Grant (SYNAPSEEK), a Leverhulme Trust Project Grant (RPG-2016-446), and funding from École Polytechnique, Paris.","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"9633","related_material":{"link":[{"relation":"is_continued_by","url":"https://doi.org/10.1101/2020.10.24.353409"}],"record":[{"relation":"dissertation_contains","id":"14422","status":"public"}]},"oa":1,"page":"16398-16408","intvolume":"        33","language":[{"iso":"eng"}],"publication":"Advances in Neural Information Processing Systems","date_created":"2021-07-04T22:01:27Z","scopus_import":"1","status":"public"}]
