[{"status":"public","article_type":"original","type":"journal_article","_id":"14841","department":[{"_id":"TiVo"}],"date_updated":"2024-01-23T10:20:40Z","month":"01","intvolume":" 121","scopus_import":"1","pmid":1,"oa_version":"None","abstract":[{"text":"De novo heterozygous variants in KCNC2 encoding the voltage-gated potassium (K+) channel subunit Kv3.2 are a recently described cause of developmental and epileptic encephalopathy (DEE). A de novo variant in KCNC2 c.374G > A (p.Cys125Tyr) was identified via exome sequencing in a patient with DEE. Relative to wild-type Kv3.2, Kv3.2-p.Cys125Tyr induces K+ currents exhibiting a large hyperpolarizing shift in the voltage dependence of activation, accelerated activation, and delayed deactivation consistent with a relative stabilization of the open conformation, along with increased current density. Leveraging the cryogenic electron microscopy (cryo-EM) structure of Kv3.1, molecular dynamic simulations suggest that a strong π-π stacking interaction between the variant Tyr125 and Tyr156 in the α-6 helix of the T1 domain promotes a relative stabilization of the open conformation of the channel, which underlies the observed gain of function. 
A multicompartment computational model of a Kv3-expressing parvalbumin-positive cerebral cortex fast-spiking γ-aminobutyric acidergic (GABAergic) interneuron (PV-IN) demonstrates how the Kv3.2-Cys125Tyr variant impairs neuronal excitability and dysregulates inhibition in cerebral cortex circuits to explain the resulting epilepsy.","lang":"eng"}],"related_material":{"link":[{"url":"https://github.com/ChrisCurrin/pv-kcnc2 ","relation":"software"}]},"volume":121,"issue":"3","ec_funded":1,"language":[{"iso":"eng"}],"publication_identifier":{"eissn":["1091-6490"]},"publication_status":"published","project":[{"name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"article_number":"e2307776121","title":"A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction","author":[{"first_name":"Jerome","last_name":"Clatot","full_name":"Clatot, Jerome"},{"full_name":"Currin, Christopher","orcid":"0000-0002-4809-5059","last_name":"Currin","first_name":"Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9"},{"last_name":"Liang","full_name":"Liang, Qiansheng","first_name":"Qiansheng"},{"full_name":"Pipatpolkai, Tanadet","last_name":"Pipatpolkai","first_name":"Tanadet"},{"first_name":"Shavonne L.","full_name":"Massey, Shavonne L.","last_name":"Massey"},{"full_name":"Helbig, Ingo","last_name":"Helbig","first_name":"Ingo"},{"first_name":"Lucie","full_name":"Delemotte, Lucie","last_name":"Delemotte"},{"last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P"},{"last_name":"Covarrubias","full_name":"Covarrubias, Manuel","first_name":"Manuel"},{"last_name":"Goldberg","full_name":"Goldberg, Ethan M.","first_name":"Ethan 
M."}],"article_processing_charge":"No","external_id":{"pmid":["38194456"]},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"chicago":"Clatot, Jerome, Christopher Currin, Qiansheng Liang, Tanadet Pipatpolkai, Shavonne L. Massey, Ingo Helbig, Lucie Delemotte, Tim P Vogels, Manuel Covarrubias, and Ethan M. Goldberg. “A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” Proceedings of the National Academy of Sciences of the United States of America. Proceedings of the National Academy of Sciences, 2024. https://doi.org/10.1073/pnas.2307776121.","ista":"Clatot J, Currin C, Liang Q, Pipatpolkai T, Massey SL, Helbig I, Delemotte L, Vogels TP, Covarrubias M, Goldberg EM. 2024. A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. Proceedings of the National Academy of Sciences of the United States of America. 121(3), e2307776121.","mla":"Clatot, Jerome, et al. “A Structurally Precise Mechanism Links an Epilepsy-Associated KCNC2 Potassium Channel Mutation to Interneuron Dysfunction.” Proceedings of the National Academy of Sciences of the United States of America, vol. 121, no. 3, e2307776121, Proceedings of the National Academy of Sciences, 2024, doi:10.1073/pnas.2307776121.","short":"J. Clatot, C. Currin, Q. Liang, T. Pipatpolkai, S.L. Massey, I. Helbig, L. Delemotte, T.P. Vogels, M. Covarrubias, E.M. Goldberg, Proceedings of the National Academy of Sciences of the United States of America 121 (2024).","ieee":"J. Clatot et al., “A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction,” Proceedings of the National Academy of Sciences of the United States of America, vol. 121, no. 3. Proceedings of the National Academy of Sciences, 2024.","apa":"Clatot, J., Currin, C., Liang, Q., Pipatpolkai, T., Massey, S. L., Helbig, I., … Goldberg, E. M. (2024). 
A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. Proceedings of the National Academy of Sciences of the United States of America. Proceedings of the National Academy of Sciences. https://doi.org/10.1073/pnas.2307776121","ama":"Clatot J, Currin C, Liang Q, et al. A structurally precise mechanism links an epilepsy-associated KCNC2 potassium channel mutation to interneuron dysfunction. Proceedings of the National Academy of Sciences of the United States of America. 2024;121(3). doi:10.1073/pnas.2307776121"},"publisher":"Proceedings of the National Academy of Sciences","quality_controlled":"1","acknowledgement":"This work was supported by an ERC Consolidator Grant (SYNAPSEEK) to T.P.V., the NOMIS Foundation through the NOMIS Fellowships program at IST Austria to C.B.C., a Jefferson Synaptic Biology Center Pilot Project Grant to M.C., NIH NINDS U54 NS108874 (PI, Alfred L. George), and NIH NINDS R01 NS122887 to E.M.G. The computations were enabled by resources provided by the Swedish National Infrastructure for Computing (SNIC) at the PDC Center for High-Performance Computing, KTH Royal Institute of Technology, partially funded by the Swedish Research Council through grant agreement no. 2018-05973. 
We thank Akshay Sridhar for the fruitful discussion of the project.","doi":"10.1073/pnas.2307776121","date_published":"2024-01-16T00:00:00Z","date_created":"2024-01-21T23:00:56Z","day":"16","publication":"Proceedings of the National Academy of Sciences of the United States of America","year":"2024"},{"title":"Dynamic and selective engrams emerge with memory consolidation","external_id":{"isi":["001145442300001"]},"article_processing_charge":"Yes (in subscription journal)","author":[{"last_name":"Feitosa Tomé","full_name":"Feitosa Tomé, Douglas","first_name":"Douglas","id":"0eed2d40-3d48-11ec-8d38-f789cc2e40b2"},{"last_name":"Zhang","full_name":"Zhang, Ying","first_name":"Ying"},{"full_name":"Aida, Tomomi","last_name":"Aida","first_name":"Tomomi"},{"first_name":"Olivia","last_name":"Mosto","full_name":"Mosto, Olivia"},{"first_name":"Yifeng","full_name":"Lu, Yifeng","last_name":"Lu"},{"last_name":"Chen","full_name":"Chen, Mandy","first_name":"Mandy"},{"last_name":"Sadeh","full_name":"Sadeh, Sadra","first_name":"Sadra"},{"last_name":"Roy","full_name":"Roy, Dheeraj S.","first_name":"Dheeraj S."},{"last_name":"Clopath","full_name":"Clopath, Claudia","first_name":"Claudia"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"mla":"Feitosa Tomé, Douglas, et al. “Dynamic and Selective Engrams Emerge with Memory Consolidation.” Nature Neuroscience, Springer Nature, 2024, doi:10.1038/s41593-023-01551-w.","ieee":"D. Feitosa Tomé et al., “Dynamic and selective engrams emerge with memory consolidation,” Nature Neuroscience. Springer Nature, 2024.","short":"D. Feitosa Tomé, Y. Zhang, T. Aida, O. Mosto, Y. Lu, M. Chen, S. Sadeh, D.S. Roy, C. Clopath, Nature Neuroscience (2024).","apa":"Feitosa Tomé, D., Zhang, Y., Aida, T., Mosto, O., Lu, Y., Chen, M., … Clopath, C. (2024). Dynamic and selective engrams emerge with memory consolidation. Nature Neuroscience. Springer Nature. https://doi.org/10.1038/s41593-023-01551-w","ama":"Feitosa Tomé D, Zhang Y, Aida T, et al. 
Dynamic and selective engrams emerge with memory consolidation. Nature Neuroscience. 2024. doi:10.1038/s41593-023-01551-w","chicago":"Feitosa Tomé, Douglas, Ying Zhang, Tomomi Aida, Olivia Mosto, Yifeng Lu, Mandy Chen, Sadra Sadeh, Dheeraj S. Roy, and Claudia Clopath. “Dynamic and Selective Engrams Emerge with Memory Consolidation.” Nature Neuroscience. Springer Nature, 2024. https://doi.org/10.1038/s41593-023-01551-w.","ista":"Feitosa Tomé D, Zhang Y, Aida T, Mosto O, Lu Y, Chen M, Sadeh S, Roy DS, Clopath C. 2024. Dynamic and selective engrams emerge with memory consolidation. Nature Neuroscience."},"oa":1,"quality_controlled":"1","publisher":"Springer Nature","acknowledgement":"We thank S. Erisken from Inscopix for helping us establish in vivo one-photon calcium imaging for this work. We thank K. Su at Tsinghua University for assistance with this work. This work was funded by the President’s PhD Scholarship from Imperial College London (D.F.T.), the Wellcome Trust (225412/Z/22/Z) (S.S.), the Biotechnology and Biological Sciences Research Council (BB/N013956/1 and BB/N019008/1) (C.C.), the Wellcome Trust (200790/Z/16/Z) (C.C.), the Simons Foundation (564408) (C.C.) and the Engineering and Physical Sciences Research Council (EP/R035806/1) (CC). The School of Life Sciences and the IDG/McGovern Institute for Brain Research supported Y.Z. 
The Warren Alpert Distinguished Scholar Award and National Institutes of Health 1K99NS125131-01 supported D.S.R.","date_created":"2024-01-28T23:01:43Z","doi":"10.1038/s41593-023-01551-w","date_published":"2024-01-19T00:00:00Z","publication":"Nature Neuroscience","day":"19","year":"2024","isi":1,"status":"public","article_type":"original","type":"journal_article","_id":"14887","department":[{"_id":"TiVo"}],"date_updated":"2024-01-29T09:22:00Z","month":"01","main_file_link":[{"open_access":"1","url":"https://doi.org/10.1038/s41593-023-01551-w"}],"scopus_import":"1","oa_version":"Published Version","abstract":[{"lang":"eng","text":"Episodic memories are encoded by experience-activated neuronal ensembles that remain necessary and sufficient for recall. However, the temporal evolution of memory engrams after initial encoding is unclear. In this study, we employed computational and experimental approaches to examine how the neural composition and selectivity of engrams change with memory consolidation. Our spiking neural network model yielded testable predictions: memories transition from unselective to selective as neurons drop out of and drop into engrams; inhibitory activity during recall is essential for memory selectivity; and inhibitory synaptic plasticity during memory consolidation is critical for engrams to become selective. Using activity-dependent labeling, longitudinal calcium imaging and a combination of optogenetic and chemogenetic manipulations in mouse dentate gyrus, we conducted contextual fear conditioning experiments that supported our model’s predictions. 
Our results reveal that memory engrams are dynamic and that changes in engram composition mediated by inhibitory plasticity are crucial for the emergence of memory selectivity."}],"related_material":{"record":[{"relation":"research_data","id":"14892","status":"public"}]},"language":[{"iso":"eng"}],"publication_status":"epub_ahead","publication_identifier":{"eissn":["1546-1726"],"issn":["1097-6256"]}},{"quality_controlled":"1","publisher":"Springer Nature","oa":1,"acknowledgement":"We thank C. Currin, B. Podlaski and the members of the Vogels group for fruitful discussions. E.J.A. and T.P.V. were supported by a Research Project Grant from the Leverhulme Trust (RPG-2016-446; TPV), a Sir Henry Dale Fellowship from the Wellcome Trust and the Royal Society (WT100000; T.P.V.), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z; T.P.V.) and a European Research Council Consolidator Grant (SYNAPSEEK, 819603; T.P.V.). For the purpose of open access, the authors have applied a CC BY public copyright license to any author accepted manuscript version arising from this submission. Open access funding provided by University of Basel.","date_published":"2024-03-20T00:00:00Z","doi":"10.1038/s41593-024-01597-4","date_created":"2024-03-24T23:01:00Z","year":"2024","day":"20","publication":"Nature Neuroscience","project":[{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"author":[{"first_name":"Everton J.","full_name":"Agnes, Everton J.","last_name":"Agnes"},{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181"}],"article_processing_charge":"Yes (via OA deal)","title":"Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks","citation":{"ieee":"E. J. 
Agnes and T. P. Vogels, “Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks,” Nature Neuroscience. Springer Nature, 2024.","short":"E.J. Agnes, T.P. Vogels, Nature Neuroscience (2024).","ama":"Agnes EJ, Vogels TP. Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. Nature Neuroscience. 2024. doi:10.1038/s41593-024-01597-4","apa":"Agnes, E. J., & Vogels, T. P. (2024). Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. Nature Neuroscience. Springer Nature. https://doi.org/10.1038/s41593-024-01597-4","mla":"Agnes, Everton J., and Tim P. Vogels. “Co-Dependent Excitatory and Inhibitory Plasticity Accounts for Quick, Stable and Long-Lasting Memories in Biological Networks.” Nature Neuroscience, Springer Nature, 2024, doi:10.1038/s41593-024-01597-4.","ista":"Agnes EJ, Vogels TP. 2024. Co-dependent excitatory and inhibitory plasticity accounts for quick, stable and long-lasting memories in biological networks. Nature Neuroscience.","chicago":"Agnes, Everton J., and Tim P Vogels. “Co-Dependent Excitatory and Inhibitory Plasticity Accounts for Quick, Stable and Long-Lasting Memories in Biological Networks.” Nature Neuroscience. Springer Nature, 2024. https://doi.org/10.1038/s41593-024-01597-4."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","scopus_import":"1","main_file_link":[{"url":"https://doi.org/10.1038/s41593-024-01597-4","open_access":"1"}],"month":"03","abstract":[{"lang":"eng","text":"The brain’s functionality is developed and maintained through synaptic plasticity. As synapses undergo plasticity, they also affect each other. The nature of such ‘co-dependency’ is difficult to disentangle experimentally, because multiple synapses must be monitored simultaneously. 
To help understand the experimentally observed phenomena, we introduce a framework that formalizes synaptic co-dependency between different connection types. The resulting model explains how inhibition can gate excitatory plasticity while neighboring excitatory–excitatory interactions determine the strength of long-term potentiation. Furthermore, we show how the interplay between excitatory and inhibitory synapses can account for the quick rise and long-term stability of a variety of synaptic weight profiles, such as orientation tuning and dendritic clustering of co-active synapses. In recurrent neuronal networks, co-dependent plasticity produces rich and stable motor cortex-like dynamics with high input sensitivity. Our results suggest an essential role for the neighborly synaptic interaction during learning, connecting micro-level physiology with network-wide phenomena."}],"oa_version":"Published Version","ec_funded":1,"publication_identifier":{"issn":["1097-6256"],"eissn":["1546-1726"]},"publication_status":"epub_ahead","language":[{"iso":"eng"}],"type":"journal_article","article_type":"original","status":"public","_id":"15171","department":[{"_id":"TiVo"}],"date_updated":"2024-03-25T07:04:05Z"},{"language":[{"iso":"eng"}],"publication_identifier":{"issn":["1553-734X"],"eissn":["1553-7358"]},"publication_status":"published","issue":"3","related_material":{"link":[{"relation":"software","url":"https://github.com/Neuroinflab/kCSD-python"}]},"volume":20,"oa_version":"Published Version","abstract":[{"text":"Interpretation of extracellular recordings can be challenging due to the long range of electric field. This challenge can be mitigated by estimating the current source density (CSD). Here we introduce kCSD-python, an open Python package implementing Kernel Current Source Density (kCSD) method and related tools to facilitate CSD analysis of experimental data and the interpretation of results. 
We show how to counter the limitations imposed by noise and assumptions in the method itself. kCSD-python allows CSD estimation for an arbitrary distribution of electrodes in 1D, 2D, and 3D, assuming distributions of sources in tissue, a slice, or in a single cell, and includes a range of diagnostic aids. We demonstrate its features in a Jupyter Notebook tutorial which illustrates a typical analytical workflow and main functionalities useful in validating analysis results.","lang":"eng"}],"month":"03","intvolume":" 20","scopus_import":"1","date_updated":"2024-03-25T07:54:23Z","department":[{"_id":"TiVo"}],"_id":"15169","status":"public","article_type":"original","type":"journal_article","day":"14","publication":"PLoS Computational Biology","year":"2024","doi":"10.1371/journal.pcbi.1011941","date_published":"2024-03-14T00:00:00Z","date_created":"2024-03-24T23:00:59Z","acknowledgement":"The Python implementation of kCSD was started by Grzegorz Parka during Google Summer of Code project through the International Neuroinformatics Coordinating Facility. Jan Mąka implemented the first Python version of skCSD class. This work was supported by the Polish National Science Centre (2013/08/W/NZ4/00691 to DKW; 2015/17/B/ST7/04123 to DKW). ","quality_controlled":"1","publisher":"Public Library of Science","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"ista":"Chintaluri C, Bejtka M, Sredniawa W, Czerwinski M, Dzik JM, Jedrzejewska-Szmek J, Wojciki DK. 2024. kCSD-python, reliable current source density estimation with quality control. PLoS Computational Biology. 20(3), e1011941.","chicago":"Chintaluri, Chaitanya, Marta Bejtka, Wladyslaw Sredniawa, Michal Czerwinski, Jakub M. Dzik, Joanna Jedrzejewska-Szmek, and Daniel K. Wojciki. “KCSD-Python, Reliable Current Source Density Estimation with Quality Control.” PLoS Computational Biology. Public Library of Science, 2024. https://doi.org/10.1371/journal.pcbi.1011941.","ama":"Chintaluri C, Bejtka M, Sredniawa W, et al. 
kCSD-python, reliable current source density estimation with quality control. PLoS Computational Biology. 2024;20(3). doi:10.1371/journal.pcbi.1011941","apa":"Chintaluri, C., Bejtka, M., Sredniawa, W., Czerwinski, M., Dzik, J. M., Jedrzejewska-Szmek, J., & Wojciki, D. K. (2024). kCSD-python, reliable current source density estimation with quality control. PLoS Computational Biology. Public Library of Science. https://doi.org/10.1371/journal.pcbi.1011941","ieee":"C. Chintaluri et al., “kCSD-python, reliable current source density estimation with quality control,” PLoS Computational Biology, vol. 20, no. 3. Public Library of Science, 2024.","short":"C. Chintaluri, M. Bejtka, W. Sredniawa, M. Czerwinski, J.M. Dzik, J. Jedrzejewska-Szmek, D.K. Wojciki, PLoS Computational Biology 20 (2024).","mla":"Chintaluri, Chaitanya, et al. “KCSD-Python, Reliable Current Source Density Estimation with Quality Control.” PLoS Computational Biology, vol. 20, no. 3, e1011941, Public Library of Science, 2024, doi:10.1371/journal.pcbi.1011941."},"title":"kCSD-python, reliable current source density estimation with quality control","author":[{"last_name":"Chintaluri","full_name":"Chintaluri, Chaitanya","id":"E4EDB536-3485-11EA-98D2-20AF3DDC885E","first_name":"Chaitanya"},{"first_name":"Marta","last_name":"Bejtka","full_name":"Bejtka, Marta"},{"first_name":"Wladyslaw","full_name":"Sredniawa, Wladyslaw","last_name":"Sredniawa"},{"full_name":"Czerwinski, Michal","last_name":"Czerwinski","first_name":"Michal"},{"first_name":"Jakub M.","full_name":"Dzik, Jakub M.","last_name":"Dzik"},{"first_name":"Joanna","full_name":"Jedrzejewska-Szmek, Joanna","last_name":"Jedrzejewska-Szmek"},{"full_name":"Wojciki, Daniel K.","last_name":"Wojciki","first_name":"Daniel K."}],"article_processing_charge":"Yes","article_number":"e1011941"},{"quality_controlled":"1","publisher":"Elsevier","alternative_title":["Vol. 
1: Biological Development and Physical Health"],"edition":"1","month":"02","abstract":[{"text":"Autism spectrum disorder (ASD) and epilepsy are frequently comorbid neurodevelopmental disorders. Extensive research has demonstrated shared pathological pathways, etiologies, and phenotypes. Many risk factors for these disorders, like genetic mutations and environmental pressures, are linked to changes in childhood brain development, which is a critical period for their manifestation.\r\nDecades of research have yielded many signatures for ASD and epilepsy, some shared and others unique or opposing. The anatomical, physiological, and behavioral correlates of these disorders are discussed in this chapter in the context of understanding shared pathological pathways. We end with important takeaways on the presentation, prevention, intervention, and policy changes for ASD and epilepsy. This chapter aims to explore the complexity of these disorders, both in etiology and phenotypes, with the further goal of appreciating the expanse of unknowns still to explore about the brain.","lang":"eng"}],"oa_version":"None","page":"86-98","doi":"10.1016/b978-0-12-818872-9.00129-1","date_published":"2023-02-01T00:00:00Z","date_created":"2023-04-25T07:52:43Z","publication_identifier":{"isbn":["9780128188736"]},"year":"2023","publication_status":"published","day":"01","language":[{"iso":"eng"}],"publication":"Encyclopedia of Child and Adolescent Health","type":"book_chapter","status":"public","_id":"12866","author":[{"first_name":"Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","full_name":"Currin, Christopher","orcid":"0000-0002-4809-5059","last_name":"Currin"},{"first_name":"Chad","full_name":"Beyer, Chad","last_name":"Beyer"}],"article_processing_charge":"No","department":[{"_id":"TiVo"}],"editor":[{"full_name":"Halpern-Felsher, Bonnie","last_name":"Halpern-Felsher","first_name":"Bonnie"}],"title":"Altered childhood brain development in autism and 
epilepsy","citation":{"ista":"Currin C, Beyer C. 2023. Altered childhood brain development in autism and epilepsy. In: Encyclopedia of Child and Adolescent Health. Vol. 1: Biological Development and Physical Health, 86–98.","chicago":"Currin, Christopher, and Chad Beyer. “Altered Childhood Brain Development in Autism and Epilepsy.” In Encyclopedia of Child and Adolescent Health, edited by Bonnie Halpern-Felsher, 1st ed., 86–98. Elsevier, 2023. https://doi.org/10.1016/b978-0-12-818872-9.00129-1.","short":"C. Currin, C. Beyer, in:, B. Halpern-Felsher (Ed.), Encyclopedia of Child and Adolescent Health, 1st ed., Elsevier, 2023, pp. 86–98.","ieee":"C. Currin and C. Beyer, “Altered childhood brain development in autism and epilepsy,” in Encyclopedia of Child and Adolescent Health, 1st ed., B. Halpern-Felsher, Ed. Elsevier, 2023, pp. 86–98.","apa":"Currin, C., & Beyer, C. (2023). Altered childhood brain development in autism and epilepsy. In B. Halpern-Felsher (Ed.), Encyclopedia of Child and Adolescent Health (1st ed., pp. 86–98). Elsevier. https://doi.org/10.1016/b978-0-12-818872-9.00129-1","ama":"Currin C, Beyer C. Altered childhood brain development in autism and epilepsy. In: Halpern-Felsher B, ed. Encyclopedia of Child and Adolescent Health. 1st ed. Elsevier; 2023:86-98. doi:10.1016/b978-0-12-818872-9.00129-1","mla":"Currin, Christopher, and Chad Beyer. “Altered Childhood Brain Development in Autism and Epilepsy.” Encyclopedia of Child and Adolescent Health, edited by Bonnie Halpern-Felsher, 1st ed., Elsevier, 2023, pp. 
86–98, doi:10.1016/b978-0-12-818872-9.00129-1."},"date_updated":"2023-04-25T09:25:40Z","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87"},{"date_created":"2023-10-12T14:13:25Z","date_published":"2023-10-12T00:00:00Z","doi":"10.15479/at:ista:14422","page":"148","day":"12","year":"2023","has_accepted_license":"1","publisher":"Institute of Science and Technology Austria","title":"Synapseek: Meta-learning synaptic plasticity rules","article_processing_charge":"No","author":[{"id":"C7610134-B532-11EA-BD9F-F5753DDC885E","first_name":"Basile J","last_name":"Confavreux","full_name":"Confavreux, Basile J"}],"user_id":"8b945eb4-e2f2-11eb-945a-df72226e66a9","citation":{"mla":"Confavreux, Basile J. Synapseek: Meta-Learning Synaptic Plasticity Rules. Institute of Science and Technology Austria, 2023, doi:10.15479/at:ista:14422.","ieee":"B. J. Confavreux, “Synapseek: Meta-learning synaptic plasticity rules,” Institute of Science and Technology Austria, 2023.","short":"B.J. Confavreux, Synapseek: Meta-Learning Synaptic Plasticity Rules, Institute of Science and Technology Austria, 2023.","ama":"Confavreux BJ. Synapseek: Meta-learning synaptic plasticity rules. 2023. doi:10.15479/at:ista:14422","apa":"Confavreux, B. J. (2023). Synapseek: Meta-learning synaptic plasticity rules. Institute of Science and Technology Austria. https://doi.org/10.15479/at:ista:14422","chicago":"Confavreux, Basile J. “Synapseek: Meta-Learning Synaptic Plasticity Rules.” Institute of Science and Technology Austria, 2023. https://doi.org/10.15479/at:ista:14422.","ista":"Confavreux BJ. 2023. Synapseek: Meta-learning synaptic plasticity rules. 
Institute of Science and Technology Austria."},"project":[{"grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"ec_funded":1,"license":"https://creativecommons.org/licenses/by-nc-sa/4.0/","related_material":{"record":[{"relation":"part_of_dissertation","status":"public","id":"9633"}]},"language":[{"iso":"eng"}],"file":[{"embargo":"2024-10-12","file_id":"14424","checksum":"7f636555eae7803323df287672fd13ed","relation":"main_file","access_level":"closed","embargo_to":"open_access","content_type":"application/pdf","file_name":"Confavreux_Thesis_2A.pdf","date_created":"2023-10-12T14:53:50Z","creator":"cchlebak","file_size":30599717,"date_updated":"2023-10-12T14:54:52Z"},{"file_name":"Confavreux Thesis.zip","date_created":"2023-10-18T07:38:34Z","creator":"cchlebak","file_size":68406739,"date_updated":"2023-10-18T07:56:08Z","checksum":"725e85946db92290a4583a0de9779e1b","file_id":"14440","relation":"source_file","access_level":"closed","content_type":"application/x-zip-compressed"}],"publication_status":"published","degree_awarded":"PhD","publication_identifier":{"issn":["2663-337X"]},"month":"10","alternative_title":["ISTA Thesis"],"oa_version":"Published Version","abstract":[{"lang":"eng","text":"Animals exhibit a remarkable ability to learn and remember new behaviors, skills, and associations throughout their lifetime. These capabilities are made possible thanks to a variety of\r\nchanges in the brain throughout adulthood, regrouped under the term \"plasticity\". Some cells\r\nin the brain —neurons— and specifically changes in the connections between neurons, the\r\nsynapses, were shown to be crucial for the formation, selection, and consolidation of memories\r\nfrom past experiences. These ongoing changes of synapses across time are called synaptic\r\nplasticity. 
Understanding how a myriad of biochemical processes operating at individual\r\nsynapses can somehow work in concert to give rise to meaningful changes in behavior is a\r\nfascinating problem and an active area of research.\r\nHowever, the experimental search for the precise plasticity mechanisms at play in the brain\r\nis daunting, as it is difficult to control and observe synapses during learning. Theoretical\r\napproaches have thus been the default method to probe the plasticity-behavior connection. Such\r\nstudies attempt to extract unifying principles across synapses and model all observed synaptic\r\nchanges using plasticity rules: equations that govern the evolution of synaptic strengths across\r\ntime in neuronal network models. These rules can use many relevant quantities to determine\r\nthe magnitude of synaptic changes, such as the precise timings of pre- and postsynaptic\r\naction potentials, the recent neuronal activity levels, the state of neighboring synapses, etc.\r\nHowever, analytical studies rely heavily on human intuition and are forced to make simplifying\r\nassumptions about plasticity rules.\r\nIn this thesis, we aim to assist and augment human intuition in this search for plasticity rules.\r\nWe explore whether a numerical approach could automatically discover the plasticity rules\r\nthat elicit desired behaviors in large networks of interconnected neurons. This approach is\r\ndubbed meta-learning synaptic plasticity: learning plasticity rules which themselves will make\r\nneuronal networks learn how to solve a desired task. We first write all the potential plasticity\r\nmechanisms to consider using a single expression with adjustable parameters. 
We then optimize\r\nthese plasticity parameters using evolutionary strategies or Bayesian inference on tasks known\r\nto involve synaptic plasticity, such as familiarity detection and network stabilization.\r\nWe show that these automated approaches are powerful tools, able to complement established\r\nanalytical methods. By comprehensively screening plasticity rules at all synapse types in\r\nrealistic, spiking neuronal network models, we discover entire sets of degenerate plausible\r\nplasticity rules that reliably elicit memory-related behaviors. Our approaches allow for more\r\nrobust experimental predictions, by abstracting out the idiosyncrasies of individual plasticity\r\nrules, and provide fresh insights on synaptic plasticity in spiking network models.\r\n"}],"department":[{"_id":"GradSch"},{"_id":"TiVo"}],"file_date_updated":"2023-10-18T07:56:08Z","ddc":["610"],"date_updated":"2023-10-18T09:20:56Z","supervisor":[{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","last_name":"Vogels"}],"status":"public","tmp":{"name":"Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)","image":"/images/cc_by_nc_sa.png","legal_code_url":"https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode","short":"CC BY-NC-SA (4.0)"},"type":"dissertation","_id":"14422"},{"type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"status":"public","_id":"14666","department":[{"_id":"TiVo"}],"file_date_updated":"2023-12-11T12:45:12Z","date_updated":"2023-12-11T12:47:41Z","ddc":["570"],"scopus_import":"1","month":"11","intvolume":" 120","abstract":[{"lang":"eng","text":"So-called spontaneous activity is a central hallmark of most nervous systems. 
Such non-causal firing is contrary to the tenet of spikes as a means of communication, and its purpose remains unclear. We propose that self-initiated firing can serve as a release valve to protect neurons from the toxic conditions arising in mitochondria from lower-than-baseline energy consumption. To demonstrate the viability of our hypothesis, we built a set of models that incorporate recent experimental results indicating homeostatic control of metabolic products—Adenosine triphosphate (ATP), adenosine diphosphate (ADP), and reactive oxygen species (ROS)—by changes in firing. We explore the relationship of metabolic cost of spiking with its effect on the temporal patterning of spikes and reproduce experimentally observed changes in intrinsic firing in the fruitfly dorsal fan-shaped body neuron in a model with ROS-modulated potassium channels. We also show that metabolic spiking homeostasis can produce indefinitely sustained avalanche dynamics in cortical circuits. Our theory can account for key features of neuronal activity observed in many studies ranging from ion channel function all the way to resting state dynamics. 
We finish with a set of experimental predictions that would confirm an integrated, crucial role for metabolically regulated spiking and firmly link metabolic homeostasis and neuronal function."}],"pmid":1,"oa_version":"None","volume":120,"related_material":{"link":[{"url":"https://github.com/ccluri/metabolic_spiking","relation":"software"}]},"issue":"48","license":"https://creativecommons.org/licenses/by/4.0/","publication_identifier":{"eissn":["1091-6490"],"issn":["0027-8424"]},"publication_status":"published","file":[{"relation":"main_file","access_level":"open_access","content_type":"application/pdf","success":1,"checksum":"bf4ec38602a70dae4338077a5a4d497f","file_id":"14678","creator":"dernst","file_size":16891602,"date_updated":"2023-12-11T12:45:12Z","file_name":"2023_PNAS_Chintaluri.pdf","date_created":"2023-12-11T12:45:12Z"}],"language":[{"iso":"eng"}],"project":[{"_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","grant_number":"214316/Z/18/Z"}],"article_number":"e2306525120","author":[{"last_name":"Chintaluri","full_name":"Chintaluri, Chaitanya","id":"E4EDB536-3485-11EA-98D2-20AF3DDC885E","first_name":"Chaitanya"},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181"}],"external_id":{"pmid":["37988463"]},"article_processing_charge":"Yes (in subscription journal)","title":"Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species","citation":{"ista":"Chintaluri C, Vogels TP. 2023. Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. Proceedings of the National Academy of Sciences of the United States of America. 120(48), e2306525120.","chicago":"Chintaluri, Chaitanya, and Tim P Vogels. 
“Metabolically Regulated Spiking Could Serve Neuronal Energy Homeostasis and Protect from Reactive Oxygen Species.” Proceedings of the National Academy of Sciences of the United States of America. National Academy of Sciences, 2023. https://doi.org/10.1073/pnas.2306525120.","short":"C. Chintaluri, T.P. Vogels, Proceedings of the National Academy of Sciences of the United States of America 120 (2023).","ieee":"C. Chintaluri and T. P. Vogels, “Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species,” Proceedings of the National Academy of Sciences of the United States of America, vol. 120, no. 48. National Academy of Sciences, 2023.","ama":"Chintaluri C, Vogels TP. Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. Proceedings of the National Academy of Sciences of the United States of America. 2023;120(48). doi:10.1073/pnas.2306525120","apa":"Chintaluri, C., & Vogels, T. P. (2023). Metabolically regulated spiking could serve neuronal energy homeostasis and protect from reactive oxygen species. Proceedings of the National Academy of Sciences of the United States of America. National Academy of Sciences. https://doi.org/10.1073/pnas.2306525120","mla":"Chintaluri, Chaitanya, and Tim P. Vogels. “Metabolically Regulated Spiking Could Serve Neuronal Energy Homeostasis and Protect from Reactive Oxygen Species.” Proceedings of the National Academy of Sciences of the United States of America, vol. 120, no. 48, e2306525120, National Academy of Sciences, 2023, doi:10.1073/pnas.2306525120."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","publisher":"National Academy of Sciences","quality_controlled":"1","oa":1,"acknowledgement":"We thank Prof. C. Nazaret and Prof. J.-P. Mazat for sharing the code of their mitochondrial model. We also thank G. Miesenböck, E. Marder, L. Abbott, A. Kempf, P. Hasenhuetl, W. Podlaski, F. Zenke, E. Agnes, P. Bozelos, J. 
Watson, B. Confavreux, and G. Christodoulou, and the rest of the Vogels Lab for their feedback. This work was funded by Wellcome Trust and Royal Society Sir Henry Dale Research Fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and a UK Research and Innovation, Biotechnology and Biological Sciences Research Council grant (UKRI-BBSRC BB/N019512/1).","doi":"10.1073/pnas.2306525120","date_published":"2023-11-21T00:00:00Z","date_created":"2023-12-10T23:01:00Z","has_accepted_license":"1","year":"2023","day":"21","publication":"Proceedings of the National Academy of Sciences of the United States of America"},{"day":"02","has_accepted_license":"1","year":"2023","related_material":{"record":[{"relation":"used_in_publication","status":"public","id":"14887"}]},"doi":"10.5281/ZENODO.10251087","date_published":"2023-12-02T00:00:00Z","date_created":"2024-01-29T09:06:43Z","oa_version":"None","abstract":[{"text":"Code and data necessary to reproduce the simulations and data analyses reported in our manuscript: Tomé, D.F., Zhang, Y., Aida, T., Mosto, O., Lu, Y., Chen, M., Sadeh, S., Roy, D. S., Clopath, C. Dynamic and selective engrams emerge with memory consolidation. 2023.","lang":"eng"}],"month":"12","publisher":"Zenodo","main_file_link":[{"open_access":"1","url":"https://doi.org/10.5281/zenodo.10251087"}],"oa":1,"ddc":["570"],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_updated":"2024-01-29T09:22:01Z","citation":{"short":"D. Feitosa Tomé, (2023).","ieee":"D. Feitosa Tomé, “douglastome/dynamic-engrams: Dynamic and selective engrams emerge with memory consolidation.” Zenodo, 2023.","apa":"Feitosa Tomé, D. (2023). douglastome/dynamic-engrams: Dynamic and selective engrams emerge with memory consolidation. Zenodo. https://doi.org/10.5281/ZENODO.10251087","ama":"Feitosa Tomé D. douglastome/dynamic-engrams: Dynamic and selective engrams emerge with memory consolidation. 2023. doi:10.5281/ZENODO.10251087","mla":"Feitosa Tomé, Douglas. 
Douglastome/Dynamic-Engrams: Dynamic and Selective Engrams Emerge with Memory Consolidation. Zenodo, 2023, doi:10.5281/ZENODO.10251087.","ista":"Feitosa Tomé D. 2023. douglastome/dynamic-engrams: Dynamic and selective engrams emerge with memory consolidation, Zenodo, 10.5281/ZENODO.10251087.","chicago":"Feitosa Tomé, Douglas. “Douglastome/Dynamic-Engrams: Dynamic and Selective Engrams Emerge with Memory Consolidation.” Zenodo, 2023. https://doi.org/10.5281/ZENODO.10251087."},"department":[{"_id":"TiVo"}],"title":"douglastome/dynamic-engrams: Dynamic and selective engrams emerge with memory consolidation","author":[{"first_name":"Douglas","id":"0eed2d40-3d48-11ec-8d38-f789cc2e40b2","full_name":"Feitosa Tomé, Douglas","last_name":"Feitosa Tomé"}],"article_processing_charge":"No","_id":"14892","status":"public","type":"research_data_reference","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"}},{"month":"03","publisher":"OpenReview","quality_controlled":"1","main_file_link":[{"url":"https://openreview.net/forum?id=jHY_G91R880","open_access":"1"}],"oa":1,"oa_version":"Published Version","acknowledgement":"Houcemeddine Turki’s contributions to this final output have been funded through the Adapting\r\nWikidata to support clinical practice using Data Science, Semantic Web and Machine Learning\r\nproject, which is part of the Wikimedia Research Fund maintained by the Wikimedia Foundation in San Francisco, California, United States of America.","abstract":[{"text":"Traditional top-down approaches for global health have historically failed to achieve social progress (Hoffman et al., 2015; Hoffman & Røttingen, 2015). Recently, however, a more holistic, multi-level approach termed One Health (OH) (Osterhaus et al., 2020) is being adopted. 
Several sets of challenges have been identified for the implementation of OH (dos S. Ribeiro et al., 2019), including policy and funding, education and training, and multi-actor, multi-domain, and multi-level collaborations. These exist despite the increasing accessibility to\r\nknowledge and digital collaborative research tools through the internet. To address some of these challenges, we propose a general framework for grassroots community-based means of participatory research. Additionally, we present a specific roadmap to create a Machine Learning for Global Health community in Africa. The proposed framework aims to enable any small group of individuals with scarce resources to build and sustain an online community within approximately two years. We provide a discussion on the potential impact of the proposed framework for global health research collaborations.","lang":"eng"}],"date_published":"2023-03-02T00:00:00Z","date_created":"2024-02-14T15:11:48Z","day":"02","language":[{"iso":"eng"}],"publication":"1st Workshop on Machine Learning & Global Health","year":"2023","publication_status":"published","status":"public","type":"conference","conference":{"name":"ICLR: International Conference on Learning Representations","end_date":"2023-05-05","location":"Kigali, Rwanda","start_date":"2023-05-05"},"_id":"14993","department":[{"_id":"TiVo"}],"title":"A framework for grassroots research collaboration in machine learning and global health","author":[{"last_name":"Currin","orcid":"0000-0002-4809-5059","full_name":"Currin, Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","first_name":"Christopher"},{"first_name":"Mercy Nyamewaa","full_name":"Asiedu , Mercy Nyamewaa","last_name":"Asiedu "},{"first_name":"Chris","last_name":"Fourie","full_name":"Fourie, Chris"},{"last_name":"Rosman","full_name":"Rosman, Benjamin","first_name":"Benjamin"},{"full_name":"Turki, Houcemeddine","last_name":"Turki","first_name":"Houcemeddine"},{"last_name":"Lambebo 
Tonja","full_name":"Lambebo Tonja, Atnafu","first_name":"Atnafu"},{"first_name":"Jade","full_name":"Abbott, Jade","last_name":"Abbott"},{"first_name":"Marvellous","full_name":"Ajala, Marvellous","last_name":"Ajala"},{"last_name":"Adedayo","full_name":"Adedayo, Sadiq Adewale","first_name":"Sadiq Adewale"},{"first_name":"Chris Chinenye","last_name":"Emezue","full_name":"Emezue, Chris Chinenye"},{"last_name":"Machangara","full_name":"Machangara, Daphne","first_name":"Daphne"}],"article_processing_charge":"No","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"ista":"Currin C, Asiedu MN, Fourie C, Rosman B, Turki H, Lambebo Tonja A, Abbott J, Ajala M, Adedayo SA, Emezue CC, Machangara D. 2023. A framework for grassroots research collaboration in machine learning and global health. 1st Workshop on Machine Learning & Global Health. ICLR: International Conference on Learning Representations.","chicago":"Currin, Christopher, Mercy Nyamewaa Asiedu , Chris Fourie, Benjamin Rosman, Houcemeddine Turki, Atnafu Lambebo Tonja, Jade Abbott, et al. “A Framework for Grassroots Research Collaboration in Machine Learning and Global Health.” In 1st Workshop on Machine Learning & Global Health. OpenReview, 2023.","apa":"Currin, C., Asiedu , M. N., Fourie, C., Rosman, B., Turki, H., Lambebo Tonja, A., … Machangara, D. (2023). A framework for grassroots research collaboration in machine learning and global health. In 1st Workshop on Machine Learning & Global Health. Kigali, Rwanda: OpenReview.","ama":"Currin C, Asiedu MN, Fourie C, et al. A framework for grassroots research collaboration in machine learning and global health. In: 1st Workshop on Machine Learning & Global Health. OpenReview; 2023.","ieee":"C. Currin et al., “A framework for grassroots research collaboration in machine learning and global health,” in 1st Workshop on Machine Learning & Global Health, Kigali, Rwanda, 2023.","short":"C. Currin, M.N. Asiedu , C. Fourie, B. Rosman, H. Turki, A. Lambebo Tonja, J. 
Abbott, M. Ajala, S.A. Adedayo, C.C. Emezue, D. Machangara, in:, 1st Workshop on Machine Learning & Global Health, OpenReview, 2023.","mla":"Currin, Christopher, et al. “A Framework for Grassroots Research Collaboration in Machine Learning and Global Health.” 1st Workshop on Machine Learning & Global Health, OpenReview, 2023."},"date_updated":"2024-02-28T12:12:00Z"},{"abstract":[{"lang":"eng","text":"Brains are thought to engage in predictive learning - learning to predict upcoming stimuli - to construct an internal model of their environment. This is especially notable for spatial navigation, as first described by Tolman’s latent learning tasks. However, predictive learning has also been observed in sensory cortex, in settings unrelated to spatial navigation. Apart from normative frameworks such as active inference or efficient coding, what could be the utility of learning to predict the patterns of occurrence of correlated stimuli? Here we show that prediction, and thereby the construction of an internal model of sequential stimuli, can bootstrap the learning process of a working memory task in a recurrent neural network. We implemented predictive learning alongside working memory match-tasks, and networks emerged to solve the prediction task first by encoding information across time to predict upcoming stimuli, and then eavesdropped on this solution to solve the matching task. Eavesdropping was most beneficial when neural resources were limited. 
Hence, predictive learning acts as a general neural mechanism to learn to store sensory information that can later be essential for working memory tasks."}],"oa_version":"Published Version","scopus_import":"1","month":"12","intvolume":" 199","publication_identifier":{"eissn":["2640-3498"]},"publication_status":"published","file":[{"file_name":"2022_PMLR_vanderPlas.pdf","date_created":"2023-07-18T06:32:38Z","file_size":585135,"date_updated":"2023-07-18T06:32:38Z","creator":"dernst","success":1,"checksum":"7530a93ef42e10b4db1e5e4b69796e93","file_id":"13243","content_type":"application/pdf","relation":"main_file","access_level":"open_access"}],"language":[{"iso":"eng"}],"volume":199,"ec_funded":1,"_id":"13239","type":"conference","status":"public","date_updated":"2023-07-18T06:36:28Z","ddc":["000"],"department":[{"_id":"TiVo"}],"file_date_updated":"2023-07-18T06:32:38Z","acknowledgement":"The authors would like to thank members of the Vogels lab and Manohar lab, as well as Adam Packer, Andrew Saxe, Stefano Sarao Mannelli and Jacob Bakermans for fruitful discussions and comments on earlier versions of the manuscript.\r\nTLvdP was supported by funding from the Biotechnology and Biological Sciences Research Council (BBSRC) [grant number BB/M011224/1]. TPV was supported by an ERC Consolidator Grant (SYNAPSEEK). 
SGM was funded by a MRC Clinician Scientist Fellowship MR/P00878X and Leverhulme Grant RPG-2018-310.","publisher":"ML Research Press","quality_controlled":"1","oa":1,"has_accepted_license":"1","year":"2022","day":"01","publication":"Proceedings of Machine Learning Research","page":"518-531","date_published":"2022-12-01T00:00:00Z","date_created":"2023-07-16T22:01:12Z","project":[{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"citation":{"chicago":"Van Der Plas, Thijs L., Tim P Vogels, and Sanjay G. Manohar. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” In Proceedings of Machine Learning Research, 199:518–31. ML Research Press, 2022.","ista":"Van Der Plas TL, Vogels TP, Manohar SG. 2022. Predictive learning enables neural networks to learn complex working memory tasks. Proceedings of Machine Learning Research. vol. 199, 518–531.","mla":"Van Der Plas, Thijs L., et al. “Predictive Learning Enables Neural Networks to Learn Complex Working Memory Tasks.” Proceedings of Machine Learning Research, vol. 199, ML Research Press, 2022, pp. 518–31.","short":"T.L. Van Der Plas, T.P. Vogels, S.G. Manohar, in:, Proceedings of Machine Learning Research, ML Research Press, 2022, pp. 518–531.","ieee":"T. L. Van Der Plas, T. P. Vogels, and S. G. Manohar, “Predictive learning enables neural networks to learn complex working memory tasks,” in Proceedings of Machine Learning Research, 2022, vol. 199, pp. 518–531.","ama":"Van Der Plas TL, Vogels TP, Manohar SG. Predictive learning enables neural networks to learn complex working memory tasks. In: Proceedings of Machine Learning Research. Vol 199. ML Research Press; 2022:518-531.","apa":"Van Der Plas, T. L., Vogels, T. P., & Manohar, S. G. (2022). Predictive learning enables neural networks to learn complex working memory tasks. 
In Proceedings of Machine Learning Research (Vol. 199, pp. 518–531). ML Research Press."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","author":[{"first_name":"Thijs L.","full_name":"Van Der Plas, Thijs L.","last_name":"Van Der Plas"},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels"},{"last_name":"Manohar","full_name":"Manohar, Sanjay G.","first_name":"Sanjay G."}],"article_processing_charge":"No","title":"Predictive learning enables neural networks to learn complex working memory tasks"},{"acknowledgement":"We would like to thank Bernardo Rudy, Joanna Mattis, and Laura Mcgarry for comments on a previous version of the manuscript; Xiaohong Zhang for expert technical support and mouse colony maintenance; Melody Cheng for assistance with generation of the graphical abstract; and Jennifer Kearney for the gift of Scn1a+/− mice. This work was supported by the National Institute of Neurological Disorders and Stroke of the National Institutes of Health under F31NS111803 (to K.M.G.) and K08NS097633 and R01NS110869 (to E.M.G.), the Dravet Syndrome Foundation (to A.S.), an ERC Consolidator Grant (SYNAPSEEK) (to T.P.V.), and the NOMIS Foundation through the NOMIS Fellowships program at IST Austria (to C.C.). 
The graphical abstract was prepared using BioRender software (BioRender.com).","oa":1,"publisher":"Elsevier","quality_controlled":"1","publication":"Cell Reports","day":"29","year":"2022","isi":1,"has_accepted_license":"1","date_created":"2022-04-10T22:01:39Z","date_published":"2022-03-29T00:00:00Z","doi":"10.1016/j.celrep.2022.110580","article_number":"110580","project":[{"name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"},{"_id":"9B861AAC-BA93-11EA-9121-9846C619BF3A","name":"NOMIS Fellowship Program"}],"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"ieee":"K. Kaneko et al., “Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome,” Cell Reports, vol. 38, no. 13. Elsevier, 2022.","short":"K. Kaneko, C. Currin, K.M. Goff, E.R. Wengert, A. Somarowthu, T.P. Vogels, E.M. Goldberg, Cell Reports 38 (2022).","apa":"Kaneko, K., Currin, C., Goff, K. M., Wengert, E. R., Somarowthu, A., Vogels, T. P., & Goldberg, E. M. (2022). Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. Cell Reports. Elsevier. https://doi.org/10.1016/j.celrep.2022.110580","ama":"Kaneko K, Currin C, Goff KM, et al. Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. Cell Reports. 2022;38(13). doi:10.1016/j.celrep.2022.110580","mla":"Kaneko, Keisuke, et al. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” Cell Reports, vol. 38, no. 13, 110580, Elsevier, 2022, doi:10.1016/j.celrep.2022.110580.","ista":"Kaneko K, Currin C, Goff KM, Wengert ER, Somarowthu A, Vogels TP, Goldberg EM. 2022. 
Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome. Cell Reports. 38(13), 110580.","chicago":"Kaneko, Keisuke, Christopher Currin, Kevin M. Goff, Eric R. Wengert, Ala Somarowthu, Tim P Vogels, and Ethan M. Goldberg. “Developmentally Regulated Impairment of Parvalbumin Interneuron Synaptic Transmission in an Experimental Model of Dravet Syndrome.” Cell Reports. Elsevier, 2022. https://doi.org/10.1016/j.celrep.2022.110580."},"title":"Developmentally regulated impairment of parvalbumin interneuron synaptic transmission in an experimental model of Dravet syndrome","article_processing_charge":"No","external_id":{"isi":["000779794000001"]},"author":[{"full_name":"Kaneko, Keisuke","last_name":"Kaneko","first_name":"Keisuke"},{"last_name":"Currin","orcid":"0000-0002-4809-5059","full_name":"Currin, Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9","first_name":"Christopher"},{"full_name":"Goff, Kevin M.","last_name":"Goff","first_name":"Kevin M."},{"last_name":"Wengert","full_name":"Wengert, Eric R.","first_name":"Eric R."},{"first_name":"Ala","last_name":"Somarowthu","full_name":"Somarowthu, Ala"},{"last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"},{"first_name":"Ethan M.","full_name":"Goldberg, Ethan M.","last_name":"Goldberg"}],"oa_version":"Published Version","abstract":[{"lang":"eng","text":"Dravet syndrome is a neurodevelopmental disorder characterized by epilepsy, intellectual disability, and sudden death due to pathogenic variants in SCN1A with loss of function of the sodium channel subunit Nav1.1. Nav1.1-expressing parvalbumin GABAergic interneurons (PV-INs) from young Scn1a+/− mice show impaired action potential generation. 
An approach assessing PV-IN function in the same mice at two time points shows impaired spike generation in all Scn1a+/− mice at postnatal days (P) 16–21, whether deceased prior or surviving to P35, with normalization by P35 in surviving mice. However, PV-IN synaptic transmission is dysfunctional in young Scn1a+/− mice that did not survive and in Scn1a+/− mice ≥ P35. Modeling confirms that PV-IN axonal propagation is more sensitive to decreased sodium conductance than spike generation. These results demonstrate dynamic dysfunction in Dravet syndrome: combined abnormalities of PV-IN spike generation and propagation drives early disease severity, while ongoing dysfunction of synaptic transmission contributes to chronic pathology."}],"intvolume":" 38","month":"03","scopus_import":"1","language":[{"iso":"eng"}],"file":[{"date_created":"2022-04-15T11:00:58Z","file_name":"2022_CellReports_Kaneko.pdf","date_updated":"2022-04-15T11:00:58Z","file_size":4774216,"creator":"dernst","checksum":"49105c6c27c9af0f37f50a8bbb4d380d","file_id":"11172","success":1,"content_type":"application/pdf","access_level":"open_access","relation":"main_file"}],"publication_status":"published","publication_identifier":{"eissn":["2211-1247"]},"license":"https://creativecommons.org/licenses/by-nc-nd/4.0/","ec_funded":1,"volume":38,"issue":"13","_id":"11143","status":"public","tmp":{"short":"CC BY-NC-ND (4.0)","name":"Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)","legal_code_url":"https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode","image":"/images/cc_by_nc_nd.png"},"article_type":"original","type":"journal_article","ddc":["570"],"date_updated":"2023-08-03T06:32:55Z","file_date_updated":"2022-04-15T11:00:58Z","department":[{"_id":"TiVo"}]},{"year":"2022","has_accepted_license":"1","isi":1,"publication":"Communications 
biology","day":"25","date_created":"2022-09-04T22:02:02Z","doi":"10.1038/s42003-022-03801-2","date_published":"2022-08-25T00:00:00Z","acknowledgement":"We would like to thank the Vogels Lab for feedback on an earlier version of this manuscript. D.W.J. was supported by a Marshall Scholarship and a Clarendon Scholarship. R.P.C. and T.P.V. were supported by a Wellcome Trust and Royal Society Sir Henry Dale Fellowship (WT 100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant (SYNAPSEEK).","oa":1,"quality_controlled":"1","publisher":"Springer Nature","citation":{"ista":"Jia DW, Vogels TP, Costa RP. 2022. Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications biology. 5, 873.","chicago":"Jia, David W., Tim P Vogels, and Rui Ponte Costa. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” Communications Biology. Springer Nature, 2022. https://doi.org/10.1038/s42003-022-03801-2.","short":"D.W. Jia, T.P. Vogels, R.P. Costa, Communications Biology 5 (2022).","ieee":"D. W. Jia, T. P. Vogels, and R. P. Costa, “Developmental depression-to-facilitation shift controls excitation-inhibition balance,” Communications biology, vol. 5. Springer Nature, 2022.","ama":"Jia DW, Vogels TP, Costa RP. Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications biology. 2022;5. doi:10.1038/s42003-022-03801-2","apa":"Jia, D. W., Vogels, T. P., & Costa, R. P. (2022). Developmental depression-to-facilitation shift controls excitation-inhibition balance. Communications Biology. Springer Nature. https://doi.org/10.1038/s42003-022-03801-2","mla":"Jia, David W., et al. “Developmental Depression-to-Facilitation Shift Controls Excitation-Inhibition Balance.” Communications Biology, vol. 
5, 873, Springer Nature, 2022, doi:10.1038/s42003-022-03801-2."},"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","external_id":{"isi":["000844814800007"]},"article_processing_charge":"No","author":[{"last_name":"Jia","full_name":"Jia, David W.","first_name":"David W."},{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P"},{"first_name":"Rui Ponte","last_name":"Costa","full_name":"Costa, Rui Ponte"}],"title":"Developmental depression-to-facilitation shift controls excitation-inhibition balance","article_number":"873","project":[{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"},{"grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"publication_status":"published","publication_identifier":{"eissn":["2399-3642"]},"language":[{"iso":"eng"}],"file":[{"success":1,"file_id":"12022","checksum":"3ec724c4f6d3440028c217305e32915f","relation":"main_file","access_level":"open_access","content_type":"application/pdf","file_name":"2022_CommBiology_Jia.pdf","date_created":"2022-09-05T08:55:11Z","creator":"dernst","file_size":2491191,"date_updated":"2022-09-05T08:55:11Z"}],"ec_funded":1,"volume":5,"abstract":[{"lang":"eng","text":"Changes in the short-term dynamics of excitatory synapses over development have been observed throughout cortex, but their purpose and consequences remain unclear. Here, we propose that developmental changes in synaptic dynamics buffer the effect of slow inhibitory long-term plasticity, allowing for continuously stable neural activity. 
Using computational modeling we demonstrate that early in development excitatory short-term depression quickly stabilises neural activity, even in the face of strong, unbalanced excitation. We introduce a model of the commonly observed developmental shift from depression to facilitation and show that neural activity remains stable throughout development, while inhibitory synaptic plasticity slowly balances excitation, consistent with experimental observations. Our model predicts changes in the input responses from phasic to phasic-and-tonic and more precise spike timings. We also observe a gradual emergence of short-lasting memory traces governed by short-term plasticity development. We conclude that the developmental depression-to-facilitation shift may control excitation-inhibition balance throughout development with important functional consequences."}],"oa_version":"Published Version","scopus_import":"1","intvolume":" 5","month":"08","date_updated":"2023-08-03T13:22:42Z","ddc":["570"],"file_date_updated":"2022-09-05T08:55:11Z","department":[{"_id":"TiVo"}],"_id":"12009","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"type":"journal_article","article_type":"original","status":"public"},{"date_published":"2022-08-15T00:00:00Z","doi":"10.1371/journal.pcbi.1010365","date_created":"2022-09-11T22:01:56Z","isi":1,"has_accepted_license":"1","year":"2022","day":"15","publication":"PLoS Computational Biology","quality_controlled":"1","publisher":"Public Library of Science","oa":1,"acknowledgement":"We thank Friedemann Zenke for his comments, especially on the effect of the self loops on the spectrum. We also thank Ken Miller and Bill Podlaski for helpful comments. 
This research was funded by a Wellcome Trust and Royal Society Henry Dale Research Fellowship (WT100000; TPV), a Wellcome Senior Research Fellowship (214316/Z/18/Z; GC, EJA, and TPV), and a Research Project Grant by the Leverhulme Trust (RPG-2016-446; EJA and TPV). ","author":[{"full_name":"Christodoulou, Georgia","last_name":"Christodoulou","first_name":"Georgia"},{"full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P"},{"first_name":"Everton J.","last_name":"Agnes","full_name":"Agnes, Everton J."}],"article_processing_charge":"No","external_id":{"isi":["000937227700001"]},"title":"Regimes and mechanisms of transient amplification in abstract and biological neural networks","citation":{"ista":"Christodoulou G, Vogels TP, Agnes EJ. 2022. Regimes and mechanisms of transient amplification in abstract and biological neural networks. PLoS Computational Biology. 18(8), e1010365.","chicago":"Christodoulou, Georgia, Tim P Vogels, and Everton J. Agnes. “Regimes and Mechanisms of Transient Amplification in Abstract and Biological Neural Networks.” PLoS Computational Biology. Public Library of Science, 2022. https://doi.org/10.1371/journal.pcbi.1010365.","ieee":"G. Christodoulou, T. P. Vogels, and E. J. Agnes, “Regimes and mechanisms of transient amplification in abstract and biological neural networks,” PLoS Computational Biology, vol. 18, no. 8. Public Library of Science, 2022.","short":"G. Christodoulou, T.P. Vogels, E.J. Agnes, PLoS Computational Biology 18 (2022).","apa":"Christodoulou, G., Vogels, T. P., & Agnes, E. J. (2022). Regimes and mechanisms of transient amplification in abstract and biological neural networks. PLoS Computational Biology. Public Library of Science. https://doi.org/10.1371/journal.pcbi.1010365","ama":"Christodoulou G, Vogels TP, Agnes EJ. Regimes and mechanisms of transient amplification in abstract and biological neural networks. PLoS Computational Biology. 
2022;18(8). doi:10.1371/journal.pcbi.1010365","mla":"Christodoulou, Georgia, et al. “Regimes and Mechanisms of Transient Amplification in Abstract and Biological Neural Networks.” PLoS Computational Biology, vol. 18, no. 8, e1010365, Public Library of Science, 2022, doi:10.1371/journal.pcbi.1010365."},"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","project":[{"grant_number":"214316/Z/18/Z","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","_id":"c084a126-5a5b-11eb-8a69-d75314a70a87"}],"article_number":"e1010365","issue":"8","volume":18,"publication_identifier":{"eissn":["1553-7358"]},"publication_status":"published","file":[{"date_created":"2022-09-12T07:47:55Z","file_name":"2022_PLoSCompBio_Christodoulou.pdf","date_updated":"2022-09-12T07:47:55Z","file_size":2867337,"creator":"dernst","checksum":"8a81ab29f837991ee0ea770817c4a50e","file_id":"12090","success":1,"content_type":"application/pdf","access_level":"open_access","relation":"main_file"}],"language":[{"iso":"eng"}],"scopus_import":"1","month":"08","intvolume":" 18","abstract":[{"text":"Neuronal networks encode information through patterns of activity that define the networks’ function. The neurons’ activity relies on specific connectivity structures, yet the link between structure and function is not fully understood. Here, we tackle this structure-function problem with a new conceptual approach. Instead of manipulating the connectivity directly, we focus on upper triangular matrices, which represent the network dynamics in a given orthonormal basis obtained by the Schur decomposition. This abstraction allows us to independently manipulate the eigenspectrum and feedforward structures of a connectivity matrix. 
Using this method, we describe a diverse repertoire of non-normal transient amplification, and to complement the analysis of the dynamical regimes, we quantify the geometry of output trajectories through the effective rank of both the eigenvector and the dynamics matrices. Counter-intuitively, we find that shrinking the eigenspectrum’s imaginary distribution leads to highly amplifying regimes in linear and long-lasting dynamics in nonlinear networks. We also find a trade-off between amplification and dimensionality of neuronal dynamics, i.e., trajectories in neuronal state-space. Networks that can amplify a large number of orthogonal initial conditions produce neuronal trajectories that lie in the same subspace of the neuronal state-space. Finally, we examine networks of excitatory and inhibitory neurons. We find that the strength of global inhibition is directly linked with the amplitude of amplification, such that weakening inhibitory weights also decreases amplification, and that the eigenspectrum’s imaginary distribution grows with an increase in the ratio between excitatory-to-inhibitory and excitatory-to-excitatory connectivity strengths. Consequently, the strength of global inhibition reveals itself as a strong signature for amplification and a potential control mechanism to switch dynamical regimes. 
Our results shed a light on how biological networks, i.e., networks constrained by Dale’s law, may be optimised for specific dynamical regimes.","lang":"eng"}],"oa_version":"Published Version","file_date_updated":"2022-09-12T07:47:55Z","department":[{"_id":"TiVo"}],"date_updated":"2023-08-03T14:06:29Z","ddc":["570"],"article_type":"original","type":"journal_article","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"status":"public","_id":"12084"},{"date_published":"2022-06-02T00:00:00Z","doi":"10.1038/s41598-022-12494-w","date_created":"2023-01-16T09:48:30Z","has_accepted_license":"1","isi":1,"year":"2022","day":"02","publication":"Scientific Reports","publisher":"Springer Nature","quality_controlled":"1","oa":1,"acknowledgement":"CBC and AKN would like to thank Neuromatch Academy https://www.neuromatchacademy.org for introducing the authors to each other. We thank Dr. Krešimir Josic (University of Houston) , Fabian Baumann (Humboldt University) and Dr. Igor M. Sokolov (Humboldt University) for carefully reading the early versions of the manuscript and providing constructive feedback. CBC is supported by the German Deutscher Akademischer Austauschdienst (DAAD, https://daad.de), the South African National Research Foundation (NRF, https://nrf.ac.za), the University of Cape Town (UCT, https://uct.ac.za), and the NOMIS Foundation through the NOMIS Fellowships at IST Austria program (https://nomisfoundation.ch). 
SVV appreciate the generosity of Tecnológico de Monterrey for covering the publication fee.","author":[{"last_name":"Currin","full_name":"Currin, Christopher","orcid":"0000-0002-4809-5059","first_name":"Christopher","id":"e8321fc5-3091-11eb-8a53-83f309a11ac9"},{"first_name":"Sebastián Vallejo","full_name":"Vera, Sebastián Vallejo","last_name":"Vera"},{"first_name":"Ali","last_name":"Khaledi-Nasab","full_name":"Khaledi-Nasab, Ali"}],"external_id":{"pmid":["35654942"],"isi":["000805561200024"]},"article_processing_charge":"No","title":"Depolarization of echo chambers by random dynamical nudge","citation":{"mla":"Currin, Christopher, et al. “Depolarization of Echo Chambers by Random Dynamical Nudge.” Scientific Reports, vol. 12, 9234, Springer Nature, 2022, doi:10.1038/s41598-022-12494-w.","ama":"Currin C, Vera SV, Khaledi-Nasab A. Depolarization of echo chambers by random dynamical nudge. Scientific Reports. 2022;12. doi:10.1038/s41598-022-12494-w","apa":"Currin, C., Vera, S. V., & Khaledi-Nasab, A. (2022). Depolarization of echo chambers by random dynamical nudge. Scientific Reports. Springer Nature. https://doi.org/10.1038/s41598-022-12494-w","short":"C. Currin, S.V. Vera, A. Khaledi-Nasab, Scientific Reports 12 (2022).","ieee":"C. Currin, S. V. Vera, and A. Khaledi-Nasab, “Depolarization of echo chambers by random dynamical nudge,” Scientific Reports, vol. 12. Springer Nature, 2022.","chicago":"Currin, Christopher, Sebastián Vallejo Vera, and Ali Khaledi-Nasab. “Depolarization of Echo Chambers by Random Dynamical Nudge.” Scientific Reports. Springer Nature, 2022. https://doi.org/10.1038/s41598-022-12494-w.","ista":"Currin C, Vera SV, Khaledi-Nasab A. 2022. Depolarization of echo chambers by random dynamical nudge. Scientific Reports. 
12, 9234."},"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","article_number":"9234","volume":12,"publication_identifier":{"issn":["2045-2322"]},"publication_status":"published","file":[{"creator":"dernst","file_size":3625627,"date_updated":"2023-01-27T08:56:18Z","file_name":"2022_ScientificReports_Currin.pdf","date_created":"2023-01-27T08:56:18Z","relation":"main_file","access_level":"open_access","content_type":"application/pdf","success":1,"checksum":"e024a75f14ce5667795a31e44a259c52","file_id":"12418"}],"language":[{"iso":"eng"}],"scopus_import":"1","month":"06","intvolume":" 12","abstract":[{"lang":"eng","text":"In social networks, users often engage with like-minded peers. This selective exposure to opinions might result in echo chambers, i.e., political fragmentation and social polarization of user interactions. When echo chambers form, opinions have a bimodal distribution with two peaks on opposite sides. In certain issues, where either extreme positions contain a degree of misinformation, neutral consensus is preferable for promoting discourse. In this paper, we use an opinion dynamics model that naturally forms echo chambers in order to find a feedback mechanism that bridges these communities and leads to a neutral consensus. We introduce the random dynamical nudge (RDN), which presents each agent with input from a random selection of other agents’ opinions and does not require surveillance of every person’s opinions. Our computational results in two different models suggest that the RDN leads to a unimodal distribution of opinions centered around the neutral consensus. Furthermore, the RDN is effective both for preventing the formation of echo chambers and also for depolarizing existing echo chambers. 
Due to the simple and robust nature of the RDN, social media networks might be able to implement a version of this self-feedback mechanism, when appropriate, to prevent the segregation of online communities on complex social issues."}],"pmid":1,"oa_version":"Published Version","file_date_updated":"2023-01-27T08:56:18Z","department":[{"_id":"TiVo"}],"date_updated":"2023-08-04T09:26:30Z","ddc":["570"],"type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"status":"public","keyword":["Multidisciplinary"],"_id":"12225"},{"_id":"10753","status":"public","article_type":"letter_note","type":"journal_article","date_updated":"2023-10-03T10:53:17Z","department":[{"_id":"TiVo"}],"pmid":1,"oa_version":"Published Version","abstract":[{"text":"This is a comment on \"Meta-learning synaptic plasticity and memory addressing for continual familiarity detection.\" Neuron. 2022 Feb 2;110(3):544-557.e8.","lang":"eng"}],"month":"02","intvolume":" 110","scopus_import":"1","main_file_link":[{"open_access":"1","url":"https://doi.org/10.1016/j.neuron.2022.01.014"}],"language":[{"iso":"eng"}],"publication_identifier":{"eissn":["1097-4199"]},"publication_status":"published","issue":"3","volume":110,"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"ista":"Confavreux BJ, Vogels TP. 2022. A familiar thought: Machines that replace us? Neuron. 110(3), 361–362.","chicago":"Confavreux, Basile J, and Tim P Vogels. “A Familiar Thought: Machines That Replace Us?” Neuron. Elsevier, 2022. https://doi.org/10.1016/j.neuron.2022.01.014.","ama":"Confavreux BJ, Vogels TP. A familiar thought: Machines that replace us? Neuron. 2022;110(3):361-362. doi:10.1016/j.neuron.2022.01.014","apa":"Confavreux, B. J., & Vogels, T. P. (2022). A familiar thought: Machines that replace us? Neuron. Elsevier. 
https://doi.org/10.1016/j.neuron.2022.01.014","short":"B.J. Confavreux, T.P. Vogels, Neuron 110 (2022) 361–362.","ieee":"B. J. Confavreux and T. P. Vogels, “A familiar thought: Machines that replace us?,” Neuron, vol. 110, no. 3. Elsevier, pp. 361–362, 2022.","mla":"Confavreux, Basile J., and Tim P. Vogels. “A Familiar Thought: Machines That Replace Us?” Neuron, vol. 110, no. 3, Elsevier, 2022, pp. 361–62, doi:10.1016/j.neuron.2022.01.014."},"title":"A familiar thought: Machines that replace us?","author":[{"first_name":"Basile J","id":"C7610134-B532-11EA-BD9F-F5753DDC885E","last_name":"Confavreux","full_name":"Confavreux, Basile J"},{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181"}],"external_id":{"pmid":["35114107"],"isi":["000751819100005"]},"article_processing_charge":"No","quality_controlled":"1","publisher":"Elsevier","oa":1,"day":"02","publication":"Neuron","isi":1,"year":"2022","date_published":"2022-02-02T00:00:00Z","doi":"10.1016/j.neuron.2022.01.014","date_created":"2022-02-13T23:01:34Z","page":"361-362"},{"publisher":"Cold Spring Harbor Laboratory","main_file_link":[{"open_access":"1","url":"https://doi.org/10.1101/2020.01.08.898528"}],"oa":1,"month":"12","abstract":[{"text":"Context, such as behavioral state, is known to modulate memory formation and retrieval, but is usually ignored in associative memory models. Here, we propose several types of contextual modulation for associative memory networks that greatly increase their performance. In these networks, context inactivates specific neurons and connections, which modulates the effective connectivity of the network. Memories are stored only by the active components, thereby reducing interference from memories acquired in other contexts. 
Such networks exhibit several beneficial characteristics, including enhanced memory capacity, high robustness to noise, increased robustness to memory overloading, and better memory retention during continual learning. Furthermore, memories can be biased to have different relative strengths, or even gated on or off, according to contextual cues, providing a candidate model for cognitive control of memory and efficient memory search. An external context-encoding network can dynamically switch the memory network to a desired state, which we liken to experimentally observed contextual signals in prefrontal cortex and hippocampus. Overall, our work illustrates the benefits of organizing memory around context, and provides an important link between behavioral studies of memory and mechanistic details of neural circuits.SIGNIFICANCEMemory is context dependent — both encoding and recall vary in effectiveness and speed depending on factors like location and brain state during a task. We apply this idea to a simple computational model of associative memory through contextual gating of neurons and synaptic connections. Intriguingly, this results in several advantages, including vastly enhanced memory capacity, better robustness, and flexible memory gating. 
Our model helps to explain (i) how gating and inhibition contribute to memory processes, (ii) how memory access dynamically changes over time, and (iii) how context representations, such as those observed in hippocampus and prefrontal cortex, may interact with and control memory processes.","lang":"eng"}],"oa_version":"Preprint","doi":"10.1101/2020.01.08.898528","date_published":"2022-12-21T00:00:00Z","date_created":"2020-07-16T12:24:28Z","locked":"1","publication_status":"published","year":"2022","day":"21","language":[{"iso":"eng"}],"publication":"bioRxiv","type":"preprint","status":"public","_id":"8125","author":[{"first_name":"William F.","full_name":"Podlaski, William F.","orcid":"0000-0001-6619-7502","last_name":"Podlaski"},{"last_name":"Agnes","full_name":"Agnes, Everton J.","orcid":"0000-0001-7184-7311","first_name":"Everton J."},{"first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P"}],"article_processing_charge":"No","department":[{"_id":"TiVo"}],"title":"High capacity and dynamic accessibility in associative memory networks with context-dependent neuronal and synaptic gating","date_updated":"2024-03-06T12:03:59Z","citation":{"mla":"Podlaski, William F., et al. “High Capacity and Dynamic Accessibility in Associative Memory Networks with Context-Dependent Neuronal and Synaptic Gating.” BioRxiv, Cold Spring Harbor Laboratory, 2022, doi:10.1101/2020.01.08.898528.","ieee":"W. F. Podlaski, E. J. Agnes, and T. P. Vogels, “High capacity and dynamic accessibility in associative memory networks with context-dependent neuronal and synaptic gating,” bioRxiv. Cold Spring Harbor Laboratory, 2022.","short":"W.F. Podlaski, E.J. Agnes, T.P. Vogels, BioRxiv (2022).","apa":"Podlaski, W. F., Agnes, E. J., & Vogels, T. P. (2022). High capacity and dynamic accessibility in associative memory networks with context-dependent neuronal and synaptic gating. bioRxiv. 
Cold Spring Harbor Laboratory. https://doi.org/10.1101/2020.01.08.898528","ama":"Podlaski WF, Agnes EJ, Vogels TP. High capacity and dynamic accessibility in associative memory networks with context-dependent neuronal and synaptic gating. bioRxiv. 2022. doi:10.1101/2020.01.08.898528","chicago":"Podlaski, William F., Everton J. Agnes, and Tim P Vogels. “High Capacity and Dynamic Accessibility in Associative Memory Networks with Context-Dependent Neuronal and Synaptic Gating.” BioRxiv. Cold Spring Harbor Laboratory, 2022. https://doi.org/10.1101/2020.01.08.898528.","ista":"Podlaski WF, Agnes EJ, Vogels TP. 2022. High capacity and dynamic accessibility in associative memory networks with context-dependent neuronal and synaptic gating. bioRxiv, 10.1101/2020.01.08.898528."},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87"},{"project":[{"_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","grant_number":"214316/Z/18/Z"}],"title":"Online learning of neural computations from sparse temporal feedback","article_processing_charge":"No","author":[{"first_name":"Lukas","full_name":"Braun, Lukas","last_name":"Braun"},{"full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","citation":{"mla":"Braun, Lukas, and Tim P. Vogels. “Online Learning of Neural Computations from Sparse Temporal Feedback.” Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems, vol. 20, Neural Information Processing Systems Foundation, 2021, pp. 16437–50.","ama":"Braun L, Vogels TP. Online learning of neural computations from sparse temporal feedback. In: Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems. Vol 20. 
Neural Information Processing Systems Foundation; 2021:16437-16450.","apa":"Braun, L., & Vogels, T. P. (2021). Online learning of neural computations from sparse temporal feedback. In Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems (Vol. 20, pp. 16437–16450). Virtual, Online: Neural Information Processing Systems Foundation.","ieee":"L. Braun and T. P. Vogels, “Online learning of neural computations from sparse temporal feedback,” in Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems, Virtual, Online, 2021, vol. 20, pp. 16437–16450.","short":"L. Braun, T.P. Vogels, in:, Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2021, pp. 16437–16450.","chicago":"Braun, Lukas, and Tim P Vogels. “Online Learning of Neural Computations from Sparse Temporal Feedback.” In Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems, 20:16437–50. Neural Information Processing Systems Foundation, 2021.","ista":"Braun L, Vogels TP. 2021. Online learning of neural computations from sparse temporal feedback. Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems vol. 20, 16437–16450."},"oa":1,"publisher":"Neural Information Processing Systems Foundation","quality_controlled":"1","acknowledgement":"We would like to thank Professor Dr. Henning Sprekeler for his valuable suggestions and Dr. Andrew Saxe, Milan Klöwer and Anna Wallis for their constructive feedback on the manuscript. 
Lukas Braun was supported by the Network of European Neuroscience Schools through their NENS Exchange Grant program, by the European Union through their European Community Action Scheme for the Mobility of University Students, the Woodward Scholarship awarded by Wadham College, Oxford and the Medical Research Council [MR/N013468/1]. Tim P. Vogels was supported by a Wellcome Trust Senior Research Fellowship [214316/Z/18/Z].","date_created":"2022-06-19T22:01:59Z","date_published":"2021-12-01T00:00:00Z","page":"16437-16450","publication":"Advances in Neural Information Processing Systems - 35th Conference on Neural Information Processing Systems","day":"01","year":"2021","status":"public","conference":{"name":"NeurIPS: Neural Information Processing Systems","end_date":"2021-12-14","location":"Virtual, Online","start_date":"2021-12-06"},"type":"conference","_id":"11453","department":[{"_id":"TiVo"}],"date_updated":"2022-06-20T07:12:58Z","intvolume":" 20","month":"12","main_file_link":[{"url":"https://proceedings.neurips.cc/paper/2021/file/88e1ce84f9feef5a08d0df0334c53468-Paper.pdf","open_access":"1"}],"scopus_import":"1","oa_version":"Published Version","abstract":[{"lang":"eng","text":"Neuronal computations depend on synaptic connectivity and intrinsic electrophysiological properties. Synaptic connectivity determines which inputs from presynaptic neurons are integrated, while cellular properties determine how inputs are filtered over time. Unlike their biological counterparts, most computational approaches to learning in simulated neural networks are limited to changes in synaptic connectivity. However, if intrinsic parameters change, neural computations are altered drastically. Here, we include the parameters that determine the intrinsic properties,\r\ne.g., time constants and reset potential, into the learning paradigm. 
Using sparse feedback signals that indicate target spike times, and gradient-based parameter updates, we show that the intrinsic parameters can be learned along with the synaptic weights to produce specific input-output functions. Specifically, we use a teacher-student paradigm in which a randomly initialised leaky integrate-and-fire or resonate-and-fire neuron must recover the parameters of a teacher neuron. We show that complex temporal functions can be learned online and without backpropagation through time, relying on event-based updates only. Our results are a step towards online learning of neural computations from ungraded and unsigned sparse feedback signals with a biologically inspired learning mechanism."}],"volume":20,"language":[{"iso":"eng"}],"publication_status":"published","publication_identifier":{"issn":["1049-5258"],"isbn":["9781713845393"]}},{"title":"The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks","author":[{"first_name":"Friedemann","full_name":"Zenke, Friedemann","orcid":"0000-0003-1883-644X","last_name":"Zenke"},{"full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","last_name":"Vogels","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P"}],"external_id":{"pmid":["33513328"],"isi":["000663433900003"]},"article_processing_charge":"No","user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"mla":"Zenke, Friedemann, and Tim P. Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” Neural Computation, vol. 33, no. 4, MIT Press, 2021, pp. 899–925, doi:10.1162/neco_a_01367.","ieee":"F. Zenke and T. P. Vogels, “The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks,” Neural Computation, vol. 33, no. 4. MIT Press, pp. 899–925, 2021.","short":"F. Zenke, T.P. 
Vogels, Neural Computation 33 (2021) 899–925.","apa":"Zenke, F., & Vogels, T. P. (2021). The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. Neural Computation. MIT Press. https://doi.org/10.1162/neco_a_01367","ama":"Zenke F, Vogels TP. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. Neural Computation. 2021;33(4):899-925. doi:10.1162/neco_a_01367","chicago":"Zenke, Friedemann, and Tim P Vogels. “The Remarkable Robustness of Surrogate Gradient Learning for Instilling Complex Function in Spiking Neural Networks.” Neural Computation. MIT Press, 2021. https://doi.org/10.1162/neco_a_01367.","ista":"Zenke F, Vogels TP. 2021. The remarkable robustness of surrogate gradient learning for instilling complex function in spiking neural networks. Neural Computation. 33(4), 899–925."},"project":[{"_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","call_identifier":"H2020","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"},{"_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","grant_number":"214316/Z/18/Z"}],"date_published":"2021-03-01T00:00:00Z","doi":"10.1162/neco_a_01367","date_created":"2020-08-12T12:08:24Z","page":"899-925","day":"01","publication":"Neural Computation","has_accepted_license":"1","isi":1,"year":"2021","publisher":"MIT Press","quality_controlled":"1","oa":1,"acknowledgement":"F.Z. was supported by the Wellcome Trust (110124/Z/15/Z) and the Novartis Research Foundation. T.P.V. 
was supported by a Wellcome Trust Sir Henry Dale Research fellowship (WT100000), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), and an ERC Consolidator Grant SYNAPSEEK.","file_date_updated":"2022-04-08T06:05:39Z","department":[{"_id":"TiVo"}],"ddc":["000","570"],"date_updated":"2023-08-04T10:53:14Z","status":"public","type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"_id":"8253","issue":"4","volume":33,"ec_funded":1,"file":[{"file_size":1611614,"date_updated":"2022-04-08T06:05:39Z","creator":"dernst","file_name":"2021_NeuralComputation_Zenke.pdf","date_created":"2022-04-08T06:05:39Z","content_type":"application/pdf","relation":"main_file","access_level":"open_access","success":1,"file_id":"11131","checksum":"eac5a51c24c8989ae7cf9ae32ec3bc95"}],"language":[{"iso":"eng"}],"publication_identifier":{"issn":["0899-7667"],"eissn":["1530-888X"]},"publication_status":"published","month":"03","intvolume":" 33","scopus_import":"1","oa_version":"Published Version","pmid":1,"abstract":[{"lang":"eng","text":"Brains process information in spiking neural networks. Their intricate connections shape the diverse functions these networks perform. In comparison, the functional capabilities of models of spiking networks are still rudimentary. This shortcoming is mainly due to the lack of insight and practical algorithms to construct the necessary connectivity. Any such algorithm typically attempts to build networks by iteratively reducing the error compared to a desired output. But assigning credit to hidden units in multi-layered spiking networks has remained challenging due to the non-differentiable nonlinearity of spikes. To avoid this issue, one can employ surrogate gradients to discover the required connectivity in spiking network models. 
However, the choice of a surrogate is not unique, raising the question of how its implementation influences the effectiveness of the method. Here, we use numerical simulations to systematically study how essential design parameters of surrogate gradients impact learning performance on a range of classification problems. We show that surrogate gradient learning is robust to different shapes of underlying surrogate derivatives, but the choice of the derivative’s scale can substantially affect learning performance. When we combine surrogate gradients with a suitable activity regularization technique, robust information processing can be achieved in spiking networks even at the sparse activity limit. Our study provides a systematic account of the remarkable robustness of surrogate gradient learning and serves as a practical guide to model functional spiking neural networks."}]},{"_id":"8757","status":"public","article_type":"letter_note","type":"journal_article","ddc":["570"],"date_updated":"2023-08-04T11:10:20Z","file_date_updated":"2021-02-04T10:34:22Z","department":[{"_id":"TiVo"}],"pmid":1,"oa_version":"Published Version","abstract":[{"lang":"eng","text":"Traditional scientific conferences and seminar events have been hugely disrupted by the COVID-19 pandemic, paving the way for virtual forms of scientific communication to take hold and be put to the test."}],"intvolume":" 
22","month":"01","scopus_import":"1","language":[{"iso":"eng"}],"file":[{"checksum":"7985d7dff94c086e35b94a911d78d9ad","file_id":"9088","success":1,"content_type":"application/pdf","access_level":"open_access","relation":"main_file","date_created":"2021-02-04T10:34:22Z","file_name":"2021_NatureNeuroScience_Bozelos.pdf","date_updated":"2021-02-04T10:34:22Z","file_size":683634,"creator":"dernst"}],"publication_status":"published","publication_identifier":{"issn":["1471-003X"],"eissn":["1471-0048"]},"issue":"1","volume":22,"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"chicago":"Bozelos, Panagiotis, and Tim P Vogels. “Talking Science, Online.” Nature Reviews Neuroscience. Springer Nature, 2021. https://doi.org/10.1038/s41583-020-00408-6.","ista":"Bozelos P, Vogels TP. 2021. Talking science, online. Nature Reviews Neuroscience. 22(1), 1–2.","mla":"Bozelos, Panagiotis, and Tim P. Vogels. “Talking Science, Online.” Nature Reviews Neuroscience, vol. 22, no. 1, Springer Nature, 2021, pp. 1–2, doi:10.1038/s41583-020-00408-6.","short":"P. Bozelos, T.P. Vogels, Nature Reviews Neuroscience 22 (2021) 1–2.","ieee":"P. Bozelos and T. P. Vogels, “Talking science, online,” Nature Reviews Neuroscience, vol. 22, no. 1. Springer Nature, pp. 1–2, 2021.","apa":"Bozelos, P., & Vogels, T. P. (2021). Talking science, online. Nature Reviews Neuroscience. Springer Nature. https://doi.org/10.1038/s41583-020-00408-6","ama":"Bozelos P, Vogels TP. Talking science, online. Nature Reviews Neuroscience. 2021;22(1):1-2. 
doi:10.1038/s41583-020-00408-6"},"title":"Talking science, online","article_processing_charge":"No","external_id":{"isi":["000588256300001"],"pmid":["33173190"]},"author":[{"id":"52e9c652-2982-11eb-81d4-b43d94c63700","first_name":"Panagiotis","last_name":"Bozelos","full_name":"Bozelos, Panagiotis"},{"last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"oa":1,"quality_controlled":"1","publisher":"Springer Nature","publication":"Nature Reviews Neuroscience","day":"01","year":"2021","isi":1,"has_accepted_license":"1","date_created":"2020-11-15T23:01:18Z","doi":"10.1038/s41583-020-00408-6","date_published":"2021-01-01T00:00:00Z","page":"1-2"},{"issue":"4","volume":25,"language":[{"iso":"eng"}],"file":[{"creator":"dernst","file_size":380720,"date_updated":"2022-05-27T07:31:24Z","file_name":"2021_TrendsCognitiveSciences_Achakulvisut.pdf","date_created":"2022-05-27T07:31:24Z","relation":"main_file","access_level":"open_access","content_type":"application/pdf","success":1,"checksum":"87e39ea7bd266b976e8631b66979214d","file_id":"11415"}],"publication_status":"published","publication_identifier":{"issn":["1364-6613"],"eissn":["1879-307X"]},"intvolume":" 25","month":"04","scopus_import":"1","pmid":1,"oa_version":"Submitted Version","abstract":[{"lang":"eng","text":"Legacy conferences are costly and time consuming, and exclude scientists lacking various resources or abilities. During the 2020 pandemic, we created an online conference platform, Neuromatch Conferences (NMC), aimed at developing technological and cultural changes to make conferences more democratic, scalable, and accessible. 
We discuss the lessons we learned."}],"file_date_updated":"2022-05-27T07:31:24Z","department":[{"_id":"TiVo"}],"ddc":["570"],"date_updated":"2023-08-07T13:59:07Z","status":"public","article_type":"original","type":"journal_article","_id":"9228","date_created":"2021-03-07T23:01:25Z","date_published":"2021-04-01T00:00:00Z","doi":"10.1016/j.tics.2021.01.007","page":"265-268","publication":"Trends in Cognitive Sciences","day":"01","year":"2021","has_accepted_license":"1","isi":1,"oa":1,"quality_controlled":"1","publisher":"Elsevier","acknowledgement":"We thank all of our volunteers from the NMC conferences (list of names in the appendix). We also thank the NSF for support from 1734220 to B.W., and DARPA for support to T.A.","title":"Towards democratizing and automating online conferences: Lessons from the Neuromatch Conferences","article_processing_charge":"No","external_id":{"pmid":["33608214"],"isi":["000627418000001"]},"author":[{"full_name":"Achakulvisut, Titipat","last_name":"Achakulvisut","first_name":"Titipat"},{"first_name":"Tulakan","last_name":"Ruangrong","full_name":"Ruangrong, Tulakan"},{"last_name":"Mineault","full_name":"Mineault, Patrick","first_name":"Patrick"},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P"},{"first_name":"Megan A.K.","full_name":"Peters, Megan A.K.","last_name":"Peters"},{"first_name":"Panayiota","last_name":"Poirazi","full_name":"Poirazi, Panayiota"},{"last_name":"Rozell","full_name":"Rozell, Christopher","first_name":"Christopher"},{"full_name":"Wyble, Brad","last_name":"Wyble","first_name":"Brad"},{"first_name":"Dan F.M.","last_name":"Goodman","full_name":"Goodman, Dan F.M."},{"last_name":"Kording","full_name":"Kording, Konrad Paul","first_name":"Konrad Paul"}],"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"chicago":"Achakulvisut, Titipat, Tulakan Ruangrong, Patrick Mineault, Tim P Vogels, Megan A.K. 
Peters, Panayiota Poirazi, Christopher Rozell, Brad Wyble, Dan F.M. Goodman, and Konrad Paul Kording. “Towards Democratizing and Automating Online Conferences: Lessons from the Neuromatch Conferences.” Trends in Cognitive Sciences. Elsevier, 2021. https://doi.org/10.1016/j.tics.2021.01.007.","ista":"Achakulvisut T, Ruangrong T, Mineault P, Vogels TP, Peters MAK, Poirazi P, Rozell C, Wyble B, Goodman DFM, Kording KP. 2021. Towards democratizing and automating online conferences: Lessons from the Neuromatch Conferences. Trends in Cognitive Sciences. 25(4), 265–268.","mla":"Achakulvisut, Titipat, et al. “Towards Democratizing and Automating Online Conferences: Lessons from the Neuromatch Conferences.” Trends in Cognitive Sciences, vol. 25, no. 4, Elsevier, 2021, pp. 265–68, doi:10.1016/j.tics.2021.01.007.","apa":"Achakulvisut, T., Ruangrong, T., Mineault, P., Vogels, T. P., Peters, M. A. K., Poirazi, P., … Kording, K. P. (2021). Towards democratizing and automating online conferences: Lessons from the Neuromatch Conferences. Trends in Cognitive Sciences. Elsevier. https://doi.org/10.1016/j.tics.2021.01.007","ama":"Achakulvisut T, Ruangrong T, Mineault P, et al. Towards democratizing and automating online conferences: Lessons from the Neuromatch Conferences. Trends in Cognitive Sciences. 2021;25(4):265-268. doi:10.1016/j.tics.2021.01.007","ieee":"T. Achakulvisut et al., “Towards democratizing and automating online conferences: Lessons from the Neuromatch Conferences,” Trends in Cognitive Sciences, vol. 25, no. 4. Elsevier, pp. 265–268, 2021.","short":"T. Achakulvisut, T. Ruangrong, P. Mineault, T.P. Vogels, M.A.K. Peters, P. Poirazi, C. Rozell, B. Wyble, D.F.M. Goodman, K.P. 
Kording, Trends in Cognitive Sciences 25 (2021) 265–268."}},{"day":"17","publication":"eLife","isi":1,"has_accepted_license":"1","year":"2020","doi":"10.7554/eLife.56261","date_published":"2020-09-17T00:00:00Z","date_created":"2020-07-16T12:26:04Z","acknowledgement":"We thank Mahmood S Hoseini and Michael Stryker for sharing their data for Figure 2, and Philipp Berens, Sean Bittner, Jan Boelts, John Cunningham, Richard Gao, Scott Linderman, Eve Marder, Iain Murray, George Papamakarios, Astrid Prinz, Auguste Schulz and Srinivas Turaga for discussions and/or comments on the manuscript. This work was supported by the German Research Foundation (DFG) through SFB 1233 ‘Robust Vision’, (276693517), SFB 1089 ‘Synaptic Microcircuits’, SPP 2041 ‘Computational Connectomics’ and Germany's Excellence Strategy – EXC-Number 2064/1 – Project number 390727645 and the German Federal Ministry of Education and Research (BMBF, project ‘ADIMEM’, FKZ 01IS18052 A-D) to JHM, a Sir Henry Dale Fellowship by the Wellcome Trust and the Royal Society (WT100000; WFP and TPV), a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z; TPV), an ERC Consolidator Grant (SYNAPSEEK; WFP and CC), and a UK Research and Innovation, Biotechnology and Biological Sciences Research Council (CC, UKRI-BBSRC BB/N019512/1). We gratefully acknowledge the Leibniz Supercomputing Centre for funding this project by providing computing time on its Linux-Cluster.","quality_controlled":"1","publisher":"eLife Sciences Publications","oa":1,"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","citation":{"mla":"Gonçalves, Pedro J., et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” ELife, vol. 9, e56261, eLife Sciences Publications, 2020, doi:10.7554/eLife.56261.","apa":"Gonçalves, P. J., Lueckmann, J.-M., Deistler, M., Nonnenmacher, M., Öcal, K., Bassetto, G., … Macke, J. H. (2020). Training deep neural density estimators to identify mechanistic models of neural dynamics. 
ELife. eLife Sciences Publications. https://doi.org/10.7554/eLife.56261","ama":"Gonçalves PJ, Lueckmann J-M, Deistler M, et al. Training deep neural density estimators to identify mechanistic models of neural dynamics. eLife. 2020;9. doi:10.7554/eLife.56261","short":"P.J. Gonçalves, J.-M. Lueckmann, M. Deistler, M. Nonnenmacher, K. Öcal, G. Bassetto, C. Chintaluri, W.F. Podlaski, S.A. Haddad, T.P. Vogels, D.S. Greenberg, J.H. Macke, ELife 9 (2020).","ieee":"P. J. Gonçalves et al., “Training deep neural density estimators to identify mechanistic models of neural dynamics,” eLife, vol. 9. eLife Sciences Publications, 2020.","chicago":"Gonçalves, Pedro J., Jan-Matthis Lueckmann, Michael Deistler, Marcel Nonnenmacher, Kaan Öcal, Giacomo Bassetto, Chaitanya Chintaluri, et al. “Training Deep Neural Density Estimators to Identify Mechanistic Models of Neural Dynamics.” ELife. eLife Sciences Publications, 2020. https://doi.org/10.7554/eLife.56261.","ista":"Gonçalves PJ, Lueckmann J-M, Deistler M, Nonnenmacher M, Öcal K, Bassetto G, Chintaluri C, Podlaski WF, Haddad SA, Vogels TP, Greenberg DS, Macke JH. 2020. Training deep neural density estimators to identify mechanistic models of neural dynamics. eLife. 
9, e56261."},"title":"Training deep neural density estimators to identify mechanistic models of neural dynamics","author":[{"last_name":"Gonçalves","orcid":"0000-0002-6987-4836","full_name":"Gonçalves, Pedro J.","first_name":"Pedro J."},{"first_name":"Jan-Matthis","last_name":"Lueckmann","full_name":"Lueckmann, Jan-Matthis","orcid":"0000-0003-4320-4663"},{"first_name":"Michael","orcid":"0000-0002-3573-0404","full_name":"Deistler, Michael","last_name":"Deistler"},{"last_name":"Nonnenmacher","full_name":"Nonnenmacher, Marcel","orcid":"0000-0001-6044-6627","first_name":"Marcel"},{"orcid":"0000-0002-8528-6858","full_name":"Öcal, Kaan","last_name":"Öcal","first_name":"Kaan"},{"last_name":"Bassetto","full_name":"Bassetto, Giacomo","first_name":"Giacomo"},{"full_name":"Chintaluri, Chaitanya","orcid":"0000-0003-4252-1608","last_name":"Chintaluri","first_name":"Chaitanya","id":"BA06AFEE-A4BA-11EA-AE5C-14673DDC885E"},{"orcid":"0000-0001-6619-7502","full_name":"Podlaski, William F.","last_name":"Podlaski","first_name":"William F."},{"first_name":"Sara A.","orcid":"0000-0003-0807-0823","full_name":"Haddad, Sara A.","last_name":"Haddad"},{"id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P","last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181"},{"full_name":"Greenberg, David S.","last_name":"Greenberg","first_name":"David S."},{"first_name":"Jakob H.","full_name":"Macke, Jakob H.","orcid":"0000-0001-5154-8912","last_name":"Macke"}],"article_processing_charge":"No","external_id":{"isi":["000584989400001"],"pmid":["32940606"]},"article_number":"e56261","project":[{"grant_number":"819603","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine 
learning.","call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234"}],"file":[{"file_name":"2020_eLife_Gonçalves.pdf","date_created":"2020-10-27T11:37:32Z","file_size":17355867,"date_updated":"2020-10-27T11:37:32Z","creator":"cziletti","success":1,"file_id":"8709","checksum":"c4300ddcd93ed03fc9c6cdf1f77890be","content_type":"application/pdf","relation":"main_file","access_level":"open_access"}],"language":[{"iso":"eng"}],"publication_identifier":{"eissn":["2050-084X"]},"publication_status":"published","volume":9,"ec_funded":1,"pmid":1,"oa_version":"Published Version","abstract":[{"lang":"eng","text":"Mechanistic modeling in neuroscience aims to explain observed phenomena in terms of underlying causes. However, determining which model parameters agree with complex and stochastic neural data presents a significant challenge. We address this challenge with a machine learning tool which uses deep neural density estimators—trained using model simulations—to carry out Bayesian inference and retrieve the full space of parameters compatible with raw data or selected data features. Our method is scalable in parameters and data features and can rapidly analyze new data after initial training. We demonstrate the power and flexibility of our approach on receptive fields, ion channels, and Hodgkin–Huxley models. We also characterize the space of circuit configurations giving rise to rhythmic activity in the crustacean stomatogastric ganglion, and use these results to derive hypotheses for underlying compensation mechanisms. 
Our approach will help close the gap between data-driven and theory-driven models of neural dynamics."}],"month":"09","intvolume":" 9","scopus_import":"1","ddc":["570"],"date_updated":"2023-08-22T07:54:52Z","department":[{"_id":"TiVo"}],"file_date_updated":"2020-10-27T11:37:32Z","_id":"8127","status":"public","type":"journal_article","article_type":"original","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"}},{"year":"2020","has_accepted_license":"1","isi":1,"publication":"The Journal of Neuroscience","day":"09","page":"9634-9649","date_created":"2020-07-16T12:25:04Z","doi":"10.1523/JNEUROSCI.0276-20.2020","date_published":"2020-12-09T00:00:00Z","oa":1,"publisher":"Society for Neuroscience","quality_controlled":"1","citation":{"mla":"Agnes, Everton J., et al. “Complementary Inhibitory Weight Profiles Emerge from Plasticity and Allow Attentional Switching of Receptive Fields.” The Journal of Neuroscience, vol. 40, no. 50, Society for Neuroscience, 2020, pp. 9634–49, doi:10.1523/JNEUROSCI.0276-20.2020.","apa":"Agnes, E. J., Luppi, A. I., & Vogels, T. P. (2020). Complementary inhibitory weight profiles emerge from plasticity and allow attentional switching of receptive fields. The Journal of Neuroscience. Society for Neuroscience. https://doi.org/10.1523/JNEUROSCI.0276-20.2020","ama":"Agnes EJ, Luppi AI, Vogels TP. Complementary inhibitory weight profiles emerge from plasticity and allow attentional switching of receptive fields. The Journal of Neuroscience. 2020;40(50):9634-9649. doi:10.1523/JNEUROSCI.0276-20.2020","short":"E.J. Agnes, A.I. Luppi, T.P. Vogels, The Journal of Neuroscience 40 (2020) 9634–9649.","ieee":"E. J. Agnes, A. I. Luppi, and T. P. 
Vogels, “Complementary inhibitory weight profiles emerge from plasticity and allow attentional switching of receptive fields,” The Journal of Neuroscience, vol. 40, no. 50. Society for Neuroscience, pp. 9634–9649, 2020.","chicago":"Agnes, Everton J., Andrea I. Luppi, and Tim P Vogels. “Complementary Inhibitory Weight Profiles Emerge from Plasticity and Allow Attentional Switching of Receptive Fields.” The Journal of Neuroscience. Society for Neuroscience, 2020. https://doi.org/10.1523/JNEUROSCI.0276-20.2020.","ista":"Agnes EJ, Luppi AI, Vogels TP. 2020. Complementary inhibitory weight profiles emerge from plasticity and allow attentional switching of receptive fields. The Journal of Neuroscience. 40(50), 9634–9649."},"user_id":"4359f0d1-fa6c-11eb-b949-802e58b17ae8","article_processing_charge":"No","external_id":{"isi":["000606706400009"],"pmid":["33168622"]},"author":[{"first_name":"Everton J.","full_name":"Agnes, Everton J.","orcid":"0000-0001-7184-7311","last_name":"Agnes"},{"first_name":"Andrea I.","last_name":"Luppi","full_name":"Luppi, Andrea I."},{"last_name":"Vogels","orcid":"0000-0003-3295-6181","full_name":"Vogels, Tim P","first_name":"Tim P","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425"}],"title":"Complementary inhibitory weight profiles emerge from plasticity and allow attentional switching of receptive fields","publication_status":"published","publication_identifier":{"eissn":["1529-2401"]},"language":[{"iso":"eng"}],"file":[{"creator":"dernst","date_updated":"2020-12-28T08:31:47Z","file_size":2750920,"date_created":"2020-12-28T08:31:47Z","file_name":"2020_JourNeuroscience_Agnes.pdf","access_level":"open_access","relation":"main_file","content_type":"application/pdf","file_id":"8977","checksum":"7977e4dd6b89357d1a5cc88babac56da","success":1}],"volume":40,"issue":"50","abstract":[{"lang":"eng","text":"Cortical areas comprise multiple types of inhibitory interneurons with stereotypical connectivity motifs, but their combined effect on postsynaptic dynamics 
has been largely unexplored. Here, we analyse the response of a single postsynaptic model neuron receiving tuned excitatory connections alongside inhibition from two plastic populations. Depending on the inhibitory plasticity rule, synapses remain unspecific (flat), become anti-correlated to, or mirror excitatory synapses. Crucially, the neuron’s receptive field, i.e., its response to presynaptic stimuli, depends on the modulatory state of inhibition. When both inhibitory populations are active, inhibition balances excitation, resulting in uncorrelated postsynaptic responses regardless of the inhibitory tuning profiles. Modulating the activity of a given inhibitory population produces strong correlations to either preferred or non-preferred inputs, in line with recent experimental findings showing dramatic context-dependent changes of neurons’ receptive fields. We thus confirm that a neuron’s receptive field doesn’t follow directly from the weight profiles of its presynaptic afferents."}],"oa_version":"Published Version","pmid":1,"scopus_import":"1","intvolume":" 40","month":"12","date_updated":"2023-08-22T07:54:26Z","ddc":["570"],"department":[{"_id":"TiVo"}],"file_date_updated":"2020-12-28T08:31:47Z","_id":"8126","tmp":{"legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)","short":"CC BY (4.0)"},"type":"journal_article","article_type":"original","status":"public"},{"acknowledgement":"We would like to thank Chaitanya Chintaluri, Georgia Christodoulou, Bill Podlaski and Merima Šabanovic for useful discussions and comments. 
This work was supported by a Wellcome Trust Senior Research Fellowship (214316/Z/18/Z), a BBSRC grant (BB/N019512/1), an ERC consolidator Grant (SYNAPSEEK), a Leverhulme Trust Project Grant (RPG-2016-446), and funding from École Polytechnique, Paris.","quality_controlled":"1","oa":1,"day":"06","publication":"Advances in Neural Information Processing Systems","year":"2020","date_published":"2020-12-06T00:00:00Z","date_created":"2021-07-04T22:01:27Z","page":"16398-16408","project":[{"_id":"c084a126-5a5b-11eb-8a69-d75314a70a87","name":"What’s in a memory? Spatiotemporal dynamics in strongly coupled recurrent neuronal networks.","grant_number":"214316/Z/18/Z"},{"call_identifier":"H2020","_id":"0aacfa84-070f-11eb-9043-d7eb2c709234","name":"Learning the shape of synaptic plasticity rules for neuronal architectures and function through machine learning.","grant_number":"819603"}],"user_id":"6785fbc1-c503-11eb-8a32-93094b40e1cf","citation":{"chicago":"Confavreux, Basile J, Friedemann Zenke, Everton J. Agnes, Timothy Lillicrap, and Tim P Vogels. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” In Advances in Neural Information Processing Systems, 33:16398–408, 2020.","ista":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. 2020. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. Advances in Neural Information Processing Systems. NeurIPS: Conference on Neural Information Processing Systems vol. 33, 16398–16408.","mla":"Confavreux, Basile J., et al. “A Meta-Learning Approach to (Re)Discover Plasticity Rules That Carve a Desired Function into a Neural Network.” Advances in Neural Information Processing Systems, vol. 33, 2020, pp. 16398–408.","apa":"Confavreux, B. J., Zenke, F., Agnes, E. J., Lillicrap, T., & Vogels, T. P. (2020). A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. 
In Advances in Neural Information Processing Systems (Vol. 33, pp. 16398–16408). Vancouver, Canada.","ama":"Confavreux BJ, Zenke F, Agnes EJ, Lillicrap T, Vogels TP. A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network. In: Advances in Neural Information Processing Systems. Vol 33. ; 2020:16398-16408.","short":"B.J. Confavreux, F. Zenke, E.J. Agnes, T. Lillicrap, T.P. Vogels, in:, Advances in Neural Information Processing Systems, 2020, pp. 16398–16408.","ieee":"B. J. Confavreux, F. Zenke, E. J. Agnes, T. Lillicrap, and T. P. Vogels, “A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network,” in Advances in Neural Information Processing Systems, Vancouver, Canada, 2020, vol. 33, pp. 16398–16408."},"title":"A meta-learning approach to (re)discover plasticity rules that carve a desired function into a neural network","author":[{"id":"C7610134-B532-11EA-BD9F-F5753DDC885E","first_name":"Basile J","last_name":"Confavreux","full_name":"Confavreux, Basile J"},{"full_name":"Zenke, Friedemann","last_name":"Zenke","first_name":"Friedemann"},{"first_name":"Everton J.","full_name":"Agnes, Everton J.","last_name":"Agnes"},{"first_name":"Timothy","full_name":"Lillicrap, Timothy","last_name":"Lillicrap"},{"last_name":"Vogels","full_name":"Vogels, Tim P","orcid":"0000-0003-3295-6181","id":"CB6FF8D2-008F-11EA-8E08-2637E6697425","first_name":"Tim P"}],"article_processing_charge":"No","oa_version":"Published Version","abstract":[{"text":"The search for biologically faithful synaptic plasticity rules has resulted in a large body of models. They are usually inspired by – and fitted to – experimental data, but they rarely produce neural dynamics that serve complex functions. These failures suggest that current plasticity models are still under-constrained by existing data. 
Here, we present an alternative approach that uses meta-learning to discover plausible synaptic plasticity rules. Instead of experimental data, the rules are constrained by the functions they implement and the structure they are meant to produce. Briefly, we parameterize synaptic plasticity rules by a Volterra expansion and then use supervised learning methods (gradient descent or evolutionary strategies) to minimize a problem-dependent loss function that quantifies how effectively a candidate plasticity rule transforms an initially random network into one with the desired function. We first validate our approach by re-discovering previously described plasticity rules, starting at the single-neuron level and “Oja’s rule”, a simple Hebbian plasticity rule that captures the direction of most variability of inputs to a neuron (i.e., the first principal component). We expand the problem to the network level and ask the framework to find Oja’s rule together with an anti-Hebbian rule such that an initially random two-layer firing-rate network will recover several principal components of the input space after learning. Next, we move to networks of integrate-and-fire neurons with plastic inhibitory afferents. We train for rules that achieve a target firing rate by countering tuned excitation. Our algorithm discovers a specific subset of the manifold of rules that can solve this task. 
Our work is a proof of principle of an automated and unbiased approach to unveil synaptic plasticity rules that obey biological constraints and can solve complex functions.","lang":"eng"}],"month":"12","intvolume":" 33","scopus_import":"1","main_file_link":[{"open_access":"1","url":"https://proceedings.neurips.cc/paper/2020/hash/bdbd5ebfde4934142c8a88e7a3796cd5-Abstract.html"}],"language":[{"iso":"eng"}],"publication_identifier":{"issn":["1049-5258"]},"publication_status":"published","related_material":{"link":[{"relation":"is_continued_by","url":"https://doi.org/10.1101/2020.10.24.353409"}],"record":[{"relation":"dissertation_contains","id":"14422","status":"public"}]},"volume":33,"ec_funded":1,"_id":"9633","status":"public","type":"conference","conference":{"name":"NeurIPS: Conference on Neural Information Processing Systems","start_date":"2020-12-06","location":"Vancouver, Canada","end_date":"2020-12-12"},"date_updated":"2023-10-18T09:20:55Z","department":[{"_id":"TiVo"}]}]