[{"abstract":[{"text":"We study the singularities of the moduli space of degree e maps from smooth genus g curves to an arbitrary smooth hypersurface of low degree. For e large compared to g, we show that these moduli spaces have at worst terminal singularities. Our main approach is to study the jet schemes of these moduli spaces by developing a suitable form of the circle method.","lang":"eng"}],"article_processing_charge":"No","related_material":{"record":[{"relation":"earlier_version","status":"public","id":"18295"}]},"citation":{"ista":"Glas J, Hase-Liu M. Terminal singularities of the moduli space of curves on low degree hypersurfaces and the circle method. arXiv, <a href=\"https://doi.org/10.48550/arXiv.2412.14923\">10.48550/arXiv.2412.14923</a>.","chicago":"Glas, Jakob, and Matthew Hase-Liu. “Terminal Singularities of the Moduli Space of Curves on Low Degree Hypersurfaces and the Circle Method.” <i>ArXiv</i>, n.d. <a href=\"https://doi.org/10.48550/arXiv.2412.14923\">https://doi.org/10.48550/arXiv.2412.14923</a>.","apa":"Glas, J., &#38; Hase-Liu, M. (n.d.). Terminal singularities of the moduli space of curves on low degree hypersurfaces and the circle method. <i>arXiv</i>. <a href=\"https://doi.org/10.48550/arXiv.2412.14923\">https://doi.org/10.48550/arXiv.2412.14923</a>","ama":"Glas J, Hase-Liu M. Terminal singularities of the moduli space of curves on low degree hypersurfaces and the circle method. <i>arXiv</i>. doi:<a href=\"https://doi.org/10.48550/arXiv.2412.14923\">10.48550/arXiv.2412.14923</a>","ieee":"J. Glas and M. Hase-Liu, “Terminal singularities of the moduli space of curves on low degree hypersurfaces and the circle method,” <i>arXiv</i>.","short":"J. Glas, M. Hase-Liu, ArXiv (n.d.).","mla":"Glas, Jakob, and Matthew Hase-Liu. 
“Terminal Singularities of the Moduli Space of Curves on Low Degree Hypersurfaces and the Circle Method.” <i>ArXiv</i>, doi:<a href=\"https://doi.org/10.48550/arXiv.2412.14923\">10.48550/arXiv.2412.14923</a>."},"_id":"19013","month":"12","title":"Terminal singularities of the moduli space of curves on low degree hypersurfaces and the circle method","date_created":"2025-02-07T12:04:11Z","external_id":{"arxiv":["2412.14923"]},"department":[{"_id":"TiBr"}],"doi":"10.48550/arXiv.2412.14923","corr_author":"1","language":[{"iso":"eng"}],"OA_place":"repository","arxiv":1,"publication_status":"draft","type":"preprint","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2412.14923"}],"day":"19","status":"public","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"oa":1,"author":[{"last_name":"Glas","full_name":"Glas, Jakob","first_name":"Jakob","id":"d6423cba-dc74-11ea-a0a7-ee61689ff5fb"},{"first_name":"Matthew ","full_name":"Hase-Liu, Matthew ","last_name":"Hase-Liu"}],"publication":"arXiv","user_id":"8b945eb4-e2f2-11eb-945a-df72226e66a9","date_published":"2024-12-19T00:00:00Z","date_updated":"2025-04-15T08:05:40Z","oa_version":"Preprint","year":"2024"},{"file":[{"success":1,"relation":"main_file","access_level":"open_access","date_created":"2025-02-18T07:56:36Z","file_id":"19052","creator":"dernst","file_name":"2024_IMRN_Browning.pdf","file_size":205750,"checksum":"b625b8adf018d2a97591813c1fc17b96","content_type":"application/pdf","date_updated":"2025-02-18T07:56:36Z"}],"oa":1,"publication_identifier":{"eissn":["1687-0247"],"issn":["1073-7928"]},"author":[{"orcid":"0000-0002-8314-0177","first_name":"Timothy D","id":"35827D50-F248-11E8-B48F-1D18A9856A87","full_name":"Browning, Timothy 
D","last_name":"Browning"}],"file_date_updated":"2025-02-18T07:56:36Z","scopus_import":"1","publication":"International Mathematics Research Notices","isi":1,"intvolume":"      2024","volume":2024,"quality_controlled":"1","abstract":[{"text":"This paper corrects an error in an earlier work of the author.","lang":"eng"}],"publication_status":"published","external_id":{"isi":["001196957300001"]},"department":[{"_id":"TiBr"}],"date_created":"2025-02-18T07:15:50Z","language":[{"iso":"eng"}],"corr_author":"1","status":"public","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"day":"01","publisher":"Oxford University Press","type":"journal_article","issue":"13","has_accepted_license":"1","date_published":"2024-07-01T00:00:00Z","year":"2024","oa_version":"Published Version","date_updated":"2025-09-09T12:16:45Z","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","page":"10165-10168","_id":"19051","title":"The polynomial sieve and equal sums of like polynomials","month":"07","article_processing_charge":"Yes (via OA deal)","citation":{"ama":"Browning TD. The polynomial sieve and equal sums of like polynomials. <i>International Mathematics Research Notices</i>. 2024;2024(13):10165-10168. doi:<a href=\"https://doi.org/10.1093/imrn/rnae066\">10.1093/imrn/rnae066</a>","apa":"Browning, T. D. (2024). The polynomial sieve and equal sums of like polynomials. <i>International Mathematics Research Notices</i>. Oxford University Press. <a href=\"https://doi.org/10.1093/imrn/rnae066\">https://doi.org/10.1093/imrn/rnae066</a>","chicago":"Browning, Timothy D. “The Polynomial Sieve and Equal Sums of like Polynomials.” <i>International Mathematics Research Notices</i>. Oxford University Press, 2024. <a href=\"https://doi.org/10.1093/imrn/rnae066\">https://doi.org/10.1093/imrn/rnae066</a>.","ista":"Browning TD. 2024. 
The polynomial sieve and equal sums of like polynomials. International Mathematics Research Notices. 2024(13), 10165–10168.","mla":"Browning, Timothy D. “The Polynomial Sieve and Equal Sums of like Polynomials.” <i>International Mathematics Research Notices</i>, vol. 2024, no. 13, Oxford University Press, 2024, pp. 10165–68, doi:<a href=\"https://doi.org/10.1093/imrn/rnae066\">10.1093/imrn/rnae066</a>.","short":"T.D. Browning, International Mathematics Research Notices 2024 (2024) 10165–10168.","ieee":"T. D. Browning, “The polynomial sieve and equal sums of like polynomials,” <i>International Mathematics Research Notices</i>, vol. 2024, no. 13. Oxford University Press, pp. 10165–10168, 2024."},"related_material":{"record":[{"status":"public","relation":"earlier_version","id":"254"}]},"ddc":["510"],"doi":"10.1093/imrn/rnae066","OA_place":"publisher","OA_type":"hybrid","article_type":"original"},{"abstract":[{"lang":"eng","text":"Instruction-tuned Large Language Models (LLMs) show impressive results in numerous practical applications, but they lack essential safety features that are common in other areas of computer science, particularly an explicit separation of instructions and data. This makes them vulnerable to manipulations such as indirect prompt injections and generally unsuitable for safety-critical tasks. Surprisingly, there is currently no established definition or benchmark to quantify this phenomenon. In this work, we close this gap by introducing a formal measure for instruction-data separation and an empirical variant that is calculable from a model's outputs. We also present a new dataset, SEP, that allows estimating the measure for real-world models. Our results on various LLMs show that the problem of instruction-data separation is real: all models fail to achieve high separation, and canonical mitigation techniques, such as prompt engineering and fine-tuning, either fail to substantially improve separation or reduce model utility. 
The source code and SEP dataset are openly accessible at https://github.com/egozverev/Shold-It-Be-Executed-Or-Processed.\r\n"}],"license":"https://creativecommons.org/licenses/by-sa/4.0/","article_number":"2403.06833","acknowledged_ssus":[{"_id":"ScienComp"}],"external_id":{"arxiv":["2403.06833"]},"department":[{"_id":"GradSch"},{"_id":"ChLa"}],"date_created":"2025-02-20T10:13:42Z","corr_author":"1","language":[{"iso":"eng"}],"publication_status":"published","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2403.06833"}],"file":[{"file_name":"2403.06833v3.pdf","creator":"ezverev","file_id":"19064","date_created":"2025-02-20T10:11:45Z","file_size":530972,"content_type":"application/pdf","checksum":"35eb43968684b87be59144603ef10af0","date_updated":"2025-02-20T10:11:45Z","success":1,"access_level":"open_access","relation":"main_file"}],"acknowledgement":"The authors would like to sincerely thank Juan Rocamonde for valuable feedback to our manuscript. We acknowledge the support from the Scientific Service Units (SSU) of ISTA through resources provided by Scientific Computing (SciComp). We thank Dan Alistarh for providing us with computational resources. This work was partially funded by the German Federal Ministry of Education and Research (BMBF) under the grant AIgenCY (16KIS2012) and ELSA – European Lighthouse on Secure and Safe AI funded by the European Union under grant agreement No. 101070617. Views and opinions expressed are however those of the authors only and do not necessarily reflect those of the European Union or European Commission. 
Neither the European Union nor the European Commission can be held responsible for them.","oa":1,"publication":"arXiv","author":[{"first_name":"Egor","id":"05162b19-1340-11ed-8f02-fa94e0e8c3bc","full_name":"Zverev, Egor","last_name":"Zverev"},{"last_name":"Abdelnabi","full_name":"Abdelnabi, Sahar","first_name":"Sahar"},{"orcid":"0009-0003-4119-6281","first_name":"Soroush","id":"06000900-6068-11ef-8d61-c2472ef2e752","full_name":"Tabesh, Soroush","last_name":"Tabesh"},{"first_name":"Mario","last_name":"Fritz","full_name":"Fritz, Mario"},{"last_name":"Lampert","full_name":"Lampert, Christoph","first_name":"Christoph","id":"40C20FD2-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-8622-7887"}],"file_date_updated":"2025-02-20T10:11:45Z","article_processing_charge":"No","citation":{"ieee":"E. Zverev, S. Abdelnabi, S. Tabesh, M. Fritz, and C. Lampert, “Can LLMs separate instructions from data? And what do we even mean by that?,” <i>arXiv</i>. 2024.","short":"E. Zverev, S. Abdelnabi, S. Tabesh, M. Fritz, C. Lampert, ArXiv (2024).","mla":"Zverev, Egor, et al. “Can LLMs Separate Instructions from Data? And What Do We Even Mean by That?” <i>ArXiv</i>, 2403.06833, 2024, doi:<a href=\"https://doi.org/10.48550/arXiv.2403.06833\">10.48550/arXiv.2403.06833</a>.","ista":"Zverev E, Abdelnabi S, Tabesh S, Fritz M, Lampert C. 2024. Can LLMs separate instructions from data? And what do we even mean by that? arXiv, 2403.06833.","chicago":"Zverev, Egor, Sahar Abdelnabi, Soroush Tabesh, Mario Fritz, and Christoph Lampert. “Can LLMs Separate Instructions from Data? And What Do We Even Mean by That?” <i>ArXiv</i>, 2024. <a href=\"https://doi.org/10.48550/arXiv.2403.06833\">https://doi.org/10.48550/arXiv.2403.06833</a>.","apa":"Zverev, E., Abdelnabi, S., Tabesh, S., Fritz, M., &#38; Lampert, C. (2024). Can LLMs separate instructions from data? And what do we even mean by that? <i>arXiv</i>. 
<a href=\"https://doi.org/10.48550/arXiv.2403.06833\">https://doi.org/10.48550/arXiv.2403.06833</a>","ama":"Zverev E, Abdelnabi S, Tabesh S, Fritz M, Lampert C. Can LLMs separate instructions from data? And what do we even mean by that? <i>arXiv</i>. 2024. doi:<a href=\"https://doi.org/10.48550/arXiv.2403.06833\">10.48550/arXiv.2403.06833</a>"},"related_material":{"link":[{"url":"https://github.com/egozverev/Shold-It-Be-Executed-Or-Processed","relation":"software"}]},"_id":"19063","title":"Can LLMs separate instructions from data? And what do we even mean by that?","month":"03","OA_place":"repository","doi":"10.48550/arXiv.2403.06833","OA_type":"green","arxiv":1,"ddc":["000"],"type":"preprint","day":"01","status":"public","tmp":{"short":"CC BY-SA (4.0)","legal_code_url":"https://creativecommons.org/licenses/by-sa/4.0/legalcode","name":"Creative Commons Attribution-ShareAlike 4.0 International Public License (CC BY-SA 4.0)","image":"/images/cc_by_sa.png"},"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_published":"2024-03-01T00:00:00Z","has_accepted_license":"1","oa_version":"Preprint","year":"2024","date_updated":"2025-02-24T12:52:23Z"},{"citation":{"mla":"Hwong, Yi-Ling, and Caroline J. Muller. <i>Data - The Unreasonable Efficiency of Total Rain Evaporation Removal in Triggering Convective Self-Aggregation</i>. Zenodo, 2024, doi:<a href=\"https://doi.org/10.5281/ZENODO.10687169\">10.5281/ZENODO.10687169</a>.","short":"Y.-L. Hwong, C.J. Muller, (2024).","ieee":"Y.-L. Hwong and C. J. Muller, “Data - The unreasonable efficiency of total rain evaporation removal in triggering convective self-aggregation.” Zenodo, 2024.","apa":"Hwong, Y.-L., &#38; Muller, C. J. (2024). Data - The unreasonable efficiency of total rain evaporation removal in triggering convective self-aggregation. Zenodo. <a href=\"https://doi.org/10.5281/ZENODO.10687169\">https://doi.org/10.5281/ZENODO.10687169</a>","ama":"Hwong Y-L, Muller CJ. 
Data - The unreasonable efficiency of total rain evaporation removal in triggering convective self-aggregation. 2024. doi:<a href=\"https://doi.org/10.5281/ZENODO.10687169\">10.5281/ZENODO.10687169</a>","ista":"Hwong Y-L, Muller CJ. 2024. Data - The unreasonable efficiency of total rain evaporation removal in triggering convective self-aggregation, Zenodo, <a href=\"https://doi.org/10.5281/ZENODO.10687169\">10.5281/ZENODO.10687169</a>.","chicago":"Hwong, Yi-Ling, and Caroline J Muller. “Data - The Unreasonable Efficiency of Total Rain Evaporation Removal in Triggering Convective Self-Aggregation.” Zenodo, 2024. <a href=\"https://doi.org/10.5281/ZENODO.10687169\">https://doi.org/10.5281/ZENODO.10687169</a>."},"related_material":{"record":[{"id":"15186","relation":"used_in_publication","status":"public"}]},"abstract":[{"text":"This repository contains the data, scripts, SAM codes and files required to reproduce the results of the manuscript \"The Unreasonable Efficiency of Total Rain Evaporation Removal in Triggering Convective Self-Aggregation\" submitted to the Geophysical Research Letters (GRL).\r\n\r\nBrief description of project: This project aims to examine the impact of rain evaporation removal or reduction in the planetary boundary layer (PBL) on convective self aggregation (CSA). Non-rotating radiative-convective equilibrium (RCE) simulations were conducted with the System for Atmospheric Modeling (SAM) cloud resolving model. Rain evaporation in the lowest 1 km was progressively reduced and the effect on CSA was investigated. The physical processes underlying this type of aggregation (referred to in the manuscript as no-evaporation CSA, or NE-CSA) were analyzed and described. 
\r\nThe default SAM code base (version 6.10.8) can be downloaded from here: http://rossby.msrc.sunysb.edu/~marat/SAM.html","lang":"eng"}],"article_processing_charge":"No","title":"Data - The unreasonable efficiency of total rain evaporation removal in triggering convective self-aggregation","month":"02","_id":"19307","doi":"10.5281/ZENODO.10687169","OA_place":"repository","corr_author":"1","OA_type":"green","department":[{"_id":"CaMu"}],"date_created":"2025-03-07T08:39:40Z","ddc":["550"],"day":"21","type":"research_data_reference","publisher":"Zenodo","main_file_link":[{"open_access":"1","url":"https://doi.org/10.5281/zenodo.10687169"}],"oa":1,"tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"status":"public","author":[{"full_name":"Hwong, Yi-Ling","last_name":"Hwong","id":"1217aa61-4dd1-11ec-9ac3-f2ba3f17ee22","first_name":"Yi-Ling","orcid":"0000-0001-9281-3479"},{"first_name":"Caroline J","id":"f978ccb0-3f7f-11eb-b193-b0e2bd13182b","last_name":"Muller","full_name":"Muller, Caroline J","orcid":"0000-0001-5836-5350"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","year":"2024","oa_version":"Published Version","date_updated":"2025-09-04T13:16:39Z","has_accepted_license":"1","date_published":"2024-02-21T00:00:00Z"},{"status":"public","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"type":"journal_article","publisher":"Transactions on Machine Learning Research","day":"12","has_accepted_license":"1","date_published":"2024-04-12T00:00:00Z","date_updated":"2025-03-20T09:21:02Z","oa_version":"Published Version","year":"2024","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","_id":"19408","month":"04","title":"Continual learning: Applications and 
the road forward","article_processing_charge":"No","citation":{"ista":"Verwimp E, Aljundi R, Ben-David S, Bethge M, Cossu A, Gepperth A, Hayes TL, Hüllermeier E, Kanan C, Kudithipudi D, Lampert C, Mundt M, Pascanu R, Popescu A, Tolias AS, Van De Weijer J, Liu B, Lomonaco V, Tuytelaars T, Van De Ven GM. 2024. Continual learning: Applications and the road forward. Transactions on Machine Learning Research. 2024.","chicago":"Verwimp, Eli, Rahaf Aljundi, Shai Ben-David, Matthias Bethge, Andrea Cossu, Alexander Gepperth, Tyler L. Hayes, et al. “Continual Learning: Applications and the Road Forward.” <i>Transactions on Machine Learning Research</i>. Transactions on Machine Learning Research, 2024.","apa":"Verwimp, E., Aljundi, R., Ben-David, S., Bethge, M., Cossu, A., Gepperth, A., … Van De Ven, G. M. (2024). Continual learning: Applications and the road forward. <i>Transactions on Machine Learning Research</i>. Transactions on Machine Learning Research.","ama":"Verwimp E, Aljundi R, Ben-David S, et al. Continual learning: Applications and the road forward. <i>Transactions on Machine Learning Research</i>. 2024;2024.","ieee":"E. Verwimp <i>et al.</i>, “Continual learning: Applications and the road forward,” <i>Transactions on Machine Learning Research</i>, vol. 2024. Transactions on Machine Learning Research, 2024.","short":"E. Verwimp, R. Aljundi, S. Ben-David, M. Bethge, A. Cossu, A. Gepperth, T.L. Hayes, E. Hüllermeier, C. Kanan, D. Kudithipudi, C. Lampert, M. Mundt, R. Pascanu, A. Popescu, A.S. Tolias, J. Van De Weijer, B. Liu, V. Lomonaco, T. Tuytelaars, G.M. Van De Ven, Transactions on Machine Learning Research 2024 (2024).","mla":"Verwimp, Eli, et al. “Continual Learning: Applications and the Road Forward.” <i>Transactions on Machine Learning Research</i>, vol. 
2024, Transactions on Machine Learning Research, 2024."},"ddc":["000"],"arxiv":1,"alternative_title":["TMLR"],"OA_type":"diamond","article_type":"original","OA_place":"publisher","file":[{"access_level":"open_access","relation":"main_file","success":1,"date_updated":"2025-03-20T09:02:18Z","content_type":"application/pdf","checksum":"0714e12f7423cd098976ed9974561155","file_size":1367966,"file_name":"2024_TMLR_Verwimp.pdf","creator":"dernst","date_created":"2025-03-20T09:02:18Z","file_id":"19426"}],"publication_identifier":{"eissn":["2835-8856"]},"oa":1,"scopus_import":"1","author":[{"full_name":"Verwimp, Eli","last_name":"Verwimp","first_name":"Eli"},{"first_name":"Rahaf","full_name":"Aljundi, Rahaf","last_name":"Aljundi"},{"first_name":"Shai","full_name":"Ben-David, Shai","last_name":"Ben-David"},{"full_name":"Bethge, Matthias","last_name":"Bethge","first_name":"Matthias"},{"last_name":"Cossu","full_name":"Cossu, Andrea","first_name":"Andrea"},{"last_name":"Gepperth","full_name":"Gepperth, Alexander","first_name":"Alexander"},{"first_name":"Tyler L.","full_name":"Hayes, Tyler L.","last_name":"Hayes"},{"full_name":"Hüllermeier, Eyke","last_name":"Hüllermeier","first_name":"Eyke"},{"last_name":"Kanan","full_name":"Kanan, Christopher","first_name":"Christopher"},{"first_name":"Dhireesha","last_name":"Kudithipudi","full_name":"Kudithipudi, Dhireesha"},{"orcid":"0000-0001-8622-7887","last_name":"Lampert","full_name":"Lampert, Christoph","first_name":"Christoph","id":"40C20FD2-F248-11E8-B48F-1D18A9856A87"},{"full_name":"Mundt, Martin","last_name":"Mundt","first_name":"Martin"},{"last_name":"Pascanu","full_name":"Pascanu, Razvan","first_name":"Razvan"},{"first_name":"Adrian","last_name":"Popescu","full_name":"Popescu, Adrian"},{"last_name":"Tolias","full_name":"Tolias, Andreas S.","first_name":"Andreas S."},{"first_name":"Joost","last_name":"Van De Weijer","full_name":"Van De Weijer, Joost"},{"first_name":"Bing","last_name":"Liu","full_name":"Liu, 
Bing"},{"full_name":"Lomonaco, Vincenzo","last_name":"Lomonaco","first_name":"Vincenzo"},{"first_name":"Tinne","full_name":"Tuytelaars, Tinne","last_name":"Tuytelaars"},{"full_name":"Van De Ven, Gido M.","last_name":"Van De Ven","first_name":"Gido M."}],"publication":"Transactions on Machine Learning Research","file_date_updated":"2025-03-20T09:02:18Z","intvolume":"      2024","volume":2024,"quality_controlled":"1","abstract":[{"text":"Continual learning is a subfield of machine learning, which aims to allow machine learning models to continuously learn on new data, by accumulating knowledge without forgetting what was learned in the past. In this work, we take a step back, and ask: \"Why should one care about continual learning in the first place?\". We set the stage by examining recent continual learning papers published at four major machine learning conferences, and show that memory-constrained settings dominate the field. Then, we discuss five open problems in machine learning, and even though they might seem unrelated to continual learning at first sight, we show that continual learning will inevitably be part of their solution. These problems are model editing, personalization and specialization, on-device learning, faster (re-)training and reinforcement learning. Finally, by comparing the desiderata from these unsolved problems and the current assumptions in continual learning, we highlight and discuss four future directions for continual learning research. We hope that this work offers an interesting perspective on the future of continual learning, while displaying its potential value and the paths we have to pursue in order to make it successful. 
This work is the result of the many discussions the authors had at the Dagstuhl seminar on Deep Continual Learning, in March 2023.","lang":"eng"}],"publication_status":"published","date_created":"2025-03-16T23:01:25Z","external_id":{"arxiv":["2311.11908"]},"department":[{"_id":"ChLa"}],"language":[{"iso":"eng"}]},{"issue":"10","type":"journal_article","day":"01","publisher":"Springer Nature","status":"public","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","page":"1124-1127","date_published":"2024-10-01T00:00:00Z","date_updated":"2025-03-25T08:28:39Z","year":"2024","oa_version":"None","article_processing_charge":"No","citation":{"apa":"Nees, F., Renner, P., Holz, N. E., Polemiti, E., Siehl, S., Hese, S., … Ogoh, G. (2024). Large-scale population data enrichment in mental health research. <i>Nature Mental Health</i>. Springer Nature. <a href=\"https://doi.org/10.1038/s44220-024-00316-z\">https://doi.org/10.1038/s44220-024-00316-z</a>","ama":"Nees F, Renner P, Holz NE, et al. Large-scale population data enrichment in mental health research. <i>Nature Mental Health</i>. 2024;2(10):1124-1127. 
doi:<a href=\"https://doi.org/10.1038/s44220-024-00316-z\">10.1038/s44220-024-00316-z</a>","ista":"Nees F, Renner P, Holz NE, Polemiti E, Siehl S, Hese S, Schepanski K, Schumann G, Walter H, Heinz A, Ralser M, Twardziok S, Vaidya N, Bernas A, Serin E, Jentsch M, Hitchen E, Kebir H, Lett TA, Roy JC, Eils R, Taron UH, Schütz T, Banks J, Banaschewski T, Jansone K, Christmann N, Meyer-Lindenberg A, Tost H, Holz N, Schwarz E, Stringaris A, Neidhart M, Seefried B, Aden R, Andreassen OA, Westlye LT, Van Der Meer D, Fernandez S, Kjelkenes R, Ask H, Rapp M, Tschorn M, Böttger SJ, Marquand A, Novarino G, Marr L, Slater M, Viapiana GF, Orosa FE, Gallego J, Pastor A, Forstner AJ, Hoffmann P, Nöthen MM, Claus I, Miller A, Mathey CM, Heilmann-Heimbach S, Sommer P, Patraskaki M, Wilbertz J, Schmitt K, Jirsa V, Petkoski S, Pitel S, Otten L, Athanasiadis AP, Pearmund C, Spanlang B, Alvarez E, Sanchez M, Giner A, Jia T, Gong Y, Xia Y, Chang X, Calhoun V, Liu J, Schwalber A, Thompson P, Clinton N, Desrivières S, Young AH, Stahl B, Ogoh G. 2024. Large-scale population data enrichment in mental health research. Nature Mental Health. 2(10), 1124–1127.","chicago":"Nees, Frauke, Paul Renner, Nathalie E. Holz, Elli Polemiti, Sebastian Siehl, Sören Hese, Kerstin Schepanski, et al. “Large-Scale Population Data Enrichment in Mental Health Research.” <i>Nature Mental Health</i>. Springer Nature, 2024. <a href=\"https://doi.org/10.1038/s44220-024-00316-z\">https://doi.org/10.1038/s44220-024-00316-z</a>.","mla":"Nees, Frauke, et al. “Large-Scale Population Data Enrichment in Mental Health Research.” <i>Nature Mental Health</i>, vol. 2, no. 10, Springer Nature, 2024, pp. 1124–27, doi:<a href=\"https://doi.org/10.1038/s44220-024-00316-z\">10.1038/s44220-024-00316-z</a>.","short":"F. Nees, P. Renner, N.E. Holz, E. Polemiti, S. Siehl, S. Hese, K. Schepanski, G. Schumann, H. Walter, A. Heinz, M. Ralser, S. Twardziok, N. Vaidya, A. Bernas, E. Serin, M. Jentsch, E. Hitchen, H. Kebir, T.A. Lett, J.C. 
Roy, R. Eils, U.H. Taron, T. Schütz, J. Banks, T. Banaschewski, K. Jansone, N. Christmann, A. Meyer-Lindenberg, H. Tost, N. Holz, E. Schwarz, A. Stringaris, M. Neidhart, B. Seefried, R. Aden, O.A. Andreassen, L.T. Westlye, D. Van Der Meer, S. Fernandez, R. Kjelkenes, H. Ask, M. Rapp, M. Tschorn, S.J. Böttger, A. Marquand, G. Novarino, L. Marr, M. Slater, G.F. Viapiana, F.E. Orosa, J. Gallego, A. Pastor, A.J. Forstner, P. Hoffmann, M.M. Nöthen, I. Claus, A. Miller, C.M. Mathey, S. Heilmann-Heimbach, P. Sommer, M. Patraskaki, J. Wilbertz, K. Schmitt, V. Jirsa, S. Petkoski, S. Pitel, L. Otten, A.P. Athanasiadis, C. Pearmund, B. Spanlang, E. Alvarez, M. Sanchez, A. Giner, T. Jia, Y. Gong, Y. Xia, X. Chang, V. Calhoun, J. Liu, A. Schwalber, P. Thompson, N. Clinton, S. Desrivières, A.H. Young, B. Stahl, G. Ogoh, Nature Mental Health 2 (2024) 1124–1127.","ieee":"F. Nees <i>et al.</i>, “Large-scale population data enrichment in mental health research,” <i>Nature Mental Health</i>, vol. 2, no. 10. Springer Nature, pp. 1124–1127, 2024."},"_id":"19446","month":"10","title":"Large-scale population data enrichment in mental health research","article_type":"letter_note","OA_type":"closed access","doi":"10.1038/s44220-024-00316-z","acknowledgement":"Funded by the European Union. Complementary funding was received by the UK Research and Innovation (UKRI) under the UK government’s Horizon Europe funding guarantee (10041392 and 10038599). Views and opinions expressed are however those of the author(s) only and do not necessarily reflect those of the European Union, the European Health and Digital Executive Agency (HADEA) or UKRI. The European Union, HADEA and UKRI cannot be held responsible for them. 
This work received also support from Chinese Ministry for Science and Technology (MOST), the Horizon 2020-funded European Research Council Advanced Grant ‘STRATIFY’ (695313), the German Research Foundation (COPE; 675346; NE 1383/15-1 (CoviDrug)) and the National Natural Science Foundation of China grant 82150710554.","publication_identifier":{"eissn":["2731-6076"]},"publication":"Nature Mental Health","author":[{"first_name":"Frauke","full_name":"Nees, Frauke","last_name":"Nees"},{"full_name":"Renner, Paul","last_name":"Renner","first_name":"Paul"},{"full_name":"Holz, Nathalie E.","last_name":"Holz","first_name":"Nathalie E."},{"first_name":"Elli","full_name":"Polemiti, Elli","last_name":"Polemiti"},{"first_name":"Sebastian","last_name":"Siehl","full_name":"Siehl, Sebastian"},{"first_name":"Sören","last_name":"Hese","full_name":"Hese, Sören"},{"last_name":"Schepanski","full_name":"Schepanski, Kerstin","first_name":"Kerstin"},{"first_name":"Gunter","full_name":"Schumann, Gunter","last_name":"Schumann"},{"last_name":"Walter","full_name":"Walter, Henrik","first_name":"Henrik"},{"full_name":"Heinz, Andreas","last_name":"Heinz","first_name":"Andreas"},{"last_name":"Ralser","full_name":"Ralser, Markus","first_name":"Markus"},{"full_name":"Twardziok, Sven","last_name":"Twardziok","first_name":"Sven"},{"last_name":"Vaidya","full_name":"Vaidya, Nilakshi","first_name":"Nilakshi"},{"last_name":"Bernas","full_name":"Bernas, Antoine","first_name":"Antoine"},{"last_name":"Serin","full_name":"Serin, Emin","first_name":"Emin"},{"last_name":"Jentsch","full_name":"Jentsch, Marcel","first_name":"Marcel"},{"full_name":"Hitchen, Esther","last_name":"Hitchen","first_name":"Esther"},{"first_name":"Hedi","full_name":"Kebir, Hedi","last_name":"Kebir"},{"first_name":"Tristram A.","full_name":"Lett, Tristram A.","last_name":"Lett"},{"first_name":"Jean Charles","last_name":"Roy","full_name":"Roy, Jean Charles"},{"last_name":"Eils","full_name":"Eils, 
Roland","first_name":"Roland"},{"full_name":"Taron, Ulrike Helene","last_name":"Taron","first_name":"Ulrike Helene"},{"full_name":"Schütz, Tatjana","last_name":"Schütz","first_name":"Tatjana"},{"first_name":"Jamie","full_name":"Banks, Jamie","last_name":"Banks"},{"first_name":"Tobias","last_name":"Banaschewski","full_name":"Banaschewski, Tobias"},{"last_name":"Jansone","full_name":"Jansone, Karina","first_name":"Karina"},{"full_name":"Christmann, Nina","last_name":"Christmann","first_name":"Nina"},{"first_name":"Andreas","last_name":"Meyer-Lindenberg","full_name":"Meyer-Lindenberg, Andreas"},{"first_name":"Heike","last_name":"Tost","full_name":"Tost, Heike"},{"last_name":"Holz","full_name":"Holz, Nathalie","first_name":"Nathalie"},{"full_name":"Schwarz, Emanuel","last_name":"Schwarz","first_name":"Emanuel"},{"first_name":"Argyris","full_name":"Stringaris, Argyris","last_name":"Stringaris"},{"first_name":"Maja","full_name":"Neidhart, Maja","last_name":"Neidhart"},{"first_name":"Beke","full_name":"Seefried, Beke","last_name":"Seefried"},{"first_name":"Rieke","last_name":"Aden","full_name":"Aden, Rieke"},{"first_name":"Ole A.","full_name":"Andreassen, Ole A.","last_name":"Andreassen"},{"last_name":"Westlye","full_name":"Westlye, Lars T.","first_name":"Lars T."},{"first_name":"Dennis","last_name":"Van Der Meer","full_name":"Van Der Meer, Dennis"},{"first_name":"Sara","full_name":"Fernandez, Sara","last_name":"Fernandez"},{"first_name":"Rikka","last_name":"Kjelkenes","full_name":"Kjelkenes, Rikka"},{"first_name":"Helga","last_name":"Ask","full_name":"Ask, Helga"},{"first_name":"Michael","full_name":"Rapp, Michael","last_name":"Rapp"},{"last_name":"Tschorn","full_name":"Tschorn, Mira","first_name":"Mira"},{"last_name":"Böttger","full_name":"Böttger, Sarah Jane","first_name":"Sarah Jane"},{"full_name":"Marquand, Andre","last_name":"Marquand","first_name":"Andre"},{"orcid":"0000-0002-7673-7178","last_name":"Novarino","full_name":"Novarino, 
Gaia","first_name":"Gaia","id":"3E57A680-F248-11E8-B48F-1D18A9856A87"},{"full_name":"Marr, Lena","last_name":"Marr","first_name":"Lena","id":"4406F586-F248-11E8-B48F-1D18A9856A87"},{"first_name":"Mel","last_name":"Slater","full_name":"Slater, Mel"},{"last_name":"Viapiana","full_name":"Viapiana, Guillem Feixas","first_name":"Guillem Feixas"},{"last_name":"Orosa","full_name":"Orosa, Francisco Eiroa","first_name":"Francisco Eiroa"},{"full_name":"Gallego, Jaime","last_name":"Gallego","first_name":"Jaime"},{"first_name":"Alvaro","full_name":"Pastor, Alvaro","last_name":"Pastor"},{"full_name":"Forstner, Andreas J.","last_name":"Forstner","first_name":"Andreas J."},{"first_name":"Per","full_name":"Hoffmann, Per","last_name":"Hoffmann"},{"first_name":"Markus M.","full_name":"Nöthen, Markus M.","last_name":"Nöthen"},{"full_name":"Claus, Isabelle","last_name":"Claus","first_name":"Isabelle"},{"last_name":"Miller","full_name":"Miller, Abigail","first_name":"Abigail"},{"first_name":"Carina M.","full_name":"Mathey, Carina M.","last_name":"Mathey"},{"full_name":"Heilmann-Heimbach, Stefanie","last_name":"Heilmann-Heimbach","first_name":"Stefanie"},{"first_name":"Peter","full_name":"Sommer, Peter","last_name":"Sommer"},{"last_name":"Patraskaki","full_name":"Patraskaki, Myrto","first_name":"Myrto"},{"last_name":"Wilbertz","full_name":"Wilbertz, Johannes","first_name":"Johannes"},{"last_name":"Schmitt","full_name":"Schmitt, Karen","first_name":"Karen"},{"last_name":"Jirsa","full_name":"Jirsa, Viktor","first_name":"Viktor"},{"full_name":"Petkoski, Spase","last_name":"Petkoski","first_name":"Spase"},{"last_name":"Pitel","full_name":"Pitel, Séverine","first_name":"Séverine"},{"full_name":"Otten, Lisa","last_name":"Otten","first_name":"Lisa"},{"first_name":"Anastasios Polykarpos","full_name":"Athanasiadis, Anastasios Polykarpos","last_name":"Athanasiadis"},{"last_name":"Pearmund","full_name":"Pearmund, 
Charlie","first_name":"Charlie"},{"first_name":"Bernhard","last_name":"Spanlang","full_name":"Spanlang, Bernhard"},{"full_name":"Alvarez, Elena","last_name":"Alvarez","first_name":"Elena"},{"first_name":"Mavi","full_name":"Sanchez, Mavi","last_name":"Sanchez"},{"first_name":"Arantxa","last_name":"Giner","full_name":"Giner, Arantxa"},{"first_name":"Tianye","last_name":"Jia","full_name":"Jia, Tianye"},{"last_name":"Gong","full_name":"Gong, Yanting","first_name":"Yanting"},{"first_name":"Yunman","full_name":"Xia, Yunman","last_name":"Xia"},{"last_name":"Chang","full_name":"Chang, Xiao","first_name":"Xiao"},{"full_name":"Calhoun, Vince","last_name":"Calhoun","first_name":"Vince"},{"first_name":"Jingyu","last_name":"Liu","full_name":"Liu, Jingyu"},{"full_name":"Schwalber, Ameli","last_name":"Schwalber","first_name":"Ameli"},{"full_name":"Thompson, Paul","last_name":"Thompson","first_name":"Paul"},{"last_name":"Clinton","full_name":"Clinton, Nicholas","first_name":"Nicholas"},{"first_name":"Sylvane","full_name":"Desrivières, Sylvane","last_name":"Desrivières"},{"full_name":"Young, Allan H.","last_name":"Young","first_name":"Allan H."},{"first_name":"Bernd","full_name":"Stahl, Bernd","last_name":"Stahl"},{"last_name":"Ogoh","full_name":"Ogoh, George","first_name":"George"}],"scopus_import":"1","intvolume":"         2","abstract":[{"text":"This Comment explores new approaches to enrich large-scale population data, including incorporating macro-environmental and digital health measures.","lang":"eng"}],"volume":2,"quality_controlled":"1","date_created":"2025-03-23T23:01:28Z","department":[{"_id":"GaNo"}],"language":[{"iso":"eng"}],"publication_status":"published"},{"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","oa_version":"Preprint","year":"2024","date_updated":"2025-05-14T11:32:52Z","project":[{"_id":"fc2ed2f7-9c52-11eb-aca3-c01059dda49c","grant_number":"101034413","call_identifier":"H2020","name":"IST-BRIDGE: International postdoctoral 
program"}],"date_published":"2024-12-20T00:00:00Z","day":"20","publisher":"Neural Information Processing Systems Foundation","type":"conference","status":"public","OA_place":"repository","OA_type":"green","alternative_title":["Advances in Neural Information Processing Systems"],"arxiv":1,"citation":{"ista":"Modoranu I-V, Safaryan M, Malinovsky G, Kurtic E, Robert T, Richtárik P, Alistarh D-A. 2024. MICROADAM: Accurate adaptive optimization with low space overhead and provable convergence. 38th Conference on Neural Information Processing Systems. , Advances in Neural Information Processing Systems, vol. 37.","chicago":"Modoranu, Ionut-Vlad, Mher Safaryan, Grigory Malinovsky, Eldar Kurtic, Thomas Robert, Peter Richtárik, and Dan-Adrian Alistarh. “MICROADAM: Accurate Adaptive Optimization with Low Space Overhead and Provable Convergence.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","apa":"Modoranu, I.-V., Safaryan, M., Malinovsky, G., Kurtic, E., Robert, T., Richtárik, P., &#38; Alistarh, D.-A. (2024). MICROADAM: Accurate adaptive optimization with low space overhead and provable convergence. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Neural Information Processing Systems Foundation.","ama":"Modoranu I-V, Safaryan M, Malinovsky G, et al. MICROADAM: Accurate adaptive optimization with low space overhead and provable convergence. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","ieee":"I.-V. Modoranu <i>et al.</i>, “MICROADAM: Accurate adaptive optimization with low space overhead and provable convergence,” in <i>38th Conference on Neural Information Processing Systems</i>, 2024, vol. 37.","mla":"Modoranu, Ionut-Vlad, et al. 
“MICROADAM: Accurate Adaptive Optimization with Low Space Overhead and Provable Convergence.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","short":"I.-V. Modoranu, M. Safaryan, G. Malinovsky, E. Kurtic, T. Robert, P. Richtárik, D.-A. Alistarh, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024."},"related_material":{"link":[{"url":"https://github.com/IST-DASLab/MicroAdam","relation":"software"}]},"article_processing_charge":"No","title":"MICROADAM: Accurate adaptive optimization with low space overhead and provable convergence","month":"12","_id":"19510","ec_funded":1,"intvolume":"        37","publication":"38th Conference on Neural Information Processing Systems","scopus_import":"1","author":[{"id":"449f7a18-f128-11eb-9611-9b430c0c6333","first_name":"Ionut-Vlad","full_name":"Modoranu, Ionut-Vlad","last_name":"Modoranu"},{"id":"dd546b39-0804-11ed-9c55-ef075c39778d","first_name":"Mher","last_name":"Safaryan","full_name":"Safaryan, Mher"},{"last_name":"Malinovsky","full_name":"Malinovsky, Grigory","first_name":"Grigory"},{"id":"47beb3a5-07b5-11eb-9b87-b108ec578218","first_name":"Eldar","full_name":"Kurtic, Eldar","last_name":"Kurtic"},{"full_name":"Robert, Thomas","last_name":"Robert","first_name":"Thomas","id":"de632733-1457-11f0-ae22-b5914b8c1c41"},{"last_name":"Richtárik","full_name":"Richtárik, Peter","first_name":"Peter"},{"first_name":"Dan-Adrian","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","full_name":"Alistarh, Dan-Adrian","last_name":"Alistarh","orcid":"0000-0003-3650-940X"}],"main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2405.15593"}],"publication_identifier":{"issn":["1049-5258"]},"oa":1,"acknowledgement":"The authors thank Razvan Pascanu, Mahdi Nikdan and Soroush Tabesh for their valuable feedback, the IT department from Institute of Science and Technology Austria for the 
hardware support and Weights and Biases for the infrastructure to track all our experiments. Mher Safaryan has received funding from the European Union’s Horizon 2020 research and innovation program under the Marie Sklodowska-Curie grant agreement No 101034413.","language":[{"iso":"eng"}],"corr_author":"1","department":[{"_id":"DaAl"}],"external_id":{"arxiv":["2405.15593"]},"date_created":"2025-04-06T22:01:32Z","publication_status":"published","acknowledged_ssus":[{"_id":"CampIT"}],"abstract":[{"lang":"eng","text":"We propose a new variant of the Adam optimizer [Kingma and Ba, 2014] called\r\nMICROADAM that specifically minimizes memory overheads, while maintaining\r\ntheoretical convergence guarantees. We achieve this by compressing the gradient\r\ninformation before it is fed into the optimizer state, thereby reducing its memory\r\nfootprint significantly. We control the resulting compression error via a novel\r\ninstance of the classical error feedback mechanism from distributed optimization [Seide et al., 2014, Alistarh et al., 2018, Karimireddy et al., 2019] in which\r\nthe error correction information is itself compressed to allow for practical memory\r\ngains. We prove that the resulting approach maintains theoretical convergence\r\nguarantees competitive to those of AMSGrad, while providing good practical performance. Specifically, we show that MICROADAM can be implemented efficiently\r\non GPUs: on both million-scale (BERT) and billion-scale (LLaMA) models, MICROADAM provides practical convergence competitive to that of the uncompressed\r\nAdam baseline, with lower memory usage and similar running time. 
Our code is\r\navailable at https://github.com/IST-DASLab/MicroAdam."}],"quality_controlled":"1","volume":37},{"arxiv":1,"alternative_title":["Advances in Neural Information Processing Systems"],"OA_type":"green","OA_place":"repository","month":"12","title":"QuaRot: Outlier-free 4-bit inference in rotated LLMs","_id":"19511","related_material":{"link":[{"url":"https://github.com/spcl/QuaRot","relation":"software"}]},"citation":{"ieee":"S. Ashkboos <i>et al.</i>, “QuaRot: Outlier-free 4-bit inference in rotated LLMs,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 37.","mla":"Ashkboos, Saleh, et al. “QuaRot: Outlier-Free 4-Bit Inference in Rotated LLMs.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","short":"S. Ashkboos, A. Mohtashami, M.L. Croci, B. Li, P. Cameron, M. Jaggi, D.-A. Alistarh, T. Hoefler, J. Hensman, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","chicago":"Ashkboos, Saleh, Amirkeivan Mohtashami, Maximilian L. Croci, Bo Li, Pashmina Cameron, Martin Jaggi, Dan-Adrian Alistarh, Torsten Hoefler, and James Hensman. “QuaRot: Outlier-Free 4-Bit Inference in Rotated LLMs.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","ista":"Ashkboos S, Mohtashami A, Croci ML, Li B, Cameron P, Jaggi M, Alistarh D-A, Hoefler T, Hensman J. 2024. QuaRot: Outlier-free 4-bit inference in rotated LLMs. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37.","ama":"Ashkboos S, Mohtashami A, Croci ML, et al. QuaRot: Outlier-free 4-bit inference in rotated LLMs. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. 
Neural Information Processing Systems Foundation; 2024.","apa":"Ashkboos, S., Mohtashami, A., Croci, M. L., Li, B., Cameron, P., Jaggi, M., … Hensman, J. (2024). QuaRot: Outlier-free 4-bit inference in rotated LLMs. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation."},"conference":{"name":"NeurIPS: Neural Information Processing Systems","start_date":"2024-12-09","location":"Vancouver, Canada","end_date":"2024-12-15"},"article_processing_charge":"No","date_updated":"2025-05-14T11:33:12Z","oa_version":"Preprint","year":"2024","date_published":"2024-12-20T00:00:00Z","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","status":"public","day":"20","type":"conference","publisher":"Neural Information Processing Systems Foundation","publication_status":"published","language":[{"iso":"eng"}],"date_created":"2025-04-06T22:01:32Z","external_id":{"arxiv":["2404.00456"]},"department":[{"_id":"DaAl"}],"quality_controlled":"1","volume":37,"abstract":[{"text":"We introduce QuaRot, a new Quantization scheme based on Rotations, which is able to quantize LLMs end-to-end, including all weights, activations, and KV cache in 4 bits. QuaRot rotates LLMs in a way that removes outliers from the hidden state without changing the output, making quantization easier. This computational invariance is applied to the hidden state (residual) of the LLM, as well as to the activations of the feed-forward components, aspects of the attention mechanism, and to the KV cache. The result is a quantized model where all matrix multiplications are performed in 4 bits, without any channels identified for retention in higher precision. Our 4-bit quantized LLAMA2-70B model has losses of at most 0.47 WikiText-2 perplexity and retains 99% of the zero-shot performance. We also show that QuaRot can provide lossless 6 and 8 bit LLAMA-2 models without any calibration data using round-to-nearest quantization. 
Code is available at github.com/spcl/QuaRot.","lang":"eng"}],"intvolume":"        37","publication":"38th Conference on Neural Information Processing Systems","scopus_import":"1","author":[{"last_name":"Ashkboos","full_name":"Ashkboos, Saleh","first_name":"Saleh"},{"first_name":"Amirkeivan","last_name":"Mohtashami","full_name":"Mohtashami, Amirkeivan"},{"first_name":"Maximilian L.","full_name":"Croci, Maximilian L.","last_name":"Croci"},{"first_name":"Bo","full_name":"Li, Bo","last_name":"Li"},{"first_name":"Pashmina","last_name":"Cameron","full_name":"Cameron, Pashmina"},{"full_name":"Jaggi, Martin","last_name":"Jaggi","first_name":"Martin"},{"id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian","full_name":"Alistarh, Dan-Adrian","last_name":"Alistarh","orcid":"0000-0003-3650-940X"},{"first_name":"Torsten","full_name":"Hoefler, Torsten","last_name":"Hoefler"},{"first_name":"James","full_name":"Hensman, James","last_name":"Hensman"}],"publication_identifier":{"issn":["1049-5258"]},"oa":1,"main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2404.00456"}]},{"abstract":[{"text":"Differential privacy with gradual expiration models the setting where data items\r\narrive in a stream and at a given time t the privacy loss guaranteed for a data item\r\nseen at time (t − d) is εg(d), where g is a monotonically non-decreasing function.\r\nWe study the fundamental continual (binary) counting problem where each data\r\nitem consists of a bit, and the algorithm needs to output at each time step the sum of\r\nall the bits streamed so far. 
For a stream of length T and privacy without expiration\r\ncontinual counting is possible with maximum (over all time steps) additive error\r\nO(log^2(T)/ε) and the best known lower bound is Ω(log(T)/ε); closing this gap\r\nis a challenging open problem.\r\nWe show that the situation is very different for privacy with gradual expiration by\r\ngiving upper and lower bounds for a large set of expiration functions g. Specifically,\r\nour algorithm achieves an additive error of O(log(T)/ε) for a large set of privacy\r\nexpiration functions. We also give a lower bound that shows that if C is the additive\r\nerror of any ε-DP algorithm for this problem, then the product of C and the privacy\r\nexpiration function after 2C steps must be Ω(log(T)/ε). Our algorithm matches\r\nthis lower bound as its additive error is O(log(T)/ε), even when g(2C) = O(1).\r\nOur empirical evaluation shows that we achieve a slowly growing privacy loss\r\nwith significantly smaller empirical privacy loss for large values of d than a natural\r\nbaseline algorithm.","lang":"eng"}],"quality_controlled":"1","volume":37,"corr_author":"1","language":[{"iso":"eng"}],"date_created":"2025-04-06T22:01:32Z","external_id":{"arxiv":["2406.03802"]},"department":[{"_id":"MoHe"}],"publication_status":"published","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2406.03802"}],"oa":1,"publication_identifier":{"issn":["1049-5258"]},"acknowledgement":"Monika Henzinger: This project has received funding from the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (Grant agreement No. 101019564) and the Austrian Science Fund (FWF) grant DOI 10.55776/Z422, grant DOI 10.55776/I5982, and grant DOI 10.55776/P33775 with additional funding from the netidee SCIENCE Stiftung, 2020–2024. 
Joel Daniel Andersson and Rasmus Pagh are affiliated with Basic Algorithms Research Copenhagen (BARC), supported by the VILLUM Foundation grant 16582, and are also supported by Providentia, a Data Science Distinguished Investigator grant from Novo Nordisk Fonden. Teresa Anna Steiner is supported by a research grant (VIL51463) from VILLUM FONDEN. This work was done while Teresa Anna Steiner was a Postdoc at the Technical University of Denmark. Jalaj Upadhyay’s research was funded by the Rutgers Decanal Grant no. 302918 and an unrestricted gift from Google.","intvolume":"        37","ec_funded":1,"author":[{"full_name":"Andersson, Joel Daniel","last_name":"Andersson","first_name":"Joel Daniel"},{"orcid":"0000-0002-5008-6530","full_name":"Henzinger, Monika H","last_name":"Henzinger","id":"540c9bbd-f2de-11ec-812d-d04a5be85630","first_name":"Monika H"},{"full_name":"Pagh, Rasmus","last_name":"Pagh","first_name":"Rasmus"},{"full_name":"Steiner, Teresa Anna","last_name":"Steiner","first_name":"Teresa Anna"},{"full_name":"Upadhyay, Jalaj","last_name":"Upadhyay","first_name":"Jalaj"}],"scopus_import":"1","publication":"38th Conference on Neural Information Processing Systems","citation":{"ieee":"J. D. Andersson, M. Henzinger, R. Pagh, T. A. Steiner, and J. Upadhyay, “Continual counting with gradual privacy expiration,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 37.","mla":"Andersson, Joel Daniel, et al. “Continual Counting with Gradual Privacy Expiration.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","short":"J.D. Andersson, M. Henzinger, R. Pagh, T.A. Steiner, J. Upadhyay, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","chicago":"Andersson, Joel Daniel, Monika Henzinger, Rasmus Pagh, Teresa Anna Steiner, and Jalaj Upadhyay. 
“Continual Counting with Gradual Privacy Expiration.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","ista":"Andersson JD, Henzinger M, Pagh R, Steiner TA, Upadhyay J. 2024. Continual counting with gradual privacy expiration. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37.","ama":"Andersson JD, Henzinger M, Pagh R, Steiner TA, Upadhyay J. Continual counting with gradual privacy expiration. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","apa":"Andersson, J. D., Henzinger, M., Pagh, R., Steiner, T. A., &#38; Upadhyay, J. (2024). Continual counting with gradual privacy expiration. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation."},"conference":{"start_date":"2024-12-09","name":"NeurIPS: Neural Information Processing Systems","end_date":"2024-12-15","location":"Vancouver, Canada"},"article_processing_charge":"No","month":"12","title":"Continual counting with gradual privacy expiration","_id":"19512","OA_type":"green","alternative_title":["Advances in Neural Information Processing Systems"],"OA_place":"repository","arxiv":1,"type":"conference","day":"20","publisher":"Neural Information Processing Systems Foundation","status":"public","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_updated":"2025-05-14T11:33:22Z","oa_version":"Preprint","year":"2024","project":[{"grant_number":"101019564","_id":"bd9ca328-d553-11ed-ba76-dc4f890cfe62","call_identifier":"H2020","name":"The design and evaluation of modern fully dynamic data structures"},{"name":"Efficient 
algorithms","_id":"34def286-11ca-11ed-8bc3-da5948e1613c","grant_number":"Z00422"},{"_id":"bda196b2-d553-11ed-ba76-8e8ee6c21103","grant_number":"I05982","name":"Static and Dynamic Hierarchical Graph Decompositions"},{"_id":"bd9e3a2e-d553-11ed-ba76-8aa684ce17fe","grant_number":"P33775","name":"Fast Algorithms for a Reactive Network Layer"}],"date_published":"2024-12-20T00:00:00Z"},{"OA_place":"repository","OA_type":"green","alternative_title":["Advances in Neural Information Processing Systems"],"arxiv":1,"article_processing_charge":"No","conference":{"name":"NeurIPS: Neural Information Processing Systems","start_date":"2024-12-09","location":"Vancouver, Canada","end_date":"2024-12-15"},"citation":{"ista":"Fumero M, Pegoraro M, Maiorca V, Locatello F, Rodolà E. 2024. Latent functional maps: A spectral framework for representation alignment. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37.","chicago":"Fumero, Marco, Marco Pegoraro, Valentino Maiorca, Francesco Locatello, and Emanuele Rodolà. “Latent Functional Maps: A Spectral Framework for Representation Alignment.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","apa":"Fumero, M., Pegoraro, M., Maiorca, V., Locatello, F., &#38; Rodolà, E. (2024). Latent functional maps: A spectral framework for representation alignment. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation.","ama":"Fumero M, Pegoraro M, Maiorca V, Locatello F, Rodolà E. Latent functional maps: A spectral framework for representation alignment. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","ieee":"M. Fumero, M. Pegoraro, V. Maiorca, F. Locatello, and E. 
Rodolà, “Latent functional maps: A spectral framework for representation alignment,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 37.","short":"M. Fumero, M. Pegoraro, V. Maiorca, F. Locatello, E. Rodolà, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","mla":"Fumero, Marco, et al. “Latent Functional Maps: A Spectral Framework for Representation Alignment.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024."},"_id":"19515","title":"Latent functional maps: A spectral framework for representation alignment","month":"12","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_published":"2024-12-20T00:00:00Z","project":[{"name":"IST-BRIDGE: International postdoctoral program","call_identifier":"H2020","grant_number":"101034413","_id":"fc2ed2f7-9c52-11eb-aca3-c01059dda49c"}],"year":"2024","oa_version":"Preprint","date_updated":"2025-05-14T11:36:51Z","type":"conference","publisher":"Neural Information Processing Systems Foundation","day":"20","status":"public","department":[{"_id":"FrLo"}],"external_id":{"arxiv":["2406.14183"]},"date_created":"2025-04-06T22:01:32Z","corr_author":"1","language":[{"iso":"eng"}],"publication_status":"published","abstract":[{"text":"Neural models learn data representations that lie on low-dimensional manifolds,\r\nyet modeling the relation between these representational spaces is an ongoing challenge. 
By integrating spectral geometry principles into neural modeling, we show\r\nthat this problem can be better addressed in the functional domain, mitigating complexity, while enhancing interpretability and performances on downstream tasks.\r\nTo this end, we introduce a multi-purpose framework to the representation learning\r\ncommunity, which allows to: (i) compare different spaces in an interpretable way\r\nand measure their intrinsic similarity; (ii) find correspondences between them, both\r\nin unsupervised and weakly supervised settings, and (iii) to effectively transfer\r\nrepresentations between distinct spaces. We validate our framework on various\r\napplications, ranging from stitching to retrieval tasks, and on multiple modalities,\r\ndemonstrating that Latent Functional Maps can serve as a swiss-army knife for\r\nrepresentation alignment","lang":"eng"}],"volume":37,"quality_controlled":"1","author":[{"first_name":"Marco","id":"1c1593eb-393f-11ef-bb8e-ab4f1e979650","full_name":"Fumero, Marco","last_name":"Fumero"},{"first_name":"Marco","last_name":"Pegoraro","full_name":"Pegoraro, Marco"},{"first_name":"Valentino","full_name":"Maiorca, Valentino","last_name":"Maiorca"},{"orcid":"0000-0002-4850-0683","id":"26cfd52f-2483-11ee-8040-88983bcc06d4","first_name":"Francesco","full_name":"Locatello, Francesco","last_name":"Locatello"},{"last_name":"Rodolà","full_name":"Rodolà, Emanuele","first_name":"Emanuele"}],"publication":"38th Conference on Neural Information Processing Systems","scopus_import":"1","ec_funded":1,"intvolume":"        37","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2406.14183","open_access":"1"}],"acknowledgement":"MF is supported by the MSCA IST-Bridge fellowship which has received funding from the European Union’s Horizon 2020 research and innovation program under the Marie Skłodowska-Curie grant agreement No 101034413. ER and VM are supported by the PNRR MUR project PE0000013-FAIR. 
MP is supported by the Sapienza grant \"Predicting and Explaining Clinical Trial Outcomes\", prot. RG12218166FA3F13.","oa":1,"publication_identifier":{"issn":["1049-5258"]}},{"abstract":[{"text":"In this paper, we present a novel data-free method for merging neural networks in weight space. Differently from most existing works, our method optimizes for the permutations of network neurons globally across all layers. This allows us to enforce cycle consistency of the permutations when merging n ≥ 3 models, allowing circular compositions of permutations to be computed without accumulating error along the path. We qualitatively and quantitatively motivate the need for such a constraint, showing its benefits when merging sets of models in scenarios spanning varying architectures and datasets. We finally show that, when coupled\r\nwith activation renormalization, our approach yields the best results in the task.","lang":"eng"}],"quality_controlled":"1","volume":37,"language":[{"iso":"eng"}],"corr_author":"1","external_id":{"arxiv":["2405.17897"]},"department":[{"_id":"FrLo"}],"date_created":"2025-04-06T22:01:32Z","publication_status":"published","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2405.17897"}],"publication_identifier":{"issn":["1049-5258"]},"oa":1,"acknowledgement":"This work is supported by the ERC grant no.802554 (SPECGEO), PRIN 2020 project\r\nno.2020TA3K9N (LEGO.AI), and PNRR MUR project PE0000013-FAIR. Marco Fumero is supported by the MSCA IST-Bridge fellowship which has received funding from the European Union’s Horizon 2020 research and innovation program under the Marie Skłodowska-Curie grant agreement No 101034413. 
We thank Simone Scardapane for the helpful feedback on the paper.","ec_funded":1,"intvolume":"        37","publication":"38th Conference on Neural Information Processing Systems","scopus_import":"1","author":[{"first_name":"Donato","full_name":"Crisostomi, Donato","last_name":"Crisostomi"},{"first_name":"Marco","id":"1c1593eb-393f-11ef-bb8e-ab4f1e979650","full_name":"Fumero, Marco","last_name":"Fumero"},{"full_name":"Baieri, Daniele","last_name":"Baieri","first_name":"Daniele"},{"first_name":"Florian","last_name":"Bernard","full_name":"Bernard, Florian"},{"first_name":"Emanuele","full_name":"Rodolà, Emanuele","last_name":"Rodolà"}],"citation":{"mla":"Crisostomi, Donato, et al. “C2M3: Cycle-Consistent Multi-Model Merging.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","short":"D. Crisostomi, M. Fumero, D. Baieri, F. Bernard, E. Rodolà, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","ieee":"D. Crisostomi, M. Fumero, D. Baieri, F. Bernard, and E. Rodolà, “C2M3: Cycle-consistent multi-model merging,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 37.","ama":"Crisostomi D, Fumero M, Baieri D, Bernard F, Rodolà E. C2M3: Cycle-consistent multi-model merging. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","apa":"Crisostomi, D., Fumero, M., Baieri, D., Bernard, F., &#38; Rodolà, E. (2024). C2M3: Cycle-consistent multi-model merging. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation.","chicago":"Crisostomi, Donato, Marco Fumero, Daniele Baieri, Florian Bernard, and Emanuele Rodolà. 
“C2M3: Cycle-Consistent Multi-Model Merging.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","ista":"Crisostomi D, Fumero M, Baieri D, Bernard F, Rodolà E. 2024. C2M3: Cycle-consistent multi-model merging. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37."},"article_processing_charge":"No","conference":{"name":"NeurIPS: Neural Information Processing Systems","start_date":"2024-12-09","location":"Vancouver, Canada","end_date":"2024-12-15"},"title":"C2M3: Cycle-consistent multi-model merging","month":"12","_id":"19517","OA_place":"repository","OA_type":"green","alternative_title":["Advances in Neural Information Processing Systems"],"arxiv":1,"publisher":"Neural Information Processing Systems Foundation","day":"20","type":"conference","status":"public","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","oa_version":"Preprint","year":"2024","date_updated":"2025-05-14T11:36:59Z","date_published":"2024-12-20T00:00:00Z","project":[{"call_identifier":"H2020","name":"IST-BRIDGE: International postdoctoral program","_id":"fc2ed2f7-9c52-11eb-aca3-c01059dda49c","grant_number":"101034413"}]},{"conference":{"name":"NeurIPS: Neural Information Processing Systems","start_date":"2024-12-09","location":"Vancouver, Canada","end_date":"2024-12-15"},"article_processing_charge":"No","citation":{"ama":"Wu D, Modoranu I-V, Safaryan M, Kuznedelev D, Alistarh D-A. The iterative optimal brain surgeon: Faster sparse recovery by leveraging second-order information. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","apa":"Wu, D., Modoranu, I.-V., Safaryan, M., Kuznedelev, D., &#38; Alistarh, D.-A. (2024). The iterative optimal brain surgeon: Faster sparse recovery by leveraging second-order information. 
In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation.","chicago":"Wu, Diyuan, Ionut-Vlad Modoranu, Mher Safaryan, Denis Kuznedelev, and Dan-Adrian Alistarh. “The Iterative Optimal Brain Surgeon: Faster Sparse Recovery by Leveraging Second-Order Information.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. Neural Information Processing Systems Foundation, 2024.","ista":"Wu D, Modoranu I-V, Safaryan M, Kuznedelev D, Alistarh D-A. 2024. The iterative optimal brain surgeon: Faster sparse recovery by leveraging second-order information. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37.","short":"D. Wu, I.-V. Modoranu, M. Safaryan, D. Kuznedelev, D.-A. Alistarh, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","mla":"Wu, Diyuan, et al. “The Iterative Optimal Brain Surgeon: Faster Sparse Recovery by Leveraging Second-Order Information.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","ieee":"D. Wu, I.-V. Modoranu, M. Safaryan, D. Kuznedelev, and D.-A. Alistarh, “The iterative optimal brain surgeon: Faster sparse recovery by leveraging second-order information,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 
37."},"_id":"19518","month":"12","title":"The iterative optimal brain surgeon: Faster sparse recovery by leveraging second-order information","OA_type":"green","alternative_title":["Advances in Neural Information Processing Systems"],"OA_place":"repository","arxiv":1,"type":"conference","publisher":"Neural Information Processing Systems Foundation","day":"20","status":"public","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_published":"2024-12-20T00:00:00Z","project":[{"_id":"fc2ed2f7-9c52-11eb-aca3-c01059dda49c","grant_number":"101034413","name":"IST-BRIDGE: International postdoctoral program","call_identifier":"H2020"}],"date_updated":"2025-05-14T11:37:10Z","year":"2024","oa_version":"Preprint","abstract":[{"text":"The rising footprint of machine learning has led to a focus on imposing model\r\nsparsity as a means of reducing computational and memory costs. For deep neural\r\nnetworks (DNNs), the state-of-the-art accuracy-vs-sparsity is achieved by heuristics\r\ninspired by the classical Optimal Brain Surgeon (OBS) framework [LeCun et al.,\r\n1989, Hassibi and Stork, 1992, Hassibi et al., 1993], which leverages loss curvature\r\ninformation to make better pruning decisions. Yet, these results still lack a solid\r\ntheoretical understanding, and it is unclear whether they can be improved by\r\nleveraging connections to the wealth of work on sparse recovery algorithms. In this\r\npaper, we draw new connections between these two areas and present new sparse\r\nrecovery algorithms inspired by the OBS framework that comes with theoretical\r\nguarantees under reasonable assumptions and have strong practical performance.\r\nSpecifically, our work starts from the observation that we can leverage curvature\r\ninformation in OBS-like fashion upon the projection step of classic iterative sparse\r\nrecovery algorithms such as IHT. We show for the first time that this leads both\r\nto improved convergence bounds under standard assumptions. 
Furthermore, we\r\npresent extensions of this approach to the practical task of obtaining accurate sparse\r\nDNNs, and validate it experimentally at scale for Transformer-based models on\r\nvision and language tasks.","lang":"eng"}],"acknowledged_ssus":[{"_id":"CampIT"}],"volume":37,"quality_controlled":"1","date_created":"2025-04-06T22:01:32Z","department":[{"_id":"DaAl"},{"_id":"MaMo"}],"external_id":{"arxiv":["2408.17163"]},"language":[{"iso":"eng"}],"corr_author":"1","publication_status":"published","main_file_link":[{"open_access":"1","url":"https://doi.org/10.48550/arXiv.2408.17163"}],"acknowledgement":"The authors thank the anonymous NeurIPS reviewers for their useful comments and feedback, the IT department from the Institute of Science and Technology Austria for the hardware support, and Weights and Biases for the infrastructure to track all our experiments. Mher Safaryan has received funding from the European Union’s Horizon 2020 research and innovation program under the Maria Skłodowska-Curie grant agreement No 101034413.","oa":1,"publication_identifier":{"issn":["1049-5258"]},"scopus_import":"1","author":[{"last_name":"Wu","full_name":"Wu, Diyuan","first_name":"Diyuan","id":"1a5914c2-896a-11ed-bdf8-fb80621a0635"},{"last_name":"Modoranu","full_name":"Modoranu, Ionut-Vlad","id":"449f7a18-f128-11eb-9611-9b430c0c6333","first_name":"Ionut-Vlad"},{"last_name":"Safaryan","full_name":"Safaryan, Mher","id":"dd546b39-0804-11ed-9c55-ef075c39778d","first_name":"Mher"},{"first_name":"Denis","full_name":"Kuznedelev, Denis","last_name":"Kuznedelev"},{"orcid":"0000-0003-3650-940X","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian","full_name":"Alistarh, Dan-Adrian","last_name":"Alistarh"}],"publication":"38th Conference on Neural Information Processing Systems","intvolume":"        37","ec_funded":1},{"status":"public","publisher":"Neural Information Processing Systems 
Foundation","type":"conference","day":"20","date_updated":"2025-05-14T10:49:20Z","year":"2024","oa_version":"Published Version","date_published":"2024-12-20T00:00:00Z","has_accepted_license":"1","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","month":"12","title":"PV-tuning: Beyond straight-through estimation for extreme LLM compression","_id":"19519","citation":{"short":"V. Malinovskii, D. Mazur, I. Ilin, D. Kuznedelev, K. Burlachenko, K. Yi, D.-A. Alistarh, P. Richtarik, in:, 38th Conference on Neural Information Processing Systems, Neural Information Processing Systems Foundation, 2024.","mla":"Malinovskii, Vladimir, et al. “PV-Tuning: Beyond Straight-through Estimation for Extreme LLM Compression.” <i>38th Conference on Neural Information Processing Systems</i>, vol. 37, Neural Information Processing Systems Foundation, 2024.","ieee":"V. Malinovskii <i>et al.</i>, “PV-tuning: Beyond straight-through estimation for extreme LLM compression,” in <i>38th Conference on Neural Information Processing Systems</i>, Vancouver, Canada, 2024, vol. 37.","ama":"Malinovskii V, Mazur D, Ilin I, et al. PV-tuning: Beyond straight-through estimation for extreme LLM compression. In: <i>38th Conference on Neural Information Processing Systems</i>. Vol 37. Neural Information Processing Systems Foundation; 2024.","apa":"Malinovskii, V., Mazur, D., Ilin, I., Kuznedelev, D., Burlachenko, K., Yi, K., … Richtarik, P. (2024). PV-tuning: Beyond straight-through estimation for extreme LLM compression. In <i>38th Conference on Neural Information Processing Systems</i> (Vol. 37). Vancouver, Canada: Neural Information Processing Systems Foundation.","chicago":"Malinovskii, Vladimir, Denis Mazur, Ivan Ilin, Denis Kuznedelev, Konstantin Burlachenko, Kai Yi, Dan-Adrian Alistarh, and Peter Richtarik. “PV-Tuning: Beyond Straight-through Estimation for Extreme LLM Compression.” In <i>38th Conference on Neural Information Processing Systems</i>, Vol. 37. 
Neural Information Processing Systems Foundation, 2024.","ista":"Malinovskii V, Mazur D, Ilin I, Kuznedelev D, Burlachenko K, Yi K, Alistarh D-A, Richtarik P. 2024. PV-tuning: Beyond straight-through estimation for extreme LLM compression. 38th Conference on Neural Information Processing Systems. NeurIPS: Neural Information Processing Systems, Advances in Neural Information Processing Systems, vol. 37."},"conference":{"start_date":"2024-12-10","name":"NeurIPS: Neural Information Processing Systems","end_date":"2024-12-15","location":"Vancouver, Canada"},"article_processing_charge":"No","ddc":["000"],"arxiv":1,"OA_type":"gold","alternative_title":["Advances in Neural Information Processing Systems"],"OA_place":"publisher","oa":1,"publication_identifier":{"issn":["1049-5258"],"isbn":["9798331314385"]},"acknowledgement":"Authors would like to thank Vage Egiazarian, Andrei Panferov and Ruslan Svirschevski for their\r\nhelp and advice on AQLM codebase and running large-scale experiments. We also thank Philip\r\nZmushko and Artem Fedorov for helpful discussions during the early stages of our research. The research of Kai Yi, Konstantin Burlachenko, and Peter Richtárik reported in this publication was supported by funding from King Abdullah University of Science and Technology (KAUST) – Center of Excellence for Generative AI, under award number 5940. We would also like to thank our NeurIPS reviewers for their helpful suggestions, we specifically highlight p3Lv’s suggestions to consider smaller codebook sizes and evaluate PV-Tuning with QuIP#, both of which produced interesting findings. 
Finally, we thank the open-source contributors from llama.cpp and the LocalLlama community for discussions and inspirations on practical use cases of quantized language models, and in particular, Yalda Shabanzadeh and Arthur Aardvark for their help with improving the codebase.","file":[{"success":1,"relation":"main_file","access_level":"open_access","file_size":939712,"date_created":"2025-04-07T09:17:10Z","file_id":"19521","file_name":"2024_NeurIPS_Malinovskii.pdf","creator":"dernst","date_updated":"2025-04-07T09:17:10Z","checksum":"54d36f947887e26d0e568b512167001a","content_type":"application/pdf"}],"intvolume":"        37","publication":"38th Conference on Neural Information Processing Systems","scopus_import":"1","file_date_updated":"2025-04-07T09:17:10Z","author":[{"first_name":"Vladimir","full_name":"Malinovskii, Vladimir","last_name":"Malinovskii"},{"first_name":"Denis","full_name":"Mazur, Denis","last_name":"Mazur"},{"first_name":"Ivan","full_name":"Ilin, Ivan","last_name":"Ilin"},{"full_name":"Kuznedelev, Denis","last_name":"Kuznedelev","first_name":"Denis"},{"last_name":"Burlachenko","full_name":"Burlachenko, Konstantin","first_name":"Konstantin"},{"first_name":"Kai","last_name":"Yi","full_name":"Yi, Kai"},{"orcid":"0000-0003-3650-940X","last_name":"Alistarh","full_name":"Alistarh, Dan-Adrian","id":"4A899BFC-F248-11E8-B48F-1D18A9856A87","first_name":"Dan-Adrian"},{"first_name":"Peter","last_name":"Richtarik","full_name":"Richtarik, Peter"}],"quality_controlled":"1","volume":37,"abstract":[{"lang":"eng","text":"There has been significant interest in \"extreme\" compression of large language models (LLMs), i.e. to 1-2 bits per parameter, which allows such models to be executed efficiently on resource-constrained devices. Existing work focused on improved one-shot quantization techniques and weight representations; yet, purely post-training approaches are reaching diminishing returns in terms of the accuracy-vs-bit-width trade-off. 
State-of-the-art quantization methods such as QuIP# and AQLM include fine-tuning (part of) the compressed parameters over a limited amount of calibration data; however, such fine-tuning techniques over compressed weights often make exclusive use of straight-through estimators (STE), whose performance is not well-understood in this setting. In this work, we question the use of STE for extreme LLM compression, showing that it can be sub-optimal, and perform a systematic study of quantization-aware fine-tuning strategies for LLMs.We propose PV-Tuning - a representation-agnostic framework that generalizes and improves upon existing fine-tuning strategies, and provides convergence guarantees in restricted cases.On the practical side, when used for 1-2 bit vector quantization, PV-Tuning outperforms prior techniques for highly-performant models such as Llama and Mistral. Using PV-Tuning, we achieve the first Pareto-optimal quantization for Llama-2 family models at 2 bits per parameter."}],"publication_status":"published","language":[{"iso":"eng"}],"date_created":"2025-04-06T22:01:32Z","department":[{"_id":"DaAl"}],"external_id":{"arxiv":["2405.14852"]}},{"acknowledged_ssus":[{"_id":"Bio"}],"citation":{"ista":"Vijatovic D, Toma FA, Harrington ZP, Sommer CM, Hauschild R, Trevisan AJ, Chapman P, Julseth M, Brenner-Morton S, Gabitto MI, Dasen JS, Bikoff JB, Sweeney LB. Spinal neuron diversity scales exponentially with swim-to-limb transformation during frog metamorphosis. bioRxiv, <a href=\"https://doi.org/10.1101/2024.09.20.614050\">10.1101/2024.09.20.614050</a>.","chicago":"Vijatovic, David, Florina Alexandra  Toma, Zoe P Harrington, Christoph M Sommer, Robert Hauschild, Alexandra J. Trevisan, Phillip Chapman, et al. “Spinal Neuron Diversity Scales Exponentially with Swim-to-Limb Transformation during Frog Metamorphosis.” <i>BioRxiv</i>, n.d. <a href=\"https://doi.org/10.1101/2024.09.20.614050\">https://doi.org/10.1101/2024.09.20.614050</a>.","apa":"Vijatovic, D., Toma, F. 
A., Harrington, Z. P., Sommer, C. M., Hauschild, R., Trevisan, A. J., … Sweeney, L. B. (n.d.). Spinal neuron diversity scales exponentially with swim-to-limb transformation during frog metamorphosis. <i>bioRxiv</i>. <a href=\"https://doi.org/10.1101/2024.09.20.614050\">https://doi.org/10.1101/2024.09.20.614050</a>","ama":"Vijatovic D, Toma FA, Harrington ZP, et al. Spinal neuron diversity scales exponentially with swim-to-limb transformation during frog metamorphosis. <i>bioRxiv</i>. doi:<a href=\"https://doi.org/10.1101/2024.09.20.614050\">10.1101/2024.09.20.614050</a>","ieee":"D. Vijatovic <i>et al.</i>, “Spinal neuron diversity scales exponentially with swim-to-limb transformation during frog metamorphosis,” <i>bioRxiv</i>.","mla":"Vijatovic, David, et al. “Spinal Neuron Diversity Scales Exponentially with Swim-to-Limb Transformation during Frog Metamorphosis.” <i>BioRxiv</i>, doi:<a href=\"https://doi.org/10.1101/2024.09.20.614050\">10.1101/2024.09.20.614050</a>.","short":"D. Vijatovic, F.A. Toma, Z.P. Harrington, C.M. Sommer, R. Hauschild, A.J. Trevisan, P. Chapman, M. Julseth, S. Brenner-Morton, M.I. Gabitto, J.S. Dasen, J.B. Bikoff, L.B. Sweeney, BioRxiv (n.d.)."},"article_processing_charge":"No","abstract":[{"text":"Vertebrates exhibit a wide range of motor behaviors, ranging from swimming to complex limb-based movements. Here we take advantage of frog metamorphosis, which captures a swim-to-limb-based movement transformation during the development of a single organism, to explore changes in the underlying spinal circuits. We find that the tadpole spinal cord contains small and largely homogeneous populations of motor neurons (MNs) and V1 interneurons (V1s) at early escape swimming stages. These neuronal populations only modestly increase in number and subtype heterogeneity with the emergence of free swimming. 
In contrast, during frog metamorphosis and the emergence of limb movement, there is a dramatic expansion of MN and V1 interneuron number and transcriptional heterogeneity, culminating in cohorts of neurons that exhibit striking molecular similarity to mammalian motor circuits. CRISPR/Cas9-mediated gene disruption of the limb MN and V1 determinants FoxP1 and Engrailed-1, respectively, results in severe but selective deficits in tail and limb function. Our work thus demonstrates that neural diversity scales exponentially with increasing behavioral complexity and illustrates striking evolutionary conservation in the molecular organization and function of motor circuits across species.","lang":"eng"}],"month":"09","title":"Spinal neuron diversity scales exponentially with swim-to-limb transformation during frog metamorphosis","_id":"19520","OA_type":"green","language":[{"iso":"eng"}],"corr_author":"1","doi":"10.1101/2024.09.20.614050","OA_place":"repository","date_created":"2025-04-07T08:48:28Z","department":[{"_id":"LoSw"},{"_id":"TiVo"},{"_id":"Bio"},{"_id":"NiBa"}],"publication_status":"submitted","day":"27","main_file_link":[{"open_access":"1","url":"https://doi.org/10.1101/2024.09.20.614050"}],"type":"preprint","oa":1,"acknowledgement":"We would like to thank the members of the Sweeney Lab (especially Stavros Papadopoulos and\r\nSophie Gobeil) for their contributions to this project and, in addition to the lab, Graziana Gatto\r\nand Mario de Bono, for discussion, and support. We are also grateful to Tom Jessell and Chris\r\nKintner for their scientific insight and mentorship during the conception of this project. This\r\nproject would also not have been possible with the technical support of the Matthias Nowak,\r\nVerena Mayer and the Aquatics as well as the Imaging and Optics Facility support teams\r\n(ISTA). 
In addition, we thank our funding sources for providing the resources to do these\r\nexperiments: FTI Strategy Lower Austria Dissertation Grant Number FT121-D-046 (D.V.);\r\nHorizon Europe ERC Starting Grant Number 101041551 (L.B.S., F.A.T. and D.V); Special\r\nResearch Program (SFB) of the Austrian Science Fund (FWF) Project number F7814-B (L.B.S);\r\nNINDS 5R35NS116858 (J.S.D); CZI grant DAF2020-225401 (DOI): 10.37921/120055ratwvi\r\n(R.H.); NIH grant number R01NS123116 (J.B.B); American Lebanese Syrian Associated\r\nCharities (ALSAC) (J.B.B.); German Academic Exchange Service (DAAD) IFI Grant Number\r\n57515251-91853472 (Z.H.); and Project A.L.S. (S.B-M.). ","status":"public","publication":"bioRxiv","author":[{"last_name":"Vijatovic","full_name":"Vijatovic, David","first_name":"David","id":"cf391e77-ec3c-11ea-a124-d69323410b58"},{"last_name":"Toma","full_name":"Toma, Florina Alexandra ","first_name":"Florina Alexandra ","id":"2f73f876-f128-11eb-9611-b96b5a30cb0e"},{"first_name":"Zoe P","id":"a8144562-32c9-11ee-b5ce-d9800628bda2","last_name":"Harrington","full_name":"Harrington, Zoe P","orcid":"0009-0008-0158-4032"},{"orcid":"0000-0003-1216-9105","id":"4DF26D8C-F248-11E8-B48F-1D18A9856A87","first_name":"Christoph M","last_name":"Sommer","full_name":"Sommer, Christoph M"},{"last_name":"Hauschild","full_name":"Hauschild, Robert","first_name":"Robert","id":"4E01D6B4-F248-11E8-B48F-1D18A9856A87","orcid":"0000-0001-9843-3522"},{"full_name":"Trevisan, Alexandra J.","last_name":"Trevisan","first_name":"Alexandra J."},{"last_name":"Chapman","full_name":"Chapman, Phillip","first_name":"Phillip"},{"full_name":"Julseth, Mara","last_name":"Julseth","first_name":"Mara","id":"1cf464b2-dc7d-11ea-9b2f-f9b1aa9417d1"},{"last_name":"Brenner-Morton","full_name":"Brenner-Morton, Susan","first_name":"Susan"},{"first_name":"Mariano I.","last_name":"Gabitto","full_name":"Gabitto, Mariano I."},{"first_name":"Jeremy S.","last_name":"Dasen","full_name":"Dasen, Jeremy 
S."},{"full_name":"Bikoff, Jay B.","last_name":"Bikoff","first_name":"Jay B."},{"orcid":"0000-0001-9242-5601","full_name":"Sweeney, Lora Beatrice Jaeger","last_name":"Sweeney","first_name":"Lora Beatrice Jaeger","id":"56BE8254-C4F0-11E9-8E45-0B23E6697425"}],"user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","date_updated":"2025-05-14T11:40:13Z","oa_version":"Preprint","year":"2024","project":[{"_id":"bd73af52-d553-11ed-ba76-912049f0ac7a","grant_number":"FTI21-D-046","name":"Development of V1 interneuron diversity during swim-to-walk transition of Xenopus metamorphosis"},{"_id":"ebb66355-77a9-11ec-83b8-b8ac210a4dae","grant_number":"101041551","name":"Development and Evolution of Tetrapod Motor Circuits"},{"grant_number":"CZI01","_id":"c08e9ad1-5a5b-11eb-8a69-9d1cf3b07473","name":"Tools for automation and feedback microscopy"}],"date_published":"2024-09-27T00:00:00Z"},{"publication_identifier":{"eisbn":["9781611977929"]},"oa":1,"acknowledgement":"This   project   has   received   funding   from   the   Euro-pean  Research  Council  (ERC)  under  the  EuropeanUnion’s  Horizon  2020  research  and  innovation  programme  (Grant  agreement  No.   101019564  “The  De-sign  of  Modern  Fully  Dynamic  Data  Structures  (Mo-DynStruct)”  and  the  Austrian  Science  Fund  (FWF)project Z 422-N, project “Static and Dynamic Hierar-chical  Graph  Decompositions”,  I  5982-N,  and  project“Fast  Algorithms  for  a  Reactive  Network  Layer  (Re-actNet)”, P 33775-N, with additional funding from thenetidee SCIENCE Stiftung, 2020–2024.D.  
Saulpic has received funding from the European Union’s Horizon 2020 research and innovation programme under the Marie Sklodowska-Curie grant agreement No 101034413.","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2310.18034","open_access":"1"}],"ec_funded":1,"publication":"2024 Proceedings of the Symposium on Algorithm Engineering and Experiments","author":[{"full_name":"Henzinger, Monika H","last_name":"Henzinger","first_name":"Monika H","id":"540c9bbd-f2de-11ec-812d-d04a5be85630","orcid":"0000-0002-5008-6530"},{"id":"f8e48cf0-b0ff-11ed-b0e9-b4c35598f964","first_name":"David","full_name":"Saulpic, David","last_name":"Saulpic"},{"id":"8b563fd0-b441-11ee-9101-a3891c61efa6","first_name":"Leonhard","last_name":"Sidl","full_name":"Sidl, Leonhard"}],"scopus_import":"1","quality_controlled":"1","abstract":[{"lang":"eng","text":"For a set of points in Rd, the Euclidean k-means problem consists of finding k centers such that the sum of distances squared from each data point to its closest center is minimized. Coresets are one of the main tools developed recently to solve this problem in a big data context. They allow to compress the initial dataset while preserving its structure: running any algorithm on the coreset provides a guarantee almost equivalent to running it on the full data. In this work, we study coresets in a fully-dynamic setting: points are added and deleted with the goal to efficiently maintain a coreset with which a k-means solution can be computed. 
Based on an algorithm from Henzinger and Kale [ESA'20], we present an efficient and practical implementation of a fully dynamic coreset algorithm, that improves the running time by up to a factor of 20 compared to our non-optimized implementation of the algorithm by Henzinger and Kale, without sacrificing more than 7% on the quality of the k-means solution."}],"publication_status":"published","corr_author":"1","language":[{"iso":"eng"}],"date_created":"2024-01-09T16:22:47Z","department":[{"_id":"MoHe"}],"external_id":{"arxiv":["2310.18034"]},"status":"public","type":"conference","day":"04","publisher":"Society for Industrial and Applied Mathematics","date_updated":"2025-04-14T13:50:50Z","oa_version":"Preprint","year":"2024","project":[{"call_identifier":"H2020","name":"The design and evaluation of modern fully dynamic data structures","_id":"bd9ca328-d553-11ed-ba76-dc4f890cfe62","grant_number":"101019564"},{"name":"Efficient algorithms","grant_number":"Z00422","_id":"34def286-11ca-11ed-8bc3-da5948e1613c"},{"name":"Static and Dynamic Hierarchical Graph Decompositions","_id":"bda196b2-d553-11ed-ba76-8e8ee6c21103","grant_number":"I05982"},{"grant_number":"P33775","_id":"bd9e3a2e-d553-11ed-ba76-8aa684ce17fe","name":"Fast Algorithms for a Reactive Network Layer"},{"name":"IST-BRIDGE: International postdoctoral program","call_identifier":"H2020","_id":"fc2ed2f7-9c52-11eb-aca3-c01059dda49c","grant_number":"101034413"}],"date_published":"2024-01-04T00:00:00Z","page":"220-233","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","month":"01","title":"Experimental evaluation of fully dynamic k-means via coresets","_id":"14769","citation":{"ista":"Henzinger M, Saulpic D, Sidl L. 2024. Experimental evaluation of fully dynamic k-means via coresets. 2024 Proceedings of the Symposium on Algorithm Engineering and Experiments. ALENEX: Workshop on Algorithm Engineering and Experiments, 220–233.","chicago":"Henzinger, Monika, David Saulpic, and Leonhard Sidl. 
“Experimental Evaluation of Fully Dynamic K-Means via Coresets.” In <i>2024 Proceedings of the Symposium on Algorithm Engineering and Experiments</i>, 220–33. Society for Industrial and Applied Mathematics, 2024. <a href=\"https://doi.org/10.1137/1.9781611977929.17\">https://doi.org/10.1137/1.9781611977929.17</a>.","apa":"Henzinger, M., Saulpic, D., &#38; Sidl, L. (2024). Experimental evaluation of fully dynamic k-means via coresets. In <i>2024 Proceedings of the Symposium on Algorithm Engineering and Experiments</i> (pp. 220–233). Alexandria, VA, United States: Society for Industrial and Applied Mathematics. <a href=\"https://doi.org/10.1137/1.9781611977929.17\">https://doi.org/10.1137/1.9781611977929.17</a>","ama":"Henzinger M, Saulpic D, Sidl L. Experimental evaluation of fully dynamic k-means via coresets. In: <i>2024 Proceedings of the Symposium on Algorithm Engineering and Experiments</i>. Society for Industrial and Applied Mathematics; 2024:220-233. doi:<a href=\"https://doi.org/10.1137/1.9781611977929.17\">10.1137/1.9781611977929.17</a>","ieee":"M. Henzinger, D. Saulpic, and L. Sidl, “Experimental evaluation of fully dynamic k-means via coresets,” in <i>2024 Proceedings of the Symposium on Algorithm Engineering and Experiments</i>, Alexandria, VA, United States, 2024, pp. 220–233.","short":"M. Henzinger, D. Saulpic, L. Sidl, in:, 2024 Proceedings of the Symposium on Algorithm Engineering and Experiments, Society for Industrial and Applied Mathematics, 2024, pp. 220–233.","mla":"Henzinger, Monika, et al. “Experimental Evaluation of Fully Dynamic K-Means via Coresets.” <i>2024 Proceedings of the Symposium on Algorithm Engineering and Experiments</i>, Society for Industrial and Applied Mathematics, 2024, pp. 
220–33, doi:<a href=\"https://doi.org/10.1137/1.9781611977929.17\">10.1137/1.9781611977929.17</a>."},"conference":{"start_date":"2024-01-07","name":"ALENEX: Workshop on Algorithm Engineering and Experiments","end_date":"2024-01-08","location":"Alexandria, VA, United States"},"article_processing_charge":"No","arxiv":1,"doi":"10.1137/1.9781611977929.17"},{"publication":"STAR Protocols","file_date_updated":"2024-07-16T12:04:46Z","author":[{"full_name":"Hansen, Andi H","last_name":"Hansen","first_name":"Andi H","id":"38853E16-F248-11E8-B48F-1D18A9856A87"},{"orcid":"0000-0003-2279-1061","last_name":"Hippenmeyer","full_name":"Hippenmeyer, Simon","id":"37B36620-F248-11E8-B48F-1D18A9856A87","first_name":"Simon"}],"scopus_import":"1","intvolume":"         5","pmid":1,"acknowledgement":"We thank Florian Pauler for discussion and his expert technical support. This research was supported by the Scientific Service Units (SSU) at IST Austria through resources provided by the Imaging and Optics Facility (IOF) and Preclinical Facility (PCF). A.H.H. was a recipient of a DOC Fellowship (24812) of the Austrian Academy of Sciences.","file":[{"creator":"dernst","file_name":"2024_STARProtoc_Hansen.pdf","file_id":"17264","date_created":"2024-07-16T12:04:46Z","file_size":3758943,"checksum":"4644d537451c5c114a9d7c7829b65bba","content_type":"application/pdf","date_updated":"2024-07-16T12:04:46Z","success":1,"access_level":"open_access","relation":"main_file"}],"oa":1,"publication_identifier":{"eissn":["2666-1667"]},"date_created":"2024-01-14T23:00:56Z","external_id":{"pmid":["38165800"]},"department":[{"_id":"SiHi"}],"language":[{"iso":"eng"}],"corr_author":"1","publication_status":"published","article_number":"102795","abstract":[{"text":"Mosaic analysis with double markers (MADM) technology enables the sparse labeling of genetically defined neurons. We present a protocol for time-lapse imaging of cortical projection neuron migration in mice using MADM. 
We describe steps for the isolation, culturing, and 4D imaging of neuronal dynamics in MADM-labeled brain tissue. While this protocol is compatible with other single-cell labeling methods, the MADM approach provides a genetic platform for the functional assessment of cell-autonomous candidate gene function and the relative contribution of non-cell-autonomous effects.\r\n\r\nFor complete details on the use and execution of this protocol, please refer to Hansen et al. (2022),1 Contreras et al. (2021),2 and Amberg and Hippenmeyer (2021).3","lang":"eng"}],"acknowledged_ssus":[{"_id":"Bio"},{"_id":"PreCl"}],"volume":5,"quality_controlled":"1","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","project":[{"name":"Molecular mechanisms of radial neuronal migration","grant_number":"24812","_id":"2625A13E-B435-11E9-9278-68D0E5697425"}],"has_accepted_license":"1","date_published":"2024-03-15T00:00:00Z","date_updated":"2025-04-15T07:32:40Z","year":"2024","oa_version":"Published Version","issue":"1","publisher":"Elsevier","day":"15","type":"journal_article","status":"public","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"article_type":"review","doi":"10.1016/j.xpro.2023.102795","ddc":["570"],"article_processing_charge":"Yes","related_material":{"link":[{"relation":"software","url":"http://github.com/hippenmeyerlab"}]},"citation":{"ista":"Hansen AH, Hippenmeyer S. 2024. Time-lapse imaging of cortical projection neuron migration in mice using mosaic analysis with double markers. STAR Protocols. 5(1), 102795.","chicago":"Hansen, Andi H, and Simon Hippenmeyer. “Time-Lapse Imaging of Cortical Projection Neuron Migration in Mice Using Mosaic Analysis with Double Markers.” <i>STAR Protocols</i>. Elsevier, 2024. 
<a href=\"https://doi.org/10.1016/j.xpro.2023.102795\">https://doi.org/10.1016/j.xpro.2023.102795</a>.","apa":"Hansen, A. H., &#38; Hippenmeyer, S. (2024). Time-lapse imaging of cortical projection neuron migration in mice using mosaic analysis with double markers. <i>STAR Protocols</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.xpro.2023.102795\">https://doi.org/10.1016/j.xpro.2023.102795</a>","ama":"Hansen AH, Hippenmeyer S. Time-lapse imaging of cortical projection neuron migration in mice using mosaic analysis with double markers. <i>STAR Protocols</i>. 2024;5(1). doi:<a href=\"https://doi.org/10.1016/j.xpro.2023.102795\">10.1016/j.xpro.2023.102795</a>","ieee":"A. H. Hansen and S. Hippenmeyer, “Time-lapse imaging of cortical projection neuron migration in mice using mosaic analysis with double markers,” <i>STAR Protocols</i>, vol. 5, no. 1. Elsevier, 2024.","short":"A.H. Hansen, S. Hippenmeyer, STAR Protocols 5 (2024).","mla":"Hansen, Andi H., and Simon Hippenmeyer. “Time-Lapse Imaging of Cortical Projection Neuron Migration in Mice Using Mosaic Analysis with Double Markers.” <i>STAR Protocols</i>, vol. 5, no. 1, 102795, Elsevier, 2024, doi:<a href=\"https://doi.org/10.1016/j.xpro.2023.102795\">10.1016/j.xpro.2023.102795</a>."},"_id":"14794","month":"03","title":"Time-lapse imaging of cortical projection neuron migration in mice using mosaic analysis with double markers"},{"quality_controlled":"1","volume":34,"acknowledged_ssus":[{"_id":"Bio"},{"_id":"PreCl"}],"abstract":[{"lang":"eng","text":"Metazoan development relies on the formation and remodeling of cell-cell contacts. Dynamic reorganization of adhesion receptors and the actomyosin cell cortex in space and time plays a central role in cell-cell contact formation and maturation. Nevertheless, how this process is mechanistically achieved when new contacts are formed remains unclear. 
Here, by building a biomimetic assay composed of progenitor cells adhering to supported lipid bilayers functionalized with E-cadherin ectodomains, we show that cortical F-actin flows, driven by the depletion of myosin-2 at the cell contact center, mediate the dynamic reorganization of adhesion receptors and cell cortex at the contact. E-cadherin-dependent downregulation of the small GTPase RhoA at the forming contact leads to both a depletion of myosin-2 and a decrease of F-actin at the contact center. At the contact rim, in contrast, myosin-2 becomes enriched by the retraction of bleb-like protrusions, resulting in a cortical tension gradient from the contact rim to its center. This tension gradient, in turn, triggers centrifugal F-actin flows, leading to further accumulation of F-actin at the contact rim and the progressive redistribution of E-cadherin from the contact center to the rim. Eventually, this combination of actomyosin downregulation and flows at the contact determines the characteristic molecular organization, with E-cadherin and F-actin accumulating at the contact rim, where they are needed to mechanically link the contractile cortices of the adhering cells."}],"publication_status":"published","language":[{"iso":"eng"}],"corr_author":"1","department":[{"_id":"CaHe"},{"_id":"EdHa"},{"_id":"MaLo"},{"_id":"NanoFab"}],"external_id":{"pmid":["38134934"],"isi":["001154500400001"]},"date_created":"2024-01-14T23:00:56Z","publication_identifier":{"eissn":["1879-0445"],"issn":["0960-9822"]},"oa":1,"acknowledgement":"We are grateful to Edwin Munro for their feedback and help with the single particle analysis. We thank members of the Heisenberg and Loose labs for their help and feedback on the manuscript, notably Xin Tong for making the PCS2-mCherry-AHPH plasmid. Finally, we thank the Aquatics and Imaging & Optics facilities of ISTA for their continuous support, especially Yann Cesbron for assistance with the laser cutter. 
This work was supported by an ERC\r\nAdvanced Grant (MECSPEC) to C.-P.H.","file":[{"creator":"dernst","file_name":"2024_CurrentBiology_Arslan.pdf","date_created":"2024-01-16T10:53:31Z","file_id":"14813","file_size":5183861,"checksum":"51220b76d72a614208f84bdbfbaf9b72","content_type":"application/pdf","date_updated":"2024-01-16T10:53:31Z","success":1,"access_level":"open_access","relation":"main_file"}],"pmid":1,"ec_funded":1,"intvolume":"        34","isi":1,"file_date_updated":"2024-01-16T10:53:31Z","scopus_import":"1","publication":"Current Biology","author":[{"orcid":"0000-0001-5809-9566","first_name":"Feyza N","id":"49DA7910-F248-11E8-B48F-1D18A9856A87","last_name":"Arslan","full_name":"Arslan, Feyza N"},{"id":"3A9DB764-F248-11E8-B48F-1D18A9856A87","first_name":"Edouard B","last_name":"Hannezo","full_name":"Hannezo, Edouard B","orcid":"0000-0001-6005-1561"},{"full_name":"Merrin, Jack","last_name":"Merrin","id":"4515C308-F248-11E8-B48F-1D18A9856A87","first_name":"Jack","orcid":"0000-0001-5145-4609"},{"orcid":"0000-0001-7309-9724","id":"462D4284-F248-11E8-B48F-1D18A9856A87","first_name":"Martin","full_name":"Loose, Martin","last_name":"Loose"},{"orcid":"0000-0002-0912-4566","full_name":"Heisenberg, Carl-Philipp J","last_name":"Heisenberg","id":"39427864-F248-11E8-B48F-1D18A9856A87","first_name":"Carl-Philipp J"}],"title":"Adhesion-induced cortical flows pattern E-cadherin-mediated cell contacts","month":"01","_id":"14795","citation":{"chicago":"Arslan, Feyza N, Edouard B Hannezo, Jack Merrin, Martin Loose, and Carl-Philipp J Heisenberg. “Adhesion-Induced Cortical Flows Pattern E-Cadherin-Mediated Cell Contacts.” <i>Current Biology</i>. Elsevier, 2024. <a href=\"https://doi.org/10.1016/j.cub.2023.11.067\">https://doi.org/10.1016/j.cub.2023.11.067</a>.","ista":"Arslan FN, Hannezo EB, Merrin J, Loose M, Heisenberg C-PJ. 2024. Adhesion-induced cortical flows pattern E-cadherin-mediated cell contacts. Current Biology. 
34(1), 171–182.e8.","ama":"Arslan FN, Hannezo EB, Merrin J, Loose M, Heisenberg C-PJ. Adhesion-induced cortical flows pattern E-cadherin-mediated cell contacts. <i>Current Biology</i>. 2024;34(1):171-182.e8. doi:<a href=\"https://doi.org/10.1016/j.cub.2023.11.067\">10.1016/j.cub.2023.11.067</a>","apa":"Arslan, F. N., Hannezo, E. B., Merrin, J., Loose, M., &#38; Heisenberg, C.-P. J. (2024). Adhesion-induced cortical flows pattern E-cadherin-mediated cell contacts. <i>Current Biology</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.cub.2023.11.067\">https://doi.org/10.1016/j.cub.2023.11.067</a>","ieee":"F. N. Arslan, E. B. Hannezo, J. Merrin, M. Loose, and C.-P. J. Heisenberg, “Adhesion-induced cortical flows pattern E-cadherin-mediated cell contacts,” <i>Current Biology</i>, vol. 34, no. 1. Elsevier, p. 171–182.e8, 2024.","mla":"Arslan, Feyza N., et al. “Adhesion-Induced Cortical Flows Pattern E-Cadherin-Mediated Cell Contacts.” <i>Current Biology</i>, vol. 34, no. 1, Elsevier, 2024, p. 171–182.e8, doi:<a href=\"https://doi.org/10.1016/j.cub.2023.11.067\">10.1016/j.cub.2023.11.067</a>.","short":"F.N. Arslan, E.B. Hannezo, J. Merrin, M. Loose, C.-P.J. 
Heisenberg, Current Biology 34 (2024) 171–182.e8."},"article_processing_charge":"Yes (via OA deal)","ddc":["570"],"doi":"10.1016/j.cub.2023.11.067","article_type":"original","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"status":"public","day":"08","publisher":"Elsevier","type":"journal_article","issue":"1","year":"2024","oa_version":"Published Version","date_updated":"2025-09-04T11:39:10Z","has_accepted_license":"1","project":[{"call_identifier":"H2020","name":"Interaction and feedback between cell mechanics and fate specification in vertebrate gastrulation","grant_number":"742573","_id":"260F1432-B435-11E9-9278-68D0E5697425"}],"date_published":"2024-01-08T00:00:00Z","page":"171-182.e8","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345"},{"has_accepted_license":"1","project":[{"call_identifier":"H2020","name":"Bridging Scales in Random Materials","_id":"0aa76401-070f-11eb-9043-b5bb049fa26d","grant_number":"948819"}],"date_published":"2024-10-01T00:00:00Z","date_updated":"2025-09-04T11:43:43Z","year":"2024","oa_version":"Published Version","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","page":"485-541","status":"public","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"day":"01","publisher":"Springer Nature","type":"journal_article","ddc":["510"],"arxiv":1,"article_type":"original","OA_type":"hybrid","doi":"10.1007/s00440-023-01254-0","OA_place":"publisher","_id":"14797","month":"10","title":"Annealed quantitative estimates for the quadratic 2D-discrete random matching problem","article_processing_charge":"Yes (in subscription journal)","citation":{"short":"N. Clozeau, F. 
Mattesini, Probability Theory and Related Fields 190 (2024) 485–541.","mla":"Clozeau, Nicolas, and Francesco Mattesini. “Annealed Quantitative Estimates for the Quadratic 2D-Discrete Random Matching Problem.” <i>Probability Theory and Related Fields</i>, vol. 190, Springer Nature, 2024, pp. 485–541, doi:<a href=\"https://doi.org/10.1007/s00440-023-01254-0\">10.1007/s00440-023-01254-0</a>.","ieee":"N. Clozeau and F. Mattesini, “Annealed quantitative estimates for the quadratic 2D-discrete random matching problem,” <i>Probability Theory and Related Fields</i>, vol. 190. Springer Nature, pp. 485–541, 2024.","ama":"Clozeau N, Mattesini F. Annealed quantitative estimates for the quadratic 2D-discrete random matching problem. <i>Probability Theory and Related Fields</i>. 2024;190:485-541. doi:<a href=\"https://doi.org/10.1007/s00440-023-01254-0\">10.1007/s00440-023-01254-0</a>","apa":"Clozeau, N., &#38; Mattesini, F. (2024). Annealed quantitative estimates for the quadratic 2D-discrete random matching problem. <i>Probability Theory and Related Fields</i>. Springer Nature. <a href=\"https://doi.org/10.1007/s00440-023-01254-0\">https://doi.org/10.1007/s00440-023-01254-0</a>","chicago":"Clozeau, Nicolas, and Francesco Mattesini. “Annealed Quantitative Estimates for the Quadratic 2D-Discrete Random Matching Problem.” <i>Probability Theory and Related Fields</i>. Springer Nature, 2024. <a href=\"https://doi.org/10.1007/s00440-023-01254-0\">https://doi.org/10.1007/s00440-023-01254-0</a>.","ista":"Clozeau N, Mattesini F. 2024. Annealed quantitative estimates for the quadratic 2D-discrete random matching problem. Probability Theory and Related Fields. 
190, 485–541."},"file_date_updated":"2025-01-09T08:10:54Z","scopus_import":"1","publication":"Probability Theory and Related Fields","author":[{"first_name":"Nicolas","id":"fea1b376-906f-11eb-847d-b2c0cf46455b","full_name":"Clozeau, Nicolas","last_name":"Clozeau"},{"full_name":"Mattesini, Francesco","last_name":"Mattesini","first_name":"Francesco"}],"isi":1,"intvolume":"       190","ec_funded":1,"file":[{"access_level":"open_access","relation":"main_file","success":1,"date_updated":"2025-01-09T08:10:54Z","checksum":"34f44cad6a210ff66791ee37e590af2c","content_type":"application/pdf","file_size":880117,"creator":"dernst","file_name":"2024_ProbTheoryRelatFields_Clozeau.pdf","date_created":"2025-01-09T08:10:54Z","file_id":"18788"}],"acknowledgement":"NC has received funding from the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (Grant agreement No 948819).\r\nFM is supported by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation) through the SPP 2265 Random Geometric Systems. FM has been funded by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation) under Germany’s Excellence Strategy EXC 2044 -390685587, Mathematics Münster: Dynamics–Geometry–Structure. FM has been funded by the Max Planck Institute for Mathematics in the Sciences.","oa":1,"publication_identifier":{"issn":["0178-8051"],"eissn":["1432-2064"]},"publication_status":"published","date_created":"2024-01-14T23:00:57Z","external_id":{"arxiv":["2303.00353"],"isi":["001136206200002"]},"department":[{"_id":"JuFi"}],"language":[{"iso":"eng"}],"corr_author":"1","volume":190,"quality_controlled":"1","abstract":[{"text":"We study a random matching problem on closed compact  2 -dimensional Riemannian manifolds (with respect to the squared Riemannian distance), with samples of random points whose common law is absolutely continuous with respect to the volume measure with strictly positive and bounded density. 
We show that given two sequences of numbers  n  and  m=m(n)  of points, asymptotically equivalent as  n  goes to infinity, the optimal transport plan between the two empirical measures  μn  and  νm  is quantitatively well-approximated by  (Id,exp(∇hn))#μn  where  hn  solves a linear elliptic PDE obtained by a regularized first-order linearization of the Monge-Ampère equation. This is obtained in the case of samples of correlated random points for which a stretched exponential decay of the  α -mixing coefficient holds and for a class of discrete-time Markov chains having a unique absolutely continuous invariant measure with respect to the volume measure.","lang":"eng"}]},{"oa":1,"publication_identifier":{"issn":["2334-2536"]},"DOAJ_listed":"1","file":[{"creator":"dernst","file_name":"2023_Optica_Diorico.pdf","file_id":"14824","date_created":"2024-01-17T08:53:16Z","file_size":4558986,"checksum":"eb99ca7d0fe73e22f121875175546ed7","content_type":"application/pdf","date_updated":"2024-01-17T08:53:16Z","success":1,"access_level":"open_access","relation":"main_file"}],"acknowledgement":"We thank Rishabh Sahu and Sebastian Wald for technical contributions to the experiment. 
Funding by Institute of Science and Technology Austria.","APC_amount":"3393,38 EUR","intvolume":"        11","isi":1,"file_date_updated":"2024-01-17T08:53:16Z","publication":"Optica","scopus_import":"1","keyword":["Atomic and Molecular Physics, and Optics","Electronic, Optical and Magnetic Materials"],"author":[{"full_name":"Diorico, Fritz R","last_name":"Diorico","id":"2E054C4C-F248-11E8-B48F-1D18A9856A87","first_name":"Fritz R","orcid":"0000-0002-4947-8924"},{"full_name":"Zhutov, Artem","last_name":"Zhutov","id":"0f02ed6a-b514-11ee-b891-8379c5f19cb7","first_name":"Artem"},{"orcid":"0000-0002-2031-204X","id":"4C02D85E-F248-11E8-B48F-1D18A9856A87","first_name":"Onur","full_name":"Hosten, Onur","last_name":"Hosten"}],"quality_controlled":"1","volume":11,"abstract":[{"text":"Frequency-stable lasers form the backbone of precision measurements in science and technology. Such lasers typically attain their stability through frequency locking to reference cavities. State-of-the-art locking performances to date had been achieved using frequency modulation based methods, complemented with active drift cancellation systems. We demonstrate an all passive, modulation-free laser-cavity locking technique (squash locking) that utilizes changes in spatial beam ellipticity for error signal generation, and a coherent polarization post-selection for noise resilience. By comparing two identically built proof-of-principle systems, we show a frequency locking instability of 5×10<jats:sup>−7</jats:sup> relative to the cavity linewidth at 10 s averaging. 
The results surpass the demonstrated performances of methods engineered over the last five decades, potentially enabling an advancement in the precision control of lasers, while creating avenues for bridging the performance gaps between industrial grade lasers with scientific ones due to the afforded simplicity and scalability.","lang":"eng"}],"publication_status":"published","corr_author":"1","language":[{"iso":"eng"}],"department":[{"_id":"OnHo"}],"external_id":{"isi":["001202817000004"]},"date_created":"2024-01-15T10:25:38Z","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"status":"public","day":"20","publisher":"Optica Publishing Group","type":"journal_article","issue":"1","oa_version":"Published Version","year":"2024","date_updated":"2025-09-04T12:13:27Z","has_accepted_license":"1","date_published":"2024-01-20T00:00:00Z","page":"26-31","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","title":"Laser-cavity locking utilizing beam ellipticity: accessing the 10<sup>−7</sup> instability scale relative to cavity linewidth","month":"01","_id":"14802","citation":{"apa":"Diorico, F. R., Zhutov, A., &#38; Hosten, O. (2024). Laser-cavity locking utilizing beam ellipticity: accessing the 10<sup>−7</sup> instability scale relative to cavity linewidth. <i>Optica</i>. Optica Publishing Group. <a href=\"https://doi.org/10.1364/optica.507451\">https://doi.org/10.1364/optica.507451</a>","ama":"Diorico FR, Zhutov A, Hosten O. Laser-cavity locking utilizing beam ellipticity: accessing the 10<sup>−7</sup> instability scale relative to cavity linewidth. <i>Optica</i>. 2024;11(1):26-31. doi:<a href=\"https://doi.org/10.1364/optica.507451\">10.1364/optica.507451</a>","ista":"Diorico FR, Zhutov A, Hosten O. 2024. 
Laser-cavity locking utilizing beam ellipticity: accessing the 10<sup>−7</sup> instability scale relative to cavity linewidth. Optica. 11(1), 26–31.","chicago":"Diorico, Fritz R, Artem Zhutov, and Onur Hosten. “Laser-Cavity Locking Utilizing Beam Ellipticity: Accessing the 10<sup>−7</sup> Instability Scale Relative to Cavity Linewidth.” <i>Optica</i>. Optica Publishing Group, 2024. <a href=\"https://doi.org/10.1364/optica.507451\">https://doi.org/10.1364/optica.507451</a>.","mla":"Diorico, Fritz R., et al. “Laser-Cavity Locking Utilizing Beam Ellipticity: Accessing the 10<sup>−7</sup> Instability Scale Relative to Cavity Linewidth.” <i>Optica</i>, vol. 11, no. 1, Optica Publishing Group, 2024, pp. 26–31, doi:<a href=\"https://doi.org/10.1364/optica.507451\">10.1364/optica.507451</a>.","short":"F.R. Diorico, A. Zhutov, O. Hosten, Optica 11 (2024) 26–31.","ieee":"F. R. Diorico, A. Zhutov, and O. Hosten, “Laser-cavity locking utilizing beam ellipticity: accessing the 10<sup>−7</sup> instability scale relative to cavity linewidth,” <i>Optica</i>, vol. 11, no. 1. Optica Publishing Group, pp. 26–31, 2024."},"article_processing_charge":"Yes","ddc":["530"],"OA_place":"publisher","doi":"10.1364/optica.507451","article_type":"original","OA_type":"gold"},{"language":[{"iso":"eng"}],"corr_author":"1","department":[{"_id":"KrCh"},{"_id":"KrPi"}],"external_id":{"isi":["001168211400001"]},"date_created":"2024-01-16T13:40:41Z","publication_status":"published","abstract":[{"lang":"eng","text":"We consider a natural problem dealing with weighted packet selection across a rechargeable link, which e.g., finds applications in cryptocurrency networks. The capacity of a link (u, v) is determined by how many nodes u and v allocate for this link. Specifically, the input is a finite ordered sequence of packets that arrive in both directions along a link. Given (u, v) and a packet of weight x going from u to v, node u can either accept or reject the packet. 
If u accepts the packet, the capacity on link (u, v) decreases by x. Correspondingly, v's capacity on (v, u)\r\nincreases by x. If a node rejects the packet, this will entail a cost affinely linear in the weight of the packet. A link is “rechargeable” in the sense that the total capacity of the link has to remain constant, but the allocation of capacity at the ends of the link can depend arbitrarily on the nodes' decisions. The goal is to minimise the sum of the capacity injected into the link and the cost of rejecting packets. We show that the problem is NP-hard, but can be approximated efficiently with a ratio of (1+ε)·(1+√3) for some arbitrary ε>0."}],"article_number":"114353","quality_controlled":"1","volume":989,"ec_funded":1,"intvolume":"       989","isi":1,"author":[{"first_name":"Stefan","full_name":"Schmid, Stefan","last_name":"Schmid"},{"first_name":"Jakub","id":"130759D2-D7DD-11E9-87D2-DE0DE6697425","last_name":"Svoboda","full_name":"Svoboda, Jakub","orcid":"0000-0002-1419-3267"},{"orcid":"0009-0001-3676-4809","first_name":"Michelle X","id":"2D82B818-F248-11E8-B48F-1D18A9856A87","last_name":"Yeo","full_name":"Yeo, Michelle X"}],"keyword":["General Computer Science","Theoretical Computer Science"],"file_date_updated":"2024-07-16T12:02:25Z","scopus_import":"1","publication":"Theoretical Computer Science","publication_identifier":{"issn":["0304-3975"]},"oa":1,"acknowledgement":"We thank Mahsa Bastankhah and Mohammad Ali Maddah-Ali for fruitful discussions about different variants of the problem. 
This work is supported by the European Research Council (ERC) Consolidator Project 864228 (AdjustNet), 2020-2025, the ERC CoG 863818 (ForM-SMArt), and the German Research Foundation (DFG) grant 470029389 (FlexNets), 2021-2024.","file":[{"date_updated":"2024-07-16T12:02:25Z","checksum":"efd5b7e738bf845312ba53889a3e13e4","content_type":"application/pdf","file_size":603570,"file_id":"17263","date_created":"2024-07-16T12:02:25Z","creator":"dernst","file_name":"2024_TheorComputerScience_Schmid.pdf","relation":"main_file","access_level":"open_access","success":1}],"doi":"10.1016/j.tcs.2023.114353","article_type":"original","ddc":["000"],"citation":{"ista":"Schmid S, Svoboda J, Yeo MX. 2024. Weighted packet selection for rechargeable links in cryptocurrency networks: Complexity and approximation. Theoretical Computer Science. 989, 114353.","chicago":"Schmid, Stefan, Jakub Svoboda, and Michelle X Yeo. “Weighted Packet Selection for Rechargeable Links in Cryptocurrency Networks: Complexity and Approximation.” <i>Theoretical Computer Science</i>. Elsevier, 2024. <a href=\"https://doi.org/10.1016/j.tcs.2023.114353\">https://doi.org/10.1016/j.tcs.2023.114353</a>.","apa":"Schmid, S., Svoboda, J., &#38; Yeo, M. X. (2024). Weighted packet selection for rechargeable links in cryptocurrency networks: Complexity and approximation. <i>Theoretical Computer Science</i>. Elsevier. <a href=\"https://doi.org/10.1016/j.tcs.2023.114353\">https://doi.org/10.1016/j.tcs.2023.114353</a>","ama":"Schmid S, Svoboda J, Yeo MX. Weighted packet selection for rechargeable links in cryptocurrency networks: Complexity and approximation. <i>Theoretical Computer Science</i>. 2024;989. doi:<a href=\"https://doi.org/10.1016/j.tcs.2023.114353\">10.1016/j.tcs.2023.114353</a>","ieee":"S. Schmid, J. Svoboda, and M. X. Yeo, “Weighted packet selection for rechargeable links in cryptocurrency networks: Complexity and approximation,” <i>Theoretical Computer Science</i>, vol. 989. Elsevier, 2024.","short":"S. 
Schmid, J. Svoboda, M.X. Yeo, Theoretical Computer Science 989 (2024).","mla":"Schmid, Stefan, et al. “Weighted Packet Selection for Rechargeable Links in Cryptocurrency Networks: Complexity and Approximation.” <i>Theoretical Computer Science</i>, vol. 989, 114353, Elsevier, 2024, doi:<a href=\"https://doi.org/10.1016/j.tcs.2023.114353\">10.1016/j.tcs.2023.114353</a>."},"related_material":{"record":[{"status":"public","relation":"earlier_version","id":"19985"}]},"article_processing_charge":"Yes (via OA deal)","title":"Weighted packet selection for rechargeable links in cryptocurrency networks: Complexity and approximation","month":"03","_id":"14820","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","oa_version":"Published Version","year":"2024","date_updated":"2025-12-02T14:02:37Z","date_published":"2024-03-21T00:00:00Z","has_accepted_license":"1","project":[{"name":"Formal Methods for Stochastic Models: Algorithms and Applications","call_identifier":"H2020","grant_number":"863818","_id":"0599E47C-7A3F-11EA-A408-12923DDC885E"}],"day":"21","publisher":"Elsevier","type":"journal_article","tmp":{"short":"CC BY (4.0)","legal_code_url":"https://creativecommons.org/licenses/by/4.0/legalcode","image":"/images/cc_by.png","name":"Creative Commons Attribution 4.0 International Public License (CC-BY 4.0)"},"status":"public"}]
