@misc{9777,
  author       = {Grah, Rok and Friedlander, Tamar},
  publisher    = {Public Library of Science},
  title        = {{Maximizing crosstalk}},
  doi          = {10.1371/journal.pcbi.1007642.s002},
  year         = {2020},
}

@misc{9779,
  author       = {Grah, Rok and Friedlander, Tamar},
  publisher    = {Public Library of Science},
  title        = {{Distribution of crosstalk values}},
  doi          = {10.1371/journal.pcbi.1007642.s003},
  year         = {2020},
}

@misc{9780,
  abstract     = {PADREV : 4,4'-dimethoxy[1,1'-biphenyl]-2,2',5,5'-tetrol
Space Group: C 2 (5), Cell: a 24.488(16)Å b 5.981(4)Å c 3.911(3)Å, α 90° β 91.47(3)° γ 90°},
  author       = {Schlemmer, Werner and Nothdurft, Philipp and Petzold, Alina and Riess, Gisbert and Frühwirt, Philipp and Schmallegger, Max and Gescheidt-Demner, Georg and Fischer, Roland and Freunberger, Stefan Alexander and Kern, Wolfgang and Spirk, Stefan},
  publisher    = {CCDC},
  title        = {{CCDC 1991959: Experimental Crystal Structure Determination}},
  doi          = {10.5517/ccdc.csd.cc24vsrk},
  year         = {2020},
}

@misc{9798,
  abstract     = {Fitness interactions between mutations can influence a population’s evolution in many different ways. While epistatic effects are difficult to measure precisely, important information is captured by the mean and variance of log fitnesses for individuals carrying different numbers of mutations. We derive predictions for these quantities from a class of simple fitness landscapes, based on models of optimizing selection on quantitative traits. We also explore extensions to the models, including modular pleiotropy, variable effect sizes, mutational bias and maladaptation of the wild type. We illustrate our approach by reanalysing a large dataset of mutant effects in a yeast snoRNA. Though characterized by some large epistatic effects, these data give a good overall fit to the non-epistatic null model, suggesting that epistasis might have limited influence on the evolutionary dynamics in this system. We also show how the amount of epistasis depends on both the underlying fitness landscape and the distribution of mutations, and so is expected to vary in consistent ways between new mutations, standing variation and fixed mutations.},
  author       = {Fraisse, Christelle and Welch, John J.},
  publisher    = {Royal Society of London},
  title        = {{Simulation code for Fig S2 from the distribution of epistasis on simple fitness landscapes}},
  doi          = {10.6084/m9.figshare.7957472.v1},
  year         = {2020},
}

@misc{9799,
  abstract     = {Fitness interactions between mutations can influence a population’s evolution in many different ways. While epistatic effects are difficult to measure precisely, important information is captured by the mean and variance of log fitnesses for individuals carrying different numbers of mutations. We derive predictions for these quantities from a class of simple fitness landscapes, based on models of optimizing selection on quantitative traits. We also explore extensions to the models, including modular pleiotropy, variable effect sizes, mutational bias and maladaptation of the wild type. We illustrate our approach by reanalysing a large dataset of mutant effects in a yeast snoRNA. Though characterized by some large epistatic effects, these data give a good overall fit to the non-epistatic null model, suggesting that epistasis might have limited influence on the evolutionary dynamics in this system. We also show how the amount of epistasis depends on both the underlying fitness landscape and the distribution of mutations, and so is expected to vary in consistent ways between new mutations, standing variation and fixed mutations.},
  author       = {Fraisse, Christelle and Welch, John J.},
  publisher    = {Royal Society of London},
  title        = {{Simulation code for Fig S1 from the distribution of epistasis on simple fitness landscapes}},
  doi          = {10.6084/m9.figshare.7957469.v1},
  year         = {2020},
}

@misc{9814,
  abstract     = {Data and mathematica notebooks for plotting figures from Language learning with communication between learners},
  author       = {Ibsen-Jensen, Rasmus and Tkadlec, Josef and Chatterjee, Krishnendu and Nowak, Martin},
  publisher    = {Royal Society},
  title        = {{Data and mathematica notebooks for plotting figures from language acquisition with communication between learners}},
  doi          = {10.6084/m9.figshare.5973013.v1},
  year         = {2020},
}

@article{5681,
  abstract     = {We introduce dynamically warping grids for adaptive liquid simulation. Our primary contributions are a strategy for dynamically deforming regular grids over the course of a simulation and a method for efficiently utilizing these deforming grids for liquid simulation. Prior work has shown that unstructured grids are very effective for adaptive fluid simulations. However, unstructured grids often lead to complicated implementations and a poor cache hit rate due to inconsistent memory access. Regular grids, on the other hand, provide a fast, fixed memory access pattern and straightforward implementation. Our method combines the advantages of both: we leverage the simplicity of regular grids while still achieving practical and controllable spatial adaptivity. We demonstrate that our method enables adaptive simulations that are fast, flexible, and robust to null-space issues. At the same time, our method is simple to implement and takes advantage of existing highly-tuned algorithms.},
  author       = {Ibayashi, Hikaru and Wojtan, Christopher J and Thuerey, Nils and Igarashi, Takeo and Ando, Ryoichi},
  issn         = {1941-0506},
  journal      = {IEEE Transactions on Visualization and Computer Graphics},
  number       = {6},
  pages        = {2288--2302},
  publisher    = {IEEE},
  title        = {{Simulating liquids on dynamically warping grids}},
  doi          = {10.1109/TVCG.2018.2883628},
  volume       = {26},
  year         = {2020},
}

@article{6358,
  abstract     = {We study dynamical optimal transport metrics between density matrices associated to symmetric Dirichlet forms on finite-dimensional C∗-algebras. Our setting covers arbitrary skew-derivations and it provides a unified framework that simultaneously generalizes recently constructed transport metrics for Markov chains, Lindblad equations, and the Fermi Ornstein–Uhlenbeck semigroup. We develop a non-commutative differential calculus that allows us to obtain non-commutative Ricci curvature bounds, logarithmic Sobolev inequalities, transport-entropy inequalities, and spectral gap estimates.},
  author       = {Carlen, Eric A. and Maas, Jan},
  issn         = {1572-9613},
  journal      = {Journal of Statistical Physics},
  number       = {2},
  pages        = {319--378},
  publisher    = {Springer Nature},
  title        = {{Non-commutative calculus, optimal transport and functional inequalities  in dissipative quantum systems}},
  doi          = {10.1007/s10955-019-02434-w},
  volume       = {178},
  year         = {2020},
}

@article{6359,
  abstract     = {The strong rate of convergence of the Euler-Maruyama scheme for nondegenerate SDEs with irregular drift coefficients is considered. In the case of α-Hölder drift in the recent literature the rate α/2 was proved in many related situations. By exploiting the regularising effect of the noise more efficiently, we show that the rate is in fact arbitrarily close to 1/2 for all α>0. The result extends to Dini continuous coefficients, while in d=1 also to all bounded measurable coefficients.},
  author       = {Dareiotis, Konstantinos and Gerencsér, Máté},
  issn         = {1083-6489},
  journal      = {Electronic Journal of Probability},
  publisher    = {Institute of Mathematical Statistics},
  title        = {{On the regularisation of the noise for the Euler-Maruyama scheme with irregular drift}},
  doi          = {10.1214/20-EJP479},
  volume       = {25},
  year         = {2020},
}

@article{6488,
  abstract     = {We prove a central limit theorem for the difference of linear eigenvalue statistics of a sample covariance matrix W˜ and its minor W. We find that the fluctuation of this difference is much smaller than those of the individual linear statistics, as a consequence of the strong correlation between the eigenvalues of W˜ and W. Our result identifies the fluctuation of the spatial derivative of the approximate Gaussian field in the recent paper by Dumitru and Paquette. Unlike in a similar result for Wigner matrices, for sample covariance matrices, the fluctuation may entirely vanish.},
  author       = {Cipolloni, Giorgio and Erdős, László},
  issn         = {2010-3271},
  journal      = {Random Matrices: Theory and Applications},
  number       = {3},
  publisher    = {World Scientific Publishing},
  title        = {{Fluctuations for differences of linear eigenvalue statistics for sample covariance matrices}},
  doi          = {10.1142/S2010326320500069},
  volume       = {9},
  year         = {2020},
}

@article{6563,
  abstract     = {This paper presents two algorithms. The first decides the existence of a pointed homotopy between given simplicial maps 𝑓,𝑔:𝑋→𝑌, and the second computes the group [𝛴𝑋,𝑌]∗ of pointed homotopy classes of maps from a suspension; in both cases, the target Y is assumed simply connected. More generally, these algorithms work relative to 𝐴⊆𝑋.},
  author       = {Filakovský, Marek and Vokřínek, Lukáš},
  issn         = {1615-3383},
  journal      = {Foundations of Computational Mathematics},
  pages        = {311--330},
  publisher    = {Springer Nature},
  title        = {{Are two given maps homotopic? An algorithmic viewpoint}},
  doi          = {10.1007/s10208-019-09419-x},
  volume       = {20},
  year         = {2020},
}

@article{6593,
  abstract     = {We consider the monotone variational inequality problem in a Hilbert space and describe a projection-type method with inertial terms under the following properties: (a) The method generates a strongly convergent iteration sequence; (b) The method requires, at each iteration, only one projection onto the feasible set and two evaluations of the operator; (c) The method is designed for variational inequality for which the underline operator is monotone and uniformly continuous; (d) The method includes an inertial term. The latter is also shown to speed up the convergence in our numerical results. A comparison with some related methods is given and indicates that the new method is promising.},
  author       = {Shehu, Yekini and Li, Xiao-Huan and Dong, Qiao-Li},
  issn         = {1572-9265},
  journal      = {Numerical Algorithms},
  pages        = {365--388},
  publisher    = {Springer Nature},
  title        = {{An efficient projection-type method for monotone variational inequalities in Hilbert spaces}},
  doi          = {10.1007/s11075-019-00758-y},
  volume       = {84},
  year         = {2020},
}

@article{6748,
  abstract     = {Fitting a function by using linear combinations of a large number N of `simple' components is one of the most fruitful ideas in statistical learning. This idea lies at the core of a variety of methods, from two-layer neural networks to kernel regression, to boosting. In general, the resulting risk minimization problem is non-convex and is solved by gradient descent or its variants. Unfortunately, little is known about global convergence properties of these approaches.
Here we consider the problem of learning a concave function f on a compact convex domain Ω⊆ℝd, using linear combinations of `bump-like' components (neurons). The parameters to be fitted are the centers of N bumps, and the resulting empirical risk minimization problem is highly non-convex. We prove that, in the limit in which the number of neurons diverges, the evolution of gradient descent converges to a Wasserstein gradient flow in the space of probability distributions over Ω. Further, when the bump width δ tends to 0, this gradient flow has a limit which is a viscous porous medium equation. Remarkably, the cost function optimized by this gradient flow exhibits a special property known as displacement convexity, which implies exponential convergence rates for N→∞, δ→0. Surprisingly, this asymptotic theory appears to capture well the behavior for moderate values of δ,N. Explaining this phenomenon, and understanding the dependence on δ,N in a quantitative manner remains an outstanding challenge.},
  author       = {Javanmard, Adel and Mondelli, Marco and Montanari, Andrea},
  issn         = {1941-7330},
  journal      = {Annals of Statistics},
  number       = {6},
  pages        = {3619--3642},
  publisher    = {Institute of Mathematical Statistics},
  title        = {{Analysis of a two-layer neural network via displacement convexity}},
  doi          = {10.1214/20-AOS1945},
  volume       = {48},
  year         = {2020},
}

@article{6796,
  abstract     = {Nearby grid cells have been observed to express a remarkable degree of long-range order, which is often idealized as extending potentially to infinity. Yet their strict periodic firing and ensemble coherence are theoretically possible only in flat environments, much unlike the burrows which rodents usually live in. Are the symmetrical, coherent grid maps inferred in the lab relevant to chart their way in their natural habitat? We consider spheres as simple models of curved environments and, waiting for the appropriate experiments to be performed, we use our adaptation model to predict what grid maps would emerge in a network with the same type of recurrent connections, which on the plane produce coherence among the units. We find that on the sphere such connections distort the maps that single grid units would express on their own, and aggregate them into clusters. When remapping to a different spherical environment, units in each cluster maintain only partial coherence, similar to what is observed in disordered materials, such as spin glasses.},
  author       = {Stella, Federico and Urdapilleta, Eugenio and Luo, Yifan and Treves, Alessandro},
  issn         = {1098-1063},
  journal      = {Hippocampus},
  number       = {4},
  pages        = {302--313},
  publisher    = {Wiley},
  title        = {{Partial coherence and frustration in self-organizing spherical grids}},
  doi          = {10.1002/hipo.23144},
  volume       = {30},
  year         = {2020},
}

@article{6808,
  abstract     = {Super-resolution fluorescence microscopy has become an important catalyst for discovery in the life sciences. In STimulated Emission Depletion (STED) microscopy, a pattern of light drives fluorophores from a signal-emitting on-state to a non-signalling off-state. Only emitters residing in a sub-diffraction volume around an intensity minimum are allowed to fluoresce, rendering them distinguishable from the nearby, but dark fluorophores. STED routinely achieves resolution in the few tens of nanometers range in biological samples and is suitable for live imaging. Here, we review the working principle of STED and provide general guidelines for successful STED imaging. The strive for ever higher resolution comes at the cost of increased light burden. We discuss techniques to reduce light exposure and mitigate its detrimental effects on the specimen. These include specialized illumination strategies as well as protecting fluorophores from photobleaching mediated by high-intensity STED light. This opens up the prospect of volumetric imaging in living cells and tissues with diffraction-unlimited resolution in all three spatial dimensions.},
  author       = {Jahr, Wiebke and Velicky, Philipp and Danzl, Johann G},
  issn         = {1046-2023},
  journal      = {Methods},
  number       = {3},
  pages        = {27--41},
  publisher    = {Elsevier},
  title        = {{Strategies to maximize performance in STimulated Emission Depletion (STED) nanoscopy of biological specimens}},
  doi          = {10.1016/j.ymeth.2019.07.019},
  volume       = {174},
  year         = {2020},
}

@article{6906,
  abstract     = {We consider systems of bosons trapped in a box, in the Gross–Pitaevskii regime. We show that low-energy states exhibit complete Bose–Einstein condensation with an optimal bound on the number of orthogonal excitations. This extends recent results obtained in Boccato et al. (Commun Math Phys 359(3):975–1026, 2018), removing the assumption of small interaction potential.},
  author       = {Boccato, Chiara and Brennecke, Christian and Cenatiempo, Serena and Schlein, Benjamin},
  issn         = {1432-0916},
  journal      = {Communications in Mathematical Physics},
  pages        = {1311--1395},
  publisher    = {Springer},
  title        = {{Optimal rate for Bose-Einstein condensation in the Gross-Pitaevskii regime}},
  doi          = {10.1007/s00220-019-03555-9},
  volume       = {376},
  year         = {2020},
}

@article{6944,
  abstract     = {We study the problem of automatically detecting if a given multi-class classifier operates outside of its specifications (out-of-specs), i.e. on input data from a different distribution than what it was trained for. This is an important problem to solve on the road towards creating reliable computer vision systems for real-world applications, because the quality of a classifier’s predictions cannot be guaranteed if it operates out-of-specs. Previously proposed methods for out-of-specs detection make decisions on the level of single inputs. This, however, is insufficient to achieve low false positive rate and high false negative rates at the same time. In this work, we describe a new procedure named KS(conf), based on statistical reasoning. Its main component is a classical Kolmogorov–Smirnov test that is applied to the set of predicted confidence values for batches of samples. Working with batches instead of single samples allows increasing the true positive rate without negatively affecting the false positive rate, thereby overcoming a crucial limitation of single sample tests. We show by extensive experiments using a variety of convolutional network architectures and datasets that KS(conf) reliably detects out-of-specs situations even under conditions where other tests fail. It furthermore has a number of properties that make it an excellent candidate for practical deployment: it is easy to implement, adds almost no overhead to the system, works with any classifier that outputs confidence scores, and requires no a priori knowledge about how the data distribution could change.},
  author       = {Sun, Rémy and Lampert, Christoph},
  issn         = {1573-1405},
  journal      = {International Journal of Computer Vision},
  number       = {4},
  pages        = {970--995},
  publisher    = {Springer Nature},
  title        = {{KS(conf): A light-weight test if a multiclass classifier operates outside of its specifications}},
  doi          = {10.1007/s11263-019-01232-x},
  volume       = {128},
  year         = {2020},
}

@article{6952,
  abstract     = {We present a unified framework tackling two problems: class-specific 3D reconstruction from a single image, and generation of new 3D shape samples. These tasks have received considerable attention recently; however, most existing approaches rely on 3D supervision, annotation of 2D images with keypoints or poses, and/or training with multiple views of each object instance. Our framework is very general: it can be trained in similar settings to existing approaches, while also supporting weaker supervision. Importantly, it can be trained purely from 2D images, without pose annotations, and with only a single view per instance. We employ meshes as an output representation, instead of voxels used in most prior work. This allows us to reason over lighting parameters and exploit shading information during training, which previous 2D-supervised methods cannot. Thus, our method can learn to generate and reconstruct concave object classes. We evaluate our approach in various settings, showing that: (i) it learns to disentangle shape from pose and lighting; (ii) using shading in the loss improves performance compared to just silhouettes; (iii) when using a standard single white light, our model outperforms state-of-the-art 2D-supervised methods, both with and without pose supervision, thanks to exploiting shading cues; (iv) performance improves further when using multiple coloured lights, even approaching that of state-of-the-art 3D-supervised methods; (v) shapes produced by our model capture smooth surfaces and fine details better than voxel-based approaches; and (vi) our approach supports concave classes such as bathtubs and sofas, which methods based on silhouettes cannot learn.},
  author       = {Henderson, Paul M and Ferrari, Vittorio},
  issn         = {1573-1405},
  journal      = {International Journal of Computer Vision},
  pages        = {835--854},
  publisher    = {Springer Nature},
  title        = {{Learning single-image 3D reconstruction by generative modelling of shape, pose and shading}},
  doi          = {10.1007/s11263-019-01219-8},
  volume       = {128},
  year         = {2020},
}

@article{6997,
  author       = {Zhang, Yuzhou and Friml, Jiří},
  issn         = {1469-8137},
  journal      = {New Phytologist},
  number       = {3},
  pages        = {1049--1052},
  publisher    = {Wiley},
  title        = {{Auxin guides roots to avoid obstacles during gravitropic growth}},
  doi          = {10.1111/nph.16203},
  volume       = {225},
  year         = {2020},
}

@article{7004,
  abstract     = {We define an action of the (double of) Cohomological Hall algebra of Kontsevich and Soibelman on the cohomology of the moduli space of spiked instantons of Nekrasov. We identify this action with the one of the affine Yangian of gl(1). Based on that we derive the vertex algebra at the corner Wr1,r2,r3 of Gaiotto and Rapčák. We conjecture that our approach works for a big class of Calabi–Yau categories, including those associated with toric Calabi–Yau 3-folds.},
  author       = {Rapčák, Miroslav and Soibelman, Yan and Yang, Yaping and Zhao, Gufang},
  issn         = {1432-0916},
  journal      = {Communications in Mathematical Physics},
  pages        = {1803--1873},
  publisher    = {Springer Nature},
  title        = {{Cohomological Hall algebras, vertex algebras and instantons}},
  doi          = {10.1007/s00220-019-03575-5},
  volume       = {376},
  year         = {2020},
}

