@inproceedings{18348,
  abstract     = {We present a novel method for estimation of articulated motion in depth scans. The method is based on a framework for regularization of vector- and matrix- valued functions on parametric surfaces.

We extend augmented-Lagrangian total variation regularization to smooth rigid motion cues on the scanned 3D surface obtained from a range scanner. We demonstrate the resulting smoothed motion maps to be a powerful tool in articulated scene understanding, providing a basis for rigid parts segmentation, with little prior assumptions on the scene, despite the noisy depth measurements that often appear in commodity depth scanners.},
  author       = {Rosman, Guy and Bronstein, Alexander and Bronstein, Michael M. and Tai, Xue-Cheng and Kimmel, Ron},
  booktitle    = {Computer Vision, ECCV 2012 - Workshops and Demonstrations},
  isbn         = {9783642338625},
  issn         = {1611-3349},
  location     = {Florence, Italy},
  number       = {Part 1},
  pages        = {52--62},
  publisher    = {Springer Nature},
  title        = {{Group-valued regularization for analysis of articulated motion}},
  doi          = {10.1007/978-3-642-33863-2_6},
  volume       = {7583},
  year         = {2012},
}

@inproceedings{18349,
  abstract     = {The rapid development of 3D acquisition technology has brought with itself the need to perform standard signal processing operations such as filters on 3D data. It has been shown that the eigenfunctions of the Laplace-Beltrami operator (manifold harmonics) of a surface play the role of the Fourier basis in the Euclidean space; it is thus possible to formulate signal analysis and synthesis in the manifold harmonics basis. In particular, geometry filtering can be carried out in the manifold harmonics domain by decomposing the embedding coordinates of the shape in this basis. However, since the basis functions depend on the shape itself, such filtering is valid only for weak (near all-pass) filters, and produces severe artifacts otherwise. In this paper, we analyze this problem and propose the fractional filtering approach, wherein we apply iteratively weak fractional powers of the filter, followed by the update of the basis functions. Experimental results show that such a process produces more plausible and meaningful results.},
  author       = {Kovnatsky, Artiom and Bronstein, Michael M. and Bronstein, Alexander},
  booktitle    = {Computer Vision, ECCV 2012 - Workshops and Demonstrations},
  isbn         = {9783642338625},
  issn         = {0302-9743},
  location     = {Florence, Italy},
  number       = {Part 1},
  pages        = {83--91},
  publisher    = {Springer Nature},
  title        = {{Stable Spectral Mesh Filtering}},
  doi          = {10.1007/978-3-642-33863-2_9},
  volume       = {7583},
  year         = {2012},
}

@inproceedings{18350,
  abstract     = {We introduce an (equi-)affine invariant geometric structure by which surfaces that go through squeeze and shear transformations can still be properly analyzed. The definition of an affine invariant metric enables us to evaluate a new form of geodesic distances and to construct an invariant Laplacian from which local and global diffusion geometry is constructed. Applications of the proposed framework demonstrate its power in generalizing and enriching the existing set of tools for shape analysis.},
  author       = {Raviv, Dan and Bronstein, Alexander and Bronstein, Michael M. and Kimmel, Ron and Sochen, Nir},
  booktitle    = {15th International Workshop on Theoretical Foundations of Computer Vision},
  isbn         = {9783642340901},
  issn         = {1611-3349},
  location     = {Dagstuhl, Germany},
  pages        = {177--190},
  publisher    = {Springer Nature},
  title        = {{Equi-affine invariant geometries of articulated objects}},
  doi          = {10.1007/978-3-642-34091-8_8},
  volume       = {7474},
  year         = {2012},
}

@inproceedings{10903,
  abstract     = {We propose a logic-based framework for automated reasoning about sequential programs manipulating singly-linked lists and arrays with unbounded data. We introduce the logic SLAD, which allows combining shape constraints, written in a fragment of Separation Logic, with data and size constraints. We address the problem of checking the entailment between SLAD formulas, which is crucial in performing pre-post condition reasoning. Although this problem is undecidable in general for SLAD, we propose a sound and powerful procedure that is able to solve this problem for a large class of formulas, beyond the capabilities of existing techniques and tools. We prove that this procedure is complete, i.e., it is actually a decision procedure for this problem, for an important fragment of SLAD including known decidable logics. We implemented this procedure and shown its preciseness and its efficiency on a significant benchmark of formulas.},
  author       = {Bouajjani, Ahmed and Dragoi, Cezara and Enea, Constantin and Sighireanu, Mihaela},
  booktitle    = {Automated Technology for Verification and Analysis},
  isbn         = {9783642333859},
  issn         = {1611-3349},
  location     = {Thiruvananthapuram, India},
  pages        = {167--182},
  publisher    = {Springer},
  title        = {{Accurate invariant checking for programs manipulating lists and arrays with infinite data}},
  doi          = {10.1007/978-3-642-33386-6_14},
  volume       = {7561},
  year         = {2012},
}

@inproceedings{10904,
  abstract     = {Multi-dimensional mean-payoff and energy games provide the mathematical foundation for the quantitative study of reactive systems, and play a central role in the emerging quantitative theory of verification and synthesis. In this work, we study the strategy synthesis problem for games with such multi-dimensional objectives along with a parity condition, a canonical way to express ω-regular conditions. While in general, the winning strategies in such games may require infinite memory, for synthesis the most relevant problem is the construction of a finite-memory winning strategy (if one exists). Our main contributions are as follows. First, we show a tight exponential bound (matching upper and lower bounds) on the memory required for finite-memory winning strategies in both multi-dimensional mean-payoff and energy games along with parity objectives. This significantly improves the triple exponential upper bound for multi energy games (without parity) that could be derived from results in literature for games on VASS (vector addition systems with states). Second, we present an optimal symbolic and incremental algorithm to compute a finite-memory winning strategy (if one exists) in such games. Finally, we give a complete characterization of when finite memory of strategies can be traded off for randomness. In particular, we show that for one-dimension mean-payoff parity games, randomized memoryless strategies are as powerful as their pure finite-memory counterparts.},
  author       = {Chatterjee, Krishnendu and Randour, Mickael and Raskin, Jean-François},
  booktitle    = {CONCUR 2012 - Concurrency Theory},
  editor       = {Koutny, Maciej and Ulidowski, Irek},
  isbn         = {9783642329395},
  issn         = {1611-3349},
  location     = {Newcastle upon Tyne, United Kingdom},
  pages        = {115--131},
  publisher    = {Springer},
  title        = {{Strategy synthesis for multi-dimensional quantitative objectives}},
  doi          = {10.1007/978-3-642-32940-1_10},
  volume       = {7454},
  year         = {2012},
}

@inproceedings{10905,
  abstract     = {Energy games belong to a class of turn-based two-player infinite-duration games played on a weighted directed graph. It is one of the rare and intriguing combinatorial problems that lie in NP ∩ co-NP, but are not known to be in P. While the existence of polynomial-time algorithms has been a major open problem for decades, there is no algorithm that solves any non-trivial subclass in polynomial time.
In this paper, we give several results based on the weight structures of the graph. First, we identify a notion of penalty and present a polynomial-time algorithm when the penalty is large. Our algorithm is the first polynomial-time algorithm on a large class of weighted graphs. It includes several counter examples that show that many previous algorithms, such as value iteration and random facet algorithms, require at least sub-exponential time. Our main technique is developing the first non-trivial approximation algorithm and showing how to convert it to an exact algorithm. Moreover, we show that in a practical case in verification where weights are clustered around a constant number of values, the energy game problem can be solved in polynomial time. We also show that the problem is still as hard as in general when the clique-width is bounded or the graph is strongly ergodic, suggesting that restricting graph structures need not help.},
  author       = {Chatterjee, Krishnendu and Henzinger, Monika H. and Krinninger, Sebastian and Nanongkai, Danupon},
  booktitle    = {Algorithms – ESA 2012},
  isbn         = {9783642330896},
  issn         = {1611-3349},
  location     = {Ljubljana, Slovenia},
  pages        = {301--312},
  publisher    = {Springer},
  title        = {{Polynomial-time algorithms for energy games with special weight structures}},
  doi          = {10.1007/978-3-642-33090-2_27},
  volume       = {7501},
  year         = {2012},
}

@inproceedings{10906,
  abstract     = {HSF(C) is a tool that automates verification of safety and liveness properties for C programs. This paper describes the verification approach taken by HSF(C) and provides instructions on how to install and use the tool.},
  author       = {Grebenshchikov, Sergey and Gupta, Ashutosh and Lopes, Nuno P. and Popeea, Corneliu and Rybalchenko, Andrey},
  booktitle    = {Tools and Algorithms for the Construction and Analysis of Systems},
  editor       = {Flanagan, Cormac and König, Barbara},
  isbn         = {9783642287558},
  issn         = {1611-3349},
  location     = {Tallinn, Estonia},
  pages        = {549--551},
  publisher    = {Springer},
  title        = {{HSF(C): A software verifier based on Horn clauses}},
  doi          = {10.1007/978-3-642-28756-5_46},
  volume       = {7214},
  year         = {2012},
}

@inproceedings{5745,
  abstract     = {Unsatisfiability proofs find many applications in verification. Today, many SAT solvers are capable of producing resolution proofs of unsatisfiability. For efficiency smaller proofs are preferred over bigger ones. The solvers apply proof reduction methods to remove redundant parts of the proofs while and after generating the proofs. One method of reducing resolution proofs is redundant resolution reduction, i.e., removing repeated pivots in the paths of resolution proofs (aka Pivot recycle). The known single pass algorithm only tries to remove redundancies in the parts of the proof that are trees. In this paper, we present three modifications to improve the algorithm such that the redundancies can be found in the parts of the proofs that are DAGs. The first modified algorithm covers greater number of redundancies as compared to the known algorithm without incurring any additional cost. The second modified algorithm covers even greater number of the redundancies but it may have longer run times. Our third modified algorithm is parametrized and can trade off between run times and the coverage of the redundancies. We have implemented our algorithms in OpenSMT and applied them on unsatisfiability proofs of 198 examples from plain MUS track of SAT11 competition. The first and second algorithm additionally remove 0.89% and 10.57% of clauses respectively as compared to the original algorithm. For certain value of the parameter, the third algorithm removes almost as many clauses as the second algorithm but is significantly faster.},
  author       = {Gupta, Ashutosh},
  booktitle    = {10th International Symposium on Automated Technology for Verification and Analysis},
  isbn         = {9783642333859},
  issn         = {1611-3349},
  location     = {Thiruvananthapuram, Kerala, India},
  pages        = {107--121},
  publisher    = {Springer Nature},
  title        = {{Improved single pass algorithms for resolution proof reduction}},
  doi          = {10.1007/978-3-642-33386-6_10},
  volume       = {7561},
  year         = {2012},
}

@inproceedings{10907,
  abstract     = {This paper presents a method to create a model of an articulated object using the planar motion in an initialization video. The model consists of rigid parts connected by points of articulation. The rigid parts are described by the positions of salient feature-points tracked throughout the video. Following a filtering step that identifies points that belong to different objects, rigid parts are found by a grouping process in a graph pyramid. Valid articulation points are selected by verifying multiple hypotheses for each pair of parts.},
  author       = {Artner, Nicole M. and Ion, Adrian and Kropatsch, Walter G.},
  booktitle    = {Graph-Based Representations in Pattern Recognition},
  editor       = {Jiang, Xiaoyi and Ferrer, Miquel and Torsello, Andrea},
  isbn         = {9783642208430},
  issn         = {1611-3349},
  location     = {Münster, Germany},
  pages        = {215--224},
  publisher    = {Springer},
  title        = {{Spatio-temporal extraction of articulated models in a graph pyramid}},
  doi          = {10.1007/978-3-642-20844-7_22},
  volume       = {6658},
  year         = {2011},
}

@inproceedings{9648,
  abstract     = {In this paper, we establish a correspondence between the incremental algorithm for computing AT-models [8,9] and the one for computing persistent homology [6,14,15]. We also present a decremental algorithm for computing AT-models that allows to extend the persistence computation to a wider setting. Finally, we show how to combine incremental and decremental techniques for persistent homology computation.},
  author       = {Gonzalez-Diaz, Rocio and Ion, Adrian and Jimenez, Maria Jose and Poyatos, Regina},
  booktitle    = {Computer Analysis of Images and Patterns},
  isbn         = {9783642236716},
  issn         = {1611-3349},
  location     = {Seville, Spain},
  pages        = {286--293},
  publisher    = {Springer Nature},
  title        = {{Incremental-decremental algorithm for computing AT-models and persistent homology}},
  doi          = {10.1007/978-3-642-23672-3_35},
  volume       = {6854},
  year         = {2011},
}

@inproceedings{18338,
  abstract     = {Invariant image descriptors play an important role in many computer vision and pattern recognition problems such as image search and retrieval. A dominant paradigm today is that of “bags of features”, a representation of images as distributions of primitive visual elements. The main disadvantage of this approach is the loss of spatial relations between features, which often carry important information about the image. In this paper, we show how to construct spatially-sensitive image descriptors in which both the features and their relation are affine-invariant. Our construction is based on a vocabulary of pairs of features coupled with a vocabulary of invariant spatial relations between the features. Experimental results show the advantage of our approach in image retrieval applications.},
  author       = {Bronstein, Alexander and Bronstein, Michael M.},
  booktitle    = {11th European Conference on Computer Vision},
  isbn         = {9783642155512},
  issn         = {1611-3349},
  location     = {Heraklion, Greece},
  pages        = {197--208},
  publisher    = {Springer Nature},
  title        = {{Spatially-sensitive affine-invariant image descriptors}},
  doi          = {10.1007/978-3-642-15552-9_15},
  volume       = {6312},
  year         = {2010},
}

@inproceedings{18339,
  abstract     = {Automatic detection of symmetries, regularity, and repetitive structures in 3D geometry is a fundamental problem in shape analysis and pattern recognition with applications in computer vision and graphics. Especially challenging is to detect intrinsic regularity, where the repetitions are on an intrinsic grid, without any apparent Euclidean pattern to describe the shape, but rising out of (near) isometric deformation of the underlying surface. In this paper, we employ multidimensional scaling to reduce the problem of intrinsic structure detection to a simpler problem of 2D grid detection. Potential 2D grids are then identified using an autocorrelation analysis, refined using local fitting, validated, and finally projected back to the spatial domain. We test the detection algorithm on a variety of scanned plaster models in presence of imperfections like missing data, noise and outliers. We also present a range of applications including scan completion, shape editing, super-resolution, and structural correspondence.},
  author       = {Mitra, Niloy J. and Bronstein, Alexander and Bronstein, Michael},
  booktitle    = {11th European Conference on Computer Vision},
  isbn         = {9783642155574},
  issn         = {0302-9743},
  location     = {Heraklion, Greece},
  pages        = {398--410},
  publisher    = {Springer Nature},
  title        = {{Intrinsic regularity detection in 3D geometry}},
  doi          = {10.1007/978-3-642-15558-1_29},
  volume       = {6313},
  year         = {2010},
}

@inproceedings{10908,
  abstract     = {We present ABC, a software tool for automatically computing symbolic upper bounds on the number of iterations of nested program loops. The system combines static analysis of programs with symbolic summation techniques to derive loop invariant relations between program variables. Iteration bounds are obtained from the inferred invariants, by replacing variables with bounds on their greatest values. We have successfully applied ABC to a large number of examples. The derived symbolic bounds express non-trivial polynomial relations over loop variables. We also report on results to automatically infer symbolic expressions over harmonic numbers as upper bounds on loop iteration counts.},
  author       = {Blanc, Régis and Henzinger, Thomas A. and Hottelier, Thibaud and Kovács, Laura},
  booktitle    = {Logic for Programming, Artificial Intelligence, and Reasoning},
  editor       = {Clarke, Edmund M. and Voronkov, Andrei},
  isbn         = {9783642175107},
  issn         = {1611-3349},
  location     = {Dakar, Senegal},
  pages        = {103--118},
  publisher    = {Springer Nature},
  title        = {{ABC: Algebraic Bound Computation for loops}},
  doi          = {10.1007/978-3-642-17511-4_7},
  volume       = {6355},
  year         = {2010},
}

@inbook{5940,
  author       = {Juhás, Gabriel and Kazlov, Igor and Juhásová, Ana},
  booktitle    = {Applications and Theory of Petri Nets},
  isbn         = {9783642136740},
  issn         = {0302-9743},
  pages        = {1--17},
  publisher    = {Springer Berlin Heidelberg},
  title        = {{Instance Deadlock: A Mystery behind Frozen Programs}},
  doi          = {10.1007/978-3-642-13675-7_1},
  year         = {2010},
}

@inproceedings{18320,
  abstract     = {Presented here is the problem of recovering a dynamic image superimposed on a static background. Such a problem is ill-posed and may arise e.g. in imaging through semireflective media, in separation of an illumination image from a reflectance image, in imaging with diffraction phenomena, etc. In this work we study regularization of this problem in spirit of Total Variation and general sparsifying transformations.},
  author       = {Bronstein, Alexander and Bronstein, Michael M. and Zibulevsky, Michael},
  booktitle    = {6th International Conference on Independent Component Analysis and Signal Separation},
  isbn         = {9783540326304},
  issn         = {1611-3349},
  location     = {Charleston, SC, United States},
  pages        = {934--940},
  publisher    = {Springer Nature},
  title        = {{On separation of semitransparent dynamic images from static background}},
  doi          = {10.1007/11679363_116},
  volume       = {3889},
  year         = {2006},
}

@inproceedings{18321,
  abstract     = {Recent studies on three-dimensional face recognition proposed to model facial expressions as isometries of the facial surface. Based on this model, expression-invariant signatures of the face were constructed by means of approximate isometric embedding into flat spaces. Here, we apply a new method for measuring isometry-invariant similarity between faces by embedding one facial surface into another. We demonstrate that our approach has several significant advantages, one of which is the ability to handle partially missing data. Promising face recognition results are obtained in numerical experiments even when the facial surfaces are severely occluded.},
  author       = {Bronstein, Alexander and Bronstein, Michael M. and Kimmel, Ron},
  booktitle    = {9th European Conference on Computer Vision},
  isbn         = {9783540338369},
  issn         = {1611-3349},
  location     = {Graz, Austria},
  publisher    = {Springer Nature},
  title        = {{Robust expression-invariant face recognition from partially missing data}},
  doi          = {10.1007/11744078_31},
  volume       = {3953},
  year         = {2006},
}

@inproceedings{18322,
  abstract     = {A geometric framework for finding intrinsic correspondence between animated 3D faces is presented. We model facial expressions as isometries of the facial surface and find the correspondence between two faces as the minimum-distortion mapping. Generalized multidimensional scaling is used for this goal. We apply our approach to texture mapping onto 3D video, expression exaggeration and morphing between faces.},
  author       = {Bronstein, Alexander and Bronstein, Michael M. and Kimmel, Ron},
  booktitle    = {4th International Conference on Articulated Motion and Deformable Objects},
  isbn         = {9783540360315},
  issn         = {1611-3349},
  location     = {Mallorca, Spain},
  pages        = {38--47},
  publisher    = {Springer Nature},
  title        = {{Facetoface: An isometric model for facial animation}},
  doi          = {10.1007/11789239_5},
  volume       = {4069},
  year         = {2006},
}

@inproceedings{18323,
  abstract     = {We present a theoretical and computational framework for matching of two-dimensional articulated shapes. Assuming that articulations can be modeled as near-isometries, we show an axiomatic construction of an articulation-invariant distance between shapes, formulated as a generalized multidimensional scaling (GMDS) problem and solved efficiently. Some numerical results demonstrating the accuracy of our method are presented.},
  author       = {Bronstein, Alexander and Bronstein, Michael M. and Bruckstein, Alfred M. and Kimmel, Ron},
  booktitle    = {4th International Conference on Articulated Motion and Deformable Objects},
  isbn         = {9783540360315},
  issn         = {1611-3349},
  location     = {Mallorca, Spain},
  pages        = {48--57},
  publisher    = {Springer Nature},
  title        = {{Matching two-dimensional articulated shapes using generalized multidimensional scaling}},
  doi          = {10.1007/11789239_6},
  volume       = {4069},
  year         = {2006},
}

@inproceedings{18319,
  abstract     = {The problem of isometry-invariant representation and comparison of surfaces is of cardinal importance in pattern recognition applications dealing with deformable objects. Particularly, in three-dimensional face recognition treating facial expressions as isometries of the facial surface allows to perform robust recognition insensitive to expressions.
Isometry-invariant representation of surfaces can be constructed by isometrically embedding them into some convenient space, and carrying out the comparison in that space. Presented here is a discussion on isometric embedding into S3, which appears to be superior over the previously used Euclidean space in sense of the representation accuracy.},
  author       = {Bronstein, Alexander and Bronstein, Michael M. and Kimmel, Ron},
  booktitle    = {5th International Conference on Scale-Space Theories in Computer Vision},
  isbn         = {9783540255475},
  issn         = {1611-3349},
  location     = {Hofgeismar, Germany},
  pages        = {622--631},
  publisher    = {Springer Berlin Heidelberg},
  title        = {{Isometric embedding of facial surfaces into S3}},
  doi          = {10.1007/11408031_53},
  volume       = {3459},
  year         = {2005},
}

@inproceedings{11800,
  abstract     = {Web search engines have emerged as one of the central applications on the Internet. In fact, search has become one of the most important activities that people engage in on the Internet. Even beyond becoming the number one source of information, a growing number of businesses are depending on web search engines for customer acquisition.

The first generation of web search engines used text-only retrieval techniques. Google revolutionized the field by deploying the PageRank technology – an eigenvector-based analysis of the hyperlink structure – to analyze the web in order to produce relevant results. Moving forward, our goal is to achieve a better understanding of a page with a view towards producing even more relevant results.},
  author       = {Henzinger, Monika H.},
  booktitle    = {31st International Colloquium on Automata, Languages and Programming},
  issn         = {1611-3349},
  location     = {Turku, Finland},
  pages        = {3},
  publisher    = {Springer Nature},
  title        = {{The past, present, and future of web search engines}},
  doi          = {10.1007/978-3-540-27836-8_2},
  volume       = {3142},
  year         = {2004},
}

