@inproceedings{f84242e6ad0f46d3a929c5d9f96c94d4,
  author    = {Aritake, Toshimitsu and Murata, Noboru},
  title     = {Learning Scale and Shift-Invariant Dictionary for Sparse Representation},
  booktitle = {Machine Learning, Optimization, and Data Science - 5th International Conference, LOD 2019, Proceedings},
  editor    = {Nicosia, Giuseppe and Pardalos, Panos and Umeton, Renato and Giuffrida, Giovanni and Sciacca, Vincenzo},
  series    = {Lecture Notes in Computer Science},
  pages     = {472--483},
  publisher = {Springer},
  year      = {2019},
  doi       = {10.1007/978-3-030-37599-7_39},
  isbn      = {9783030375980},
  language  = {English},
  keywords  = {Dictionary learning, Scale-invariance, Shift-invariance, Sparse coding},
  abstract  = {Sparse representation is a signal model to represent signals with a linear combination of a small number of prototype signals called atoms, and a set of atoms is called a dictionary. The design of the dictionary is a fundamental problem for sparse representation. However, when there are scaled or translated features in the signals, unstructured dictionary models cannot extract such features. In this paper, we propose a structured dictionary model which is scale and shift-invariant to extract features which commonly appear in several scales and locations. To achieve both scale and shift invariance, we assume that atoms of a dictionary are generated from vectors called ancestral atoms by scaling and shift operations, and an algorithm to learn these ancestral atoms is proposed.},
  note      = {5th International Conference on Machine Learning, Optimization, and Data Science, LOD 2019 ; Conference date: 10-09-2019 Through 13-09-2019},
}