% references.bib — BibTeX bibliography (497 lines, 449 loc)
@book{biggs:algebraic,
  title     = {Algebraic Graph Theory},
  author    = {Biggs, Norman},
  number    = {67},
  year      = {1993},
  publisher = {Cambridge University Press}
}
@article{dibattista:algorithms,
  author    = {Di Battista, Giuseppe and Eades, Peter and Tamassia, Roberto and Tollis, Ioannis G},
  title     = {Algorithms for drawing graphs: an annotated bibliography},
  journal   = {Computational Geometry},
  volume    = {4},
  number    = {5},
  pages     = {235--282},
  year      = {1994},
  publisher = {Elsevier}
}
@article{arnoldi:principle,
  author  = {Arnoldi, Walter Edwin},
  title   = {The principle of minimized iterations in the solution of the matrix eigenvalue problem},
  journal = {Quarterly of applied mathematics},
  volume  = {9},
  number  = {1},
  pages   = {17--29},
  year    = {1951}
}
@book{doyle:random,
  title     = {Random walks and electric networks},
  author    = {Doyle, Peter G and Snell, J Laurie},
  volume    = {22},
  year      = {1984},
  publisher = {American Mathematical Society}
}
@inproceedings{kakutani:dirichlet,
  title     = {Markov processes and the {Dirichlet} problem},
  author    = {Kakutani, Shizuo},
  booktitle = {Proceedings of the Japan Academy},
  volume    = {21},
  pages     = {227--233},
  year      = {1945}
}
@inproceedings{weinberger:graph,
  title     = {Graph {Laplacian} regularization for large-scale semidefinite programming},
  author    = {Weinberger, Kilian Q and Sha, Fei and Zhu, Qihui and Saul, Lawrence K},
  booktitle = {Advances in neural information processing systems},
  pages     = {1489--1496},
  year      = {2007}
}
@article{fokkema:jacobi,
  title     = {Jacobi--{Davidson} style {QR} and {QZ} algorithms for the reduction of matrix pencils},
  author    = {Fokkema, Diederik R and Sleijpen, Gerard LG and Van der Vorst, Henk A},
  journal   = {SIAM Journal on Scientific Computing},
  volume    = {20},
  number    = {1},
  pages     = {94--125},
  year      = {1998},
  publisher = {SIAM}
}
@inproceedings{desilva:global,
  title     = {Global versus local methods in nonlinear dimensionality reduction},
  author    = {De Silva, Vin and Tenenbaum, Joshua B},
  booktitle = {Advances in neural information processing systems},
  volume    = {15},
  pages     = {705--712},
  year      = {2002}
}
@article{hinton:reducing,
  title     = {Reducing the dimensionality of data with neural networks},
  author    = {Hinton, Geoffrey E and Salakhutdinov, Ruslan R},
  journal   = {Science},
  volume    = {313},
  number    = {5786},
  pages     = {504--507},
  year      = {2006},
  publisher = {American Association for the Advancement of Science}
}
@article{meytlis:face,
  author    = {Meytlis, Marsha and Sirovich, Lawrence},
  title     = {On the dimensionality of face space},
  journal   = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  volume    = {29},
  number    = {7},
  pages     = {1262--1267},
  year      = {2007},
  publisher = {IEEE}
}
@book{bengio:learning,
  title     = {Learning deep architectures for {AI}},
  author    = {Bengio, Yoshua},
  year      = {2009},
  publisher = {Now Publishers Inc}
}
@article{vandermaaten:comparison,
  title   = {Dimensionality reduction: A comparative review},
  author  = {Van Der Maaten, Laurens and Postma, Eric and Van den Herik, Jaap and others},
  journal = {Journal of Machine Learning Research},
  volume  = {10},
  number  = {66-71},
  pages   = {13},
  year    = {2009},
  internal-note = {NOTE(review): volume/number/pages look like a scholar auto-export artifact; this work is also circulated as Tilburg University Technical Report TiCC-TR 2009-005 -- verify against the original source}
}
@inproceedings{lee:robust,
  author    = {Lee, John Aldo and Lendasse, Amaury and Donckers, Nicolas and Verleysen, Michel},
  title     = {A robust nonlinear projection method},
  booktitle = {Proceedings of the 8th European Symposium on Artificial Neural Networks},
  pages     = {13--20},
  year      = {2000}
}
@book{mardia:multivariate,
  title     = {Multivariate Analysis},
  author    = {Mardia, Kanti V. and Kent, John T. and Bibby, John M.},
  year      = {1979},
  publisher = {Academic Press},
  address   = {London}
}
@article{williams:connection,
  title     = {On a connection between kernel {PCA} and metric multidimensional scaling},
  author    = {Williams, Christopher KI},
  journal   = {Machine Learning},
  volume    = {46},
  number    = {1},
  pages     = {11--19},
  year      = {2002},
  publisher = {Springer}
}
@article{grady:random,
  author    = {Grady, Leo},
  title     = {Random walks for image segmentation},
  journal   = {IEEE transactions on pattern analysis and machine intelligence},
  volume    = {28},
  number    = {11},
  pages     = {1768--1783},
  year      = {2006},
  publisher = {IEEE}
}
@inproceedings{zhu:semi,
  title     = {Semi-supervised learning using {Gaussian} fields and harmonic functions},
  author    = {Zhu, Xiaojin and Ghahramani, Zoubin and Lafferty, John D},
  booktitle = {Proceedings of the 20th International Conference on Machine Learning ({ICML}-03)},
  pages     = {912--919},
  year      = {2003}
}
@article{nadler:diffusion,
  title   = {Diffusion maps, spectral clustering, and the reaction coordinates of dynamical systems},
  author  = {Nadler, Boaz and Lafon, Stephane and Coifman, Ronald R. and Kevrekidis, Ioannis G.},
  journal = {Applied and Computational Harmonic Analysis: Special Issue on Diffusion Maps and Wavelets},
  volume  = {21},
  pages   = {113--127},
  year    = {2006}
}
@article{jaakola:partially,
  title     = {Partially labeled classification with {Markov} random walks},
  author    = {Szummer, Martin and Jaakkola, Tommi},
  journal   = {Advances in neural information processing systems},
  volume    = {14},
  pages     = {945--952},
  year      = {2002},
  publisher = {MIT Press Cambridge, MA},
  internal-note = {NOTE(review): citation key spells the author "jaakola" (one k); key kept unchanged because documents may already cite it}
}
@article{lee:nonlinear2005,
  author    = {Lee, John Aldo and Verleysen, Michel},
  title     = {Nonlinear dimensionality reduction of data manifolds with essential loops},
  journal   = {Neurocomputing},
  volume    = {67},
  pages     = {29--53},
  year      = {2005},
  publisher = {Elsevier}
}
@article{lafon:diffusion,
  author    = {Lafon, Stephane and Lee, Ann B},
  title     = {Diffusion maps and coarse-graining: A unified framework for dimensionality reduction, graph partitioning, and data set parameterization},
  journal   = {IEEE transactions on pattern analysis and machine intelligence},
  volume    = {28},
  number    = {9},
  pages     = {1393--1403},
  year      = {2006},
  publisher = {IEEE}
}
@techreport{nene:coil20,
  author      = {Nene, Sameer A and Nayar, Shree K and Murase, Hiroshi},
  title       = {Columbia Object Image Library ({COIL-20})},
  institution = {Columbia University},
  year        = {1996},
  number      = {CUCS-005-96}
}
@article{jacobs:rates,
  author    = {Jacobs, Robert A},
  title     = {Increased rates of convergence through learning rate adaptation},
  journal   = {Neural networks},
  volume    = {1},
  number    = {4},
  pages     = {295--307},
  year      = {1988},
  publisher = {Elsevier}
}
@article{battista:algorithms,
  title   = {Algorithms for drawing graphs: an annotated bibliography},
  journal = {Computational Geometry},
  volume  = {4},
  number  = {5},
  pages   = {235--282},
  year    = {1994},
  issn    = {0925-7721},
  doi     = {10.1016/0925-7721(94)00014-X},
  url     = {https://www.sciencedirect.com/science/article/pii/092577219400014X},
  author  = {Di Battista, Giuseppe and Eades, Peter and Tamassia, Roberto and Tollis, Ioannis G},
  internal-note = {NOTE(review): duplicate of entry dibattista:algorithms in this file -- consolidate citations onto one key},
  abstract = {Several data presentation problems involve drawing graphs so
that they are easy to read and understand. Examples include circuit
schematics and software engineering diagrams. In this paper we present a
bibliographic survey on algorithms whose goal is to produce aesthetically
pleasing drawings of graphs. Research on this topic is spread over the
broad spectrum of Computer Science. This bibliography constitutes an
attempt to encompass both theoretical and application oriented papers from
disparate areas.}
}
@inproceedings{cook:visualizing,
  title        = {Visualizing similarity data with a mixture of maps},
  author       = {Cook, James and Sutskever, Ilya and Mnih, Andriy and Hinton, Geoffrey},
  booktitle    = {Proceedings of the 11th International Conference on Artificial Intelligence and Statistics},
  pages        = {67--74},
  year         = {2007},
  volume       = {2},
  organization = {PMLR}
}
@article{chernoff:use,
  issn      = {01621459},
  url       = {http://www.jstor.org/stable/2284077},
  abstract  = {A novel method of representing multivariate data is presented. Each point in k-dimensional space, k {$\leq$} 18, is represented by a cartoon of a face whose features, such as length of nose and curvature of mouth, correspond to components of the point. Thus every multivariate observation is visualized as a computer-drawn face. This presentation makes it easy for the human mind to grasp many of the essential regularities and irregularities present in the data. Other graphical representations are described briefly.},
  author    = {Chernoff, Herman},
  journal   = {Journal of the American Statistical Association},
  number    = {342},
  pages     = {361--368},
  publisher = {[American Statistical Association, Taylor \& Francis, Ltd.]},
  title     = {The Use of Faces to Represent Points in {K}-Dimensional Space Graphically},
  volume    = {68},
  year      = {1973}
}
@inproceedings{hinton:stochastic,
  author    = {Hinton, Geoffrey E and Roweis, Sam},
  booktitle = {Advances in Neural Information Processing Systems},
  editor    = {S. Becker and S. Thrun and K. Obermayer},
  pages     = {857--864},
  publisher = {MIT Press},
  title     = {Stochastic Neighbor Embedding},
  url       = {https://proceedings.neurips.cc/paper/2002/file/6150ccc6069bea6b5716254057a194ef-Paper.pdf},
  volume    = {15},
  year      = {2003},
  internal-note = {NOTE(review): original entry had an empty pages field; 857--864 supplied from the NIPS 15 proceedings -- confirm}
}
@article{ferreira:visual,
  author  = {Ferreira de Oliveira, M.C. and Levkowitz, H.},
  journal = {IEEE Transactions on Visualization and Computer Graphics},
  title   = {From visual data exploration to visual data mining: a survey},
  year    = {2003},
  volume  = {9},
  number  = {3},
  pages   = {378--394},
  doi     = {10.1109/TVCG.2003.1207445}
}
@book{lee:nonlinear,
  author    = {Lee, John A. and Verleysen, Michel},
  title     = {Nonlinear Dimensionality Reduction},
  year      = {2007},
  isbn      = {0387393501},
  publisher = {Springer Publishing Company, Incorporated},
  edition   = {First},
  abstract  = {Methods of dimensionality reduction provide a way to
understand and visualize the structure of complex data sets. Traditional
methods like principal component analysis and classical metric
multidimensional scaling suffer from being based on linear models. Until
recently, very few methods were able to reduce the data dimensionality in
a nonlinear way. However, since the late nineties, many new methods have
been developed and nonlinear dimensionality reduction, also called
manifold learning, has become a hot topic. New advances that account for
this rapid growth are, e.g. the use of graphs to represent the manifold
topology, and the use of new metrics like the geodesic distance. In
addition, new optimization schemes, based on kernel techniques and
spectral decomposition, have lead to spectral embedding, which encompasses
many of the recently developed methods. This book describes existing and
advanced methods to reduce the dimensionality of numerical databases. For
each method, the description starts from intuitive ideas, develops the
necessary mathematical details, and ends by outlining the algorithmic
implementation. Methods are compared with each other with the help of
different illustrative examples. The purpose of the book is to summarize
clear facts and ideas about well-known methods as well as recent
developments in the topic of nonlinear dimensionality reduction. With this
goal in mind, methods are all described from a unifying point of view, in
order to highlight their respective strengths and shortcomings. The book
is primarily intended for statisticians, computer scientists and data
analysts. It is also accessible to other practitioners having a basic
background in statistics and/or computational learning, like psychologists
(in psychometry) and economists.}
}
@article{keim:designing,
  author     = {Keim, Daniel A.},
  title      = {Designing Pixel-Oriented Visualization Techniques: Theory and Applications},
  year       = {2000},
  issue_date = {January 2000},
  publisher  = {IEEE Educational Activities Department},
  address    = {USA},
  volume     = {6},
  number     = {1},
  issn       = {1077-2626},
  url        = {https://doi.org/10.1109/2945.841121},
  doi        = {10.1109/2945.841121},
  abstract   = {Visualization techniques are of increasing importance in exploring and analyzing large amounts of multidimensional information. One important class of visualization techniques which is particularly interesting for visualizing very large multidimensional data sets is the class of pixel-oriented techniques. The basic idea of pixel-oriented visualization techniques is to represent as many data objects as possible on the screen at the same time by mapping each data value to a pixel of the screen and arranging the pixels adequately. A number of different pixel-oriented visualization techniques have been proposed in recent years and it has been shown that the techniques are useful for visual data exploration in a number of different application contexts. In this paper, we discuss a number of issues which are of high importance in developing pixel-oriented visualization techniques. The major goal of this article is to provide a formal basis of pixel-oriented visualization techniques and show that the design decisions in developing them can be seen as solutions of well-defined optimization problems. This is true for the mapping of the data values to colors, the arrangement of pixels inside the subwindows, the shape of the subwindows, and the ordering of the dimension subwindows. The paper also discusses the design issues of special variants of pixel-oriented techniques for visualizing large spatial data sets. The optimization functions for the mentioned design decisions are important for the effectiveness of the resulting visualizations. We show this by evaluating the optimization functions and comparing the results to the visualizations obtained in a number of different application.},
  journal    = {IEEE Transactions on Visualization and Computer Graphics},
  month      = jan,
  pages      = {59--78},
  numpages   = {20},
  keywords   = {visual data exploration, visualizing multidimensional and multivariate data, Information visualization, visualizing large data sets, visual data mining.}
}
@article{hotelling:analysis,
  title   = {Analysis of a complex of statistical variables into principal components},
  author  = {Hotelling, Harold},
  journal = {Journal of Educational Psychology},
  year    = {1933},
  volume  = {24},
  pages   = {498--520}
}
@article{torgerson:multidimensional,
  author  = {Torgerson, Warren S.},
  title   = {Multidimensional scaling: {I}. Theory and method},
  journal = {Psychometrika},
  volume  = {17},
  pages   = {401--419},
  year    = {1952}
}
@inproceedings{street:nuclear,
  author       = {Street, W. Nick and Wolberg, W. H. and Mangasarian, O. L.},
  title        = {Nuclear feature extraction for breast tumor diagnosis},
  volume       = {1905},
  booktitle    = {Biomedical Image Processing and Biomedical Visualization},
  editor       = {Raj S. Acharya and Dmitry B. Goldgof},
  organization = {International Society for Optics and Photonics},
  publisher    = {SPIE},
  pages        = {861--870},
  year         = {1993},
  doi          = {10.1117/12.148698},
  url          = {https://doi.org/10.1117/12.148698}
}
@article{sammon:nonlinear,
  author  = {Sammon, John W.},
  journal = {IEEE Transactions on Computers},
  title   = {A Nonlinear Mapping for Data Structure Analysis},
  year    = {1969},
  volume  = {C-18},
  number  = {5},
  pages   = {401--409},
  doi     = {10.1109/T-C.1969.222678}
}
@article{demartines:curvilinear,
  author  = {Demartines, Pierre and Herault, Jeanny},
  journal = {IEEE Transactions on Neural Networks},
  title   = {Curvilinear component analysis: a self-organizing neural network for nonlinear mapping of data sets},
  year    = {1997},
  volume  = {8},
  number  = {1},
  pages   = {148--154},
  doi     = {10.1109/72.554199}
}
@article{tenenbaum:global,
  author   = {Tenenbaum, Joshua B. and de Silva, Vin and Langford, John C.},
  title    = {A Global Geometric Framework for Nonlinear Dimensionality Reduction},
  journal  = {Science},
  volume   = {290},
  number   = {5500},
  pages    = {2319--2323},
  year     = {2000},
  abstract = {Describes an approach to solving dimensionality reduction problems that uses easily measured local metric information to learn the underlying global geometry of a data set. Capability of the approach to discover the nonlinear degrees of freedom that underlie complex natural observations; Difference of the approach from previous algorithms for nonlinear dimensionality reduction.}
}
@inproceedings{weinberger:learning,
  author    = {Weinberger, Kilian Q. and Sha, Fei and Saul, Lawrence K.},
  title     = {Learning a Kernel Matrix for Nonlinear Dimensionality Reduction},
  booktitle = {Proceedings of the Twenty-First International Conference on Machine Learning},
  series    = {ICML '04},
  pages     = {106},
  year      = {2004},
  isbn      = {1581138385},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {Banff, Alberta, Canada},
  url       = {https://doi.org/10.1145/1015330.1015345},
  doi       = {10.1145/1015330.1015345},
  abstract  = {We investigate how to learn a kernel matrix for high dimensional data that lies on or near a low dimensional manifold. Noting that the kernel matrix implicitly maps the data into a nonlinear feature space, we show how to discover a mapping that "unfolds" the underlying manifold from which the data was sampled. The kernel matrix is constructed by maximizing the variance in feature space subject to local constraints that preserve the angles and distances between nearest neighbors. The main optimization involves an instance of semidefinite programming---a fundamentally different computation than previous algorithms for manifold learning, such as Isomap and locally linear embedding. The optimized kernels perform better than polynomial and Gaussian kernels for problems in manifold learning, but worse for problems in large margin classification. We explain these results in terms of the geometric properties of different kernels and comment on various interpretations of other manifold learning algorithms as kernel methods.}
}
@article{roweis:nonlinear,
  author   = {Roweis, Sam T. and Saul, Lawrence K.},
  title    = {Nonlinear Dimensionality Reduction by Locally Linear Embedding},
  journal  = {Science},
  volume   = {290},
  number   = {5500},
  pages    = {2323--2326},
  year     = {2000},
  doi      = {10.1126/science.290.5500.2323},
  url      = {http://www.sciencemag.org/cgi/content/abstract/290/5500/2323},
  abstract = {Many areas of science depend on exploratory data analysis and visualization. The need to analyze large amounts of multivariate data raises the fundamental problem of dimensionality reduction: how to discover compact representations of high-dimensional data. Here, we introduce locally linear embedding (LLE), an unsupervised learning algorithm that computes low-dimensional, neighbor-hood-preserving embeddings of high-dimensional inputs. Unlike clustering methods for local dimensionality reduction, LLE maps its inputs into a single global coordinate system of lower dimensionality, and its optimizations do not involve local minima. By exploiting the local symmetries of linear reconstructions, LLE is able to learn the global structure of nonlinear manifolds, such as those generated by images of faces or documents of text.}
}
@inproceedings{belkin:laplacian,
  author    = {Belkin, Mikhail and Niyogi, Partha},
  title     = {Laplacian Eigenmaps and Spectral Techniques for Embedding and Clustering},
  year      = {2001},
  publisher = {MIT Press},
  address   = {Cambridge, MA, USA},
  abstract  = {Drawing on the correspondence between the graph Laplacian, the
Laplace-Beltrami operator on a manifold, and the connections to the heat
equation, we propose a geometrically motivated algorithm for constructing
a representation for data sampled from a low dimensional manifold embedded
in a higher dimensional space. The algorithm provides a computationally
efficient approach to nonlinear dimensionality reduction that has locality
preserving properties and a natural connection to clustering. Several
applications are considered.},
  booktitle = {Proceedings of the 14th International Conference on Neural Information Processing Systems: Natural and Synthetic},
  pages     = {585--591},
  numpages  = {7},
  location  = {Vancouver, British Columbia, Canada},
  series    = {Advances in neural information processing systems}
}
@inproceedings{song:colored,
  author    = {Song, Le and Smola, Alexander J and Borgwardt, Karsten M and Gretton, Arthur},
  booktitle = {Advances in Neural Information Processing Systems},
  editor    = {J. Platt and D. Koller and Y. Singer and S. Roweis},
  publisher = {Curran Associates, Inc.},
  title     = {Colored Maximum Variance Unfolding},
  url       = {https://proceedings.neurips.cc/paper/2007/file/55a7cf9c71f1c9c495413f934dd1a158-Paper.pdf},
  volume    = {20},
  year      = {2008}
}