Commit f034b25

update doc for 0.7.0 (#355)
1 parent: 8f8b1f7 · commit: f034b25

File tree: 183 files changed (+14371, -29773 lines)

_downloads/4f011433714ef22e0c66be75bbfd08e3/plot_sandwich.ipynb

Lines changed: 6 additions & 10 deletions
@@ -1,21 +1,17 @@
 {
 "cells": [
 {
-"cell_type": "code",
-"execution_count": null,
-"metadata": {
-"collapsed": false
-},
-"outputs": [],
+"cell_type": "markdown",
+"metadata": {},
 "source": [
-"%matplotlib inline"
+"\n# Sandwich demo\n\nSandwich demo based on code from http://nbviewer.ipython.org/6576096\n"
 ]
 },
 {
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"\nSandwich demo\n=============\n\nSandwich demo based on code from http://nbviewer.ipython.org/6576096\n"
+"<div class=\"alert alert-info\"><h4>Note</h4><p>In order to show the charts of the examples you need a graphical\n ``matplotlib`` backend installed. For instance, use ``pip install pyqt5``\n to get Qt graphical interface or use your favorite one.</p></div>\n\n"
 ]
 },
 {
@@ -26,7 +22,7 @@
 },
 "outputs": [],
 "source": [
-"import numpy as np\nfrom matplotlib import pyplot as plt\nfrom sklearn.metrics import pairwise_distances\nfrom sklearn.neighbors import NearestNeighbors\n\nfrom metric_learn import (LMNN, ITML_Supervised, LSML_Supervised,\n SDML_Supervised)\n\n\ndef sandwich_demo():\n x, y = sandwich_data()\n knn = nearest_neighbors(x, k=2)\n ax = plt.subplot(3, 1, 1) # take the whole top row\n plot_sandwich_data(x, y, ax)\n plot_neighborhood_graph(x, knn, y, ax)\n ax.set_title('input space')\n ax.set_aspect('equal')\n ax.set_xticks([])\n ax.set_yticks([])\n\n mls = [\n LMNN(),\n ITML_Supervised(num_constraints=200),\n SDML_Supervised(num_constraints=200, balance_param=0.001),\n LSML_Supervised(num_constraints=200),\n ]\n\n for ax_num, ml in enumerate(mls, start=3):\n ml.fit(x, y)\n tx = ml.transform(x)\n ml_knn = nearest_neighbors(tx, k=2)\n ax = plt.subplot(3, 2, ax_num)\n plot_sandwich_data(tx, y, axis=ax)\n plot_neighborhood_graph(tx, ml_knn, y, axis=ax)\n ax.set_title(ml.__class__.__name__)\n ax.set_xticks([])\n ax.set_yticks([])\n plt.show()\n\n\n# TODO: use this somewhere\ndef visualize_class_separation(X, labels):\n _, (ax1, ax2) = plt.subplots(ncols=2)\n label_order = np.argsort(labels)\n ax1.imshow(pairwise_distances(X[label_order]), interpolation='nearest')\n ax2.imshow(pairwise_distances(labels[label_order, None]),\n interpolation='nearest')\n\n\ndef nearest_neighbors(X, k=5):\n knn = NearestNeighbors(n_neighbors=k)\n knn.fit(X)\n return knn.kneighbors(X, return_distance=False)\n\n\ndef sandwich_data():\n # number of distinct classes\n num_classes = 6\n # number of points per class\n num_points = 9\n # distance between layers, the points of each class are in a layer\n dist = 0.7\n\n data = np.zeros((num_classes, num_points, 2), dtype=float)\n labels = np.zeros((num_classes, num_points), dtype=int)\n\n x_centers = np.arange(num_points, dtype=float) - num_points / 2\n y_centers = dist * (np.arange(num_classes, dtype=float) - num_classes / 2)\n for i, yc in enumerate(y_centers):\n for k, xc in enumerate(x_centers):\n data[i, k, 0] = np.random.normal(xc, 0.1)\n data[i, k, 1] = np.random.normal(yc, 0.1)\n labels[i, :] = i\n return data.reshape((-1, 2)), labels.ravel()\n\n\ndef plot_sandwich_data(x, y, axis=plt, colors='rbgmky'):\n for idx, val in enumerate(np.unique(y)):\n xi = x[y == val]\n axis.scatter(*xi.T, s=50, facecolors='none', edgecolors=colors[idx])\n\n\ndef plot_neighborhood_graph(x, nn, y, axis=plt, colors='rbgmky'):\n for i, a in enumerate(x):\n b = x[nn[i, 1]]\n axis.plot((a[0], b[0]), (a[1], b[1]), colors[y[i]])\n\n\nif __name__ == '__main__':\n sandwich_demo()"
+"import numpy as np\nfrom matplotlib import pyplot as plt\nfrom sklearn.metrics import pairwise_distances\nfrom sklearn.neighbors import NearestNeighbors\n\nfrom metric_learn import (LMNN, ITML_Supervised, LSML_Supervised,\n SDML_Supervised)\n\n\ndef sandwich_demo():\n x, y = sandwich_data()\n knn = nearest_neighbors(x, k=2)\n ax = plt.subplot(3, 1, 1) # take the whole top row\n plot_sandwich_data(x, y, ax)\n plot_neighborhood_graph(x, knn, y, ax)\n ax.set_title('input space')\n ax.set_aspect('equal')\n ax.set_xticks([])\n ax.set_yticks([])\n\n mls = [\n LMNN(),\n ITML_Supervised(n_constraints=200),\n SDML_Supervised(n_constraints=200, balance_param=0.001),\n LSML_Supervised(n_constraints=200),\n ]\n\n for ax_num, ml in enumerate(mls, start=3):\n ml.fit(x, y)\n tx = ml.transform(x)\n ml_knn = nearest_neighbors(tx, k=2)\n ax = plt.subplot(3, 2, ax_num)\n plot_sandwich_data(tx, y, axis=ax)\n plot_neighborhood_graph(tx, ml_knn, y, axis=ax)\n ax.set_title(ml.__class__.__name__)\n ax.set_xticks([])\n ax.set_yticks([])\n plt.show()\n\n\n# TODO: use this somewhere\ndef visualize_class_separation(X, labels):\n _, (ax1, ax2) = plt.subplots(ncols=2)\n label_order = np.argsort(labels)\n ax1.imshow(pairwise_distances(X[label_order]), interpolation='nearest')\n ax2.imshow(pairwise_distances(labels[label_order, None]),\n interpolation='nearest')\n\n\ndef nearest_neighbors(X, k=5):\n knn = NearestNeighbors(n_neighbors=k)\n knn.fit(X)\n return knn.kneighbors(X, return_distance=False)\n\n\ndef sandwich_data():\n # number of distinct classes\n num_classes = 6\n # number of points per class\n num_points = 9\n # distance between layers, the points of each class are in a layer\n dist = 0.7\n\n data = np.zeros((num_classes, num_points, 2), dtype=float)\n labels = np.zeros((num_classes, num_points), dtype=int)\n\n x_centers = np.arange(num_points, dtype=float) - num_points / 2\n y_centers = dist * (np.arange(num_classes, dtype=float) - num_classes / 2)\n for i, yc in enumerate(y_centers):\n for k, xc in enumerate(x_centers):\n data[i, k, 0] = np.random.normal(xc, 0.1)\n data[i, k, 1] = np.random.normal(yc, 0.1)\n labels[i, :] = i\n return data.reshape((-1, 2)), labels.ravel()\n\n\ndef plot_sandwich_data(x, y, axis=plt, colors='rbgmky'):\n for idx, val in enumerate(np.unique(y)):\n xi = x[y == val]\n axis.scatter(*xi.T, s=50, facecolors='none', edgecolors=colors[idx])\n\n\ndef plot_neighborhood_graph(x, nn, y, axis=plt, colors='rbgmky'):\n for i, a in enumerate(x):\n b = x[nn[i, 1]]\n axis.plot((a[0], b[0]), (a[1], b[1]), colors[y[i]])\n\n\nif __name__ == '__main__':\n sandwich_demo()"
 ]
 }
 ],
@@ -46,7 +42,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.8.3"
+"version": "3.11.6"
 }
 },
 "nbformat": 4,

_downloads/a6bf7d7136399675f28d77d001faa48f/plot_sandwich.py

Lines changed: 10 additions & 3 deletions
@@ -6,6 +6,13 @@
 Sandwich demo based on code from http://nbviewer.ipython.org/6576096
 """
 
+######################################################################
+# .. note::
+#
+#   In order to show the charts of the examples you need a graphical
+#   ``matplotlib`` backend installed. For instance, use ``pip install pyqt5``
+#   to get Qt graphical interface or use your favorite one.
+
 import numpy as np
 from matplotlib import pyplot as plt
 from sklearn.metrics import pairwise_distances
@@ -28,9 +35,9 @@ def sandwich_demo():
 
   mls = [
       LMNN(),
-      ITML_Supervised(num_constraints=200),
-      SDML_Supervised(num_constraints=200, balance_param=0.001),
-      LSML_Supervised(num_constraints=200),
+      ITML_Supervised(n_constraints=200),
+      SDML_Supervised(n_constraints=200, balance_param=0.001),
+      LSML_Supervised(n_constraints=200),
   ]
 
   for ax_num, ml in enumerate(mls, start=3):
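
The keyword rename shown in this hunk (``num_constraints`` becomes ``n_constraints``) applies to all three supervised learners touched here. A minimal sketch of the 0.7.0 call, not taken from the commit; the synthetic dataset and constraint count below are illustrative.

from sklearn.datasets import make_classification
from metric_learn import ITML_Supervised

# illustrative labeled data; any (X, y) classification dataset works
X, y = make_classification(n_samples=100, n_features=5, random_state=42)

# metric-learn < 0.7.0 used: ITML_Supervised(num_constraints=200)
itml = ITML_Supervised(n_constraints=200)  # 0.7.0 keyword, as in the added lines above
itml.fit(X, y)
X_itml = itml.transform(X)  # data mapped into the learned metric space
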

_downloads/bd873dc20743b4004d594a33f6abf089/plot_metric_learning_examples.py

Lines changed: 9 additions & 5 deletions
@@ -15,7 +15,11 @@
 ######################################################################
 # Imports
 # ^^^^^^^
+# .. note::
 #
+#   In order to show the charts of the examples you need a graphical
+#   ``matplotlib`` backend installed. For instance, use ``pip install pyqt5``
+#   to get Qt graphical interface or use your favorite one.
 
 from sklearn.manifold import TSNE
 
@@ -35,9 +39,9 @@
 # We will be using a synthetic dataset to illustrate the plotting,
 # using the function `sklearn.datasets.make_classification` from
 # scikit-learn. The dataset will contain:
-# - 100 points in 3 classes with 2 clusters per class
-# - 5 features, among which 3 are informative (correlated with the class
-#   labels) and two are random noise with large magnitude
+# - 100 points in 3 classes with 2 clusters per class
+# - 5 features, among which 3 are informative (correlated with the class
+#   labels) and two are random noise with large magnitude
 
 X, y = make_classification(n_samples=100, n_classes=3, n_clusters_per_class=2,
                            n_informative=3, class_sep=4., n_features=5,
@@ -139,7 +143,7 @@ def plot_tsne(X, y, colormap=plt.cm.Paired):
 #
 
 # setting up LMNN
-lmnn = metric_learn.LMNN(k=5, learn_rate=1e-6)
+lmnn = metric_learn.LMNN(n_neighbors=5, learn_rate=1e-6)
 
 # fit the data!
 lmnn.fit(X, y)
@@ -310,7 +314,7 @@ def plot_tsne(X, y, colormap=plt.cm.Paired):
 # - See more in the documentation of the class :py:class:`RCA
 #   <metric_learn.RCA>`
 
-rca = metric_learn.RCA_Supervised(num_chunks=30, chunk_size=2)
+rca = metric_learn.RCA_Supervised(n_chunks=30, chunk_size=2)
 X_rca = rca.fit_transform(X, y)
 
 plot_tsne(X_rca, y)
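
Both renames in this file follow the same 0.7.0 convention: ``LMNN`` now takes ``n_neighbors`` (previously ``k``) and ``RCA_Supervised`` takes ``n_chunks`` (previously ``num_chunks``). A short, self-contained sketch of the updated calls, not part of the commit; it reuses the ``make_classification`` arguments shown in this diff, with a ``random_state`` added here only for reproducibility.

import metric_learn
from sklearn.datasets import make_classification

# synthetic dataset of the same kind as in the example script (illustrative)
X, y = make_classification(n_samples=100, n_classes=3, n_clusters_per_class=2,
                           n_informative=3, class_sep=4., n_features=5,
                           random_state=42)

# 0.7.0 keyword: n_neighbors (the removed line used k=5)
lmnn = metric_learn.LMNN(n_neighbors=5, learn_rate=1e-6)
X_lmnn = lmnn.fit_transform(X, y)

# 0.7.0 keyword: n_chunks (the removed line used num_chunks=30)
rca = metric_learn.RCA_Supervised(n_chunks=30, chunk_size=2)
X_rca = rca.fit_transform(X, y)
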
