This example showcases using K3D to visualize data. Additional requirements: scikit-learn
# Ensure scikit-learn is available; install it on the fly when running in a
# notebook (``%pip`` is IPython magic, not plain Python — this cell only runs
# under an IPython/Jupyter kernel).
try:
    import sklearn
except ImportError:
    # NOTE(review): after the install, ``sklearn`` is still unbound in this
    # session — the import is not retried here. Presumably the following
    # ``from sklearn...`` imports succeed on a fresh kernel; verify.
    %pip install scikit-learn
from sklearn.datasets import load_iris
from sklearn.manifold import TSNE
from k3d import plot, points, nice_colors, text2d
# Load the iris dataset once at module level; every block below reads it.
# (Removed a duplicate ``from k3d import ...`` line that exactly repeated the
# import already performed above.)
iris = load_iris()
import numpy as np
def legend(p, axes):
    r"""Build a k3d plot containing *p* plus a text legend of the iris classes.

    Parameters
    ----------
    p : k3d drawable (e.g. the result of ``points``)
    axes : array of axis names; spaces in each name are replaced with the
        LaTeX spacing command ``\:`` because k3d renders axis labels as LaTeX.

    Returns
    -------
    The assembled k3d plot object.
    """
    # Fix: r'\:' avoids the invalid-escape-sequence warning the original
    # '\:' literal triggers on modern Python; the runtime value is identical
    # (backslash + colon).
    k3dplot = plot(axes=[r'\:'.join(a.split(' ')) for a in axes.tolist()])
    k3dplot += p
    # One 2-D text label per class, colored to match the scatter points,
    # stacked vertically at the left edge of the canvas.
    for i, name in enumerate(iris.target_names):
        k3dplot += text2d(text=name, color=nice_colors[i], position=(0, i / 10))
    return k3dplot
def point_size(data, resolution=20.):
    """Return a marker size scaled to the widest axis span of *data*."""
    per_axis_span = data.max(axis=0) - data.min(axis=0)
    return per_axis_span.max() / resolution
# Keyword arguments shared by every ``points(...)`` call below:
# a marker size scaled to the data extent, and one color per sample,
# chosen by class label.
common = {
    'point_size': point_size(iris.data),
    'colors': [nice_colors[label] for label in iris.target],
}
def iris_omit(column_index):
    """Drop one feature column from the iris data and report the mapping.

    Rolls the feature columns by *column_index*, discards the first
    (the "omitted" feature), and prints which remaining feature lands on
    each of the x/y/z axes.

    Returns a ``(data, names)`` tuple: the remaining three columns as
    ``float32`` and their feature names.
    """
    rolled_names = np.roll(iris.feature_names, column_index)
    print('omitting', rolled_names[0])
    for axis_label, feature_name in zip('xyz', rolled_names[1:]):
        print(axis_label, 'is', feature_name)
    rolled_data = np.roll(iris.data, column_index, axis=1)
    return (rolled_data[:, 1:].astype(np.float32), rolled_names[1:])
# Four projections of the 4-D iris data into 3-D: each call omits one
# feature column and renders the remaining three as a colored scatter.
# Kept as four explicit cell-tail expressions (not a loop) so each plot
# displays in a notebook.
p, axes = iris_omit(0)
legend(points(p, **common), axes)
p, axes = iris_omit(1)
legend(points(p, **common), axes)
p, axes = iris_omit(2)
legend(points(p, **common), axes)
p, axes = iris_omit(3)
legend(points(p, **common), axes)
t-SNE is stochastic, so the embedding can look a little different on each run.
NOTE: this is unsupervised learning — t-SNE never sees the labels. And still, it clusters the classes.
# Embed the 4-D iris measurements into 3-D with t-SNE.
# NOTE(review): ``n_iter`` was renamed ``max_iter`` in scikit-learn 1.5 and
# removed in 1.7 — switch the keyword if running a newer sklearn.
tsne = TSNE(n_components=3, verbose=1, perplexity=40, n_iter=3000)
tsne_results = tsne.fit_transform(iris.data).astype(np.float32)
tsne_results[:5]  # notebook preview of the first five embedded points
common['point_size'] = point_size(tsne_results)
# Bug fix: the original reused ``axes`` left over from the last
# ``iris_omit`` call, mislabeling the t-SNE plot with iris feature names.
# The embedding axes are arbitrary, so label them as t-SNE components.
legend(points(tsne_results, **common), np.array(['tSNE-1', 'tSNE-2', 'tSNE-3']))