In [ ]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

from sklearn.preprocessing import normalize, robust_scale, minmax_scale
from sklearn.metrics.pairwise import euclidean_distances
from planet4.dbscan import DBScanner
from planet4 import io

In [ ]:
db = io.DBManager()

In [ ]:
obsids = db.image_ids

In [ ]:
data = db.get_image_name_markings(obsids[0])

In [ ]:
%matplotlib inline

In [ ]:
data.y.plot.kde()

In [ ]:
xycoords = {'x':[11, 12, 30],
            'y':[111, 112, 140],
            'z':[330, 333, 336],
           }
df = pd.DataFrame(xycoords)
df

In [ ]:
minmax_scale?
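
In [ ]:
# Quick look (sketch): minmax_scale rescales each column of df
# independently to the [0, 1] range.
minmax_scale(df)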

In [ ]:
euclidean_distances(df)

In [ ]:
euclidean_distances(normalize(df, axis=0))

In [ ]:
euclidean_distances(normalize(df, axis=1))

In [ ]:
normalize?
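
In [ ]:
# For comparison (sketch): normalize scales rows or columns to unit norm,
# while minmax_scale maps each column to [0, 1], which is often the more
# natural preprocessing before computing euclidean distances.
euclidean_distances(minmax_scale(df))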

In [ ]:
dbscanner = DBScanner(df.values, eps=10, min_samples=2)

In [ ]:
dbscanner.core_samples
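
In [ ]:
# Cross-check (sketch): running scikit-learn's DBSCAN directly on the same
# points with the same eps/min_samples should give core samples and labels
# comparable to what DBScanner reports above (assuming DBScanner wraps
# sklearn's DBSCAN).
from sklearn.cluster import DBSCAN
ref = DBSCAN(eps=10, min_samples=2).fit(df.values)
ref.labels_, ref.core_sample_indices_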

In [ ]:
%matplotlib inline

In [ ]:
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets import make_blobs
from sklearn.preprocessing import StandardScaler, scale


##############################################################################
# Generate sample data
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(n_samples=90, centers=centers, cluster_std=0.4,
                            random_state=0)

##############################################################################
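
In [ ]:
# Quick look at the generated 2-D blobs, colored by the true labels
# (sketch, for visual sanity checking only).
plt.scatter(X[:, 0], X[:, 1], c=labels_true, cmap='viridis', s=15)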

In [ ]:
angles, _ = make_blobs(n_features=1, n_samples=90, centers=[[0.2], [0.7]], cluster_std=0.05,
                       random_state=0)
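
In [ ]:
# Distribution of the synthetic 1-D "angles" feature (sketch): two narrow
# blobs around 0.2 and 0.7.
plt.hist(angles.ravel(), bins=30)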

In [ ]:
X = np.concatenate([X, angles], axis=1)

In [ ]:
X = StandardScaler().fit_transform(X)
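
In [ ]:
# Sanity check (sketch): after StandardScaler each column should have
# mean ~0 and standard deviation ~1.
X.mean(axis=0), X.std(axis=0)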

In [ ]:
# Plot the first spatial coordinate against the standardized angle column.
plt.scatter(X[:, 0], X[:, 2])

In [ ]:
X.shape

In [ ]:
# Compute DBSCAN
db = DBSCAN(eps=0.3, min_samples=10).fit(X)
core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
core_samples_mask[db.core_sample_indices_] = True
labels = db.labels_

# Number of clusters in labels, ignoring noise if present.
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)

print('Estimated number of clusters: %d' % n_clusters_)
# print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels))
# print("Completeness: %0.3f" % metrics.completeness_score(labels_true, labels))
# print("V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels))
# print("Adjusted Rand Index: %0.3f"
#       % metrics.adjusted_rand_score(labels_true, labels))
# print("Adjusted Mutual Information: %0.3f"
#       % metrics.adjusted_mutual_info_score(labels_true, labels))
print("Silhouette Coefficient: %0.3f"
      % metrics.silhouette_score(X, labels))

##############################################################################
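
In [ ]:
# Cluster sizes (sketch): label -1 counts the points DBSCAN flagged as noise.
np.unique(labels, return_counts=True)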

In [ ]:
# Plot result
import matplotlib.pyplot as plt

# Black is removed from the colormap and reserved for noise points.
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
    if k == -1:
        # Black used for noise.
        col = 'k'

    class_member_mask = (labels == k)

    xy = X[class_member_mask & core_samples_mask]
    plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
             markeredgecolor='k', markersize=14)

    xy = X[class_member_mask & ~core_samples_mask]
    plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
             markeredgecolor='k', markersize=6)

plt.title('Estimated number of clusters: %d' % n_clusters_)

In [ ]:
xy.shape

In [ ]:
x = xy[:, 0]
y = xy[:, 1]
z = xy[:, 2]

In [ ]:
from mpl_toolkits.mplot3d import Axes3D

# 3-D view of the DBSCAN result: large markers are core samples, small
# markers are border points; noise (label -1) is drawn in black.
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(111, projection='3d')
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
    if k == -1:
        # Black used for noise.
        col = 'k'

    class_member_mask = (labels == k)

    xy = X[class_member_mask & core_samples_mask]
    ax.scatter(xy[:, 0], xy[:, 1], xy[:, 2], marker='o', color=col,
               s=14)

    xy = X[class_member_mask & ~core_samples_mask]
    ax.scatter(xy[:, 0], xy[:, 1], xy[:, 2], marker='o', color=col,
               s=6)

In [ ]:
import hdbscan

In [ ]:
clusterer = hdbscan.HDBSCAN()
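
In [ ]:
# Check the available knobs before fitting; unlike DBSCAN there is no eps
# to tune, the main parameter is min_cluster_size.
hdbscan.HDBSCAN?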

In [ ]:
X.shape

In [ ]:
db = clusterer.fit(X)
core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
core_samples_mask[db.probabilities_ > 0.8] = True
labels = db.labels_

# Number of clusters in labels, ignoring noise if present.
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)

print('Estimated number of clusters: %d' % n_clusters_)
# print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels))
# print("Completeness: %0.3f" % metrics.completeness_score(labels_true, labels))
# print("V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels))
# print("Adjusted Rand Index: %0.3f"
#       % metrics.adjusted_rand_score(labels_true, labels))
# print("Adjusted Mutual Information: %0.3f"
#       % metrics.adjusted_mutual_info_score(labels_true, labels))
print("Silhouette Coefficient: %0.3f"
      % metrics.silhouette_score(X, labels))
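
In [ ]:
# Membership strengths (sketch): HDBSCAN's probabilities_ give a soft
# analogue of DBSCAN's core/border distinction, thresholded at 0.8 above.
plt.hist(db.probabilities_, bins=20)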

In [ ]:
from mpl_toolkits.mplot3d import Axes3D

# Same 3-D view for the HDBSCAN result; "core" here means a membership
# probability above the 0.8 threshold chosen above.
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(111, projection='3d')
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
    if k == -1:
        # Black used for noise.
        col = 'k'

    class_member_mask = (labels == k)

    xy = X[class_member_mask & core_samples_mask]
    ax.scatter(xy[:, 0], xy[:, 1], xy[:, 2], marker='o', color=col,
               s=14)

    xy = X[class_member_mask & ~core_samples_mask]
    ax.scatter(xy[:, 0], xy[:, 1], xy[:, 2], marker='o', color=col,
               s=6)
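
In [ ]:
# HDBSCAN also exposes its cluster hierarchy (sketch; needs hdbscan's
# optional plotting dependencies).
db.condensed_tree_.plot(select_clusters=True)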

In [ ]: