'''
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
sns.set()
# Toy dataset: ten 2-D points
X = np.array([[5, 3], [10, 15], [15, 12], [24, 10], [30, 45],
              [85, 70], [71, 80], [60, 78], [55, 52], [80, 91]])
print(X.shape)

kmeans = KMeans(n_clusters=6, random_state=4)
kmeans.fit(X)
y_kmeans = kmeans.predict(X)
print(y_kmeans)

centers = kmeans.cluster_centers_
print(len(centers))

# make_blobs produces a separate synthetic dataset; keep it in its own
# variables so it does not overwrite the X that was just clustered
X_blobs, y_true = make_blobs(n_samples=6, centers=6, cluster_std=0.4, random_state=4)

# Colour each point by its assigned cluster and overlay the centroids
plt.scatter(X[:, 0], X[:, 1], c=y_kmeans, s=50, cmap="summer")
plt.scatter(centers[:, 0], centers[:, 1], c="blue", s=100, alpha=0.9)
plt.show()
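
# --- Added sketch, not part of the original paste: one common way to pick
# n_clusters for data like X above is the inertia ("elbow") curve. The
# range 1..9 is an arbitrary assumption for this tiny dataset.
inertias = []
for k in range(1, 10):
    km = KMeans(n_clusters=k, random_state=4)
    km.fit(X)
    inertias.append(km.inertia_)  # within-cluster sum of squared distances
plt.plot(range(1, 10), inertias, marker="o")
plt.xlabel("number of clusters k")
plt.ylabel("inertia")
plt.show()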
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.cluster import KMeans
sns.set()

iris = load_iris()
x = iris.data
print(x.shape)

# Iris contains three species, so fit three clusters
kmeans = KMeans(n_clusters=3, random_state=4)
y_kmeans = kmeans.fit_predict(x)
print(kmeans.cluster_centers_.shape)

# Visualising the clusters on the first two features (sepal length vs sepal width);
# cluster ids are arbitrary, so the species names below are only nominal
plt.scatter(x[y_kmeans == 0, 0], x[y_kmeans == 0, 1], s=100, c='purple', label='setosa')
plt.scatter(x[y_kmeans == 1, 0], x[y_kmeans == 1, 1], s=100, c='orange', label='versicolour')
plt.scatter(x[y_kmeans == 2, 0], x[y_kmeans == 2, 1], s=100, c='green', label='virginica')

# Plotting the centroids of the clusters
plt.scatter(kmeans.cluster_centers_[:, 0], kmeans.cluster_centers_[:, 1], s=100, c='red', label='centroids')
plt.legend()
plt.show()
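
# --- Added sketch, not part of the original paste: since iris ships with
# ground-truth species labels, the k-means assignment above can be scored
# against them with the adjusted Rand index (1.0 = perfect agreement).
from sklearn.metrics import adjusted_rand_score
print("adjusted Rand index:", adjusted_rand_score(iris.target, y_kmeans))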
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import scipy.cluster.hierarchy as shc
from sklearn.datasets import load_iris
from sklearn.cluster import AgglomerativeClustering
sns.set()

iris = load_iris()
data = iris.data[:, 0:2]   # sepal length and sepal width only

# Dendrogram built from Ward linkage
plt.figure(figsize=(10, 7), dpi=180)
plt.title("IRIS DENDROGRAM")
dend = shc.dendrogram(shc.linkage(data, method='ward'))

# Ward linkage requires Euclidean distances (scikit-learn's default),
# so no explicit affinity/metric argument is needed
cluster = AgglomerativeClustering(n_clusters=5, linkage='ward')
cluster.fit_predict(data)

plt.figure(figsize=(10, 7))
plt.scatter(data[:, 0], data[:, 1], c=cluster.labels_, cmap='rainbow')
plt.show()
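
# --- Added sketch, not part of the original paste (assumes the same Ward
# linkage as above): scipy can cut the dendrogram into a flat clustering
# directly, which should broadly agree with the AgglomerativeClustering labels.
from scipy.cluster.hierarchy import fcluster
Z = shc.linkage(data, method='ward')
flat_labels = fcluster(Z, t=5, criterion='maxclust')  # at most 5 clusters
print(np.unique(flat_labels))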
'''