LLE Dimensionality Reduction: Code Implementation

Overview

This post implements LLE (Locally Linear Embedding) from scratch, compares its embeddings under several nearest-neighbor parameters with the implementation in sklearn, and applies the reduced data to softmax training.

  1. Algorithm implementation
#!/usr/bin/env python
# coding: utf-8
# LLE algorithm implementation
'''
LLE 2019.11.20
Author: luo
Reference:
Zhihua Zhou. Machine learning[M]. Tsinghua University Press, 2016
Implements LLE.
Summary: compares the embedding quality of LLE under different nearest-neighbor
parameters and benchmarks it against the LLE implementation in sklearn.
The reduced data is also applied to softmax training.
'''
import numpy as np
from scipy.spatial.distance import pdist, squareform
import matplotlib.pyplot as plt
'''
define get_k_maxtria function.
Input:
    D: numpy.ndarray,
        size: [num_sample, num_feature]
    k: int,
        the nearest neighbor parameter k
Return:
    k_idx: numpy.ndarray,
        size: [num_sample, k], the indices of the k nearest neighbors
'''
def get_k_maxtria(D, k):
    dist = pdist(D, 'euclidean')   # condensed pairwise distances
    dist = squareform(dist)        # convert to a square distance matrix
    m = dist.shape[0]
    k_idx = np.zeros([m, k])
    for i in range(m):
        # start from index 1 because the smallest distance is the point itself;
        # keep the indices of the k nearest neighbors
        topk = np.argsort(dist[i])[1:k + 1]
        k_idx[i] = topk
    return k_idx.astype(np.int32)
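A quick sanity check (not part of the original post): the neighbor indices can be compared against sklearn's NearestNeighbors. sklearn returns the query point itself as the first neighbor, so it is asked for k + 1 neighbors and the first column is dropped; the toy data below is illustrative.

import numpy as np
from sklearn.neighbors import NearestNeighbors
X_demo = np.random.RandomState(0).randn(50, 3)  # toy data: 50 samples, 3 features
k = 5
nn = NearestNeighbors(n_neighbors=k + 1).fit(X_demo)
_, sk_idx = nn.kneighbors(X_demo)
my_idx = get_k_maxtria(X_demo, k)
# the index sets should match (ordering may differ under distance ties)
assert np.array_equal(np.sort(sk_idx[:, 1:], axis=1), np.sort(my_idx, axis=1))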
'''
define get_w function.
Input:
    D: numpy.ndarray,
        size: [num_sample, num_feature]
    knear_idx: numpy.ndarray,
        size: [num_sample, k]
    k: int,
        the nearest neighbor parameter k
Return:
    w: numpy.ndarray,
        size: [num_sample, k], linear reconstruction coefficient matrix
'''
def get_w(D, knear_idx, k):
    m = D.shape[0]
    w = np.zeros([m, k])
    for i in range(m):
        Q_x = D[knear_idx[i]]                   # the k neighbors of x_i, shape (k, num_feature)
        xi = np.tile(D[i], (k, 1))              # repeat x_i so it can be subtracted row-wise
        C = np.dot((xi - Q_x), (xi - Q_x).T)    # local Gram matrix of the neighborhood
        # regularize C for numerical stability (C is singular when k > num_feature)
        C = C + np.eye(k) * (1e-3) * np.trace(C)
        C_inv = np.linalg.pinv(C)
        w[i, :] = np.sum(C_inv, axis=0) / np.sum(C_inv)
    return w
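Each row of w is the closed-form solution of minimizing ||x_i - sum_j w_ij x_j||^2 subject to sum_j w_ij = 1, namely w_i = C^{-1}1 / (1^T C^{-1} 1), so every row must sum to one. A minimal check, reusing the toy data from the sketch above:

w_demo = get_w(X_demo, get_k_maxtria(X_demo, 5), 5)
# each row must satisfy the LLE sum-to-one constraint
assert np.allclose(w_demo.sum(axis=1), 1.0)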
'''
define myLLE function.
Input:
    D: numpy.ndarray,
        size: [num_sample, num_feature], the sample set D
    k: int,
        default=5, the nearest neighbor parameter k
    d: int,
        default=2, the dimension after dimensionality reduction
Return:
    Z: numpy.ndarray,
        size: [num_sample, d], the projection of the sample set D in a low-dimensional space
'''
def myLLE(D, k=5, d=2):
    m = D.shape[0]
    knear_idx = get_k_maxtria(D, k)
    w = get_w(D, knear_idx, k)
    # scatter the k reconstruction weights of each sample into an (m, m) matrix
    W = np.zeros([m, m])
    for i in range(m):
        for j in range(k):
            idx = knear_idx[i, j]
            W[i, idx] = w[i, j]
    I = np.eye(m)  # (m, m) identity matrix
    M = np.dot((I - W).T, (I - W))
    # M is symmetric, so use eigh: it returns real eigenvalues in ascending order
    # (np.linalg.eig can return spurious complex values here); the columns of U
    # are the eigenvectors
    A, U = np.linalg.eigh(M)
    # skip the smallest eigenvalue (~0, the constant eigenvector) and take the next d
    top_A_idx = np.argsort(A)[1:d + 1]
    Z = U[:, top_A_idx]  # eigenvectors of the d smallest informative eigenvalues, (m, d)
    return Z
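A minimal usage sketch (not in the original post; the data here is illustrative):

rng = np.random.RandomState(0)
X_toy = rng.randn(100, 3)     # 100 samples in 3-D
Z_toy = myLLE(X_toy, k=10, d=2)
print(Z_toy.shape)            # expected: (100, 2)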
  2. Code testing
# Comparing embeddings on the S-curve dataset
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import datasets
from sklearn.manifold import LocallyLinearEmbedding
# Next line to silence pyflakes. This import is needed.
Axes3D
n_points = 500
X, color = datasets.make_s_curve(n_points, random_state=0)
# X, color = datasets.make_swiss_roll(n_points, noise=0.01)
# the commented-out line above generates the Swiss roll dataset instead
n_components = 2
fig = plt.figure(figsize=(8, 6))
ax = fig.add_subplot(111, projection='3d')
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=plt.cm.Spectral)
ax.view_init(4, -72)
plt.figure(figsize=(10, 12))
total_row = 3
now_pos = 1

n_neighbors = 5
plt.subplot(total_row, 2, now_pos)
Y = myLLE(X, n_neighbors, 2)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("myLLE with %i neighbors" % (n_neighbors))

plt.subplot(total_row, 2, now_pos + 1)
Y = LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2).fit_transform(X)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("sklearn LLE with %i neighbors" % (n_neighbors))
n_neighbors = 75
plt.subplot(total_row, 2, now_pos + 2)
Y = myLLE(X, n_neighbors, 2)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("myLLE with %i neighbors" % (n_neighbors))

plt.subplot(total_row, 2, now_pos + 3)
Y = LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2).fit_transform(X)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("sklearn LLE with %i neighbors" % (n_neighbors))
n_neighbors = 350
plt.subplot(total_row, 2, now_pos + 4)
Y = myLLE(X, n_neighbors, 2)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("myLLE with %i neighbors" % (n_neighbors))

plt.subplot(total_row, 2, now_pos + 5)
Y = LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2).fit_transform(X)
plt.scatter(Y[:, 0], Y[:, 1], c=color, cmap=plt.cm.Spectral)
plt.axis('tight')
plt.title("sklearn LLE with %i neighbors" % (n_neighbors))
plt.show()
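The summary above mentions applying the reduced data to softmax training, but that code is not included in the post. The sketch below is one plausible reconstruction, assuming sklearn's multinomial LogisticRegression as the softmax classifier and hypothetical class labels obtained by binning the continuous color values; neither is the author's original code.

from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
# hypothetical labels: bin the continuous S-curve coordinate into 3 classes
labels = np.digitize(color, np.quantile(color, [1/3, 2/3]))
Z = myLLE(X, k=10, d=2)
Z_train, Z_test, y_train, y_test = train_test_split(Z, labels, random_state=0)
clf = LogisticRegression().fit(Z_train, y_train)  # multinomial softmax regression
print("test accuracy: %.3f" % clf.score(Z_test, y_test))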
  3. Test results
    To be added later… the code still needs some cleanup…
