This article collects typical usage examples of the Python function sklearn.manifold.locally_linear.barycenter_kneighbors_graph. If you are wondering what barycenter_kneighbors_graph does, how to call it, or how it is used in practice, the hand-picked code examples below should help.
Eight code examples of barycenter_kneighbors_graph are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Python examples.
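Before the individual examples, here is a minimal, self-contained sketch of what the function computes. This snippet is our own illustration rather than code from any of the projects below, and the import path matches the older scikit-learn releases these examples target (recent releases moved the module to the private sklearn.manifold._locally_linear):

import numpy as np
from sklearn.manifold.locally_linear import barycenter_kneighbors_graph

# Three points that are almost collinear.
X = np.array([[0.0, 0.0], [1.0, 0.1], [2.0, 0.0]])

# Sparse (n_samples, n_samples) matrix: row i holds the barycenter
# (affine) weights that best reconstruct sample i from its 2 nearest
# neighbors, so each row sums to one.
W = barycenter_kneighbors_graph(X, 2)
print(W.toarray())
print(W.toarray().sum(axis=1))  # -> [1. 1. 1.]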
Example 1: test_barycenter_kneighbors_graph
def test_barycenter_kneighbors_graph():
    X = np.array([[0, 1], [1.01, 1.0], [2, 0]])
    A = barycenter_kneighbors_graph(X, 1)
    assert_array_almost_equal(
        A.toarray(),
        [[0.0, 1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
    A = barycenter_kneighbors_graph(X, 2)
    # check that each row of weights sums to one
    assert_array_almost_equal(np.sum(A.toarray(), 1), np.ones(3))
    pred = np.dot(A.toarray(), X)
    assert_less(linalg.norm(pred - X) / X.shape[0], 1)
Author: 93sam, Project: scikit-learn, Lines: 11, Source: test_locally_linear.py
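The "each row sums to one" check in Example 1 follows from how the weights are defined: every row solves a small regularized least-squares problem over the sample's neighbors, subject to the affine constraint. Below is a hand-rolled sketch of that computation for a single sample; it is our own illustration of the standard LLE weight equations, barycenter_weights_one is a hypothetical helper, and reg mirrors the library function's default regularization:

import numpy as np

def barycenter_weights_one(x, neighbors, reg=1e-3):
    # Offsets from the query point to its k neighbors, shape (k, d).
    G = neighbors - x
    # Local Gram matrix of the offsets, shape (k, k).
    C = G @ G.T
    # Regularize the diagonal for numerical stability (needed when k > d).
    C = C + reg * np.trace(C) * np.eye(len(C))
    # Solve C w = 1, then normalize so the weights sum to one.
    w = np.linalg.solve(C, np.ones(len(C)))
    return w / w.sum()

For sample 0 of Example 1's X with two neighbors, barycenter_weights_one(X[0], X[[1, 2]]) should match the two nonzero entries of the first row of the returned graph.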
Example 2: test_barycenter_kneighbors_graph
def test_barycenter_kneighbors_graph():
    X = np.array([[0, 1], [1.01, 1.], [2, 0]])
    A = barycenter_kneighbors_graph(X, 1)
    assert_array_almost_equal(
        A.todense(),
        [[0., 1., 0.],
         [1., 0., 0.],
         [0., 1., 0.]])
    A = barycenter_kneighbors_graph(X, 2)
    # check that each row of weights sums to one
    assert_array_almost_equal(np.sum(A.todense(), 1), np.ones((3, 1)))
    pred = np.dot(A.todense(), X)
    assert_true(np.linalg.norm(pred - X) / X.shape[0] < 1)
Author: ashish-sadh, Project: scikit-learn, Lines: 15, Source: test_locally_linear.py
Example 3: test_lle_simple_grid
def test_lle_simple_grid():
    rng = np.random.RandomState(0)
    # grid of equidistant points in 2D, out_dim = n_dim
    X = np.array(list(product(range(5), repeat=2)))
    out_dim = 2
    clf = manifold.LocallyLinearEmbedding(n_neighbors=5, out_dim=out_dim)
    tol = .1
    N = barycenter_kneighbors_graph(X, clf.n_neighbors).todense()
    reconstruction_error = np.linalg.norm(np.dot(N, X) - X, 'fro')
    assert_lower(reconstruction_error, tol)
    for solver in eigen_solvers:
        clf.set_params(eigen_solver=solver)
        clf.fit(X)
        assert_true(clf.embedding_.shape[1] == out_dim)
        reconstruction_error = np.linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
        # FIXME: ARPACK fails this test ...
        if solver != 'arpack':
            assert_lower(reconstruction_error, tol)
            assert_almost_equal(clf.reconstruction_error_,
                                reconstruction_error, decimal=4)
    # re-embed a noisy version of X using the transform method
    noise = rng.randn(*X.shape) / 100
    X_reembedded = clf.transform(X + noise)
    assert_lower(np.linalg.norm(X_reembedded - clf.embedding_), tol)
Author: ashish-sadh, Project: scikit-learn, Lines: 28, Source: test_locally_linear.py
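Stripped of the test assertions, the public API exercised by Example 3 (and Example 5 below) boils down to the following pattern. This is our own minimal sketch using the current parameter name n_components; out_dim in Examples 3 and 6 is the spelling used by the old scikit-learn versions those snippets were written against:

import numpy as np
from itertools import product
from sklearn.manifold import LocallyLinearEmbedding

# 5x5 grid of equidistant 2-D points, as in the test.
X = np.array(list(product(range(5), repeat=2)), dtype=float)

lle = LocallyLinearEmbedding(n_neighbors=5, n_components=2)
Y = lle.fit_transform(X)           # embed the training grid
Y_noisy = lle.transform(X + 0.01)  # re-embed perturbed points
print(Y.shape, lle.reconstruction_error_)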
Example 4: test_lle_manifold
def test_lle_manifold():
    rng = np.random.RandomState(0)
    # similar test on a slightly more complex manifold
    X = np.array(list(product(range(20), repeat=2)))
    X = np.c_[X, X[:, 0] ** 2 / 20]
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    for method in ["standard", "hessian", "modified", "ltsa"]:
        clf = manifold.LocallyLinearEmbedding(
            n_neighbors=6, n_components=n_components,
            method=method, random_state=0)
        tol = 1.5 if method == "standard" else 3
        N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
        reconstruction_error = np.linalg.norm(np.dot(N, X) - X)
        assert_less(reconstruction_error, tol)
        for solver in eigen_solvers:
            clf.set_params(eigen_solver=solver)
            clf.fit(X)
            assert_true(clf.embedding_.shape[1] == n_components)
            reconstruction_error = np.linalg.norm(
                np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
            details = ("solver: %s, method: %s" % (solver, method))
            assert_less(reconstruction_error, tol, msg=details)
            assert_less(np.abs(clf.reconstruction_error_ -
                               reconstruction_error),
                        tol * reconstruction_error, msg=details)
Author: Honglang, Project: scikit-learn, Lines: 28, Source: test_locally_linear.py
Example 5: test_lle_simple_grid
def test_lle_simple_grid():
    # note: ARPACK is numerically unstable, so this test will fail for
    # some random seeds. We choose 2 because the tests pass.
    rng = np.random.RandomState(2)
    # grid of equidistant points in 2D, n_components = n_dim
    X = np.array(list(product(range(5), repeat=2)))
    X = X + 1e-10 * rng.uniform(size=X.shape)
    n_components = 2
    clf = manifold.LocallyLinearEmbedding(n_neighbors=5,
                                          n_components=n_components,
                                          random_state=rng)
    tol = 0.1
    N = barycenter_kneighbors_graph(X, clf.n_neighbors).todense()
    reconstruction_error = np.linalg.norm(np.dot(N, X) - X, 'fro')
    assert_less(reconstruction_error, tol)
    for solver in eigen_solvers:
        clf.set_params(eigen_solver=solver)
        clf.fit(X)
        assert_true(clf.embedding_.shape[1] == n_components)
        reconstruction_error = np.linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
        assert_less(reconstruction_error, tol)
        assert_almost_equal(clf.reconstruction_error_,
                            reconstruction_error, decimal=1)
    # re-embed a noisy version of X using the transform method
    noise = rng.randn(*X.shape) / 100
    X_reembedded = clf.transform(X + noise)
    assert_less(np.linalg.norm(X_reembedded - clf.embedding_), tol)
Author: Honglang, Project: scikit-learn, Lines: 33, Source: test_locally_linear.py
Example 6: test_lle_manifold
def test_lle_manifold():
    # similar test on a slightly more complex manifold
    X = np.array(list(product(range(20), repeat=2)))
    X = np.c_[X, X[:, 0] ** 2 / 20]
    out_dim = 2
    clf = manifold.LocallyLinearEmbedding(n_neighbors=5, out_dim=out_dim)
    tol = 1.5
    N = barycenter_kneighbors_graph(X, clf.n_neighbors).toarray()
    reconstruction_error = np.linalg.norm(np.dot(N, X) - X)
    assert_lower(reconstruction_error, tol)
    for solver in eigen_solvers:
        clf.set_params(eigen_solver=solver)
        clf.fit(X)
        assert clf.embedding_.shape[1] == out_dim
        reconstruction_error = np.linalg.norm(
            np.dot(N, clf.embedding_) - clf.embedding_, 'fro') ** 2
        details = "solver: " + solver
        assert_lower(reconstruction_error, tol, details=details)
        assert_lower(np.abs(clf.reconstruction_error_ - reconstruction_error),
                     tol * reconstruction_error, details=details)
Author: Yangqing, Project: scikit-learn, Lines: 22, Source: test_locally_linear.py
Example 7: locally_linear_embedding
def locally_linear_embedding(
        X, n_neighbors, n_components, reg=1e-3, eigen_solver='auto', tol=1e-6,
        max_iter=100, method='standard', hessian_tol=1E-4, modified_tol=1E-12,
        random_state=None, n_jobs=None):
    if eigen_solver not in ('auto', 'arpack', 'dense'):
        raise ValueError("unrecognized eigen_solver '%s'" % eigen_solver)
    if method not in ('standard', 'hessian', 'modified', 'ltsa'):
        raise ValueError("unrecognized method '%s'" % method)
    nbrs = NearestNeighbors(n_neighbors=n_neighbors + 1, n_jobs=n_jobs)
    nbrs.fit(X)
    X = nbrs._fit_X
    N, d_in = X.shape
    if n_components > d_in:
        raise ValueError("output dimension must be less than or equal "
                         "to input dimension")
    if n_neighbors >= N:
        raise ValueError(
            "Expected n_neighbors <= n_samples, "
            "but n_samples = %d, n_neighbors = %d" % (N, n_neighbors))
    if n_neighbors <= 0:
        raise ValueError("n_neighbors must be positive")
    M_sparse = (eigen_solver != 'dense')
    if method == 'standard':
        W = barycenter_kneighbors_graph(
            nbrs, n_neighbors=n_neighbors, reg=reg, n_jobs=1)
        if M_sparse:
            M = eye(*W.shape, format=W.format) - W
            M = (M.T * M).tocsr()
        else:
            M = (W.T * W - W.T - W).toarray()
            M.flat[::M.shape[0] + 1] += 1  # M = (W - I).T @ (W - I)
    elif method == 'hessian':
        dp = n_components * (n_components + 1) // 2
        if n_neighbors <= n_components + dp:
            raise ValueError("for method='hessian', n_neighbors must be "
                             "greater than "
                             "[n_components * (n_components + 3) / 2]")
        neighbors = nbrs.kneighbors(X, n_neighbors=n_neighbors + 1,
                                    return_distance=False)
        neighbors = neighbors[:, 1:]
        Yi = np.empty((n_neighbors, 1 + n_components + dp), dtype=np.float64)
        Yi[:, 0] = 1
        M = np.zeros((N, N), dtype=np.float64)
        use_svd = (n_neighbors > d_in)
        for i in range(N):
            Gi = X[neighbors[i]]
            Gi -= Gi.mean(0)
            # build Hessian estimator
            if use_svd:
                U = svd(Gi, full_matrices=0)[0]
            else:
                Ci = np.dot(Gi, Gi.T)
                U = eigh(Ci)[1][:, ::-1]
            Yi[:, 1:1 + n_components] = U[:, :n_components]
            j = 1 + n_components
            for k in range(n_components):
                Yi[:, j:j + n_components - k] = (U[:, k:k + 1] *
                                                 U[:, k:n_components])
                j += n_components - k
            Q, R = qr(Yi)
            w = Q[:, n_components + 1:]
            S = w.sum(0)
            S[np.where(abs(S) < hessian_tol)] = 1
            w /= S
            nbrs_x, nbrs_y = np.meshgrid(neighbors[i], neighbors[i])
            M[nbrs_x, nbrs_y] += np.dot(w, w.T)
        if M_sparse:
            M = csr_matrix(M)
    elif method == 'modified':
        if n_neighbors < n_components:
            raise ValueError("modified LLE requires "
                             "n_neighbors >= n_components")
        # ... (remaining code omitted) ...
Author: yannistannier, Project: deepdr-dae-with-lle, Lines: 101, Source: deepR_denoising.py
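Since Example 7's function mirrors scikit-learn's own locally_linear_embedding helper, and assuming the omitted tail solves the eigenproblem and returns the embedding plus its reconstruction error as the library version does, a hedged usage sketch would look like this:

import numpy as np
from itertools import product

# 10x10 grid; 'standard' LLE is the branch shown in full above.
X = np.array(list(product(range(10), repeat=2)), dtype=float)
Y, err = locally_linear_embedding(X, n_neighbors=6, n_components=2,
                                  method='standard', random_state=0)
print(Y.shape, err)  # expected: (100, 2) and a small squared error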
Example 8: ller
def ller(X, Y, n_neighbors, n_components, mu=0.5, gamma=None,
         reg=1e-3, eigen_solver='auto', tol=1e-6, max_iter=100,
         random_state=None):
    """Locally Linear Embedding for Regression (LLER).

    Parameters
    ----------
    X : ndarray, 2-dimensional
        The data matrix, shape (num_data_points, num_dims).
    Y : ndarray, 1 or 2-dimensional
        The response matrix, shape (num_response_points, num_responses).
        Y is assumed to provide responses for X[:num_response_points].
    n_neighbors : int
        Number of neighbors for kNN graph construction.
    n_components : int
        Number of dimensions for the embedding.
    mu : float, optional
        Influence of the Y-similarity penalty.
    gamma : float, optional
        Scaling factor for the RBF kernel on Y.
        Defaults to the inverse of the median distance between rows of Y.

    Returns
    -------
    embedding : ndarray, 2-dimensional
        The embedding of X, shape (num_points, n_components).
    lle_error : float
        The embedding error of X (for a fixed reconstruction matrix W).
    ller_error : float
        The embedding error of X that takes Y into account.
    """
    if eigen_solver not in ('auto', 'arpack', 'dense'):
        raise ValueError("unrecognized eigen_solver '%s'" % eigen_solver)
    if Y.ndim == 1:
        Y = Y[:, None]
    if gamma is None:
        dists = pairwise_distances(Y)
        gamma = 1.0 / np.median(dists)
    nbrs = NearestNeighbors(n_neighbors=n_neighbors + 1)
    nbrs.fit(X)
    X = nbrs._fit_X
    Nx, d_in = X.shape
    Ny = Y.shape[0]
    if n_components > d_in:
        raise ValueError("output dimension must be less than or equal "
                         "to input dimension")
    if n_neighbors >= Nx:
        raise ValueError("n_neighbors must be less than number of points")
    if n_neighbors <= 0:
        raise ValueError("n_neighbors must be positive")
    if Nx < Ny:
        raise ValueError("X should have at least as many points as Y")
    M_sparse = (eigen_solver != 'dense')
    W = barycenter_kneighbors_graph(nbrs, n_neighbors=n_neighbors, reg=reg)
    if M_sparse:
        M = speye(*W.shape, format=W.format) - W
        M = (M.T * M).tocsr()
    else:
        M = (W.T * W - W.T - W).toarray()
        M.flat[::M.shape[0] + 1] += 1  # M = (W - I).T @ (W - I)
    P = rbf_kernel(Y, gamma=gamma)
    L = laplacian(P, normed=False)
    M /= np.abs(M).max()  # optional scaling step
    L /= np.abs(L).max()
    if Nx > Ny:
        # zeros = csr_matrix((Nx - Ny, Nx - Ny), dtype=M.dtype)
        # L = bmat([[L, None], [None, zeros]])
        ones = csr_matrix(np.ones((Nx - Ny, Nx - Ny)), dtype=M.dtype)
        L = bmat([[L, None], [None, ones]])
    omega = M + mu * L
    embedding, lle_error = null_space(omega, n_components, k_skip=1,
                                      eigen_solver=eigen_solver, tol=tol,
                                      max_iter=max_iter,
                                      random_state=random_state)
    ller_error = np.trace(embedding.T.dot(L).dot(embedding))
    return embedding, lle_error, ller_error
Author: imgemp, Project: ller, Lines: 94, Source: ller.py
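A short usage sketch for ller with made-up toy data (our own illustration, not from the project above): the mu term trades off the LLE reconstruction objective against agreement with the response similarity of Y.

import numpy as np

rng = np.random.RandomState(0)
X = rng.rand(100, 5)                 # toy inputs
Y = X[:, 0] + 0.1 * rng.randn(100)   # toy 1-D response, promoted to 2-D inside ller

embedding, lle_error, ller_error = ller(X, Y, n_neighbors=10, n_components=2)
print(embedding.shape, lle_error, ller_error)  # (100, 2) plus the two error terms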
Note: The sklearn.manifold.locally_linear.barycenter_kneighbors_graph examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their authors; copyright remains with the original authors, and any redistribution or use should follow the corresponding project's license. Do not reproduce without permission.