This article collects typical usage examples of the Python class sklearn.neighbors.kd_tree.KDTree. If you are unsure what the KDTree class does or how to use it, the hand-picked code examples below may help.
The 12 KDTree code examples shown below are sorted by popularity by default.
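Before the individual examples, here is a minimal sketch of the KDTree API that the examples below exercise (tree construction, k-nearest-neighbor query, radius query, and kernel density estimation). The array shape and parameter values are illustrative assumptions, not taken from any particular example:

import numpy as np
from sklearn.neighbors.kd_tree import KDTree

rng = np.random.RandomState(0)
X = rng.random_sample((100, 3))              # 100 illustrative points in 3 dimensions

tree = KDTree(X, leaf_size=10)               # build the tree
dist, ind = tree.query(X[:5], k=3)           # distances and indices of the 3 nearest neighbors
ind_r = tree.query_radius(X[:5], r=0.5)      # indices of all points within radius 0.5
density = tree.kernel_density(X[:5], h=0.1)  # Gaussian kernel density estimate at the query points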
Example 1: check_neighbors

def check_neighbors(dualtree, breadth_first, k, metric, kwargs):
    kdt = KDTree(X, leaf_size=1, metric=metric, **kwargs)
    dist1, ind1 = kdt.query(Y, k, dualtree=dualtree, breadth_first=breadth_first)
    dist2, ind2 = brute_force_neighbors(X, Y, k, metric, **kwargs)

    # don't check indices here: if there are any duplicate distances,
    # the indices may not match.  Distances should not have this problem.
    assert_allclose(dist1, dist2)

Developer: kinnskogr, Project: scikit-learn, Lines: 8, Source: test_kd_tree.py
Example 2: test_kd_tree_two_point

def test_kd_tree_two_point(dualtree):
    n_samples, n_features = (100, 3)
    rng = check_random_state(0)
    X = rng.random_sample((n_samples, n_features))
    Y = rng.random_sample((n_samples, n_features))
    r = np.linspace(0, 1, 10)
    kdt = KDTree(X, leaf_size=10)

    D = DistanceMetric.get_metric("euclidean").pairwise(Y, X)
    counts_true = [(D <= ri).sum() for ri in r]

    counts = kdt.two_point_correlation(Y, r=r, dualtree=dualtree)
    assert_array_almost_equal(counts, counts_true)

Developer: MartinThoma, Project: scikit-learn, Lines: 13, Source: test_kd_tree.py
Example 3: test_gaussian_kde

def test_gaussian_kde(n_samples=1000):
    # Compare gaussian KDE results to scipy.stats.gaussian_kde
    from scipy.stats import gaussian_kde
    rng = check_random_state(0)

    x_in = rng.normal(0, 1, n_samples)
    x_out = np.linspace(-5, 5, 30)

    for h in [0.01, 0.1, 1]:
        kdt = KDTree(x_in[:, None])
        gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in))

        dens_kdt = kdt.kernel_density(x_out[:, None], h) / n_samples
        dens_gkde = gkde.evaluate(x_out)

        assert_array_almost_equal(dens_kdt, dens_gkde, decimal=3)

Developer: MartinThoma, Project: scikit-learn, Lines: 15, Source: test_kd_tree.py
Example 4: test_kd_tree_pickle

def test_kd_tree_pickle(protocol):
    import pickle
    rng = check_random_state(0)
    X = rng.random_sample((10, 3))
    kdt1 = KDTree(X, leaf_size=1)
    ind1, dist1 = kdt1.query(X)

    def check_pickle_protocol(protocol):
        s = pickle.dumps(kdt1, protocol=protocol)
        kdt2 = pickle.loads(s)
        ind2, dist2 = kdt2.query(X)
        assert_array_almost_equal(ind1, ind2)
        assert_array_almost_equal(dist1, dist2)

    check_pickle_protocol(protocol)

Developer: AlexisMignon, Project: scikit-learn, Lines: 15, Source: test_kd_tree.py
Example 5: test_kd_tree_pickle

def test_kd_tree_pickle():
    import pickle
    np.random.seed(0)
    X = np.random.random((10, 3))
    kdt1 = KDTree(X, leaf_size=1)
    ind1, dist1 = kdt1.query(X)

    def check_pickle_protocol(protocol):
        s = pickle.dumps(kdt1, protocol=protocol)
        kdt2 = pickle.loads(s)
        ind2, dist2 = kdt2.query(X)
        assert_array_almost_equal(ind1, ind2)
        assert_array_almost_equal(dist1, dist2)

    for protocol in (0, 1, 2):
        yield check_pickle_protocol, protocol

Developer: 0x0all, Project: scikit-learn, Lines: 16, Source: test_kd_tree.py
Example 6: test_kd_tree_query_radius

def test_kd_tree_query_radius(n_samples=100, n_features=10):
    rng = check_random_state(0)
    X = 2 * rng.random_sample(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1E-15  # roundoff error can cause test to fail
    kdt = KDTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind = kdt.query_radius([query_pt], r + eps)[0]
        i = np.where(rad <= r + eps)[0]

        ind.sort()
        i.sort()

        assert_array_almost_equal(i, ind)

Developer: MartinThoma, Project: scikit-learn, Lines: 17, Source: test_kd_tree.py
Example 7: test_kd_tree_query_radius

def test_kd_tree_query_radius(n_samples=100, n_features=10):
    np.random.seed(0)
    X = 2 * np.random.random(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1E-15  # roundoff error can cause test to fail
    kdt = KDTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind = kdt.query_radius(query_pt, r + eps)[0]
        i = np.where(rad <= r + eps)[0]

        ind.sort()
        i.sort()

        assert_allclose(i, ind)

Developer: Hydroinformatics-UNESCO-IHE, Project: scikit-learn, Lines: 17, Source: test_kd_tree.py
Example 8: test_kd_tree_query_radius_distance

def test_kd_tree_query_radius_distance(n_samples=100, n_features=10):
    rng = check_random_state(0)
    X = 2 * rng.random_sample(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1E-15  # roundoff error can cause test to fail
    kdt = KDTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind, dist = kdt.query_radius([query_pt], r + eps, return_distance=True)

        ind = ind[0]
        dist = dist[0]

        d = np.sqrt(((query_pt - X[ind]) ** 2).sum(1))

        assert_array_almost_equal(d, dist)

Developer: MartinThoma, Project: scikit-learn, Lines: 18, Source: test_kd_tree.py
Example 9: test_gaussian_kde

def test_gaussian_kde(n_samples=1000):
    """Compare gaussian KDE results to scipy.stats.gaussian_kde"""
    from scipy.stats import gaussian_kde
    np.random.seed(0)

    x_in = np.random.normal(0, 1, n_samples)
    x_out = np.linspace(-5, 5, 30)

    for h in [0.01, 0.1, 1]:
        kdt = KDTree(x_in[:, None])

        try:
            gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in))
        except TypeError:
            raise SkipTest("Old scipy, does not accept explicit bandwidth.")

        dens_kdt = kdt.kernel_density(x_out[:, None], h) / n_samples
        dens_gkde = gkde.evaluate(x_out)

        assert_array_almost_equal(dens_kdt, dens_gkde, decimal=3)

Developer: 0x0all, Project: scikit-learn, Lines: 18, Source: test_kd_tree.py
Example 10: test_kd_tree_query_radius_distance

def test_kd_tree_query_radius_distance(n_samples=100, n_features=10):
    np.random.seed(0)
    X = 2 * np.random.random(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1e-15  # roundoff error can cause test to fail
    kdt = KDTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind, dist = kdt.query_radius(query_pt, r + eps, return_distance=True)

        ind = ind[0]
        dist = dist[0]

        d = np.sqrt(((query_pt - X[ind]) ** 2).sum(1))

        assert_allclose(d, dist)

Developer: kinnskogr, Project: scikit-learn, Lines: 18, Source: test_kd_tree.py
Example 11: test_gaussian_kde

def test_gaussian_kde(n_samples=1000):
    """Compare gaussian KDE results to scipy.stats.gaussian_kde"""
    from scipy.stats import gaussian_kde
    np.random.seed(0)

    x_in = np.random.normal(0, 1, n_samples)
    x_out = np.linspace(-5, 5, 30)

    for h in [0.01, 0.1, 1]:
        kdt = KDTree(x_in[:, None])

        try:
            gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in))
        except TypeError:
            # older versions of scipy don't accept explicit bandwidth
            raise SkipTest

        dens_kdt = kdt.kernel_density(x_out[:, None], h) / n_samples
        dens_gkde = gkde.evaluate(x_out)

        assert_allclose(dens_kdt, dens_gkde, rtol=1E-3, atol=1E-3)

Developer: Hydroinformatics-UNESCO-IHE, Project: scikit-learn, Lines: 19, Source: test_kd_tree.py
Example 12: querying a KDTree built from tabular data

#from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestNeighbors
from sklearn.neighbors.kd_tree import KDTree
#from sklearn.neighbors import DistanceMetric
import numpy as np
import get_data2 as gd

headers = gd.get_headers()
dicts = gd.get_data_list_of_dicts()

# build a list of rows (one list per sample) from the column-oriented data
rows_lol = []
for i in range(len(gd.get_data_slice(headers[0], dicts))):
    rows_lol.append([])

for i in range(len(headers)):
    # NOTE: both branches currently load the column the same way
    if i == 1 or i == 4:
        column = gd.get_data_slice_numbers(headers[i], dicts)
    else:
        column = gd.get_data_slice_numbers(headers[i], dicts)
    for j in range(len(gd.get_data_slice(headers[0], dicts))):
        rows_lol[j].append(column[j])

X = np.array(rows_lol)

#nbrs = NearestNeighbors(n_neighbors=5, algorithm='kd_tree', metric='jaccard').fit(X)
kdt = KDTree(X, leaf_size=30, metric='euclidean')
kdt.query(X, k=3, return_distance=False)

Developer: airblair94, Project: data_ex, Lines: 26, Source: manipulations2.py
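As a follow-up to the last call in Example 12, the same query can also return the neighbor distances alongside the indices; this is an illustrative variant assuming the same kdt and X as above:

dist, ind = kdt.query(X, k=3, return_distance=True)  # distances and indices of the 3 nearest neighbors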
Note: the sklearn.neighbors.kd_tree.KDTree class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. Please consult each project's license before redistributing or using the code, and do not reproduce this article without permission.