This article collects typical usage examples of the Python class sklearn.mixture.gaussian_mixture.GaussianMixture. If you have been wondering what the GaussianMixture class does, how to use it, or are looking for concrete usage examples, the curated class examples below may help.
The following presents 20 code examples of the GaussianMixture class, ordered by popularity by default.
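Before the test-suite excerpts, here is a minimal usage sketch of the class on toy data. Everything in it (the blob data, the choice of n_components=2, and 'full' covariance) is an illustrative assumption, not taken from the examples below; note that the public import path is sklearn.mixture, while the excerpts below come from scikit-learn's own test suite for the gaussian_mixture module.

import numpy as np
from sklearn.mixture import GaussianMixture

# Two well-separated Gaussian blobs as toy data (illustrative values).
rng = np.random.RandomState(0)
X = np.vstack([rng.randn(200, 2),            # blob around (0, 0)
               rng.randn(200, 2) + [5, 5]])  # blob around (5, 5)

# Fit a 2-component mixture with full covariance matrices.
gm = GaussianMixture(n_components=2, covariance_type='full', random_state=0)
gm.fit(X)

labels = gm.predict(X)        # hard cluster assignments
proba = gm.predict_proba(X)   # per-component responsibilities
log_lik = gm.score(X)         # mean per-sample log-likelihood
X_new, y_new = gm.sample(10)  # draw new samples from the fitted model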
Example 1: test_monotonic_likelihood
def test_monotonic_likelihood():
    # We check that each step of EM without regularization improves
    # the training set likelihood monotonically.
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7)
    n_components = rand_data.n_components

    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        gmm = GaussianMixture(n_components=n_components,
                              covariance_type=covar_type, reg_covar=0,
                              warm_start=True, max_iter=1, random_state=rng,
                              tol=1e-7)
        current_log_likelihood = -np.infty
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", ConvergenceWarning)
            # Do one training iteration at a time so we can make sure that the
            # training log likelihood increases after each iteration.
            for _ in range(600):
                prev_log_likelihood = current_log_likelihood
                try:
                    current_log_likelihood = gmm.fit(X).score(X)
                except ConvergenceWarning:
                    pass
                assert_greater_equal(current_log_likelihood,
                                     prev_log_likelihood)

                if gmm.converged_:
                    break

            assert gmm.converged_
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 31 | Source: test_gaussian_mixture.py
Example 2: test_gaussian_mixture_fit_predict_n_init
def test_gaussian_mixture_fit_predict_n_init():
    # Check that fit_predict is equivalent to fit.predict when n_init > 1
    X = np.random.RandomState(0).randn(1000, 5)
    gm = GaussianMixture(n_components=5, n_init=5, random_state=0)
    y_pred1 = gm.fit_predict(X)
    y_pred2 = gm.predict(X)
    assert_array_equal(y_pred1, y_pred2)
Author: allefpablo | Project: scikit-learn | Lines: 7 | Source: test_gaussian_mixture.py
Example 3: test_gaussian_mixture_n_parameters
def test_gaussian_mixture_n_parameters():
    # Test that the right number of parameters is estimated
    rng = np.random.RandomState(0)
    n_samples, n_features, n_components = 50, 5, 2
    X = rng.randn(n_samples, n_features)
    n_params = {'spherical': 13, 'diag': 21, 'tied': 26, 'full': 41}
    for cv_type in COVARIANCE_TYPE:
        g = GaussianMixture(
            n_components=n_components, covariance_type=cv_type,
            random_state=rng).fit(X)
        assert_equal(g._n_parameters(), n_params[cv_type])
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 11 | Source: test_gaussian_mixture.py
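As a quick sanity check of where the expected counts in n_params come from (my own arithmetic, not part of the test), a Gaussian mixture with k components in d dimensions has k - 1 free mixing weights, k * d mean entries, and a covariance_type-dependent number of covariance parameters. With k = 2 and d = 5 as in the test:

# Sketch of the parameter counting behind the expected n_params values above.
k, d = 2, 5
weight_params = k - 1              # mixing weights sum to 1
mean_params = k * d                # one d-dimensional mean per component
cov_params = {
    'spherical': k,                # one variance per component
    'diag': k * d,                 # one variance per feature and component
    'tied': d * (d + 1) // 2,      # a single shared full covariance matrix
    'full': k * d * (d + 1) // 2,  # a full covariance matrix per component
}
expected = {ct: weight_params + mean_params + cp for ct, cp in cov_params.items()}
# expected == {'spherical': 13, 'diag': 21, 'tied': 26, 'full': 41}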
Example 4: test_score
def test_score():
    covar_type = 'full'
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7)
    n_components = rand_data.n_components
    X = rand_data.X[covar_type]

    # Check the error message if we don't call fit
    gmm1 = GaussianMixture(n_components=n_components, n_init=1,
                           max_iter=1, reg_covar=0, random_state=rng,
                           covariance_type=covar_type)
    assert_raise_message(NotFittedError,
                         "This GaussianMixture instance is not fitted "
                         "yet. Call 'fit' with appropriate arguments "
                         "before using this method.", gmm1.score, X)

    # Check score value
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", ConvergenceWarning)
        gmm1.fit(X)
    gmm_score = gmm1.score(X)
    gmm_score_proba = gmm1.score_samples(X).mean()
    assert_almost_equal(gmm_score, gmm_score_proba)

    # Check that the score increases
    gmm2 = GaussianMixture(n_components=n_components, n_init=1, reg_covar=0,
                           random_state=rng,
                           covariance_type=covar_type).fit(X)
    assert_greater(gmm2.score(X), gmm1.score(X))
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 29 | Source: test_gaussian_mixture.py
Example 5: test_sample
def test_sample():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7, n_components=3)
    n_features, n_components = rand_data.n_features, rand_data.n_components

    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]

        gmm = GaussianMixture(n_components=n_components,
                              covariance_type=covar_type, random_state=rng)
        # To sample, the GaussianMixture must be fitted
        assert_raise_message(NotFittedError, "This GaussianMixture instance "
                             "is not fitted", gmm.sample, 0)
        gmm.fit(X)

        assert_raise_message(ValueError, "Invalid value for 'n_samples",
                             gmm.sample, 0)

        # Just to make sure the class samples correctly
        n_samples = 20000
        X_s, y_s = gmm.sample(n_samples)

        for k in range(n_components):
            if covar_type == 'full':
                assert_array_almost_equal(gmm.covariances_[k],
                                          np.cov(X_s[y_s == k].T), decimal=1)
            elif covar_type == 'tied':
                assert_array_almost_equal(gmm.covariances_,
                                          np.cov(X_s[y_s == k].T), decimal=1)
            elif covar_type == 'diag':
                assert_array_almost_equal(gmm.covariances_[k],
                                          np.diag(np.cov(X_s[y_s == k].T)),
                                          decimal=1)
            else:
                assert_array_almost_equal(
                    gmm.covariances_[k], np.var(X_s[y_s == k] - gmm.means_[k]),
                    decimal=1)

        means_s = np.array([np.mean(X_s[y_s == k], 0)
                            for k in range(n_components)])
        assert_array_almost_equal(gmm.means_, means_s, decimal=1)

        # Check shapes of sampled data, see
        # https://github.com/scikit-learn/scikit-learn/issues/7701
        assert_equal(X_s.shape, (n_samples, n_features))

        for sample_size in range(1, 100):
            X_s, _ = gmm.sample(sample_size)
            assert_equal(X_s.shape, (sample_size, n_features))
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 49 | Source: test_gaussian_mixture.py
Example 6: test_convergence_detected_with_warm_start
def test_convergence_detected_with_warm_start():
    # We check that convergence is detected when warm_start=True
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)
    n_components = rand_data.n_components
    X = rand_data.X['full']

    for max_iter in (1, 2, 50):
        gmm = GaussianMixture(n_components=n_components, warm_start=True,
                              max_iter=max_iter, random_state=rng)
        for _ in range(100):
            gmm.fit(X)
            if gmm.converged_:
                break
        assert gmm.converged_
        assert max_iter >= gmm.n_iter_
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 16 | Source: test_gaussian_mixture.py
Example 7: test_score_samples
def test_score_samples():
    covar_type = 'full'
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7)
    n_components = rand_data.n_components
    X = rand_data.X[covar_type]

    # Check the error message if we don't call fit
    gmm = GaussianMixture(n_components=n_components, n_init=1, reg_covar=0,
                          random_state=rng, covariance_type=covar_type)
    assert_raise_message(NotFittedError,
                         "This GaussianMixture instance is not fitted "
                         "yet. Call 'fit' with appropriate arguments "
                         "before using this method.", gmm.score_samples, X)

    gmm_score_samples = gmm.fit(X).score_samples(X)
    assert_equal(gmm_score_samples.shape[0], rand_data.n_samples)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 17 | Source: test_gaussian_mixture.py
Example 8: test_gaussian_mixture_fit
def test_gaussian_mixture_fit():
    # recover the ground truth
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)
    n_features = rand_data.n_features
    n_components = rand_data.n_components

    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        g = GaussianMixture(n_components=n_components, n_init=20,
                            reg_covar=0, random_state=rng,
                            covariance_type=covar_type)
        g.fit(X)

        # needs more data to pass the test with rtol=1e-7
        assert_allclose(np.sort(g.weights_), np.sort(rand_data.weights),
                        rtol=0.1, atol=1e-2)

        arg_idx1 = g.means_[:, 0].argsort()
        arg_idx2 = rand_data.means[:, 0].argsort()
        assert_allclose(g.means_[arg_idx1], rand_data.means[arg_idx2],
                        rtol=0.1, atol=1e-2)

        if covar_type == 'full':
            prec_pred = g.precisions_
            prec_test = rand_data.precisions['full']
        elif covar_type == 'tied':
            prec_pred = np.array([g.precisions_] * n_components)
            prec_test = np.array([rand_data.precisions['tied']] * n_components)
        elif covar_type == 'spherical':
            prec_pred = np.array([np.eye(n_features) * c
                                  for c in g.precisions_])
            prec_test = np.array([np.eye(n_features) * c for c in
                                  rand_data.precisions['spherical']])
        elif covar_type == 'diag':
            prec_pred = np.array([np.diag(d) for d in g.precisions_])
            prec_test = np.array([np.diag(d) for d in
                                  rand_data.precisions['diag']])

        arg_idx1 = np.trace(prec_pred, axis1=1, axis2=2).argsort()
        arg_idx2 = np.trace(prec_test, axis1=1, axis2=2).argsort()
        for k, h in zip(arg_idx1, arg_idx2):
            ecov = EmpiricalCovariance()
            ecov.covariance_ = prec_test[h]
            # the accuracy depends on the amount of data and the randomness of rng
            assert_allclose(ecov.error_norm(prec_pred[k]), 0, atol=0.1)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 46 | Source: test_gaussian_mixture.py
Example 9: test_gaussian_mixture_fit_predict
def test_gaussian_mixture_fit_predict():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)
    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        Y = rand_data.Y
        g = GaussianMixture(n_components=rand_data.n_components,
                            random_state=rng, weights_init=rand_data.weights,
                            means_init=rand_data.means,
                            precisions_init=rand_data.precisions[covar_type],
                            covariance_type=covar_type)

        # check that fit_predict(X) is equivalent to fit(X).predict(X)
        f = copy.deepcopy(g)
        Y_pred1 = f.fit(X).predict(X)
        Y_pred2 = g.fit_predict(X)
        assert_array_equal(Y_pred1, Y_pred2)
        assert_greater(adjusted_rand_score(Y, Y_pred2), .95)
Author: SuryodayBasak | Project: scikit-learn | Lines: 18 | Source: test_gaussian_mixture.py
Example 10: test_property
def test_property():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7)
    n_components = rand_data.n_components

    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        gmm = GaussianMixture(n_components=n_components,
                              covariance_type=covar_type, random_state=rng,
                              n_init=5)
        gmm.fit(X)
        if covar_type == 'full':
            for prec, covar in zip(gmm.precisions_, gmm.covariances_):
                assert_array_almost_equal(linalg.inv(prec), covar)
        elif covar_type == 'tied':
            assert_array_almost_equal(linalg.inv(gmm.precisions_),
                                      gmm.covariances_)
        else:
            assert_array_almost_equal(gmm.precisions_, 1. / gmm.covariances_)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 20 | Source: test_gaussian_mixture.py
Example 11: test_check_means
def test_check_means():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)

    n_components, n_features = rand_data.n_components, rand_data.n_features
    X = rand_data.X['full']

    g = GaussianMixture(n_components=n_components)

    # Check means with a bad shape
    means_bad_shape = rng.rand(n_components + 1, n_features)
    g.means_init = means_bad_shape
    assert_raise_message(ValueError,
                         "The parameter 'means' should have the shape of ",
                         g.fit, X)

    # Check a good means matrix
    means = rand_data.means
    g.means_init = means
    g.fit(X)
    assert_array_equal(means, g.means_init)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 21 | Source: test_gaussian_mixture.py
Example 12: test_regularisation
def test_regularisation():
    # We train the GaussianMixture on degenerate data by defining two clusters
    # with zero covariance.
    rng = np.random.RandomState(0)
    n_samples, n_features = 10, 5

    X = np.vstack((np.ones((n_samples // 2, n_features)),
                   np.zeros((n_samples // 2, n_features))))

    for covar_type in COVARIANCE_TYPE:
        gmm = GaussianMixture(n_components=n_samples, reg_covar=0,
                              covariance_type=covar_type, random_state=rng)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            assert_raise_message(ValueError,
                                 "The algorithm has diverged because of too "
                                 "few samples per components. "
                                 "Try to decrease the number of components, "
                                 "or increase reg_covar.", gmm.fit, X)

            gmm.set_params(reg_covar=1e-6).fit(X)
Author: LANRRI | Project: scikit-learn | Lines: 22 | Source: test_gaussian_mixture.py
Example 13: test_sample
def test_sample():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=7)
    n_features, n_components = rand_data.n_features, rand_data.n_components

    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]

        gmm = GaussianMixture(n_components=n_components,
                              covariance_type=covar_type, random_state=rng)
        # To sample, the GaussianMixture must be fitted
        assert_raise_message(NotFittedError, "This GaussianMixture instance "
                             "is not fitted", gmm.sample, 0)
        gmm.fit(X)

        assert_raise_message(ValueError, "Invalid value for 'n_samples",
                             gmm.sample, 0)

        # Just to make sure the class samples correctly
        X_s, y_s = gmm.sample(20000)
        for k in range(n_features):
            if covar_type == 'full':
                assert_array_almost_equal(gmm.covariances_[k],
                                          np.cov(X_s[y_s == k].T), decimal=1)
            elif covar_type == 'tied':
                assert_array_almost_equal(gmm.covariances_,
                                          np.cov(X_s[y_s == k].T), decimal=1)
            elif covar_type == 'diag':
                assert_array_almost_equal(gmm.covariances_[k],
                                          np.diag(np.cov(X_s[y_s == k].T)),
                                          decimal=1)
            else:
                assert_array_almost_equal(
                    gmm.covariances_[k], np.var(X_s[y_s == k] - gmm.means_[k]),
                    decimal=1)

        means_s = np.array([np.mean(X_s[y_s == k], 0)
                            for k in range(n_features)])
        assert_array_almost_equal(gmm.means_, means_s, decimal=1)
Author: d-behi | Project: scikit-learn | Lines: 39 | Source: test_gaussian_mixture.py
Example 14: test_gaussian_mixture_predict_predict_proba
def test_gaussian_mixture_predict_predict_proba():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)
    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        Y = rand_data.Y
        g = GaussianMixture(n_components=rand_data.n_components,
                            random_state=rng, weights_init=rand_data.weights,
                            means_init=rand_data.means,
                            precisions_init=rand_data.precisions[covar_type],
                            covariance_type=covar_type)

        # Check that an error message arrives if we don't call fit
        assert_raise_message(NotFittedError,
                             "This GaussianMixture instance is not fitted "
                             "yet. Call 'fit' with appropriate arguments "
                             "before using this method.", g.predict, X)

        g.fit(X)
        Y_pred = g.predict(X)
        Y_pred_proba = g.predict_proba(X).argmax(axis=1)
        assert_array_equal(Y_pred, Y_pred_proba)
        assert_greater(adjusted_rand_score(Y, Y_pred), .95)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 23 | Source: test_gaussian_mixture.py
Example 15: test_gaussian_mixture_estimate_log_prob_resp
def test_gaussian_mixture_estimate_log_prob_resp():
    # test whether responsibilities are normalized
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng, scale=5)
    n_samples = rand_data.n_samples
    n_features = rand_data.n_features
    n_components = rand_data.n_components

    X = rng.rand(n_samples, n_features)
    for covar_type in COVARIANCE_TYPE:
        weights = rand_data.weights
        means = rand_data.means
        precisions = rand_data.precisions[covar_type]
        g = GaussianMixture(n_components=n_components, random_state=rng,
                            weights_init=weights, means_init=means,
                            precisions_init=precisions,
                            covariance_type=covar_type)
        g.fit(X)
        resp = g.predict_proba(X)
        assert_array_almost_equal(resp.sum(axis=1), np.ones(n_samples))
        assert_array_equal(g.weights_init, weights)
        assert_array_equal(g.means_init, means)
        assert_array_equal(g.precisions_init, precisions)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 23 | Source: test_gaussian_mixture.py
Example 16: test_regularisation
def test_regularisation():
    # We train the GaussianMixture on degenerate data by defining two clusters
    # with zero covariance.
    rng = np.random.RandomState(0)
    n_samples, n_features = 10, 5

    X = np.vstack((np.ones((n_samples // 2, n_features)),
                   np.zeros((n_samples // 2, n_features))))

    for covar_type in COVARIANCE_TYPE:
        gmm = GaussianMixture(n_components=n_samples, reg_covar=0,
                              covariance_type=covar_type, random_state=rng)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore", RuntimeWarning)
            assert_raise_message(ValueError,
                                 "Fitting the mixture model failed because "
                                 "some components have ill-defined empirical "
                                 "covariance (for instance caused by "
                                 "singleton or collapsed samples). Try to "
                                 "decrease the number of components, or "
                                 "increase reg_covar.", gmm.fit, X)

            gmm.set_params(reg_covar=1e-6).fit(X)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 24 | Source: test_gaussian_mixture.py
Example 17: test_check_precisions
def test_check_precisions():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)

    n_components, n_features = rand_data.n_components, rand_data.n_features

    # Define the bad precisions for each covariance_type
    precisions_bad_shape = {
        'full': np.ones((n_components + 1, n_features, n_features)),
        'tied': np.ones((n_features + 1, n_features + 1)),
        'diag': np.ones((n_components + 1, n_features)),
        'spherical': np.ones((n_components + 1))}

    # Define non-positive-definite precisions
    precisions_not_pos = np.ones((n_components, n_features, n_features))
    precisions_not_pos[0] = np.eye(n_features)
    precisions_not_pos[0, 0, 0] = -1.

    precisions_not_positive = {
        'full': precisions_not_pos,
        'tied': precisions_not_pos[0],
        'diag': np.full((n_components, n_features), -1.),
        'spherical': np.full(n_components, -1.)}

    not_positive_errors = {
        'full': 'symmetric, positive-definite',
        'tied': 'symmetric, positive-definite',
        'diag': 'positive',
        'spherical': 'positive'}

    for covar_type in COVARIANCE_TYPE:
        X = RandomData(rng).X[covar_type]
        g = GaussianMixture(n_components=n_components,
                            covariance_type=covar_type,
                            random_state=rng)

        # Check precisions with bad shapes
        g.precisions_init = precisions_bad_shape[covar_type]
        assert_raise_message(ValueError,
                             "The parameter '%s precision' should have "
                             "the shape of" % covar_type,
                             g.fit, X)

        # Check non-positive precisions
        g.precisions_init = precisions_not_positive[covar_type]
        assert_raise_message(ValueError,
                             "'%s precision' should be %s"
                             % (covar_type, not_positive_errors[covar_type]),
                             g.fit, X)

        # Check the correct init of precisions_init
        g.precisions_init = rand_data.precisions[covar_type]
        g.fit(X)
        assert_array_equal(rand_data.precisions[covar_type], g.precisions_init)
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 54 | Source: test_gaussian_mixture.py
Example 18: test_check_covariances
def test_check_covariances():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)

    n_components, n_features = rand_data.n_components, rand_data.n_features

    # Define the bad covariances for each covariance_type
    covariances_bad_shape = {
        'full': rng.rand(n_components + 1, n_features, n_features),
        'tied': rng.rand(n_features + 1, n_features + 1),
        'diag': rng.rand(n_components + 1, n_features),
        'spherical': rng.rand(n_components + 1)}

    # Define non-positive-definite covariances
    covariances_not_pos = rng.rand(n_components, n_features, n_features)
    covariances_not_pos[0] = np.eye(n_features)
    covariances_not_pos[0, 0, 0] = -1.

    covariances_not_positive = {
        'full': covariances_not_pos,
        'tied': covariances_not_pos[0],
        'diag': -1. * np.ones((n_components, n_features)),
        'spherical': -1. * np.ones(n_components)}

    not_positive_errors = {
        'full': 'symmetric, positive-definite',
        'tied': 'symmetric, positive-definite',
        'diag': 'positive',
        'spherical': 'positive'}

    for cov_type in ['full', 'tied', 'diag', 'spherical']:
        X = rand_data.X[cov_type]
        g = GaussianMixture(n_components=n_components,
                            covariance_type=cov_type)

        # Check covariances with bad shapes
        g.covariances_init = covariances_bad_shape[cov_type]
        assert_raise_message(ValueError,
                             "The parameter '%s covariance' should have "
                             "the shape of" % cov_type,
                             g.fit, X)

        # Check non-positive covariances
        g.covariances_init = covariances_not_positive[cov_type]
        assert_raise_message(ValueError,
                             "'%s covariance' should be %s"
                             % (cov_type, not_positive_errors[cov_type]),
                             g.fit, X)

        # Check the correct init of covariances_init
        g.covariances_init = rand_data.covariances[cov_type]
        g.fit(X)
        assert_array_equal(rand_data.covariances[cov_type], g.covariances_init)
Author: 123fengye741 | Project: scikit-learn | Lines: 53 | Source: test_gaussian_mixture.py
Example 19: test_gaussian_mixture_fit_best_params
def test_gaussian_mixture_fit_best_params():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)
    n_components = rand_data.n_components
    n_init = 10
    for covar_type in COVARIANCE_TYPE:
        X = rand_data.X[covar_type]
        g = GaussianMixture(n_components=n_components, n_init=1, reg_covar=0,
                            random_state=rng, covariance_type=covar_type)
        ll = []
        for _ in range(n_init):
            g.fit(X)
            ll.append(g.score(X))
        ll = np.array(ll)
        g_best = GaussianMixture(n_components=n_components,
                                 n_init=n_init, reg_covar=0, random_state=rng,
                                 covariance_type=covar_type)
        g_best.fit(X)
        assert_almost_equal(ll.min(), g_best.score(X))
Author: jerry-dumblauskas | Project: scikit-learn | Lines: 19 | Source: test_gaussian_mixture.py
Example 20: test_check_weights
def test_check_weights():
    rng = np.random.RandomState(0)
    rand_data = RandomData(rng)

    n_components = rand_data.n_components
    X = rand_data.X['full']

    g = GaussianMixture(n_components=n_components)

    # Check a bad shape
    weights_bad_shape = rng.rand(n_components, 1)
    g.weights_init = weights_bad_shape
    assert_raise_message(ValueError,
                         "The parameter 'weights' should have the shape of "
                         "(%d,), "
                         "but got %s" % (n_components,
                                         str(weights_bad_shape.shape)),
                         g.fit, X)

    # Check a bad range
    weights_bad_range = rng.rand(n_components) + 1
    g.weights_init = weights_bad_range
    assert_raise_message(ValueError,
                         "The parameter 'weights' should be in the range "
                         "[0, 1], but got max value %.5f, min value %.5f"
                         % (np.min(weights_bad_range),
                            np.max(weights_bad_range)),
                         g.fit, X)

    # Check bad normalization
    weights_bad_norm = rng.rand(n_components)
    weights_bad_norm = weights_bad_norm / (weights_bad_norm.sum() + 1)
    g.weights_init = weights_bad_norm
    assert_raise_message(ValueError,
                         "The parameter 'weights' should be normalized, "
                         "but got sum(weights) = %.5f"
                         % np.sum(weights_bad_norm),
                         g.fit, X)

    # Check a good weights matrix
    weights = rand_data.weights
    g = GaussianMixture(weights_init=weights, n_components=n_components)
    g.fit(X)
    assert_array_equal(weights, g.weights_init)
Author: 123fengye741 | Project: scikit-learn | Lines: 44 | Source: test_gaussian_mixture.py
Note: The sklearn.mixture.gaussian_mixture.GaussianMixture examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers, and copyright remains with the original authors. Please refer to the corresponding project's License before redistributing or using the code; do not reproduce without permission.