This article compiles typical usage examples of the Python class sklearn.linear_model.coordinate_descent.ElasticNetCV. If you have been wondering what ElasticNetCV does, how to use it, or what working examples look like, the curated class code examples below may help.
The following shows 16 code examples of the ElasticNetCV class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps our system recommend better Python code examples.
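Before diving into the collected examples, here is a minimal, self-contained sketch of typical ElasticNetCV usage on synthetic data. The data and parameter values (the l1_ratio grid, n_alphas, cv) are illustrative assumptions, not taken from the examples below; note also that the public import path is sklearn.linear_model, while sklearn.linear_model.coordinate_descent is the internal module.

import numpy as np
from sklearn.linear_model import ElasticNetCV

# Synthetic regression problem: only the first 5 of 20 features matter.
rng = np.random.RandomState(0)
X = rng.randn(100, 20)
coef = np.zeros(20)
coef[:5] = rng.randn(5)
y = X.dot(coef) + 0.1 * rng.randn(100)

# Cross-validate jointly over the alpha grid and the L1/L2 mixing parameter.
model = ElasticNetCV(l1_ratio=[0.2, 0.5, 0.8], n_alphas=20, cv=5)
model.fit(X, y)

print("best alpha:", model.alpha_)
print("best l1_ratio:", model.l1_ratio_)
print("training R^2:", model.score(X, y))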
Example 1: runPrintResults
def runPrintResults(X, y, alpha, name):
    print(name + ":\n=========")
    if (alpha is not None):
        X_new = np.divide(X, alpha)
    else:
        X_new = X
    enetCV = ElasticNetCV(l1_ratio=0.8, fit_intercept=False)  # cv=nCV, max_iter=5000
    # enetCV = LassoCV(fit_intercept=False)  # cv=nCV, max_iter=5000
    enetCV.fit(X_new, y)
    y_pred_enet = enetCV.predict(X_new)
    r2_score_enet = r2_score(y, y_pred_enet)
    print("R2= ", r2_score_enet)
    if (alpha is not None):
        enetCV_coef = np.divide(enetCV.coef_, alpha)
    else:
        enetCV_coef = enetCV.coef_
    print("Best Alpha: {}".format(enetCV.alpha_))
    # print("coefs_: {}".format(enetCV.coef_))
    print("coefs_/alpha: {}".format(enetCV_coef))
    return enetCV.alpha_, enetCV_coef
Developer ID: doaa-altarawy, Project: PEAK, Lines of code: 28, Source file: test_Iterative_enet.py
Example 2: test_enet_path
def test_enet_path():
    # We use a large number of samples and of informative features so that
    # the l1_ratio selected is more toward ridge than lasso
    X, y, X_test, y_test = build_dataset(n_samples=200, n_features=100,
                                         n_informative_features=100)
    max_iter = 150

    # Here we have a small number of iterations, and thus the
    # ElasticNet might not converge. This is to speed up tests
    clf = ElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7], cv=3,
                       max_iter=max_iter)
    ignore_warnings(clf.fit)(X, y)
    # Well-conditioned settings, we should have selected our
    # smallest penalty
    assert_almost_equal(clf.alpha_, min(clf.alphas_))
    # Non-sparse ground truth: we should have selected an elastic-net
    # that is closer to ridge than to lasso
    assert_equal(clf.l1_ratio_, min(clf.l1_ratio))

    clf = ElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7], cv=3,
                       max_iter=max_iter, precompute=True)
    ignore_warnings(clf.fit)(X, y)
    # Well-conditioned settings, we should have selected our
    # smallest penalty
    assert_almost_equal(clf.alpha_, min(clf.alphas_))
    # Non-sparse ground truth: we should have selected an elastic-net
    # that is closer to ridge than to lasso
    assert_equal(clf.l1_ratio_, min(clf.l1_ratio))

    # We are in well-conditioned settings with low noise: we should
    # have a good test-set performance
    assert_greater(clf.score(X_test, y_test), 0.99)
Developer ID: DanielWeitzenfeld, Project: scikit-learn, Lines of code: 34, Source file: test_coordinate_descent.py
Example 3: test_path_parameters
def test_path_parameters():
    X, y, _, _ = build_dataset()
    max_iter = 50

    clf = ElasticNetCV(n_alphas=50, eps=1e-3, max_iter=max_iter, l1_ratio=0.5)
    clf.fit(X, y)  # new params
    assert_almost_equal(0.5, clf.l1_ratio)
    assert_equal(50, clf.n_alphas)
    assert_equal(50, len(clf.alphas_))
Developer ID: mugiro, Project: elm-python, Lines of code: 9, Source file: test_coordinate_descent.py
Example 4: test_path_parameters
def test_path_parameters():
    X, y = make_sparse_data()
    max_iter = 50
    n_alphas = 10

    clf = ElasticNetCV(n_alphas=n_alphas, eps=1e-3, max_iter=max_iter,
                       l1_ratio=0.5, fit_intercept=False)
    clf.fit(X, y)  # new params
    assert_almost_equal(0.5, clf.l1_ratio)
    assert_equal(n_alphas, clf.n_alphas)
    assert_equal(n_alphas, len(clf.alphas_))
Developer ID: MarkyV, Project: scikit-learn, Lines of code: 10, Source file: test_sparse_coordinate_descent.py
Example 5: test_1d_multioutput_enet_and_multitask_enet_cv
def test_1d_multioutput_enet_and_multitask_enet_cv():
    X, y, _, _ = build_dataset(n_features=10)
    y = y[:, np.newaxis]
    clf = ElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7])
    clf.fit(X, y[:, 0])
    clf1 = MultiTaskElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7])
    clf1.fit(X, y)
    assert_almost_equal(clf.l1_ratio_, clf1.l1_ratio_)
    assert_almost_equal(clf.alpha_, clf1.alpha_)
    assert_almost_equal(clf.coef_, clf1.coef_[0])
    assert_almost_equal(clf.intercept_, clf1.intercept_[0])
Developer ID: allefpablo, Project: scikit-learn, Lines of code: 11, Source file: test_coordinate_descent.py
Example 6: test_path_parameters
def test_path_parameters():
    X, y = make_sparse_data()
    max_iter = 50
    n_alphas = 10

    clf = ElasticNetCV(n_alphas=n_alphas, eps=1e-3, max_iter=max_iter,
                       l1_ratio=0.5, fit_intercept=False)
    clf.fit(X, y)  # new params
    assert_almost_equal(0.5, clf.l1_ratio)
    assert_equal(n_alphas, clf.n_alphas)
    assert_equal(n_alphas, len(clf.alphas_))
    sparse_mse_path = clf.mse_path_

    clf.fit(X.toarray(), y)  # compare with dense data
    assert_almost_equal(clf.mse_path_, sparse_mse_path)
Developer ID: 2011200799, Project: scikit-learn, Lines of code: 13, Source file: test_sparse_coordinate_descent.py
Example 7: test_path_parameters
def test_path_parameters():
    # build an ill-posed linear regression problem with many noisy features and
    # comparatively few samples
    n_samples, n_features, max_iter = 50, 200, 50
    random_state = np.random.RandomState(0)
    w = random_state.randn(n_features)
    w[10:] = 0.0  # only the top 10 features are impacting the model
    X = random_state.randn(n_samples, n_features)
    y = np.dot(X, w)

    clf = ElasticNetCV(n_alphas=50, eps=1e-3, max_iter=max_iter,
                       rho=0.5)
    clf.fit(X, y)  # new params
    assert_almost_equal(0.5, clf.rho)
    assert_equal(50, clf.n_alphas)
    assert_equal(50, len(clf.alphas))
Developer ID: bvtrach, Project: scikit-learn, Lines of code: 17, Source file: test_coordinate_descent.py
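Note that Examples 7, 9, 11, and 13 come from older scikit-learn releases in which the elastic-net mixing parameter was still called rho and the cross-validated results were exposed as clf.alpha / clf.alphas / clf.rho_. In later versions rho was renamed to l1_ratio and the fitted attributes gained a trailing underscore (alpha_, alphas_, l1_ratio_). A rough sketch of the equivalent call against a recent scikit-learn, reusing the same ill-posed setup as above (the exact alpha selected may differ by version), could look like this:

import numpy as np
from sklearn.linear_model import ElasticNetCV

# Same ill-posed setup as Example 7: 50 samples, 200 features, 10 informative.
random_state = np.random.RandomState(0)
w = random_state.randn(200)
w[10:] = 0.0
X = random_state.randn(50, 200)
y = np.dot(X, w)

# rho=0.5 in the old API corresponds to l1_ratio=0.5 here.
clf = ElasticNetCV(n_alphas=50, eps=1e-3, max_iter=1000, l1_ratio=0.5)
clf.fit(X, y)

print(clf.l1_ratio)       # constructor parameter, unchanged: 0.5
print(clf.alpha_)         # best alpha found by cross-validation
print(len(clf.alphas_))   # full alpha grid used for the path (50 values)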
Example 8: test_enet_path
def test_enet_path():
    # We use a large number of samples and of informative features so that
    # the l1_ratio selected is more toward ridge than lasso
    X, y, X_test, y_test = build_dataset(n_samples=200, n_features=100,
                                         n_informative_features=100)
    max_iter = 150

    # Here we have a small number of iterations, and thus the
    # ElasticNet might not converge. This is to speed up tests
    clf = ElasticNetCV(alphas=[0.01, 0.05, 0.1], eps=2e-3,
                       l1_ratio=[0.5, 0.7], cv=3,
                       max_iter=max_iter)
    ignore_warnings(clf.fit)(X, y)
    # Well-conditioned settings, we should have selected our
    # smallest penalty
    assert_almost_equal(clf.alpha_, min(clf.alphas_))
    # Non-sparse ground truth: we should have selected an elastic-net
    # that is closer to ridge than to lasso
    assert_equal(clf.l1_ratio_, min(clf.l1_ratio))

    clf = ElasticNetCV(alphas=[0.01, 0.05, 0.1], eps=2e-3,
                       l1_ratio=[0.5, 0.7], cv=3,
                       max_iter=max_iter, precompute=True)
    ignore_warnings(clf.fit)(X, y)
    # Well-conditioned settings, we should have selected our
    # smallest penalty
    assert_almost_equal(clf.alpha_, min(clf.alphas_))
    # Non-sparse ground truth: we should have selected an elastic-net
    # that is closer to ridge than to lasso
    assert_equal(clf.l1_ratio_, min(clf.l1_ratio))

    # We are in well-conditioned settings with low noise: we should
    # have a good test-set performance
    assert_greater(clf.score(X_test, y_test), 0.99)

    # Multi-output/target case
    X, y, X_test, y_test = build_dataset(n_features=10, n_targets=3)
    clf = MultiTaskElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7],
                                cv=3, max_iter=max_iter)
    ignore_warnings(clf.fit)(X, y)
    # We are in well-conditioned settings with low noise: we should
    # have a good test-set performance
    assert_greater(clf.score(X_test, y_test), 0.99)
    assert_equal(clf.coef_.shape, (3, 10))

    # Mono-output should have same cross-validated alpha_ and l1_ratio_
    # in both cases.
    X, y, _, _ = build_dataset(n_features=10)
    clf1 = ElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7])
    clf1.fit(X, y)
    clf2 = MultiTaskElasticNetCV(n_alphas=5, eps=2e-3, l1_ratio=[0.5, 0.7])
    clf2.fit(X, y[:, np.newaxis])
    assert_almost_equal(clf1.l1_ratio_, clf2.l1_ratio_)
    assert_almost_equal(clf1.alpha_, clf2.alpha_)
Developer ID: allefpablo, Project: scikit-learn, Lines of code: 55, Source file: test_coordinate_descent.py
Example 9: test_enet_path
def test_enet_path():
    # build an ill-posed linear regression problem with many noisy features and
    # comparatively few samples
    n_samples, n_features, max_iter = 50, 200, 50
    random_state = np.random.RandomState(0)
    w = random_state.randn(n_features)
    w[10:] = 0.0  # only the top 10 features are impacting the model
    X = random_state.randn(n_samples, n_features)
    y = np.dot(X, w)

    clf = ElasticNetCV(n_alphas=10, eps=1e-3, rho=0.95, cv=5,
                       max_iter=max_iter)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha, 0.002, 2)

    clf = ElasticNetCV(n_alphas=10, eps=1e-3, rho=0.95, cv=5,
                       max_iter=max_iter, precompute=True)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha, 0.002, 2)

    # test set
    X_test = random_state.randn(n_samples, n_features)
    y_test = np.dot(X_test, w)
    assert clf.score(X_test, y_test) > 0.99
Developer ID: Scott-Alex, Project: scikit-learn, Lines of code: 25, Source file: test_coordinate_descent.py
Example 10: train_all
def train_all(self):
    positions = ['PG.csv', 'SG.csv', 'SF.csv', 'PF.csv', 'C.csv']
    with open(self.models_file_path, 'w') as model_file:
        model_file_writer = csv.writer(model_file)
        for (first, filename) in izip(chain((True,), repeat(False)), positions):
            with open(os.path.join(self.cleaned_data_directory_path, filename),
                      'r') as cleaned_data:
                cleaned_data_reader = csv.reader(cleaned_data)
                cleaned_data_headers = cleaned_data_reader.next()
                lines = [map(float, line[:-1]) + line[-1:] for line in cleaned_data_reader
                         if len(line) >= 2]
                # convert lines to numpy array
                num_data = len(lines)
                num_features = len(lines[0]) - 2
                X = np.zeros((num_data, num_features))
                Y = np.zeros((num_data))
                for (i, data) in enumerate(lines):
                    for (ii, feature) in enumerate(data[:-2]):
                        X[i][ii] = feature
                    Y[i] = lines[i][-2]  # last one is name
                # create an instance of elasticnet
                net = ElasticNetCV(alphas=[0.01, 0.05, 0.1], eps=2e-3,
                                   l1_ratio=[0.5, 0.7, 1], cv=3, normalize=True)
                # create a model based on our data
                net.fit(X, Y)
                if first:
                    model_file_writer.writerow(cleaned_data_headers[:-2])
                model_file_writer.writerow(net.coef_)
                with open(os.path.join(
                        self.residual_data_path,
                        '_'.join(('resid', filename))), 'w') as resid_file:
                    resid_file_writer = csv.writer(resid_file)
                    # get the residuals
                    resid = X.dot(net.coef_) - Y
                    for (name, row) in izip(imap(lambda l: l[-1], lines), resid):
                        resid_file_writer.writerow((name, row))
                    print sum(resid)
Developer ID: dhatch, Project: schneiderman, Lines of code: 43, Source file: regress.py
Example 11: test_enet_path
def test_enet_path():
    X, y, X_test, y_test = build_dataset()
    max_iter = 50

    clf = ElasticNetCV(n_alphas=10, eps=1e-3, rho=0.95, cv=5,
                       max_iter=max_iter)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha, 0.002, 2)

    clf = ElasticNetCV(n_alphas=10, eps=1e-3, rho=0.95, cv=5,
                       max_iter=max_iter, precompute=True)
    clf.fit(X, y)
    assert_almost_equal(clf.alpha, 0.002, 2)

    # test set
    assert clf.score(X_test, y_test) > 0.99
Developer ID: c0ldlimit, Project: scikit-learn, Lines of code: 16, Source file: test_coordinate_descent.py
Example 12: test_enet_cv_positive_constraint
def test_enet_cv_positive_constraint():
    X, y, X_test, y_test = build_dataset()
    max_iter = 500

    # Ensure the unconstrained fit has a negative coefficient
    enetcv_unconstrained = ElasticNetCV(n_alphas=3, eps=1e-1, max_iter=max_iter,
                                        cv=2, n_jobs=1)
    enetcv_unconstrained.fit(X, y)
    assert_true(min(enetcv_unconstrained.coef_) < 0)

    # On same data, constrained fit has non-negative coefficients
    enetcv_constrained = ElasticNetCV(n_alphas=3, eps=1e-1, max_iter=max_iter,
                                      cv=2, positive=True, n_jobs=1)
    enetcv_constrained.fit(X, y)
    assert_true(min(enetcv_constrained.coef_) >= 0)
Developer ID: nelson-liu, Project: scikit-learn, Lines of code: 13, Source file: test_coordinate_descent.py
Example 13: test_enet_path
def test_enet_path():
    X, y, X_test, y_test = build_dataset()
    max_iter = 150

    with warnings.catch_warnings():
        # Here we have a small number of iterations, and thus the
        # ElasticNet might not converge. This is to speed up tests
        warnings.simplefilter("ignore", UserWarning)
        clf = ElasticNetCV(n_alphas=5, eps=2e-3, rho=[0.9, 0.95], cv=3,
                           max_iter=max_iter)
        clf.fit(X, y)
        assert_almost_equal(clf.alpha, 0.002, 2)
        assert_equal(clf.rho_, 0.95)

        clf = ElasticNetCV(n_alphas=5, eps=2e-3, rho=[0.9, 0.95], cv=3,
                           max_iter=max_iter, precompute=True)
        clf.fit(X, y)
        assert_almost_equal(clf.alpha, 0.002, 2)
        assert_equal(clf.rho_, 0.95)

    # test set
    assert_greater(clf.score(X_test, y_test), 0.99)
Developer ID: gdub, Project: scikit-learn, Lines of code: 20, Source file: test_coordinate_descent.py
Example 14: test_enet_l1_ratio
def test_enet_l1_ratio():
    # Test that an error message is raised if an estimator that
    # uses _alpha_grid is called with l1_ratio=0
    msg = ("Automatic alpha grid generation is not supported for l1_ratio=0. "
           "Please supply a grid by providing your estimator with the "
           "appropriate `alphas=` argument.")
    X = np.array([[1, 2, 4, 5, 8], [3, 5, 7, 7, 8]]).T
    y = np.array([12, 10, 11, 21, 5])

    assert_raise_message(ValueError, msg, ElasticNetCV(
        l1_ratio=0, random_state=42).fit, X, y)
    assert_raise_message(ValueError, msg, MultiTaskElasticNetCV(
        l1_ratio=0, random_state=42).fit, X, y[:, None])

    # Test that l1_ratio=0 is allowed if we supply a grid manually
    alphas = [0.1, 10]
    estkwds = {'alphas': alphas, 'random_state': 42}
    est_desired = ElasticNetCV(l1_ratio=0.00001, **estkwds)
    est = ElasticNetCV(l1_ratio=0, **estkwds)
    with ignore_warnings():
        est_desired.fit(X, y)
        est.fit(X, y)
    assert_array_almost_equal(est.coef_, est_desired.coef_, decimal=5)

    est_desired = MultiTaskElasticNetCV(l1_ratio=0.00001, **estkwds)
    est = MultiTaskElasticNetCV(l1_ratio=0, **estkwds)
    with ignore_warnings():
        est.fit(X, y[:, None])
        est_desired.fit(X, y[:, None])
    assert_array_almost_equal(est.coef_, est_desired.coef_, decimal=5)
Developer ID: allefpablo, Project: scikit-learn, Lines of code: 30, Source file: test_coordinate_descent.py
Example 15: test_sparse_input_dtype_enet_and_lassocv
def test_sparse_input_dtype_enet_and_lassocv():
    X, y, _, _ = build_dataset(n_features=10)
    clf = ElasticNetCV(n_alphas=5)
    clf.fit(sparse.csr_matrix(X), y)
    clf1 = ElasticNetCV(n_alphas=5)
    clf1.fit(sparse.csr_matrix(X, dtype=np.float32), y)
    assert_almost_equal(clf.alpha_, clf1.alpha_, decimal=6)
    assert_almost_equal(clf.coef_, clf1.coef_, decimal=6)

    clf = LassoCV(n_alphas=5)
    clf.fit(sparse.csr_matrix(X), y)
    clf1 = LassoCV(n_alphas=5)
    clf1.fit(sparse.csr_matrix(X, dtype=np.float32), y)
    assert_almost_equal(clf.alpha_, clf1.alpha_, decimal=6)
    assert_almost_equal(clf.coef_, clf1.coef_, decimal=6)
Developer ID: allefpablo, Project: scikit-learn, Lines of code: 15, Source file: test_coordinate_descent.py
Example 16: enumerate
labels_group = ['elderly', 'mci', 'young']
j = 0
for _, x in enumerate(X.T):
    if (j % n_rows) == 0:
        f = pl.figure()
    for i in range(n_rows):
        a = f.add_subplot(n_rows, n_rows, (n_rows)*(j % n_rows) + (i+1))
        title = node_names[indexes[j][0]] + ' -- ' + node_names[indexes[j][1]]
        pl.scatter(x[groups == i], y[groups == i], c=color[i], s=40, label=labels_group[i])
        a.set_title(title)
        pl.legend()
    j += 1

######################################################
enetcv = ElasticNetCV(alphas=np.linspace(1, 0.05, 50),
                      cv=ShuffleSplit(len(y), n_iter=50, test_size=0.25))
lassocv = LassoCV(alphas=np.linspace(1, 0.05, 50),
                  cv=ShuffleSplit(len(y), n_iter=50, test_size=0.25))

for i in range(n_rows):
    X_ = conn_data[groups == i, :]
    y_ = y[groups == i]
    enetcv = ElasticNetCV(alphas=np.linspace(1, 0.05, 50),
                          cv=ShuffleSplit(len(y_), n_iter=50, test_size=0.25))
    lassocv = LassoCV(alphas=np.linspace(1, 0.05, 50),
                      cv=ShuffleSplit(len(y_), n_iter=50, test_size=0.25))
Developer ID: robbisg, Project: mvpa_itab_wu, Lines of code: 30, Source file: insula_piero.py
Note: The sklearn.linear_model.coordinate_descent.ElasticNetCV class examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various programmers, and copyright remains with the original authors. Please consult each project's license before distributing or using the code; do not reproduce this article without permission.