Python samples_generator.test_dataset_classif Function Code Examples


This article collects typical usage examples of the Python function scikits.learn.datasets.samples_generator.test_dataset_classif. If you are wondering what test_dataset_classif does, how it is called in practice, or where to find real examples of it, the curated code samples below should help.



The following shows 11 code examples of the test_dataset_classif function, sorted by popularity by default.
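
Every example below follows the same calling pattern: test_dataset_classif(n_samples=..., n_features=..., k=..., seed=..., param=...) returns a feature matrix X and a label vector y. Note that scikits.learn is the pre-0.9 package name of scikit-learn and test_dataset_classif no longer exists in current releases; the runnable sketch below therefore uses sklearn.datasets.make_classification as an assumed modern stand-in (mapping k to n_informative and seed to random_state), not the original API.

# Legacy calling pattern used throughout the examples (requires the old
# scikits.learn package and will not import on a modern install):
#     from scikits.learn.datasets import samples_generator
#     X, y = samples_generator.test_dataset_classif(n_samples=200,
#                                                   n_features=100, k=5, seed=0)

# Assumed modern stand-in, not the original API: make_classification with
# k mapped to n_informative and seed mapped to random_state.
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=200, n_features=100,
                           n_informative=5, random_state=0)
print(X.shape, y.shape)  # (200, 100) (200,)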

Example 1: test_grid_search_error

def test_grid_search_error():
    """Test that grid search will capture errors on data with different
    length"""
    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, seed=0)

    clf = LinearSVC()
    cv = GridSearchCV(clf, {'C':[0.1, 1.0]})
    assert_raises(ValueError, cv.fit, X_[:180], y_)
Contributor: almet, Project: scikit-learn, Lines: 8, Source: test_grid_search.py


Example 2: test_weight

def test_weight():
    """
    Test class weights
    """

    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, param=[5, 1], seed=0)
    X_ = scipy.sparse.csr_matrix(X_)
    for clf in (linear_model.sparse.LogisticRegression(), svm.sparse.LinearSVC(), svm.sparse.SVC()):
        clf.fit(X_[:180], y_[:180], class_weight={0: 5})
        y_pred = clf.predict(X_[180:])
        assert np.sum(y_pred == y_[180:]) >= 11
Contributor: ronw, Project: scikit-learn, Lines: 11, Source: test_sparse.py


Example 3: test_f_classif_multi_class

def test_f_classif_multi_class():
    """
    Test whether the F test yields meaningful results
    on a simple simulated classification problem
    """
    X, Y = test_dataset_classif(n_samples=50, n_features=20, k=5,
                                seed=seed, param=[1, 1, 1])
    F, pv = f_classif(X, Y)
    assert (F > 0).all()
    assert (pv > 0).all()
    assert (pv < 1).all()
    assert (pv[:5] < 0.05).all()
    assert (pv[5:] > 1.e-5).all()
Contributor: aayushsaxena15, Project: projects, Lines: 13, Source: test_feature_select.py


Example 4: test_f_regression

def test_f_regression():
    """
    Test whether the F test yields meaningful results
    on a simple simulated regression problem
    """
    X, Y = test_dataset_classif(n_samples=50, n_features=20, k=5,
                                seed=seed)
    F, pv = f_regression(X, Y)
    assert (F > 0).all()
    assert (pv > 0).all()
    assert (pv < 1).all()
    assert (pv[:5] < 0.05).all()
    assert (pv[5:] > 1.e-4).all()
Contributor: aayushsaxena15, Project: projects, Lines: 13, Source: test_feature_select.py


Example 5: test_weight

def test_weight():
    """
    Test class weights
    """
    clf = svm.SVC()
    # we give a small weight to class 1
    clf.fit(X, Y, {1: 0.1})
    # so all predicted values belong to class 2
    assert_array_almost_equal(clf.predict(X), [2] * 6)

    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, param=[5, 1],
                                  seed=0)
    for clf in (linear_model.LogisticRegression(), svm.LinearSVC(), svm.SVC()):
        clf.fit(X_[: 180], y_[: 180], class_weight={0: 5})
        y_pred = clf.predict(X_[180:])
        assert np.sum(y_pred == y_[180:]) >= 11
Contributor: mosi, Project: scikit-learn, Lines: 16, Source: test_svm.py


Example 6: test_select_kbest_classif

def test_select_kbest_classif():
    """
    Test whether the relative univariate feature selection
    gets the correct items in a simple classification problem
    with the k best heuristic
    """
    X, Y = test_dataset_classif(n_samples=50, n_features=20, k=5,
                                seed=seed)
    univariate_filter = SelectKBest(f_classif, k=5)
    X_r = univariate_filter.fit(X, Y).transform(X)
    X_r2 = GenericUnivariateSelect(f_classif, mode='k_best',
                                   param=5).fit(X, Y).transform(X)
    assert_array_equal(X_r, X_r2)
    support = univariate_filter.get_support()
    gtruth = np.zeros(20)
    gtruth[:5] = 1
    assert_array_equal(support, gtruth)
Contributor: aayushsaxena15, Project: projects, Lines: 17, Source: test_feature_select.py


Example 7: test_select_fwe_classif

def test_select_fwe_classif():
    """
    Test whether the relative univariate feature selection
    gets the correct items in a simple classification problem
    with the FWE heuristic
    """
    X, Y = test_dataset_classif(n_samples=50, n_features=20, k=5,
                                seed=seed)
    univariate_filter = SelectFwe(f_classif, alpha=0.01)
    X_r = univariate_filter.fit(X, Y).transform(X)
    X_r2 = GenericUnivariateSelect(f_classif, mode='fwe',
                                   param=0.01).fit(X, Y).transform(X)
    assert_array_equal(X_r, X_r2)
    support = univariate_filter.get_support()
    gtruth = np.zeros(20)
    gtruth[:5] = 1
    assert np.sum(np.abs(support - gtruth)) < 2
Contributor: aayushsaxena15, Project: projects, Lines: 17, Source: test_feature_select.py


Example 8: test_grid_search_sparse_score_func

def test_grid_search_sparse_score_func():
    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, seed=0)

    clf = LinearSVC()
    cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, score_func=f1_score)
    cv.fit(X_[:180], y_[:180])
    y_pred = cv.predict(X_[180:])
    C = cv.best_estimator.C

    X_ = sp.csr_matrix(X_)
    clf = SparseLinearSVC()
    cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, score_func=f1_score)
    cv.fit(X_[:180], y_[:180])
    y_pred2 = cv.predict(X_[180:])
    C2 = cv.best_estimator.C

    assert_array_equal(y_pred, y_pred2)
    assert_equal(C, C2)
Contributor: AnneLaureF, Project: scikit-learn, Lines: 18, Source: test_grid_search.py


Example 9: test_grid_search_sparse

def test_grid_search_sparse():
    """Test that grid search works with both dense and sparse matrices"""
    X_, y_ = test_dataset_classif(n_samples=200, n_features=100, seed=0)

    clf = LinearSVC()
    cv = GridSearchCV(clf, {'C':[0.1, 1.0]})
    cv.fit(X_[:180], y_[:180])
    y_pred = cv.predict(X_[180:])
    C = cv.best_estimator.C

    X_ = sp.csr_matrix(X_)
    clf = SparseLinearSVC()
    cv = GridSearchCV(clf, {'C':[0.1, 1.0]})
    cv.fit(X_[:180], y_[:180])
    y_pred2 = cv.predict(X_[180:])
    C2 = cv.best_estimator.C

    assert np.mean(y_pred == y_pred2) >= .9
    assert_equal(C, C2)
Contributor: almet, Project: scikit-learn, Lines: 19, Source: test_grid_search.py


Example 10: SelectKBest

"""
==========================
Pipeline Anova SVM
==========================

Simple usages of pipeline:
- ANOVA SVM-C
"""

from scikits.learn import svm
from scikits.learn.datasets import samples_generator
from scikits.learn.feature_selection.univariate_selection import SelectKBest, f_regression
from scikits.learn.pipeline import Pipeline

# import some data to play with
X, y = samples_generator.test_dataset_classif(k=5)


# ANOVA SVM-C
# 1) anova filter, take 5 best ranked features 
anova_filter = SelectKBest(f_regression, k=5)
# 2) svm
clf = svm.SVC(kernel='linear')

anova_svm = Pipeline([anova_filter], clf)
anova_svm.fit(X, y)
anova_svm.predict(X)

Contributor: mszafran, Project: scikit-learn, Lines: 27, Source: feature_selection_pipeline.py


Example 11: SVC

"""
Recursive feature elimination with automatic tuning of the
number of features selected with cross-validation
"""
print __doc__
import numpy as np

from scikits.learn.svm import SVC
from scikits.learn.cross_val import StratifiedKFold
from scikits.learn.feature_selection import RFECV
from scikits.learn.datasets import samples_generator
from scikits.learn.metrics import zero_one

################################################################################
# Loading a dataset

X, y = samples_generator.test_dataset_classif(n_features=500, k=5, seed=0)

################################################################################
# Create the RFE object and compute a cross-validated score

svc = SVC(kernel='linear')
rfecv = RFECV(estimator=svc, n_features=2, percentage=0.1, loss_func=zero_one)
rfecv.fit(X, y, cv=StratifiedKFold(y, 2))

print 'Optimal number of features : %d' % rfecv.support_.sum()

import pylab as pl
pl.figure()
pl.semilogx(rfecv.n_features_, rfecv.cv_scores_)
pl.xlabel('Number of features selected')
pl.ylabel('Cross validation score (nb of misclassifications)')
Contributor: AndreaCensi, Project: scikit-learn, Lines: 31, Source: plot_rfe_with_cross_validation.py



Note: The scikits.learn.datasets.samples_generator.test_dataset_classif examples in this article were compiled by 纯净天空 from source-code and documentation hosting platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors, and distribution and use should follow the corresponding projects' licenses. Please do not republish without permission.

