• 设为首页
  • 点击收藏
  • 手机版
    手机扫一扫访问
    迪恩网络手机版
  • 关注官方公众号
    微信扫一扫关注
    迪恩网络公众号

Python decomposition.KernelPCA类代码示例

原作者: [db:作者] 来自: [db:来源] 收藏 邀请

本文整理汇总了Python中sklearn.decomposition.KernelPCA的典型用法代码示例。如果您正苦于以下问题:Python KernelPCA类的具体用法?Python KernelPCA怎么用?Python KernelPCA使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。



在下文中一共展示了KernelPCA类的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。

示例1: test_kernel_pca

def test_kernel_pca():
    """Smoke test: fit/transform/inverse_transform across solvers and kernels."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))

    def histogram(x, y, **kwargs):
        # Histogram kernel implemented as a callable.
        assert_equal(kwargs, {})  # no kernel_params that we didn't ask for
        return np.minimum(x, y).sum()

    for eigen_solver in ("auto", "dense", "arpack"):
        for kernel in ("linear", "rbf", "poly", histogram):
            # The callable histogram kernel produces a singular matrix inside
            # linalg.solve, so skip the inverse transform for it.
            # XXX use a least-squares approximation?
            fit_inverse = not callable(kernel)

            # fit_transform must agree with fit followed by transform,
            # up to a per-component sign flip.
            kpca = KernelPCA(4, kernel=kernel, eigen_solver=eigen_solver, fit_inverse_transform=fit_inverse)
            transformed_once = kpca.fit_transform(X_fit)
            transformed_twice = kpca.fit(X_fit).transform(X_fit)
            assert_array_almost_equal(np.abs(transformed_once), np.abs(transformed_twice))

            # non-regression test: previously, gamma would be 0 by default,
            # forcing all eigenvalues to 0 under the poly kernel
            assert_not_equal(transformed_once.size, 0)

            # New data must project into the same number of components.
            projected = kpca.transform(X_pred)
            assert_equal(projected.shape[1], transformed_once.shape[1])

            # Round-tripping back to input space preserves the shape.
            if fit_inverse:
                reconstructed = kpca.inverse_transform(projected)
                assert_equal(reconstructed.shape, X_pred.shape)
开发者ID:Claire-Ling-Liu,项目名称:scikit-learn,代码行数:34,代码来源:test_kernel_pca.py


示例2: test_kernel_pca

def test_kernel_pca():
    """Exercise KernelPCA fit/transform/inverse_transform for the built-in
    kernels across all eigen solvers.
    """
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))

    for eigen_solver in ("auto", "dense", "arpack"):
        for kernel in ("linear", "rbf", "poly"):
            # transform fit data
            kpca = KernelPCA(4, kernel=kernel, eigen_solver=eigen_solver,
                             fit_inverse_transform=True)
            X_fit_transformed = kpca.fit_transform(X_fit)
            X_fit_transformed2 = kpca.fit(X_fit).transform(X_fit)
            assert_array_almost_equal(np.abs(X_fit_transformed),
                                      np.abs(X_fit_transformed2))

            # non-regression test: previously, gamma would be 0 by default,
            # forcing all eigenvalues to 0 under the poly kernel.
            # Bug fix: comparing an ndarray against [] was vacuously "not
            # equal" and asserted nothing; check non-emptiness via .size.
            assert_not_equal(X_fit_transformed.size, 0)

            # transform new data
            X_pred_transformed = kpca.transform(X_pred)
            assert_equal(X_pred_transformed.shape[1],
                         X_fit_transformed.shape[1])

            # inverse transform: reconstruction must match the input shape
            X_pred2 = kpca.inverse_transform(X_pred_transformed)
            assert_equal(X_pred2.shape, X_pred.shape)
开发者ID:Big-Data,项目名称:scikit-learn,代码行数:27,代码来源:test_kernel_pca.py


示例3: kPCA_visualization1d

def kPCA_visualization1d(X, y):
    """Project X with linear KernelPCA and plot per-class histograms of
    the first component.

    X : feature matrix accepted by KernelPCA.fit_transform
    y : binary labels (1 vs. anything else), one per row of X

    NOTE(review): despite the "1d" in the name, n_components=2 is requested
    and only the first component is plotted; the PCA projection is computed
    but never used -- kept for parity with the original.
    """
    kpca = KernelPCA(kernel="linear", fit_inverse_transform=True, gamma=10, n_components=2)
    X_kpca = kpca.fit_transform(X)
    X_back = kpca.inverse_transform(X_kpca)
    pca = PCA(n_components=1)
    X_pca = pca.fit_transform(X)

    # Split the first kPCA component by class label.
    class_1 = []
    class_0 = []
    for i in range(len(y)):
        first_component = X_kpca[i][0]
        if y[i] == 1:
            class_1.append(first_component)
        else:
            class_0.append(first_component)

    # Bug fix: the original used Python 2 print statements, a SyntaxError
    # under Python 3.
    print("check")
    print(class_1[:10])

    from matplotlib import pyplot

    # Overlaid 50-bin histograms, one per class.
    pyplot.hist(class_1, 50, alpha=0.5, label='class 1')
    pyplot.hist(class_0, 50, alpha=0.5, label='class 0')

    pyplot.legend(loc='upper right')
    pyplot.show()
开发者ID:DavidZomerdijk,项目名称:play_notebook,代码行数:28,代码来源:PCA_visualization.py


示例4: test_compare_clinical_kernel

    def test_compare_clinical_kernel(self):
        """A kernelized survival SVM with the clinical kernel must match a
        linear survival SVM trained on the explicit KernelPCA feature map.
        """
        # assumes WHAS500_FILE is the WHAS500 survival dataset with event
        # indicator 'fstat' and follow-up time 'lenfol' -- TODO confirm
        x_full, y, _, _ = load_arff_file(WHAS500_FILE, ['fstat', 'lenfol'], '1',
                                         standardize_numeric=False, to_numeric=False)

        # Fit the clinical kernel on the raw (non-standardized) data.
        trans = ClinicalKernelTransform()
        trans.fit(x_full)

        x = encode_categorical(standardize(x_full))

        # Explicit feature map: project into the kernel's eigenbasis.
        kpca = KernelPCA(kernel=trans.pairwise_kernel)
        xt = kpca.fit_transform(x)

        # Linear SVM on explicit features ...
        nrsvm = FastSurvivalSVM(optimizer='rbtree', tol=1e-8, max_iter=1000, random_state=0)
        nrsvm.fit(xt, y)

        # ... versus kernelized SVM on the raw encoded data.
        rsvm = FastKernelSurvivalSVM(optimizer='rbtree', kernel=trans.pairwise_kernel,
                                     tol=1e-8, max_iter=1000, random_state=0)
        rsvm.fit(x, y)

        pred_nrsvm = nrsvm.predict(kpca.transform(x))
        pred_rsvm = rsvm.predict(x)

        self.assertEqual(len(pred_nrsvm), len(pred_rsvm))

        # Equivalent models must rank samples identically, hence equal
        # concordance indices (and identical tie/pair counts).
        c1 = concordance_index_censored(y['fstat'], y['lenfol'], pred_nrsvm)
        c2 = concordance_index_censored(y['fstat'], y['lenfol'], pred_rsvm)

        self.assertAlmostEqual(c1[0], c2[0])
        self.assertTupleEqual(c1[1:], c2[1:])
开发者ID:tum-camp,项目名称:survival-support-vector-machine,代码行数:29,代码来源:test_survival_svm.py


示例5: perform_pca

    def perform_pca(self):
        """consider principle components as covariates, will be appended to self.X

        num_pcs : int
            Number of principle components to use as covariates

        
        K = self._centerer.fit_transform(K)

        # compute eigenvectors
        if self.eigen_solver == 'auto':
            if K.shape[0] > 200 and n_components < 10:
                eigen_solver = 'arpack'
            else:
                eigen_solver = 'dense'
        else:
            eigen_solver = self.eigen_solver

        if eigen_solver == 'dense':
            self.lambdas_, self.alphas_ = linalg.eigh(
                K, eigvals=(K.shape[0] - n_components, K.shape[0] - 1))
        elif eigen_solver == 'arpack':
            self.lambdas_, self.alphas_ = eigsh(K, n_components,
                                                which="LA",
                                                tol=self.tol,
                                                maxiter=self.max_iter)

        # sort eigenvectors in descending order
        indices = self.lambdas_.argsort()[::-1]
        self.lambdas_ = self.lambdas_[indices]
        self.alphas_ = self.alphas_[:, indices]

        # remove eigenvectors with a zero eigenvalue
        if self.remove_zero_eig or self.n_components is None:
            self.alphas_ = self.alphas_[:, self.lambdas_ > 0]
            self.lambdas_ = self.lambdas_[self.lambdas_ > 0]

        X_transformed = self.alphas_ * np.sqrt(self.lambdas_)

        """
        #TODO: implement numerics code directly, based on above template
        # NOTE: the docstring above deliberately embeds sklearn's KernelPCA
        # internals as a template for that TODO; it is documentation, not code.

        logging.info("performing PCA, keeping %i principle components" % (self.num_pcs))
        tt0 = time.time()
        if False:
            # Dead branch kept as the sklearn-based alternative implementation.
            # NOTE(review): pca._fit_transform is a private sklearn API.
            pca = KernelPCA(n_components=self.num_pcs)
            pca._fit_transform(self.K)
            self.pcs = pca.alphas_ * np.sqrt(pca.lambdas_)
        else:
            import scipy.linalg as la
            # eigh returns eigenvalues in ascending order; reverse both
            # eigenvalues and eigenvectors so the leading components come first.
            [s,u]=la.eigh(self.K)
            s=s[::-1]
            u=u[:,::-1]
            # Keep the top num_pcs eigenvectors as the principal components.
            self.pcs = u[:,0:self.num_pcs]
        assert self.pcs.shape[1] == self.num_pcs

        # Append the principal components to the design matrix as covariates.
        self.X = sp.hstack((self.X, self.pcs))  

        logging.info("...done. PCA time %.2f s" % (float(time.time() - tt0)))
开发者ID:42binwang,项目名称:FaST-LMM,代码行数:59,代码来源:feature_selection_cv.py


示例6: pca

def pca(X, gamma1):
    """Apply RBF-kernel PCA with the given gamma and return the projection.

    Prints the input shape and the fitted eigenvector/eigenvalue shapes as
    a debugging aid.
    """
    model = KernelPCA(kernel='rbf', fit_inverse_transform=False, gamma=gamma1)
    projection = model.fit_transform(X)
    print('X', X.shape)
    print('alphas', model.alphas_.shape)
    print('lambdas', model.lambdas_.shape)
    return projection
开发者ID:fferri,项目名称:wir,代码行数:8,代码来源:test1.py


示例7: isomap

  def isomap(self, num_dims=None, directed=None):
    '''Isomap embedding.

    num_dims : dimension of embedded coordinates, defaults to input dimension
    directed : used for .shortest_path() calculation
    '''
    # Turn geodesic distances into the (pre-centering) Gram matrix that
    # classical MDS expects; KernelPCA with a precomputed kernel does the rest.
    geodesic = self.shortest_path(directed=directed)
    gram = -0.5 * geodesic ** 2
    embedder = KernelPCA(n_components=num_dims, kernel='precomputed')
    return embedder.fit_transform(gram)
开发者ID:all-umass,项目名称:graphs,代码行数:9,代码来源:embed.py


示例8: __init__

	def __init__(self,corpus,n_components=2,kernel=None):
		"""Fit a (kernel) PCA projection of the corpus' stylometric features.

		corpus       : passed through to StyloClassifier.__init__
		n_components : number of components for plain PCA
		kernel       : sklearn kernel name; falsy selects plain PCA

		NOTE(review): on the KernelPCA branch n_components is stored but not
		passed to the estimator, and gamma is hard-coded to 10 -- confirm
		whether that is intended.
		"""
		StyloClassifier.__init__(self,corpus)
		# Feature matrix taken from the columns selected by the base class.
		data = self.data_frame[self.cols].values
		self.n_components = n_components
		self.kernel = kernel
		if not kernel:
			self.pca = PCA(n_components=self.n_components)
		else:
			self.pca = KernelPCA(kernel=kernel, gamma=10)
		# Standardize features before projecting; keep the projected data.
		self.pca_data = self.pca.fit_transform(StandardScaler().fit_transform(data))
开发者ID:Skeezix333,项目名称:stylometry,代码行数:10,代码来源:cluster.py


示例9: test_kernel_pca_consistent_transform

def test_kernel_pca_consistent_transform():
    # X_fit_ needs to retain the old, unmodified copy of X: mutating the
    # training data after fit() must not change what transform() returns.
    rng = np.random.RandomState(0)
    X = rng.rand(10, 10)
    kpca = KernelPCA(random_state=rng).fit(X)
    before_mutation = kpca.transform(X)

    X_snapshot = X.copy()
    X[:, 0] = 666  # clobber the array the model was fitted on
    after_mutation = kpca.transform(X_snapshot)
    assert_array_almost_equal(before_mutation, after_mutation)
开发者ID:MartinThoma,项目名称:scikit-learn,代码行数:11,代码来源:test_kernel_pca.py


示例10: getProjectionMatrixKPCA

def getProjectionMatrixKPCA(dim=50):
    """ Kernel PCA : see paper for detailed description"""
    # Binary ancestry matrix over the label hierarchy: row = label,
    # column set to 1 for every node on the label's path to the root.
    n_labels = len(labelDict)
    X = np.zeros((n_labels, n_labels))
    for item in labelDict:
        for ancestor in getPathToRoot(item):
            X[labelIndex[item]][labelIndex[ancestor]] = 1
    kpca = KernelPCA(n_components=dim, fit_inverse_transform=True)
    kpca.fit(X)
    # Return the fitted model together with its eigenvector matrix.
    return kpca, kpca.alphas_
开发者ID:sushobhannayak,项目名称:cssag,代码行数:11,代码来源:hierarchicalMultiLabelLearning.py


示例11: test_kernel_pca_n_components

def test_kernel_pca_n_components():
    """fit(X).transform(Y) must produce exactly n_components columns."""
    rng = np.random.RandomState(0)
    X_fit = rng.random_sample((5, 4))
    X_pred = rng.random_sample((2, 4))

    for solver in ("dense", "arpack"):
        for n_comp in [1, 2, 4]:
            model = KernelPCA(n_components=n_comp, eigen_solver=solver)
            out_shape = model.fit(X_fit).transform(X_pred).shape
            assert_equal(out_shape, (2, n_comp))
开发者ID:Ablat,项目名称:scikit-learn,代码行数:11,代码来源:test_kernel_pca.py


示例12: main

def main():
    """Load an embedding, reduce it with KernelPCA then t-SNE, and plot it."""
    definition = load_definition()
    embedding = np.load(os.path.join(ROOT, definition.embedding))
    uuids = np.load(os.path.join(ROOT, definition.uuids))

    # Two-stage reduction: kernel PCA first, then t-SNE on the result.
    reduced = KernelPCA(**definition.pca).fit_transform(embedding)
    reduced = TSNE(**definition.tsne).fit_transform(reduced)

    plot_vectors(reduced, uuids, definition.sources, definition.output)
开发者ID:danijar,项目名称:semantic,代码行数:11,代码来源:visualize.py


示例13: fit

 def fit(self,X, num, method='dijkstra'):
     """Isomap-style embedding: kNN graph -> shortest paths -> KernelPCA.

     X      : input samples
     num    : neighbor count, also reused as the embedding dimension
     method : shortest-path algorithm ('dijkstra' or any shortest_path method)
     """
     # Build the k-nearest-neighbor graph over the samples.
     graph = KNN(num).fit(X)
     # Geodesic (graph) distances between all pairs of points.
     if method == 'dijkstra':
         distances = dijkstra(graph)
     else:
         distances = shortest_path(graph, method=method)
     # Kernel PCA plays the role of multidimensional scaling here.
     embedder = KernelPCA(n_components=num)
     return embedder.fit_transform(distances)
开发者ID:saromanov,项目名称:manifold,代码行数:12,代码来源:isomap.py


示例14: kernelPCA

def kernelPCA(data, labels, new_dimension):
    """Reduce `data` to `new_dimension` dimensions with RBF-like kernel PCA.

    data          : dense array or sparse matrix (densified via .toarray())
    labels        : accepted for interface parity with sibling reducers; unused
    new_dimension : number of components to keep

    Returns the tuple (reduced_data, elapsed_seconds).
    """
    # Bug fix: the original used a Python 2 print statement, which is a
    # SyntaxError under Python 3.
    print("start kernel pca...")

    # Densify sparse input; KernelPCA needs a dense array.
    if hasattr(data, "toarray"):
        data = data.toarray()

    start = time.time()
    pca = KernelPCA(fit_inverse_transform=True, gamma=10, n_components=new_dimension, alpha=2)

    reduced = pca.fit_transform(data)
    end = time.time()
    return (reduced, end - start)
开发者ID:sebastian-alfers,项目名称:master-thesis,代码行数:12,代码来源:dimensionality_reduction.py


示例15: test_kernel_pca_deterministic_output

def test_kernel_pca_deterministic_output():
    """Repeated fits with the same RNG must yield identical embeddings."""
    rng = np.random.RandomState(0)
    X = rng.rand(10, 10)

    for solver in ('arpack', 'dense'):
        outputs = np.zeros((20, 2))
        for trial in range(20):
            model = KernelPCA(n_components=2, eigen_solver=solver,
                              random_state=rng)
            outputs[trial, :] = model.fit_transform(X)[0]
        # Every row must equal the first one.
        expected = np.tile(outputs[0, :], 20).reshape(20, 2)
        assert_allclose(outputs, expected)
开发者ID:daniel-perry,项目名称:scikit-learn,代码行数:13,代码来源:test_kernel_pca.py


示例16: reduce_kpca

def reduce_kpca(X, kern, retall=False):
    """ reduce_kpca(X, kern, retall=False)
    Reduce dim by Kernel PCA

    X      : input data matrix
    kern   : kernel name passed to sklearn KernelPCA
    retall : when True, also return the fitted KernelPCA object

    Returns (X_kpca, X_back) -- the projection and its reconstruction in
    input space -- plus the fitted model when retall is True.
    """

    kpca = KernelPCA(kernel=kern, fit_inverse_transform=True)
    X_kpca = kpca.fit_transform(X)
    X_back = kpca.inverse_transform(X_kpca)  # reconstruction in input space

    if not retall:
        return X_kpca, X_back
    else:
        return X_kpca, X_back, kpca
开发者ID:makokal,项目名称:pycss,代码行数:13,代码来源:recog_exps.py


示例17: isomap

def isomap(X, n_neighbors, metric):
    """Isomap embedding into 2 dimensions.

    X           : input samples
    n_neighbors : neighbor count for the kNN graph
    metric      : distance metric passed to kneighbors_graph

    Based on sklearn,
    Author: Jake Vanderplas  -- <[email protected]>
    License: BSD, (C) 2011
    """
    # Bug fix: the kNN graph was built from an undefined name `D`;
    # it must be built from the input parameter X.
    kng = kneighbors_graph(X, n_neighbors=n_neighbors, metric=metric)
    dist_matrix_ = graph_shortest_path(kng, method='auto', directed=False)
    kernel_pca_ = KernelPCA(n_components=2, kernel="precomputed", eigen_solver='auto')
    # Classical-MDS trick: -0.5 * squared geodesic distances as the kernel.
    G = dist_matrix_ ** 2
    G *= -0.5
    return kernel_pca_.fit_transform(G)
开发者ID:dchichkov,项目名称:pixel-coords-recover,代码行数:13,代码来源:v.py


示例18: RunKPCAScikit

    def RunKPCAScikit(q):
      """Benchmark scikit-learn KernelPCA on self.dataset.

      Puts the elapsed time onto queue `q` (or -1 on any failure) and
      returns the same value. `options`, `self.dataset` and `self.verbose`
      are free variables from the enclosing scope -- TODO confirm.
      """
      totalTimer = Timer()

      # Load input dataset.
      Log.Info("Loading dataset", self.verbose)
      data = np.genfromtxt(self.dataset, delimiter=',')

      with totalTimer:
        # Get the new dimensionality, if it is necessary.
        dimension = re.search('-d (\d+)', options)
        if not dimension:
          d = data.shape[1]
        else:
          d = int(dimension.group(1))
          if (d > data.shape[1]):
            Log.Fatal("New dimensionality (" + str(d) + ") cannot be greater "
              + "than existing dimensionality (" + str(data.shape[1]) + ")!")
            q.put(-1)
            return -1

        # Get the kernel type and make sure it is valid.
        kernel = re.search("-k ([^\s]+)", options)
        try:
          if not kernel:
            Log.Fatal("Choose kernel type, valid choices are 'linear'," +
                  " 'hyptan' and 'polynomial'.")
            q.put(-1)
            return -1
          elif kernel.group(1) == "linear":
            model = KernelPCA(n_components=d, kernel="linear")
          elif kernel.group(1) == "hyptan":
            # 'hyptan' maps to sklearn's sigmoid (tanh) kernel.
            model = KernelPCA(n_components=d, kernel="sigmoid")
          elif kernel.group(1) == "polynomial":
            # Optional polynomial degree via '-D <int>'; defaults to 1.
            degree = re.search('-D (\d+)', options)
            degree = 1 if not degree else int(degree.group(1))

            model = KernelPCA(n_components=d, kernel="poly", degree=degree)
          else:
            Log.Fatal("Invalid kernel type (" + kernel.group(1) + "); valid " +
                "choices are 'linear', 'hyptan' and 'polynomial'.")
            q.put(-1)
            return -1

          out = model.fit_transform(data)
        except Exception as e:
          # Any failure during fitting is reported as -1, not propagated,
          # so the benchmark harness records a failed run instead of crashing.
          q.put(-1)
          return -1

      time = totalTimer.ElapsedTime()
      q.put(time)
      return time
开发者ID:MarcosPividori,项目名称:benchmarks,代码行数:51,代码来源:kernel_pca.py


示例19: reduceDataset

 def reduceDataset(self,nr=3,method='PCA'):
     '''It reduces the dimensionality of a given dataset using different techniques provided by Sklearn library
      Methods available:
                         'PCA'
                         'FactorAnalysis'
                         'KPCArbf','KPCApoly'
                         'KPCAcosine','KPCAsigmoid'
                         'IPCA'
                         'FastICADeflation'
                         'FastICAParallel'
                         'Isomap'
                         'LLE'
                         'LLEmodified'
                         'LLEltsa'
     '''
     # NOTE(review): 'Isomap'/'LLE'* are listed above but have no branch
     # below; an unrecognized method falls through and raises a NameError
     # on `reduced` -- confirm whether those cases live elsewhere.
     dataset=self.ModelInputs['Dataset']
     #dataset=self.dataset[Model.in_columns]
     #dataset=self.dataset[['Humidity','TemperatureF','Sea Level PressureIn','PrecipitationIn','Dew PointF','Value']]
     #PCA
     if method=='PCA':
         sklearn_pca = sklearnPCA(n_components=nr)
         reduced = sklearn_pca.fit_transform(dataset)
     #Factor Analysis
     elif method=='FactorAnalysis':
         fa=FactorAnalysis(n_components=nr)
         reduced=fa.fit_transform(dataset)
     #kernel pca with rbf kernel
     elif method=='KPCArbf':
         kpca=KernelPCA(nr,kernel='rbf')
         reduced=kpca.fit_transform(dataset)
     #kernel pca with poly kernel
     elif method=='KPCApoly':
         kpca=KernelPCA(nr,kernel='poly')
         reduced=kpca.fit_transform(dataset)
     #kernel pca with cosine kernel
     elif method=='KPCAcosine':
         kpca=KernelPCA(nr,kernel='cosine')
         reduced=kpca.fit_transform(dataset)
     #kernel pca with sigmoid kernel
     elif method=='KPCAsigmoid':
         kpca=KernelPCA(nr,kernel='sigmoid')
         reduced=kpca.fit_transform(dataset)
     #ICA
     elif method=='IPCA':
         ipca=IncrementalPCA(nr)
         reduced=ipca.fit_transform(dataset)
     #Fast ICA
     elif method=='FastICAParallel':
         fip=FastICA(nr,algorithm='parallel')
         reduced=fip.fit_transform(dataset)
     elif method=='FastICADeflation':
         fid=FastICA(nr,algorithm='deflation')
         reduced=fid.fit_transform(dataset)
     elif method == 'All':
         # Delegate to dimensionalityReduction, which presumably runs every
         # technique at once -- see that method for the stored keys.
         self.dimensionalityReduction(nr=nr)
         return self
     
     # Store the reduced dataset under the method name and record it.
     self.ModelInputs.update({method:reduced})
     self.datasetsAvailable.append(method)
     return self
开发者ID:UIUC-SULLIVAN,项目名称:ThesisProject_Andrea_Mattera,代码行数:60,代码来源:Classes.py


示例20: generate_kpca_compression

def generate_kpca_compression(X, n_components=16):
    """
    Compresses the data using sklearn KernelPCA implementation.

    :param X: Data (n_samples, n_features)
    :param n_components: Number of dimensions for PCA to keep

    :return: X_prime (the compressed representation), pca
    """
    model = KernelPCA(n_components=n_components, kernel='rbf', eigen_solver='arpack', fit_inverse_transform=False)
    model.fit(X)
    compressed = model.transform(X)
    return compressed, model
开发者ID:TieSKey,项目名称:database_dcnn,代码行数:14,代码来源:compression.py



注:本文中的sklearn.decomposition.KernelPCA类示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


鲜花

握手

雷人

路过

鸡蛋
该文章已有0人参与评论

请发表评论

全部评论

专题导读
上一篇:
Python decomposition.LatentDirichletAllocation类代码示例发布时间:2022-05-27
下一篇:
Python decomposition.IncrementalPCA类代码示例发布时间:2022-05-27
热门推荐
阅读排行榜

扫描微信二维码

查看手机版网站

随时了解更新最新资讯

139-2527-9053

在线客服(服务时间 9:00~18:00)

在线QQ客服
地址:深圳市南山区西丽大学城创智工业园
电邮:jeky_zhao#qq.com
移动电话:139-2527-9053

Powered by 互联科技 X3.4© 2001-2023 极客世界.|Sitemap