This article collects typical usage examples of the Python class shogun.Features.RealFeatures. If you have been wondering what RealFeatures is for, how to use it, or what real code that uses it looks like, the hand-picked class examples below should help.
The following shows 20 code examples of the RealFeatures class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
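Before the examples, here is a minimal sketch of the pattern they all share: wrapping a NumPy matrix of doubles in a RealFeatures object. It assumes the legacy shogun 1.x/2.x modular Python interface (shogun.Features) that the examples below target, and that data is laid out one example per column, as in Examples 3 and 9; the matrix values are made up for illustration.

from numpy import array, float64
from shogun.Features import RealFeatures

# 3 dimensions x 4 examples; shogun expects one feature vector per column
mat = array([[1.0, 2.0, 3.0, 4.0],
             [0.5, 0.5, 0.5, 0.5],
             [9.0, 8.0, 7.0, 6.0]], dtype=float64)

feats = RealFeatures(mat)
print feats.get_num_features()    # 3
print feats.get_num_vectors()     # 4
print feats.get_feature_matrix()  # copy of the wrapped matrix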
Example 1: createFeatures

def createFeatures(self, examples):
    """Converts numpy arrays or sequences into shogun features"""
    if self.kparam['name'] == 'gauss' or self.kparam['name'] == 'linear' or self.kparam['name'] == 'poly':
        examples = numpy.array(examples)
        feats = RealFeatures(examples)

    elif self.kparam['name'] == 'wd' or self.kparam['name'] == 'localalign' or self.kparam['name'] == 'localimprove':
        #examples = non_atcg_convert(examples, nuc_con)
        feats = StringCharFeatures(examples, DNA)

    elif self.kparam['name'] == 'spec':
        #examples = non_atcg_convert(examples, nuc_con)
        feats = StringCharFeatures(examples, DNA)

        # kparam, kname and train_mode are presumably provided by the
        # enclosing class/module state in the original project
        wf = StringUlongFeatures( feats.get_alphabet() )
        wf.obtain_from_char(feats, kparam['degree']-1, kparam['degree'], 0, kname=='cumspec')
        del feats

        if train_mode:
            preproc = SortUlongString()
            preproc.init(wf)
        wf.add_preproc(preproc)
        ret = wf.apply_preproc()

        feats = wf
    else:
        print 'Unknown kernel %s' % self.kparam['name']
        raise ValueError

    return feats
Developer: boya888 | Project: oqtans_tools | Lines: 29 | Source: EasySVM.py
Example 2: prune_var_sub_mean

def prune_var_sub_mean ():
    print 'PruneVarSubMean'
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.PreProc import PruneVarSubMean

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    preproc=PruneVarSubMean()
    preproc.init(feats_train)
    feats_train.add_preproc(preproc)
    feats_train.apply_preproc()
    feats_test.add_preproc(preproc)
    feats_test.apply_preproc()

    width=1.4
    size_cache=10
    kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train=kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()
Developer: memimo | Project: shogun-liblinear | Lines: 25 | Source: preproc_prunevarsubmean_modular.py
Example 3: features_simple_modular

def features_simple_modular(A=matrixA,B=matrixB,C=matrixC):
    a=RealFeatures(A)
    b=LongIntFeatures(B)
    c=ByteFeatures(C)

    # or 16bit wide ...
    #feat1 = f.ShortFeatures(N.zeros((10,5),N.short))
    #feat2 = f.WordFeatures(N.zeros((10,5),N.uint16))

    # print some statistics about a

    # get first feature vector and set it
    a.set_feature_vector(array([1,4,0,0,0,9], dtype=float64), 0)

    # get matrices
    a_out = a.get_feature_matrix()
    b_out = b.get_feature_matrix()
    c_out = c.get_feature_matrix()

    assert(all(a_out==A))
    assert(all(b_out==B))
    assert(all(c_out==C))
    return a_out,b_out,c_out,a,b,c
Developer: AsherBond | Project: shogun | Lines: 28 | Source: features_simple_modular.py
Example 4: norm_one

def norm_one ():
    print 'NormOne'
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.PreProc import NormOne

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    preproc=NormOne()
    preproc.init(feats_train)
    feats_train.add_preproc(preproc)
    feats_train.apply_preproc()
    feats_test.add_preproc(preproc)
    feats_test.apply_preproc()

    width=1.4
    size_cache=10
    kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train=kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()
Developer: memimo | Project: shogun-liblinear | Lines: 25 | Source: preproc_normone_modular.py
Example 5: modelselection_grid_search_kernel

def modelselection_grid_search_kernel():
    num_subsets=3
    num_vectors=20
    dim_vectors=3

    # create some (nonsense) data
    matrix=rand(dim_vectors, num_vectors)

    # create num_features 2-dimensional vectors
    features=RealFeatures()
    features.set_feature_matrix(matrix)

    # create labels, two classes
    labels=BinaryLabels(num_vectors)
    for i in range(num_vectors):
        labels.set_label(i, 1 if i%2==0 else -1)

    # create svm
    classifier=LibSVM()

    # splitting strategy
    splitting_strategy=StratifiedCrossValidationSplitting(labels, num_subsets)

    # accuracy evaluation
    evaluation_criterion=ContingencyTableEvaluation(ACCURACY)

    # cross validation class for evaluation in model selection
    cross=CrossValidation(classifier, features, labels, splitting_strategy, evaluation_criterion)
    cross.set_num_runs(1)

    # print all parameters available for model selection
    # Don't worry if yours is not included, simply write to the mailing list
    classifier.print_modsel_params()

    # model parameter selection
    param_tree=create_param_tree()
    param_tree.print_tree()

    grid_search=GridSearchModelSelection(param_tree, cross)

    print_state=True
    best_combination=grid_search.select_model(print_state)
    print("best parameter(s):")
    best_combination.print_tree()

    best_combination.apply_to_machine(classifier)

    # larger number of runs to have tighter confidence intervals
    cross.set_num_runs(10)
    cross.set_conf_int_alpha(0.01)
    result=cross.evaluate()
    print("result: ")
    result.print_result()

    return 0
Developer: ratschlab | Project: ASP | Lines: 55 | Source: modelselection_grid_search_kernel.py
Example 6: features_dense_zero_copy_modular

def features_dense_zero_copy_modular (in_data=data):
    feats = None
    if numpy.__version__ >= '1.5':
        feats=numpy.array(in_data, dtype=float64, order='F')
        a=RealFeatures()
        a.frombuffer(feats, False)

        b=numpy.array(a, copy=False)
        c=numpy.array(a, copy=True)

        d=RealFeatures()
        d.frombuffer(a, False)

        e=RealFeatures()
        e.frombuffer(a, True)

        a[:,0]=0
        print a[0:4]
        print b[0:4]
        print c[0:4]
        print d[0:4]
        print e[0:4]
    else:
        print "numpy version >= 1.5 is needed"

    return feats
Developer: AlexBinder | Project: shogun | Lines: 27 | Source: features_dense_zero_copy_modular.py
Example 7: distance_mahalanobis_modular

def distance_mahalanobis_modular (fm_train_real = traindat, fm_test_real = testdat):
    from shogun.Features import RealFeatures
    from shogun.Distance import MahalanobisDistance

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    distance = MahalanobisDistance(feats_test, feats_train)
    for i in range(feats_test.get_num_vectors()):
        for j in range(feats_train.get_num_vectors()):
            dm = distance.distance(i, j)
            print dm
Developer: flxb | Project: shogun | Lines: 13 | Source: distance_mahalanobis_modular.py
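A note on Example 7: looping over every (i, j) pair works, but the same pairwise distances can be pulled in one call with get_distance_matrix(), the method Examples 18 and 20 below use on a EuclideanDistance. A brief sketch, assuming the same fm_train_real/fm_test_real inputs as above:

from shogun.Features import RealFeatures
from shogun.Distance import MahalanobisDistance

feats_train = RealFeatures(fm_train_real)
feats_test = RealFeatures(fm_test_real)

distance = MahalanobisDistance(feats_test, feats_train)
dm = distance.get_distance_matrix()  # all pairwise test/train distances at once
print dm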
Example 8: prepare_feats

def prepare_feats(desc, l=2, as_shogun=False):
    if l==2: desc = np.sqrt(desc) # bias not affected by sqrt
    norms = np.apply_along_axis(np.linalg.norm, 0, desc[:-1,:], l) # leave bias alone
    np.seterr(divide='ignore', invalid='ignore')
    desc[:-1,:]=desc[:-1,:]/norms # leave bias alone
    np.seterr(divide='warn', invalid='warn')
    if l==1: desc=desc[:-1,:] # removing bias dim if L1 -> nonlinear TODO find better way...
    desc[np.isnan(desc)]=0 # handle NaNs
    if as_shogun:
        desc=RealFeatures(desc.astype('float'))
    return desc
Developer: jypuigbo | Project: robocup-code | Lines: 16 | Source: run_detector.py
Example 9: features_dense_real_modular

def features_dense_real_modular(A=matrix):
    # ... of type Real, LongInt and Byte
    a = RealFeatures(A)

    # print(some statistics about a)
    # print(a.get_num_vectors())
    # print(a.get_num_features())

    # get first feature vector and set it
    # print(a.get_feature_vector(0))
    a.set_feature_vector(array([1, 4, 0, 0, 0, 9], dtype=float64), 0)

    # get matrix
    a_out = a.get_feature_matrix()

    assert all(a_out == A)
    return a_out
Developer: joseph-chan | Project: rqpersonalsvn | Lines: 18 | Source: features_dense_real_modular.py
Example 10: features_director_dot_modular

def features_director_dot_modular (fm_train_real, fm_test_real,
                                   label_train_twoclass, C, epsilon):
    from shogun.Features import RealFeatures, SparseRealFeatures, BinaryLabels
    from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC_DUAL
    from shogun.Mathematics import Math_init_random
    Math_init_random(17)

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)
    labels=BinaryLabels(label_train_twoclass)

    # NumpyFeatures is presumably a director-features class defined earlier
    # in the original example file
    dfeats_train=NumpyFeatures(fm_train_real)
    dfeats_test=NumpyFeatures(fm_test_real)
    dlabels=BinaryLabels(label_train_twoclass)

    print feats_train.get_computed_dot_feature_matrix()
    print dfeats_train.get_computed_dot_feature_matrix()

    svm=LibLinear(C, feats_train, labels)
    svm.set_liblinear_solver_type(L2R_L2LOSS_SVC_DUAL)
    svm.set_epsilon(epsilon)
    svm.set_bias_enabled(True)
    svm.train()

    svm.set_features(feats_test)
    svm.apply().get_labels()
    predictions = svm.apply()

    dfeats_train.__disown__()
    dfeats_train.parallel.set_num_threads(1)
    dsvm=LibLinear(C, dfeats_train, dlabels)
    dsvm.set_liblinear_solver_type(L2R_L2LOSS_SVC_DUAL)
    dsvm.set_epsilon(epsilon)
    dsvm.set_bias_enabled(True)
    dsvm.train()

    dfeats_test.__disown__()
    dfeats_test.parallel.set_num_threads(1)
    dsvm.set_features(dfeats_test)
    dsvm.apply().get_labels()
    dpredictions = dsvm.apply()

    return predictions, svm, predictions.get_labels()
Developer: AlexBinder | Project: shogun | Lines: 44 | Source: features_director_dot_modular.py
Example 11: kernel_anova_modular

def kernel_anova_modular (fm_train_real=traindat,fm_test_real=testdat,cardinality=2, size_cache=10):
    from shogun.Kernel import ANOVAKernel
    from shogun.Features import RealFeatures

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    kernel=ANOVAKernel(feats_train, feats_train, cardinality, size_cache)

    for i in range(0,feats_train.get_num_vectors()):
        for j in range(0,feats_train.get_num_vectors()):
            k1 = kernel.compute_rec1(i,j)
            k2 = kernel.compute_rec2(i,j)
            #if abs(k1-k2) > 1e-10:
            #    print "|%s|%s|" % (k1, k2)

    km_train=kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()
    return km_train, km_test, kernel
Developer: ashish-sadh | Project: shogun | Lines: 20 | Source: kernel_anova_modular.py
Example 12: preprocessor_randomfouriergausspreproc_modular

def preprocessor_randomfouriergausspreproc_modular (fm_train_real=traindat,fm_test_real=testdat,width=1.4,size_cache=10):
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.Preprocessor import RandomFourierGaussPreproc

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    preproc=RandomFourierGaussPreproc()
    preproc.init(feats_train)
    feats_train.add_preprocessor(preproc)
    feats_train.apply_preprocessor()
    feats_test.add_preprocessor(preproc)
    feats_test.apply_preprocessor()

    kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train=kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()

    return km_train,km_test,kernel
Developer: ashish-sadh | Project: shogun | Lines: 22 | Source: preprocessor_randomfouriergausspreproc_modular.py
Example 13: preproc_prunevarsubmean_modular

def preproc_prunevarsubmean_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.4, size_cache=10):
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.PreProc import PruneVarSubMean

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    preproc = PruneVarSubMean()
    preproc.init(feats_train)
    feats_train.add_preproc(preproc)
    feats_train.apply_preproc()
    feats_test.add_preproc(preproc)
    feats_test.apply_preproc()

    kernel = Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train = kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    return km_train, km_test, kernel
Developer: haipengwang | Project: shogun | Lines: 22 | Source: preproc_prunevarsubmean_modular.py
Example 14: preprocessor_normone_modular

def preprocessor_normone_modular (fm_train_real=traindat,fm_test_real=testdat,width=1.4,size_cache=10):
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.Preprocessor import NormOne

    feats_train=RealFeatures(fm_train_real)
    feats_test=RealFeatures(fm_test_real)

    preprocessor=NormOne()
    preprocessor.init(feats_train)
    feats_train.add_preprocessor(preprocessor)
    feats_train.apply_preprocessor()
    feats_test.add_preprocessor(preprocessor)
    feats_test.apply_preprocessor()

    kernel=Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train=kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test=kernel.get_kernel_matrix()

    return km_train,km_test,kernel
Developer: Anshul-Bansal | Project: gsoc | Lines: 23 | Source: preprocessor_normone_modular.py
Example 15: preproc_logplusone_modular

def preproc_logplusone_modular(fm_train_real=traindat, fm_test_real=testdat, width=1.4, size_cache=10):
    from shogun.Kernel import Chi2Kernel
    from shogun.Features import RealFeatures
    from shogun.PreProc import LogPlusOne

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    preproc = LogPlusOne()
    preproc.init(feats_train)
    feats_train.add_preproc(preproc)
    feats_train.apply_preproc()
    feats_test.add_preproc(preproc)
    feats_test.apply_preproc()

    kernel = Chi2Kernel(feats_train, feats_train, width, size_cache)

    km_train = kernel.get_kernel_matrix()
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    return km_train, km_test, kernel
Developer: haipengwang | Project: shogun | Lines: 23 | Source: preproc_logplusone_modular.py
Example 16: serialization_complex_example

def serialization_complex_example(num=5, dist=1, dim=10, C=2.0, width=10):
    import os
    from numpy import concatenate, zeros, ones
    from numpy.random import randn, seed
    from shogun.Features import RealFeatures, Labels
    from shogun.Classifier import GMNPSVM
    from shogun.Kernel import GaussianKernel
    from shogun.IO import SerializableHdf5File,SerializableAsciiFile, \
        SerializableJsonFile,SerializableXmlFile,MSG_DEBUG
    from shogun.Preprocessor import NormOne, LogPlusOne

    seed(17)

    data=concatenate((randn(dim, num), randn(dim, num) + dist,
                      randn(dim, num) + 2*dist,
                      randn(dim, num) + 3*dist), axis=1)
    lab=concatenate((zeros(num), ones(num), 2*ones(num), 3*ones(num)))

    feats=RealFeatures(data)
    #feats.io.set_loglevel(MSG_DEBUG)
    kernel=GaussianKernel(feats, feats, width)

    labels=Labels(lab)

    svm = GMNPSVM(C, kernel, labels)

    feats.add_preprocessor(NormOne())
    feats.add_preprocessor(LogPlusOne())
    feats.set_preprocessed(1)
    svm.train(feats)

    #svm.print_serializable()

    fstream = SerializableHdf5File("blaah.h5", "w")
    status = svm.save_serializable(fstream)
    check_status(status)

    fstream = SerializableAsciiFile("blaah.asc", "w")
    status = svm.save_serializable(fstream)
    check_status(status)

    fstream = SerializableJsonFile("blaah.json", "w")
    status = svm.save_serializable(fstream)
    check_status(status)

    fstream = SerializableXmlFile("blaah.xml", "w")
    status = svm.save_serializable(fstream)
    check_status(status)

    fstream = SerializableHdf5File("blaah.h5", "r")
    new_svm=GMNPSVM()
    status = new_svm.load_serializable(fstream)
    check_status(status)
    new_svm.train()

    fstream = SerializableAsciiFile("blaah.asc", "r")
    new_svm=GMNPSVM()
    status = new_svm.load_serializable(fstream)
    check_status(status)
    new_svm.train()

    fstream = SerializableJsonFile("blaah.json", "r")
    new_svm=GMNPSVM()
    status = new_svm.load_serializable(fstream)
    check_status(status)
    new_svm.train()

    fstream = SerializableXmlFile("blaah.xml", "r")
    new_svm=GMNPSVM()
    status = new_svm.load_serializable(fstream)
    check_status(status)
    new_svm.train()

    os.unlink("blaah.h5")
    os.unlink("blaah.asc")
    os.unlink("blaah.json")
    os.unlink("blaah.xml")
    return svm,new_svm
Developer: Anshul-Bansal | Project: gsoc | Lines: 79 | Source: serialization_complex_example.py
Example 17: create_features

def create_features(kname, examples, kparam, train_mode, preproc, seq_source, nuc_con):
    """Converts numpy arrays or sequences into shogun features"""

    if kname == 'gauss' or kname == 'linear' or kname == 'poly':
        examples = numpy.array(examples)
        feats = RealFeatures(examples)

    elif kname == 'wd' or kname == 'localalign' or kname == 'localimprove':
        if seq_source == 'dna':
            examples = non_atcg_convert(examples, nuc_con)
            feats = StringCharFeatures(examples, DNA)
        elif seq_source == 'protein':
            examples = non_aminoacid_converter(examples, nuc_con)
            feats = StringCharFeatures(examples, PROTEIN)
        else:
            sys.stderr.write("Sequence source -"+seq_source+"- is invalid. select [dna|protein]\n")
            sys.exit(-1)

    elif kname == 'spec' or kname == 'cumspec':
        if seq_source == 'dna':
            examples = non_atcg_convert(examples, nuc_con)
            feats = StringCharFeatures(examples, DNA)
        elif seq_source == 'protein':
            examples = non_aminoacid_converter(examples, nuc_con)
            feats = StringCharFeatures(examples, PROTEIN)
        else:
            sys.stderr.write("Sequence source -"+seq_source+"- is invalid. select [dna|protein]\n")
            sys.exit(-1)

        wf = StringUlongFeatures( feats.get_alphabet() )
        wf.obtain_from_char(feats, kparam['degree']-1, kparam['degree'], 0, kname=='cumspec')
        del feats

        if train_mode:
            preproc = SortUlongString()
            preproc.init(wf)
        wf.add_preproc(preproc)
        ret = wf.apply_preproc()
        #assert(ret)
        feats = wf

    elif kname == 'spec2' or kname == 'cumspec2':
        # spectrum kernel on two sequences
        feats = {}
        feats['combined'] = CombinedFeatures()

        reversed = kname=='cumspec2'

        (ex0,ex1) = zip(*examples)

        f0 = StringCharFeatures(list(ex0), DNA)
        wf = StringWordFeatures(f0.get_alphabet())
        wf.obtain_from_char(f0, kparam['degree']-1, kparam['degree'], 0, reversed)
        del f0

        if train_mode:
            preproc = SortWordString()
            preproc.init(wf)
        wf.add_preprocessor(preproc)
        ret = wf.apply_preprocessors()
        assert(ret)
        feats['combined'].append_feature_obj(wf)
        feats['f0'] = wf

        f1 = StringCharFeatures(list(ex1), DNA)
        wf = StringWordFeatures( f1.get_alphabet() )
        wf.obtain_from_char(f1, kparam['degree']-1, kparam['degree'], 0, reversed)
        del f1

        if train_mode:
            preproc = SortWordString()
            preproc.init(wf)
        wf.add_preproc(preproc)
        ret = wf.apply_preproc()
        assert(ret)
        feats['combined'].append_feature_obj(wf)
        feats['f1'] = wf

    else:
        print 'Unknown kernel %s' % kname

    return (feats,preproc)
Developer: axitkhurana | Project: shogun | Lines: 82 | Source: experiment.py
Example 18: statistics_linear_time_mmd

def statistics_linear_time_mmd ():
    from shogun.Features import RealFeatures
    from shogun.Features import DataGenerator
    from shogun.Kernel import GaussianKernel
    from shogun.Statistics import LinearTimeMMD
    from shogun.Statistics import BOOTSTRAP, MMD1_GAUSSIAN
    from shogun.Distance import EuclideanDistance
    from shogun.Mathematics import Statistics, Math

    # note that the linear time statistic is designed for much larger datasets
    n=10000
    dim=2
    difference=0.5

    # use data generator class to produce example data
    # in practice, this generate data function could be replaced by a method
    # that obtains data from a stream
    data=DataGenerator.generate_mean_data(n,dim,difference)

    print "dimension means of X", mean(data.T[0:n].T)
    print "dimension means of Y", mean(data.T[n:2*n+1].T)

    # create shogun feature representation
    features=RealFeatures(data)

    # compute median data distance in order to use for Gaussian kernel width
    # 0.5*median_distance normally (factor two in Gaussian kernel)
    # However, shogun's kernel width uses a different parametrization
    # Therefore 0.5*2*median_distance^2
    # Use a subset of data for that, only 200 elements. Median is stable
    # Using all distances here would blow up memory
    subset=Math.randperm_vec(features.get_num_vectors())
    subset=subset[0:200]
    features.add_subset(subset)
    dist=EuclideanDistance(features, features)
    distances=dist.get_distance_matrix()
    features.remove_subset()
    median_distance=Statistics.matrix_median(distances, True)
    sigma=median_distance**2
    print "median distance for Gaussian kernel:", sigma
    kernel=GaussianKernel(10,sigma)

    mmd=LinearTimeMMD(kernel,features, n)

    # perform test: compute p-value and test if null-hypothesis is rejected for
    # a test level of 0.05
    statistic=mmd.compute_statistic()
    print "test statistic:", statistic

    # do the same thing using two different ways to approximate the null-distribution:
    # bootstrapping and Gaussian approximation (only for really large samples)
    alpha=0.05

    print "computing p-value using bootstrapping"
    mmd.set_null_approximation_method(BOOTSTRAP)
    mmd.set_bootstrap_iterations(50) # normally, far more iterations are needed
    p_value=mmd.compute_p_value(statistic)
    print "p_value:", p_value
    print "p_value <", alpha, ", i.e. test says p!=q:", p_value<alpha

    print "computing p-value using gaussian approximation"
    mmd.set_null_approximation_method(MMD1_GAUSSIAN)
    p_value=mmd.compute_p_value(statistic)
    print "p_value:", p_value
    print "p_value <", alpha, ", i.e. test says p!=q:", p_value<alpha

    # sample from null distribution (these may be plotted or whatsoever)
    # mean should be close to zero, variance strongly depends on data/kernel
    mmd.set_null_approximation_method(BOOTSTRAP)
    mmd.set_bootstrap_iterations(10) # normally, far more iterations are needed
    null_samples=mmd.bootstrap_null()
    print "null mean:", mean(null_samples)
    print "null variance:", var(null_samples)
Developer: TharinduRusira | Project: shogun | Lines: 73 | Source: statistics_linear_time_mmd.py
Example 19: k

# parameters, change to get different results
m=250
difference=3

# setting the angle lower makes a harder test
angle=pi/30

# number of samples taken from null and alternative distribution
num_null_samples=500

# use data generator class to produce example data
data=DataGenerator.generate_sym_mix_gauss(m,difference,angle)

# create shogun feature representation
features_x=RealFeatures(array([data[0]]))
features_y=RealFeatures(array([data[1]]))

# use a kernel width of sigma=2, which is 8 in SHOGUN's parametrization,
# which is k(x,y)=exp(-||x-y||^2 / tau), in contrast to the standard
# k(x,y)=exp(-||x-y||^2 / (2*sigma^2)), so tau=2*sigma^2
# Note that kernels per data can be different
kernel_x=GaussianKernel(10,8)
kernel_y=GaussianKernel(10,8)

# create hsic instance. Note that this is a convenience constructor which copies
# feature data. features_x and features_y are not those used in hsic.
# This is only for user-friendliness. Usually, it's ok to do this.
# Below, the alternative distribution is sampled, which means
# that new feature objects have to be created in each iteration (slow)
# However, normally, the alternative distribution is not sampled
Developer: coodoing | Project: shogun | Lines: 30 | Source: statistics_hsic.py
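The kernel-width comment in Example 19 is worth spelling out once: shogun's Gaussian kernel takes tau = 2*sigma^2 rather than sigma itself, which is why sigma=2 becomes a width of 8 above. A minimal helper sketch; the function name is ours, not part of shogun, and it assumes the GaussianKernel(feats, feats, width) constructor already used in Example 16:

from shogun.Kernel import GaussianKernel

def gaussian_kernel_from_sigma(feats, sigma):
    """Build a GaussianKernel from the textbook sigma parametrization.

    shogun uses k(x,y)=exp(-||x-y||^2 / tau) with tau = 2*sigma^2,
    so sigma=2 corresponds to a shogun width of 8.
    """
    tau = 2.0 * sigma**2
    return GaussianKernel(feats, feats, tau)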
Example 20: normally

# parameters, change to get different results
m=250
difference=3

# setting the angle lower makes a harder test
angle=pi/30

# number of samples taken from null and alternative distribution
num_null_samples=500

# use data generator class to produce example data
data=DataGenerator.generate_sym_mix_gauss(m,difference,angle)

# create shogun feature representation
features_x=RealFeatures(array([data[0]]))
features_y=RealFeatures(array([data[1]]))

# compute median data distance in order to use for Gaussian kernel width
# 0.5*median_distance normally (factor two in Gaussian kernel)
# However, shogun's kernel width uses a different parametrization
# Therefore 0.5*2*median_distance^2
# Use a subset of data for that, only 200 elements. Median is stable
subset=Math.randperm_vec(features_x.get_num_vectors())
subset=subset[0:200]
features_x.add_subset(subset)
dist=EuclideanDistance(features_x, features_x)
distances=dist.get_distance_matrix()
features_x.remove_subset()
median_distance=Statistics.matrix_median(distances, True)
sigma_x=median_distance**2
Developer: AlexBinder | Project: shogun | Lines: 30 | Source: statistics_hsic.py
Note: The shogun.Features.RealFeatures class examples in this article were compiled by 纯净天空 from GitHub, MSDocs and other source-code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective authors, and copyright remains with the original authors. Before redistributing or reusing the code, please consult the license of the corresponding project; do not republish without permission.