This article collects typical usage examples of the Java class it.uniroma2.sag.kelp.kernel.Kernel. If you are wondering what the Kernel class is for and how to use it in practice, the curated examples below should help.
The Kernel class belongs to the it.uniroma2.sag.kelp.kernel package. 17 code examples of the Kernel class are shown below, sorted by popularity by default.
Example 1: initializeExamples
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
/**
* This method will be executed before each test method.
*/
@Before
public void initializeExamples() {
String reprA = "fakeclass |BDV:"+DENSE_NAME+"| 1.0,0.0,1.0 |EV| |BV:"+SPARSE_NAME+"| one:1.0 three:1.0 |EV|";
String reprB = "fakeclass |BDV:"+DENSE_NAME+"| 0.0,1.0,1.0 |EV| |BV:"+SPARSE_NAME+"| two:1.0 three:1.0 |EV|";
try {
a = ExampleFactory.parseExample(reprA);
b = ExampleFactory.parseExample(reprB);
} catch (InstantiationException e) {
e.printStackTrace();
Assert.fail();
}
Kernel kernelOnDense = new PolynomialKernel(2, new LinearKernel(DENSE_NAME));
Kernel kernelOnSparse = new NormalizationKernel(new LinearKernel(SPARSE_NAME));
kernel = new LinearKernelCombination();
kernel.addKernel(1, kernelOnDense);
kernel.addKernel(2, kernelOnSparse);
}
Developer: SAG-KeLP-Legacy | Project: kelp-full | Lines: 23 | Source: SimpleExampleSerializationTest.java
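As a quick sanity check of the combined kernel built in this example, the two parsed examples can be compared directly. A minimal sketch, using the innerProduct(Example, Example) evaluation method of KeLP's Kernel base class:
// evaluate the weighted combination on the two parsed examples
float kab = kernel.innerProduct(a, b); // 1 * poly-on-dense + 2 * normalized-linear-on-sparse
float kaa = kernel.innerProduct(a, a); // self-similarity, handy as a sanity check
System.out.println("k(a,b)=" + kab + "  k(a,a)=" + kaa);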
Example 2: initializeExamples
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
/**
* This method will be executed before each test method.
*/
@Before
public void initializeExamples() {
String textualA = "class1 class2 |<| lab1 |BDV:" + DENSE_NAME + "| 0.5 1 |EDV| |,| |BDV:" + DENSE_NAME + "| -3 1 |EDV| |>| |BV:" + SPARSE_NAME + "| ptkSim:0.8 |EV|";
String textualB = "class1 class2 |<| lab1 |BDV:" + DENSE_NAME + "| -0.5 1.3 |EDV| |,| |BDV:" + DENSE_NAME + "| 0 1 |EDV| |>| |BV:" + SPARSE_NAME + "| ptkSim:0.3 |EV|";
try {
pairA = ExampleFactory.parseExample(textualA);
pairB = ExampleFactory.parseExample(textualB);
} catch (InstantiationException e) {
e.printStackTrace();
Assert.fail();
}
Kernel kernelOnDense = new LinearKernel(DENSE_NAME);
PreferenceKernel preference = new PreferenceKernel(kernelOnDense);
Kernel kernelOnSparse = new LinearKernel(SPARSE_NAME);
kernel = new LinearKernelCombination();
kernel.addKernel(1, preference);
kernel.addKernel(2, kernelOnSparse);
}
Developer: SAG-KeLP-Legacy | Project: kelp-full | Lines: 26 | Source: ExamplePairSerializationTest.java
Example 3: tune
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
private static float tune(SimpleDataset allTrainingSet, Kernel kernel,
float split, float[] cs) throws NoSuchPerformanceMeasureException,
IOException {
float bestC = 0.0f;
float bestF1 = -Float.MAX_VALUE;
// Split the data according to a fixed proportion
Dataset[] split2 = allTrainingSet
.splitClassDistributionInvariant(split);
SimpleDataset trainingSet = (SimpleDataset) split2[0];
SimpleDataset testSet = (SimpleDataset) split2[1];
// tune parameter C
for (float c : cs) {
float f1 = test(trainingSet, kernel, c, testSet, false);
System.out.println("C:" + c + "\t" + f1);
if (f1 > bestF1) {
bestF1 = f1;
bestC = c;
}
}
return bestC;
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 24 | Source: TweetSentimentAnalysisSemeval2013.java
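The test helper invoked above is defined elsewhere in TweetSentimentAnalysisSemeval2013 and is not shown on this page. Below is a hypothetical reconstruction, assembled only from constructors and evaluator calls that appear in the other examples here; the original implementation may well differ:
// hypothetical sketch of the companion test helper, not the original code
private static float test(SimpleDataset trainingSet, Kernel kernel, float c,
		SimpleDataset testSet, boolean verbose)
		throws NoSuchPerformanceMeasureException {
	// one-vs-all C-SVM with the given kernel and regularization parameter
	BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
	svmSolver.setKernel(kernel);
	svmSolver.setCp(c);
	svmSolver.setCn(c);
	OneVsAllLearning ovaLearner = new OneVsAllLearning();
	ovaLearner.setBaseAlgorithm(svmSolver);
	ovaLearner.setLabels(trainingSet.getClassificationLabels());
	ovaLearner.learn(trainingSet);
	Classifier f = ovaLearner.getPredictionFunction();
	// evaluate mean F1 on the held-out set
	MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
			trainingSet.getClassificationLabels());
	for (Example e : testSet.getExamples())
		ev.addCount(e, f.predict(e));
	float meanF1 = ev.getPerformanceMeasure("MeanF1");
	if (verbose)
		System.out.println("MeanF1: " + meanF1);
	return meanF1;
}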
Example 4: learnModel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet.populate("src/test/resources/svmTest/binary/binary_train.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/binary/binary_test.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the positive class
StringLabel positiveClass = new StringLabel("+1");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
BinaryNuSvmClassification learner = new BinaryNuSvmClassification(kernel,
positiveClass, 0.5f);
// learn and get the prediction function
learner.learn(trainingSet);
f = learner.getPredictionFunction();
}
Developer: SAG-KeLP-Legacy | Project: kelp-full | Lines: 32 | Source: BinaryNuSVMTest.java
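A test method in the same class would then evaluate f on testSet. Here is a minimal sketch following the BinaryClassificationEvaluator pattern used in the other examples on this page; the actual assertions in BinaryNuSVMTest may differ:
@Test
public void checkAccuracy() {
	BinaryClassificationEvaluator ev = new BinaryClassificationEvaluator(
			new StringLabel("+1"));
	for (Example e : testSet.getExamples()) {
		ClassificationOutput p = f.predict(e);
		ev.addCount(e, p);
	}
	// illustrative threshold; the real test may assert a different value
	Assert.assertTrue(ev.getAccuracy() > 0.5f);
}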
Example 5: learnModel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet.populate("src/test/resources/svmTest/binary/binary_train.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/binary/binary_test.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the positive class
StringLabel positiveClass = new StringLabel("+1");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
BinaryCSvmClassification learner = new BinaryCSvmClassification(kernel,
positiveClass, 1, 1);
// learn and get the prediction function
learner.learn(trainingSet);
f = learner.getPredictionFunction();
}
Developer: SAG-KeLP-Legacy | Project: kelp-full | Lines: 32 | Source: BinaryCSVMTest.java
Example 6: learnModel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
@BeforeClass
public static void learnModel() {
trainingSet = new SimpleDataset();
testSet = new SimpleDataset();
try {
trainingSet
.populate("src/test/resources/svmTest/regression/mg_scale.klp");
// Read a dataset into a test variable
testSet.populate("src/test/resources/svmTest/regression/mg_scale.klp");
} catch (Exception e) {
e.printStackTrace();
Assert.fail();
}
// define the regression label
Label label = new StringLabel("r");
// define the kernel
Kernel kernel = new LinearKernel("0");
// add a cache
kernel.setKernelCache(new FixIndexKernelCache(trainingSet
.getNumberOfExamples()));
// define the learning algorithm
EpsilonSvmRegression learner = new EpsilonSvmRegression(kernel, label,
1, 0.1f);
// learn and get the prediction function
learner.learn(trainingSet);
p = learner.getPredictionFunction();
}
Developer: SAG-KeLP-Legacy | Project: kelp-full | Lines: 33 | Source: EpsilonSVRTest.java
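The companion test would then check the predictions on testSet. A minimal sketch following the RegressorEvaluator pattern shown in Example 10 below; the actual assertions in EpsilonSVRTest may differ:
@Test
public void checkMeanSquaredError() {
	Label label = new StringLabel("r");
	RegressorEvaluator evaluator = new RegressorEvaluator(
			trainingSet.getRegressionProperties());
	for (Example e : testSet.getExamples())
		evaluator.addCount(e, p.predict(e));
	// illustrative bound; the real test may assert a different value
	Assert.assertTrue(evaluator.getMeanSquaredError(label) < 1f);
}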
Example 7: getKernel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public Kernel getKernel() {
return kernel;
}
Developer: SAG-KeLP-Legacy | Project: kernel-clustering | Lines: 4 | Source: KernelBasedKMeansEngine.java
Example 8: setKernel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public void setKernel(Kernel kernel) {
this.kernel = kernel;
}
Developer: SAG-KeLP-Legacy | Project: kernel-clustering | Lines: 4 | Source: KernelBasedKMeansEngine.java
Example 9: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) throws Exception {
float split = 0.8f;
String train_file = "src/main/resources/tweetSentiment2013/train.klp.gz";
String test_file = "src/main/resources/tweetSentiment2013/test.klp.gz";
int kernelmode = 1;
float polyD = 0;
float gamma = 0;
float[] Cs = new float[] { 0.1f, 0.5f, 1f };
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet.populate(train_file);
// Read a dataset into a test variable
SimpleDataset testSet = new SimpleDataset();
testSet.populate(test_file);
// set the cache size
int cacheSize = trainingSet.getNumberOfExamples()
+ testSet.getNumberOfExamples();
// Initialize a kernel
Kernel kernel = null;
switch (kernelmode) {
case 1:
kernel = getBowKernel(cacheSize);
break;
case 2:
kernel = getPolyBow(cacheSize, polyD);
break;
case 3:
kernel = getWordspaceKernel(cacheSize);
break;
case 4:
kernel = getRbfWordspaceKernel(cacheSize, gamma);
break;
case 5:
kernel = getBowWordSpaceKernel(cacheSize);
break;
case 6:
kernel = getPolyBowRbfWordspaceKernel(cacheSize, polyD, gamma);
break;
default:
kernel = getBowKernel(cacheSize);
break;
}
// Find optimal C
float c = tune(trainingSet, kernel, split, Cs);
System.out.println("start testing with C=" + c);
// test
float f1 = test(trainingSet, kernel, c, testSet, true);
System.out.println("Mean F1 on test set=" + f1);
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 53 | Source: TweetSentimentAnalysisSemeval2013.java
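The kernel factory methods selected through kernelmode (getBowKernel, getPolyBow, getRbfWordspaceKernel, and so on) are defined elsewhere in the class and are not shown here. As an illustration, one of them could compose kernels along the following lines. This is a hypothetical sketch assembled from constructors used in the other examples; the representation name "WS" is an assumption:
// hypothetical sketch: an RBF kernel over a normalized linear kernel on a
// word-space representation, with a fixed-size cache
private static Kernel getRbfWordspaceKernel(int cacheSize, float gamma) {
	Kernel linear = new LinearKernel("WS"); // "WS" is an assumed representation name
	Kernel normalized = new NormalizationKernel(linear);
	Kernel rbf = new RbfKernel(gamma, normalized);
	rbf.setKernelCache(new FixIndexKernelCache(cacheSize));
	return rbf;
}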
Example 10: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) throws Exception {
// The epsilon in loss function of the regressor
float pReg = 0.1f;
// The regularization parameter of the regressor
float c = 2f;
// The gamma parameter in the RBF kernel
float gamma = 1f;
// The label indicating the value considered by the regressor
Label label = new StringLabel("r");
// Load the dataset
SimpleDataset dataset = new SimpleDataset();
dataset.populate("src/main/resources/sv_regression_test/mg_scale.klp");
// Split the dataset in train and test datasets
dataset.shuffleExamples(new Random(0));
SimpleDataset[] split = dataset.split(0.7f);
SimpleDataset trainDataset = split[0];
SimpleDataset testDataset = split[1];
// Kernel for the first representation (0-index)
Kernel linear = new LinearKernel("0");
// Applying the RBF kernel
Kernel rbf = new RbfKernel(gamma, linear);
// Applying a cache
FixIndexKernelCache kernelCache = new FixIndexKernelCache(
trainDataset.getNumberOfExamples());
rbf.setKernelCache(kernelCache);
// instantiate the regressor
EpsilonSvmRegression regression = new EpsilonSvmRegression(rbf, label,
c, pReg);
// learn
regression.learn(trainDataset);
// get the prediction function
RegressionFunction regressor = regression.getPredictionFunction();
// initializing the performance evaluator
RegressorEvaluator evaluator = new RegressorEvaluator(
trainDataset.getRegressionProperties());
// For each example from the test set
for (Example e : testDataset.getExamples()) {
// Predict the value
Prediction prediction = regressor.predict(e);
// Print the original and the predicted values
System.out.println("real value: " + e.getRegressionValue(label)
+ "\t-\tpredicted value: " + prediction.getScore(label));
// Update the evaluator
evaluator.addCount(e, prediction);
}
// Get the Mean Squared Error for the targeted label
float meanSquaredError = evaluator.getMeanSquaredError(label);
System.out.println("\nMean Squared Error:\t" + meanSquaredError);
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 59 | Source: EpsilonSVRegressionExample.java
Example 11: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) {
try {
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet.populate("src/main/resources/multiplerepresentation/train.klp");
// Read a dataset into a test variable
SimpleDataset testSet = new SimpleDataset();
testSet.populate("src/main/resources/multiplerepresentation/test.klp");
List<Label> classes = trainingSet.getClassificationLabels();
for (int i=0; i<classes.size(); ++i) {
Label l = classes.get(i);
System.out.println("Class: " + l.toString());
System.out.println(trainingSet.getNumberOfPositiveExamples(l));
System.out.println(testSet.getNumberOfPositiveExamples(l));
}
// instantiate a passive aggressive algorithm
KernelizedPassiveAggressiveClassification kPA = new KernelizedPassiveAggressiveClassification();
// set an aggressiveness parameter
kPA.setC(2f);
// Kernel for the first representation (0-index)
Kernel linear = new LinearKernel("0");
// Normalize the linear kernel
NormalizationKernel normalizedKernel = new NormalizationKernel(
linear);
// Apply a 2-degree polynomial kernel on top of the normalized linear kernel
Kernel polyKernel = new PolynomialKernel(2f, normalizedKernel);
// Kernel for the second representation (1-index)
Kernel linear1 = new LinearKernel("1");
// Normalize the linear kernel
NormalizationKernel normalizedKernel1 = new NormalizationKernel(
linear1);
// Apply an RBF kernel on top of the normalized linear kernel
Kernel rbfKernel = new RbfKernel(2f, normalizedKernel1);
// combine the two kernels into a weighted linear combination
LinearKernelCombination linearCombination = new LinearKernelCombination();
linearCombination.addKernel(1f, polyKernel);
linearCombination.addKernel(1f, rbfKernel);
// normalize the weights such that their sum is 1
linearCombination.normalizeWeights();
// set the kernel for the PA algorithm
kPA.setKernel(linearCombination);
// Instantiate a OneVsAll learning algorithm
// It is a so called meta learner, it receives in input a binary learning algorithm
OneVsAllLearning metaOneVsAllLearner = new OneVsAllLearning();
metaOneVsAllLearner.setBaseAlgorithm(kPA);
metaOneVsAllLearner.setLabels(classes);
long startLearningTime = System.currentTimeMillis();
// learn and get the prediction function
metaOneVsAllLearner.learn(trainingSet);
OneVsAllClassifier f = metaOneVsAllLearner.getPredictionFunction();
long endLearningTime = System.currentTimeMillis();
// classify examples and compute some statistics
MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(classes);
for (Example e : testSet.getExamples()) {
OneVsAllClassificationOutput prediction = f.predict(e);
ev.addCount(e, prediction);
}
System.out
.println("Accuracy: "
+ ev.getAccuracy());
System.out.println("Learning time without cache: " + (endLearningTime-startLearningTime) + " ms");
} catch (Exception e1) {
e1.printStackTrace();
}
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 81 | Source: OneVsAllPassiveAggressiveExample.java
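The final print highlights that no kernel cache was used. Since the Passive Aggressive algorithm repeatedly compares the same training examples, attaching a cache to the combined kernel before calling learn() usually reduces the measured learning time. A minimal sketch reusing the FixIndexKernelCache seen in the other examples:
// optional: cache the combined kernel before learning (not in the original code)
linearCombination.setKernelCache(new FixIndexKernelCache(
		trainingSet.getNumberOfExamples()));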
Example 12: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) {
try {
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet.populate("src/main/resources/hellolearning/train.klp");
// Read a dataset into a test variable
SimpleDataset testSet = new SimpleDataset();
testSet.populate("src/main/resources/hellolearning/test.klp");
// define the positive class
StringLabel positiveClass = new StringLabel("+1");
// print some statistics
System.out.println("Training set statistics");
System.out.print("Examples number ");
System.out.println(trainingSet.getNumberOfExamples());
System.out.print("Positive examples ");
System.out.println(trainingSet
.getNumberOfPositiveExamples(positiveClass));
System.out.print("Negative examples ");
System.out.println(trainingSet
.getNumberOfNegativeExamples(positiveClass));
System.out.println("Test set statistics");
System.out.print("Examples number ");
System.out.println(testSet.getNumberOfExamples());
System.out.print("Positive examples ");
System.out.println(testSet
.getNumberOfPositiveExamples(positiveClass));
System.out.print("Negative examples ");
System.out.println(testSet
.getNumberOfNegativeExamples(positiveClass));
// instantiate a passive aggressive algorithm
KernelizedPassiveAggressiveClassification kPA = new KernelizedPassiveAggressiveClassification();
// indicate to the learner what is the positive class
kPA.setLabel(positiveClass);
// set an aggressiveness parameter
kPA.setC(0.01f);
// use the first (and only here) representation
Kernel linear = new LinearKernel("0");
// Normalize the linear kernel
NormalizationKernel normalizedKernel = new NormalizationKernel(
linear);
// Apply a 2-degree polynomial kernel on top of the normalized linear kernel
Kernel polyKernel = new PolynomialKernel(2f, normalizedKernel);
// tell the algorithm that the kernel we want to use in learning is
// the polynomial kernel
kPA.setKernel(polyKernel);
// learn and get the prediction function
kPA.learn(trainingSet);
Classifier f = kPA.getPredictionFunction();
// classify examples and compute some statistics
BinaryClassificationEvaluator ev = new BinaryClassificationEvaluator(positiveClass);
for (Example e : testSet.getExamples()) {
ClassificationOutput p = f.predict(e);
ev.addCount(e, p);
}
System.out
.println("Accuracy: " +
ev.getAccuracy());
} catch (Exception e1) {
e1.printStackTrace();
}
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 70 | Source: HelloKernelLearning.java
Example 13: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) {
try {
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet
.populate("src/main/resources/sequenceKernelExample/sequenceTrain.txt");
SimpleDataset testSet = new SimpleDataset();
testSet.populate("src/main/resources/sequenceKernelExample/sequenceTest.txt");
// print some statistics
System.out.println("Training set statistics");
System.out.print("Examples number ");
System.out.println(trainingSet.getNumberOfExamples());
List<Label> classes = trainingSet.getClassificationLabels();
for (Label l : classes) {
System.out.println("Training Label " + l.toString() + " "
+ trainingSet.getNumberOfPositiveExamples(l));
System.out.println("Training Label " + l.toString() + " "
+ trainingSet.getNumberOfNegativeExamples(l));
System.out.println("Test Label " + l.toString() + " "
+ testSet.getNumberOfPositiveExamples(l));
System.out.println("Test Label " + l.toString() + " "
+ testSet.getNumberOfNegativeExamples(l));
}
// Kernel for the first representation (0-index)
Kernel kernel = new SequenceKernel("SEQUENCE", 2, 1);
// Normalize the sequence kernel
NormalizationKernel normalizedKernel = new NormalizationKernel(
kernel);
kernel.setSquaredNormCache(new FixIndexSquaredNormCache(trainingSet.getNumberOfExamples()));
kernel.setKernelCache(new FixIndexKernelCache(trainingSet.getNumberOfExamples()));
// instantiate an svmsolver
BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
svmSolver.setKernel(normalizedKernel);
svmSolver.setCp(1);
svmSolver.setCn(1);
OneVsAllLearning ovaLearner = new OneVsAllLearning();
ovaLearner.setBaseAlgorithm(svmSolver);
ovaLearner.setLabels(classes);
// learn and get the prediction function
ovaLearner.learn(trainingSet);
Classifier f = ovaLearner.getPredictionFunction();
// classify examples and compute some statistics
MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
trainingSet.getClassificationLabels());
for (Example e : testSet.getExamples()) {
ClassificationOutput p = f.predict(e);
ev.addCount(e, p);
}
System.out.println("Accuracy: "
+ ev.getPerformanceMeasure("accuracy"));
} catch (Exception e1) {
e1.printStackTrace();
}
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 66 | Source: SequenceKernelExample.java
Example 14: main
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
public static void main(String[] args) {
try {
// Read a dataset into a trainingSet variable
SimpleDataset trainingSet = new SimpleDataset();
trainingSet
.populate("src/main/resources/iris_dataset/iris_train.klp");
SimpleDataset testSet = new SimpleDataset();
testSet.populate("src/main/resources/iris_dataset/iris_test.klp");
// print some statistics
System.out.println("Training set statistics");
System.out.print("Examples number ");
System.out.println(trainingSet.getNumberOfExamples());
List<Label> classes = trainingSet.getClassificationLabels();
for (Label l : classes) {
System.out.println("Training Label " + l.toString() + " "
+ trainingSet.getNumberOfPositiveExamples(l));
System.out.println("Training Label " + l.toString() + " "
+ trainingSet.getNumberOfNegativeExamples(l));
System.out.println("Test Label " + l.toString() + " "
+ testSet.getNumberOfPositiveExamples(l));
System.out.println("Test Label " + l.toString() + " "
+ testSet.getNumberOfNegativeExamples(l));
}
// Kernel for the first representation (0-index)
Kernel linear = new LinearKernel("0");
// Normalize the linear kernel
NormalizationKernel normalizedKernel = new NormalizationKernel(
linear);
// instantiate an svmsolver
BinaryCSvmClassification svmSolver = new BinaryCSvmClassification();
svmSolver.setKernel(normalizedKernel);
svmSolver.setCp(1);
svmSolver.setCn(1);
OneVsAllLearning ovaLearner = new OneVsAllLearning();
ovaLearner.setBaseAlgorithm(svmSolver);
ovaLearner.setLabels(classes);
// learn and get the prediction function
ovaLearner.learn(trainingSet);
Classifier f = ovaLearner.getPredictionFunction();
// classify examples and compute some statistics
MulticlassClassificationEvaluator ev = new MulticlassClassificationEvaluator(
trainingSet.getClassificationLabels());
for (Example e : testSet.getExamples()) {
ClassificationOutput p = f.predict(e);
ev.addCount(e, p);
}
List<Label> twoLabels = new ArrayList<Label>();
twoLabels.add(new StringLabel("iris-setosa"));
twoLabels.add(new StringLabel("iris-virginica"));
Object[] as = new Object[1];
as[0] = twoLabels;
System.out.println("Mean F1: "
+ ev.getPerformanceMeasure("MeanF1"));
System.out.println("Mean F1 For iris-setosa/iris-virginica: "
+ ev.getPerformanceMeasure("MeanF1For", as));
System.out.println("F1: "
+ ev.getPerformanceMeasure("OverallF1"));
} catch (Exception e1) {
e1.printStackTrace();
}
}
Developer: SAG-KeLP-Legacy | Project: kelp-examples | Lines: 75 | Source: OneVsAllSVMExample.java
Example 15: KernelBasedKMeansEngine
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
/**
* @param kernel
* The kernel function
* @param k
* The number of expected clusters
* @param maxIterations
* The maximum number of iterations
*/
public KernelBasedKMeansEngine(Kernel kernel, int k, int maxIterations) {
this();
this.kernel = kernel;
this.k = k;
this.maxIterations = maxIterations;
}
Developer: SAG-KeLP-Legacy | Project: kernel-clustering | Lines: 15 | Source: KernelBasedKMeansEngine.java
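A minimal usage sketch for this constructor. It assumes the engine exposes a cluster(Dataset) method returning a ClusterList, as in KeLP's clustering API; check the kernel-clustering project for the exact signature. The dataset path is hypothetical:
// hedged sketch: cluster a dataset into 5 groups with a normalized linear kernel
SimpleDataset dataset = new SimpleDataset();
dataset.populate("src/main/resources/clustering/data.klp"); // hypothetical path
Kernel linear = new LinearKernel("0");
Kernel normalized = new NormalizationKernel(linear);
KernelBasedKMeansEngine engine = new KernelBasedKMeansEngine(normalized, 5, 100);
ClusterList clusters = engine.cluster(dataset); // method name assumed from KeLP's API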
Example 16: getKernel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
/**
* Returns the kernel used in comparing two vectors
*
* @return the kernel used in comparing two vectors
*/
public Kernel getKernel() {
return kernel;
}
Developer: SAG-KeLP | Project: kelp-additional-kernels | Lines: 9 | Source: VectorBasedStructureElementSimilarity.java
Example 17: setKernel
import it.uniroma2.sag.kelp.kernel.Kernel; // import the required package/class
/**
* Sets the kernel to be used in comparing two vectors
*
* <p>
* NOTE: the kernel cache mechanism cannot be enabled here. Thus the
* cache is automatically disabled on <code>kernel</code>
*
* @param kernel the kernel to be used in comparing two vectors
*/
public void setKernel(Kernel kernel) {
this.kernel = kernel;
if(kernel!=null)
kernel.disableCache();
}
Developer: SAG-KeLP | Project: kelp-additional-kernels | Lines: 15 | Source: VectorBasedStructureElementSimilarity.java
Note: the examples of the it.uniroma2.sag.kelp.kernel.Kernel class in this article were collected from open-source projects hosted on platforms such as GitHub and MSDocs. The snippets come from projects contributed by their original authors, who retain the copyright; when redistributing or using this code, please refer to the license of the corresponding project. Do not reproduce without permission.