This article collects and summarizes typical usage examples of the C++ TFltVV class. If you are wondering what exactly TFltVV is, how to use it, or where to find examples of it, the hand-picked class code examples below may help.
The following shows 18 code examples of the TFltVV class, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code examples.
Example 1: GetEigVec
// to get first few eigenvectors
void GetEigVec(const PUNGraph& Graph, const int& EigVecs, TFltV& EigValV, TVec<TFltV>& EigVecV) {
  const int Nodes = Graph->GetNodes();
  // Lanczos
  TUNGraphMtx GraphMtx(Graph);
  int CalcVals = int(2*EigVecs);
  if (CalcVals > Nodes) { CalcVals = Nodes; }
  TFltVV EigVecVV;
  //while (EigValV.Len() < EigVecs && CalcVals < 10*EigVecs) {
  try {
    TSparseSVD::Lanczos(GraphMtx, EigVecs, 2*EigVecs, ssotFull, EigValV, EigVecVV, false);
  }
  catch(...) {
    printf("\n ***EXCEPTION: TRIED %d GOT %d values** \n", CalcVals, EigValV.Len());
  }
  if (EigValV.Len() < EigVecs) {
    printf(" ***TRIED %d GOT %d values** \n", CalcVals, EigValV.Len());
  }
  // CalcVals += EigVecs;
  //}
  TFltIntPrV EigValIdV;
  for (int i = 0; i < EigValV.Len(); i++) {
    EigValIdV.Add(TFltIntPr(EigValV[i], i));
  }
  EigValIdV.Sort(false);
  EigValV.Sort(false);
  for (int v = 0; v < EigValIdV.Len(); v++) { // vector components are not sorted!!!
    EigVecV.Add();
    EigVecVV.GetCol(EigValIdV[v].Val2, EigVecV.Last());
  }
  IsAllValVNeg(EigVecV[0], true);
}
Developer: Networks-Learning, Project: infopath, Lines: 29, Source: gsvd.cpp
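For context, a minimal calling sketch (not taken from the original project): it assumes SNAP's Snap.h is on the include path, that GetEigVec above and its gsvd helpers are linked in, and uses an arbitrary random graph purely for illustration.

#include "Snap.h"

int main() {
  // build a small random undirected graph (100 nodes, 500 edges) for illustration
  PUNGraph Graph = TSnap::GenRndGnm<PUNGraph>(100, 500, false);
  TFltV EigValV;          // eigenvalues, sorted in decreasing order by GetEigVec
  TVec<TFltV> EigVecV;    // corresponding eigenvectors, one TFltV per eigenvalue
  GetEigVec(Graph, 5, EigValV, EigVecV);   // request the first 5 eigen-pairs
  for (int i = 0; i < EigValV.Len(); i++) {
    printf("lambda_%d = %f\n", i, double(EigValV[i]));
  }
  return 0;
}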
Example 2: LeftInstNV
void TDecisionTree::TNode::Split(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
  // construct the children
  int NInstLeft = 0;
  for (int i = 0; i < NExamples; i++) {
    AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
    if (FtrVV(CutFtrN, InstNV[i]) <= CutFtrVal) {
      NInstLeft++;
    }
  }
  TIntV LeftInstNV(NInstLeft, 0);
  TIntV RightInstNV(NExamples - NInstLeft, 0);
  int InstN;
  for (int i = 0; i < NExamples; i++) {
    InstN = InstNV[i];
    AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
    if (FtrVV(CutFtrN, InstN) <= CutFtrVal) {
      LeftInstNV.Add(InstN);
    } else {
      RightInstNV.Add(InstN);
    }
  }
  Left = new TNode(Tree);
  Right = new TNode(Tree);
  Left->Fit(FtrVV, ClassV, LeftInstNV);
  Right->Fit(FtrVV, ClassV, RightInstNV);
}
Developer: Bradeskojest, Project: qminer, Lines: 31, Source: classification.cpp
Example 3: ValClassPrV
void TDecisionTree::TNode::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const TIntV& InstNV) {
  EAssert(!InstNV.Empty());
  const int Dim = FtrVV.GetRows();
  NExamples = InstNV.Len();
  ClassHist.Gen(2);
  FtrHist.Gen(Dim);
  {
    int TotalPos = 0;
    double BestScore = TFlt::NInf, CutVal = TFlt::NInf, Score = TFlt::NInf;
    for (int i = 0; i < NExamples; i++) {
      AssertR(0 <= InstNV[i] && InstNV[i] < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstNV[i]) + "!");
      TotalPos += (int) ClassV[InstNV[i]];
    }
    ClassHist[0] = 1 - double(TotalPos) / NExamples;
    ClassHist[1] = 1 - ClassHist[0];
    TFltIntPrV ValClassPrV(NExamples);
    // get the best score and cut value
    int InstN;
    for (int FtrN = 0; FtrN < Dim; FtrN++) {
      double FtrSum = 0;
      for (int i = 0; i < NExamples; i++) {
        InstN = InstNV[i];
        AssertR(0 <= InstN && InstN < FtrVV.GetCols(), "Invalid instance index: " + TInt::GetStr(InstN) + "!");
        ValClassPrV[i].Val1 = FtrVV(FtrN, InstN);
        ValClassPrV[i].Val2 = (int) ClassV[InstN];
        FtrSum += FtrVV(FtrN, InstN);
      }
      ValClassPrV.Sort(true); // have to sort to speed up the calculation
      if (CanSplitNumFtr(ValClassPrV, TotalPos, CutVal, Score) && Score > BestScore) {
        BestScore = Score;
        CutFtrN = FtrN;
        CutFtrVal = CutVal;
      }
      FtrHist[FtrN] = FtrSum / NExamples;
    }
  }
  // cut the dataset into left and right and build the tree recursively
  if (ShouldGrow() && CutFtrN >= 0) {
    EAssert(CutFtrN < Dim);
    // the best attribute is now selected, calculate the correlation between the
    // selected attribute and other attributes, then split the node
    CalcCorrFtrV(FtrVV, InstNV);
    Split(FtrVV, ClassV, InstNV);
  }
}
Developer: Bradeskojest, Project: qminer, Lines: 60, Source: classification.cpp
Example 4: GetStr
TStr TStrUtil::GetStr(const TFltVV& FltVV, const TStr& DelimiterStr, const TStr& FmtStr) {
  TChA ResChA;
  for (int i = 0; i < FltVV.GetXDim(); i++) {
    for (int j = 0; j < FltVV.GetYDim(); j++) {
      ResChA += TFlt::GetStr(FltVV(i,j), FmtStr);
      if (j < FltVV.GetYDim() - 1) { ResChA += DelimiterStr; }
    }
    if (i < FltVV.GetXDim() - 1) { ResChA += '\n'; }
  }
  return ResChA;
}
Developer: amrsobhy, Project: qminer, Lines: 13, Source: strut.cpp
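A short usage sketch, not taken from the qminer sources; it assumes the glib base.h header plus the strut utilities that define TFltVV, TStr and TStrUtil::GetStr are on the include path, and uses arbitrary demo values:

#include "base.h"

int main() {
  TFltVV FltVV(2, 3);                         // 2 x 3 matrix, zero-initialized
  for (int i = 0; i < FltVV.GetXDim(); i++) {
    for (int j = 0; j < FltVV.GetYDim(); j++) {
      FltVV(i, j) = i + 0.1 * j;              // arbitrary demo values
    }
  }
  // values separated by ", ", rows by '\n', each value printed with two decimals
  TStr MtxStr = TStrUtil::GetStr(FltVV, ", ", "%.2f");
  printf("%s\n", MtxStr.CStr());
  return 0;
}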
Example 5: CalcKernelMatrix
//////////////////////////////////////////////////////////////////////////
// Kernel utilities
void TKernelUtil::CalcKernelMatrix(PSVMTrainSet Set, TFltVV& K) {
  const int Size = Set->Len();
  K.Gen(Size, Size);
  for (int i = 0; i < Size; i++) {
    for (int j = i; j < Size; j++)
      K(i,j) = K(j,i) = Set->DotProduct(i,j);
  }
}
Developer: AlertProject, Project: Text-processing-bundle, Lines: 9, Source: kernelmethods.cpp
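In matrix terms the loop fills the Gram (kernel) matrix of the training set; since the kernel is symmetric, only the upper triangle is evaluated and mirrored:

K_{ij} = K_{ji} = \langle x_i, x_j \rangle, \qquad 0 \le i \le j < n.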
Example 6: CutFtrV
void TDecisionTree::TNode::CalcCorrFtrV(const TFltVV& FtrVV, const TIntV& InstNV) {
  if (Tree->IsCalcCorr()) {
    const int Dim = FtrVV.GetRows();
    CutFtrCorrFtrNPValTrV.Gen(Dim-1, 0);
    TFltV CutFtrV(NExamples), OthrFtrV(NExamples);
    for (int i = 0; i < NExamples; i++) {
      CutFtrV[i] = FtrVV(CutFtrN, InstNV[i]);
    }
    for (int FtrN = 0; FtrN < Dim; FtrN++) {
      if (FtrN != CutFtrN) {
        for (int i = 0; i < NExamples; i++) {
          OthrFtrV[i] = FtrVV(FtrN, InstNV[i]);
        }
        TCorr Corr(CutFtrV, OthrFtrV);
        CutFtrCorrFtrNPValTrV.Add(TFltIntFltTr(Corr.GetCorrCf(), FtrN, Corr.GetCorrCfPrb()));
      }
    }
    CutFtrCorrFtrNPValTrV.Sort(false);
  }
}
Developer: Bradeskojest, Project: qminer, Lines: 25, Source: classification.cpp
Example 7: GetNewtonStep
void TLogRegFit::GetNewtonStep(TFltVV& HVV, const TFltV& GradV, TFltV& DeltaLV) {
  bool HSingular = false;
  for (int i = 0; i < HVV.GetXDim(); i++) {
    if (HVV(i,i) == 0.0) {
      HVV(i,i) = 0.001;
      HSingular = true;
    }
    DeltaLV[i] = GradV[i] / HVV(i, i);
  }
  if (! HSingular) {
    if (HVV(0, 0) < 0) { // if Hessian is negative definite, convert it to positive definite
      for (int r = 0; r < Theta.Len(); r++) {
        for (int c = 0; c < Theta.Len(); c++) {
          HVV(r, c) = - HVV(r, c);
        }
      }
      TNumericalStuff::SolveSymetricSystem(HVV, GradV, DeltaLV);
    }
    else {
      TNumericalStuff::SolveSymetricSystem(HVV, GradV, DeltaLV);
      for (int i = 0; i < DeltaLV.Len(); i++) {
        DeltaLV[i] = - DeltaLV[i];
      }
    }
  }
}
Developer: RoyZhengGao, Project: CommunityEvaluation, Lines: 27, Source: agm.cpp
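Reading the branches together (my interpretation of the code, not an official derivation): when a zero diagonal entry had to be patched, the routine keeps the diagonal (Jacobi-style) step; otherwise both branches reduce to the full Newton step,

\Delta\theta_i = \frac{g_i}{H_{ii}} \ \text{(diagonal fallback)}, \qquad \Delta\theta = -H^{-1} g \ \text{(regular case)},

where g is GradV and H is the Hessian HVV; negating H when it is negative definite, or negating the solution otherwise, yields the same -H^{-1} g.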
Example 8: PredictInternal
void TPropHazards::PredictInternal(const TFltVV& X, TFltV& IntensV) const {
  const int NInst = X.GetCols();
  TLinAlg::MultiplyT(X, WgtV, IntensV);
  for (int i = 0; i < NInst; i++) {
    IntensV[i] = exp(IntensV[i]);
  }
}
Developer: gitter-badger, Project: qminer, Lines: 8, Source: regression.cpp
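Each predicted intensity is the exponential of a linear score of the corresponding feature column, i.e.

\lambda_i = \exp(w^\top x_i), \qquad i = 1, \dots, \text{NInst},

where w is WgtV and x_i is the i-th column of X (TLinAlg::MultiplyT computes all the scores X^\top w in one call).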
Example 9: Grow
void TDecisionTree::Grow(const TFltVV& FtrVV, const TFltV& ClassV, const PNotify& Notify) {
  CleanUp();
  const int NInst = FtrVV.GetCols();
  TIntV RangeV(NInst);
  TLinAlgTransform::RangeV(NInst, RangeV);
  Root = new TNode(this);
  Root->Fit(FtrVV, ClassV, RangeV);
}
Developer: Bradeskojest, Project: qminer, Lines: 9, Source: classification.cpp
Example 10: CenterKernelMatrix
void TKernelUtil::CenterKernelMatrix(TFltVV& K) {
  IAssert(K.GetXDim() == K.GetYDim());
  const int l = K.GetYDim();
  TFltV jK(l);       // j'K
  double jKj = 0.0;  // j'Kj
  for (int j = 0; j < l; j++) {
    jK[j] = 0.0;
    for (int i = 0; i < l; i++)
      jK[j] += K(i,j);
    jKj += jK[j];
  }
  double invl = 1.0/l;
  for (int i = 0; i < l; i++) {
    for (int j = 0; j < l; j++)
      K(i,j) = K(i,j) - invl*jK[j] - invl*jK[i] + invl*invl*jKj;
  }
}
Developer: AlertProject, Project: Text-processing-bundle, Lines: 19, Source: kernelmethods.cpp
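The double loop is the standard kernel-centering formula. With jK[j] = \sum_a K_{aj} the column sums and jKj their total, each entry becomes

\tilde K_{ij} = K_{ij} - \tfrac{1}{l}\sum_a K_{aj} - \tfrac{1}{l}\sum_a K_{ai} + \tfrac{1}{l^2}\sum_{a,b} K_{ab},

which is the element-wise form of \tilde K = (I - \tfrac{1}{l} j j^\top)\, K\, (I - \tfrac{1}{l} j j^\top) for the all-ones vector j, using the symmetry of K.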
Example 11: PMultiply
// Result = A * B(:,ColId)
void TUNGraphMtx::PMultiply(const TFltVV& B, int ColId, TFltV& Result) const {
  const int RowN = GetRows();
  Assert(B.GetRows() >= RowN && Result.Len() >= RowN);
  const THash<TInt, TUNGraph::TNode>& NodeH = Graph->NodeH;
  for (int j = 0; j < RowN; j++) {
    const TIntV& RowV = NodeH[j].NIdV;
    Result[j] = 0.0;
    for (int i = 0; i < RowV.Len(); i++) {
      Result[j] += B(RowV[i], ColId);
    }
  }
}
Developer: Networks-Learning, Project: infopath, Lines: 13, Source: gsvd.cpp
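Since A here is the implicit adjacency matrix of an undirected graph, the product never materializes A; each output entry is simply a sum of B entries over the node's neighbours:

\mathrm{Result}_j = (A\, B_{:,c})_j = \sum_{i \in N(j)} B_{i,c},

with N(j) the neighbour set of node j and c = ColId.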
Example 12: PMultiplyT
// Result = A' * B(:,ColId)
void TNGraphMtx::PMultiplyT(const TFltVV& B, int ColId, TFltV& Result) const {
  const int ColN = GetCols();
  Assert(B.GetRows() >= ColN && Result.Len() >= ColN);
  const THash<TInt, TNGraph::TNode>& NodeH = Graph->NodeH;
  for (int i = 0; i < ColN; i++) Result[i] = 0.0;
  for (int j = 0; j < ColN; j++) {
    const TIntV& RowV = NodeH[j].OutNIdV;
    for (int i = 0; i < RowV.Len(); i++) {
      Result[RowV[i]] += B(j, ColId);
    }
  }
}
Developer: Networks-Learning, Project: infopath, Lines: 13, Source: gsvd.cpp
Example 13: PMultiplyT
void TBowMatrix::PMultiplyT(const TFltVV& B, int ColId, TFltV& Result) const {
  IAssert(B.GetXDim() >= PGetRows() && Result.Len() >= PGetCols());
  int ColN = PGetCols();
  int i, j, len; //TFlt *ResV = Result.BegI();
  for (j = 0; j < ColN; j++) {
    PBowSpV ColV = ColSpVV[j];
    len = ColV->Len();
    Result[j] = 0.0;
    for (i = 0; i < len; i++) {
      Result[j] += ColV->GetWgt(i) * B(ColV->GetWId(i), ColId);
    }
  }
}
Developer: adobekan, Project: qminer, Lines: 12, Source: bowlinalg.cpp
Example 14: GetInvParticipRat
void GetInvParticipRat(const PUNGraph& Graph, int MaxEigVecs, int TimeLimit, TFltPrV& EigValIprV) {
  TUNGraphMtx GraphMtx(Graph);
  TFltVV EigVecVV;
  TFltV EigValV;
  TExeTm ExeTm;
  if (MaxEigVecs<=1) { MaxEigVecs=1000; }
  int EigVecs = TMath::Mn(Graph->GetNodes(), MaxEigVecs);
  printf("start %d vecs...", EigVecs);
  try {
    TSparseSVD::Lanczos2(GraphMtx, EigVecs, TimeLimit, ssotFull, EigValV, EigVecVV, false);
  } catch(...) {
    printf("\n ***EXCEPTION: TRIED %d GOT %d values** \n", EigVecs, EigValV.Len());
  }
  printf(" ***TRIED %d GOT %d values in %s\n", EigVecs, EigValV.Len(), ExeTm.GetStr());
  TFltV EigVec;
  EigValIprV.Clr();
  if (EigValV.Empty()) { return; }
  for (int v = 0; v < EigVecVV.GetCols(); v++) {
    EigVecVV.GetCol(v, EigVec);
    EigValIprV.Add(TFltPr(EigValV[v], GetInvParticipRat(EigVec)));
  }
  EigValIprV.Sort();
}
Developer: Networks-Learning, Project: infopath, Lines: 22, Source: gsvd.cpp
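The overloaded GetInvParticipRat(EigVec) helper is not shown on this page; a commonly used definition of the inverse participation ratio, which the helper presumably implements, is

I(v) = \frac{\sum_k v_k^4}{\big(\sum_k v_k^2\big)^2},

which equals \sum_k v_k^4 for a unit-norm eigenvector; values near 1/n indicate a delocalized eigenvector, values near 1 a localized one.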
Example 15: Hessian
void TLogRegFit::Hessian(TFltVV& HVV) {
  HVV.Gen(Theta.Len(), Theta.Len());
  TFltV OutV;
  TLogRegPredict::GetCfy(X, OutV, Theta);
  for (int i = 0; i < X.Len(); i++) {
    for (int r = 0; r < Theta.Len(); r++) {
      HVV.At(r, r) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][r]);
      for (int c = r + 1; c < Theta.Len(); c++) {
        HVV.At(r, c) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][c]);
        HVV.At(c, r) += - (X[i][r] * OutV[i] * (1 - OutV[i]) * X[i][c]);
      }
    }
  }
  /*
  printf("\n");
  for (int r = 0; r < Theta.Len(); r++) {
    for (int c = 0; c < Theta.Len(); c++) {
      printf("%f\t", HVV.At(r, c).Val);
    }
    printf("\n");
  }
  */
}
Developer: RoyZhengGao, Project: CommunityEvaluation, Lines: 23, Source: agm.cpp
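The nested loops accumulate the Hessian of the logistic-regression log-likelihood, filling both triangles by symmetry. With p_i = OutV[i] = \sigma(\theta^\top x_i) the predicted probability for instance i,

H_{rc} = -\sum_i x_{ir}\, p_i (1 - p_i)\, x_{ic}, \qquad \text{i.e.} \qquad H = -\sum_i p_i (1 - p_i)\, x_i x_i^\top,

which is negative (semi-)definite, matching the sign handling in GetNewtonStep above.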
Example 16: GetDocVV
void TPartialGS::GetDocVV(TFltVV& DocVV) {
  IAssert(R.Len() > 0);
  const int DocN = R[0].Len();
  const int DimN = R.Len();
  DocVV.Gen(DimN, DocN);
  for (int DocC = 0; DocC < DocN; DocC++) {
    const int DId = IdV[DocC];
    int l = TInt::GetMn(DocC, DimN-1);
    for (int i = 0; i <= l; i++)
      DocVV(i,DId) = R[i][DocC - i];
    for (int i = l+1; i < DimN; i++)
      DocVV(i,DId) = 0.0;
  }
}
Developer: AlertProject, Project: Text-processing-bundle, Lines: 15, Source: kernelmethods.cpp
Example 17: Fit
void TDecisionTree::Fit(const TFltVV& FtrVV, const TFltV& ClassV, const PNotify& Notify) {
  Notify->OnNotifyFmt(TNotifyType::ntInfo, "Building a decision tree on %d instances ...", FtrVV.GetCols());
  Grow(FtrVV, ClassV, Notify);
  Prune(Notify);
  Notify->OnNotifyFmt(TNotifyType::ntInfo, "Done!");
}
Developer: Bradeskojest, Project: qminer, Lines: 8, Source: classification.cpp
Example 18: EAssertR
void TPropHazards::Fit(const TFltVV& _X, const TFltV& t, const double& Eps) {
  const int NInst = _X.GetCols();
  const int Dim = _X.GetRows() + 1;
  EAssertR(NInst == t.Len(), "TPropHazards::Fit the number of instances in X.GetCols() and t.Len() do not match");
  Notify->OnNotifyFmt(TNotifyType::ntInfo, "Fitting proportional hazards model on %d instances ...", NInst);
  // prepend a row of ones to X for the constant (baseline) term
  TFltVV X(_X.GetRows()+1, NInst);
  for (int ColN = 0; ColN < NInst; ColN++) {
    X(0, ColN) = 1;
    for (int RowN = 0; RowN < _X.GetRows(); RowN++) {
      X(RowN+1, ColN) = _X(RowN, ColN);
    }
  }
  WgtV.Gen(Dim);
  TFltVV X_t(X); X_t.Transpose(); // TODO slow
  TFltVV XTimesW(X.GetRows(), X.GetCols());
  TFltVV H(Dim, Dim);
  TFltV TempNInstV(NInst, NInst);
  TFltV GradV(Dim, Dim);
  TFltV DeltaWgtV(Dim, Dim);
  TSpVV WgtColSpVV(NInst, NInst);
  double IntensTimesT;
  // generate weight matrix with only ones on the diagonal
  // so you don't recreate all the objects every iteration
  for (int i = 0; i < NInst; i++) {
    WgtColSpVV[i].Add(TIntFltKd(i, 1));
  }
  int k = 0;
  double Diff = TFlt::PInf;
  while (Diff > Eps) {
    // construct the intensity vector
    PredictInternal(X, TempNInstV);
    // I) construct the Hessian: X*W*X' + lambda*I
    // prepare W and t .* intens - 1
    for (int i = 0; i < NInst; i++) {
      IntensTimesT = t[i] * TempNInstV[i];
      TempNInstV[i] = IntensTimesT - 1;
      WgtColSpVV[i][0].Dat = IntensTimesT;
    }
    // 1) compute X*W
    TLinAlg::Multiply(X, WgtColSpVV, XTimesW);
    // 2) compute (X*W)*X'
    TLinAlg::Multiply(XTimesW, X_t, H);
    // 3) (X*W*X') + lambda*I, exclude the base hazard
    if (Lambda > 0) {
      for (int i = 1; i < Dim; i++) {
        H(i,i) += Lambda;
      }
    }
    // II) construct the gradient: (t .* intens - 1) * X' + lambda*[0, w(2:end)]
    // 1) (t .* intens - 1) * X'
    TLinAlg::Multiply(X, TempNInstV, GradV);
    // 2) ((t .* intens - 1) * X') + lambda*[0, w(2:end)]
    if (Lambda > 0) {
      for (int i = 1; i < Dim; i++) {
        GradV[i] += Lambda * WgtV[i];
      }
    }
    // III) compute: delta_w = H \ grad
#ifdef LAPACKE
    const TFlt SingEps = 1e-10;
    if (H.GetRows() == 1) { // fix for a bug in SVD factorization
      DeltaWgtV[0] = GradV[0] / H(0,0);
    } else {
      TLinAlg::SVDSolve(H, DeltaWgtV, GradV, SingEps);
    }
#else
    throw TExcept::New("Should include LAPACKE!!");
#endif
    // IV) w <= w - delta_w
    for (int i = 0; i < Dim; i++) {
      WgtV[i] -= DeltaWgtV[i];
    }
    Diff = TLinAlg::Norm(DeltaWgtV);
    EAssertR(!TFlt::IsNan(Diff), "nans in delta wgt vector!");
    if (++k % 10 == 0) {
      Notify->OnNotifyFmt(TNotifyType::ntInfo, "Step: %d, diff: %.3f", k, Diff);
    }
  }
  Notify->OnNotifyFmt(TNotifyType::ntInfo, "Converged. Diff: %.5f", Diff);
}
Developer: gitter-badger, Project: qminer, Lines: 96, Source: regression.cpp
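Putting the steps of the loop together, each pass is one ridge-regularized Newton update in the notation of the code comments:

H = X W X^\top + \lambda \tilde I, \qquad W = \mathrm{diag}(t_i \lambda_i), \qquad g = X\,(t \circ \lambda - \mathbf{1}) + \lambda\, [0,\ w_{2:\text{end}}], \qquad w \leftarrow w - H^{-1} g,

where \lambda_i = \exp(w^\top x_i) is the intensity from PredictInternal, \circ is the element-wise product, and \tilde I is the identity with a zero in the first position so the baseline (intercept) weight is not regularized; the iteration stops when \lVert \Delta w \rVert < Eps.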
Note: The TFltVV class examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other source-code and documentation hosting platforms. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. Please consult the corresponding project's license before redistributing or reusing the code; do not reproduce without permission.