This article collects Java code examples of the water.fvec.Frame.clone() method and shows how Frame.clone() is used in practice. The examples are extracted from selected open-source projects found on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of the Frame.clone() method:

Package path: water.fvec.Frame
Class: Frame
Method: clone
Description: none available
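All of the examples below follow the same pattern: the parsed Frame is passed into a model-building helper (DataInfo, GLM2.Source, Model.score, ...) as fr.clone() rather than as fr itself, so the helper receives its own Frame reference while the caller keeps fr for later cleanup (fr.delete()). The following is a minimal sketch of that pattern, not code taken from any of the projects above; it reuses the twelve-argument DataInfo constructor and the smalldata path that appear in the examples, and it assumes it runs inside an H2O test harness (a class extending water.TestUtil with a local cloud already started).

import hex.DataInfo;
import water.Key;
import water.TestUtil;
import water.fvec.Frame;

// Illustrative sketch only: the class and method names are invented; the
// DataInfo argument list mirrors the h2oai/h2o-3 examples below.
public class FrameCloneSketch extends TestUtil {
  public void cloneIntoDataInfo() {
    Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
    DataInfo dinfo = null;
    try {
      // Hand DataInfo a clone of the frame, as the examples below do, so the
      // caller's fr reference stays available for cleanup in the finally block.
      dinfo = new DataInfo(fr.clone(), null, 0, true,
          DataInfo.TransformType.STANDARDIZE, DataInfo.TransformType.NONE,
          false, false, false,
          /* weights */ false, /* offset */ false, /* fold */ false);
    } finally {
      if (dinfo != null) dinfo.remove();
      fr.delete();
    }
  }
}

Note that the tests still call fr.delete() on the original frame afterwards, which suggests clone() is a shallow copy that shares the underlying Vecs rather than duplicating the data.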
Code example source: origin: h2oai/h2o-3
fr.clone(), // train
fr.clone(), // train
Code example source: origin: h2oai/h2o-3
try {
dinfo = new DataInfo(
fr.clone(), // train
Code example source: origin: h2oai/h2o-3
@Test public void testAirlines1() { // just test that it works at all
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/airlines/allyears2k_headers.zip");
  try {
    DataInfo dinfo = new DataInfo(
        fr.clone(), // train
        null, // valid
        1, // num responses
        true, // use all factor levels
        DataInfo.TransformType.STANDARDIZE, // predictor transform
        DataInfo.TransformType.NONE, // response transform
        true, // skip missing
        false, // impute missing
        false, // missing bucket
        false, // weight
        false, // offset
        false, // fold
        Model.InteractionSpec.allPairwise(new String[]{fr.name(8),fr.name(16),fr.name(2)}) // interactions
    );
    dinfo.dropInteractions();
    dinfo.remove();
  } finally {
    fr.delete();
  }
}
Code example source: origin: h2oai/h2o-3
fr.clone(), // train
fr.clone(), // train
Code example source: origin: h2oai/h2o-3
model = new GLM(params).trainModel().get();
Scope.track_generic(model);
DataInfo tinfo = new DataInfo(train.clone(), null, 0, true, DataInfo.TransformType.STANDARDIZE,
DataInfo.TransformType.NONE, false, false, false,
/* weights */ false, /* offset */ false, /* fold */ false);
Code example source: origin: h2oai/h2o-2
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame)fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1},new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
Assert.assertTrue(model.get_params().state == Job.JobState.DONE);
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame)fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1},new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
System.out.println(model.coefficients());
betaConstraints = ParseDataset2.parse(parsed, new Key[]{betaConsKey});
try {
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame) fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1}, new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
assertTrue("should've thrown",false);
} catch(IllegalArgumentException t) {
betaConstraints = ParseDataset2.parse(parsed, new Key[]{betaConsKey});
try {
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame) fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1}, new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
assertTrue("should've thrown",false);
} catch(IllegalArgumentException t) {
betaConstraints = ParseDataset2.parse(parsed, new Key[]{betaConsKey});
try {
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame) fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1}, new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
assertTrue("should've thrown",false);
} catch(IllegalArgumentException t) {
betaConstraints = ParseDataset2.parse(parsed, new Key[]{betaConsKey});
try {
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame) fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(false).setRegularization(new double[]{1}, new double[]{0.001607}).setBetaConstraints(betaConstraints).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
Code example source: origin: h2oai/h2o-3
@Test public void testIris1() { // test that getting sparseRows and denseRows produce the same results
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
  fr.swap(1,4);
  Model.InteractionPair[] ips = Model.InteractionPair.generatePairwiseInteractionsFromList(0, 1);
  DataInfo di=null;
  try {
    di = new DataInfo(
        fr.clone(), // train
        null, // valid
        1, // num responses
        true, // use all factor levels
        DataInfo.TransformType.NONE, // predictor transform
        DataInfo.TransformType.NONE, // response transform
        true, // skip missing
        false, // impute missing
        false, // missing bucket
        false, // weight
        false, // offset
        false, // fold
        Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1)}) // interactions
    );
    checker(di,false);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
Code example source: origin: h2oai/h2o-3
);
DataInfo dinfo = new DataInfo(
fr.clone(), // train
Code example source: origin: h2oai/h2o-3
@Test public void testIris2() { // test that getting sparseRows and denseRows produce the same results
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
  fr.swap(1,4);
  Model.InteractionPair[] ips = Model.InteractionPair.generatePairwiseInteractionsFromList(0, 1);
  DataInfo di=null;
  try {
    di = new DataInfo(
        fr.clone(), // train
        null, // valid
        1, // num responses
        true, // use all factor levels
        DataInfo.TransformType.STANDARDIZE, // predictor transform
        DataInfo.TransformType.NONE, // response transform
        true, // skip missing
        false, // impute missing
        false, // missing bucket
        false, // weight
        false, // offset
        false, // fold
        Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1)}) // interactions
    );
    checker(di,true);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
Code example source: origin: h2oai/h2o-3
if (_parms._link == Link.family_default)
  _parms._link = _parms._family.defaultLink;
_dinfo = new DataInfo(_train.clone(), _valid, 1,
    _parms._use_all_factor_levels || _parms._lambda_search,
    _parms._standardize ? DataInfo.TransformType.STANDARDIZE : DataInfo.TransformType.NONE,
    DataInfo.TransformType.NONE,
    _parms._missing_values_handling == MissingValuesHandling.Skip,
    _parms._missing_values_handling == MissingValuesHandling.MeanImputation,
    false, hasWeightCol(), hasOffsetCol(), hasFoldCol(),
    _parms.interactionSpec());
Code example source: origin: h2oai/h2o-3
@Test public void testIris3() { // test that getting sparseRows and denseRows produce the same results
  Frame fr = parse_test_file(Key.make("a.hex"), "smalldata/iris/iris_wheader.csv");
  fr.swap(2,4);
  Model.InteractionPair[] ips = Model.InteractionPair.generatePairwiseInteractionsFromList(0, 1, 2, 3);
  DataInfo di=null;
  try {
    di = new DataInfo(
        fr.clone(), // train
        null, // valid
        1, // num responses
        true, // use all factor levels
        DataInfo.TransformType.STANDARDIZE, // predictor transform
        DataInfo.TransformType.NONE, // response transform
        true, // skip missing
        false, // impute missing
        false, // missing bucket
        false, // weight
        false, // offset
        false, // fold
        Model.InteractionSpec.allPairwise(new String[]{fr.name(0),fr.name(1),fr.name(2),fr.name(3)}) // interactions
    );
    checker(di,true);
  } finally {
    fr.delete();
    if( di!=null ) {
      di.dropInteractions();
      di.remove();
    }
  }
}
Code example source: origin: h2oai/h2o-2
p.source = (Frame)_train.clone();
p.response = _train.lastVec();
p.ignored_cols = null;
Code example source: origin: h2oai/h2o-3
try {
di = new DataInfo(
fr.clone(), // train
Code example source: origin: h2oai/h2o-3
try {
di = new DataInfo(
fr.clone(), // train
Code example source: origin: h2oai/h2o-2
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, new GLM2.Source((Frame)fr.clone(), fr.vec("CAPSULE"), true, true), Family.binomial).setNonNegative(true).setRegularization(new double[]{1},new double[]{2.22E-5}).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
Assert.assertTrue(model.get_params().state == Job.JobState.DONE);
Code example source: origin: h2oai/h2o-2
new GLM2("GLM offset test on prostate.",Key.make(),modelKey,new GLM2.Source((Frame)fr.clone(),fr.vec("CAPSULE"),false,false),Family.binomial).setRegularization(new double[]{0},new double[]{0}).doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
Assert.assertTrue(model.get_params().state == Job.JobState.DONE);
assertEquals(413, val.aic(),1e-1);
score = model.score((Frame)fr.clone());
Vec mu = score.vec("1");
final double [] exp_preds =
Code example source: origin: h2oai/h2o-2
try {
GLM2.Source src = new GLM2.Source((Frame)fr.clone(), fr.vec("CAPSULE"), false, true);
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, src, Family.binomial).setNonNegative(false).setRegularization(new double[]{0},new double[]{0.000}).setBetaConstraints(betaConstraints).setHighAccuracy().doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
Assert.assertEquals(0,grad[i] + betaConstraints.vec("rho").at(i) * (beta[i] - betaConstraints.vec("beta_given").at(i)),1e-8);
src = new GLM2.Source((Frame)fr.clone(), fr.vec("CAPSULE"), true, true);
new GLM2("GLM offset test on prostate.", Key.make(), modelKey, src, Family.binomial).setNonNegative(false).setRegularization(new double[]{0},new double[]{0.000}).setBetaConstraints(betaConstraints).setHighAccuracy().doInit().fork().get(); //.setHighAccuracy().doInit().fork().get();
model = DKV.get(modelKey).get();
Code example source: origin: ai.h2o/h2o-algos
if (_parms._link == Link.family_default)
  _parms._link = _parms._family.defaultLink;
_dinfo = new DataInfo(_train.clone(), _valid, 1,
    _parms._use_all_factor_levels || _parms._lambda_search,
    _parms._standardize ? DataInfo.TransformType.STANDARDIZE : DataInfo.TransformType.NONE,
    DataInfo.TransformType.NONE,
    _parms._missing_values_handling == MissingValuesHandling.Skip,
    _parms._missing_values_handling == MissingValuesHandling.MeanImputation,
    false, hasWeightCol(), hasOffsetCol(), hasFoldCol(),
    _parms.interactionSpec());