This article collects a number of code examples for the Java class de.lmu.ifi.dbs.elki.logging.statistics.Duration and shows how the Duration class is used in practice. The examples were extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they are reasonably representative and should serve as useful references. Details of the Duration class:
Package: de.lmu.ifi.dbs.elki.logging.statistics
Class name: Duration
Description: Class that tracks the duration of a task. TODO: add support for different time units?
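Before the project snippets, here is a minimal, self-contained usage sketch that follows the pattern recurring in all the examples below: create a named Duration via a Logging instance, call begin() before the task, call end() afterwards, and hand the result to statistics(). The Logging.getLogger(...) factory call and the statistic key used here are assumptions for illustration; only newDuration(), begin(), end(), and statistics() are taken directly from the examples.
import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.statistics.Duration;

public class DurationUsageSketch {
  // Assumption: Logging.getLogger(Class) is ELKI's usual logger factory.
  private static final Logging LOG = Logging.getLogger(DurationUsageSketch.class);

  public static void main(String[] args) {
    // Hypothetical statistic key; real code typically uses the owning class name plus a suffix.
    Duration time = LOG.newDuration(DurationUsageSketch.class.getName() + ".runtime").begin();
    busyWork();
    // end() stops the timer; statistics() records it (printed when statistics logging is enabled).
    LOG.statistics(time.end());
  }

  private static void busyWork() {
    // Placeholder workload so the timer has something to measure.
    double sum = 0;
    for(int i = 0; i < 1_000_000; i++) {
      sum += Math.sqrt(i);
    }
    System.out.println(sum);
  }
}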
Code example source: elki-project/elki
/**
 * Choose the initial means.
 *
 * @param database Database
 * @param relation Relation
 * @return Means
 */
protected double[][] initialMeans(Database database, Relation<V> relation) {
  Duration inittime = getLogger().newDuration(initializer.getClass() + ".time").begin();
  double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction());
  getLogger().statistics(inittime.end());
  return means;
}
Code example source: de.lmu.ifi.dbs.elki/elki-clustering
/**
 * Choose the initial means.
 *
 * @param database Database
 * @param relation Relation
 * @return Means
 */
protected double[][] initialMeans(Database database, Relation<V> relation) {
  Duration inittime = getLogger().newDuration(initializer.getClass() + ".time").begin();
  double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction());
  getLogger().statistics(inittime.end());
  return means;
}
Code example source: de.lmu.ifi.dbs.elki/elki
/**
 * Iterate over the k range.
 *
 * @param prefix Prefix string
 * @param startk Start k
 * @param stepk Step k
 * @param maxk Max k
 * @param runner Runner to run
 */
private void runForEachK(String prefix, int startk, int stepk, int maxk, AlgRunner runner) {
  if(isDisabled(prefix)) {
    LOG.verbose("Skipping (disabled): " + prefix);
    return; // Disabled
  }
  LOG.verbose("Running " + prefix);
  final int digits = (int) Math.ceil(Math.log10(maxk + 1));
  final String format = "%s-%0" + digits + "d";
  for(int k = startk; k <= maxk; k += stepk) {
    Duration time = LOG.newDuration(this.getClass().getCanonicalName() + "." + prefix + ".k" + k + ".runtime").begin();
    runner.run(k, String.format(Locale.ROOT, format, prefix, k));
    LOG.statistics(time.end());
  }
}
Code example source: elki-project/elki
// Non-contiguous excerpt: the surrounding loop over evaluation folds (which
// provides p and partition) and the closing braces are not shown in this snippet.
@Override
public void run() {
  Duration ptime = LOG.newDuration("evaluation.time.load").begin();
  MultipleObjectsBundle allData = databaseConnection.loadData();
  holdout.initialize(allData);
  LOG.statistics(ptime.end());
  Duration time = LOG.newDuration("evaluation.time.total").begin();
  ArrayList<ClassLabel> labels = holdout.getLabels();
  int[][] confusion = new int[labels.size()][labels.size()];
  Duration dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".init.time").begin();
  Database db = new StaticArrayDatabase(new MultipleObjectsBundleDatabaseConnection(partition.getTraining()), indexFactories);
  db.initialize();
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".train.time").begin();
  Relation<ClassLabel> lrel = db.getRelation(TypeUtil.CLASSLABEL);
  algorithm.buildClassifier(db, lrel);
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".evaluation.time").begin();
  LOG.statistics(dur.end());
  LOG.statistics(time.end());
  ConfusionMatrix m = new ConfusionMatrix(labels, confusion);
  LOG.statistics(m.toString());
Code example source: de.lmu.ifi.dbs.elki/elki-classification
// Non-contiguous excerpt: the surrounding loop over evaluation folds (which
// provides p and partition) and the closing braces are not shown in this snippet.
@Override
public void run() {
  Duration ptime = LOG.newDuration("evaluation.time.load").begin();
  MultipleObjectsBundle allData = databaseConnection.loadData();
  holdout.initialize(allData);
  LOG.statistics(ptime.end());
  Duration time = LOG.newDuration("evaluation.time.total").begin();
  ArrayList<ClassLabel> labels = holdout.getLabels();
  int[][] confusion = new int[labels.size()][labels.size()];
  Duration dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".init.time").begin();
  Database db = new StaticArrayDatabase(new MultipleObjectsBundleDatabaseConnection(partition.getTraining()), indexFactories);
  db.initialize();
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".train.time").begin();
  Relation<ClassLabel> lrel = db.getRelation(TypeUtil.CLASSLABEL);
  algorithm.buildClassifier(db, lrel);
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".evaluation.time").begin();
  LOG.statistics(dur.end());
  LOG.statistics(time.end());
  ConfusionMatrix m = new ConfusionMatrix(labels, confusion);
  LOG.statistics(m.toString());
Code example source: de.lmu.ifi.dbs.elki/elki
// Non-contiguous excerpt: the surrounding loop over evaluation folds (which
// provides p and partition) and the closing braces are not shown in this snippet.
@Override
public void run() throws UnableToComplyException {
  Duration ptime = LOG.newDuration("evaluation.time.load").begin();
  MultipleObjectsBundle allData = databaseConnection.loadData();
  holdout.initialize(allData);
  LOG.statistics(ptime.end());
  Duration time = LOG.newDuration("evaluation.time.total").begin();
  ArrayList<ClassLabel> labels = holdout.getLabels();
  int[][] confusion = new int[labels.size()][labels.size()];
  Duration dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".init.time").begin();
  Database db = new StaticArrayDatabase(new MultipleObjectsBundleDatabaseConnection(partition.getTraining()), indexFactories);
  db.initialize();
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".train.time").begin();
  Relation<ClassLabel> lrel = db.getRelation(TypeUtil.CLASSLABEL);
  algorithm.buildClassifier(db, lrel);
  LOG.statistics(dur.end());
  dur = LOG.newDuration(this.getClass().getName() + ".fold-" + (p + 1) + ".evaluation.time").begin();
  LOG.statistics(dur.end());
  LOG.statistics(time.end());
  ConfusionMatrix m = new ConfusionMatrix(labels, confusion);
  LOG.statistics(m.toString());
Code example source: elki-project/elki
/**
 * Choose the initial medoids.
 *
 * @param distQ Distance query
 * @param ids IDs to choose from
 * @return Initial medoids
 */
protected ArrayModifiableDBIDs initialMedoids(DistanceQuery<V> distQ, DBIDs ids) {
  if(getLogger().isStatistics()) {
    getLogger().statistics(new StringStatistic(getClass().getName() + ".initialization", initializer.toString()));
  }
  Duration initd = getLogger().newDuration(getClass().getName() + ".initialization-time").begin();
  ArrayModifiableDBIDs medoids = DBIDUtil.newArray(initializer.chooseInitialMedoids(k, ids, distQ));
  getLogger().statistics(initd.end());
  if(medoids.size() != k) {
    throw new AbortException("Initializer " + initializer.toString() + " did not return " + k + " means, but " + medoids.size());
  }
  return medoids;
}
Code example source: de.lmu.ifi.dbs.elki/elki-clustering
/**
 * Choose the initial medoids.
 *
 * @param distQ Distance query
 * @param ids IDs to choose from
 * @return Initial medoids
 */
protected ArrayModifiableDBIDs initialMedoids(DistanceQuery<V> distQ, DBIDs ids) {
  if(getLogger().isStatistics()) {
    getLogger().statistics(new StringStatistic(getClass().getName() + ".initialization", initializer.toString()));
  }
  Duration initd = getLogger().newDuration(getClass().getName() + ".initialization-time").begin();
  ArrayModifiableDBIDs medoids = DBIDUtil.newArray(initializer.chooseInitialMedoids(k, ids, distQ));
  getLogger().statistics(initd.end());
  if(medoids.size() != k) {
    throw new AbortException("Initializer " + initializer.toString() + " did not return " + k + " means, but " + medoids.size());
  }
  return medoids;
}
Code example source: elki-project/elki
// Non-contiguous excerpts from the data-loading method: a streaming-parser path,
// a bundle-parser path, and the filter step; intervening lines and closing braces
// are not shown in this snippet.
LOG.debugFine("Parsing as stream.");
Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".load").begin() : null;
MultipleObjectsBundle objects = invokeStreamFilters(streamParser).asMultipleObjectsBundle();
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".parse").begin() : null;
ins = ins != null ? ins : in.get();
MultipleObjectsBundle parsingResult = parser.parse(ins);
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

LOG.debugFine("Invoking filters.");
Duration fduration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".filter").begin() : null;
MultipleObjectsBundle objects = invokeBundleFilters(parsingResult);
if(fduration != null) {
  LOG.statistics(fduration.end());
Code example source: elki-project/elki
/**
 * Iterate over the k range.
 *
 * @param prefix Prefix string
 * @param mink Minimum value of k for this method
 * @param maxk Maximum value of k for this method
 * @param runner Runner to run
 * @param out Output function
 */
private void runForEachK(String prefix, int mink, int maxk, IntFunction<OutlierResult> runner, BiConsumer<String, OutlierResult> out) {
  if(isDisabled(prefix)) {
    LOG.verbose("Skipping (disabled): " + prefix);
    return; // Disabled
  }
  LOG.verbose("Running " + prefix);
  final int digits = (int) FastMath.ceil(FastMath.log10(krange.getMax() + 1));
  final String format = "%s-%0" + digits + "d";
  krange.forEach(k -> {
    if(k >= mink && k <= maxk) {
      Duration time = LOG.newDuration(this.getClass().getCanonicalName() + "." + prefix + ".k" + k + ".runtime").begin();
      OutlierResult result = runner.apply(k);
      LOG.statistics(time.end());
      if(result != null) {
        out.accept(String.format(Locale.ROOT, format, prefix, k), result);
        result.getHierarchy().removeSubtree(result);
      }
    }
  });
}
Code example source: de.lmu.ifi.dbs.elki/elki-input
// Non-contiguous excerpts from the data-loading method: a streaming-parser path,
// a bundle-parser path, and the filter step; intervening lines and closing braces
// are not shown in this snippet.
LOG.debugFine("Parsing as stream.");
Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".load").begin() : null;
MultipleObjectsBundle objects = invokeStreamFilters(streamParser).asMultipleObjectsBundle();
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".parse").begin() : null;
ins = ins != null ? ins : in.get();
MultipleObjectsBundle parsingResult = parser.parse(ins);
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

LOG.debugFine("Invoking filters.");
Duration fduration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".filter").begin() : null;
MultipleObjectsBundle objects = invokeBundleFilters(parsingResult);
if(fduration != null) {
  LOG.statistics(fduration.end());
Code example source: de.lmu.ifi.dbs.elki/elki
// Non-contiguous excerpts from the data-loading method: a streaming-parser path,
// a bundle-parser path, and the filter step; intervening lines and closing braces
// are not shown in this snippet.
LOG.debugFine("Parsing as stream.");
Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".load").begin() : null;
MultipleObjectsBundle objects = invokeStreamFilters(streamParser).asMultipleObjectsBundle();
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

Duration duration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".parse").begin() : null;
MultipleObjectsBundle parsingResult = parser.parse(in);
parser.cleanup();
if(duration != null) {
  LOG.statistics(duration.end());

LOG.debugFine("Invoking filters.");
Duration fduration = LOG.isStatistics() ? LOG.newDuration(this.getClass().getName() + ".filter").begin() : null;
MultipleObjectsBundle objects = invokeBundleFilters(parsingResult);
if(fduration != null) {
  LOG.statistics(fduration.end());
Code example source: de.lmu.ifi.dbs.elki/elki
/**
 * Run the Eclat algorithm
 *
 * @param db Database to process
 * @param relation Bit vector relation
 * @return Frequent patterns found
 */
public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) {
  // TODO: implement with resizable arrays, to not need dim.
  final int dim = RelationUtil.dimensionality(relation);
  final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation);
  // Compute absolute minsupport
  final int minsupp = getMinimumSupport(relation.size());
  LOG.verbose("Build 1-dimensional transaction lists.");
  Duration ctime = LOG.newDuration(STAT + "eclat.transposition.time").begin();
  DBIDs[] idx = buildIndex(relation, dim, minsupp);
  LOG.statistics(ctime.end());
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building frequent itemsets", idx.length, LOG) : null;
  Duration etime = LOG.newDuration(STAT + "eclat.extraction.time").begin();
  final List<Itemset> solution = new ArrayList<>();
  for(int i = 0; i < idx.length; i++) {
    LOG.incrementProcessed(prog);
    extractItemsets(idx, i, minsupp, solution);
  }
  LOG.ensureCompleted(prog);
  Collections.sort(solution);
  LOG.statistics(etime.end());
  LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size()));
  return new FrequentItemsetsResult("Eclat", "eclat", solution, meta);
}
Code example source: elki-project/elki
/**
 * Run the Eclat algorithm
 *
 * @param db Database to process
 * @param relation Bit vector relation
 * @return Frequent patterns found
 */
public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) {
  // TODO: implement with resizable arrays, to not need dim.
  final int dim = RelationUtil.dimensionality(relation);
  final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation);
  // Compute absolute minsupport
  final int minsupp = getMinimumSupport(relation.size());
  LOG.verbose("Build 1-dimensional transaction lists.");
  Duration ctime = LOG.newDuration(STAT + "eclat.transposition.time").begin();
  DBIDs[] idx = buildIndex(relation, dim, minsupp);
  LOG.statistics(ctime.end());
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building frequent itemsets", idx.length, LOG) : null;
  Duration etime = LOG.newDuration(STAT + "eclat.extraction.time").begin();
  final List<Itemset> solution = new ArrayList<>();
  for(int i = 0; i < idx.length; i++) {
    LOG.incrementProcessed(prog);
    extractItemsets(idx, i, minsupp, solution);
  }
  LOG.ensureCompleted(prog);
  Collections.sort(solution);
  LOG.statistics(etime.end());
  LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size()));
  return new FrequentItemsetsResult("Eclat", "eclat", solution, meta, relation.size());
}
Code example source: de.lmu.ifi.dbs.elki/elki-itemsets
/**
 * Run the Eclat algorithm
 *
 * @param db Database to process
 * @param relation Bit vector relation
 * @return Frequent patterns found
 */
public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) {
  // TODO: implement with resizable arrays, to not need dim.
  final int dim = RelationUtil.dimensionality(relation);
  final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation);
  // Compute absolute minsupport
  final int minsupp = getMinimumSupport(relation.size());
  LOG.verbose("Build 1-dimensional transaction lists.");
  Duration ctime = LOG.newDuration(STAT + "eclat.transposition.time").begin();
  DBIDs[] idx = buildIndex(relation, dim, minsupp);
  LOG.statistics(ctime.end());
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building frequent itemsets", idx.length, LOG) : null;
  Duration etime = LOG.newDuration(STAT + "eclat.extraction.time").begin();
  final List<Itemset> solution = new ArrayList<>();
  for(int i = 0; i < idx.length; i++) {
    LOG.incrementProcessed(prog);
    extractItemsets(idx, i, minsupp, solution);
  }
  LOG.ensureCompleted(prog);
  Collections.sort(solution);
  LOG.statistics(etime.end());
  LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size()));
  return new FrequentItemsetsResult("Eclat", "eclat", solution, meta, relation.size());
}
Code example source: elki-project/elki
/**
 * Perform the preprocessing step.
 *
 * @param modelcls Class of models
 * @param relation Data relation
 * @param query Range query
 * @return Precomputed models
 */
public DataStore<M> preprocess(Class<? super M> modelcls, Relation<O> relation, RangeQuery<O> query) {
  WritableDataStore<M> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, modelcls);
  Duration time = getLogger().newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = getLogger().isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), getLogger()) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DoubleDBIDList neighbors = query.getRangeForDBID(iditer, epsilon);
    storage.put(iditer, computeLocalModel(iditer, neighbors, relation));
    getLogger().incrementProcessed(progress);
  }
  getLogger().ensureCompleted(progress);
  getLogger().statistics(time.end());
  return storage;
}
Code example source: elki-project/elki
/**
 * Full instantiation method.
 *
 * @param database Database
 * @param relation Vector relation
 * @return Instance
 */
public COPACNeighborPredicate.Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
  WritableDataStore<COPACModel> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, COPACModel.class);
  Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
    storage.put(iditer, computeLocalModel(iditer, ref, relation));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  LOG.statistics(time.end());
  return new Instance(relation.getDBIDs(), storage);
}
Code example source: de.lmu.ifi.dbs.elki/elki-clustering
/**
 * Full instantiation method.
 *
 * @param database Database
 * @param relation Vector relation
 * @return Instance
 */
public COPACNeighborPredicate.Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
  WritableDataStore<COPACModel> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, COPACModel.class);
  Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
    storage.put(iditer, computeLocalModel(iditer, ref, relation));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  LOG.statistics(time.end());
  return new Instance(relation.getDBIDs(), storage);
}
Code example source: de.lmu.ifi.dbs.elki/elki
/**
 * Full instantiation interface.
 *
 * @param database Database
 * @param relation Relation
 * @return Instance
 */
public Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
  WritableDataStore<PCAFilteredResult> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, PCAFilteredResult.class);
  Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
    storage.put(iditer, settings.pca.processQueryResult(ref, relation));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  LOG.statistics(time.end());
  return new Instance(relation.getDBIDs(), storage, relation);
}
Code example source: de.lmu.ifi.dbs.elki/elki
/**
 * Full instantiation method.
 *
 * @param database Database
 * @param relation Vector relation
 * @return Instance
 */
public COPACNeighborPredicate.Instance instantiate(Database database, Relation<V> relation) {
  DistanceQuery<V> dq = database.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = database.getKNNQuery(dq, settings.k);
  WritableDataStore<COPACModel> storage = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, COPACModel.class);
  Duration time = LOG.newDuration(this.getClass().getName() + ".preprocessing-time").begin();
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress(this.getClass().getName(), relation.size(), LOG) : null;
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    DoubleDBIDList ref = knnq.getKNNForDBID(iditer, settings.k);
    storage.put(iditer, computeLocalModel(iditer, ref, relation));
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  LOG.statistics(time.end());
  return new Instance(relation.getDBIDs(), storage);
}