This article collects code examples of the Java method org.nd4j.linalg.factory.Nd4j.hstack(), illustrating how Nd4j.hstack() is used in practice. The examples are taken from selected open-source projects found on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of the Nd4j.hstack() method are as follows:
Package path: org.nd4j.linalg.factory.Nd4j
Class name: Nd4j
Method name: hstack
Description: Concatenates two matrices horizontally. Matrices must have identical numbers of rows.
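Before the collected examples, here is a minimal, self-contained sketch of the behavior described above. The class name, shapes, and values are illustrative assumptions, not code from any of the projects below.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class HstackSketch {
    public static void main(String[] args) {
        // Two matrices with the same number of rows (2) but different column counts.
        INDArray ones = Nd4j.ones(2, 2);   // shape [2, 2]
        INDArray zeros = Nd4j.zeros(2, 3); // shape [2, 3]

        // Horizontal concatenation appends columns; row counts must match.
        INDArray stacked = Nd4j.hstack(ones, zeros); // shape [2, 5]
        System.out.println(stacked);
    }
}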
Code example from: deeplearning4j/nd4j
/**
* Adds a feature for each example on to the current feature vector
*
* @param toAdd the feature vector to add
*/
@Override
public void addFeatureVector(INDArray toAdd) {
setFeatures(Nd4j.hstack(getFeatureMatrix(), toAdd));
}
Code example from: deeplearning4j/dl4j-examples
INDArray hstack = Nd4j.hstack(ones,zeros);
System.out.println("### HSTACK ####");
System.out.println(hstack);
Code example from: deeplearning4j/dl4j-examples
INDArray hStack = Nd4j.hstack(rowVector1, rowVector2); //Horizontal stack: [1,3]+[1,3] to [1,6]
System.out.println("\n\n\nCreating INDArrays from other INDArrays, using hstack:");
System.out.println("hStack:\n" + hStack);
Code example from: org.deeplearning4j/deeplearning4j-nn
private INDArray constructParams() {
//some params will be null for subsampling etc
INDArray keepView = null;
for (INDArray aParam : editedParams) {
if (aParam != null) {
if (keepView == null) {
keepView = aParam;
} else {
keepView = Nd4j.hstack(keepView, aParam);
}
}
}
if (!appendParams.isEmpty()) {
INDArray appendView = Nd4j.hstack(appendParams);
return Nd4j.hstack(keepView, appendView);
} else {
return keepView;
}
}
Code example from: org.nd4j/nd4j-parameter-server-node
@Override
public INDArray getAccumulatedResult() {
if (aggregationWidth == 1) {
return chunks.get((short) 0);
} else
return Nd4j.hstack(chunks.values());
}
Code example from: org.nd4j/nd4j-parameter-server-node_2.11
@Override
public INDArray getAccumulatedResult() {
if (aggregationWidth == 1) {
return chunks.get((short) 0);
} else
return Nd4j.hstack(chunks.values());
}
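Note that both snippets above pass a Collection<INDArray> (chunks.values()) to hstack, so the method accepts a collection of arrays as well as varargs. A minimal sketch of that usage follows; the class name, shapes, and values are illustrative assumptions.

import java.util.Arrays;
import java.util.List;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class HstackCollectionSketch {
    public static void main(String[] args) {
        // Chunks share the same row count (4); column counts may differ.
        List<INDArray> chunks = Arrays.asList(Nd4j.ones(4, 2), Nd4j.zeros(4, 3));
        INDArray joined = Nd4j.hstack(chunks); // shape [4, 5]
        System.out.println(joined);
    }
}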
Code example from: org.nd4j/nd4j-api
/**
* Adds a feature for each example on to the current feature vector
*
* @param toAdd the feature vector to add
*/
@Override
public void addFeatureVector(INDArray toAdd) {
setFeatures(Nd4j.hstack(getFeatureMatrix(), toAdd));
}
Code example from: mccorby/FederatedAndroidTrainer
@Override
public FederatedDataSet getTestData() {
Random rand = new Random(seed);
int numSamples = N_SAMPLES/10;
double[] sum = new double[numSamples];
double[] input1 = new double[numSamples];
double[] input2 = new double[numSamples];
for (int i = 0; i < numSamples; i++) {
input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
sum[i] = input1[i] + input2[i];
}
INDArray inputNDArray1 = Nd4j.create(input1, new int[]{numSamples, 1});
INDArray inputNDArray2 = Nd4j.create(input2, new int[]{numSamples, 1});
INDArray inputNDArray = Nd4j.hstack(inputNDArray1, inputNDArray2);
INDArray outPut = Nd4j.create(sum, new int[]{numSamples, 1});
return new FederatedDataSetImpl(new DataSet(inputNDArray, outPut));
}
Code example from: mccorby/FederatedAndroidTrainer
@Override
public FederatedDataSet getTrainingData() {
Random rand = new Random(seed);
double[] sum = new double[N_SAMPLES];
double[] input1 = new double[N_SAMPLES];
double[] input2 = new double[N_SAMPLES];
for (int i = 0; i < N_SAMPLES; i++) {
input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
sum[i] = input1[i] + input2[i];
}
INDArray inputNDArray1 = Nd4j.create(input1, new int[]{N_SAMPLES, 1});
INDArray inputNDArray2 = Nd4j.create(input2, new int[]{N_SAMPLES, 1});
INDArray inputNDArray = Nd4j.hstack(inputNDArray1, inputNDArray2);
INDArray outPut = Nd4j.create(sum, new int[]{N_SAMPLES, 1});
DataSet dataSet = new DataSet(inputNDArray, outPut);
dataSet.shuffle();
return new FederatedDataSetImpl(dataSet);
}
Code example from: neo4j-graph-analytics/ml-models
final INDArray nodeFeatures = Nd4j.hstack(arrays);
embedding.putRow(nodeId, nodeFeatures);
Code example from: org.deeplearning4j/deeplearning4j-nn
out = Nd4j.hstack(inputs);
break;
case 3:
out = Nd4j.hstack(inputs);
Code example from: sjsdfg/dl4j-tutorials
private static DataSetIterator getTrainingData(int batchSize, Random rand) {
double [] sum = new double[nSamples];
double [] input1 = new double[nSamples];
double [] input2 = new double[nSamples];
for (int i= 0; i< nSamples; i++) {
input1[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
input2[i] = MIN_RANGE + (MAX_RANGE - MIN_RANGE) * rand.nextDouble();
sum[i] = input1[i] + input2[i];
}
INDArray inputNDArray1 = Nd4j.create(input1, new int[]{nSamples,1});
INDArray inputNDArray2 = Nd4j.create(input2, new int[]{nSamples,1});
INDArray inputNDArray = Nd4j.hstack(inputNDArray1,inputNDArray2);
INDArray outPut = Nd4j.create(sum, new int[]{nSamples, 1});
DataSet dataSet = new DataSet(inputNDArray, outPut);
List<DataSet> listDs = dataSet.asList();
return new ListDataSetIterator(listDs,batchSize);
}
}
Code example from: neo4j-graph-analytics/ml-models
public Embedding prune(Embedding prevEmbedding, Embedding embedding) {
INDArray embeddingToPrune = Nd4j.hstack(prevEmbedding.getNDEmbedding(), embedding.getNDEmbedding());
Feature[] featuresToPrune = ArrayUtils.addAll(prevEmbedding.getFeatures(), embedding.getFeatures());
progressLogger.log("Feature Pruning: Creating features graph");
final Graph graph = loadFeaturesGraph(embeddingToPrune, prevEmbedding.features.length);
progressLogger.log("Feature Pruning: Created features graph");
progressLogger.log("Feature Pruning: Finding features to keep");
int[] featureIdsToKeep = findConnectedComponents(graph)
.collect(Collectors.groupingBy(item -> item.setId))
.values()
.stream()
.mapToInt(results -> results.stream().mapToInt(value -> (int) value.nodeId).min().getAsInt())
.toArray();
progressLogger.log("Feature Pruning: Found features to keep");
progressLogger.log("Feature Pruning: Pruning embeddings");
INDArray prunedNDEmbedding = pruneEmbedding(embeddingToPrune, featureIdsToKeep);
progressLogger.log("Feature Pruning: Pruned embeddings");
Feature[] prunedFeatures = new Feature[featureIdsToKeep.length];
for (int index = 0; index < featureIdsToKeep.length; index++) {
prunedFeatures[index] = featuresToPrune[featureIdsToKeep[index]];
}
return new Embedding(prunedFeatures, prunedNDEmbedding);
}
Code example from: neo4j-graph-analytics/ml-models
@Override
public INDArray ndOp(INDArray features, INDArray adjacencyMatrix) {
INDArray[] maxes = new INDArray[features.columns()];
for (int fCol = 0; fCol < features.columns(); fCol++) {
INDArray mul = adjacencyMatrix.transpose().mulColumnVector(features.getColumn(fCol));
maxes[fCol] = mul.max(0).transpose();
}
return Nd4j.hstack(maxes);
}
Code example from: org.deeplearning4j/deeplearning4j-datavec-iterators
f = Nd4j.hstack(f1, f2);
} else {
Code example from: org.deeplearning4j/deeplearning4j-core
f = Nd4j.hstack(f1, f2);
} else {