This article collects code examples of the Java method org.nd4j.linalg.factory.Nd4j.toFlattened() and shows how Nd4j.toFlattened() is used in practice. The examples are extracted from selected projects hosted on GitHub, Stack Overflow, Maven, and similar platforms, and should serve as useful references. Details of the Nd4j.toFlattened() method:
Package: org.nd4j.linalg.factory
Class: Nd4j
Method: toFlattened
Description: Create a long row vector of all of the given ndarrays
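Before the project-sourced snippets, here is a minimal, self-contained usage sketch (assuming a standard ND4J setup; the class name ToFlattenedDemo and the input shapes are illustrative choices, not taken from any of the projects below). It shows the varargs overload and the overload that takes an explicit ordering character, both of which appear in the examples that follow.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ToFlattenedDemo {
    public static void main(String[] args) {
        // Two small matrices with different shapes (illustrative values)
        INDArray a = Nd4j.linspace(1, 4, 4).reshape(2, 2);   // [[1,2],[3,4]]
        INDArray b = Nd4j.linspace(5, 10, 6).reshape(2, 3);  // [[5,6,7],[8,9,10]]

        // Concatenate the flattened contents of both arrays into one row vector
        INDArray flat = Nd4j.toFlattened(a, b);
        System.out.println(flat.length());   // 10

        // Overload with an explicit ordering character: 'c' (row-major) or 'f' (column-major)
        // controls the order in which each array's elements are copied out
        INDArray flatC = Nd4j.toFlattened('c', a, b);
        System.out.println(flatC);
    }
}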
Code example source: deeplearning4j/dl4j-examples
static INDArray append(INDArray arr1, INDArray values, int dimension) {
    if (dimension == -1) {
        return Nd4j.toFlattened(arr1, values);
    } else {
        return Nd4j.concat(dimension, arr1, values);
    }
}
Code example source: deeplearning4j/dl4j-examples
static INDArray insert(INDArray arr1, int index, INDArray values, int dimension) {
    if (dimension == -1) {
        INDArray flat1 = Nd4j.toFlattened(arr1);
        INDArray flatValues = Nd4j.toFlattened(values);
        INDArray firstSlice = flat1.get(NDArrayIndex.interval(0, index));
        INDArray secondSlice = flat1.get(NDArrayIndex.interval(index, flat1.length()));
        return Nd4j.toFlattened(firstSlice, flatValues, secondSlice);
    } else {
        INDArray firstSlice = arr1.get(createIntervalOnDimension(dimension, false,
                0, index));
        INDArray secondSlice = arr1.get(createIntervalOnDimension(dimension, false,
                index, arr1.shape()[dimension]));
        return Nd4j.concat(dimension, firstSlice, values, secondSlice);
    }
}
Code example source: deeplearning4j/nd4j
protected INDArray handleParamsView(INDArray outputArray, INDArray paramView) {
    //minor optimization when the views are the same, just return
    if (paramView == null || paramView == outputArray)
        return outputArray;
    INDArray flat = Nd4j.toFlattened(order(), outputArray);
    if (flat.length() != paramView.length())
        throw new RuntimeException("ParamView length does not match initialized weights length (view length: "
                + paramView.length() + ", view shape: " + Arrays.toString(paramView.shape())
                + "; flattened length: " + flat.length());
    paramView.assign(flat);
    return paramView.reshape(order(), outputArray.shape());
}
Code example source: deeplearning4j/nd4j
public static INDArray tailor4d2d(@NonNull INDArray data) {
    long instances = data.size(0);
    long channels = data.size(1);
    long height = data.size(2);
    long width = data.size(3);
    INDArray in2d = Nd4j.create(channels, height * width * instances);
    long tads = data.tensorssAlongDimension(3, 2, 0);
    for (int i = 0; i < tads; i++) {
        INDArray thisTAD = data.tensorAlongDimension(i, 3, 2, 0);
        in2d.putRow(i, Nd4j.toFlattened(thisTAD));
    }
    return in2d.transposei();
}
Code example source: deeplearning4j/dl4j-examples
static INDArray delete(int dimension, INDArray arr1, int... interval) {
    int length = interval.length;
    int lastIntervalValue = interval[length - 1];
    if (dimension == -1) {
        INDArray array1 = arr1.get(NDArrayIndex.interval(0, interval[0]));
        if (lastIntervalValue == arr1.length() - 1) {
            return Nd4j.toFlattened(array1);
        } else {
            INDArray array2 = arr1.get(NDArrayIndex.interval(lastIntervalValue + 1,
                    arr1.length()));
            return Nd4j.toFlattened(array1, array2);
        }
    } else {
        INDArray array1 = arr1.get(createIntervalOnDimension(dimension, false, 0, interval[0]));
        if (lastIntervalValue == arr1.shape()[dimension] - 1) {
            return array1;
        } else {
            INDArray array2 = arr1.get(createIntervalOnDimension(dimension, false,
                    lastIntervalValue + 1,
                    arr1.shape()[dimension]));
            return Nd4j.concat(dimension, array1, array2);
        }
    }
}
Code example source: deeplearning4j/dl4j-examples
print("Ascended sorted array on zero axis: ", axisSortedArray);
INDArray flattened = Nd4j.toFlattened(fourByFiveRandomZeroToOne);
print("Flattened array", flattened);
Code example source: org.deeplearning4j/deeplearning4j-nn
@Override
public INDArray params() {
    //C order flattening, to match the gradient flattening order
    return Nd4j.toFlattened('c', params.values());
}
Code example source: org.deeplearning4j/deeplearning4j-nn
public INDArray params() {
    List<INDArray> list = new ArrayList<>(2);
    for (Map.Entry<String, INDArray> entry : params.entrySet()) {
        list.add(entry.getValue());
    }
    return Nd4j.toFlattened('f', list);
}
Code example source: org.deeplearning4j/deeplearning4j-nn
private void flattenGradient() {
    if (flatteningOrders != null) {
        //Arrays with non-default order get flattened to row vector first, then everything is flattened to f order
        //TODO revisit this, and make more efficient
        List<INDArray> toFlatten = new ArrayList<>();
        for (Map.Entry<String, INDArray> entry : gradients.entrySet()) {
            if (flatteningOrders.containsKey(entry.getKey())
                    && flatteningOrders.get(entry.getKey()) != DEFAULT_FLATTENING_ORDER) {
                //Specific flattening order for this array, that isn't the default
                toFlatten.add(Nd4j.toFlattened(flatteningOrders.get(entry.getKey()), entry.getValue()));
            } else {
                //default flattening order for this array
                toFlatten.add(entry.getValue());
            }
        }
        flattenedGradient = Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, toFlatten);
    } else {
        //Standard case: flatten all to f order
        flattenedGradient = Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, gradients.values());
    }
}
Code example source: org.deeplearning4j/deeplearning4j-nn
@Override
public INDArray gradient(List<String> order) {
    List<INDArray> toFlatten = new ArrayList<>();
    if (flatteningOrders == null) {
        for (String s : order) {
            if (!gradients.containsKey(s))
                continue;
            toFlatten.add(gradients.get(s));
        }
    } else {
        for (String s : order) {
            if (!gradients.containsKey(s))
                continue;
            if (flatteningOrders.containsKey(s) && flatteningOrders.get(s) != DEFAULT_FLATTENING_ORDER) {
                //Arrays with non-default order get flattened to row vector first, then everything is flattened to f order
                //TODO revisit this, and make more efficient
                toFlatten.add(Nd4j.toFlattened(flatteningOrders.get(s), gradients.get(s)));
            } else {
                toFlatten.add(gradients.get(s));
            }
        }
    }
    return Nd4j.toFlattened(DEFAULT_FLATTENING_ORDER, toFlatten);
}
Code example source: org.deeplearning4j/deeplearning4j-nn
public INDArray scoreExamples(DataSetIterator iter, boolean addRegularizationTerms) {
    List<INDArray> out = new ArrayList<>();
    while (iter.hasNext()) {
        out.add(scoreExamples(iter.next(), addRegularizationTerms));
    }
    return Nd4j.toFlattened('f', out);
}
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
 * Returns a 1 x m vector where the vector is composed of
 * a flattened vector of all of the weights for the
 * various neuralNets(w,hbias NOT VBIAS) and output layer
 *
 * @return the params for this neural net
 */
public INDArray params(boolean backwardOnly) {
    if (backwardOnly)
        return params();
    List<INDArray> params = new ArrayList<>();
    for (Layer layer : getLayers()) {
        INDArray layerParams = layer.params();
        if (layerParams != null)
            params.add(layerParams); //may be null: subsampling etc layers
    }
    return Nd4j.toFlattened('f', params);
}
Code example source: org.deeplearning4j/deeplearning4j-nn
/**
 * Get the parameters for the ComputationGraph
 *
 * @param backwardOnly If true: backprop parameters only (i.e., no visible layer biases used in layerwise pretraining layers)
 */
public INDArray params(boolean backwardOnly) {
    if (backwardOnly)
        return flattenedParams;
    List<INDArray> list = new ArrayList<>(layers.length);
    for (int i = 0; i < topologicalOrder.length; i++) {
        if (!vertices[topologicalOrder[i]].hasLayer())
            continue;
        Layer l = vertices[topologicalOrder[i]].getLayer();
        INDArray layerParams = l.params();
        if (layerParams != null)
            list.add(layerParams); //may be null: subsampling etc layers
    }
    return Nd4j.toFlattened('f', list);
}
Code example source: org.nd4j/nd4j-api
public static INDArray tailor4d2d(@NonNull INDArray data) {
    int instances = data.size(0);
    int channels = data.size(1);
    int height = data.size(2);
    int width = data.size(3);
    INDArray in2d = Nd4j.create(channels, height * width * instances);
    int tads = data.tensorssAlongDimension(3, 2, 0);
    for (int i = 0; i < tads; i++) {
        INDArray thisTAD = data.tensorAlongDimension(i, 3, 2, 0);
        in2d.putRow(i, Nd4j.toFlattened(thisTAD));
    }
    return in2d.transposei();
}
Code example source: Waikato/wekaDeeplearning4j
for (int i = 0; i < batchsize; i++) {
    INDArray row = ndArray.getRow(i);
    INDArray flattenedRow = Nd4j.toFlattened(row);
    Instance inst = new DenseInstance(atts.size());
    for (int j = 0; j < flattenedRow.size(1); j++) {
Code example source: org.deeplearning4j/deeplearning4j-nn
INDArray flat = Nd4j.toFlattened(order, ret);
if (flat.length() != paramView.length())
    throw new RuntimeException("ParamView length does not match initialized weights length (view length: "