Usage of the org.nd4j.linalg.factory.Nd4j.ones() Method, with Code Examples


This article collects a number of code examples of the Java method org.nd4j.linalg.factory.Nd4j.ones() to show how Nd4j.ones() is used in practice. The examples are drawn mainly from GitHub, Stack Overflow, Maven, and similar platforms, extracted from selected projects, and should serve as a useful reference. Details of the Nd4j.ones() method:
Package path: org.nd4j.linalg.factory.Nd4j
Class name: Nd4j
Method name: ones

Nd4j.ones overview

Creates a row vector with the specified number of columns.
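
Before the collected snippets, here is a minimal usage sketch, assuming an ND4J version in which ones() accepts either a single column count or a full shape (the shape values below are arbitrary illustrations, not taken from any of the projects cited later):

import java.util.Arrays;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class OnesDemo {
    public static void main(String[] args) {
        // Row vector with 5 columns, every element set to 1.0
        INDArray rowVector = Nd4j.ones(5);

        // 3x4 matrix of ones
        INDArray matrix = Nd4j.ones(3, 4);

        // Higher-rank array: one size argument per dimension
        INDArray tensor = Nd4j.ones(2, 3, 4);

        System.out.println(rowVector);
        System.out.println(Arrays.toString(matrix.shape()));   // [3, 4]
        System.out.println(Arrays.toString(tensor.shape()));   // [2, 3, 4]
    }
}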

Code examples

Code example source: deeplearning4j/nd4j

/**
 * Ones like
 *
 * @param arr the array to create the ones like
 * @return ones in the shape of the given array
 */
public static INDArray onesLike(INDArray arr) {
  return ones(arr.shape());
}
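
A short usage sketch of the helper above; template is a hypothetical input array used only for illustration:

INDArray template = Nd4j.zeros(2, 3);              // any existing array
INDArray sameShapeOnes = Nd4j.onesLike(template);  // 2x3 array filled with 1.0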

Code example source: deeplearning4j/dl4j-examples

INDArray ones = Nd4j.ones(nRows, nColumns);

Code example source: deeplearning4j/nd4j

public double getGradient(double gradient, int column, int[] shape) {
  boolean historicalInitialized = false;
  if (this.historicalGradient == null) {
    this.historicalGradient = Nd4j.ones(shape);
    historicalInitialized = true;
  }
  double sqrtHistory = !historicalInitialized ? Math.sqrt(historicalGradient.getDouble(column))
          : historicalGradient.getDouble(column);
  double learningRates = learningRate / (sqrtHistory + epsilon);
  double adjustedGradient = gradient * (learningRates);
  historicalGradient.putScalar(column, historicalGradient.getDouble(column) + gradient * gradient);
  numIterations++;
  //ensure no zeros
  return adjustedGradient;
}

Code example source: deeplearning4j/dl4j-examples

System.out.println(allZeros);
INDArray allOnes = Nd4j.ones(nRows, nColumns);
System.out.println("\nNd4j.ones(nRows, nColumns)");
System.out.println(allOnes);
INDArray threeDimArray = Nd4j.ones(3,4,5);      //3x4x5 INDArray
INDArray fourDimArray = Nd4j.ones(3,4,5,6);     //3x4x5x6 INDArray
INDArray fiveDimArray = Nd4j.ones(3,4,5,6,7);   //3x4x5x6x7 INDArray
System.out.println("\n\n\nCreating INDArrays with more dimensions:");
System.out.println("3d array shape:         " + Arrays.toString(threeDimArray.shape()));

Code example source: deeplearning4j/nd4j

@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
  if (labels.size(1) != preOutput.size(1)) {
    throw new IllegalArgumentException(
        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
            + " number of outputs (nOut = " + preOutput.size(1) + ") ");
  }
  final INDArray grad = Nd4j.ones(labels.shape());
  calculate(labels, preOutput, activationFn, mask, null, grad);
  return grad;
}

Code example source: deeplearning4j/nd4j

@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
  INDArray dLdz = Nd4j.ones(in.shape());
  BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
  dLdz.muli(epsilon);
  return new Pair<>(dLdz, null);
}

Code example source: deeplearning4j/nd4j

@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels,
                           INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
  final INDArray scoreArr = Nd4j.create(labels.size(0), 1);
  final INDArray grad = Nd4j.ones(labels.shape());
  calculate(labels, preOutput, activationFn, mask, scoreArr, grad);
  double score = scoreArr.sumNumber().doubleValue();
  if (average)
    score /= scoreArr.size(0);
  return new Pair<>(score, grad);
}

Code example source: deeplearning4j/nd4j

public static INDArray mergePerOutputMasks2d(long[] outShape, INDArray[] arrays, INDArray[] masks) {
  val numExamplesPerArr = new long[arrays.length];
  for (int i = 0; i < numExamplesPerArr.length; i++) {
    numExamplesPerArr[i] = arrays[i].size(0);
  }
  INDArray outMask = Nd4j.ones(outShape); //Initialize to 'all present' (1s)
  int rowsSoFar = 0;
  for (int i = 0; i < masks.length; i++) {
    long thisRows = numExamplesPerArr[i]; //Mask itself may be null -> all present, but may include multiple examples
    if (masks[i] == null) {
      continue;
    }
    outMask.put(new INDArrayIndex[] {NDArrayIndex.interval(rowsSoFar, rowsSoFar + thisRows),
            NDArrayIndex.all()}, masks[i]);
    rowsSoFar += thisRows;
  }
  return outMask;
}

Code example source: deeplearning4j/dl4j-examples

INDArray values = Nd4j.ones(3,4);
SDVariable variable = sd.var("myVariable", values);

Code example source: deeplearning4j/nd4j

public AdaGrad createSubset(int index) {
    if (historicalGradient == null)
      this.historicalGradient = Nd4j.ones(shape);

    if (Shape.isMatrix(shape)) {
      AdaGrad a = new AdaGrad(1, historicalGradient.columns());
      //grab only the needed elements
      INDArray slice = historicalGradient.slice(index).dup();
      a.historicalGradient = slice;
      a.setLearningRate(learningRate);
      return a;
    } else {
      AdaGrad a = new AdaGrad(1, 1);
      //grab only the needed elements
      INDArray slice = Nd4j.scalar(historicalGradient.getDouble(index));
      a.historicalGradient = slice;
      a.setLearningRate(learningRate);
      return a;
    }
  }
}

Code example source: deeplearning4j/nd4j

/**
 * Merge the vectors and append a bias.
 * Each vector must be either row or column vectors.
 * An exception is thrown for inconsistency (mixed row and column vectors)
 *
 * @param vectors the vectors to merge
 * @return the merged ndarray appended with the bias
 */
@Override
public INDArray appendBias(INDArray... vectors) {
  int size = 0;
  for (INDArray vector : vectors) {
    size += vector.rows();
  }
  INDArray result = Nd4j.create(size + 1, vectors[0].columns());
  int index = 0;
  for (INDArray vector : vectors) {
    INDArray put = toFlattened(vector, Nd4j.ones(1));
    result.put(new INDArrayIndex[] {NDArrayIndex.interval(index, index + vector.rows() + 1),
            NDArrayIndex.interval(0, vectors[0].columns())}, put);
    index += vector.rows();
  }
  return result;
}
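
A brief usage sketch of the method above, which is reachable through the static Nd4j.appendBias(...) factory; features is a hypothetical input vector, and the trailing 1.0 comes from the Nd4j.ones(1) call in the implementation:

INDArray features = Nd4j.ones(3, 1).muli(0.5);   // hypothetical 3x1 column vector
INDArray withBias = Nd4j.appendBias(features);   // original values with a 1.0 appended
System.out.println(withBias);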

Code example source: deeplearning4j/dl4j-examples

INDArray values = Nd4j.ones(3,4);
var3.setArray(values);

Code example source: deeplearning4j/nd4j

public INDArray adjustMasks(INDArray label, INDArray labelMask, int minorityLabel, double targetDist) {
    labelMask = Nd4j.ones(label.size(0), label.size(2));

Code example source: deeplearning4j/nd4j

final Double locNormFactor = normFactor.getDouble(i);
final INDArray operandA = Nd4j.ones(shape[1], shape[0]).mmul(locCfn);
final INDArray operandB = operandA.transpose();

Code example source: deeplearning4j/nd4j

INDArray mask = (needMask && maskRank != 3 ? Nd4j.ones(totalExamples, maxLength) : null);

Code example source: deeplearning4j/dl4j-examples

print("One dimensional zeros", oneDZeros);
INDArray threeByFourOnes = Nd4j.ones(3, 4);
print("3x4 ones", threeByFourOnes);

Code example source: org.nd4j/nd4j-api

/**
 * Ones like
 *
 * @param arr the array to create the ones like
 * @return ones in the shape of the given array
 */
public static INDArray onesLike(INDArray arr) {
  return ones(arr.shape());
}

Code example source: improbable-research/keanu

public static INDArray ones(long[] shape, DataBuffer.Type bufferType) {
  Nd4j.setDataType(bufferType);
  switch (shape.length) {
    case 0:
      return scalar(1.0, bufferType);
    case 1:
      return reshapeToVector(Nd4j.ones(shape));
    default:
      return Nd4j.ones(shape);
  }
}

Code example source: org.deeplearning4j/deeplearning4j-nn

private INDArray rowOfLogTransitionMatrix(int k) {
  INDArray row = Nd4j.ones(1, states).muli(logOfDiangnalTProb);
  row.putScalar(k, logMetaInstability);
  return row;
}

Code example source: org.nd4j/nd4j-api

@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
  INDArray dLdz = Nd4j.ones(in.shape());
  BooleanIndexing.replaceWhere(dLdz, alpha, Conditions.lessThanOrEqual(0.0));
  dLdz.muli(epsilon);
  return new Pair<>(dLdz, null);
}
