Usage of the org.nd4j.linalg.factory.Nd4j.getOpFactory() method, with code examples


This article collects Java code examples of the org.nd4j.linalg.factory.Nd4j.getOpFactory() method and shows how Nd4j.getOpFactory() is used in practice. The examples are taken from selected open-source projects hosted on platforms such as GitHub, Stack Overflow and Maven, and should serve as a useful reference. Details of the Nd4j.getOpFactory() method are as follows:
Package: org.nd4j.linalg.factory
Class: Nd4j
Method: getOpFactory

About Nd4j.getOpFactory

Get the operation factory.
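
The following is a minimal usage sketch, not taken from any of the projects cited below. It assumes a legacy nd4j release (roughly the 0.x line) in which the string-based factory methods createTransform(String, INDArray) and createAccum(String, INDArray, INDArray) used throughout the examples still exist, and that "abs" and "euclidean" are valid op names; newer nd4j versions construct op objects directly rather than going through the op factory.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class OpFactoryExample {
  public static void main(String[] args) {
    // Element-wise transform: build an "abs" op via the factory and execute it in place.
    INDArray x = Nd4j.create(new double[] {-1.0, 2.0, -3.0});
    Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", x));
    System.out.println(x); // expected: [1.0, 2.0, 3.0]

    // Accumulation (reduction) op: compute the Euclidean distance between two vectors.
    INDArray a = Nd4j.create(new double[] {0.0, 0.0});
    INDArray b = Nd4j.create(new double[] {3.0, 4.0});
    double dist = Nd4j.getExecutioner()
            .execAndReturn(Nd4j.getOpFactory().createAccum("euclidean", a, b))
            .getFinalResult().doubleValue();
    System.out.println(dist); // expected: 5.0
  }
}

Note that the factory only builds the Op object; execution always goes through Nd4j.getExecutioner(), exactly as in the project examples below.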

Code examples

Code example source: deeplearning4j/nd4j

public static long getOpNum(String name, Op.Type type) {
  if (type == Op.Type.CUSTOM)
    return Nd4j.getExecutioner().getCustomOperations().get(name.toLowerCase()).getHash();
  else
    return (long) Nd4j.getOpFactory().getOpNumByName(name);
}

Code example source: deeplearning4j/nd4j

return (long) Nd4j.getOpFactory().getOpNumByName(name);

Code example source: deeplearning4j/nd4j

public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
  if (labels.size(1) != preOutput.size(1)) {
    throw new IllegalArgumentException(
            "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                    + " number of outputs (nOut = " + preOutput.size(1) + ") ");
  }
  INDArray scoreArr;
  //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
  INDArray output = activationFn.getActivation(preOutput.dup(), true);
  scoreArr = output.subi(labels);
  Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr));
  //Weighted loss function
  if (weights != null) {
    if (weights.length() != output.size(1)) {
      throw new IllegalStateException("Weights vector (length " + weights.length()
              + ") does not match output.size(1)=" + output.size(1));
    }
    scoreArr.muliRowVector(weights);
  }
  if (mask != null) {
    LossUtil.applyMask(scoreArr, mask);
  }
  return scoreArr;
}

Code example source: deeplearning4j/nd4j

public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
  if (labels.size(1) != preOutput.size(1)) {
    throw new IllegalArgumentException(
            "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                    + " number of outputs (nOut = " + preOutput.size(1) + ") ");
  }
  INDArray scoreArr;
  //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
  INDArray output = activationFn.getActivation(preOutput.dup(), true);
  scoreArr = output.rsubi(labels).divi(labels);
  Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr));
  scoreArr.muli(100.0 / labels.size(1));
  //Weighted loss function
  if (weights != null) {
    if (weights.length() != output.size(1)) {
      throw new IllegalStateException("Weights vector (length " + weights.length()
              + ") does not match output.size(1)=" + output.size(1));
    }
    scoreArr.muliRowVector(weights);
  }
  if (mask != null) {
    LossUtil.applyMask(scoreArr, mask);
  }
  return scoreArr;
}

Code example source: org.deeplearning4j/nearestneighbor-core

/**
 * Compute the distance between two points using the configured distance function.
 * @param m1 the first point
 * @param m2 the second point
 * @return the distance between the two points
 */
public double getDistance(Point m1, Point m2) {
  return Nd4j.getExecutioner()
          .execAndReturn(Nd4j.getOpFactory().createAccum(distanceFunction, m1.getArray(), m2.getArray()))
          .getFinalResult().doubleValue();
}

Code example source: org.deeplearning4j/nearestneighbor-core

/**
 * Get the distance from the cluster center to the given point.
 * @param point the point to get the distance for
 * @return the distance from the cluster center to the point
 */
public double getDistanceToCenter(Point point) {
  return Nd4j.getExecutioner().execAndReturn(
          Nd4j.getOpFactory().createAccum(distanceFunction, center.getArray(), point.getArray()))
          .getFinalResult().doubleValue();
}

Code example source: org.deeplearning4j/nearestneighbor-core

public void run() {
  try {
    for (int k = clusterIdx + 1, l = clusterSet.getClusterCount(); k < l; k++) {
      Cluster toCluster = clusterSet.getClusters().get(k);
      double distance = Nd4j.getExecutioner()
              .execAndReturn(Nd4j.getOpFactory().createAccum(
                      clusterSet.getDistanceFunction(),
                      fromCluster.getCenter().getArray(),
                      toCluster.getCenter().getArray()))
              .getFinalResult().doubleValue();
      info.getDistancesBetweenClustersCenters().put(fromCluster.getId(), toCluster.getId(),
              distance);
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
}

Code example source: org.deeplearning4j/nearestneighbor-core

/**
 * Compute per-point distance statistics for a cluster.
 * @param cluster the cluster to analyze
 * @param distanceFunction the name of the distance function to use
 * @return the computed ClusterInfo
 */
public static ClusterInfo computeClusterInfos(Cluster cluster, String distanceFunction) {
  ClusterInfo info = new ClusterInfo(cluster.isInverse(), true);
  for (int i = 0, j = cluster.getPoints().size(); i < j; i++) {
    Point point = cluster.getPoints().get(i);
    //shouldn't need to inverse here. other parts of
    //the code should interpret the "distance" or score here
    double distance = Nd4j.getExecutioner()
            .execAndReturn(Nd4j.getOpFactory().createAccum(distanceFunction,
                    cluster.getCenter().getArray(), point.getArray()))
            .getFinalResult().doubleValue();
    info.getPointDistancesFromCenter().put(point.getId(), distance);
    double diff = info.getTotalPointDistanceFromCenter() + distance;
    info.setTotalPointDistanceFromCenter(diff);
  }
  if (!cluster.getPoints().isEmpty())
    info.setAveragePointDistanceFromCenter(info.getTotalPointDistanceFromCenter() / cluster.getPoints().size());
  return info;
}

Code example source: org.deeplearning4j/deeplearning4j-nn

public INDArray propUpDerivative(INDArray z) {
  switch (layerConf().getHiddenUnit()) {
    case IDENTITY:
      return Nd4j.getExecutioner()
              .execAndReturn(Nd4j.getOpFactory().createTransform("identity", z).derivative());
    case BINARY:
      return Nd4j.getExecutioner()
              .execAndReturn(Nd4j.getOpFactory().createTransform("sigmoid", z).derivative());
    case GAUSSIAN: {
      Distribution dist = Nd4j.getDistributions().createNormal(z, 1);
      INDArray gaussian = dist.sample(z.shape());
      INDArray derivative = z.mul(-2).mul(gaussian);
      return derivative;
    }
    case RECTIFIED:
      return Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("relu", z).derivative());
    case SOFTMAX:
      return Nd4j.getExecutioner()
              .execAndReturn(Nd4j.getOpFactory().createTransform("softmax", z).derivative());
    default:
      throw new IllegalStateException(
              "Hidden unit type should either be binary, gaussian, or rectified linear " + layerId());
  }
}

Code example source: org.nd4j/nd4j-api

public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
  if (labels.size(1) != preOutput.size(1)) {
    throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1)
            + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1)
            + ") ");
    
  }
  INDArray scoreArr;
  //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
  INDArray output = activationFn.getActivation(preOutput.dup(), true);
  scoreArr = output.subi(labels);
  Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr));
  //Weighted loss function
  if (weights != null) {
    if (weights.length() != output.size(1)) {
      throw new IllegalStateException("Weights vector (length " + weights.length()
              + ") does not match output.size(1)=" + output.size(1));
    }
    scoreArr.muliRowVector(weights);
  }
  if (mask != null) {
    LossUtil.applyMask(scoreArr, mask);
  }
  return scoreArr;
}

Code example source: org.deeplearning4j/deeplearning4j-nn

/**
 * Calculates the activation of the hidden layer given the visible input:
 * activation(v * W + hbias)
 * @param v the visible layer
 * @return the approximated activations of the hidden layer
 */
public INDArray propUp(INDArray v, boolean training) {
  INDArray preSig = preOutput(v, training);
  switch (layerConf().getHiddenUnit()) {
    case IDENTITY:
      return preSig;
    case BINARY:
      return sigmoid(preSig);
    case GAUSSIAN:
      Distribution dist = Nd4j.getDistributions().createNormal(preSig, 1);
      preSig = dist.sample(preSig.shape());
      return preSig;
    case RECTIFIED:
      preSig = max(preSig, 0.0);
      return preSig;
    case SOFTMAX:
      return Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", preSig));
    default:
      throw new IllegalStateException(
              "Hidden unit type should either be binary, gaussian, or rectified linear " + layerId());
  }
}

Code example source: org.nd4j/nd4j-api

public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
  if (labels.size(1) != preOutput.size(1)) {
    throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1)
            + ") does not match output layer" + " number of outputs (nOut = " + preOutput.size(1)
            + ") ");
    
  }
  INDArray scoreArr;
  //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
  INDArray output = activationFn.getActivation(preOutput.dup(), true);
  scoreArr = output.rsubi(labels).divi(labels);
  Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr));
  scoreArr.muli(100.0 / labels.size(1));
  //Weighted loss function
  if (weights != null) {
    if (weights.length() != output.size(1)) {
      throw new IllegalStateException("Weights vector (length " + weights.length()
              + ") does not match output.size(1)=" + output.size(1));
    }
    scoreArr.muliRowVector(weights);
  }
  if (mask != null) {
    LossUtil.applyMask(scoreArr, mask);
  }
  return scoreArr;
}

Code example source: org.deeplearning4j/deeplearning4j-nn

/**
 * Calculates the activation of the visible layer given the hidden state:
 * activation(h * W^T + vbias)
 * @param h the hidden layer
 * @return the approximated output of the visible layer
 */
public INDArray propDown(INDArray h) {
  INDArray W = getParam(PretrainParamInitializer.WEIGHT_KEY).transpose();
  INDArray vBias = getParam(PretrainParamInitializer.VISIBLE_BIAS_KEY);
  INDArray vMean = h.mmul(W).addiRowVector(vBias);
  switch (layerConf().getVisibleUnit()) {
    case IDENTITY:
      return vMean;
    case BINARY:
      return sigmoid(vMean);
    case GAUSSIAN:
      Distribution dist = Nd4j.getDistributions().createNormal(vMean, 1);
      vMean = dist.sample(vMean.shape());
      return vMean;
    case LINEAR:
      return vMean;
    case SOFTMAX:
      return Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", vMean));
    default:
      throw new IllegalStateException("Visible unit type should either be binary or gaussian " + layerId());
  }
}

Code example source: org.deeplearning4j/deeplearning4j-nn

vSample = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", vProb));
break;

Code example source: org.deeplearning4j/deeplearning4j-nn

hSample = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", hProb));
break;
