This article collects code examples of the Java method org.nd4j.linalg.factory.Nd4j.createUninitializedDetached() and shows how Nd4j.createUninitializedDetached() is used in practice. The examples were extracted from selected projects on platforms such as GitHub, Stack Overflow and Maven, so they should be of real reference value. Details of the Nd4j.createUninitializedDetached() method are as follows:
Package path: org.nd4j.linalg.factory.Nd4j
Class name: Nd4j
Method name: createUninitializedDetached
Description: Creates an uninitialized INDArray detached from any (if any) workspace
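Before the collected examples, here is a minimal, self-contained usage sketch. It is not taken from any of the projects below and assumes an nd4j version that provides the overloads shown in the examples; the key point is that the returned array is uninitialized, so its contents are arbitrary until explicitly overwritten.

import java.util.Arrays;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class CreateUninitializedDetachedDemo {
    public static void main(String[] args) {
        // Allocate a 4 x 10 array detached from any workspace; its contents are undefined.
        INDArray buffer = Nd4j.createUninitializedDetached(new long[]{4, 10});

        // Overwrite the garbage values before reading the array.
        buffer.assign(0.0);

        System.out.println(Arrays.toString(buffer.shape()));
    }
}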
Code example source: deeplearning4j/nd4j

/**
 * Creates an uninitialized INDArray detached from any (if any) workspace
 *
 * @param shape shape of the array to create
 * @return a new uninitialized INDArray, detached from any workspace
 */
public static INDArray createUninitializedDetached(int[] shape) {
    return createUninitializedDetached(shape, Nd4j.order());
}
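This overload simply forwards to an ordering-aware variant using the default order from Nd4j.order(). The ordering can also be passed explicitly, as the CampagneLaboratory examples below do with 'f' (column-major) order; a minimal snippet in the same style as those examples, with a hypothetical 32 x 128 shape chosen only for illustration:

// Detached, 'f'-ordered buffer; values are undefined until assigned.
INDArray fOrdered = Nd4j.createUninitializedDetached(new int[]{32, 128}, 'f');
fOrdered.assign(1.0);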
Code example source: deeplearning4j/nd4j

/**
 * Creates an uninitialized INDArray detached from any (if any) workspace
 *
 * @param shape shape of the array to create
 * @return a new uninitialized INDArray, detached from any workspace
 */
public static INDArray createUninitializedDetached(long[] shape) {
    return createUninitializedDetached(shape, Nd4j.order());
}
Code example source: org.nd4j/nd4j-api

/**
 * Creates an uninitialized INDArray detached from any (if any) workspace
 *
 * @param shape shape of the array to create
 * @return a new uninitialized INDArray, detached from any workspace
 */
public static INDArray createUninitializedDetached(int[] shape) {
    checkShapeValues(shape);
    // ensure shapes that wind up being scalar end up with the right shape
    return createUninitializedDetached(shape, Nd4j.order());
}
Code example source: CampagneLaboratory/variationanalysis

private void keepLongestMask(int minibatchSize, INDArray mask, int[] randomIndex1, int[] randomIndex2) {
    if (mask == null) return;
    INDArray[] tmpBuffer = new INDArray[minibatchSize];
    // Find the longest of the two masks and keep it as the mixup mask:
    for (int exampleIndex = 0; exampleIndex < minibatchSize; exampleIndex++) {
        int random1 = randomIndex1[exampleIndex];
        int random2 = randomIndex2[exampleIndex];
        final INDArray mask1 = mask.getRow(random1);
        final INDArray mask2 = mask.getRow(random2);
        tmpBuffer[exampleIndex] = Nd4j.createUninitializedDetached(mask1.shape());
        if (mask1.sub(mask2).sumNumber().doubleValue() < 0) {
            // mask2 has more 1s than mask1, use mask2:
            Nd4j.copy(mask2, tmpBuffer[exampleIndex]);
        } else {
            Nd4j.copy(mask1, tmpBuffer[exampleIndex]);
        }
    }
    for (int exampleIndex = 0; exampleIndex < minibatchSize; exampleIndex++) {
        // Assign tmpBuffer[exampleIndex] back into the minibatch mask:
        mask.putRow(exampleIndex, tmpBuffer[exampleIndex]);
    }
}
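In this example, createUninitializedDetached only provides scratch storage: each tmpBuffer entry is fully overwritten by Nd4j.copy before it is ever read, so skipping zero-initialization is safe, and because the buffers are detached they do not depend on any workspace that may be open around the call. The remaining examples below follow the same allocate-detached-then-populate pattern.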
Code example source: CampagneLaboratory/variationanalysis

inputs[sampleIndex][index] = Nd4j.createUninitializedDetached(inputShape, 'f');
inputMasks[sampleIndex][index] = needMask ? Nd4j.createUninitializedDetached(
        domainDescriptor.getInputMaskShape(size, input), 'f'
) : null;
for (int sampleIndex = 0; sampleIndex < sampleIndices.length; sampleIndex++) {
    labelMappers[index][sampleIndex] = domainDescriptor.getLabelMapper(label);
    labels[sampleIndex][index] = Nd4j.createUninitializedDetached(domainDescriptor.getLabelShape(size, label), 'f');
    labelMasks[sampleIndex][index] = needMask ? Nd4j.createUninitializedDetached(
            domainDescriptor.getLabelMaskShape(size, label), 'f'
    ) : null;
Code example source: CampagneLaboratory/variationanalysis

inputs[index] = Nd4j.createUninitializedDetached(domainDescriptor.getInputShape(size, input), 'f');
featureMappers[index] = domainDescriptor.getFeatureMapper(input);
index += 1;
labels[index] = Nd4j.createUninitializedDetached(domainDescriptor.getLabelShape(size, label), 'f');
labelMappers[index] = domainDescriptor.getLabelMapper(label);
index++;
Code example source: CampagneLaboratory/variationanalysis

inputs[index] = Nd4j.createUninitializedDetached(inputShape, 'f');
featureMappers[index] = domainDescriptor.getFeatureMapper(input);
boolean needMask = featureMappers[index].hasMask();
inputMasks[index] = needMask ? Nd4j.createUninitializedDetached(domainDescriptor.getInputMaskShape(size, input), 'f') : null;
labels[index] = Nd4j.createUninitializedDetached(domainDescriptor.getLabelShape(size, label), 'f');
labelMasks[index] = Nd4j.createUninitializedDetached(domainDescriptor.getLabelMaskShape(size, label), 'f');
Code example source: org.deeplearning4j/deeplearning4j-cuda-9.2

if (training) {
    if (meanCache == null || meanCache.length() < mean.length()) {
        meanCache = Nd4j.createUninitializedDetached((int) mean.length());
        if (Nd4j.dataType() == DataBuffer.Type.HALF) {
            try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
        varCache = Nd4j.createUninitializedDetached((int) mean.length());
        if (Nd4j.dataType() == DataBuffer.Type.HALF) {
            try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
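What this excerpt shows: the mean and variance caches are only (re)allocated with createUninitializedDetached when they are missing or too small, so they can persist across calls instead of living inside a per-iteration workspace, and in the half-precision branch the code additionally steps outside any active workspace via Nd4j.getMemoryManager().scopeOutOfWorkspaces() before continuing.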