This article collects a number of code examples for the Java method org.nd4j.linalg.factory.Nd4j.getMemoryManager(), showing how Nd4j.getMemoryManager() is used in practice. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they carry real reference value and should be helpful. Details of the Nd4j.getMemoryManager() method:
Package path: org.nd4j.linalg.factory.Nd4j
Class name: Nd4j
Method name: getMemoryManager
Description: This method returns the backend-specific MemoryManager implementation, for low-level memory management.
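Before the project snippets, here is a minimal usage sketch (not taken from any of the projects below) showing how Nd4j.getMemoryManager() is typically used to query and temporarily clear the thread's current workspace. Only getCurrentWorkspace(), setCurrentWorkspace() and getId() are confirmed by the snippets below; the class name and the surrounding control flow are illustrative assumptions.

import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.factory.Nd4j;

public class MemoryManagerUsageSketch {
    public static void main(String[] args) {
        // The backend-specific MemoryManager is reached through the Nd4j facade.
        // Query the workspace currently attached to this thread (null if none).
        MemoryWorkspace previous = Nd4j.getMemoryManager().getCurrentWorkspace();
        System.out.println("current workspace: " + (previous == null ? null : previous.getId()));

        // Temporarily detach the thread from any workspace, then restore the old one,
        // mirroring the notifyScopeEntered()/close() pattern in the snippets below.
        Nd4j.getMemoryManager().setCurrentWorkspace(null);
        try {
            // ... allocations made here are not attached to a workspace ...
        } finally {
            Nd4j.getMemoryManager().setCurrentWorkspace(previous);
        }
    }
}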
Code example source: origin: deeplearning4j/nd4j
/**
 * This method notifies the given Workspace that a new use cycle is starting now
 *
 * @return this workspace
*/
@Override
public MemoryWorkspace notifyScopeEntered() {
parentWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
Nd4j.getMemoryManager().setCurrentWorkspace(null);
return this;
}
Code example source: origin: deeplearning4j/nd4j
@Override
public void close() {
Nd4j.getMemoryManager().setCurrentWorkspace(parentWorkspace);
}
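Together, the two snippets above save the previous workspace when a scope is entered and restore it in close(). A hedged usage sketch of that pattern follows; the Nd4j.getWorkspaceManager().getAndActivateWorkspace(...) call and the workspace name "DEMO_WS" are assumptions not shown in the snippets, while getCurrentWorkspace() and getId() are taken from them.

import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class WorkspaceScopeSketch {
    public static void main(String[] args) {
        // Assumption: getAndActivateWorkspace(String) opens (or reuses) a workspace
        // and makes it the thread's current workspace until the try block closes it.
        try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace("DEMO_WS")) {
            INDArray inside = Nd4j.create(10); // attached to DEMO_WS while it is current
            System.out.println(Nd4j.getMemoryManager().getCurrentWorkspace().getId()); // DEMO_WS
        }
        // After close() the previous current workspace (here: none) is restored.
        System.out.println(Nd4j.getMemoryManager().getCurrentWorkspace()); // null
    }
}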
Code example source: origin: deeplearning4j/nd4j
/**
 * This method TEMPORARILY enters this workspace, without reset applied
 *
 * @return this workspace
*/
@Override
public MemoryWorkspace notifyScopeBorrowed() {
if (isBorrowed.get())
throw new ND4JIllegalStateException("Workspace [" + id + "]: Can't borrow from borrowed workspace");
borrowingWorkspace = Nd4j.getMemoryManager().getCurrentWorkspace();
isBorrowed.set(true);
Nd4j.getMemoryManager().setCurrentWorkspace(this);
return this;
}
Code example source: origin: deeplearning4j/nd4j
public static DataBuffer createBuffer(long[] shape, DataBuffer.Type type) {
long length = ArrayUtil.prodLong(shape);
if (type == DataBuffer.Type.INT)
    return Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createInt(length, true)
            : DATA_BUFFER_FACTORY_INSTANCE.createInt(length, true, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (type == DataBuffer.Type.LONG)
    return Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createLong(length, true)
            : DATA_BUFFER_FACTORY_INSTANCE.createLong(length, true, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (type == DataBuffer.Type.HALF)
    return Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(length, true)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(length, true, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (type == DataBuffer.Type.DOUBLE)
    return Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(length, true)
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(length, true, Nd4j.getMemoryManager().getCurrentWorkspace());
else
    return Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(length, true)
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(length, true, Nd4j.getMemoryManager().getCurrentWorkspace());
}
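As the snippet above shows (and the other createBuffer overloads below repeat), buffer creation routes through Nd4j.getMemoryManager().getCurrentWorkspace(): with no workspace set, the buffer is allocated normally; otherwise it is placed in the current workspace. A short hedged sketch of calling this overload directly; the shape and type values are arbitrary examples.

import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.factory.Nd4j;

public class CreateBufferSketch {
    public static void main(String[] args) {
        // With no current workspace on this thread, the buffer is allocated outside
        // any workspace; inside an active workspace scope, the same call would
        // attach the buffer to that workspace.
        DataBuffer buffer = Nd4j.createBuffer(new long[] {2, 3}, DataBuffer.Type.FLOAT);
        System.out.println("length = " + buffer.length()); // 6 = prod(shape)
    }
}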
Code example source: origin: deeplearning4j/nd4j
/**
 * Get the feature-wise range for the statistics.
 * Note that this is a lazy getter: it is only computed when needed.
 * @return the feature-wise range given the min and max
*/
public INDArray getRange() {
if (range == null) {
try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
range = upper.sub(lower);
}
}
return range;
}
Code example source: origin: deeplearning4j/nd4j
/**
 * This method temporarily opens a block outside of any workspace scope.
 * <p>
 * PLEASE NOTE: Do not forget to close this block.
 *
 * @return a workspace handle that must be closed to restore the previous scope
*/
@Override
public MemoryWorkspace scopeOutOfWorkspaces() {
MemoryWorkspace workspace = Nd4j.getMemoryManager().getCurrentWorkspace();
if (workspace == null)
return new DummyWorkspace();
else {
Nd4j.getMemoryManager().setCurrentWorkspace(null);
return workspace.tagOutOfScopeUse();
}
}
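Callers are expected to wrap scopeOutOfWorkspaces() in try-with-resources so the block is always closed, exactly as the getRange() snippet above does. A minimal hedged sketch of that pattern; the helper name and the dup() use case are illustrative, while the scopeOutOfWorkspaces() call itself comes from the snippet.

import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ScopeOutSketch {
    /** Returns a copy of the array that is created outside of any workspace. */
    static INDArray detachedCopy(INDArray source) {
        // Temporarily leave whatever workspace is current; try-with-resources
        // guarantees the out-of-workspace block is closed again.
        try (MemoryWorkspace ws = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
            return source.dup();
        }
    }
}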
Code example source: origin: deeplearning4j/nd4j
/**
 * Create an int buffer from the given data
 *
 * @param data the data to create the buffer with
* @return the created buffer
*/
public static DataBuffer createBuffer(int[] data) {
DataBuffer ret;
ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
        ? DATA_BUFFER_FACTORY_INSTANCE.createInt(data)
        : DATA_BUFFER_FACTORY_INSTANCE.createInt(data, Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
@Override
public DataBuffer decompress(DataBuffer buffer) {
CompressedDataBuffer comp = (CompressedDataBuffer) buffer;
DataBuffer result = Nd4j.createBuffer(comp.length(), false);
Nd4j.getMemoryManager().memcpy(result, buffer);
return result;
}
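memcpy(dst, src) above is the MemoryManager's low-level copy between two DataBuffers. A minimal hedged sketch of the same call outside the compression codec; the buffer contents and the printed value assume the default floating-point data type.

import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.factory.Nd4j;

public class MemcpySketch {
    public static void main(String[] args) {
        DataBuffer source = Nd4j.createBuffer(new float[] {1f, 2f, 3f});
        // Allocate an uninitialized destination of the same length, then copy into it.
        DataBuffer target = Nd4j.createBuffer(source.length(), false);
        Nd4j.getMemoryManager().memcpy(target, source);
        System.out.println(target.getDouble(2)); // 3.0
    }
}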
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a MinMaxStats object from the data ingested so far. Can be used multiple times when updating
* online.
*/
public MinMaxStats build() {
if (runningLower == null) {
throw new RuntimeException("No data was added, statistics cannot be determined");
}
try (MemoryWorkspace workspace = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
return new MinMaxStats(runningLower.dup(), runningUpper.dup());
}
}
}
Code example source: origin: deeplearning4j/nd4j
@Override
public void assertCurrentWorkspace(@NonNull T arrayType, String msg) {
validateConfig(arrayType);
MemoryWorkspace curr = Nd4j.getMemoryManager().getCurrentWorkspace();
if(!scopeOutOfWs.contains(arrayType) && (curr == null || !getWorkspaceName(arrayType).equals(curr.getId()))){
throw new ND4JWorkspaceException("Assertion failed: expected current workspace to be \"" + getWorkspaceName(arrayType)
+ "\" (for array type " + arrayType + ") - actual current workspace is " + (curr == null ? null : curr.getId())
+ (msg == null ? "" : ": " + msg));
}
}
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a buffer based on the data type
*
* @param data the data to create the buffer with
* @return the created buffer
*/
public static DataBuffer createBuffer(float[] data) {
DataBuffer ret;
if (dataType() == DataBuffer.Type.FLOAT)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(data)
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(data, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (dataType() == DataBuffer.Type.HALF)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(data)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(data, Nd4j.getMemoryManager().getCurrentWorkspace());
else
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(ArrayUtil.toDoubles(data))
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(ArrayUtil.toDoubles(data), Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a buffer based on the data type
 *
 * @param data   the data to create the buffer with
 * @param offset the offset into the data at which the buffer starts
 * @return the created buffer
*/
public static DataBuffer createBuffer(float[] data, long offset) {
DataBuffer ret;
if (dataType() == DataBuffer.Type.FLOAT)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(offset, data)
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(offset, data, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (dataType() == DataBuffer.Type.HALF)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(offset, data)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(offset, data, Nd4j.getMemoryManager().getCurrentWorkspace());
else
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(offset, ArrayUtil.toDoubles(data))
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(offset, ArrayUtil.toDoubles(data), Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a buffer based on the data type
 *
 * @param data   the data to create the buffer with
 * @param offset the offset into the data at which the buffer starts
 * @return the created buffer
*/
public static DataBuffer createBuffer(double[] data, long offset) {
DataBuffer ret;
if (dataType() == DataBuffer.Type.DOUBLE)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(offset, data)
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(offset, data, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (dataType() == DataBuffer.Type.HALF)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(offset, data)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(offset, ArrayUtil.toFloats(data), Nd4j.getMemoryManager().getCurrentWorkspace());
else
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(offset, ArrayUtil.toFloats(data))
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(offset, ArrayUtil.toFloats(data), Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a buffer of the given length, using the current default data type
 *
 * @param length     the length of the buffer to create
 * @param initialize whether to initialize the buffer contents
 * @return the created buffer
*/
public static DataBuffer createBuffer(long length, boolean initialize) {
DataBuffer ret;
if (dataType() == DataBuffer.Type.FLOAT)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(length, initialize)
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(length, initialize, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (dataType() == DataBuffer.Type.INT)
    ret = DATA_BUFFER_FACTORY_INSTANCE.createInt(length, initialize);
else if (dataType() == DataBuffer.Type.HALF)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(length, initialize)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(length, initialize, Nd4j.getMemoryManager().getCurrentWorkspace());
else
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(length, initialize)
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(length, initialize, Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
/**
 * Create a buffer based on the data type
*
* @param data the data to create the buffer with
* @return the created buffer
*/
public static DataBuffer createBuffer(double[] data) {
DataBuffer ret;
if (dataType() == DataBuffer.Type.DOUBLE)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createDouble(data)
            : DATA_BUFFER_FACTORY_INSTANCE.createDouble(data, Nd4j.getMemoryManager().getCurrentWorkspace());
else if (dataType() == DataBuffer.Type.HALF)
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createHalf(data)
            : DATA_BUFFER_FACTORY_INSTANCE.createHalf(ArrayUtil.toFloats(data), Nd4j.getMemoryManager().getCurrentWorkspace());
else
    ret = Nd4j.getMemoryManager().getCurrentWorkspace() == null
            ? DATA_BUFFER_FACTORY_INSTANCE.createFloat(ArrayUtil.toFloats(data))
            : DATA_BUFFER_FACTORY_INSTANCE.createFloat(ArrayUtil.toFloats(data), Nd4j.getMemoryManager().getCurrentWorkspace());
logCreationIfNecessary(ret);
return ret;
}
Code example source: origin: deeplearning4j/nd4j
@Override
public MemoryWorkspace scopeOutOfWorkspaces() {
MemoryWorkspace workspace = Nd4j.getMemoryManager().getCurrentWorkspace();
if (workspace == null)
return new DummyWorkspace();
else {
//Nd4j.getMemoryManager().setCurrentWorkspace(null);
return new DummyWorkspace().notifyScopeEntered();//workspace.tagOutOfScopeUse();
}
}
}
Code example source: origin: deeplearning4j/nd4j
/**
* Assert that the specified workspace is open, active, and is the current workspace
*
* @param ws Name of the workspace to assert open/active/current
* @param errorMsg Message to include in the exception, if required
*/
public static void assertOpenActiveAndCurrent(@NonNull String ws, @NonNull String errorMsg) throws ND4JWorkspaceException {
if (!Nd4j.getWorkspaceManager().checkIfWorkspaceExistsAndActive(ws)) {
throw new ND4JWorkspaceException(errorMsg + " - workspace is not open and active");
}
MemoryWorkspace currWs = Nd4j.getMemoryManager().getCurrentWorkspace();
if (currWs == null || !ws.equals(currWs.getId())) {
throw new ND4JWorkspaceException(errorMsg + " - not the current workspace (current workspace: "
+ (currWs == null ? null : currWs.getId()));
}
}
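A hedged sketch of calling the assertion above from inside an active workspace scope. The enclosing class/import WorkspaceUtils, the getAndActivateWorkspace(...) call, and the workspace name are assumptions not shown in the snippet; the rest follows directly from the method's contract.

import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.workspace.WorkspaceUtils; // assumed location of the helper above

public class AssertWorkspaceSketch {
    public static void main(String[] args) {
        try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace("TRAINING_WS")) {
            // Passes: "TRAINING_WS" is open, active, and current on this thread.
            WorkspaceUtils.assertOpenActiveAndCurrent("TRAINING_WS", "Training step requires its workspace");
        }
        // Called here, the same assertion would throw ND4JWorkspaceException,
        // because "TRAINING_WS" is no longer the current workspace.
    }
}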
Code example source: origin: deeplearning4j/nd4j
/**
 * This method migrates this DataSet into the current Workspace (if any)
*/
@Override
public void migrate() {
if (Nd4j.getMemoryManager().getCurrentWorkspace() != null) {
if (features != null)
features = features.migrate();
if (labels != null)
labels = labels.migrate();
if (featuresMask != null)
featuresMask = featuresMask.migrate();
if (labelsMask != null)
labelsMask = labelsMask.migrate();
}
}
Code example source: origin: deeplearning4j/nd4j
@Override
public DataBuffer compress(DataBuffer buffer) {
CompressionDescriptor descriptor = new CompressionDescriptor(buffer, this);
BytePointer ptr = new BytePointer(buffer.length() * buffer.getElementSize());
CompressedDataBuffer result = new CompressedDataBuffer(ptr, descriptor);
Nd4j.getMemoryManager().memcpy(result, buffer);
return result;
}
Code example source: origin: deeplearning4j/nd4j
@Override
public void migrate() {
if (Nd4j.getMemoryManager().getCurrentWorkspace() != null) {
if (features != null)
for (int e = 0; e < features.length; e++)
features[e] = features[e].migrate();
if (labels != null)
for (int e = 0; e < labels.length; e++)
labels[e] = labels[e].migrate();
if (featuresMaskArrays != null)
for (int e = 0; e < featuresMaskArrays.length; e++)
featuresMaskArrays[e] = featuresMaskArrays[e].migrate();
if (labelsMaskArrays != null)
for (int e = 0; e < labelsMaskArrays.length; e++)
labelsMaskArrays[e] = labelsMaskArrays[e].migrate();
}
}