Package ai.djl.pytorch.jni
Class JniUtils
- java.lang.Object
-
- ai.djl.pytorch.jni.JniUtils
-
public final class JniUtils extends java.lang.Object
A class containing utilities to interact with the PyTorch Engine's Java Native Interface (JNI) layer.
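JniUtils is the low-level bridge that the PyTorch engine's NDArray and model implementations delegate to; application code normally goes through the ai.djl.ndarray API rather than calling it directly. The sketch below shows a direct call, assuming the PyTorch engine is the default DJL engine and that PtNDManager and PtNDArray come from ai.djl.pytorch.engine (a minimal sketch, not the canonical usage):

  import ai.djl.ndarray.NDManager;
  import ai.djl.pytorch.engine.PtNDArray;
  import ai.djl.pytorch.engine.PtNDManager;
  import ai.djl.pytorch.jni.JniUtils;

  public final class JniUtilsSketch {
      public static void main(String[] args) {
          // Assumes PyTorch is the default engine, so the base manager is a PtNDManager.
          try (PtNDManager manager = (PtNDManager) NDManager.newBaseManager()) {
              PtNDArray x = (PtNDArray) manager.create(new float[] {-1f, 0f, 2f});
              // Every JniUtils method is static and operates on the native tensor behind a PtNDArray.
              PtNDArray y = JniUtils.relu(x);
              System.out.println(y);
          }
      }
  }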
-
-
Method Summary
All methods are static and concrete.

static PtNDArray abs(PtNDArray ndArray)
static PtNDArray acos(PtNDArray ndArray)
static void adamUpdate(PtNDArray weight, PtNDArray grad, PtNDArray mean, PtNDArray variance, float lr, float learningRateBiasCorrection, float wd, float rescaleGrad, float clipGrad, float beta1, float beta2, float eps, boolean adamw)
static PtNDArray adaptiveAvgPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape outputSize)
static PtNDArray adaptiveMaxPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape outputSize)
static PtNDArray add(PtNDArray ndArray1, PtNDArray ndArray2)
static void addi(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray all(PtNDArray ndArray)
static PtNDArray any(PtNDArray ndArray)
static PtNDArray arange(PtNDManager manager, float start, float stop, float step, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray argMax(PtNDArray ndArray)
static PtNDArray argMax(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray argMin(PtNDArray ndArray)
static PtNDArray argMin(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray argSort(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray asin(PtNDArray ndArray)
static PtNDArray atan(PtNDArray ndArray)
static void attachGradient(PtNDArray ndArray, boolean requiresGrad)
static PtNDArray avgPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, boolean ceilMode, boolean countIncludePad)
static void backward(PtNDArray ndArray, PtNDArray gradNd, boolean keepGraph, boolean createGraph)
static PtNDArray batchNorm(PtNDArray ndArray, PtNDArray gamma, PtNDArray beta, PtNDArray runningMean, PtNDArray runningVar, boolean isTraining, double momentum, double eps)
static PtNDArray bmm(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray booleanMask(PtNDArray ndArray, PtNDArray indicesNd)
static void booleanMaskSet(PtNDArray ndArray, PtNDArray value, PtNDArray indicesNd)
static PtNDArray broadcast(PtNDArray ndArray, ai.djl.ndarray.types.Shape shape)
static PtNDArray cat(PtNDArray[] arrays, long dim)
static PtNDArray ceil(PtNDArray ndArray)
static PtNDArray clip(PtNDArray ndArray, java.lang.Number min, java.lang.Number max)
static PtNDArray clone(PtNDArray ndArray)
static PtNDArray complex(PtNDArray ndArray)
static boolean contentEqual(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray convolution(PtNDArray ndArray, PtNDArray weight, PtNDArray bias, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, ai.djl.ndarray.types.Shape dilation, int groups)
static PtNDArray cos(PtNDArray ndArray)
static PtNDArray cosh(PtNDArray ndArray)
static PtNDArray createEmptyNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray createNdFromByteBuffer(PtNDManager manager, java.nio.ByteBuffer data, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.ndarray.types.SparseFormat fmt, ai.djl.Device device)
static PtNDArray createOnesNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray createSparseCoo(PtNDArray indices, PtNDArray values, ai.djl.ndarray.types.Shape shape)
static PtNDArray createZerosNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray cumProd(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dataType)
static PtNDArray cumSum(PtNDArray ndArray, long dim)
static void deleteModule(long pointer)
static void deleteNDArray(long handle)
static PtNDArray detachGradient(PtNDArray ndArray)
static PtNDArray div(PtNDArray ndArray1, PtNDArray ndArray2)
static void divi(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray dot(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray dropout(PtNDArray ndArray, double prob, boolean training)
static PtNDArray elu(PtNDArray ndArray, double alpha)
static PtNDArray embedding(PtNDArray input, PtNDArray weight, boolean sparse)
static void emptyCudaCache()
static void enableInferenceMode(PtSymbolBlock block)
static void enableTrainingMode(PtSymbolBlock block)
static PtNDArray eq(PtNDArray self, PtNDArray other)
static PtNDArray erfinv(PtNDArray ndArray)
static PtNDArray exp(PtNDArray ndArray)
static PtNDArray eye(PtNDManager manager, int n, int m, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray fft(PtNDArray ndArray, long length, long axis)
static PtNDArray flatten(PtNDArray ndArray, long startDim, long endDim)
static PtNDArray flip(PtNDArray ndArray, long[] dims)
static PtNDArray floor(PtNDArray ndArray)
static PtNDArray full(PtNDManager manager, ai.djl.ndarray.types.Shape shape, double fillValue, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray gather(PtNDArray ndArray, PtNDArray index, long dim)
static PtNDArray gelu(PtNDArray ndArray)
static java.nio.ByteBuffer getByteBuffer(PtNDArray ndArray)
static ai.djl.ndarray.types.DataType getDataType(PtNDArray ndArray)
static ai.djl.Device getDevice(PtNDArray ndArray)
static java.util.Set<java.lang.String> getFeatures()
static PtNDArray getGradient(PtNDArray ndArray)
static java.lang.String getGradientFunctionNames(PtNDArray ndArray)
static PtNDArray getItem(PtNDArray ndArray, long[] indices, PtNDManager manager)
static int getLayout(PtNDArray array)
static java.lang.String[] getMethodNames(PtSymbolBlock block)
static int getNumInteropThreads()
static int getNumThreads()
static ai.djl.ndarray.types.Shape getShape(PtNDArray ndArray)
static ai.djl.ndarray.types.SparseFormat getSparseFormat(PtNDArray ndArray)
static ai.djl.ndarray.NDList gru(PtNDArray input, PtNDArray hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
static PtNDArray gt(PtNDArray self, PtNDArray other)
static PtNDArray gte(PtNDArray self, PtNDArray other)
static PtNDArray hannWindow(PtNDManager manager, long numPoints, boolean periodic, ai.djl.Device device)
static PtNDArray index(PtNDArray ndArray, long[] minIndices, long[] maxIndices, long[] stepIndices, PtNDManager manager)
static PtNDArray indexAdv(PtNDArray ndArray, ai.djl.ndarray.index.NDIndex index, PtNDManager manager)
static void indexAdvPut(PtNDArray ndArray, ai.djl.ndarray.index.NDIndex index, PtNDArray data)
static void indexSet(PtNDArray ndArray, PtNDArray value, long[] minIndices, long[] maxIndices, long[] stepIndices)
static PtNDArray interpolate(PtNDArray ndArray, long[] size, int mode, boolean alignCorners)
static PtNDArray inverse(PtNDArray ndArray)
static boolean isGradMode()
static PtNDArray isInf(PtNDArray ndArray)
static PtNDArray isNaN(PtNDArray ndArray)
static PtNDArray layerNorm(PtNDArray ndArray, ai.djl.ndarray.types.Shape normalizedShape, PtNDArray gamma, PtNDArray beta, double eps)
static PtNDArray leakyRelu(PtNDArray ndArray, double negativeSlope)
static PtNDArray linear(PtNDArray input, PtNDArray weight, PtNDArray bias)
static PtNDArray linspace(PtNDManager manager, float start, float stop, int step, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtSymbolBlock loadModule(PtNDManager manager, java.io.InputStream is, boolean mapLocation, boolean hasSize)
static PtSymbolBlock loadModule(PtNDManager manager, java.nio.file.Path path, boolean mapLocation, java.lang.String[] extraFileKeys, java.lang.String[] extraFileValues, boolean trainParam)
static long loadModuleHandle(java.io.InputStream is, ai.djl.Device device, boolean mapLocation, boolean hasSize)
static PtNDArray log(PtNDArray ndArray)
static PtNDArray log10(PtNDArray ndArray)
static PtNDArray log2(PtNDArray ndArray)
static PtNDArray logicalAnd(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray logicalNot(PtNDArray ndArray)
static PtNDArray logicalOr(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray logicalXor(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray logSoftmax(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dTpe)
static PtNDArray lpPool(PtNDArray ndArray, double normType, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, boolean ceilMode)
static ai.djl.ndarray.NDList lstm(PtNDArray input, ai.djl.ndarray.NDList hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
static PtNDArray lt(PtNDArray self, PtNDArray other)
static PtNDArray lte(PtNDArray self, PtNDArray other)
static PtNDArray matmul(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray max(PtNDArray ndArray)
static PtNDArray max(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray max(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray maxPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, boolean ceilMode)
static PtNDArray mean(PtNDArray ndArray)
static PtNDArray mean(PtNDArray ndArray, long dim, boolean keepDim)
static ai.djl.ndarray.NDList median(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray min(PtNDArray ndArray)
static PtNDArray min(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray min(PtNDArray ndArray1, PtNDArray ndArray2)
static ai.djl.ndarray.NDList moduleGetParams(PtSymbolBlock block, PtNDManager manager)
static PtNDArray mul(PtNDArray ndArray1, PtNDArray ndArray2)
static void muli(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray neg(PtNDArray ndArray)
static void negi(PtNDArray ndArray)
static PtNDArray neq(PtNDArray self, PtNDArray other)
static PtNDArray none(PtNDArray ndArray)
static PtNDArray nonZeros(PtNDArray ndArray)
static PtNDArray norm(PtNDArray ndArray, int ord, int[] axes, boolean keepDims)
static PtNDArray normal(PtNDManager manager, double mean, double std, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
static PtNDArray normalize(PtNDArray ndArray, double p, long dim, double eps)
static PtNDArray oneHot(PtNDArray ndArray, int depth, ai.djl.ndarray.types.DataType dataType)
static PtNDArray onesLike(PtNDArray array, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
static PtNDArray permute(PtNDArray ndArray, long[] dims)
static PtNDArray pick(PtNDArray ndArray, PtNDArray index, long dim)
static PtNDArray pow(PtNDArray ndArray1, PtNDArray ndArray2)
static void powi(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray prod(PtNDArray ndArray)
static PtNDArray prod(PtNDArray ndArray, long dim, boolean keepDim)
static PtNDArray put(PtNDArray ndArray, PtNDArray index, PtNDArray value)
static PtNDArray randint(PtNDManager manager, long low, long high, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
static PtNDArray randperm(PtNDManager manager, long n, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
static PtNDArray real(PtNDArray ndArray)
static PtNDArray relu(PtNDArray ndArray)
static PtNDArray remainder(PtNDArray ndArray1, PtNDArray ndArray2)
static void remainderi(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray repeat(PtNDArray ndArray, long repeat, long dim)
static boolean requiresGrad(PtNDArray ndArray)
static PtNDArray reshape(PtNDArray ndArray, long[] shape)
static ai.djl.ndarray.NDList rnn(PtNDArray input, PtNDArray hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, ai.djl.nn.recurrent.RNN.Activation activation, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
static PtNDArray rot90(PtNDArray ndArray, int times, int[] axes)
static PtNDArray round(PtNDArray ndArray)
static PtNDArray scatter(PtNDArray ndArray, PtNDArray index, PtNDArray value, int axis)
static PtNDArray selu(PtNDArray ndArray)
static void set(PtNDArray self, java.nio.ByteBuffer data)
static void setBenchmarkCuDNN(boolean enable)
static void setGradMode(boolean enable)
static void setGraphExecutorOptimize(boolean enabled)
static void setNumInteropThreads(int threads)
static void setNumThreads(int threads)
static void setSeed(long seed)
static void sgdUpdate(PtNDArray weight, PtNDArray grad, PtNDArray state, float lr, float wd, float rescaleGrad, float clipGrad, float momentum)
static PtNDArray sigmoid(PtNDArray ndArray)
static PtNDArray sign(PtNDArray ndArray)
static void signi(PtNDArray ndArray)
static PtNDArray sin(PtNDArray ndArray)
static PtNDArray sinh(PtNDArray ndArray)
static PtNDArray slice(PtNDArray ndArray, long dim, long start, long stop, long step)
static PtNDArray softmax(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dTpe)
static PtNDArray softPlus(PtNDArray ndArray)
static PtNDArray softSign(PtNDArray ndArray)
static PtNDArray sort(PtNDArray ndArray, long dim, boolean descending)
static ai.djl.ndarray.NDList split(PtNDArray ndArray, long[] indices, long axis)
static ai.djl.ndarray.NDList split(PtNDArray ndArray, long size, long axis)
static PtNDArray sqrt(PtNDArray ndArray)
static PtNDArray square(PtNDArray ndArray)
static PtNDArray squeeze(PtNDArray ndArray)
static PtNDArray squeeze(PtNDArray ndArray, long dim)
static PtNDArray stack(PtNDArray[] arrays, int dim)
static void startProfile(boolean useCuda, boolean recordShape, boolean profileMemory)
    Call this method to start profiling the area you are interested in.
static PtNDArray stft(PtNDArray ndArray, long nFft, long hopLength, PtNDArray window, boolean center, boolean normalize, boolean returnComplex)
static void stopProfile(java.lang.String outputFile)
static PtNDArray sub(PtNDArray ndArray1, PtNDArray ndArray2)
static void subi(PtNDArray ndArray1, PtNDArray ndArray2)
static PtNDArray sum(PtNDArray ndArray)
static PtNDArray sum(PtNDArray ndArray, long[] dims, boolean keepDim)
static PtNDArray take(PtNDArray ndArray, PtNDArray index, PtNDManager manager)
static PtNDArray tan(PtNDArray ndArray)
static PtNDArray tanh(PtNDArray ndArray)
static PtNDArray tile(PtNDArray ndArray, long[] repeats)
static PtNDArray to(PtNDArray ndArray, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
static PtNDArray toDense(PtNDArray ndArray)
static ai.djl.ndarray.NDList topK(PtNDArray ndArray, long k, long axis, boolean largest, boolean sorted)
static PtNDArray toSparse(PtNDArray ndArray)
static PtNDArray transpose(PtNDArray ndArray, long dim1, long dim2)
static PtNDArray trunc(PtNDArray ndArray)
static PtNDArray uniform(PtNDManager manager, double low, double high, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
static ai.djl.ndarray.NDList unique(PtNDArray ndArray, java.lang.Integer dim, boolean sorted, boolean returnInverse, boolean returnCounts)
static PtNDArray unsqueeze(PtNDArray ndArray, long dim)
static PtNDArray where(PtNDArray condition, PtNDArray self, PtNDArray other)
static void writeModule(PtSymbolBlock block, java.io.OutputStream os, boolean writeSize)
static PtNDArray xlogy(PtNDArray ndArray1, PtNDArray ndArray2)
static void zeroGrad(PtNDArray weight)
static PtNDArray zerosLike(PtNDArray array, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
-
-
Method Detail
-
isGradMode
public static boolean isGradMode()
-
setGradMode
public static void setGradMode(boolean enable)
-
getNumInteropThreads
public static int getNumInteropThreads()
-
getNumThreads
public static int getNumThreads()
-
setNumInteropThreads
public static void setNumInteropThreads(int threads)
-
setNumThreads
public static void setNumThreads(int threads)
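These two knobs mirror PyTorch's intra-op and inter-op thread pools. A hedged sketch of setting them once at startup (assumption: the inter-op pool has not started any work yet, otherwise the native call may fail or be ignored):

  // Configure native parallelism before any heavy PyTorch work is scheduled.
  JniUtils.setNumInteropThreads(2); // threads for inter-op parallelism
  JniUtils.setNumThreads(4);        // threads used inside a single operator
  System.out.println("intra-op=" + JniUtils.getNumThreads()
          + ", inter-op=" + JniUtils.getNumInteropThreads());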
-
setBenchmarkCuDNN
public static void setBenchmarkCuDNN(boolean enable)
-
getFeatures
public static java.util.Set<java.lang.String> getFeatures()
-
setSeed
public static void setSeed(long seed)
-
startProfile
public static void startProfile(boolean useCuda, boolean recordShape, boolean profileMemory)
Call this method to start profiling the area you are interested in.
Example usage:
  JniUtils.startProfile(false, true, true);
  Predictor.predict(img);
  JniUtils.stopProfile(outputFile);
- Parameters:
  useCuda - Enables timing of CUDA events as well, using the cudaEvent API.
  recordShape - If shape recording is enabled, information about input dimensions will be collected.
  profileMemory - Whether to report memory usage.
-
stopProfile
public static void stopProfile(java.lang.String outputFile)
-
createNdFromByteBuffer
public static PtNDArray createNdFromByteBuffer(PtNDManager manager, java.nio.ByteBuffer data, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.ndarray.types.SparseFormat fmt, ai.djl.Device device)
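A hedged sketch of the documented parameter order (manager, data, shape, dType, fmt, device), building a dense 2x3 float32 tensor on the CPU; the direct, native-order buffer layout is an assumption about what the JNI layer expects, and getByteBuffer is used for a round trip:

  import ai.djl.Device;
  import ai.djl.ndarray.types.DataType;
  import ai.djl.ndarray.types.Shape;
  import ai.djl.ndarray.types.SparseFormat;
  import java.nio.ByteBuffer;
  import java.nio.ByteOrder;

  static PtNDArray fromBuffer(PtNDManager manager) {
      ByteBuffer data = ByteBuffer.allocateDirect(6 * Float.BYTES).order(ByteOrder.nativeOrder());
      for (int i = 0; i < 6; i++) {
          data.putFloat(i); // fill with 0..5 in row-major order
      }
      data.rewind();
      PtNDArray array = JniUtils.createNdFromByteBuffer(
              manager, data, new Shape(2, 3), DataType.FLOAT32, SparseFormat.DENSE, Device.cpu());
      ByteBuffer out = JniUtils.getByteBuffer(array); // copy the tensor contents back out
      assert out.remaining() == 6 * Float.BYTES;
      return array;
  }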
-
emptyCudaCache
public static void emptyCudaCache()
-
createEmptyNdArray
public static PtNDArray createEmptyNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
createZerosNdArray
public static PtNDArray createZerosNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
createOnesNdArray
public static PtNDArray createOnesNdArray(PtNDManager manager, ai.djl.ndarray.types.Shape shape, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
full
public static PtNDArray full(PtNDManager manager, ai.djl.ndarray.types.Shape shape, double fillValue, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
zerosLike
public static PtNDArray zerosLike(PtNDArray array, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
onesLike
public static PtNDArray onesLike(PtNDArray array, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
arange
public static PtNDArray arange(PtNDManager manager, float start, float stop, float step, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
linspace
public static PtNDArray linspace(PtNDManager manager, float start, float stop, int step, ai.djl.ndarray.types.DataType dType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
createSparseCoo
public static PtNDArray createSparseCoo(PtNDArray indices, PtNDArray values, ai.djl.ndarray.types.Shape shape)
-
to
public static PtNDArray to(PtNDArray ndArray, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
-
index
public static PtNDArray index(PtNDArray ndArray, long[] minIndices, long[] maxIndices, long[] stepIndices, PtNDManager manager)
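The parameter names suggest basic per-dimension slicing with a start, stop, and step, in the spirit of array[min:max:step]. A hedged sketch under that assumption (the exclusive stop index is itself an assumption), reusing the imports from the sketches above:

  static PtNDArray sliceExample(PtNDManager manager, PtNDArray matrix) {
      long[] min  = {0, 0}; // start index for each dimension
      long[] max  = {2, 4}; // stop index for each dimension (assumed exclusive)
      long[] step = {1, 2}; // step for each dimension: rows 0..1, every second column
      return JniUtils.index(matrix, min, max, step, manager);
  }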
-
indexAdv
public static PtNDArray indexAdv(PtNDArray ndArray, ai.djl.ndarray.index.NDIndex index, PtNDManager manager)
-
indexAdvPut
public static void indexAdvPut(PtNDArray ndArray, ai.djl.ndarray.index.NDIndex index, PtNDArray data)
-
indexSet
public static void indexSet(PtNDArray ndArray, PtNDArray value, long[] minIndices, long[] maxIndices, long[] stepIndices)
-
set
public static void set(PtNDArray self, java.nio.ByteBuffer data)
-
take
public static PtNDArray take(PtNDArray ndArray, PtNDArray index, PtNDManager manager)
-
scatter
public static PtNDArray scatter(PtNDArray ndArray, PtNDArray index, PtNDArray value, int axis)
-
booleanMaskSet
public static void booleanMaskSet(PtNDArray ndArray, PtNDArray value, PtNDArray indicesNd)
-
getItem
public static PtNDArray getItem(PtNDArray ndArray, long[] indices, PtNDManager manager)
-
softmax
public static PtNDArray softmax(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dTpe)
-
logSoftmax
public static PtNDArray logSoftmax(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dTpe)
-
topK
public static ai.djl.ndarray.NDList topK(PtNDArray ndArray, long k, long axis, boolean largest, boolean sorted)
-
signi
public static void signi(PtNDArray ndArray)
-
median
public static ai.djl.ndarray.NDList median(PtNDArray ndArray, long dim, boolean keepDim)
-
cumProd
public static PtNDArray cumProd(PtNDArray ndArray, long dim, ai.djl.ndarray.types.DataType dataType)
-
oneHot
public static PtNDArray oneHot(PtNDArray ndArray, int depth, ai.djl.ndarray.types.DataType dataType)
-
split
public static ai.djl.ndarray.NDList split(PtNDArray ndArray, long size, long axis)
-
split
public static ai.djl.ndarray.NDList split(PtNDArray ndArray, long[] indices, long axis)
-
unique
public static ai.djl.ndarray.NDList unique(PtNDArray ndArray, java.lang.Integer dim, boolean sorted, boolean returnInverse, boolean returnCounts)
-
stft
public static PtNDArray stft(PtNDArray ndArray, long nFft, long hopLength, PtNDArray window, boolean center, boolean normalize, boolean returnComplex)
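hannWindow and stft compose naturally into a spectrogram-style call. A hedged sketch assuming a 1-D float signal and a window length equal to nFft:

  static PtNDArray spectrogram(PtNDManager manager, PtNDArray signal) {
      long nFft = 512;
      long hopLength = 128;
      // Periodic Hann window of nFft points, created on the same device as the signal.
      PtNDArray window = JniUtils.hannWindow(manager, nFft, true, JniUtils.getDevice(signal));
      // center=true pads the signal, normalize=false, returnComplex=true keeps complex output.
      return JniUtils.stft(signal, nFft, hopLength, window, true, false, true);
  }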
-
negi
public static void negi(PtNDArray ndArray)
-
randint
public static PtNDArray randint(PtNDManager manager, long low, long high, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
-
randperm
public static PtNDArray randperm(PtNDManager manager, long n, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
-
normal
public static PtNDArray normal(PtNDManager manager, double mean, double std, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
-
uniform
public static PtNDArray uniform(PtNDManager manager, double low, double high, ai.djl.ndarray.types.Shape size, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device)
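A hedged sketch combining the random constructors with setSeed so the draws are reproducible; the shapes, data types, and device are arbitrary choices for illustration (imports as in the createNdFromByteBuffer sketch above):

  static void randomSketch(PtNDManager manager) {
      JniUtils.setSeed(42L); // fix the native RNG
      PtNDArray gauss = JniUtils.normal(
              manager, 0.0, 1.0, new Shape(2, 3), DataType.FLOAT32, Device.cpu());
      PtNDArray unif = JniUtils.uniform(
              manager, 0.0, 1.0, new Shape(2, 3), DataType.FLOAT32, Device.cpu());
      PtNDArray ints = JniUtils.randint(
              manager, 0, 10, new Shape(4), DataType.INT64, Device.cpu());
  }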
-
eye
public static PtNDArray eye(PtNDManager manager, int n, int m, ai.djl.ndarray.types.DataType dataType, ai.djl.Device device, ai.djl.ndarray.types.SparseFormat fmt)
-
hannWindow
public static PtNDArray hannWindow(PtNDManager manager, long numPoints, boolean periodic, ai.djl.Device device)
-
interpolate
public static PtNDArray interpolate(PtNDArray ndArray, long[] size, int mode, boolean alignCorners)
-
convolution
public static PtNDArray convolution(PtNDArray ndArray, PtNDArray weight, PtNDArray bias, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, ai.djl.ndarray.types.Shape dilation, int groups)
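A hedged sketch of a 2-D convolution; the NCHW input layout and OIHW weight layout are assumptions carried over from PyTorch's conv2d convention, and the randomly initialized weight is for illustration only:

  static PtNDArray conv2dSketch(PtNDManager manager, PtNDArray input) {
      // input assumed NCHW, e.g. (batch=1, channels=3, height=32, width=32)
      PtNDArray weight = JniUtils.normal(
              manager, 0.0, 0.02, new Shape(8, 3, 3, 3), DataType.FLOAT32, Device.cpu());
      PtNDArray bias = JniUtils.createZerosNdArray(
              manager, new Shape(8), DataType.FLOAT32, Device.cpu(), SparseFormat.DENSE);
      return JniUtils.convolution(
              input, weight, bias,
              new Shape(1, 1), // stride
              new Shape(1, 1), // padding
              new Shape(1, 1), // dilation
              1);              // groups
  }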
-
batchNorm
public static PtNDArray batchNorm(PtNDArray ndArray, PtNDArray gamma, PtNDArray beta, PtNDArray runningMean, PtNDArray runningVar, boolean isTraining, double momentum, double eps)
-
layerNorm
public static PtNDArray layerNorm(PtNDArray ndArray, ai.djl.ndarray.types.Shape normalizedShape, PtNDArray gamma, PtNDArray beta, double eps)
-
rnn
public static ai.djl.ndarray.NDList rnn(PtNDArray input, PtNDArray hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, ai.djl.nn.recurrent.RNN.Activation activation, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
-
gru
public static ai.djl.ndarray.NDList gru(PtNDArray input, PtNDArray hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
-
lstm
public static ai.djl.ndarray.NDList lstm(PtNDArray input, ai.djl.ndarray.NDList hx, ai.djl.ndarray.NDList params, boolean hasBiases, int numLayers, double dropRate, boolean training, boolean bidirectional, boolean batchFirst)
-
avgPool
public static PtNDArray avgPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, boolean ceilMode, boolean countIncludePad)
-
maxPool
public static PtNDArray maxPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, ai.djl.ndarray.types.Shape padding, boolean ceilMode)
-
adaptiveMaxPool
public static PtNDArray adaptiveMaxPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape outputSize)
-
adaptiveAvgPool
public static PtNDArray adaptiveAvgPool(PtNDArray ndArray, ai.djl.ndarray.types.Shape outputSize)
-
lpPool
public static PtNDArray lpPool(PtNDArray ndArray, double normType, ai.djl.ndarray.types.Shape kernelSize, ai.djl.ndarray.types.Shape stride, boolean ceilMode)
-
getDataType
public static ai.djl.ndarray.types.DataType getDataType(PtNDArray ndArray)
-
getDevice
public static ai.djl.Device getDevice(PtNDArray ndArray)
-
getSparseFormat
public static ai.djl.ndarray.types.SparseFormat getSparseFormat(PtNDArray ndArray)
-
getShape
public static ai.djl.ndarray.types.Shape getShape(PtNDArray ndArray)
-
getByteBuffer
public static java.nio.ByteBuffer getByteBuffer(PtNDArray ndArray)
-
deleteNDArray
public static void deleteNDArray(long handle)
-
requiresGrad
public static boolean requiresGrad(PtNDArray ndArray)
-
getGradientFunctionNames
public static java.lang.String getGradientFunctionNames(PtNDArray ndArray)
-
attachGradient
public static void attachGradient(PtNDArray ndArray, boolean requiresGrad)
-
backward
public static void backward(PtNDArray ndArray, PtNDArray gradNd, boolean keepGraph, boolean createGraph)
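attachGradient, backward, getGradient (listed in the summary above), and zeroGrad combine into a small autograd round trip. A hedged sketch, assuming getGradient returns the gradient accumulated on a leaf tensor:

  static void autogradSketch(PtNDManager manager) {
      JniUtils.setGradMode(true);               // make sure autograd is recording
      PtNDArray x = (PtNDArray) manager.create(new float[] {1f, 2f, 3f});
      JniUtils.attachGradient(x, true);         // mark x as requiring a gradient
      PtNDArray y = JniUtils.sum(JniUtils.mul(x, x)); // y = sum(x * x)
      // Seed gradient of ones for the scalar output; keepGraph and createGraph are off.
      PtNDArray seed = JniUtils.createOnesNdArray(
              manager, JniUtils.getShape(y), DataType.FLOAT32, Device.cpu(), SparseFormat.DENSE);
      JniUtils.backward(y, seed, false, false);
      PtNDArray grad = JniUtils.getGradient(x); // expected to hold 2 * x
      JniUtils.zeroGrad(x);                     // clear the accumulated gradient
  }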
-
deleteModule
public static void deleteModule(long pointer)
-
setGraphExecutorOptimize
public static void setGraphExecutorOptimize(boolean enabled)
-
loadModule
public static PtSymbolBlock loadModule(PtNDManager manager, java.nio.file.Path path, boolean mapLocation, java.lang.String[] extraFileKeys, java.lang.String[] extraFileValues, boolean trainParam)
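A hedged sketch of loading a TorchScript module from disk; the file name is a placeholder, and passing empty arrays to mean "no extra files" is an assumption:

  import java.nio.file.Path;
  import java.nio.file.Paths;

  static PtSymbolBlock loadScriptModule(PtNDManager manager) {
      Path path = Paths.get("traced_model.pt");  // placeholder path
      PtSymbolBlock block = JniUtils.loadModule(
              manager, path,
              true,                              // mapLocation: map stored tensors onto the target device (assumption)
              new String[0], new String[0],      // extraFileKeys / extraFileValues: none requested
              false);                            // trainParam=false: parameters loaded for inference (assumption)
      JniUtils.enableInferenceMode(block);       // switch the module to eval-style behavior
      for (String name : JniUtils.getMethodNames(block)) {
          System.out.println("TorchScript method: " + name);
      }
      return block;
  }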
-
loadModule
public static PtSymbolBlock loadModule(PtNDManager manager, java.io.InputStream is, boolean mapLocation, boolean hasSize) throws java.io.IOException
- Throws:
java.io.IOException
-
loadModuleHandle
public static long loadModuleHandle(java.io.InputStream is, ai.djl.Device device, boolean mapLocation, boolean hasSize) throws java.io.IOException
- Throws:
java.io.IOException
-
writeModule
public static void writeModule(PtSymbolBlock block, java.io.OutputStream os, boolean writeSize)
-
moduleGetParams
public static ai.djl.ndarray.NDList moduleGetParams(PtSymbolBlock block, PtNDManager manager)
-
getMethodNames
public static java.lang.String[] getMethodNames(PtSymbolBlock block)
-
enableInferenceMode
public static void enableInferenceMode(PtSymbolBlock block)
-
enableTrainingMode
public static void enableTrainingMode(PtSymbolBlock block)
-
zeroGrad
public static void zeroGrad(PtNDArray weight)
-
adamUpdate
public static void adamUpdate(PtNDArray weight, PtNDArray grad, PtNDArray mean, PtNDArray variance, float lr, float learningRateBiasCorrection, float wd, float rescaleGrad, float clipGrad, float beta1, float beta2, float eps, boolean adamw)
-
sgdUpdate
public static void sgdUpdate(PtNDArray weight, PtNDArray grad, PtNDArray state, float lr, float wd, float rescaleGrad, float clipGrad, float momentum)
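Both optimizer updates mutate weight in place. A hedged sketch of a single SGD step, assuming state holds the momentum buffer and grad comes from an earlier backward call:

  static void sgdStep(PtNDArray weight, PtNDArray grad, PtNDArray momentumBuffer) {
      JniUtils.sgdUpdate(
              weight, grad, momentumBuffer,
              0.01f, // lr
              0.0f,  // wd (weight decay)
              1.0f,  // rescaleGrad
              0.0f,  // clipGrad (assumed: 0 disables clipping)
              0.9f); // momentum
  }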
-
getLayout
public static int getLayout(PtNDArray array)
-
-