@hoff97/tensor-js
PyTorch-like deep learning inference library
TypeScript
import { DTypeGpu, GPUTensorConstructor, GPUTensorI } from '../../../tensor/gpu/interface';
import { GPUMemoryAllocator } from '../../../tensor/gpu/memory';
import { BinaryOperation } from './binaryOperation';
export declare class PowerOperation<GPUTensor extends GPUTensorI> extends BinaryOperation<GPUTensor> {
constructor(tensorConstructor: GPUTensorConstructor<GPUTensor>, dtype: DTypeGpu, allocator?: GPUMemoryAllocator);
getOp(a: string, b: string): string;
}
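The declaration suggests that getOp returns the shader expression used to combine the two operand values named by the strings a and b, as is typical for a BinaryOperation subclass in a GPU tensor library. The following is a minimal sketch of that idea, not the library's actual source: the class name PowerOperationSketch and the emitted GLSL snippet are assumptions for illustration only.

TypeScript
// Hypothetical sketch: an element-wise power operation would plausibly
// emit GLSL's built-in pow() over the two operand expressions.
class PowerOperationSketch {
  // `a` and `b` are shader-side expressions referring to the operand values;
  // the returned string is spliced into the generated fragment shader.
  getOp(a: string, b: string): string {
    return `pow(${a}, ${b})`;
  }
}

// Usage example (illustrative operand names):
const op = new PowerOperationSketch();
console.log(op.getOp('_A', '_B')); // -> "pow(_A, _B)"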