@hoff97/tensor-js
Version:
PyTorch-like deep learning inference library
10 lines (9 loc) • 727 B
TypeScript
import { BinaryOperation } from '../../../../ops/gpu/binary/binaryOperation';
import { Dispatcher } from '../../../../ops/gpu/dispatcher';
import { DTypeGpu, GPUTensorConstructor, GPUTensorI } from '../../../../tensor/gpu/interface';
import { GPUMemoryAllocator } from '../../../../tensor/gpu/memory';
/**
 * GPU operation for the backward pass of binary cross-entropy (BCE) loss,
 * implemented as a two-input (binary) element-wise GPU operation.
 * NOTE(review): the exact gradient formula lives in the implementation's
 * shader snippet, not visible here — confirm against the .ts source.
 */
export declare class BCEBackOperation<GPUTensor extends GPUTensorI> extends BinaryOperation<GPUTensor> {
    /**
     * @param tensorConstructor Factory used to construct result tensors.
     * @param dtype GPU data type the operation works with.
     * @param allocator Optional GPU memory allocator; presumably a default
     *   allocator is used when omitted — verify against the base class.
     */
    constructor(tensorConstructor: GPUTensorConstructor<GPUTensor>, dtype: DTypeGpu, allocator?: GPUMemoryAllocator);
    /**
     * Returns the shader source fragment computing the element-wise result
     * from the two operand expressions `a` and `b` — NOTE(review): assumed
     * from the BinaryOperation.getOp contract; confirm in the base class.
     */
    getOp(a: string, b: string): string;
}
/**
 * Default dispatcher for {@link BCEBackOperation}, specialized to the concrete
 * GPUTensor implementation. Dispatchers presumably route calls to a cached
 * operation instance per dtype — TODO confirm against the Dispatcher source.
 */
export declare const defaultBCEBackD: Dispatcher<GPUTensorI, unknown, unknown, BCEBackOperation<import("../../../../tensor/gpu/tensor").GPUTensor<DTypeGpu>>>;