@hoff97/tensor-js
PyTorch-like deep learning inference library
TypeScript
import { DTypeGpu, GPUTensorConstructor, GPUTensorI } from '../../../tensor/gpu/interface';
import { GPUMemoryAllocator } from '../../../tensor/gpu/memory';
import { Input } from '../operation';
import { BinaryOperation, BinaryOpInfo, BinaryOpInput } from './binaryOperation';
/** Compilation info for the divide operation, with an optional alpha value. */
export interface DivideInfo extends BinaryOpInfo {
  alpha?: number;
}
/** Input to a divide operation; requires an alpha value in addition to the binary-op inputs. */
export interface DivideInput extends BinaryOpInput {
  alpha: number;
}
/** Element-wise division operation executed on the GPU. */
export declare class DivideOperation<GPUTensor extends GPUTensorI> extends BinaryOperation<GPUTensor, DivideInfo, DivideInput> {
  constructor(tensorConstructor: GPUTensorConstructor<GPUTensor>, dtype: DTypeGpu, allocator?: GPUMemoryAllocator);
  /** Expression string combining operands `a` and `b` in the generated kernel. */
  getOp(a: string, b: string): string;
  /** Runs the division for the given input and returns the result tensor. */
  calc(input: DivideInput): GPUTensor;
  /** Extra variable declarations used by the generated kernel. */
  getVariables(): string;
  /** Uniform attributes of this operation. */
  getUniformAttrs(): Input[];
  /** Derives the compilation info (including alpha) from a concrete input. */
  getCompilationInfo(input: DivideInput): DivideInfo;
  /** String summary of a concrete input. */
  getInputInfoString(input: DivideInput): string;
}
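
For orientation, below is a small standalone sketch of the pattern this declaration describes: a binary GPU operation contributes an expression string via getOp() plus the extra variables it needs (here, alpha). The base class shape, the generated expression, and the exact alpha semantics are assumptions for illustration only, not the library's actual source.

// Standalone illustration; names and bodies are assumptions, not library code.
abstract class BinaryOpSketch {
  // Expression that combines one element of input `a` with one of `b`.
  abstract getOp(a: string, b: string): string;

  // Extra uniform declarations the expression relies on.
  getVariables(): string {
    return '';
  }
}

class DivideOpSketch extends BinaryOpSketch {
  getOp(a: string, b: string): string {
    // Assumed semantics: element-wise division scaled by `alpha`.
    return `alpha * (${a} / ${b})`;
  }

  getVariables(): string {
    return 'uniform float alpha;';
  }
}

// Inspect what the divide op would contribute to a generated kernel:
const op = new DivideOpSketch();
console.log(op.getVariables());        // uniform float alpha;
console.log(op.getOp('valA', 'valB')); // alpha * (valA / valB)

In the declared class above, calc() presumably compiles and runs such an expression on GPU tensors, with alpha supplied through the uniforms listed by getUniformAttrs().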