onnxruntime-web

A Javascript library for running ONNX models on browsers

'use strict';
// automatically generated by the FlatBuffers compiler, do not modify
var __createBinding =
  (this && this.__createBinding) ||
  (Object.create
    ? function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        var desc = Object.getOwnPropertyDescriptor(m, k);
        if (!desc || ('get' in desc ? !m.__esModule : desc.writable || desc.configurable)) {
          desc = {
            enumerable: true,
            get: function () {
              return m[k];
            },
          };
        }
        Object.defineProperty(o, k2, desc);
      }
    : function (o, m, k, k2) {
        if (k2 === undefined) k2 = k;
        o[k2] = m[k];
      });
var __setModuleDefault =
  (this && this.__setModuleDefault) ||
  (Object.create
    ? function (o, v) {
        Object.defineProperty(o, 'default', { enumerable: true, value: v });
      }
    : function (o, v) {
        o['default'] = v;
      });
var __importStar =
  (this && this.__importStar) ||
  function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null)
      for (var k in mod)
        if (k !== 'default' && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
  };
Object.defineProperty(exports, '__esModule', { value: true });
exports.InferenceSession = void 0;
/* eslint-disable @typescript-eslint/no-unused-vars, @typescript-eslint/no-explicit-any, @typescript-eslint/no-non-null-assertion */
const flatbuffers = __importStar(require('flatbuffers'));
const kernel_type_str_resolver_js_1 = require('../../onnxruntime/fbs/kernel-type-str-resolver.js');
const model_js_1 = require('../../onnxruntime/fbs/model.js');
class InferenceSession {
  constructor() {
    this.bb = null;
    this.bb_pos = 0;
  }
  __init(i, bb) {
    this.bb_pos = i;
    this.bb = bb;
    return this;
  }
  static getRootAsInferenceSession(bb, obj) {
    return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
  }
  static getSizePrefixedRootAsInferenceSession(bb, obj) {
    bb.setPosition(bb.position() + flatbuffers.SIZE_PREFIX_LENGTH);
    return (obj || new InferenceSession()).__init(bb.readInt32(bb.position()) + bb.position(), bb);
  }
  static bufferHasIdentifier(bb) {
    return bb.__has_identifier('ORTM');
  }
  ortVersion(optionalEncoding) {
    const offset = this.bb.__offset(this.bb_pos, 4);
    return offset ? this.bb.__string(this.bb_pos + offset, optionalEncoding) : null;
  }
  model(obj) {
    const offset = this.bb.__offset(this.bb_pos, 6);
    return offset ? (obj || new model_js_1.Model()).__init(this.bb.__indirect(this.bb_pos + offset), this.bb) : null;
  }
  kernelTypeStrResolver(obj) {
    const offset = this.bb.__offset(this.bb_pos, 10);
    return offset
      ? (obj || new kernel_type_str_resolver_js_1.KernelTypeStrResolver()).__init(
          this.bb.__indirect(this.bb_pos + offset),
          this.bb,
        )
      : null;
  }
  static startInferenceSession(builder) {
    builder.startObject(4);
  }
  static addOrtVersion(builder, ortVersionOffset) {
    builder.addFieldOffset(0, ortVersionOffset, 0);
  }
  static addModel(builder, modelOffset) {
    builder.addFieldOffset(1, modelOffset, 0);
  }
  static addKernelTypeStrResolver(builder, kernelTypeStrResolverOffset) {
    builder.addFieldOffset(3, kernelTypeStrResolverOffset, 0);
  }
  static endInferenceSession(builder) {
    const offset = builder.endObject();
    return offset;
  }
  static finishInferenceSessionBuffer(builder, offset) {
    builder.finish(offset, 'ORTM');
  }
  static finishSizePrefixedInferenceSessionBuffer(builder, offset) {
    builder.finish(offset, 'ORTM', true);
  }
}
exports.InferenceSession = InferenceSession;
//# sourceMappingURL=inference-session.js.map
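
For orientation, a minimal usage sketch follows; it is not part of the generated file above. It assumes the flatbuffers npm package is installed, that the file above is required as ./inference-session.js, and that ortModelBytes is a Uint8Array holding ORT format model data; all three names are illustrative assumptions, not APIs defined by this file.

// Usage sketch (assumed setup, see note above): read the root InferenceSession
// table from an ORT format buffer and return its ORT version string.
const flatbuffers = require('flatbuffers');
const { InferenceSession } = require('./inference-session.js');

function readOrtVersion(ortModelBytes) {
  const bb = new flatbuffers.ByteBuffer(ortModelBytes);
  // The generated helper checks the buffer for the 'ORTM' file identifier.
  if (!InferenceSession.bufferHasIdentifier(bb)) {
    throw new Error('Buffer does not carry the ORTM identifier');
  }
  const session = InferenceSession.getRootAsInferenceSession(bb);
  // ortVersion() returns the stored string, or null if the field is absent.
  return session.ortVersion();
}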