@cogniformai/instructor-stream
Version:
Streaming-first structured data extraction from LLMs with real-time updates
JavaScript
;Object.defineProperty(exports, "__esModule", { value: true });

// Bundler interop helpers (sucrase-style CJS output).
function _interopRequireWildcard(obj) {
  if (obj && obj.__esModule) {
    return obj;
  } else {
    var newObj = {};
    if (obj != null) {
      for (var key in obj) {
        if (Object.prototype.hasOwnProperty.call(obj, key)) {
          newObj[key] = obj[key];
        }
      }
    }
    newObj.default = obj;
    return newObj;
  }
}
function _nullishCoalesce(lhs, rhsFn) {
  if (lhs != null) {
    return lhs;
  } else {
    return rhsFn();
  }
}

var _chunk2RKUKCF5cjs = require('./chunk-2RKUKCF5.cjs');
var _effect = require('effect');

// Adapt a web ReadableStream into an async iterable, releasing the reader lock on exit.
async function* M(a) {
  let e = a.getReader();
  try {
    for (;;) {
      let { done: t, value: r } = await e.read();
      if (t) break;
      yield r;
    }
  } finally {
    e.releaseLock();
  }
}

var _Runtime = require('effect/Runtime'); var E = _interopRequireWildcard(_Runtime);
var _LanguageModel = require('@effect/ai/LanguageModel'); var C = _interopRequireWildcard(_LanguageModel);

// Materialize an @effect/ai text-generation stream as a byte ReadableStream of
// text deltas plus a meta() accessor exposing token usage and accumulated reasoning.
var z = a => _effect.Effect.gen(function* () {
  let e = yield* _effect.Effect.runtime(),
      t = E.runFork(e),       // fork effects onto the captured runtime
      r,                      // usage report from the "finish" part
      o = "",                 // accumulated reasoning ("thinking") text
      u,                      // fiber driving the provider stream
      s = !1,                 // whether the ReadableStream was already closed/errored
      g = new TextEncoder();
  return {
    stream: new ReadableStream({
      start(i) {
        u = t(_effect.Stream.runForEach(a, n => _effect.Effect.sync(() => {
          switch (n.type) {
            case "text-delta": {
              n.delta.length > 0 && i.enqueue(g.encode(n.delta));
              break;
            }
            case "reasoning-delta": {
              o += n.delta;
              break;
            }
            case "reasoning-end": {
              "text" in n && typeof n.text == "string" && (o += n.text);
              break;
            }
            case "finish": {
              r = n.usage;
              break;
            }
            case "error": {
              s || (s = !0, i.error(n.error));
              break;
            }
          }
        })).pipe(
          _effect.Effect.catchAll(n => _effect.Effect.sync(() => { s || (s = !0, i.error(n)); })),
          _effect.Effect.ensuring(_effect.Effect.sync(() => { s || (s = !0, i.close()); }))
        ));
      },
      cancel() {
        u && t(_effect.Fiber.interrupt(u));
      }
    }),
    meta: () => {
      let i = {};
      r && (i.usage = {
        prompt_tokens: _nullishCoalesce(r.inputTokens, () => (0)),
        completion_tokens: _nullishCoalesce(r.outputTokens, () => (0)),
        total_tokens: _nullishCoalesce(r.totalTokens, () => ((_nullishCoalesce(r.inputTokens, () => (0))) + (_nullishCoalesce(r.outputTokens, () => (0)))))
      });
      let n = o.trim();
      return n.length > 0 && (i.thinking = n), i;
    }
  };
}).pipe(_effect.Effect.catchAll(e => _effect.Effect.fail(new (0, _chunk2RKUKCF5cjs.b)({ message: "Unable to materialize language model stream", cause: e })))),

// exports.c: prompt-to-snapshot stream. Resolves the LanguageModel and
// SnapshotHydrator services, streams text for the prompt, and hydrates the
// output into schema-shaped snapshots.
N = exports.c = a => _effect.Stream.unwrap(_effect.Effect.gen(function* () {
  let e = yield* C.LanguageModel,
      t = yield* y,
      r = e.streamText({ prompt: a.prompt, ..._nullishCoalesce(a.options, () => ({})) }),
      o = yield* z(r);
  return t.stream({
    schema: a.schema,
    provider: { stream: o.stream, ...o.meta ? { meta: o.meta } : {} },
    ...a.validationMode !== void 0 ? { validationMode: a.validationMode } : {}
  });
})),

// exports.d: build a default-valued stub for a schema via the hydrator service.
B = exports.d = (a, e) => _effect.Effect.flatMap(y, t => t.stub({ schema: a, ...e !== void 0 ? { defaultData: e } : {} }));

var _Schema = require('effect/Schema'); var q = _interopRequireWildcard(_Schema);

// Async validator from an Effect Schema, if one is provided.
var D = a => {
  if (!a) return;
  let e = q.decodeUnknown(a);
  return async t => (await _effect.Effect.runPromiseExit(e(t)))._tag === "Success";
},

// Async validator from a Zod schema, if one is provided.
G = a => {
  if (a) return async e => (await a.safeParseAsync(e)).success;
},

// Core pipeline: provider byte stream -> incremental JSON snapshot parser ->
// validation/metadata transform -> Effect Stream of { data, _meta } records.
Z = ({ schema: a, provider: e, validationMode: t = "none" }) => {
  let r = _chunk2RKUKCF5cjs.e.call(void 0, a),
      o = r.zod;
  if (!o) throw new (0, _chunk2RKUKCF5cjs.c)({ message: `Schema '${r.name}' requires a Zod schema for streaming stub generation.` });
  let u = D(r.effect),                   // Effect Schema validator (preferred when present)
      s = G(o),                          // Zod validator fallback
      g = typeof e.meta == "function" ? e.meta : () => _nullishCoalesce(e.meta, () => ({})),
      A = [],                            // path currently being written
      b = [],                            // paths whose values are complete
      i = 0,                             // completed-path count seen so far
      n = !1,                            // a new key completed since the last validation
      h = null,                          // latest snapshot, kept for final validation on flush
      d = !0,                            // last validation result
      F = new (0, _chunk2RKUKCF5cjs.f)(o, {
        snapshotMode: "object",
        typeDefaults: { string: null, number: null, boolean: null },
        onKeyComplete: ({ activePath: m, completedPaths: p }) => {
          A = [...m], b = p.map(c => Array.isArray(c) ? [...c] : []), p.length > i && (n = !0, i = p.length);
        }
      }).parse({ handleUnescapedNewLines: !0 }),
      L = new TransformStream({
        async transform(m, p) {
          h = m;
          try {
            // "on-complete": re-validate only when a new key finished; "none": always valid.
            t === "on-complete" && n ? (u ? d = await u(m) : s ? d = await s(m) : d = !0, n = !1) : t === "none" && (d = !0);
            let c = g(),
                P = _nullishCoalesce(e.channelType, () => (c._type)),
                w = { ...c, _isValid: t === "none" ? !0 : d, _activePath: A, _completedPaths: b, ...P !== void 0 ? { _type: P } : {} };
            p.enqueue({ data: [m], _meta: w });
          } catch (c) {
            p.error(new (0, _chunk2RKUKCF5cjs.b)({ message: "Failed to process streaming snapshot", cause: c }));
          }
        },
        async flush(m) {
          // "final": validate the last snapshot once the provider stream ends.
          if (!h || t !== "final") return;
          try {
            u ? d = await u(h) : s ? d = await s(h) : d = !0;
          } catch (w) {
            m.error(new (0, _chunk2RKUKCF5cjs.d)({ reason: "Final validation failed", issues: w }));
            return;
          }
          let p = g(),
              c = _nullishCoalesce(e.channelType, () => (p._type)),
              P = { ...p, _isValid: d, _activePath: A, _completedPaths: b, ...c !== void 0 ? { _type: c } : {} };
          m.enqueue({ data: [h], _meta: P });
        }
      }),
      H = e.stream.pipeThrough(F).pipeThrough(L),
      V = M(H);
  return _effect.Stream.fromAsyncIterable(V, m => new (0, _chunk2RKUKCF5cjs.b)({ message: "Streaming pipeline failed", cause: m }));
},

// Derive a default-valued stub object for a schema, optionally seeded with defaultData.
O = ({ schema: a, defaultData: e }) => {
  let t = _chunk2RKUKCF5cjs.e.call(void 0, a),
      r = t.zod;
  if (!r) throw new (0, _chunk2RKUKCF5cjs.c)({ message: `Schema '${t.name}' requires a Zod schema to derive default streaming stubs.` });
  let o = e;
  return new (0, _chunk2RKUKCF5cjs.f)(r, { defaultData: o, typeDefaults: { string: null, number: null, boolean: null } }).getSchemaStub(r, o);
},

// exports.a: SnapshotHydrator service wrapping the pipeline (stream) and stub builders.
y = exports.a = class extends _effect.Effect.Service()("instructor/streaming/SnapshotHydrator", {
  effect: _effect.Effect.sync(() => ({
    stream: e => _effect.Stream.unwrap(_effect.Effect.try({
      try: () => Z(e),
      catch: t => new (0, _chunk2RKUKCF5cjs.b)({ message: "Unable to construct streaming pipeline", cause: t })
    })),
    stub: e => _effect.Effect.try({
      try: () => O(e),
      catch: t => new (0, _chunk2RKUKCF5cjs.b)({ message: "Unable to construct schema stub", cause: t })
    })
  }))
}) {},

// exports.b: default layer for the SnapshotHydrator service.
ce = exports.b = y.Default;

exports.a = y; exports.b = ce; exports.c = N; exports.d = B;
//# sourceMappingURL=chunk-AL3KQNZS.cjs.map
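For orientation, a minimal consumption sketch follows. It assumes the package's entry point re-exports this chunk's exports.c (the prompt-to-snapshot stream) and exports.b (the default SnapshotHydrator layer) under public names; the aliases streamSnapshots and SnapshotHydratorLive, the bare Zod schema argument, and the language-model layer are placeholders, not confirmed by this file.

// Usage sketch only. `streamSnapshots` and `SnapshotHydratorLive` are assumed
// aliases for this chunk's exports.c and exports.b; the real public names live
// in the package's entry point, not in this file.
const { Effect, Layer, Stream } = require('effect');
const { z } = require('zod');
const { streamSnapshots, SnapshotHydratorLive } = require('@cogniformai/instructor-stream'); // hypothetical names

const User = z.object({ name: z.string(), age: z.number() });

const program = Stream.runForEach(
  streamSnapshots({
    prompt: "Extract name and age from: 'Ada Lovelace, 36'",
    schema: User,                  // the schema wrapper is resolved by chunk-2RKUKCF5; passing a bare Zod schema is an assumption
    validationMode: 'on-complete', // "none" | "on-complete" | "final", as handled in the pipeline above
  }),
  (snapshot) =>
    Effect.sync(() => {
      // Each emission is { data: [partialObject], _meta: { _isValid, _activePath, _completedPaths, ... } }.
      console.log(snapshot.data[0], snapshot._meta._isValid);
    })
);

// Running this requires an @effect/ai LanguageModel layer from a provider
// package plus the hydrator layer; `languageModelLayer` is a placeholder.
// Effect.runPromise(program.pipe(Effect.provide(Layer.mergeAll(SnapshotHydratorLive, languageModelLayer))));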