@cogniformai/instructor-stream

Streaming-first structured data extraction from LLMs with real-time updates
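
The chunk below bridges an @effect/ai language-model stream into a validated snapshot pipeline that emits progressively hydrated objects. As a rough usage sketch, assume the package exposes that extraction entry point under a name like streamObject (the chunk only re-exports it as a mangled identifier, so the import name, and whether a raw Zod schema is accepted directly rather than a named wrapper, are assumptions):

import { Effect, Stream } from "effect"
import { z } from "zod"
// Assumed public name/path; the chunk re-exports this function under a mangled identifier.
import { streamObject } from "@cogniformai/instructor-stream"

const Person = z.object({ name: z.string(), age: z.number() })

// Each stream element is { data: [partial], _meta }, where _meta carries validity
// flags, parse paths, and provider usage (see the chunk below).
const program = Stream.runForEach(
  streamObject({
    prompt: "Jason is 30 years old. Extract the person.",
    schema: Person,
    validationMode: "on-complete", // "none" (default) | "on-complete" | "final"
  }),
  ({ data: [partial], _meta }) =>
    Effect.log(`valid=${_meta._isValid} snapshot=${JSON.stringify(partial)}`)
)
// Running `program` additionally requires a LanguageModel layer from @effect/ai
// (e.g. a provider integration); that wiring is omitted here.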

// chunk-OHDPDXNB.js, reformatted from the published minified output.
// Identifiers are as emitted by the bundler. Judging by usage, the imports from the
// sibling chunk are: f = streaming error class, R = schema-configuration error class,
// k = validation error class, v = schema resolver, T = partial-JSON streaming parser.
import { b as f, c as R, d as k, e as v, f as T } from "./chunk-ZKNSR3JA.js";
import { Effect as S, Stream as _ } from "effect";

// Adapt a ReadableStream into an async iterable of its chunks.
async function* M(a) {
  let e = a.getReader();
  try {
    for (;;) {
      let { done: t, value: r } = await e.read();
      if (t) break;
      yield r;
    }
  } finally {
    e.releaseLock();
  }
}

import { Effect as l, Stream as x, Fiber as U } from "effect";
import * as E from "effect/Runtime";
import * as C from "@effect/ai/LanguageModel";

// Bridge an @effect/ai response-part stream into a byte ReadableStream of text deltas,
// collecting token usage and reasoning ("thinking") text as side-channel meta.
var z = (a) =>
  l.gen(function* () {
    let e = yield* l.runtime(),
      t = E.runFork(e),
      r,            // usage captured from the "finish" part
      o = "",       // accumulated reasoning text
      u,            // fiber driving the forEach
      s = false,    // has the stream already been settled (closed/errored)?
      g = new TextEncoder();
    return {
      stream: new ReadableStream({
        start(i) {
          u = t(
            x.runForEach(a, (n) =>
              l.sync(() => {
                switch (n.type) {
                  case "text-delta": {
                    n.delta.length > 0 && i.enqueue(g.encode(n.delta));
                    break;
                  }
                  case "reasoning-delta": {
                    o += n.delta;
                    break;
                  }
                  case "reasoning-end": {
                    "text" in n && typeof n.text == "string" && (o += n.text);
                    break;
                  }
                  case "finish": {
                    r = n.usage;
                    break;
                  }
                  case "error": {
                    s || ((s = true), i.error(n.error));
                    break;
                  }
                }
              })
            ).pipe(
              l.catchAll((n) => l.sync(() => { s || ((s = true), i.error(n)); })),
              l.ensuring(l.sync(() => { s || ((s = true), i.close()); }))
            )
          );
        },
        cancel() {
          u && t(U.interrupt(u));
        },
      }),
      meta: () => {
        let i = {};
        r &&
          (i.usage = {
            prompt_tokens: r.inputTokens ?? 0,
            completion_tokens: r.outputTokens ?? 0,
            total_tokens: r.totalTokens ?? (r.inputTokens ?? 0) + (r.outputTokens ?? 0),
          });
        let n = o.trim();
        return n.length > 0 && (i.thinking = n), i;
      },
    };
  }).pipe(
    l.catchAll((e) =>
      l.fail(new f({ message: "Unable to materialize language model stream", cause: e }))
    )
  );

// Streaming extraction: run LanguageModel.streamText, then hydrate the raw text
// stream into typed snapshots via the SnapshotHydrator service.
var N = (a) =>
  x.unwrap(
    l.gen(function* () {
      let e = yield* C.LanguageModel,
        t = yield* y,
        r = e.streamText({ prompt: a.prompt, ...(a.options ?? {}) }),
        o = yield* z(r);
      return t.stream({
        schema: a.schema,
        provider: { stream: o.stream, ...(o.meta ? { meta: o.meta } : {}) },
        ...(a.validationMode !== void 0 ? { validationMode: a.validationMode } : {}),
      });
    })
  );

// Build a default (stub) value for a schema without calling a model.
var B = (a, e) =>
  l.flatMap(y, (t) => t.stub({ schema: a, ...(e !== void 0 ? { defaultData: e } : {}) }));

import * as q from "effect/Schema";

// Optional validators: Effect Schema (D) and Zod (G), each returning an async predicate.
var D = (a) => {
  if (!a) return;
  let e = q.decodeUnknown(a);
  return async (t) => (await S.runPromiseExit(e(t)))._tag === "Success";
};
var G = (a) => {
  if (a) return async (e) => (await a.safeParseAsync(e)).success;
};

// Core pipeline: provider byte stream -> partial-JSON parser -> snapshot transform
// that attaches _meta (validity, parse paths, provider meta) to every emitted value.
var Z = ({ schema: a, provider: e, validationMode: t = "none" }) => {
  let r = v(a),
    o = r.zod;
  if (!o)
    throw new R({ message: `Schema '${r.name}' requires a Zod schema for streaming stub generation.` });
  let u = D(r.effect),
    s = G(o),
    g = typeof e.meta == "function" ? e.meta : () => e.meta ?? {},
    A = [],        // path the parser is currently filling
    b = [],        // completed paths
    i = 0,         // completed-path count seen so far
    n = false,     // did a new key complete since the last snapshot?
    h = null,      // latest snapshot (kept for "final"-mode validation on flush)
    d = true,      // latest validation result
    F = new T(o, {
      snapshotMode: "object",
      typeDefaults: { string: null, number: null, boolean: null },
      onKeyComplete: ({ activePath: m, completedPaths: p }) => {
        A = [...m];
        b = p.map((c) => (Array.isArray(c) ? [...c] : []));
        p.length > i && ((n = true), (i = p.length));
      },
    }).parse({ handleUnescapedNewLines: true }),
    L = new TransformStream({
      async transform(m, p) {
        h = m;
        try {
          // "on-complete": re-validate whenever a new key finished parsing;
          // "none": always report valid; "final": defer validation to flush.
          if (t === "on-complete" && n) {
            u ? (d = await u(m)) : s ? (d = await s(m)) : (d = true);
            n = false;
          } else if (t === "none") {
            d = true;
          }
          let c = g(),
            P = e.channelType ?? c._type,
            w = {
              ...c,
              _isValid: t === "none" ? true : d,
              _activePath: A,
              _completedPaths: b,
              ...(P !== void 0 ? { _type: P } : {}),
            };
          p.enqueue({ data: [m], _meta: w });
        } catch (c) {
          p.error(new f({ message: "Failed to process streaming snapshot", cause: c }));
        }
      },
      async flush(m) {
        if (!h || t !== "final") return;
        try {
          u ? (d = await u(h)) : s ? (d = await s(h)) : (d = true);
        } catch (w) {
          m.error(new k({ reason: "Final validation failed", issues: w }));
          return;
        }
        let p = g(),
          c = e.channelType ?? p._type,
          P = {
            ...p,
            _isValid: d,
            _activePath: A,
            _completedPaths: b,
            ...(c !== void 0 ? { _type: c } : {}),
          };
        m.enqueue({ data: [h], _meta: P });
      },
    }),
    H = e.stream.pipeThrough(F).pipeThrough(L),
    V = M(H);
  return _.fromAsyncIterable(V, (m) => new f({ message: "Streaming pipeline failed", cause: m }));
};

// Derive a default streaming stub for a schema from the parser's type defaults.
var O = ({ schema: a, defaultData: e }) => {
  let t = v(a),
    r = t.zod;
  if (!r)
    throw new R({ message: `Schema '${t.name}' requires a Zod schema to derive default streaming stubs.` });
  let o = e;
  return new T(r, { defaultData: o, typeDefaults: { string: null, number: null, boolean: null } }).getSchemaStub(r, o);
};

// Effect service exposing the two entry points above.
var y = class extends S.Service()("instructor/streaming/SnapshotHydrator", {
  effect: S.sync(() => ({
    stream: (e) =>
      _.unwrap(
        S.try({
          try: () => Z(e),
          catch: (t) => new f({ message: "Unable to construct streaming pipeline", cause: t }),
        })
      ),
    stub: (e) =>
      S.try({
        try: () => O(e),
        catch: (t) => new f({ message: "Unable to construct schema stub", cause: t }),
      }),
  })),
}) {};
var ce = y.Default;

export { y as a, ce as b, N as c, B as d };
//# sourceMappingURL=chunk-OHDPDXNB.js.map