ai-sdk-token-usage
Version:
A lightweight TypeScript library for tracking and visualizing token usage across multiple AI model providers.
Source Map (JSON)
{"version":3,"sources":["../src/core/format.ts","../src/core/errors.ts","../src/core/hooks/helpers.ts","../src/core/hooks/use-models.ts","../src/core/hooks/use-model-details.ts","../src/core/hooks/use-token-context.ts","../src/core/hooks/use-token-cost.ts"],"names":["useMemo"],"mappings":";;;;AAeO,SAAS,kBAAkB,MAAA,EAAgB;AAChD,EAAA,OAAO,IAAI,IAAA,CAAK,YAAA,CAAa,OAAA,EAAS;AAAA,IACpC,QAAA,EAAU;AAAA,GACX,CAAA,CAAE,MAAA,CAAO,MAAM,CAAA;AAClB;AAkBO,SAAS,WAAA,CAAY,KAAA,EAAe,QAAA,GAAmB,KAAA,EAAO;AACnE,EAAA,OAAO,IAAI,IAAA,CAAK,YAAA,CAAa,OAAA,EAAS;AAAA,IACpC,KAAA,EAAO,UAAA;AAAA,IACP;AAAA,GACD,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA;AACjB;;;ACxCO,IAAM,SAAA,GAAN,cAAwB,KAAA,CAAM;AAAA,EACnC,MAAA;AAAA,EACA,IAAA;AAAA,EAEA,WAAA,CAAY,MAAA,EAAgB,IAAA,EAAe,OAAA,EAAkB;AAC3D,IAAA,KAAA,CAAM,WAAW,mBAAmB,CAAA;AACpC,IAAA,IAAA,CAAK,IAAA,GAAO,WAAA;AACZ,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AAAA,EAEA,MAAA,GAA0B;AACxB,IAAA,OAAO;AAAA,MACL,MAAM,IAAA,CAAK,IAAA;AAAA,MACX,SAAS,IAAA,CAAK,OAAA;AAAA,MACd,QAAQ,IAAA,CAAK,MAAA;AAAA,MACb,MAAM,IAAA,CAAK;AAAA,KACb;AAAA,EACF;AACF,CAAA;AAEO,IAAM,UAAA,GAAN,cAAyB,SAAA,CAAU;AAAA,EACxC,WAAA,CAAY,MAAA,EAAgB,IAAA,EAAe,OAAA,EAAkB;AAC3D,IAAA,KAAA;AAAA,MACE,MAAA;AAAA,MACA,IAAA;AAAA,MACA,OAAA,IAAW;AAAA,KACb;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,YAAA;AAAA,EACd;AACF,CAAA;AAEO,IAAM,kBAAA,GAAN,cAAiC,SAAA,CAAU;AAAA,EAChD,YAAY,IAAA,EAAe;AACzB,IAAA,KAAA;AAAA,MACE,GAAA;AAAA,MACA,IAAA;AAAA,MACA;AAAA,KACF;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,oBAAA;AAAA,EACd;AACF,CAAA;AAEO,IAAM,oBAAA,GAAN,cAAmC,SAAA,CAAU;AAAA,EAClD,YAAY,IAAA,EAAe;AACzB,IAAA,KAAA;AAAA,MACE,GAAA;AAAA,MACA,IAAA;AAAA,MACA;AAAA,KACF;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,sBAAA;AAAA,EACd;AACF,CAAA;AAEO,IAAM,oBAAA,GAAN,cAAmC,SAAA,CAAU;AAAA,EAClD,YAAY,IAAA,EAAe;AACzB,IAAA,KAAA;AAAA,MACE,GAAA;AAAA,MACA,IAAA;AAAA,MACA;AAAA,KACF;AACA,IAAA,IAAA,CAAK,IAAA,GAAO,sBAAA;AAAA,EACd;AACF,CAAA;AAEO,IAAM,YAAA,GAAN,cAA2B,SAAA,CAAU;AAAA,EAC1C,WAAA,GAAc;AACZ,IAAA,KAAA,CAAM,GAAA,EAAK,QAAW,0BAA0B,CAAA;AAChD,IAAA,IAAA,CAAK,IAAA,GAAO,cAAA;AAAA,EACd;AACF,CAAA;;;ACnEO,SAAS,kBAAkB,GAAA,EAAc;AAC9C,EAAA,OAAO,GAAA,YAAe,YAAY,GAAA,CAAI,MAAA,KAAW,IAAI,YAAA,GAAe,MAAA,EAAO;AAC7E;AAEO,SAAS,YAAY,KAAA,EAAwB;AAClD,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,MAAA;AAAA,IACN,SAAA,EAAW,KAAA;AAAA,IACX;AAAA,GACF;AACF;AAEO,SAAS,aAAA,GAAgB;AAC9B,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,MAAA;AAAA,IACN,SAAA,EAAW,IAAA;AAAA,IACX,KAAA,EAAO;AAAA,GACT;AACF;AAEO,SAAS,cAAiB,IAAA,EAAS;AACxC,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,SAAA,EAAW,KAAA;AAAA,IACX,KAAA,EAAO;AAAA,GACT;AACF;AAEA,IAAM,cAAA,GAAwC,EAAE,gBAAA,EAAkB,KAAA,EAAM;AACxE,IAAM,eAAA,GAAyD;AAAA,EAC7D,MAAA,EAAQ,EAAE,gBAAA,EAAkB,IAAA,EAAK;AAAA,EACjC,MAAA,EAAQ,EAAE,gBAAA,EAAkB,KAAA,EAAM;AAAA,EAClC,SAAA,EAAW,EAAE,gBAAA,EAAkB,KAAA;AACjC,CAAA;AAEA,SAAS,UAAU,UAAA,EAAoB;AACrC,EAAA,OAAO,eAAA,CAAgB,UAAU,CAAA,IAAK,cAAA;AACxC;AAEO,SAAS,mBAAA,CAAoB,OAAA,EAAoB,mBAAA,GAA+B,KAAA,EAA6B;AAClH,EAAA,MAAM,EAAE,UAAA,EAAY,aAAA,EAAc,GAAI,OAAA,CAAQ,QAAA;AAC9C,EAAA,MAAM,EAAE,UAAA,EAAW,GAAI,kBAAA,CAAmB,aAAa,CAAA;AAEvD,EAAA,MAAM,MAAA,GAAS,UAAU,UAAU,CAAA;AAEnC,EAAA,MAAM,KAAA,GAAQ,WAAW,WAAA,IAAe,CAAA;AACxC,EAAA,MAAM,WAAA,GAAc,WAAW,iBAAA,IAAqB,CAAA;AACpD,EAAA,MAAM,MAAA,GAAS,WAAW,YAAA,IAAgB,CAAA;AAC1C,EAAA,MAAM,SAAA,GAAY,WAAW,eAAA,IAAmB,CAAA;AAEhD,EAAA,MAAM,aAAA,GAAgB,QAAQ,KAAA,CAAM,IAAA,CAAK,CAAC,CAAA,KAAM,CAAA,CAAE,SAAS,WAAW,CAAA;AACtE,EAAA,MAAM,qBAAA,GAAwB,aAAA,IAAiB,aAAA,CAAc,IAAA,CAAK,MAAK,KAAM,EAAA;AAG7E,EAAA,MAAM,sBAAsB,qBAAA,IAAyB,mBAAA;AAErD,EAAA,OAAO;AAAA,IACL,KAAA;AAAA,IACA,MAAA,EAAQ,OAAO,gBAAA,GAAmB,IAAA,CAAK,IAAI,CAAA,EAAG,MAAA,GAAS,SAAS,CAAA,GAAI,MAAA;AAAA,IACpE,SAAA,EAAW,sBAAsB,CAAA,GAA
I,SAAA;AAAA,IACrC;AAAA,GACF;AACF;AAEO,SAAS,mBAAmB,IAAA,EAAuD;AACxF,EAAA,MAAM,CAAC,aAAa,EAAA,EAAI,OAAA,GAAU,EAAE,CAAA,GAAI,IAAA,CAAK,KAAA,CAAM,GAAA,EAAK,CAAC,CAAA;AACzD,EAAA,OAAO,EAAE,YAAY,OAAA,EAAQ;AAC/B;AAEO,SAAS,6BAA6B,OAAA,EAAyC;AACpF,EAAA,IAAI,CAAC,SAAS,OAAO,KAAA;AAErB,EAAA,MAAM,OAAO,OAAA,CAAQ,QAAA;AAGrB,EAAA,IAAI,IAAA,IAAQ,QAAQ,OAAO,IAAA,KAAS,YAAY,KAAA,CAAM,OAAA,CAAQ,IAAI,CAAA,EAAG,OAAO,IAAA;AAE5E,EAAA,MAAM,CAAA,GAAI,IAAA;AAGV,EAAA,MAAM,gBAAA,GAAmB,OAAO,CAAA,CAAE,aAAA,KAAkB,QAAA;AACpD,EAAA,MAAM,aAAA,GAAgB,OAAO,CAAA,CAAE,UAAA,KAAe,QAAA,IAAY,CAAA,CAAE,UAAA,KAAe,IAAA,IAAQ,CAAC,KAAA,CAAM,OAAA,CAAQ,CAAA,CAAE,UAAU,CAAA;AAE9G,EAAA,OAAO,EAAE,gBAAA,IAAoB,aAAA,CAAA;AAC/B;ACnFA,eAAe,YAAe,GAAA,EAAa;AACzC,EAAA,MAAM,GAAA,GAAM,MAAM,KAAA,CAAM,GAAG,CAAA;AAE3B,EAAA,IAAI,CAAC,IAAI,EAAA,EAAI;AACX,IAAA,MAAM,IAAI,UAAA,CAAW,GAAA,CAAI,QAAQ,MAAM,GAAA,CAAI,MAAM,CAAA;AAAA,EACnD;AAEA,EAAA,OAAO,IAAI,IAAA,EAAK;AAClB;AAEO,SAAS,SAAA,GAAY;AAC1B,EAAA,MAAM,EAAE,IAAA,EAAM,SAAA,EAAW,OAAM,GAAI,MAAA,CAA6C,iBAAiB,WAAW,CAAA;AAE5G,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,SAAA;AAAA,IACA;AAAA,GACF;AACF;;;ACGO,SAAS,eAAA,CAAgB,EAAE,aAAA,EAAc,EAAoD;AAClG,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,SAAA,EAAW,KAAA,KAAU,SAAA,EAAU;AAErD,EAAA,IAAI,SAAA,SAAkB,aAAA,EAAc;AACpC,EAAA,IAAI,KAAA,EAAO,OAAO,WAAA,CAAY,KAAA,CAAM,QAAQ,CAAA;AAE5C,EAAA,MAAM,EAAE,UAAA,EAAY,OAAA,EAAQ,GAAI,mBAAmB,aAAa,CAAA;AAEhE,EAAA,MAAM,KAAA,GAAQ,MAAA,GAAS,UAAU,CAAA,EAAG,OAAO,OAAO,CAAA;AAClD,EAAA,IAAI,CAAC,KAAA,EAAO;AACV,IAAA,OAAO,WAAA,CAAY,IAAI,kBAAA,CAAmB,EAAE,YAAY,OAAA,EAAS,CAAA,CAAE,MAAA,EAAQ,CAAA;AAAA,EAC7E;AACA,EAAA,IAAI,CAAC,MAAM,IAAA,EAAM;AACf,IAAA,OAAO,YAAY,IAAI,oBAAA,CAAqB,EAAE,UAAA,EAAY,OAAA,EAAS,CAAC,CAAA;AAAA,EACtE;AAEA,EAAA,MAAM,OAAO,KAAA,CAAM,IAAA;AACnB,EAAA,MAAM,QAAQ,KAAA,CAAM,KAAA;AAEpB,EAAA,MAAM,YAAA,GAA6B;AAAA,IACjC,aAAA;AAAA,IACA,OAAA,EAAS;AAAA,MACP,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,QAAQ,IAAA,CAAK,MAAA;AAAA,MACb,SAAA,EAAW,IAAA,CAAK,SAAA,IAAa,IAAA,CAAK,MAAA;AAAA,MAClC,WAAA,EAAa,IAAA,CAAK,UAAA,IAAc,IAAA,CAAK;AAAA,KACvC;AAAA,IACA;AAAA,GACF;AAEA,EAAA,OAAO,cAA4B,YAAY,CAAA;AACjD;ACzCA,SAAS,QAAA,CAAY,KAAmB,IAAA,EAAwC;AAC9E,EAAA,KAAA,IAAS,IAAI,GAAA,CAAI,MAAA,GAAS,CAAA,EAAG,CAAA,IAAK,GAAG,CAAA,EAAA,EAAK;AACxC,IAAA,MAAM,CAAA,GAAI,IAAI,CAAC,CAAA;AACf,IAAA,IAAI,MAAM,MAAA,EAAW;AACrB,IAAA,IAAI,IAAA,CAAK,CAAC,CAAA,EAAG,OAAO,CAAA;AAAA,EACtB;AACA,EAAA,OAAO,MAAA;AACT;AAEA,SAAS,cAAA,CAAe,SAAgC,KAAA,EAAuB;AAC7E,EAAA,IAAI,OAAA,IAAW,4BAAA,CAA6B,OAAO,CAAA,EAAG;AACpD,IAAA,MAAM,IAAI,oBAAA,CAAqB,EAAE,SAAS,QAAA,EAAU,OAAA,CAAQ,UAAU,CAAA;AAAA,EACxE;AAEA,EAAA,MAAM,SAAA,GAAY,OAAA,GAAU,mBAAA,CAAoB,OAAA,EAAS,IAAI,CAAA,GAAI,EAAE,KAAA,EAAO,CAAA,EAAG,MAAA,EAAQ,CAAA,EAAG,SAAA,EAAW,CAAA,EAAG,aAAa,CAAA,EAAE;AAErH,EAAA,MAAM,IAAA,GAAO,MAAA,CAAO,MAAA,CAAO,SAAS,CAAA,CAAE,MAAA,CAAO,CAAC,GAAA,EAAK,CAAA,KAAM,GAAA,GAAM,CAAA,EAAG,CAAC,CAAA;AACnE,EAAA,MAAM,KAAA,GAAQ,MAAM,KAAA,CAAM,OAAA;AAC1B,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,GAAA,CAAI,CAAA,EAAG,QAAQ,IAAI,CAAA;AAC1C,EAAA,MAAM,YAAA,GAAe,KAAA,GAAQ,CAAA,GAAI,IAAA,GAAO,KAAA,GAAQ,CAAA;AAChD,EAAA,MAAM,iBAAiB,YAAA,GAAe,GAAA;AACtC,EAAA,MAAM,aAAa,IAAA,GAAO,KAAA;AAE1B,EAAA,OAAO;AAAA,IACL,SAAA;AAAA,IACA,IAAA;AAAA,IACA,KAAA;AAAA,IACA,SAAA;AAAA,IACA,YAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACF;AACF;AAqBO,SAAS,eAAA,CAAgB;AAAA,EAC9B,QAAA;AAAA,EACA;AACF,CAAA,EAGoB;AAClB,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAQ,SAAA,EAAW,KAAA,KAAU,SAAA,EAAU;AAErD,EAAA,MAAM,0BAAA,GAA6B,OAAA;AAAA,IACjC,MAAM,QAAA,CAAS,QAAA,EAAU,CAAC,CAAA,KAAM,EAAE,IAAA,KAAS,WAAA,IAAe,CAAA,CAAE,QAAA,KAAa,MAAS,CAAA;AAAA,IAClF,CAAC,QAAQ;AAAA,GACX;AAEA,EAAA,IAAI,SAAA,SAAkB,aAAA,EAAc;AACpC,EAAA,IAAI,KAAA,EAAO,OAAO,WAAA,CAAY,KAAA,CAAM,QAAQ
// src/core/format.ts

/**
 * Formats a numeric token count into a compact, human-readable string.
 *
 * Uses the built-in `Intl.NumberFormat` API with `"compact"` notation
 * (e.g., `1K`, `2.5M`) and a U.S. English locale.
 *
 * @example
 * ```ts
 * formatTokenAmount(15230) // "15.2K"
 * formatTokenAmount(1000000) // "1M"
 * ```
 *
 * @param tokens - The number of tokens to format.
 * @returns A compact, localized string representation of the token amount.
 */
export function formatTokenAmount(tokens: number) {
  return new Intl.NumberFormat("en-US", {
    notation: "compact",
  }).format(tokens)
}

/**
 * Formats a numeric value as a localized currency string.
 *
 * Uses the built-in `Intl.NumberFormat` API to format the given price
 * in the specified currency. Defaults to USD if no currency code is provided.
 *
 * @example
 * ```ts
 * formatPrice(0.032) // "$0.03"
 * formatPrice(1.2, "EUR") // "€1.20"
 * ```
 *
 * @param price - The numeric value to format as a currency.
 * @param currency - The ISO 4217 currency code (defaults to `"USD"`).
 * @returns A localized currency string representing the formatted price.
 */
export function formatPrice(price: number, currency: string = "USD") {
  return new Intl.NumberFormat("en-US", {
    style: "currency",
    currency,
  }).format(price)
}
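As an illustration (not part of the embedded sources), a minimal sketch of how these formatting helpers could be used in UI code. The `ai-sdk-token-usage` import path assumes the package entry point re-exports them under the names above; the numeric values come from the JSDoc examples.

// Hypothetical usage; assumes the package entry point re-exports these helpers.
import { formatPrice, formatTokenAmount } from "ai-sdk-token-usage"

const tokensLabel = `${formatTokenAmount(15230)} tokens` // "15.2K tokens"
const usdLabel = formatPrice(0.032)                      // "$0.03"
const eurLabel = formatPrice(1.2, "EUR")                 // "€1.20"

console.log(tokensLabel, usdLabel, eurLabel)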
// src/core/errors.ts

import type { TokenUsageError } from "./types"

export class BaseError extends Error {
  status: number
  info: unknown

  constructor(status: number, info: unknown, message?: string) {
    super(message ?? "An error occurred")
    this.name = "BaseError"
    this.status = status
    this.info = info
  }

  toJSON(): TokenUsageError {
    return {
      name: this.name,
      message: this.message,
      status: this.status,
      info: this.info,
    }
  }
}

export class FetchError extends BaseError {
  constructor(status: number, info: unknown, message?: string) {
    super(
      status,
      info,
      message ?? "Network request failed or returned an unexpected status. Check `status` and `info` for details.",
    )
    this.name = "FetchError"
  }
}

export class ModelNotFoundError extends BaseError {
  constructor(info: unknown) {
    super(
      404,
      info,
      "Model not found in catalog. Verify the model ID/provider, or inspect the catalog at https://models.dev.",
    )
    this.name = "ModelNotFoundError"
  }
}

export class MissingMetadataError extends BaseError {
  constructor(info: unknown) {
    super(
      422,
      info,
      "Message metadata is missing or invalid. Expected { totalUsage: LanguageModelUsage, canonicalSlug: string }. Extra fields are allowed.",
    )
    this.name = "MissingMetadataError"
  }
}

export class CostComputationError extends BaseError {
  constructor(info: unknown) {
    super(
      404,
      info,
      "Cost computation failed: pricing is missing for one or more models. Visit https://models.dev to see the catalog",
    )
    this.name = "CostComputationError"
  }
}

export class UnknownError extends BaseError {
  constructor() {
    super(500, undefined, "An unknown error occurred")
    this.name = "UnknownError"
  }
}
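As an aside, the hooks below return these errors in their serialized `toJSON()` form ({ name, message, status, info }) rather than as thrown exceptions, so a consumer can branch on `name` and `status`. A sketch, assuming the `TokenUsageError` type is exported from the package entry point:

// Sketch only: branching on the serialized error shape produced by BaseError.toJSON().
import type { TokenUsageError } from "ai-sdk-token-usage"

function describeTokenUsageError(error: TokenUsageError): string {
  switch (error.name) {
    case "ModelNotFoundError":
      return "Model not in the models.dev catalog; check the canonical slug."
    case "MissingMetadataError":
      return "Message metadata must include { totalUsage, canonicalSlug }."
    case "FetchError":
      return `Model catalog request failed (HTTP ${error.status}).`
    default:
      return error.message
  }
}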
// src/core/hooks/helpers.ts

import type { UIMessage } from "ai"
import type { TokenUsageMetadata } from "../../shared-types"
import { BaseError, UnknownError } from "../errors"
import type { NormalizedTokenUsage, TokenAccountingPolicy, TokenUsageError } from "../types"

export function toTokenUsageError(err: unknown) {
  return err instanceof BaseError ? err.toJSON() : new UnknownError().toJSON()
}

export function resultError(error: TokenUsageError) {
  return {
    data: undefined,
    isLoading: false,
    error,
  }
}

export function resultLoading() {
  return {
    data: undefined,
    isLoading: true,
    error: null,
  }
}

export function resultSuccess<T>(data: T) {
  return {
    data,
    isLoading: false,
    error: null,
  }
}

const DEFAULT_POLICY: TokenAccountingPolicy = { reasoningBakedIn: false }
const PROVIDER_POLICY: Record<string, TokenAccountingPolicy> = {
  openai: { reasoningBakedIn: true },
  google: { reasoningBakedIn: false },
  anthropic: { reasoningBakedIn: false },
}

function getPolicy(providerId: string) {
  return PROVIDER_POLICY[providerId] ?? DEFAULT_POLICY
}

export function normalizeTokenUsage(message: UIMessage, stripEmptyReasoning: boolean = false): NormalizedTokenUsage {
  const { totalUsage, canonicalSlug } = message.metadata as TokenUsageMetadata
  const { providerId } = parseCanonicalSlug(canonicalSlug)

  const policy = getPolicy(providerId)

  const input = totalUsage.inputTokens ?? 0
  const cachedInput = totalUsage.cachedInputTokens ?? 0
  const output = totalUsage.outputTokens ?? 0
  const reasoning = totalUsage.reasoningTokens ?? 0

  const reasoningPart = message.parts.find((p) => p.type === "reasoning")
  const hasEmptyReasoningPart = reasoningPart && reasoningPart.text.trim() === ""

  // When computing the context window, strip reasoning tokens if the reasoning part is empty
  const shouldZeroReasoning = hasEmptyReasoningPart && stripEmptyReasoning

  return {
    input,
    output: policy.reasoningBakedIn ? Math.max(0, output - reasoning) : output,
    reasoning: shouldZeroReasoning ? 0 : reasoning,
    cachedInput,
  }
}

export function parseCanonicalSlug(slug: string): { providerId: string; modelId: string } {
  const [providerId = "", modelId = ""] = slug.split("/", 2)
  return { providerId, modelId }
}

export function hasInvalidTokenUsageMetadata(message: UIMessage | undefined): boolean {
  if (!message) return false

  const meta = message.metadata

  // Must exist and be a non-array object
  if (meta == null || typeof meta !== "object" || Array.isArray(meta)) return true

  const m = meta as Record<string, unknown>

  // Required fields
  const hasCanonicalSlug = typeof m.canonicalSlug === "string"
  const hasTotalUsage = typeof m.totalUsage === "object" && m.totalUsage !== null && !Array.isArray(m.totalUsage)

  return !(hasCanonicalSlug && hasTotalUsage)
}

// src/core/hooks/use-models.ts

"use client"

import useSWR from "swr"
import { FetchError } from "../errors"
import type { Provider } from "../types"

async function fetchModels<T>(url: string) {
  const res = await fetch(url)

  if (!res.ok) {
    throw new FetchError(res.status, await res.json())
  }

  return res.json() as Promise<T>
}

export function useModels() {
  const { data, isLoading, error } = useSWR<Record<string, Provider>, FetchError>("/__models.dev", fetchModels)

  return {
    data,
    isLoading,
    error,
  }
}
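A worked example (illustrative numbers, not from the library) of the accounting policy encoded in PROVIDER_POLICY above: for providers flagged `reasoningBakedIn`, reasoning tokens are already counted inside `outputTokens`, so normalization subtracts them to avoid double counting; for the other providers the reported output is kept as is.

// Illustrative usage report attached by the server as message metadata.
const totalUsage = { inputTokens: 1200, outputTokens: 800, reasoningTokens: 300, cachedInputTokens: 50 }

// canonicalSlug "openai/..." (reasoningBakedIn: true):
//   normalized output = max(0, 800 - 300) = 500, reasoning reported separately as 300
// canonicalSlug "anthropic/..." or "google/..." (reasoningBakedIn: false):
//   normalized output = 800, reasoning = 300, no subtraction
const bakedInOutput = Math.max(0, totalUsage.outputTokens - totalUsage.reasoningTokens)
console.log(bakedInOutput) // 500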
// src/core/hooks/use-model-details.ts

"use client"

import { CostComputationError, ModelNotFoundError } from "../errors"
import type { ModelDetails, Result } from "../types"
import { parseCanonicalSlug, resultError, resultLoading, resultSuccess } from "./helpers"
import { useModels } from "./use-models"

/**
 * React hook that retrieves **model details**, such as pricing and token limits.
 *
 * The hook derives model information directly from the model registry based on
 * the provided {@link canonicalSlug}. It returns structured data including
 * per-token pricing (input, output, reasoning, cached input) and context/output
 * token limits.
 *
 * Internally, the hook leverages SWR and follows the SWR-style return pattern with
 * `data`, `isLoading`, and `error` states for consistent asynchronous handling.
 *
 * @param params - The parameters for the hook.
 * @param params.canonicalSlug - The canonical model identifier, composed of provider and model ID
 * (e.g. `"openai/gpt-5"`).
 *
 * @returns A {@link Result} object containing:
 * - `data`: The resolved {@link ModelDetails}, including pricing and limits.
 * - `isLoading`: Whether model data is still being loaded.
 * - `error`: A {@link TokenUsageError} if an error occurred.
 */
export function useModelDetails({ canonicalSlug }: { canonicalSlug: string }): Result<ModelDetails> {
  const { data: models, isLoading, error } = useModels()

  if (isLoading) return resultLoading()
  if (error) return resultError(error.toJSON())

  const { providerId, modelId } = parseCanonicalSlug(canonicalSlug)

  const model = models?.[providerId]?.models[modelId]
  if (!model) {
    return resultError(new ModelNotFoundError({ providerId, modelId }).toJSON())
  }
  if (!model.cost) {
    return resultError(new CostComputationError({ providerId, modelId }).toJSON())
  }

  const cost = model.cost
  const limit = model.limit

  const modelDetails: ModelDetails = {
    canonicalSlug,
    pricing: {
      input: cost.input,
      output: cost.output,
      reasoning: cost.reasoning ?? cost.output,
      cachedInput: cost.cache_read ?? cost.input,
    },
    limit,
  }

  return resultSuccess<ModelDetails>(modelDetails)
}
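A sketch of a component consuming this hook. The `ai-sdk-token-usage` import path and the TSX wiring are assumptions; pricing is labeled per million tokens, matching the cost math in use-token-cost.ts below.

"use client"

// Hypothetical consumer; assumes useModelDetails and formatPrice are exported from the package entry point.
import { formatPrice, useModelDetails } from "ai-sdk-token-usage"

export function ModelPricing({ canonicalSlug }: { canonicalSlug: string }) {
  const { data, isLoading, error } = useModelDetails({ canonicalSlug })

  if (isLoading) return <p>Loading model details…</p>
  if (error) return <p>{error.message}</p>
  if (!data) return null

  return (
    <p>
      {data.canonicalSlug}: {formatPrice(data.pricing.input)} / 1M input tokens,{" "}
      {formatPrice(data.pricing.output)} / 1M output tokens
    </p>
  )
}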
// src/core/hooks/use-token-context.ts

"use client"

import type { UIMessage } from "ai"
import { useMemo } from "react"
import { MissingMetadataError, ModelNotFoundError } from "../errors"
import type { Context, Model, Result } from "../types"
import {
  hasInvalidTokenUsageMetadata,
  normalizeTokenUsage,
  parseCanonicalSlug,
  resultError,
  resultLoading,
  resultSuccess,
  toTokenUsageError,
} from "./helpers"
import { useModels } from "./use-models"

function findLast<T>(arr: readonly T[], pred: (x: T) => boolean): T | undefined {
  for (let i = arr.length - 1; i >= 0; i--) {
    const v = arr[i]
    if (v === undefined) continue
    if (pred(v)) return v
  }
  return undefined
}

function computeContext(message: UIMessage | undefined, model: Model): Context {
  if (message && hasInvalidTokenUsageMetadata(message)) {
    throw new MissingMetadataError({ message, metadata: message.metadata })
  }

  const breakdown = message ? normalizeTokenUsage(message, true) : { input: 0, output: 0, reasoning: 0, cachedInput: 0 }

  const used = Object.values(breakdown).reduce((sum, v) => sum + v, 0)
  const limit = model.limit.context
  const remaining = Math.max(0, limit - used)
  const fractionUsed = limit > 0 ? used / limit : 0
  const percentageUsed = fractionUsed * 100
  const isExceeded = used > limit

  return {
    breakdown,
    used,
    limit,
    remaining,
    fractionUsed,
    percentageUsed,
    isExceeded,
  }
}

/**
 * React hook that provides insight into how much of the model’s context window is remaining.
 *
 * The hook derives usage information directly from the message metadata and the model’s defined
 * context window. It returns pre-computed values such as used and remaining tokens, percentage used,
 * and whether the context limit has been exceeded.
 *
 * Internally, the hook leverages SWR and follows the SWR-style return pattern with
 * `data`, `isLoading`, and `error` states for consistent asynchronous handling.
 *
 * @param params - The parameters for the hook.
 * @param params.messages - The messages in the chat, typically returned from `useChat`.
 * @param params.canonicalSlug - The canonical model identifier, composed of provider and model ID (e.g. `"openai/gpt-5"`).
 *
 * @returns A {@link Result} object containing:
 * - `data`: The computed {@link Context} with usage metrics.
 * - `isLoading`: Whether model data is still being loaded.
 * - `error`: A {@link TokenUsageError} if an error occurred.
 */
export function useTokenContext({
  messages,
  canonicalSlug,
}: {
  messages: readonly UIMessage[]
  canonicalSlug: string
}): Result<Context> {
  const { data: models, isLoading, error } = useModels()

  const mostRecentAssistantMessage = useMemo(
    () => findLast(messages, (m) => m.role === "assistant" && m.metadata !== undefined),
    [messages],
  )

  if (isLoading) return resultLoading()
  if (error) return resultError(error.toJSON())

  const { providerId, modelId } = parseCanonicalSlug(canonicalSlug)

  const model = models?.[providerId]?.models[modelId]
  if (!model) {
    return resultError(new ModelNotFoundError({ modelId, providerId }).toJSON())
  }

  try {
    return resultSuccess<Context>(computeContext(mostRecentAssistantMessage, model))
  } catch (err) {
    return resultError(toTokenUsageError(err))
  }
}
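A sketch of a context-window meter wired to the AI SDK's `useChat`. The imports and the `canonicalSlug` value are assumptions; the server must attach `{ totalUsage, canonicalSlug }` to assistant message metadata, per MissingMetadataError above.

"use client"

// Hypothetical consumer; assumes AI SDK v5 React bindings and package entry-point exports.
import { useChat } from "@ai-sdk/react"
import { formatTokenAmount, useTokenContext } from "ai-sdk-token-usage"

export function ContextMeter() {
  const { messages } = useChat()
  const { data, isLoading, error } = useTokenContext({ messages, canonicalSlug: "openai/gpt-5" })

  if (isLoading || error || !data) return null

  return (
    <span>
      {formatTokenAmount(data.used)} / {formatTokenAmount(data.limit)} tokens ({data.percentageUsed.toFixed(1)}% used)
    </span>
  )
}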
// src/core/hooks/use-token-cost.ts

"use client"

import type { UIMessage } from "ai"
import { useMemo } from "react"
import type { TokenUsageMetadata } from "../../shared-types"
import { CostComputationError, MissingMetadataError, ModelNotFoundError } from "../errors"
import type { Cost, CostBreakdown, ModelResolver, Result } from "../types"
import {
  hasInvalidTokenUsageMetadata,
  normalizeTokenUsage,
  parseCanonicalSlug,
  resultError,
  resultLoading,
  resultSuccess,
  toTokenUsageError,
} from "./helpers"
import { useModels } from "./use-models"

function computeCost(messages: readonly UIMessage[], resolveModel: ModelResolver): Cost {
  const breakdown: CostBreakdown = {
    input: { amount: 0, cost: 0 },
    output: { amount: 0, cost: 0 },
    reasoning: { amount: 0, cost: 0 },
    cachedInput: { amount: 0, cost: 0 },
  }

  messages.forEach((m) => {
    if (hasInvalidTokenUsageMetadata(m)) {
      throw new MissingMetadataError({ message: m, metadata: m.metadata })
    }

    const { canonicalSlug } = m.metadata as TokenUsageMetadata
    const { providerId, modelId } = parseCanonicalSlug(canonicalSlug)

    const model = resolveModel({ providerId, modelId })

    if (!model) {
      throw new ModelNotFoundError({ providerId, modelId })
    }
    if (!model.cost) {
      throw new CostComputationError({ providerId, modelId })
    }

    const tokens = normalizeTokenUsage(m)
    const cost = model.cost

    breakdown.input.amount += tokens.input
    breakdown.input.cost += (tokens.input / 1_000_000) * cost.input

    breakdown.output.amount += tokens.output
    breakdown.output.cost += (tokens.output / 1_000_000) * cost.output

    breakdown.reasoning.amount += tokens.reasoning
    breakdown.reasoning.cost += (tokens.reasoning / 1_000_000) * (cost.reasoning ?? cost.output)

    breakdown.cachedInput.amount += tokens.cachedInput
    breakdown.cachedInput.cost += (tokens.cachedInput / 1_000_000) * (cost.cache_read ?? cost.input)
  })

  const total = Object.values(breakdown).reduce((sum, v) => sum + v.cost, 0)

  return { breakdown, total, currency: "USD" }
}

/**
 * React hook that computes the **monetary cost** of token usage for assistant messages.
 *
 * The hook derives cost directly from message metadata and the model’s pricing
 * information. It returns pre-computed values such as the total cost, a detailed
 * cost breakdown per token type (input, output, reasoning, cached input), and the
 * currency.
 *
 * Internally, the hook leverages SWR and follows the SWR-style return pattern with
 * `data`, `isLoading`, and `error` states for consistent asynchronous handling.
 *
 * @param params - The parameters for the hook. Exactly one of the following must be provided:
 * - `messages`: All chat messages (typically returned from `useChat`).
 * - `message`: A single assistant message.
 *
 * @returns A {@link Result} object containing:
 * - `data`: The computed {@link Cost} with `breakdown`, `total`, and `currency`.
 * - `isLoading`: Whether model pricing data is still being loaded.
 * - `error`: A {@link TokenUsageError} if an error occurred.
 */
export function useTokenCost({ messages }: { messages: readonly UIMessage[] }): Result<Cost>
export function useTokenCost({ message }: { message: UIMessage }): Result<Cost>

export function useTokenCost(params: { messages: readonly UIMessage[] } | { message: UIMessage }): Result<Cost> {
  const { data: models, isLoading, error } = useModels()

  const messages: readonly UIMessage[] = "messages" in params ? params.messages : [params.message]
  const assistantMessages = useMemo(
    () => messages.filter((m) => m.role === "assistant" && m.metadata !== undefined),
    [messages],
  )

  if (isLoading) return resultLoading()
  if (error) return resultError(error.toJSON())

  const resolveModel: ModelResolver = ({ providerId, modelId }) => models?.[providerId]?.models?.[modelId]

  try {
    return resultSuccess<Cost>(computeCost(assistantMessages, resolveModel))
  } catch (err) {
    return resultError(toTokenUsageError(err))
  }
}
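And a sketch of a running cost readout for a conversation, under the same assumptions as the previous example.

"use client"

// Hypothetical consumer; assumes AI SDK v5 React bindings and package entry-point exports.
import { useChat } from "@ai-sdk/react"
import { formatPrice, useTokenCost } from "ai-sdk-token-usage"

export function ConversationCost() {
  const { messages } = useChat()
  const { data, isLoading, error } = useTokenCost({ messages })

  if (isLoading || error || !data) return null

  return (
    <span>
      {formatPrice(data.total, data.currency)} total ({formatPrice(data.breakdown.input.cost)} input,{" "}
      {formatPrice(data.breakdown.output.cost)} output)
    </span>
  )
}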