firebase
Version:
Firebase JavaScript library for web and Node.js
1 lines • 51.4 kB
JavaScript
import{_isFirebaseServerApp as e,_getProvider,getApp as t,_registerComponent as n,registerVersion as s}from"https://www.gstatic.com/firebasejs/12.5.0/firebase-app.js";class Deferred{constructor(){this.reject=()=>{},this.resolve=()=>{},this.promise=new Promise(((e,t)=>{this.resolve=e,this.reject=t}))}wrapCallback(e){return(t,n)=>{t?this.reject(t):this.resolve(n),"function"==typeof e&&(this.promise.catch((()=>{})),1===e.length?e(t):e(t,n))}}}class FirebaseError extends Error{constructor(e,t,n){super(t),this.code=e,this.customData=n,this.name="FirebaseError",Object.setPrototypeOf(this,FirebaseError.prototype),Error.captureStackTrace&&Error.captureStackTrace(this,ErrorFactory.prototype.create)}}class ErrorFactory{constructor(e,t,n){this.service=e,this.serviceName=t,this.errors=n}create(e,...t){const n=t[0]||{},s=`${this.service}/${e}`,r=this.errors[e],i=r?function replaceTemplate(e,t){return e.replace(o,((e,n)=>{const s=t[n];return null!=s?String(s):`<${n}?>`}))}(r,n):"Error",a=`${this.serviceName}: ${i} (${s}).`;return new FirebaseError(s,a,n)}}const o=/\{\$([^}]+)}/g;class Component{constructor(e,t,n){this.name=e,this.instanceFactory=t,this.type=n,this.multipleInstances=!1,this.serviceProps={},this.instantiationMode="LAZY",this.onInstanceCreated=null}setInstantiationMode(e){return this.instantiationMode=e,this}setMultipleInstances(e){return this.multipleInstances=e,this}setServiceProps(e){return this.serviceProps=e,this}setInstanceCreatedCallback(e){return this.onInstanceCreated=e,this}}var r;!function(e){e[e.DEBUG=0]="DEBUG",e[e.VERBOSE=1]="VERBOSE",e[e.INFO=2]="INFO",e[e.WARN=3]="WARN",e[e.ERROR=4]="ERROR",e[e.SILENT=5]="SILENT"}(r||(r={}));const i={debug:r.DEBUG,verbose:r.VERBOSE,info:r.INFO,warn:r.WARN,error:r.ERROR,silent:r.SILENT},a=r.INFO,c={[r.DEBUG]:"log",[r.VERBOSE]:"log",[r.INFO]:"info",[r.WARN]:"warn",[r.ERROR]:"error"},defaultLogHandler=(e,t,...n)=>{if(t<e.logLevel)return;const s=(new Date).toISOString(),o=c[t];if(!o)throw new Error(`Attempted to log a message with an invalid logType (value: ${t})`);console[o](`[${s}] ${e.name}:`,...n)};var d="@firebase/ai",l="2.5.0";const p="AI",h="us-central1",u="firebasevertexai.googleapis.com",m=l;class AIError extends FirebaseError{constructor(e,t,n){const s=`${p}: ${t} (${`${p}/${e}`})`;super(e,s),this.code=e,this.customErrorData=n,Error.captureStackTrace&&Error.captureStackTrace(this,AIError),Object.setPrototypeOf(this,AIError.prototype),this.toString=()=>s}}const E=["user","model","function","system"],g={HARM_CATEGORY_HATE_SPEECH:"HARM_CATEGORY_HATE_SPEECH",HARM_CATEGORY_SEXUALLY_EXPLICIT:"HARM_CATEGORY_SEXUALLY_EXPLICIT",HARM_CATEGORY_HARASSMENT:"HARM_CATEGORY_HARASSMENT",HARM_CATEGORY_DANGEROUS_CONTENT:"HARM_CATEGORY_DANGEROUS_CONTENT"},f={BLOCK_LOW_AND_ABOVE:"BLOCK_LOW_AND_ABOVE",BLOCK_MEDIUM_AND_ABOVE:"BLOCK_MEDIUM_AND_ABOVE",BLOCK_ONLY_HIGH:"BLOCK_ONLY_HIGH",BLOCK_NONE:"BLOCK_NONE",OFF:"OFF"},S={SEVERITY:"SEVERITY",PROBABILITY:"PROBABILITY"},R={NEGLIGIBLE:"NEGLIGIBLE",LOW:"LOW",MEDIUM:"MEDIUM",HIGH:"HIGH"},I={HARM_SEVERITY_NEGLIGIBLE:"HARM_SEVERITY_NEGLIGIBLE",HARM_SEVERITY_LOW:"HARM_SEVERITY_LOW",HARM_SEVERITY_MEDIUM:"HARM_SEVERITY_MEDIUM",HARM_SEVERITY_HIGH:"HARM_SEVERITY_HIGH",HARM_SEVERITY_UNSUPPORTED:"HARM_SEVERITY_UNSUPPORTED"},A={SAFETY:"SAFETY",OTHER:"OTHER",BLOCKLIST:"BLOCKLIST",PROHIBITED_CONTENT:"PROHIBITED_CONTENT"},O={STOP:"STOP",MAX_TOKENS:"MAX_TOKENS",SAFETY:"SAFETY",RECITATION:"RECITATION",OTHER:"OTHER",BLOCKLIST:"BLOCKLIST",PROHIBITED_CONTENT:"PROHIBITED_CONTENT",SPII:"SPII",MALFORMED_FUNCTION_CALL:"MALFORMED_FUNCTION_CALL"},y={AUTO:"AUTO",ANY:"ANY",NONE:"NONE"},_={MODALITY_UNSPECIFIED:"MODALITY_UNSPECIFIED",TEXT:"TEXT",IMAGE:"IMAGE",VIDEO:"VIDEO",AUDIO:"AUDIO",DOCUMENT:"DOCUMENT"},C={TEXT:"TEXT",IMAGE:"IMAGE",AUDIO:"AUDIO"},w={PREFER_ON_DEVICE:"prefer_on_device",ONLY_ON_DEVICE:"only_on_device",ONLY_IN_CLOUD:"only_in_cloud",PREFER_IN_CLOUD:"prefer_in_cloud"},T={ON_DEVICE:"on_device",IN_CLOUD:"in_cloud"},b={UNSPECIFIED:"OUTCOME_UNSPECIFIED",OK:"OUTCOME_OK",FAILED:"OUTCOME_FAILED",DEADLINE_EXCEEDED:"OUTCOME_DEADLINE_EXCEEDED"},N={UNSPECIFIED:"LANGUAGE_UNSPECIFIED",PYTHON:"PYTHON"},L={URL_RETRIEVAL_STATUS_UNSPECIFIED:"URL_RETRIEVAL_STATUS_UNSPECIFIED",URL_RETRIEVAL_STATUS_SUCCESS:"URL_RETRIEVAL_STATUS_SUCCESS",URL_RETRIEVAL_STATUS_ERROR:"URL_RETRIEVAL_STATUS_ERROR",URL_RETRIEVAL_STATUS_PAYWALL:"URL_RETRIEVAL_STATUS_PAYWALL",URL_RETRIEVAL_STATUS_UNSAFE:"URL_RETRIEVAL_STATUS_UNSAFE"},v={SERVER_CONTENT:"serverContent",TOOL_CALL:"toolCall",TOOL_CALL_CANCELLATION:"toolCallCancellation"},k={ERROR:"error",REQUEST_ERROR:"request-error",RESPONSE_ERROR:"response-error",FETCH_ERROR:"fetch-error",SESSION_CLOSED:"session-closed",INVALID_CONTENT:"invalid-content",API_NOT_ENABLED:"api-not-enabled",INVALID_SCHEMA:"invalid-schema",NO_API_KEY:"no-api-key",NO_APP_ID:"no-app-id",NO_MODEL:"no-model",NO_PROJECT_ID:"no-project-id",PARSE_FAILED:"parse-failed",UNSUPPORTED:"unsupported"},P={STRING:"string",NUMBER:"number",INTEGER:"integer",BOOLEAN:"boolean",ARRAY:"array",OBJECT:"object"},D={BLOCK_LOW_AND_ABOVE:"block_low_and_above",BLOCK_MEDIUM_AND_ABOVE:"block_medium_and_above",BLOCK_ONLY_HIGH:"block_only_high",BLOCK_NONE:"block_none"},M={BLOCK_ALL:"dont_allow",ALLOW_ADULT:"allow_adult",ALLOW_ALL:"allow_all"},U={SQUARE:"1:1",LANDSCAPE_3x4:"3:4",PORTRAIT_4x3:"4:3",LANDSCAPE_16x9:"16:9",PORTRAIT_9x16:"9:16"},G={VERTEX_AI:"VERTEX_AI",GOOGLE_AI:"GOOGLE_AI"};class Backend{constructor(e){this.backendType=e}}class GoogleAIBackend extends Backend{constructor(){super(G.GOOGLE_AI)}}class VertexAIBackend extends Backend{constructor(e=h){super(G.VERTEX_AI),this.location=e||h}}const x=new class Logger{constructor(e){this.name=e,this._logLevel=a,this._logHandler=defaultLogHandler,this._userLogHandler=null}get logLevel(){return this._logLevel}set logLevel(e){if(!(e in r))throw new TypeError(`Invalid value "${e}" assigned to \`logLevel\``);this._logLevel=e}setLogLevel(e){this._logLevel="string"==typeof e?i[e]:e}get logHandler(){return this._logHandler}set logHandler(e){if("function"!=typeof e)throw new TypeError("Value assigned to `logHandler` must be a function");this._logHandler=e}get userLogHandler(){return this._userLogHandler}set userLogHandler(e){this._userLogHandler=e}debug(...e){this._userLogHandler&&this._userLogHandler(this,r.DEBUG,...e),this._logHandler(this,r.DEBUG,...e)}log(...e){this._userLogHandler&&this._userLogHandler(this,r.VERBOSE,...e),this._logHandler(this,r.VERBOSE,...e)}info(...e){this._userLogHandler&&this._userLogHandler(this,r.INFO,...e),this._logHandler(this,r.INFO,...e)}warn(...e){this._userLogHandler&&this._userLogHandler(this,r.WARN,...e),this._logHandler(this,r.WARN,...e)}error(...e){this._userLogHandler&&this._userLogHandler(this,r.ERROR,...e),this._logHandler(this,r.ERROR,...e)}}("@firebase/vertexai");var H;!function(e){e.UNAVAILABLE="unavailable",e.DOWNLOADABLE="downloadable",e.DOWNLOADING="downloading",e.AVAILABLE="available"}(H||(H={}));const B=[{type:"image"}];class ChromeAdapterImpl{constructor(e,t,n){this.languageModelProvider=e,this.mode=t,this.isDownloading=!1,this.onDeviceParams={createOptions:{expectedInputs:B}},n&&(this.onDeviceParams=n,this.onDeviceParams.createOptions?this.onDeviceParams.createOptions.expectedInputs||(this.onDeviceParams.createOptions.expectedInputs=B):this.onDeviceParams.createOptions={expectedInputs:B})}async isAvailable(e){if(!this.mode)return x.debug("On-device inference unavailable because mode is undefined."),!1;if(this.mode===w.ONLY_IN_CLOUD)return x.debug('On-device inference unavailable because mode is "only_in_cloud".'),!1;const t=await this.downloadIfAvailable();if(this.mode===w.ONLY_ON_DEVICE){if(t===H.UNAVAILABLE)throw new AIError(k.API_NOT_ENABLED,"Local LanguageModel API not available in this environment.");return t!==H.DOWNLOADABLE&&t!==H.DOWNLOADING||(x.debug("Waiting for download of LanguageModel to complete."),await this.downloadPromise,!0)}return t!==H.AVAILABLE?(x.debug(`On-device inference unavailable because availability is "${t}".`),!1):!!ChromeAdapterImpl.isOnDeviceRequest(e)||(x.debug("On-device inference unavailable because request is incompatible."),!1)}async generateContent(e){const t=await this.createSession(),n=await Promise.all(e.contents.map(ChromeAdapterImpl.toLanguageModelMessage)),s=await t.prompt(n,this.onDeviceParams.promptOptions);return ChromeAdapterImpl.toResponse(s)}async generateContentStream(e){const t=await this.createSession(),n=await Promise.all(e.contents.map(ChromeAdapterImpl.toLanguageModelMessage)),s=t.promptStreaming(n,this.onDeviceParams.promptOptions);return ChromeAdapterImpl.toStreamResponse(s)}async countTokens(e){throw new AIError(k.REQUEST_ERROR,"Count Tokens is not yet available for on-device model.")}static isOnDeviceRequest(e){if(0===e.contents.length)return x.debug("Empty prompt rejected for on-device inference."),!1;for(const t of e.contents){if("function"===t.role)return x.debug('"Function" role rejected for on-device inference.'),!1;for(const e of t.parts)if(e.inlineData&&-1===ChromeAdapterImpl.SUPPORTED_MIME_TYPES.indexOf(e.inlineData.mimeType))return x.debug(`Unsupported mime type "${e.inlineData.mimeType}" rejected for on-device inference.`),!1}return!0}async downloadIfAvailable(){const e=await(this.languageModelProvider?.availability(this.onDeviceParams.createOptions));return e===H.DOWNLOADABLE&&this.download(),e}download(){this.isDownloading||(this.isDownloading=!0,this.downloadPromise=this.languageModelProvider?.create(this.onDeviceParams.createOptions).finally((()=>{this.isDownloading=!1})))}static async toLanguageModelMessage(e){const t=await Promise.all(e.parts.map(ChromeAdapterImpl.toLanguageModelMessageContent));return{role:ChromeAdapterImpl.toLanguageModelMessageRole(e.role),content:t}}static async toLanguageModelMessageContent(e){if(e.text)return{type:"text",value:e.text};if(e.inlineData){const t=await fetch(`data:${e.inlineData.mimeType};base64,${e.inlineData.data}`),n=await t.blob();return{type:"image",value:await createImageBitmap(n)}}throw new AIError(k.REQUEST_ERROR,"Processing of this Part type is not currently supported.")}static toLanguageModelMessageRole(e){return"model"===e?"assistant":"user"}async createSession(){if(!this.languageModelProvider)throw new AIError(k.UNSUPPORTED,"Chrome AI requested for unsupported browser version.");const e=await this.languageModelProvider.create(this.onDeviceParams.createOptions);return this.oldSession&&this.oldSession.destroy(),this.oldSession=e,e}static toResponse(e){return{json:async()=>({candidates:[{content:{parts:[{text:e}]}}]})}}static toStreamResponse(e){const t=new TextEncoder;return{body:e.pipeThrough(new TransformStream({transform(e,n){const s=JSON.stringify({candidates:[{content:{role:"model",parts:[{text:e}]}}]});n.enqueue(t.encode(`data: ${s}\n\n`))}}))}}}function chromeAdapterFactory(e,t,n){if(void 0!==t&&e)return new ChromeAdapterImpl(t.LanguageModel,e,n)}ChromeAdapterImpl.SUPPORTED_MIME_TYPES=["image/jpeg","image/png"];class AIService{constructor(e,t,n,s,o){this.app=e,this.backend=t,this.chromeAdapterFactory=o;const r=s?.getImmediate({optional:!0}),i=n?.getImmediate({optional:!0});this.auth=i||null,this.appCheck=r||null,this.location=t instanceof VertexAIBackend?t.location:""}_delete(){return Promise.resolve()}set options(e){this._options=e}get options(){return this._options}}function factory(e,{instanceIdentifier:t}){if(!t)throw new AIError(k.ERROR,"AIService instance identifier is undefined.");const n=function decodeInstanceIdentifier(e){const t=e.split("/");if(t[0]!==p)throw new AIError(k.ERROR,`Invalid instance identifier, unknown prefix '${t[0]}'`);switch(t[1]){case"vertexai":const n=t[2];if(!n)throw new AIError(k.ERROR,`Invalid instance identifier, unknown location '${e}'`);return new VertexAIBackend(n);case"googleai":return new GoogleAIBackend;default:throw new AIError(k.ERROR,`Invalid instance identifier string: '${e}'`)}}(t),s=e.getProvider("app").getImmediate(),o=e.getProvider("auth-internal"),r=e.getProvider("app-check-internal");return new AIService(s,n,o,r,chromeAdapterFactory)}class AIModel{constructor(t,n){if(!t.app?.options?.apiKey)throw new AIError(k.NO_API_KEY,'The "apiKey" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid API key.');if(!t.app?.options?.projectId)throw new AIError(k.NO_PROJECT_ID,'The "projectId" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid project ID.');if(!t.app?.options?.appId)throw new AIError(k.NO_APP_ID,'The "appId" field is empty in the local Firebase config. Firebase AI requires this field to contain a valid app ID.');if(this._apiSettings={apiKey:t.app.options.apiKey,project:t.app.options.projectId,appId:t.app.options.appId,automaticDataCollectionEnabled:t.app.automaticDataCollectionEnabled,location:t.location,backend:t.backend},e(t.app)&&t.app.settings.appCheckToken){const e=t.app.settings.appCheckToken;this._apiSettings.getAppCheckToken=()=>Promise.resolve({token:e})}else t.appCheck&&(t.options?.useLimitedUseAppCheckTokens?this._apiSettings.getAppCheckToken=()=>t.appCheck.getLimitedUseToken():this._apiSettings.getAppCheckToken=()=>t.appCheck.getToken());t.auth&&(this._apiSettings.getAuthToken=()=>t.auth.getToken()),this.model=AIModel.normalizeModelName(n,this._apiSettings.backend.backendType)}static normalizeModelName(e,t){return t===G.GOOGLE_AI?AIModel.normalizeGoogleAIModelName(e):AIModel.normalizeVertexAIModelName(e)}static normalizeGoogleAIModelName(e){return`models/${e}`}static normalizeVertexAIModelName(e){let t;return t=e.includes("/")?e.startsWith("models/")?`publishers/google/${e}`:e:`publishers/google/models/${e}`,t}}var F;!function(e){e.GENERATE_CONTENT="generateContent",e.STREAM_GENERATE_CONTENT="streamGenerateContent",e.COUNT_TOKENS="countTokens",e.PREDICT="predict"}(F||(F={}));class RequestUrl{constructor(e,t,n,s,o){this.model=e,this.task=t,this.apiSettings=n,this.stream=s,this.requestOptions=o}toString(){const e=new URL(this.baseUrl);return e.pathname=`/${this.apiVersion}/${this.modelPath}:${this.task}`,e.search=this.queryParams.toString(),e.toString()}get baseUrl(){return this.requestOptions?.baseUrl||`https://${u}`}get apiVersion(){return"v1beta"}get modelPath(){if(this.apiSettings.backend instanceof GoogleAIBackend)return`projects/${this.apiSettings.project}/${this.model}`;if(this.apiSettings.backend instanceof VertexAIBackend)return`projects/${this.apiSettings.project}/locations/${this.apiSettings.backend.location}/${this.model}`;throw new AIError(k.ERROR,`Invalid backend: ${JSON.stringify(this.apiSettings.backend)}`)}get queryParams(){const e=new URLSearchParams;return this.stream&&e.set("alt","sse"),e}}class WebSocketUrl{constructor(e){this.apiSettings=e}toString(){const e=new URL(`wss://${u}`);e.pathname=this.pathname;const t=new URLSearchParams;return t.set("key",this.apiSettings.apiKey),e.search=t.toString(),e.toString()}get pathname(){return this.apiSettings.backend.backendType===G.GOOGLE_AI?"ws/google.firebase.vertexai.v1beta.GenerativeService/BidiGenerateContent":`ws/google.firebase.vertexai.v1beta.LlmBidiService/BidiGenerateContent/locations/${this.apiSettings.location}`}}async function getHeaders(e){const t=new Headers;if(t.append("Content-Type","application/json"),t.append("x-goog-api-client",function getClientHeaders(){const e=[];return e.push(`gl-js/${m}`),e.push(`fire/${m}`),e.join(" ")}()),t.append("x-goog-api-key",e.apiSettings.apiKey),e.apiSettings.automaticDataCollectionEnabled&&t.append("X-Firebase-Appid",e.apiSettings.appId),e.apiSettings.getAppCheckToken){const n=await e.apiSettings.getAppCheckToken();n&&(t.append("X-Firebase-AppCheck",n.token),n.error&&x.warn(`Unable to obtain a valid App Check token: ${n.error.message}`))}if(e.apiSettings.getAuthToken){const n=await e.apiSettings.getAuthToken();n&&t.append("Authorization",`Firebase ${n.accessToken}`)}return t}async function makeRequest(e,t,n,s,o,r){const i=new RequestUrl(e,t,n,s,r);let a,c;try{const d=await async function constructRequest(e,t,n,s,o,r){const i=new RequestUrl(e,t,n,s,r);return{url:i.toString(),fetchOptions:{method:"POST",headers:await getHeaders(i),body:o}}}(e,t,n,s,o,r),l=null!=r?.timeout&&r.timeout>=0?r.timeout:18e4,p=new AbortController;if(c=setTimeout((()=>p.abort()),l),d.fetchOptions.signal=p.signal,a=await fetch(d.url,d.fetchOptions),!a.ok){let e,t="";try{const n=await a.json();t=n.error.message,n.error.details&&(t+=` ${JSON.stringify(n.error.details)}`,e=n.error.details)}catch(e){}if(403===a.status&&e&&e.some((e=>"SERVICE_DISABLED"===e.reason))&&e.some((e=>e.links?.[0]?.description.includes("Google developers console API activation"))))throw new AIError(k.API_NOT_ENABLED,`The Firebase AI SDK requires the Firebase AI API ('firebasevertexai.googleapis.com') to be enabled in your Firebase project. Enable this API by visiting the Firebase Console at https://console.firebase.google.com/project/${i.apiSettings.project}/genai/ and clicking "Get started". If you enabled this API recently, wait a few minutes for the action to propagate to our systems and then retry.`,{status:a.status,statusText:a.statusText,errorDetails:e});throw new AIError(k.FETCH_ERROR,`Error fetching from ${i}: [${a.status} ${a.statusText}] ${t}`,{status:a.status,statusText:a.statusText,errorDetails:e})}}catch(e){let t=e;throw e.code!==k.FETCH_ERROR&&e.code!==k.API_NOT_ENABLED&&e instanceof Error&&(t=new AIError(k.ERROR,`Error fetching from ${i.toString()}: ${e.message}`),t.stack=e.stack),t}finally{c&&clearTimeout(c)}return a}function hasValidCandidates(e){if(e.candidates&&e.candidates.length>0){if(e.candidates.length>1&&x.warn(`This response had ${e.candidates.length} candidates. Returning text from the first candidate only. Access response.candidates directly to use the other candidates.`),hadBadFinishReason(e.candidates[0]))throw new AIError(k.RESPONSE_ERROR,`Response error: ${formatBlockErrorMessage(e)}. Response body stored in error.response`,{response:e});return!0}return!1}function createEnhancedContentResponse(e,t=T.IN_CLOUD){e.candidates&&!e.candidates[0].hasOwnProperty("index")&&(e.candidates[0].index=0);const n=function addHelpers(e){return e.text=()=>{if(hasValidCandidates(e))return getText(e,(e=>!e.thought));if(e.promptFeedback)throw new AIError(k.RESPONSE_ERROR,`Text not available. ${formatBlockErrorMessage(e)}`,{response:e});return""},e.thoughtSummary=()=>{if(hasValidCandidates(e)){const t=getText(e,(e=>!!e.thought));return""===t?void 0:t}if(e.promptFeedback)throw new AIError(k.RESPONSE_ERROR,`Thought summary not available. ${formatBlockErrorMessage(e)}`,{response:e})},e.inlineDataParts=()=>{if(hasValidCandidates(e))return function getInlineDataParts(e){const t=[];if(e.candidates?.[0].content?.parts)for(const n of e.candidates?.[0].content?.parts)n.inlineData&&t.push(n);return t.length>0?t:void 0}(e);if(e.promptFeedback)throw new AIError(k.RESPONSE_ERROR,`Data not available. ${formatBlockErrorMessage(e)}`,{response:e})},e.functionCalls=()=>{if(hasValidCandidates(e))return function getFunctionCalls(e){const t=[];if(e.candidates?.[0].content?.parts)for(const n of e.candidates?.[0].content?.parts)n.functionCall&&t.push(n.functionCall);return t.length>0?t:void 0}(e);if(e.promptFeedback)throw new AIError(k.RESPONSE_ERROR,`Function call not available. ${formatBlockErrorMessage(e)}`,{response:e})},e}(e);return n.inferenceSource=t,n}function getText(e,t){const n=[];if(e.candidates?.[0].content?.parts)for(const s of e.candidates?.[0].content?.parts)s.text&&t(s)&&n.push(s.text);return n.length>0?n.join(""):""}const $=[O.RECITATION,O.SAFETY];function hadBadFinishReason(e){return!!e.finishReason&&$.some((t=>t===e.finishReason))}function formatBlockErrorMessage(e){let t="";if(e.candidates&&0!==e.candidates.length||!e.promptFeedback){if(e.candidates?.[0]){const n=e.candidates[0];hadBadFinishReason(n)&&(t+=`Candidate was blocked due to ${n.finishReason}`,n.finishMessage&&(t+=`: ${n.finishMessage}`))}}else t+="Response was blocked",e.promptFeedback?.blockReason&&(t+=` due to ${e.promptFeedback.blockReason}`),e.promptFeedback?.blockReasonMessage&&(t+=`: ${e.promptFeedback.blockReasonMessage}`);return t}async function handlePredictResponse(e){const t=await e.json(),n=[];let s;if(!t.predictions||0===t.predictions?.length)throw new AIError(k.RESPONSE_ERROR,"No predictions or filtered reason received from Vertex AI. Please report this issue with the full error details at https://github.com/firebase/firebase-js-sdk/issues.");for(const e of t.predictions)if(e.raiFilteredReason)s=e.raiFilteredReason;else if(e.mimeType&&e.bytesBase64Encoded)n.push({mimeType:e.mimeType,bytesBase64Encoded:e.bytesBase64Encoded});else if(e.mimeType&&e.gcsUri)n.push({mimeType:e.mimeType,gcsURI:e.gcsUri});else if(!e.safetyAttributes)throw new AIError(k.RESPONSE_ERROR,`Unexpected element in 'predictions' array in response: '${JSON.stringify(e)}'`);return{images:n,filteredReason:s}}function mapGenerateContentRequest(e){if(e.safetySettings?.forEach((e=>{if(e.method)throw new AIError(k.UNSUPPORTED,"SafetySetting.method is not supported in the the Gemini Developer API. Please remove this property.")})),e.generationConfig?.topK){const t=Math.round(e.generationConfig.topK);t!==e.generationConfig.topK&&(x.warn("topK in GenerationConfig has been rounded to the nearest integer to match the format for requests to the Gemini Developer API."),e.generationConfig.topK=t)}return e}function mapGenerateContentResponse(e){return{candidates:e.candidates?mapGenerateContentCandidates(e.candidates):void 0,prompt:e.promptFeedback?mapPromptFeedback(e.promptFeedback):void 0,usageMetadata:e.usageMetadata}}function mapGenerateContentCandidates(e){const t=[];let n;return t&&e.forEach((e=>{let s;if(e.citationMetadata&&(s={citations:e.citationMetadata.citationSources}),e.safetyRatings&&(n=e.safetyRatings.map((e=>({...e,severity:e.severity??I.HARM_SEVERITY_UNSUPPORTED,probabilityScore:e.probabilityScore??0,severityScore:e.severityScore??0})))),e.content?.parts?.some((e=>e?.videoMetadata)))throw new AIError(k.UNSUPPORTED,"Part.videoMetadata is not supported in the Gemini Developer API. Please remove this property.");const o={index:e.index,content:e.content,finishReason:e.finishReason,finishMessage:e.finishMessage,safetyRatings:n,citationMetadata:s,groundingMetadata:e.groundingMetadata,urlContextMetadata:e.urlContextMetadata};t.push(o)})),t}function mapPromptFeedback(e){const t=[];e.safetyRatings.forEach((e=>{t.push({category:e.category,probability:e.probability,severity:e.severity??I.HARM_SEVERITY_UNSUPPORTED,probabilityScore:e.probabilityScore??0,severityScore:e.severityScore??0,blocked:e.blocked})}));return{blockReason:e.blockReason,safetyRatings:t,blockReasonMessage:e.blockReasonMessage}}const V=/^data\: (.*)(?:\n\n|\r\r|\r\n\r\n)/;function processStream(e,t,n){const s=function getResponseStream(e){const t=e.getReader();return new ReadableStream({start(e){let n="";return pump();function pump(){return t.read().then((({value:t,done:s})=>{if(s)return n.trim()?void e.error(new AIError(k.PARSE_FAILED,"Failed to parse stream")):void e.close();n+=t;let o,r=n.match(V);for(;r;){try{o=JSON.parse(r[1])}catch(t){return void e.error(new AIError(k.PARSE_FAILED,`Error parsing JSON response: "${r[1]}`))}e.enqueue(o),n=n.substring(r[0].length),r=n.match(V)}return pump()}))}}})}(e.body.pipeThrough(new TextDecoderStream("utf8",{fatal:!0}))),[o,r]=s.tee();return{stream:generateResponseSequence(o,t,n),response:getResponsePromise(r,t,n)}}async function getResponsePromise(e,t,n){const s=[],o=e.getReader();for(;;){const{done:e,value:r}=await o.read();if(e){let e=aggregateResponses(s);return t.backend.backendType===G.GOOGLE_AI&&(e=mapGenerateContentResponse(e)),createEnhancedContentResponse(e,n)}s.push(r)}}async function*generateResponseSequence(e,t,n){const s=e.getReader();for(;;){const{value:e,done:o}=await s.read();if(o)break;let r;r=t.backend.backendType===G.GOOGLE_AI?createEnhancedContentResponse(mapGenerateContentResponse(e),n):createEnhancedContentResponse(e,n);const i=r.candidates?.[0];(i?.content?.parts||i?.finishReason||i?.citationMetadata||i?.urlContextMetadata)&&(yield r)}}function aggregateResponses(e){const t=e[e.length-1],n={promptFeedback:t?.promptFeedback};for(const t of e)if(t.candidates)for(const e of t.candidates){const t=e.index||0;n.candidates||(n.candidates=[]),n.candidates[t]||(n.candidates[t]={index:e.index}),n.candidates[t].citationMetadata=e.citationMetadata,n.candidates[t].finishReason=e.finishReason,n.candidates[t].finishMessage=e.finishMessage,n.candidates[t].safetyRatings=e.safetyRatings,n.candidates[t].groundingMetadata=e.groundingMetadata;const s=e.urlContextMetadata;if("object"==typeof s&&null!==s&&Object.keys(s).length>0&&(n.candidates[t].urlContextMetadata=s),e.content){if(!e.content.parts)continue;n.candidates[t].content||(n.candidates[t].content={role:e.content.role||"user",parts:[]});for(const s of e.content.parts){const e={...s};""!==s.text&&(Object.keys(e).length>0&&n.candidates[t].content.parts.push(e))}}}return n}const W=[k.FETCH_ERROR,k.ERROR,k.API_NOT_ENABLED];async function callCloudOrDevice(e,t,n,s){if(!t)return{response:await s(),inferenceSource:T.IN_CLOUD};switch(t.mode){case w.ONLY_ON_DEVICE:if(await t.isAvailable(e))return{response:await n(),inferenceSource:T.ON_DEVICE};throw new AIError(k.UNSUPPORTED,"Inference mode is ONLY_ON_DEVICE, but an on-device model is not available.");case w.ONLY_IN_CLOUD:return{response:await s(),inferenceSource:T.IN_CLOUD};case w.PREFER_IN_CLOUD:try{return{response:await s(),inferenceSource:T.IN_CLOUD}}catch(e){if(e instanceof AIError&&W.includes(e.code))return{response:await n(),inferenceSource:T.ON_DEVICE};throw e}case w.PREFER_ON_DEVICE:return await t.isAvailable(e)?{response:await n(),inferenceSource:T.ON_DEVICE}:{response:await s(),inferenceSource:T.IN_CLOUD};default:throw new AIError(k.ERROR,`Unexpected infererence mode: ${t.mode}`)}}async function generateContentStream(e,t,n,s,o){const r=await callCloudOrDevice(n,s,(()=>s.generateContentStream(n)),(()=>async function generateContentStreamOnCloud(e,t,n,s){return e.backend.backendType===G.GOOGLE_AI&&(n=mapGenerateContentRequest(n)),makeRequest(t,F.STREAM_GENERATE_CONTENT,e,!0,JSON.stringify(n),s)}(e,t,n,o)));return processStream(r.response,e)}async function generateContent(e,t,n,s,o){const r=await callCloudOrDevice(n,s,(()=>s.generateContent(n)),(()=>async function generateContentOnCloud(e,t,n,s){return e.backend.backendType===G.GOOGLE_AI&&(n=mapGenerateContentRequest(n)),makeRequest(t,F.GENERATE_CONTENT,e,!1,JSON.stringify(n),s)}(e,t,n,o))),i=await async function processGenerateContentResponse(e,t){const n=await e.json();return t.backend.backendType===G.GOOGLE_AI?mapGenerateContentResponse(n):n}(r.response,e);return{response:createEnhancedContentResponse(i,r.inferenceSource)}}function formatSystemInstruction(e){if(null!=e)return"string"==typeof e?{role:"system",parts:[{text:e}]}:e.text?{role:"system",parts:[e]}:e.parts?e.role?e:{role:"system",parts:e.parts}:void 0}function formatNewContent(e){let t=[];if("string"==typeof e)t=[{text:e}];else for(const n of e)"string"==typeof n?t.push({text:n}):t.push(n);return function assignRoleToPartsAndValidateSendMessageRequest(e){const t={role:"user",parts:[]},n={role:"function",parts:[]};let s=!1,o=!1;for(const r of e)"functionResponse"in r?(n.parts.push(r),o=!0):(t.parts.push(r),s=!0);if(s&&o)throw new AIError(k.INVALID_CONTENT,"Within a single message, FunctionResponse cannot be mixed with other type of Part in the request for sending chat message.");if(!s&&!o)throw new AIError(k.INVALID_CONTENT,"No Content is provided for sending chat message.");if(s)return t;return n}(t)}function formatGenerateContentInput(e){let t;if(e.contents)t=e;else{t={contents:[formatNewContent(e)]}}return e.systemInstruction&&(t.systemInstruction=formatSystemInstruction(e.systemInstruction)),t}function createPredictRequestBody(e,{gcsURI:t,imageFormat:n,addWatermark:s,numberOfImages:o=1,negativePrompt:r,aspectRatio:i,safetyFilterLevel:a,personFilterLevel:c}){return{instances:[{prompt:e}],parameters:{storageUri:t,negativePrompt:r,sampleCount:o,aspectRatio:i,outputOptions:n,addWatermark:s,safetyFilterLevel:a,personGeneration:c,includeRaiReason:!0,includeSafetyAttributes:!0}}}const Y=["text","inlineData","functionCall","functionResponse","thought","thoughtSignature"],q={user:["text","inlineData"],function:["functionResponse"],model:["text","functionCall","thought","thoughtSignature"],system:["text"]},j={user:["model"],function:["model"],model:["user","function"],system:[]};const J="SILENT_ERROR";class ChatSession{constructor(e,t,n,s,o){this.model=t,this.chromeAdapter=n,this.params=s,this.requestOptions=o,this._history=[],this._sendPromise=Promise.resolve(),this._apiSettings=e,s?.history&&(!function validateChatHistory(e){let t=null;for(const n of e){const{role:e,parts:s}=n;if(!t&&"user"!==e)throw new AIError(k.INVALID_CONTENT,`First Content should be with role 'user', got ${e}`);if(!E.includes(e))throw new AIError(k.INVALID_CONTENT,`Each item should include role field. Got ${e} but valid roles are: ${JSON.stringify(E)}`);if(!Array.isArray(s))throw new AIError(k.INVALID_CONTENT,"Content should have 'parts' property with an array of Parts");if(0===s.length)throw new AIError(k.INVALID_CONTENT,"Each Content should have at least one part");const o={text:0,inlineData:0,functionCall:0,functionResponse:0,thought:0,thoughtSignature:0,executableCode:0,codeExecutionResult:0};for(const e of s)for(const t of Y)t in e&&(o[t]+=1);const r=q[e];for(const t of Y)if(!r.includes(t)&&o[t]>0)throw new AIError(k.INVALID_CONTENT,`Content with role '${e}' can't contain '${t}' part`);if(t&&!j[e].includes(t.role))throw new AIError(k.INVALID_CONTENT,`Content with role '${e}' can't follow '${t.role}'. Valid previous roles: ${JSON.stringify(j)}`);t=n}}(s.history),this._history=s.history)}async getHistory(){return await this._sendPromise,this._history}async sendMessage(e){await this._sendPromise;const t=formatNewContent(e),n={safetySettings:this.params?.safetySettings,generationConfig:this.params?.generationConfig,tools:this.params?.tools,toolConfig:this.params?.toolConfig,systemInstruction:this.params?.systemInstruction,contents:[...this._history,t]};let s={};return this._sendPromise=this._sendPromise.then((()=>generateContent(this._apiSettings,this.model,n,this.chromeAdapter,this.requestOptions))).then((e=>{if(e.response.candidates&&e.response.candidates.length>0){this._history.push(t);const n={parts:e.response.candidates?.[0].content.parts||[],role:e.response.candidates?.[0].content.role||"model"};this._history.push(n)}else{const t=formatBlockErrorMessage(e.response);t&&x.warn(`sendMessage() was unsuccessful. ${t}. Inspect response object for details.`)}s=e})),await this._sendPromise,s}async sendMessageStream(e){await this._sendPromise;const t=formatNewContent(e),n={safetySettings:this.params?.safetySettings,generationConfig:this.params?.generationConfig,tools:this.params?.tools,toolConfig:this.params?.toolConfig,systemInstruction:this.params?.systemInstruction,contents:[...this._history,t]},s=generateContentStream(this._apiSettings,this.model,n,this.chromeAdapter,this.requestOptions);return this._sendPromise=this._sendPromise.then((()=>s)).catch((e=>{throw new Error(J)})).then((e=>e.response)).then((e=>{if(e.candidates&&e.candidates.length>0){this._history.push(t);const n={...e.candidates[0].content};n.role||(n.role="model"),this._history.push(n)}else{const t=formatBlockErrorMessage(e);t&&x.warn(`sendMessageStream() was unsuccessful. ${t}. Inspect response object for details.`)}})).catch((e=>{e.message!==J&&x.error(e)})),s}}async function countTokens(e,t,n,s,o){if(s?.mode===w.ONLY_ON_DEVICE)throw new AIError(k.UNSUPPORTED,"countTokens() is not supported for on-device models.");return async function countTokensOnCloud(e,t,n,s){let o="";if(e.backend.backendType===G.GOOGLE_AI){const e=function mapCountTokensRequest(e,t){return{generateContentRequest:{model:t,...e}}}(n,t);o=JSON.stringify(e)}else o=JSON.stringify(n);return(await makeRequest(t,F.COUNT_TOKENS,e,!1,o,s)).json()}(e,t,n,o)}class GenerativeModel extends AIModel{constructor(e,t,n,s){super(e,t.model),this.chromeAdapter=s,this.generationConfig=t.generationConfig||{},this.safetySettings=t.safetySettings||[],this.tools=t.tools,this.toolConfig=t.toolConfig,this.systemInstruction=formatSystemInstruction(t.systemInstruction),this.requestOptions=n||{}}async generateContent(e){const t=formatGenerateContentInput(e);return generateContent(this._apiSettings,this.model,{generationConfig:this.generationConfig,safetySettings:this.safetySettings,tools:this.tools,toolConfig:this.toolConfig,systemInstruction:this.systemInstruction,...t},this.chromeAdapter,this.requestOptions)}async generateContentStream(e){const t=formatGenerateContentInput(e);return generateContentStream(this._apiSettings,this.model,{generationConfig:this.generationConfig,safetySettings:this.safetySettings,tools:this.tools,toolConfig:this.toolConfig,systemInstruction:this.systemInstruction,...t},this.chromeAdapter,this.requestOptions)}startChat(e){return new ChatSession(this._apiSettings,this.model,this.chromeAdapter,{tools:this.tools,toolConfig:this.toolConfig,systemInstruction:this.systemInstruction,generationConfig:this.generationConfig,safetySettings:this.safetySettings,...e},this.requestOptions)}async countTokens(e){const t=formatGenerateContentInput(e);return countTokens(this._apiSettings,this.model,t,this.chromeAdapter)}}class LiveSession{constructor(e,t){this.webSocketHandler=e,this.serverMessages=t,this.isClosed=!1,this.inConversation=!1}async send(e,t=!0){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const n={clientContent:{turns:[formatNewContent(e)],turnComplete:t}};this.webSocketHandler.send(JSON.stringify(n))}async sendTextRealtime(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const t={realtimeInput:{text:e}};this.webSocketHandler.send(JSON.stringify(t))}async sendAudioRealtime(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const t={realtimeInput:{audio:e}};this.webSocketHandler.send(JSON.stringify(t))}async sendVideoRealtime(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const t={realtimeInput:{video:e}};this.webSocketHandler.send(JSON.stringify(t))}async sendFunctionResponses(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const t={toolResponse:{functionResponses:e}};this.webSocketHandler.send(JSON.stringify(t))}async*receive(){if(this.isClosed)throw new AIError(k.SESSION_CLOSED,"Cannot read from a Live session that is closed. Try starting a new Live session.");for await(const e of this.serverMessages)e&&"object"==typeof e?v.SERVER_CONTENT in e?yield{type:"serverContent",...e.serverContent}:v.TOOL_CALL in e?yield{type:"toolCall",...e.toolCall}:v.TOOL_CALL_CANCELLATION in e?yield{type:"toolCallCancellation",...e.toolCallCancellation}:x.warn(`Received an unknown message type from the server: ${JSON.stringify(e)}`):x.warn(`Received an invalid message from the server: ${JSON.stringify(e)}`)}async close(){this.isClosed||(this.isClosed=!0,await this.webSocketHandler.close(1e3,"Client closed session."))}async sendMediaChunks(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");e.forEach((e=>{const t={realtimeInput:{mediaChunks:[e]}};this.webSocketHandler.send(JSON.stringify(t))}))}async sendMediaStream(e){if(this.isClosed)throw new AIError(k.REQUEST_ERROR,"This LiveSession has been closed and cannot be used.");const t=e.getReader();for(;;)try{const{done:e,value:n}=await t.read();if(e)break;if(!n)throw new Error("Missing chunk in reader, but reader is not done.");await this.sendMediaChunks([n])}catch(e){const t=e instanceof Error?e.message:"Error processing media stream.";throw new AIError(k.REQUEST_ERROR,t)}}}class LiveGenerativeModel extends AIModel{constructor(e,t,n){super(e,t.model),this._webSocketHandler=n,this.generationConfig=t.generationConfig||{},this.tools=t.tools,this.toolConfig=t.toolConfig,this.systemInstruction=formatSystemInstruction(t.systemInstruction)}async connect(){const e=new WebSocketUrl(this._apiSettings);let t;await this._webSocketHandler.connect(e.toString()),t=this._apiSettings.backend.backendType===G.GOOGLE_AI?`projects/${this._apiSettings.project}/${this.model}`:`projects/${this._apiSettings.project}/locations/${this._apiSettings.location}/${this.model}`;const{inputAudioTranscription:n,outputAudioTranscription:s,...o}=this.generationConfig,r={setup:{model:t,generationConfig:o,tools:this.tools,toolConfig:this.toolConfig,systemInstruction:this.systemInstruction,inputAudioTranscription:n,outputAudioTranscription:s}};try{const e=this._webSocketHandler.listen();this._webSocketHandler.send(JSON.stringify(r));const t=(await e.next()).value;if(!t||"object"!=typeof t||!("setupComplete"in t))throw await this._webSocketHandler.close(1011,"Handshake failure"),new AIError(k.RESPONSE_ERROR,"Server connection handshake failed. The server did not respond with a setupComplete message.");return new LiveSession(this._webSocketHandler,e)}catch(e){throw await this._webSocketHandler.close(),e}}}class ImagenModel extends AIModel{constructor(e,t,n){const{model:s,generationConfig:o,safetySettings:r}=t;super(e,s),this.requestOptions=n,this.generationConfig=o,this.safetySettings=r}async generateImages(e){const t=createPredictRequestBody(e,{...this.generationConfig,...this.safetySettings});return handlePredictResponse(await makeRequest(this.model,F.PREDICT,this._apiSettings,!1,JSON.stringify(t),this.requestOptions))}async generateImagesGCS(e,t){const n=createPredictRequestBody(e,{gcsURI:t,...this.generationConfig,...this.safetySettings});return handlePredictResponse(await makeRequest(this.model,F.PREDICT,this._apiSettings,!1,JSON.stringify(n),this.requestOptions))}}class WebSocketHandlerImpl{constructor(){if("undefined"==typeof WebSocket)throw new AIError(k.UNSUPPORTED,'The WebSocket API is not available in this environment. The "Live" feature is not supported here. It is supported in modern browser windows, Web Workers with WebSocket support, and Node >= 22.')}connect(e){return new Promise(((t,n)=>{this.ws=new WebSocket(e),this.ws.binaryType="blob",this.ws.addEventListener("open",(()=>t()),{once:!0}),this.ws.addEventListener("error",(()=>n(new AIError(k.FETCH_ERROR,"Error event raised on WebSocket"))),{once:!0}),this.ws.addEventListener("close",(e=>{e.reason&&x.warn(`WebSocket connection closed by server. Reason: '${e.reason}'`)}))}))}send(e){if(!this.ws||this.ws.readyState!==WebSocket.OPEN)throw new AIError(k.REQUEST_ERROR,"WebSocket is not open.");this.ws.send(e)}async*listen(){if(!this.ws)throw new AIError(k.REQUEST_ERROR,"WebSocket is not connected.");const e=[],t=[];let n=null,s=!1;const messageListener=async s=>{let o;if(s.data instanceof Blob)o=await s.data.text();else{if("string"!=typeof s.data)return t.push(new AIError(k.PARSE_FAILED,`Failed to parse WebSocket response. Expected data to be a Blob or string, but was ${typeof s.data}.`)),void(n&&(n(),n=null));o=s.data}try{const t=JSON.parse(o);e.push(t)}catch(e){const n=e;t.push(new AIError(k.PARSE_FAILED,`Error parsing WebSocket message to JSON: ${n.message}`))}n&&(n(),n=null)},errorListener=()=>{t.push(new AIError(k.FETCH_ERROR,"WebSocket connection error.")),n&&(n(),n=null)},closeListener=e=>{e.reason&&x.warn(`WebSocket connection closed by the server with reason: ${e.reason}`),s=!0,n&&(n(),n=null),this.ws?.removeEventListener("message",messageListener),this.ws?.removeEventListener("close",closeListener),this.ws?.removeEventListener("error",errorListener)};for(this.ws.addEventListener("message",messageListener),this.ws.addEventListener("close",closeListener),this.ws.addEventListener("error",errorListener);!s;){if(t.length>0){throw t.shift()}e.length>0?yield e.shift():await new Promise((e=>{n=e}))}if(t.length>0){throw t.shift()}}close(e,t){return new Promise((n=>this.ws?(this.ws.addEventListener("close",(()=>n()),{once:!0}),this.ws.readyState===WebSocket.CLOSED||this.ws.readyState===WebSocket.CONNECTING?n():void(this.ws.readyState!==WebSocket.CLOSING&&this.ws.close(e,t))):n()))}}class Schema{constructor(e){if(!e.type&&!e.anyOf)throw new AIError(k.INVALID_SCHEMA,"A schema must have either a 'type' or an 'anyOf' array of sub-schemas.");for(const t in e)this[t]=e[t];this.type=e.type,this.format=e.hasOwnProperty("format")?e.format:void 0,this.nullable=!!e.hasOwnProperty("nullable")&&!!e.nullable}toJSON(){const e={type:this.type};for(const t in this)this.hasOwnProperty(t)&&void 0!==this[t]&&("required"===t&&this.type!==P.OBJECT||(e[t]=this[t]));return e}static array(e){return new ArraySchema(e,e.items)}static object(e){return new ObjectSchema(e,e.properties,e.optionalProperties)}static string(e){return new StringSchema(e)}static enumString(e){return new StringSchema(e,e.enum)}static integer(e){return new IntegerSchema(e)}static number(e){return new NumberSchema(e)}static boolean(e){return new BooleanSchema(e)}static anyOf(e){return new AnyOfSchema(e)}}class IntegerSchema extends Schema{constructor(e){super({type:P.INTEGER,...e})}}class NumberSchema extends Schema{constructor(e){super({type:P.NUMBER,...e})}}class BooleanSchema extends Schema{constructor(e){super({type:P.BOOLEAN,...e})}}class StringSchema extends Schema{constructor(e,t){super({type:P.STRING,...e}),this.enum=t}toJSON(){const e=super.toJSON();return this.enum&&(e.enum=this.enum),e}}class ArraySchema extends Schema{constructor(e,t){super({type:P.ARRAY,...e}),this.items=t}toJSON(){const e=super.toJSON();return e.items=this.items.toJSON(),e}}class ObjectSchema extends Schema{constructor(e,t,n=[]){super({type:P.OBJECT,...e}),this.properties=t,this.optionalProperties=n}toJSON(){const e=super.toJSON();e.properties={...this.properties};const t=[];if(this.optionalProperties)for(const e of this.optionalProperties)if(!this.properties.hasOwnProperty(e))throw new AIError(k.INVALID_SCHEMA,`Property "${e}" specified in "optionalProperties" does not exist.`);for(const n in this.properties)this.properties.hasOwnProperty(n)&&(e.properties[n]=this.properties[n].toJSON(),this.optionalProperties.includes(n)||t.push(n));return t.length>0&&(e.required=t),delete e.optionalProperties,e}}class AnyOfSchema extends Schema{constructor(e){if(0===e.anyOf.length)throw new AIError(k.INVALID_SCHEMA,"The 'anyOf' array must not be empty.");super({...e,type:void 0}),this.anyOf=e.anyOf}toJSON(){const e=super.toJSON();return this.anyOf&&Array.isArray(this.anyOf)&&(e.anyOf=this.anyOf.map((e=>e.toJSON()))),e}}class ImagenImageFormat{constructor(){this.mimeType="image/png"}static jpeg(e){return e&&(e<0||e>100)&&x.warn(`Invalid JPEG compression quality of ${e} specified; the supported range is [0, 100].`),{mimeType:"image/jpeg",compressionQuality:e}}static png(){return{mimeType:"image/png"}}}const K="audio-processor",Q=`\n class AudioProcessor extends AudioWorkletProcessor {\n constructor(options) {\n super();\n this.targetSampleRate = options.processorOptions.targetSampleRate;\n // 'sampleRate' is a global variable available inside the AudioWorkletGlobalScope,\n // representing the native sample rate of the AudioContext.\n this.inputSampleRate = sampleRate;\n }\n\n /**\n * This method is called by the browser's audio engine for each block of audio data.\n * Input is a single input, with a single channel (input[0][0]).\n */\n process(inputs) {\n const input = inputs[0];\n if (input && input.length > 0 && input[0].length > 0) {\n const pcmData = input[0]; // Float32Array of raw audio samples.\n \n // Simple linear interpolation for resampling.\n const resampled = new Float32Array(Math.round(pcmData.length * this.targetSampleRate / this.inputSampleRate));\n const ratio = pcmData.length / resampled.length;\n for (let i = 0; i < resampled.length; i++) {\n resampled[i] = pcmData[Math.floor(i * ratio)];\n }\n\n // Convert Float32 (-1, 1) samples to Int16 (-32768, 32767)\n const resampledInt16 = new Int16Array(resampled.length);\n for (let i = 0; i < resampled.length; i++) {\n const sample = Math.max(-1, Math.min(1, resampled[i]));\n if (sample < 0) {\n resampledInt16[i] = sample * 32768;\n } else {\n resampledInt16[i] = sample * 32767;\n }\n }\n \n this.port.postMessage(resampledInt16);\n }\n // Return true to keep the processor alive and processing the next audio block.\n return true;\n }\n }\n\n // Register the processor with a name that can be used to instantiate it from the main thread.\n registerProcessor('${K}', AudioProcessor);\n`;class AudioConversationRunner{constructor(e,t,n){this.liveSession=e,this.options=t,this.deps=n,this.isStopped=!1,this.stopDeferred=new Deferred,this.playbackQueue=[],this.scheduledSources=[],this.nextStartTime=0,this.isPlaybackLoopRunning=!1,this.liveSession.inConversation=!0,this.receiveLoopPromise=this.runReceiveLoop().finally((()=>this.cleanup())),this.deps.workletNode.port.onmessage=e=>{if(this.isStopped)return;const t=e.data,n={mimeType:"audio/pcm",data:btoa(String.fromCharCode.apply(null,Array.from(new Uint8Array(t.buffer))))};this.liveSession.sendAudioRealtime(n)}}async stop(){this.isStopped||(this.isStopped=!0,this.stopDeferred.resolve(),await this.receiveLoopPromise)}cleanup(){this.interruptPlayback(),this.deps.workletNode.port.onmessage=null,this.deps.workletNode.disconnect(),this.deps.sourceNode.disconnect(),this.deps.mediaStream.getTracks().forEach((e=>e.stop())),"closed"!==this.deps.audioContext.state&&this.deps.audioContext.close(),this.liveSession.inConversation=!1}enqueueAndPlay(e){this.playbackQueue.push(e),this.processPlaybackQueue()}interruptPlayback(){[...this.scheduledSources].forEach((e=>e.stop(0))),this.playbackQueue.length=0,this.nextStartTime=this.deps.audioContext.currentTime}async processPlaybackQueue(){if(!this.isPlaybackLoopRunning){for(this.isPlaybackLoopRunning=!0;this.playbackQueue.length>0&&!this.isStopped;){const e=this.playbackQueue.shift();try{const t=new Int16Array(e),n=t.length,s=this.deps.audioContext.createBuffer(1,n,24e3),o=s.getChannelData(0);for(let e=0;e<n;e++)o[e]=t[e]/32768;const r=this.deps.audioContext.createBufferSource();r.buffer=s,r.connect(this.deps.audioContext.destination),this.scheduledSources.push(r),r.onended=()=>{this.scheduledSources=this.scheduledSources.filter((e=>e!==r))},this.nextStartTime=Math.max(this.deps.audioContext.currentTime,this.nextStartTime),r.start(this.nextStartTime),this.nextStartTime+=s.duration}catch(e){x.error("Error playing audio:",e)}}this.isPlaybackLoopRunning=!1}}async runReceiveLoop(){const e=this.liveSession.receive();for(;!this.isStopped;){const t=await Promise.race([e.next(),this.stopDeferred.promise]);if(this.isStopped||!t||t.done)break;const n=t.value;if("serverContent"===n.type){const e=n;e.interrupted&&this.interruptPlayback();const t=e.modelTurn?.parts.find((e=>e.inlineData?.mimeType.startsWith("audio/")));if(t?.inlineData){const e=Uint8Array.from(atob(t.inlineData.data),(e=>e.charCodeAt(0))).buffer;this.enqueueAndPlay(e)}}else if("toolCall"===n.type)if(this.options.functionCallingHandler)try{const e=await this.options.functionCallingHandler(n.functionCalls);this.isStopped||this.liveSession.sendFunctionResponses([e])}catch(e){throw new AIError(k.ERROR,`Function calling handler failed: ${e.message}`)}else x.warn("Received tool call message, but StartAudioConversationOptions.functionCallingHandler is undefined. Ignoring tool call.")}}}async function startAudioConversation(e,t={}){if(e.isClosed)throw new AIError(k.SESSION_CLOSED,"Cannot start audio conversation on a closed LiveSession.");if(e.inConversation)throw new AIError(k.REQUEST_ERROR,"An audio conversation is already in progress for this session.");if("undefined"==typeof AudioWorkletNode||"undefined"==typeof AudioContext||"undefined"==typeof navigator||!navigator.mediaDevices)throw new AIError(k.UNSUPPORTED,"Audio conversation is not supported in this environment. It requires the Web Audio API and AudioWorklet support.");let n;try{n=new AudioContext,"suspended"===n.state&&await n.resume();const s=await navigator.mediaDevices.getUserMedia({audio:!0}),o=new Blob([Q],{type:"application/javascript"}),r=URL.createObjectURL(o);await n.audioWorklet.addModule(r);const i=n.createMediaStreamSource(s),a=new AudioWorkletNode(n,K,{processorOptions:{targetSampleRate:16e3}});i.connect(a);const c=new AudioConversationRunner(e,t,{audioContext:n,mediaStream:s,sourceNode:i,workletNode:a});return{stop:()=>c.stop()}}catch(e){if(n&&"closed"!==n.state&&n.close(),e instanceof AIError||e instanceof DOMException)throw e;throw new AIError(k.ERROR,`Failed to initialize audio recording: ${e.message}`)}}function getAI(e=t(),n){e=function getModularInstance(e){return e&&e._delegate?e._delegate:e}(e);const s=_getProvider(e,p),o=n?.backend??new GoogleAIBackend,r={useLimitedUseAppCheckTokens:n?.useLimitedUseAppCheckTokens??!1},i=function encodeInstanceIdentifier(e){if(e instanceof GoogleAIBackend)return`${p}/googleai`;if(e instanceof VertexAIBackend)return`${p}/vertexai/${e.location}`;throw new AIError(k.ERROR,`Invalid backend: ${JSON.stringify(e.backendType)}`)}(o),a=s.getImmediate({identifier:i});return a.options=r,a}function getGenerativeModel(e,t,n){const s=t;let o;if(o=s.mode?s.inCloudParams||{model:"gemini-2.0-flash-lite"}:t,!o.model)throw new AIError(k.NO_MODEL,"Must provide a model name. Example: getGenerativeModel({ model: 'my-model-name' })");const r=e.chromeAdapterFactory?.(s.mode,"undefined"==typeof window?void 0:window,s.onDeviceParams);return new GenerativeModel(e,o,n,r)}function getImagenModel(e,t,n){if(!t.mo