@ratley/react-native-apple-foundation-models
Access Apple’s on-device Foundation Models (text + image AI)
import { useCallback, useEffect, useRef, useState } from "react";
import { toTextGenerationError } from "./errors";
import { isTextModelAvailable } from "./index";
import { LLMSession } from "./LLMSession";
/**
* React hook that manages a local language model session, providing
* availability detection, lifecycle state, in-memory history, and convenient
* helpers for generation. Conceptually aligned with Apple's
* `LanguageModelSession`, but adapted for React usage and this module's
* surface area.
*
* The hook performs a support check on mount via `isTextModelAvailable()`. If
* the model is supported and `autoCreate` is true, a session is created on
* mount so callers can call `ask()` immediately; otherwise the first call to
* `ask()` creates one on demand.
*
* @param params Optional configuration for initial instructions, an initial
* session id, and automatic creation behavior.
* @returns `UseLLMSessionReturn` with helpers and state.
* @see https://developer.apple.com/documentation/foundationmodels/languagemodelsession
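* @example
* // Minimal illustrative usage sketch (error handling trimmed for brevity):
* // import { Button, Text, View } from "react-native";
* function Chat() {
*   const { ask, history, status, isAvailable } = useLLMSession({
*     instructions: "You are a concise assistant.",
*   });
*   if (!isAvailable) return <Text>On-device model unavailable.</Text>;
*   return (
*     <View>
*       {history.map((m, i) => (
*         <Text key={i}>{`${m.role}: ${m.content}`}</Text>
*       ))}
*       <Button
*         title={status === "running" ? "Generating…" : "Ask"}
*         disabled={status === "running"}
*         onPress={() => ask("Give me a haiku about autumn.").catch(() => {})}
*       />
*     </View>
*   );
* }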
*/
export function useLLMSession({ instructions, initialId, autoCreate = true } = {}) {
const [isAvailable, setIsAvailable] = useState(true);
const [status, setStatus] = useState("idle");
const [error, setError] = useState(undefined);
const [history, setHistory] = useState([]);
const sessionRef = useRef(null);
const lastPromptRef = useRef(undefined);
const effectiveInitialId = initialId;
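// On mount: check whether the on-device text model is available and, when
// `autoCreate` is enabled, create the initial session with the provided
// instructions. Any failure during the check is treated as "unsupported".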
useEffect(() => {
let mounted = true;
(async () => {
try {
const available = await isTextModelAvailable();
if (!mounted) return;
setIsAvailable(!!available);
if (!available) {
setStatus("unsupported");
return;
}
if (autoCreate && !sessionRef.current) {
sessionRef.current = await LLMSession.create({
instructions,
sessionId: effectiveInitialId,
});
}
}
catch (_error) {
if (!mounted) return;
setIsAvailable(false);
setStatus("unsupported");
}
})();
return () => {
mounted = false;
};
}, [autoCreate, instructions, effectiveInitialId]);
const sessionId = sessionRef.current?.sessionId;
// Sessions are held in memory only; nothing is persisted.
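// Lazily create the underlying session on first use and reuse it afterwards.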
const ensureSession = useCallback(async () => {
if (sessionRef.current) return sessionRef.current;
sessionRef.current = await LLMSession.create({
instructions,
sessionId: effectiveInitialId,
});
return sessionRef.current;
}, [instructions, effectiveInitialId]);
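// Send a prompt to the model: record the user message, run generation on the
// session, append the assistant reply to history, and surface failures as
// normalized text-generation errors.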
const ask = useCallback(async (prompt, options) => {
if (!isAvailable) {
throw toTextGenerationError({
code: "ERR_TEXT_GENERATION_UNSUPPORTED",
message: "Text generation is not supported.",
});
}
const session = await ensureSession();
setStatus("running");
setError(undefined);
try {
lastPromptRef.current = prompt;
setHistory((h) => h.concat([{ role: "user", content: prompt }]));
const text = await session.ask({
prompt,
temperature: options?.temperature,
maxOutputTokens: options?.maxOutputTokens,
});
setHistory((h) => h.concat([{ role: "assistant", content: text }]));
setStatus("idle");
return text;
}
catch (e) {
const err = toTextGenerationError(e);
setError({ code: err.code, message: err.message });
setStatus("error");
throw err;
}
}, [ensureSession, isAvailable]);
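// Re-run the most recent prompt (without the original options); throws if
// nothing has been asked yet.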
const regenerate = useCallback(async () => {
const last = lastPromptRef.current?.trim();
if (!last) {
throw toTextGenerationError({
code: "ERR_TEXT_PROMPT_INVALID",
message: "No previous prompt to regenerate.",
});
}
return ask(last);
}, [ask]);
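// Swap the session's instructions by resetting the underlying session;
// in-memory history is left untouched (use `reset` to clear it).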
const setInstructions = useCallback((value) => {
if (!sessionRef.current) return;
sessionRef.current.reset({ instructions: value });
}, []);
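// Reset the session (optionally with new instructions) and clear the
// in-memory history.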
const reset = useCallback((value) => {
if (!sessionRef.current) return;
sessionRef.current.reset({ instructions: value?.instructions });
setHistory([]);
}, []);
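// Tear down the session and clear local state; a later `ask()` will create a
// fresh session via `ensureSession`.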
const destroy = useCallback(() => {
if (!sessionRef.current) return;
sessionRef.current.destroy();
sessionRef.current = null;
setHistory([]);
}, []);
return {
sessionId,
isAvailable,
status,
error,
history,
ask,
regenerate,
setInstructions,
reset,
destroy,
};
}
export default useLLMSession;
//# sourceMappingURL=useLLMSession.js.map