@digilogiclabs/saas-factory-ai
Version: 4.0.4
Next.js 15 Compatible AI Integration Platform - Drop-in ready with server/client separation for seamless React Server Components support.
253 lines (217 loc) • 8.14 kB
TypeScript
// Next.js 15 Compatible AI Package Type Definitions
// @digilogiclabs/saas-factory-ai v4.0.4

import type * as React from 'react';
// ============================================================================
// CORE CONFIGURATION INTERFACES
// ============================================================================
export interface QuickStartOptions {
  // Required
  apiKey: string;
  gatewayUrl?: string;

  // Project configuration
  projectType?: 'ecommerce' | 'content-creator' | 'education' | 'healthcare' | 'financial' | 'gaming' | 'customer-service' | 'developer' | 'research' | 'custom';
  primaryProvider?: 'openai' | 'anthropic' | 'gemini';
  capabilities?: ('chat' | 'video' | 'audio' | 'embeddings' | 'analysis')[];
  environment?: 'development' | 'production';

  // Enhanced API parameters (NEW in v4.0.4)
  maxTokens?: number;
  temperature?: number;
  provider?: 'openai' | 'anthropic' | 'gemini';

  // Feature toggles
  enableCaching?: boolean;
  enableMonitoring?: boolean;
  enableRateLimit?: boolean;

  // Custom provider support
  customProvider?: {
    name: string;
    endpoint: string;
    apiKey?: string;
    capabilities: string[];
  };
}
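
// Usage sketch (illustrative, not part of the declarations): a minimal
// QuickStartOptions object passed to quickStartAI(), which is declared further
// below. The environment variable name and the option values are assumptions,
// not documented defaults.
/*
import { quickStartAI } from '@digilogiclabs/saas-factory-ai';

const hub = quickStartAI({
  apiKey: process.env.AI_GATEWAY_API_KEY!,   // required
  projectType: 'content-creator',
  primaryProvider: 'anthropic',
  capabilities: ['chat', 'audio'],
  environment: 'production',
  maxTokens: 1024,
  temperature: 0.7,
  enableCaching: true,
});
*/
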
export interface AIConfig {
  apiKey: string;
  gatewayUrl: string;
  environment?: 'development' | 'production';
  timeout?: number;
  retries?: number;
}

export interface AIHubConfig {
  gatewayUrl: string;
  apiKey: string;
  providers?: Record<string, any>;
  routing?: {
    strategy: 'quality' | 'cost' | 'latency';
    fallback: boolean;
    loadBalancing?: boolean;
  };
  features?: {
    caching?: { enabled: boolean; ttl?: number; storage?: string };
    rateLimiting?: { enabled: boolean; requests?: number; windowMs?: number };
    monitoring?: { enabled: boolean; metrics?: string[] };
    security?: { encryption?: boolean; auditLogging?: boolean };
  };
}
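
// Usage sketch (illustrative): a fully specified AIHubConfig for createAIHub(),
// declared below. The gateway URL, TTL, rate-limit window, and metric names
// are placeholders rather than documented values.
/*
import { createAIHub } from '@digilogiclabs/saas-factory-ai';

const hub = createAIHub({
  gatewayUrl: 'https://gateway.example.com',
  apiKey: process.env.AI_GATEWAY_API_KEY!,
  routing: { strategy: 'cost', fallback: true, loadBalancing: true },
  features: {
    caching: { enabled: true, ttl: 300 },
    rateLimiting: { enabled: true, requests: 100, windowMs: 60_000 },
    monitoring: { enabled: true, metrics: ['latency', 'cost'] },
    security: { encryption: true, auditLogging: true },
  },
});
*/
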
// ============================================================================
// MESSAGE AND OPTIONS INTERFACES
// ============================================================================
export interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
  metadata?: Record<string, any>;
}

export interface ChatOptions {
  model?: string;
  stream?: boolean;
  maxTokens?: number;
  temperature?: number;
  topP?: number;
  frequencyPenalty?: number;
  presencePenalty?: number;
  stop?: string[];
  systemPrompt?: string;
}
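
// Usage sketch (illustrative): a typed conversation and per-call options, as
// they would be passed to AIHub.chat() (see the AIHub interface below). The
// model id is a placeholder for whatever your chosen provider exposes.
/*
const messages: ChatMessage[] = [
  { role: 'system', content: 'You are a concise assistant.' },
  { role: 'user', content: 'Summarize the release notes in three bullets.' },
];

const chatOptions: ChatOptions = {
  model: 'gpt-4o',      // placeholder model id
  maxTokens: 512,
  temperature: 0.2,
  stream: false,
};
*/
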
export interface AudioOptions {
  style?: string;
  durationSec?: number;
  format?: 'mp3' | 'wav' | 'ogg';
  voice?: string;
  speed?: number;
}

export interface VideoOptions {
  style?: string;
  durationSec?: number;
  resolution?: '720p' | '1080p' | '4K';
  aspectRatio?: '16:9' | '9:16' | '1:1' | '4:3';
  fps?: number;
  format?: 'mp4' | 'webm' | 'mov';
  model?: string;
}
// ============================================================================
// RESPONSE INTERFACES
// ============================================================================
export interface GenerationResponse {
  jobId: string;
  status: 'queued' | 'running' | 'done' | 'error';
  estimatedTimeMs?: number;
  cost?: number;
}
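
// Usage sketch (illustrative): kicking off a video job and reading the
// GenerationResponse it returns. `hub` is an AIHub (interface declared below);
// the prompt and option values are placeholders.
/*
const job = await hub.generateVideo('A timelapse of a city at dusk', {
  durationSec: 15,
  resolution: '1080p',
  aspectRatio: '16:9',
  format: 'mp4',
});

console.log(job.jobId, job.status);   // e.g. a job id plus "queued"
if (job.estimatedTimeMs) {
  console.log(`expected in ~${Math.round(job.estimatedTimeMs / 1000)}s`);
}
*/
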
export interface AnalysisResult {
  type: string;
  score: number;
  confidence: number;
  details: Record<string, any>;
  metadata?: Record<string, any>;
}

export interface JobStatus {
  id: string;
  status: 'queued' | 'running' | 'done' | 'error';
  progress?: number;
  result?: {
    url?: string;
    metadata?: Record<string, any>;
  };
  error?: string;
  createdAt: string;
  updatedAt: string;
}
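
// Usage sketch (illustrative): a small helper that narrows a JobStatus to its
// result URL once the job has finished, surfacing the error message otherwise.
/*
function getResultUrl(job: JobStatus): string | null {
  if (job.status === 'error') {
    throw new Error(job.error ?? 'generation failed');
  }
  return job.status === 'done' ? job.result?.url ?? null : null;
}
*/
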
// ============================================================================
// AI HUB INTERFACE
// ============================================================================
export interface AIHub {
  chat(messages: ChatMessage[], options?: ChatOptions): Promise<string | AsyncIterable<string>>;
  generateVideo(prompt: string, options?: VideoOptions): Promise<GenerationResponse>;
  generateAudio(prompt: string, options?: AudioOptions): Promise<GenerationResponse>;
  analyze(content: string, analysisType: string): Promise<AnalysisResult>;
}
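
// Usage sketch (illustrative): AIHub.chat() resolves to either a plain string
// or, when streaming, an AsyncIterable of chunks, so callers should narrow the
// union before use. `hub` and `messages` are from the sketches above.
/*
const reply = await hub.chat(messages, { stream: true });

if (typeof reply === 'string') {
  console.log(reply);
} else {
  for await (const chunk of reply) {
    process.stdout.write(chunk);
  }
}
*/
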
// ============================================================================
// ENHANCED SERVER-SIDE FUNCTIONS (NEW in v4.0.4)
// ============================================================================
// Enhanced simpleChat with message parameter support
export declare function simpleChat(options: {
  message: string;
  apiKey: string;
  maxTokens?: number;
  temperature?: number;
  provider?: 'openai' | 'anthropic' | 'gemini';
} & Partial<QuickStartOptions>): Promise<string>;

// Enhanced simpleVideoGeneration
export declare function simpleVideoGeneration(options: {
  prompt: string;
  apiKey: string;
  durationSec?: number;
  resolution?: '720p' | '1080p' | '4K';
  provider?: 'openai' | 'anthropic' | 'gemini';
} & Partial<QuickStartOptions>): Promise<string>;

// Enhanced simpleAnalysis
export declare function simpleAnalysis(options: {
  content: string;
  analysisType: string;
  apiKey: string;
  provider?: 'openai' | 'anthropic' | 'gemini';
} & Partial<QuickStartOptions>): Promise<AnalysisResult>;
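
// Usage sketch (illustrative): calling the enhanced simpleChat() from a
// Next.js 15 Route Handler so the API key stays on the server. The route
// path, environment variable name, and request shape are assumptions; a
// dedicated /server entry point may apply depending on the package's exports.
/*
// app/api/chat/route.ts
import { NextResponse } from 'next/server';
import { simpleChat } from '@digilogiclabs/saas-factory-ai';

export async function POST(request: Request) {
  const { message } = await request.json();

  const reply = await simpleChat({
    message,
    apiKey: process.env.AI_GATEWAY_API_KEY!,
    provider: 'openai',
    maxTokens: 512,
    temperature: 0.3,
  });

  return NextResponse.json({ reply });
}
*/
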
// Core server functions
export declare function quickStartAI(options: QuickStartOptions): AIHub;
export declare function createAIHub(config: AIHubConfig): AIHub;
// ============================================================================
// CLIENT-SIDE REACT HOOKS AND COMPONENTS
// ============================================================================
export interface AIContextType {
  config: AIConfig;
  isLoading: boolean;
  error: string | null;
  chat: (messages: ChatMessage[], options?: ChatOptions) => Promise<string | AsyncIterable<string>>;
  generateVideo: (prompt: string, options?: VideoOptions) => Promise<GenerationResponse>;
  generateAudio: (prompt: string, options?: AudioOptions) => Promise<GenerationResponse>;
  clearError: () => void;
}

export interface UseChatReturn {
  messages: ChatMessage[];
  isLoading: boolean;
  error: Error | null;
  sendMessage: (content: string, options?: ChatOptions) => Promise<void>;
  clearMessages: () => void;
}

export interface UseGenerateAudioReturn {
  generate: (prompt: string, options?: AudioOptions) => Promise<GenerationResponse>;
  isLoading: boolean;
  error: Error | null;
  clearError: () => void;
}

export interface UseGenerateVideoReturn {
  generate: (prompt: string, options?: VideoOptions) => Promise<GenerationResponse>;
  isLoading: boolean;
  error: Error | null;
  clearError: () => void;
}

export interface UseJobStatusReturn {
  status: JobStatus | null;
  isLoading: boolean;
  error: Error | null;
  refetch: () => Promise<void>;
  isComplete: boolean;
  isDone: boolean;
  isError: boolean;
}
// React Hook Declarations
export declare function useChat(initialMessages?: ChatMessage[]): UseChatReturn;
export declare function useGenerateAudio(): UseGenerateAudioReturn;
export declare function useGenerateVideo(): UseGenerateVideoReturn;
export declare function useJobStatus(jobId: string | null, options?: { pollingInterval?: number; enabled?: boolean }): UseJobStatusReturn;
export declare function useEmbeddings(): { generate: (texts: string[]) => Promise<any> };
export declare function useAI(): AIContextType;
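
// Usage sketch (illustrative): wiring useChat() into a client component. Only
// the hook's return shape comes from the declarations above; the markup and
// import path are assumptions (a client entry point may be required).
/*
'use client';

import { useChat } from '@digilogiclabs/saas-factory-ai';

export function ChatPanel() {
  const { messages, isLoading, error, sendMessage, clearMessages } = useChat();

  return (
    <div>
      {messages.map((m, i) => (
        <p key={i}><strong>{m.role}:</strong> {m.content}</p>
      ))}
      {error && <p role="alert">{error.message}</p>}
      <button disabled={isLoading} onClick={() => sendMessage('Hello!')}>
        Send
      </button>
      <button onClick={clearMessages}>Reset</button>
    </div>
  );
}
*/
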
// React Component Declarations
export interface AIProviderProps {
  children: React.ReactNode;
  config?: AIConfig;
  apiKey?: string;
  gatewayUrl?: string;
}
export declare const AIProvider: React.ComponentType<AIProviderProps>;
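
// Usage sketch (illustrative): wrapping the app with AIProvider so the hooks
// above can read the shared AIConfig from context. The gateway URL and env
// variable name are placeholders; anything passed here reaches the browser,
// so in practice you would proxy through your gateway rather than expose a
// secret key client-side.
/*
// app/providers.tsx
'use client';

import { AIProvider } from '@digilogiclabs/saas-factory-ai';

export function Providers({ children }: { children: React.ReactNode }) {
  return (
    <AIProvider
      apiKey={process.env.NEXT_PUBLIC_AI_GATEWAY_API_KEY}
      gatewayUrl="https://gateway.example.com"
    >
      {children}
    </AIProvider>
  );
}
*/
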
// ============================================================================
// UTILITY FUNCTIONS
// ============================================================================
export declare function validateAIConfig(config: AIConfig): { isValid: boolean; errors: string[] };
export declare function getAIConfigFromEnv(): AIConfig;
// Environment detection
export declare const isServerSide: boolean;
export declare const isClientSide: boolean;
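
// Usage sketch (illustrative): reading an AIConfig from environment variables
// and validating it before constructing a hub. Which variables
// getAIConfigFromEnv() reads is not documented here, so this only exercises
// the declared return and validation shapes.
/*
import { getAIConfigFromEnv, validateAIConfig, createAIHub } from '@digilogiclabs/saas-factory-ai';

const config = getAIConfigFromEnv();
const { isValid, errors } = validateAIConfig(config);

if (!isValid) {
  throw new Error(`Invalid AI config: ${errors.join(', ')}`);
}

const hub = createAIHub({ gatewayUrl: config.gatewayUrl, apiKey: config.apiKey });
*/
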