// Package: @emergentmethods/asknews-typescript-sdk
// Version: 0.21.1 (per the OpenAPI document version in the header below)
// TypeScript SDK for the AskNews API
// npm listing metadata: 70 lines (69 loc) • 4.24 kB • TypeScript
/**
* AskNews API
* AskNews API
*
* The version of the OpenAPI document: 0.21.1
* Contact: contact@emergentmethods.ai
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import * as runtime from '../runtime';
import type { CreateChatCompletionRequest, CreateChatCompletionResponse1, CreateDeepNewsRequest, CreateDeepNewsResponse1, ListModelResponse } from '../models/index';
/** Request parameters for {@link ChatApi.deepNews} / {@link ChatApi.deepNewsRaw}. */
export interface DeepNewsRequest {
/** Required request body describing the deep-news research query. */
createDeepNewsRequest: CreateDeepNewsRequest;
}
/** Request parameters for {@link ChatApi.getChatCompletions} / {@link ChatApi.getChatCompletionsRaw}. */
export interface GetChatCompletionsRequest {
/** Required request body following the OpenAI chat-completions API spec. */
createChatCompletionRequest: CreateChatCompletionRequest;
}
/** Request parameters for {@link ChatApi.getHeadlineQuestions} / {@link ChatApi.getHeadlineQuestionsRaw}. */
export interface GetHeadlineQuestionsRequest {
/** Queries to fetch example headline questions for; optional — server behavior when omitted is not visible here. */
queries?: Array<string>;
}
/**
 * Typed client surface for the AskNews chat endpoints.
 *
 * Declarations only — the implementation is generated by OpenAPI Generator
 * alongside this file. Each endpoint is exposed in two flavors: a `*Raw`
 * method resolving to a `runtime.ApiResponse` (or `runtime.StreamApiResponse`
 * for endpoints that may stream), and a convenience method resolving to the
 * parsed model (or a `ReadableStream` when the server streams).
 */
export declare class ChatApi extends runtime.BaseAPI {
/**
 * Deep research into real-time news, archive news, and Google.
 * Raw variant: resolves to the wrapped API response, or a streaming
 * response (`runtime.StreamApiResponse`) when the server streams.
 */
deepNewsRaw(requestParameters: DeepNewsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<CreateDeepNewsResponse1> | runtime.StreamApiResponse>;
/**
 * Deep research into real-time news, archive news, and Google.
 * Convenience variant: resolves to the parsed `CreateDeepNewsResponse1`,
 * or a `ReadableStream` when the response is streamed.
 */
deepNews(requestParameters: DeepNewsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<CreateDeepNewsResponse1 | ReadableStream<any>>;
/**
 * Get the chat completions for a given user message. This endpoint follows the OpenAI API spec. It includes a couple extra params, which include: - **journalist_mode**: Whether to activate an auto prompt that is more keen on AP styling, citations, and fair reporting. Setting to false, you get a vanilla LLM with the news pre added to the system prompt. No other prompting. - **inline_citations**: Decides how you want the bot to cite sources. It can use brackets, or it can also include the markdown with URL automatically. - **asknews_watermark**: Whether to include the AskNews watermark in the response.
 * Get chat completions from a news-infused AI assistant.
 * Raw variant: resolves to the wrapped API response, or a streaming
 * response (`runtime.StreamApiResponse`) when the server streams.
 */
getChatCompletionsRaw(requestParameters: GetChatCompletionsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<CreateChatCompletionResponse1> | runtime.StreamApiResponse>;
/**
 * Get the chat completions for a given user message. This endpoint follows the OpenAI API spec. It includes a couple extra params, which include: - **journalist_mode**: Whether to activate an auto prompt that is more keen on AP styling, citations, and fair reporting. Setting to false, you get a vanilla LLM with the news pre added to the system prompt. No other prompting. - **inline_citations**: Decides how you want the bot to cite sources. It can use brackets, or it can also include the markdown with URL automatically. - **asknews_watermark**: Whether to include the AskNews watermark in the response.
 * Get chat completions from a news-infused AI assistant.
 * Convenience variant: resolves to the parsed `CreateChatCompletionResponse1`,
 * or a `ReadableStream` when the response is streamed.
 */
getChatCompletions(requestParameters: GetChatCompletionsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<CreateChatCompletionResponse1 | ReadableStream<any>>;
/**
 * Get the headline example questions related to the given queries.
 * Raw variant: resolves to an API response wrapping a map of
 * query string -> example headline questions.
 */
getHeadlineQuestionsRaw(requestParameters: GetHeadlineQuestionsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<{
[key: string]: Array<string>;
}>>;
/**
 * Get the headline example questions related to the given queries.
 * Convenience variant: resolves directly to the map of
 * query string -> example headline questions. Note `requestParameters`
 * is optional here, unlike the Raw variant.
 */
getHeadlineQuestions(requestParameters?: GetHeadlineQuestionsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<{
[key: string]: Array<string>;
}>;
/**
 * List the available chat models.
 * Raw variant: resolves to the wrapped `ListModelResponse`.
 */
listChatModelsRaw(initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<ListModelResponse>>;
/**
 * List the available chat models.
 * Convenience variant: resolves directly to the parsed `ListModelResponse`.
 */
listChatModels(initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<ListModelResponse>;
}