/*
 * @emergentmethods/asknews-typescript-sdk
 * TypeScript SDK for the AskNews API
 */
;
/* tslint:disable */
/* eslint-disable */
/**
* AskNews API
* AskNews API
*
* The version of the OpenAPI document: 0.21.1
* Contact: contact@emergentmethods.ai
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
// Standard tslib `__awaiter` helper emitted by the TypeScript compiler to
// down-level async/await: the async body is compiled to a generator, and this
// helper drives it, wrapping the whole thing in a Promise.
// It reuses an existing global `__awaiter` if one is already defined.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in the Promise implementation `P` unless it already is one.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value; a throw inside the body rejects the outer Promise.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Propagate a rejected awaited Promise back into the generator as a thrown exception.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either finish (generator done -> resolve) or await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatApi = void 0;
const runtime = require("../runtime");
const index_1 = require("../models/index");
/**
*
*/
/**
 * Client bindings for the AskNews chat endpoints.
 */
class ChatApi extends runtime.BaseAPI {
    /**
     * Deep research into real-time news, archive news, and Google.
     * Resolves to a streaming response wrapper when the request body set
     * `stream: true`, otherwise to a JSON response wrapper.
     */
    async deepNewsRaw(requestParameters, initOverrides) {
        const deepNewsRequest = requestParameters['createDeepNewsRequest'];
        if (deepNewsRequest == null) {
            throw new runtime.RequiredError('createDeepNewsRequest', 'Required parameter "createDeepNewsRequest" was null or undefined when calling deepNews().');
        }
        const query = {};
        const headers = { 'Content-Type': 'application/json' };
        const response = await this.request({
            path: `/v1/chat/deepnews`,
            method: 'POST',
            headers: headers,
            query: query,
            body: (0, index_1.CreateDeepNewsRequestToJSON)(deepNewsRequest),
        }, initOverrides);
        // Streamed responses are handed back as-is; non-streamed ones are deserialized.
        const wantsStream = 'createDeepNewsRequest' in requestParameters
            && deepNewsRequest != null
            && 'stream' in deepNewsRequest
            && deepNewsRequest['stream'] === true;
        if (wantsStream) {
            return new runtime.StreamApiResponse(response);
        }
        return new runtime.JSONApiResponse(response, (jsonValue) => (0, index_1.CreateDeepNewsResponse1FromJSON)(jsonValue));
    }
    /**
     * Deep research into real-time news, archive news, and Google.
     * Convenience wrapper around {@link deepNewsRaw} that unwraps the payload.
     */
    async deepNews(requestParameters, initOverrides) {
        const apiResponse = await this.deepNewsRaw(requestParameters, initOverrides);
        return apiResponse.value();
    }
    /**
     * Get the chat completions for a given user message. This endpoint follows the OpenAI API spec. It includes a couple extra params, which include: - **journalist_mode**: Whether to activate an auto prompt that is more keen on AP styling, citations, and fair reporting. Setting to false, you get a vanilla LLM with the news pre added to the system prompt. No other prompting. - **inline_citations**: Decides how you want the bot to cite sources. It can use brackets, or it can also include the markdown with URL automatically. - **asknews_watermark**: Whether to include the AskNews watermark in the response.
     * Get chat completions from a news-infused AI assistant
     */
    async getChatCompletionsRaw(requestParameters, initOverrides) {
        const completionRequest = requestParameters['createChatCompletionRequest'];
        if (completionRequest == null) {
            throw new runtime.RequiredError('createChatCompletionRequest', 'Required parameter "createChatCompletionRequest" was null or undefined when calling getChatCompletions().');
        }
        const query = {};
        const headers = { 'Content-Type': 'application/json' };
        const response = await this.request({
            path: `/v1/openai/chat/completions`,
            method: 'POST',
            headers: headers,
            query: query,
            body: (0, index_1.CreateChatCompletionRequestToJSON)(completionRequest),
        }, initOverrides);
        // Streamed responses are handed back as-is; non-streamed ones are deserialized.
        const wantsStream = 'createChatCompletionRequest' in requestParameters
            && completionRequest != null
            && 'stream' in completionRequest
            && completionRequest['stream'] === true;
        if (wantsStream) {
            return new runtime.StreamApiResponse(response);
        }
        return new runtime.JSONApiResponse(response, (jsonValue) => (0, index_1.CreateChatCompletionResponse1FromJSON)(jsonValue));
    }
    /**
     * Get the chat completions for a given user message. This endpoint follows the OpenAI API spec. It includes a couple extra params, which include: - **journalist_mode**: Whether to activate an auto prompt that is more keen on AP styling, citations, and fair reporting. Setting to false, you get a vanilla LLM with the news pre added to the system prompt. No other prompting. - **inline_citations**: Decides how you want the bot to cite sources. It can use brackets, or it can also include the markdown with URL automatically. - **asknews_watermark**: Whether to include the AskNews watermark in the response.
     * Get chat completions from a news-infused AI assistant
     */
    async getChatCompletions(requestParameters, initOverrides) {
        const apiResponse = await this.getChatCompletionsRaw(requestParameters, initOverrides);
        return apiResponse.value();
    }
    /**
     * Get the headline example questions related to the given queries.
     * Get example headline questions
     */
    async getHeadlineQuestionsRaw(requestParameters, initOverrides) {
        const query = {};
        // `queries` is optional; it is forwarded only when the caller supplied it.
        const queries = requestParameters['queries'];
        if (queries != null) {
            query['queries'] = queries;
        }
        const headers = {};
        const response = await this.request({
            path: `/v1/chat/questions`,
            method: 'GET',
            headers: headers,
            query: query,
        }, initOverrides);
        return new runtime.JSONApiResponse(response);
    }
    /**
     * Get the headline example questions related to the given queries.
     * Get example headline questions
     */
    async getHeadlineQuestions(requestParameters = {}, initOverrides) {
        const apiResponse = await this.getHeadlineQuestionsRaw(requestParameters, initOverrides);
        return apiResponse.value();
    }
    /**
     * List the available chat models.
     * List available chat models
     */
    async listChatModelsRaw(initOverrides) {
        const query = {};
        const headers = {};
        const response = await this.request({
            path: `/v1/openai/models`,
            method: 'GET',
            headers: headers,
            query: query,
        }, initOverrides);
        return new runtime.JSONApiResponse(response, (jsonValue) => (0, index_1.ListModelResponseFromJSON)(jsonValue));
    }
    /**
     * List the available chat models.
     * List available chat models
     */
    async listChatModels(initOverrides) {
        const apiResponse = await this.listChatModelsRaw(initOverrides);
        return apiResponse.value();
    }
}
exports.ChatApi = ChatApi;