UNPKG

@azure/ai-language-text

Version:

An isomorphic client library for the text analysis features in the Azure Cognitive Language Service.

207 lines 9.07 kB
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.
import { __rest } from "tslib";
import { DEFAULT_COGNITIVE_SCOPE, SDK_VERSION } from "./constants";
import { isTokenCredential } from "@azure/core-auth";
import { createTracingClient } from "@azure/core-tracing";
import { convertToLanguageDetectionInput, convertToTextDocumentInput, getOperationOptions, isStringArray, } from "./util";
import { createAnalyzeBatchLro, createCreateAnalyzeBatchPollerLro, createPollerWithCancellation, createUpdateAnalyzeState, getDocIDsFromState, processAnalyzeResult, } from "./lro";
import { throwError, transformActionResult } from "./transforms";
import { GeneratedClient } from "./generated/generatedClient";
import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline";
import { createHttpPoller } from "@azure/core-lro";
import { logger } from "./logger";
import { textAnalyticsAzureKeyCredentialPolicy } from "./azureKeyCredentialPolicy";
/**
 * A client for the text analysis features of the Azure Cognitive Language
 * Service.
 *
 * Construction requires the endpoint of a Language resource plus a credential
 * (an API key or an AAD token credential). Both the key and the endpoint are
 * shown on the resource's "Keys and Endpoint" page in the Azure portal, under
 * Resource Management.
 *
 * ### Examples for authentication:
 *
 * #### API Key
 *
 * ```js
 * import { TextAnalysisClient, AzureKeyCredential } from "@azure/ai-language-text";
 *
 * const endpoint = "https://<resource name>.cognitiveservices.azure.com";
 * const credential = new AzureKeyCredential("<api key>");
 *
 * const client = new TextAnalysisClient(endpoint, credential);
 * ```
 *
 * #### Azure Active Directory
 *
 * See the [`@azure/identity`](https://npmjs.com/package/\@azure/identity)
 * package for more information about authenticating with Azure Active Directory.
 *
 * ```js
 * import { TextAnalysisClient } from "@azure/ai-language-text";
 * import { DefaultAzureCredential } from "@azure/identity";
 *
 * const endpoint = "https://<resource name>.cognitiveservices.azure.com";
 * const credential = new DefaultAzureCredential();
 *
 * const client = new TextAnalysisClient(endpoint, credential);
 * ```
 */
export class TextAnalysisClient {
    /**
     * Create a client bound to a Language resource endpoint.
     *
     * @param endpointUrl - URL of the Language resource, e.g.
     *   `https://<name>.cognitiveservices.azure.com`.
     * @param credential - `AzureKeyCredential` or an AAD `TokenCredential`.
     * @param options - optional client settings: `defaultCountryHint`
     *   (default `"us"`), `defaultLanguage` (default `"en"`),
     *   `serviceVersion`, and common pipeline options.
     */
    constructor(endpointUrl, credential, options = {}) {
        const { defaultCountryHint = "us", defaultLanguage = "en", serviceVersion, ...pipelineOptions } = options;
        this.defaultCountryHint = defaultCountryHint;
        this.defaultLanguage = defaultLanguage;
        const internalPipelineOptions = {
            ...pipelineOptions,
            loggingOptions: {
                logger: logger.info,
                additionalAllowedHeaderNames: ["x-ms-correlation-request-id", "x-ms-request-id"],
            },
            apiVersion: serviceVersion,
        };
        this._client = new GeneratedClient(endpointUrl, internalPipelineOptions);
        // Key credentials and token credentials install different auth policies.
        const authPolicy = isTokenCredential(credential)
            ? bearerTokenAuthenticationPolicy({ credential, scopes: DEFAULT_COGNITIVE_SCOPE })
            : textAnalyticsAzureKeyCredentialPolicy(credential);
        this._client.pipeline.addPolicy(authPolicy);
        this._tracing = createTracingClient({
            packageName: "@azure/ai-language-text",
            packageVersion: SDK_VERSION,
            namespace: "Microsoft.CognitiveServices",
        });
    }
    /**
     * Run a single synchronous analysis action over a batch of documents.
     *
     * Accepts either plain strings (converted to document inputs using the
     * given — or default — language/country hint) or pre-built document
     * input objects.
     *
     * @param actionName - the analysis kind (e.g. `"LanguageDetection"`).
     * @param documents - non-empty array of strings or document inputs.
     * @param languageOrCountryHintOrOptions - language/country-hint string
     *   (when `documents` are strings) or the operation options.
     * @param options - operation options when the third argument was a string.
     * @returns the transformed per-document results.
     * @throws Error when `documents` is empty.
     */
    // implementation
    async analyze(actionName, documents, languageOrCountryHintOrOptions, options) {
        if (documents.length === 0) {
            throw new Error("'documents' must be a non-empty array");
        }
        let realInputs;
        let realOptions;
        if (isStringArray(documents)) {
            // Language detection takes a country hint; everything else a language code.
            const hintOrCode = typeof languageOrCountryHintOrOptions === "string"
                ? languageOrCountryHintOrOptions
                : undefined;
            if (actionName === "LanguageDetection") {
                realInputs = convertToLanguageDetectionInput(documents, hintOrCode !== undefined ? hintOrCode : this.defaultCountryHint);
            }
            else {
                realInputs = convertToTextDocumentInput(documents, hintOrCode !== undefined ? hintOrCode : this.defaultLanguage);
            }
            realOptions = options || {};
        }
        else {
            realInputs = documents;
            realOptions = languageOrCountryHintOrOptions || {};
        }
        const { options: operationOptions, rest: action } = getOperationOptions(realOptions);
        return this._tracing.withSpan("TextAnalysisClient.analyze", operationOptions, async (updatedOptions) => {
            const resultPromise = this._client
                .analyze({
                kind: actionName,
                analysisInput: { documents: realInputs },
                parameters: action,
            }, updatedOptions)
                .then((result) => transformActionResult(actionName, realInputs.map(({ id }) => id), result));
            // throwError rejects with a service-shaped error instead of the raw one.
            return throwError(resultPromise);
        });
    }
    /**
     * Start a long-running batch of analysis actions over a set of documents.
     *
     * @param actions - array of `{ kind, actionName, ...parameters }` actions.
     * @param documents - non-empty array of strings or document inputs.
     * @param languageOrOptions - language code (when `documents` are strings)
     *   or the operation options.
     * @param options - operation options when the third argument was a string.
     * @returns a poller (with cancellation support) for the batch operation.
     * @throws Error when `documents` is not a non-empty array.
     */
    // implementation
    async beginAnalyzeBatch(actions, documents, languageOrOptions, options = {}) {
        if (!Array.isArray(documents) || documents.length === 0) {
            throw new Error("'documents' must be a non-empty array");
        }
        let realInputs;
        let realOptions;
        if (isStringArray(documents)) {
            realInputs = convertToTextDocumentInput(documents, languageOrOptions ?? this.defaultLanguage);
            realOptions = options;
        }
        else {
            realInputs = documents;
            realOptions = languageOrOptions;
        }
        // Everything besides kind/actionName is forwarded as task parameters.
        const realActions = actions.map(({ kind, actionName, ...parameters }) => ({
            kind,
            actionName,
            parameters,
        }));
        const { includeStatistics, updateIntervalInMs, displayName, ...rest } = realOptions;
        const lro = createAnalyzeBatchLro({
            client: this._client,
            commonOptions: rest,
            documents: realInputs,
            initialRequestOptions: { displayName },
            pollRequestOptions: { includeStatistics },
            tasks: realActions,
            tracing: this._tracing,
        });
        const docIds = realInputs.map(({ id }) => id);
        // Shared mutable state lets the poller callbacks capture the latest
        // operation-location token for later serialization/cancellation.
        const state = { continuationToken: "" };
        const poller = await createHttpPoller(lro, {
            intervalInMs: updateIntervalInMs,
            processResult: processAnalyzeResult({
                client: this._client,
                tracing: this._tracing,
                docIds,
                opOptions: { ...rest, includeStatistics },
                state,
            }),
            updateState: createUpdateAnalyzeState(docIds),
            withOperationLocation: (operationLocation) => {
                state.continuationToken = operationLocation;
            },
        });
        await poller.poll();
        const id = poller.getOperationState().id;
        return createPollerWithCancellation({
            id,
            client: this._client,
            options,
            poller,
            tracing: this._tracing,
        });
    }
    /**
     * Resume a previously-started batch operation from its serialized state.
     *
     * @param serializedState - state produced by a prior poller's
     *   `toString()`/serialization.
     * @param options - `includeStatistics`, `updateIntervalInMs`, and common
     *   operation options.
     * @returns a poller (with cancellation support) resumed from that state.
     */
    // implementation
    async restoreAnalyzeBatchPoller(serializedState, options = {}) {
        const { includeStatistics, updateIntervalInMs, ...rest } = options;
        // Document IDs are recovered from the serialized state, not re-supplied.
        const docIds = getDocIDsFromState(serializedState);
        const lro = createCreateAnalyzeBatchPollerLro({
            client: this._client,
            options: { ...rest, includeStatistics },
            tracing: this._tracing,
        });
        const state = { continuationToken: "" };
        const poller = await createHttpPoller(lro, {
            intervalInMs: updateIntervalInMs,
            restoreFrom: serializedState,
            processResult: processAnalyzeResult({
                client: this._client,
                tracing: this._tracing,
                docIds,
                opOptions: { ...rest, includeStatistics },
                state,
            }),
            updateState: createUpdateAnalyzeState(),
            withOperationLocation: (operationLocation) => {
                state.continuationToken = operationLocation;
            },
        });
        await poller.poll();
        const id = poller.getOperationState().id;
        return createPollerWithCancellation({
            id,
            client: this._client,
            options,
            poller,
            tracing: this._tracing,
        });
    }
}