@azure/storage-file-datalake
Version:
Microsoft Azure Storage SDK for JavaScript - DataLake
95 lines • 4.07 kB
JavaScript
;
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
Object.defineProperty(exports, "__esModule", { value: true });
exports.StorageClient = void 0;
const StorageContextClient_js_1 = require("./StorageContextClient.js");
const storage_blob_1 = require("@azure/storage-blob");
const transforms_js_1 = require("./transforms.js");
const utils_common_js_1 = require("./utils/utils.common.js");
// This function relies on the Pipeline already being initialized by a storage-blob client
function getCoreClientOptions(pipeline) {
    // Both V2 artifacts must have been attached by the storage-blob client; fail loudly otherwise.
    const coreHttpClient = pipeline._coreHttpClient;
    if (!coreHttpClient) {
        throw new Error("Pipeline not correctly initialized; missing V2 HttpClient");
    }
    const corePipeline = pipeline._corePipeline;
    if (!corePipeline) {
        throw new Error("Pipeline not correctly initialized; missing V2 Pipeline");
    }
    // Drop the legacy (V1) httpClient from the options — the V2 client below replaces it.
    const { httpClient: legacyHttpClient, ...remainingOptions } = pipeline.options;
    return Object.assign({}, remainingOptions, {
        allowInsecureConnection: true,
        httpClient: coreHttpClient,
        pipeline: corePipeline,
    });
}
/**
 * A StorageClient represents a base URL class for {@link BlobServiceClient}, {@link ContainerClient}
 * and so on.
 */
class StorageClient {
    /**
     * Encoded URL string value.
     */
    url;
    /**
     * Storage account name, parsed from the blob endpoint URL.
     */
    accountName;
    /**
     * Encoded URL string value for corresponding blob endpoint.
     */
    blobEndpointUrl;
    /**
     * Encoded URL string value for corresponding dfs endpoint.
     */
    dfsEndpointUrl;
    /**
     * Request policy pipeline.
     *
     * @internal
     */
    pipeline;
    /**
     * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
     */
    credential;
    /**
     * StorageClient is a reference to protocol layer operations entry, which is
     * generated by AutoRest generator.
     */
    storageClientContext;
    /**
     * storageClientContextWithBlobEndpoint is a reference to protocol layer operations entry, which is
     * generated by AutoRest generator, with its url pointing to the Blob endpoint.
     */
    storageClientContextToBlobEndpoint;
    /**
     * True when the client URL uses the "https" scheme.
     */
    isHttps;
    /**
     * Creates an instance of StorageClient.
     * @param url - url to resource
     * @param pipeline - request policy pipeline.
     */
    constructor(url, pipeline) {
        // The URL must be encoded exactly once here; the protocol layer must not encode it again.
        this.url = (0, utils_common_js_1.escapeURLPath)(url);
        this.blobEndpointUrl = (0, transforms_js_1.toBlobEndpointUrl)(this.url);
        this.dfsEndpointUrl = (0, transforms_js_1.toDfsEndpointUrl)(this.url);
        this.accountName = (0, utils_common_js_1.getAccountNameFromUrl)(this.blobEndpointUrl);
        this.pipeline = pipeline;
        // Constructing this BlobServiceClient lets us reuse the converted V2 Pipeline attached to `pipeline`.
        const blobServiceClient = new storage_blob_1.BlobServiceClient(url, pipeline);
        this.credential = blobServiceClient.credential;
        this.storageClientContext = new StorageContextClient_js_1.StorageContextClient(this.dfsEndpointUrl, getCoreClientOptions(pipeline));
        this.storageClientContextToBlobEndpoint = new StorageContextClient_js_1.StorageContextClient(this.blobEndpointUrl, getCoreClientOptions(pipeline));
        this.isHttps = (0, utils_common_js_1.iEqual)((0, utils_common_js_1.getURLScheme)(this.url) || "", "https");
        // Override the protocol layer's default content-type on both generated contexts.
        this.storageClientContext.requestContentType = undefined;
        this.storageClientContextToBlobEndpoint.requestContentType = undefined;
    }
}
exports.StorageClient = StorageClient;
//# sourceMappingURL=StorageClient.js.map