// @azure/storage-file-datalake
// Version: (unspecified)
// Microsoft Azure Storage SDK for JavaScript - DataLake
// 91 lines • 3.78 kB
// JavaScript
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { StorageContextClient } from "./StorageContextClient.js";
import { BlobServiceClient } from "@azure/storage-blob";
import { toBlobEndpointUrl, toDfsEndpointUrl } from "./transforms.js";
import { escapeURLPath, getAccountNameFromUrl, getURLScheme, iEqual, } from "./utils/utils.common.js";
// This function relies on the Pipeline already being initialized by a storage-blob client
function getCoreClientOptions(pipeline) {
    // Drop the legacy V1 httpClient option; everything else passes through untouched.
    const { httpClient: _legacyHttpClient, ...passthroughOptions } = pipeline.options;
    const coreHttpClient = pipeline._coreHttpClient;
    if (!coreHttpClient) {
        throw new Error("Pipeline not correctly initialized; missing V2 HttpClient");
    }
    const corePipeline = pipeline._corePipeline;
    if (!corePipeline) {
        throw new Error("Pipeline not correctly initialized; missing V2 Pipeline");
    }
    // Hand back core-client options wired to the V2 HTTP client and pipeline that
    // the storage-blob client attached during its own initialization.
    return {
        ...passthroughOptions,
        allowInsecureConnection: true,
        httpClient: coreHttpClient,
        pipeline: corePipeline,
    };
}
/**
 * A StorageClient represents a base URL class for {@link BlobServiceClient}, {@link ContainerClient}
 * and etc.
 */
export class StorageClient {
    /**
     * Encoded URL string value.
     */
    url;
    /**
     * Storage account name parsed from the blob endpoint URL.
     */
    accountName;
    /**
     * Encoded URL string value for corresponding blob endpoint.
     */
    blobEndpointUrl;
    /**
     * Encoded URL string value for corresponding dfs endpoint.
     */
    dfsEndpointUrl;
    /**
     * Request policy pipeline.
     *
     * @internal
     */
    pipeline;
    /**
     * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
     */
    credential;
    /**
     * StorageClient is a reference to protocol layer operations entry, which is
     * generated by AutoRest generator.
     */
    storageClientContext;
    /**
     * storageClientContextToBlobEndpoint is a reference to protocol layer operations entry, which is
     * generated by AutoRest generator, with its url pointing to the Blob endpoint.
     */
    storageClientContextToBlobEndpoint;
    /**
     * True when the client URL uses the "https" scheme.
     */
    isHttps;
    /**
     * Creates an instance of StorageClient.
     * @param url - url to resource
     * @param pipeline - request policy pipeline.
     */
    constructor(url, pipeline) {
        // URL should be encoded and only once, protocol layer shouldn't encode URL again
        this.url = escapeURLPath(url);
        this.blobEndpointUrl = toBlobEndpointUrl(this.url);
        this.dfsEndpointUrl = toDfsEndpointUrl(this.url);
        this.accountName = getAccountNameFromUrl(this.blobEndpointUrl);
        this.pipeline = pipeline;
        this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
        // Creating this BlobServiceClient lets us reuse the converted V2 Pipeline attached to `pipeline`.
        const serviceClient = new BlobServiceClient(url, pipeline);
        this.credential = serviceClient.credential;
        // One generated context per endpoint: dfs for DataLake operations, blob for blob-side ones.
        this.storageClientContext = new StorageContextClient(this.dfsEndpointUrl, getCoreClientOptions(pipeline));
        this.storageClientContextToBlobEndpoint = new StorageContextClient(this.blobEndpointUrl, getCoreClientOptions(pipeline));
        // Override protocol layer's default content-type on both contexts.
        this.storageClientContext.requestContentType = undefined;
        this.storageClientContextToBlobEndpoint.requestContentType = undefined;
    }
}
//# sourceMappingURL=StorageClient.js.map