@azure/storage-file-datalake
Microsoft Azure Storage SDK for JavaScript - DataLake
import type { TokenCredential } from "@azure/core-auth";
import type { PagedAsyncIterableIterator } from "@azure/core-paging";
import type { ServiceGetPropertiesOptions, ServiceSetPropertiesOptions, ServiceSetPropertiesResponse } from "@azure/storage-blob";
import type { Pipeline, StoragePipelineOptions } from "./Pipeline.js";
import { AnonymousCredential } from "@azure/storage-blob";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential.js";
import { DataLakeFileSystemClient } from "./DataLakeFileSystemClient.js";
import type { FileSystemItem, ServiceGenerateAccountSasUrlOptions, ServiceListFileSystemsOptions, ServiceListFileSystemsSegmentResponse, ServiceUndeleteFileSystemOptions, FileSystemUndeleteResponse } from "./models.js";
import { StorageClient } from "./StorageClient.js";
import type { ServiceGetUserDelegationKeyOptions, ServiceGetUserDelegationKeyResponse } from "./models.js";
import { AccountSASPermissions } from "./sas/AccountSASPermissions.js";
import type { DataLakeServiceGetPropertiesResponse, DataLakeServiceProperties } from "./index.js";
/**
* DataLakeServiceClient allows you to manipulate Azure
* Data Lake service resources and file systems. The storage account provides
* the top-level namespace for the Data Lake service.
*/
export declare class DataLakeServiceClient extends StorageClient {
/**
* blobServiceClient provided by `@azure/storage-blob` package.
*/
private blobServiceClient;
/**
*
* Creates an instance of DataLakeServiceClient from a connection string.
*
* @param connectionString - Account connection string or a SAS connection string of an Azure storage account.
* Note: an account connection string can only be used in a Node.js runtime.
* Account connection string example -
* `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
* SAS connection string example -
* `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
* @param options - Optional. Options to configure the HTTP pipeline.
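* @example
* A minimal usage sketch (the connection string value is a placeholder):
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
*
* // Node.js only: an account connection string embeds the shared account key.
* const connectionString =
*   "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<account key>;EndpointSuffix=core.windows.net";
* const datalakeServiceClient = DataLakeServiceClient.fromConnectionString(connectionString);
* ```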
*/
static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): DataLakeServiceClient;
/**
* Creates an instance of DataLakeServiceClient from a URL.
*
* @param url - A URL string pointing to the Azure Storage Data Lake service, such as
* "https://myaccount.dfs.core.windows.net". You can append a SAS
* if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net?sasString".
* @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
* @param options - Optional. Options to configure the HTTP pipeline.
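* @example
* A minimal construction sketch with a token credential from `@azure/identity` (the account name is a placeholder):
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
*   `https://${account}.dfs.core.windows.net`,
*   new DefaultAzureCredential(),
* );
* ```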
*/
constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
/**
* Creates an instance of DataLakeServiceClient from a URL and a pipeline.
*
* @param url - A URL string pointing to the Azure Storage Data Lake service, such as
* "https://myaccount.dfs.core.windows.net". You can append a SAS
* if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net?sasString".
* @param pipeline - Call newPipeline() to create a default
* pipeline, or provide a customized pipeline.
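* @example
* A minimal sketch using `newPipeline()` with a shared key credential (account name and key are placeholders; the retry settings are illustrative):
* ```ts
* import {
*   DataLakeServiceClient,
*   StorageSharedKeyCredential,
*   newPipeline,
* } from "@azure/storage-file-datalake";
*
* const account = "<account>";
* const accountKey = "<account key>";
* // StorageSharedKeyCredential is only available in the Node.js runtime.
* const pipeline = newPipeline(new StorageSharedKeyCredential(account, accountKey), {
*   retryOptions: { maxTries: 4 },
* });
* const datalakeServiceClient = new DataLakeServiceClient(
*   `https://${account}.dfs.core.windows.net`,
*   pipeline,
* );
* ```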
*/
constructor(url: string, pipeline: Pipeline);
/**
* Creates a {@link DataLakeFileSystemClient} object.
*
* @param fileSystemName - File system name.
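* @example
* A short sketch, reusing a `datalakeServiceClient` constructed as in the examples above:
* ```ts
* const fileSystemClient = datalakeServiceClient.getFileSystemClient("<file system name>");
* await fileSystemClient.create(); // creates the file system in the account
* ```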
*/
getFileSystemClient(fileSystemName: string): DataLakeFileSystemClient;
/**
* ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).
*
* Retrieves a user delegation key for the Data Lake service. This is only a valid operation when using
* bearer token authentication.
*
* @example
* ```ts snippet:DatalakeServiceClientGetUserDelegationKey
* import {
* DataLakeServiceClient,
* generateDataLakeSASQueryParameters,
* FileSystemSASPermissions,
* SASProtocol,
* } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* // getUserDelegationKey requires bearer token (TokenCredential) authentication,
* // so the client is constructed with a credential from @azure/identity.
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
*   `https://${account}.dfs.core.windows.net`,
*   new DefaultAzureCredential(),
* );
*
* const fileSystemName = "<file system name>";
* const accountName = "<account name>";
* const startsOn = new Date();
* const expiresOn = new Date(+new Date() + 86400 * 1000);
* // Generate user delegation SAS for a file system
* const userDelegationKey = await datalakeServiceClient.getUserDelegationKey(startsOn, expiresOn);
* const fileSystemSAS = generateDataLakeSASQueryParameters(
* {
* fileSystemName, // Required
* permissions: FileSystemSASPermissions.parse("racwdl"), // Required
*     startsOn, // Optional. Date type
*     expiresOn, // Required. Date type
* ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional
* protocol: SASProtocol.HttpsAndHttp, // Optional
*     version: "2018-11-09", // Must be greater than or equal to 2018-11-09 to generate a user delegation SAS
* },
* userDelegationKey, // UserDelegationKey
* accountName,
* ).toString();
* ```
* @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key
*
* @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time.
* @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time.
* @param options - Optional. Options to the Get User Delegation Key operation.
*/
getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise<ServiceGetUserDelegationKeyResponse>;
/**
* Returns an async iterable iterator to list all the file systems
* under the specified account.
*
* .byPage() returns an async iterable iterator to list the file systems in pages.
*
* Example using `for await` syntax:
*
* ```ts snippet:ReadmeSampleListFileSystems
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net`,
* new DefaultAzureCredential(),
* );
*
* let i = 1;
* const fileSystems = datalakeServiceClient.listFileSystems();
* for await (const fileSystem of fileSystems) {
* console.log(`File system ${i++}: ${fileSystem.name}`);
* }
* ```
*
* Example using `iter.next()`:
*
* ```ts snippet:ReadmeSampleListFileSystems_Iterator
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net`,
* new DefaultAzureCredential(),
* );
*
* let i = 1;
* const fileSystems = datalakeServiceClient.listFileSystems();
* let { value, done } = await fileSystems.next();
* while (!done) {
* console.log(`File system ${i++}: ${value.name}`);
* ({ value, done } = await fileSystems.next());
* }
* ```
*
* Example using `byPage()`:
*
* ```ts snippet:ReadmeSampleListFileSystems_ByPage
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net`,
* new DefaultAzureCredential(),
* );
*
* let i = 1;
* for await (const response of datalakeServiceClient.listFileSystems().byPage({ maxPageSize: 20 })) {
* if (response.fileSystemItems) {
* for (const fileSystem of response.fileSystemItems) {
* console.log(`File System ${i++}: ${fileSystem.name}`);
* }
* }
* }
* ```
*
* Example using paging with a marker:
*
* ```ts snippet:ReadmeSampleListFileSystems_Continuation
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net`,
* new DefaultAzureCredential(),
* );
*
* let i = 1;
* let fileSystems = datalakeServiceClient.listFileSystems().byPage({ maxPageSize: 2 });
* let response = (await fileSystems.next()).value;
* // Prints 2 file systems
* if (response.fileSystemItems) {
* for (const fileSystem of response.fileSystemItems) {
* console.log(`File system ${i++}: ${fileSystem.name}`);
* }
* }
* // Gets next marker
* let marker = response.continuationToken;
* // Passing next marker as continuationToken
* fileSystems = datalakeServiceClient
* .listFileSystems()
* .byPage({ continuationToken: marker, maxPageSize: 10 });
* response = (await fileSystems.next()).value;
* // Prints 10 file systems
* if (response.fileSystemItems) {
* for (const fileSystem of response.fileSystemItems) {
* console.log(`File system ${i++}: ${fileSystem.name}`);
* }
* }
* ```
*
* @see https://learn.microsoft.com/rest/api/storageservices/list-containers2
*
* @param options - Optional. Options to list file systems.
*/
listFileSystems(options?: ServiceListFileSystemsOptions): PagedAsyncIterableIterator<FileSystemItem, ServiceListFileSystemsSegmentResponse>;
/**
* Only available for DataLakeServiceClient constructed with a shared key credential.
*
* Generates an account Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas
*
* @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to one hour later if not specified.
* @param permissions - Specifies the list of permissions to be associated with the SAS.
* @param resourceTypes - Specifies the resource types associated with the shared access signature.
* @param options - Optional parameters.
* @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
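* @example
* A minimal sketch, assuming the client was constructed with a shared key credential (account name and key are placeholders):
* ```ts
* import {
*   DataLakeServiceClient,
*   StorageSharedKeyCredential,
*   AccountSASPermissions,
* } from "@azure/storage-file-datalake";
*
* const account = "<account>";
* const accountKey = "<account key>";
* const datalakeServiceClient = new DataLakeServiceClient(
*   `https://${account}.dfs.core.windows.net`,
*   new StorageSharedKeyCredential(account, accountKey),
* );
*
* const sasUrl = datalakeServiceClient.generateAccountSasUrl(
*   new Date(Date.now() + 3600 * 1000), // expires in one hour
*   AccountSASPermissions.parse("r"), // read-only permissions
*   "sco", // service, container and object resource types
* );
* ```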
*/
generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string;
/**
* Only available for DataLakeServiceClient constructed with a shared key credential.
*
* Generates the string to sign for an account Shared Access Signature (SAS) based on the client properties
* and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas
*
* @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to one hour later if not specified.
* @param permissions - Specifies the list of permissions to be associated with the SAS.
* @param resourceTypes - Specifies the resource types associated with the shared access signature.
* @param options - Optional parameters.
* @returns The string to sign for an account SAS based on the client properties and parameters passed in.
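* @example
* A short sketch, reusing the shared-key client from the `generateAccountSasUrl` example; useful when the signature itself is computed elsewhere:
* ```ts
* const stringToSign = datalakeServiceClient.generateSasStringToSign(
*   new Date(Date.now() + 3600 * 1000), // expires in one hour
*   AccountSASPermissions.parse("r"),
*   "sco",
* );
* ```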
*/
generateSasStringToSign(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string;
/**
* Restores a previously deleted File System.
* This API is only functional if Container Soft Delete is enabled for the storage account.
*
* @param deletedFileSystemName - The name of the previously deleted File System.
* @param deleteFileSystemVersion - The version of the previously deleted File System.
* @param options - Options to configure File System Restore operation.
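* @example
* A minimal sketch (name and version are placeholders; the version of a soft-deleted file
* system is typically discovered by listing file systems with deleted items included):
* ```ts
* const { fileSystemClient } = await datalakeServiceClient.undeleteFileSystem(
*   "<deleted file system name>",
*   "<deleted file system version>",
* );
* console.log(`Restored file system: ${fileSystemClient.name}`);
* ```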
*/
undeleteFileSystem(deletedFileSystemName: string, deleteFileSystemVersion: string, options?: ServiceUndeleteFileSystemOptions): Promise<{
fileSystemClient: DataLakeFileSystemClient;
fileSystemUndeleteResponse: FileSystemUndeleteResponse;
}>;
/**
* Gets the properties of a storage account’s Blob service endpoint, including properties
* for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
* @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties
*
* @param options - Options to the Service Get Properties operation.
* @returns Response data for the Service Get Properties operation.
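* @example
* A short sketch, reusing `datalakeServiceClient` from the construction examples above:
* ```ts
* const serviceProperties = await datalakeServiceClient.getProperties();
* console.log(`CORS rules configured: ${serviceProperties.cors?.length ?? 0}`);
* ```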
*/
getProperties(options?: ServiceGetPropertiesOptions): Promise<DataLakeServiceGetPropertiesResponse>;
/**
* Sets properties for a storage account’s Blob service endpoint, including properties
* for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.
* @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties
*
* @param properties - The Data Lake service properties to set.
* @param options - Options to the Service Set Properties operation.
* @returns Response data for the Service Set Properties operation.
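* @example
* A minimal sketch enabling soft delete for seven days (assumes `datalakeServiceClient` is constructed as in the earlier examples):
* ```ts
* await datalakeServiceClient.setProperties({
*   deleteRetentionPolicy: { enabled: true, days: 7 },
* });
* ```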
*/
setProperties(properties: DataLakeServiceProperties, options?: ServiceSetPropertiesOptions): Promise<ServiceSetPropertiesResponse>;
}
//# sourceMappingURL=DataLakeServiceClient.d.ts.map