@azure/storage-file-datalake
Microsoft Azure Storage SDK for JavaScript - DataLake
import type { TokenCredential } from "@azure/core-auth";
import type { RequestBodyType as HttpRequestBody } from "@azure/core-rest-pipeline";
import type { Pipeline, StoragePipelineOptions } from "./Pipeline.js";
import { AnonymousCredential } from "@azure/storage-blob";
import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential.js";
import type { Readable } from "node:stream";
import { DataLakeLeaseClient } from "./DataLakeLeaseClient.js";
import type { DirectoryCreateIfNotExistsOptions, DirectoryCreateIfNotExistsResponse, DirectoryCreateOptions, DirectoryCreateResponse, DirectoryGenerateSasUrlOptions, FileAppendOptions, FileAppendResponse, FileCreateIfNotExistsOptions, FileCreateIfNotExistsResponse, FileCreateOptions, FileCreateResponse, FileExpiryMode, FileFlushOptions, FileFlushResponse, FileGenerateSasUrlOptions, FileParallelUploadOptions, FileQueryOptions, FileReadOptions, FileReadResponse, FileReadToBufferOptions, FileSetExpiryOptions, FileSetExpiryResponse, FileUploadResponse, Metadata, PathAccessControlItem, PathChangeAccessControlRecursiveOptions, PathChangeAccessControlRecursiveResponse, PathCreateIfNotExistsOptions, PathCreateIfNotExistsResponse, PathCreateOptions, PathCreateResponse, PathDeleteIfExistsResponse, PathDeleteOptions, PathDeleteResponse, PathExistsOptions, PathGetAccessControlOptions, PathGetAccessControlResponse, PathGetPropertiesOptions, PathGetPropertiesResponse, PathHttpHeaders, PathMoveOptions, PathMoveResponse, PathPermissions, PathResourceTypeModel, PathSetAccessControlOptions, PathSetAccessControlResponse, PathSetHttpHeadersOptions, PathSetHttpHeadersResponse, PathSetMetadataOptions, PathSetMetadataResponse, PathSetPermissionsOptions, PathSetPermissionsResponse, RemovePathAccessControlItem, UserDelegationKey } from "./models.js";
import { StorageClient } from "./StorageClient.js";
/**
* A DataLakePathClient represents a URL to the Azure Storage path (directory or file).
*/
export declare class DataLakePathClient extends StorageClient {
/**
* pathContext provided by protocol layer.
*/
private pathContext;
/**
* blobClient provided by `@azure/storage-blob` package.
*/
private blobClient;
private isTokenCredential?;
/**
* SetAccessControlRecursiveInternal operation sets the Access Control on a path and sub paths.
*
* @param mode - Mode \"set\" sets POSIX access control rights on files and directories,
* Mode \"modify\" modifies one or more POSIX access control rights that pre-exist on files and directories,
* Mode \"remove\" removes one or more POSIX access control rights that were present earlier on files and directories.
* @param acl - The POSIX access control list for the file or directory.
* @param options - Optional. Options
*/
private setAccessControlRecursiveInternal;
/**
* Creates an instance of DataLakePathClient from url and credential.
*
* @param url - A Client string pointing to Azure Storage data lake path (directory or file), such as
* "https://myaccount.dfs.core.windows.net/filesystem/directory" or "https://myaccount.dfs.core.windows.net/filesystem/file".
* You can append a SAS if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net/filesystem/directory?sasString".
* @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
* @param options - Optional. Options to configure the HTTP pipeline.
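*
* Example usage (a minimal sketch; the account, file system, and path names are placeholders,
* and `@azure/identity` is assumed for token-based authentication):
*
* ```ts
* import { DataLakePathClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* // Point the client at an existing directory or file path.
* const pathClient = new DataLakePathClient(
*   "https://<account>.dfs.core.windows.net/<file system name>/<directory name>",
*   new DefaultAzureCredential(),
* );
* ```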
*/
constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
/**
* Creates an instance of DataLakePathClient from url and pipeline.
*
* @param url - A Client string pointing to Azure Storage data lake path (directory or file), such as
* "https://myaccount.dfs.core.windows.net/filesystem/directory" or "https://myaccount.dfs.core.windows.net/filesystem/file".
* You can append a SAS if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net/filesystem/directory?sasString".
* @param pipeline - Call newPipeline() to create a default
* pipeline, or provide a customized pipeline.
*/
constructor(url: string, pipeline: Pipeline);
/**
* Name of current file system.
*
* @readonly
*/
get fileSystemName(): string;
/**
* Name of current path (directory or file).
*
* @readonly
*/
get name(): string;
/**
* Convert current DataLakePathClient to DataLakeDirectoryClient if current path is a directory.
*
*/
toDirectoryClient(): DataLakeDirectoryClient;
/**
* Convert current DataLakePathClient to DataLakeFileClient if current path is a file.
*
*/
toFileClient(): DataLakeFileClient;
/**
* Get a {@link DataLakeLeaseClient} that manages leases on the path (directory or file).
*
* @param proposeLeaseId - Optional. Initial proposed lease Id.
*/
getDataLakeLeaseClient(proposeLeaseId?: string): DataLakeLeaseClient;
/**
* Create a directory or file.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, "directory" or "file".
* @param options - Optional. Options when creating path.
*/
create(resourceType: PathResourceTypeModel, options?: PathCreateOptions): Promise<PathCreateResponse>;
/**
* Create a directory or file. If the resource already exists, it is not changed.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, "directory" or "file".
* @param options -
*/
createIfNotExists(resourceType: PathResourceTypeModel, options?: PathCreateIfNotExistsOptions): Promise<PathCreateIfNotExistsResponse>;
/**
* Returns true if the Data Lake path (directory or file) represented by this client exists; false otherwise.
*
* NOTE: use this function with care, since an existing path might be deleted by other clients or
* applications; conversely, new paths might be added by other clients or applications after this
* function completes.
*
* @param options - Optional. Options for the exists operation.
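*
* Example usage (a minimal sketch; the placeholder account, file system, and file names are assumptions):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // Check for existence before reading; note the result can become stale immediately.
* if (await fileClient.exists()) {
*   const properties = await fileClient.getProperties();
*   console.log(`Path exists, content length: ${properties.contentLength}`);
* }
* ```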
*/
exists(options?: PathExistsOptions): Promise<boolean>;
/**
* Delete current path (directory or file).
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete
*
* @param recursive - Required and valid only when the resource is a directory. If "true", all paths beneath the directory will be deleted.
* @param options - Optional. Options when deleting path.
*/
delete(recursive?: boolean, options?: PathDeleteOptions): Promise<PathDeleteResponse>;
/**
* Delete current path (directory or file) if it exists.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete
*
* @param recursive - Required and valid only when the resource is a directory. If "true", all paths beneath the directory will be deleted.
* @param options -
*/
deleteIfExists(recursive?: boolean, options?: PathDeleteOptions): Promise<PathDeleteIfExistsResponse>;
/**
* Returns the access control data for a path (directory or file).
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/getproperties
*
* @param options - Optional. Options when getting file access control.
*/
getAccessControl(options?: PathGetAccessControlOptions): Promise<PathGetAccessControlResponse>;
/**
* Sets the access control data for a path (directory or file).
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param acl - The POSIX access control list for the file or directory.
* @param options - Optional. Options when setting path access control.
*/
setAccessControl(acl: PathAccessControlItem[], options?: PathSetAccessControlOptions): Promise<PathSetAccessControlResponse>;
/**
* Sets the Access Control on a path and sub paths.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param acl - The POSIX access control list for the file or directory.
* @param options - Optional. Options
*/
setAccessControlRecursive(acl: PathAccessControlItem[], options?: PathChangeAccessControlRecursiveOptions): Promise<PathChangeAccessControlRecursiveResponse>;
/**
* Modifies the Access Control on a path and sub paths.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param acl - The POSIX access control list for the file or directory.
* @param options - Optional. Options
*/
updateAccessControlRecursive(acl: PathAccessControlItem[], options?: PathChangeAccessControlRecursiveOptions): Promise<PathChangeAccessControlRecursiveResponse>;
/**
* Removes the Access Control on a path and sub paths.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param acl - The POSIX access control list for the file or directory.
* @param options - Optional. Options
*/
removeAccessControlRecursive(acl: RemovePathAccessControlItem[], options?: PathChangeAccessControlRecursiveOptions): Promise<PathChangeAccessControlRecursiveResponse>;
/**
* Sets the file permissions on a path.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param permissions - The POSIX access permissions for the file owner, the file owning group, and others.
* @param options - Optional. Options when setting path permissions.
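*
* Example usage (a minimal sketch; the placeholder names are assumptions, and the permissions
* object is assumed to use the PathPermissions shape with owner/group/other role permissions
* plus stickyBit and extendedAcls flags):
*
* ```ts
* import { DataLakePathClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const pathClient = new DataLakePathClient(
*   "https://<account>.dfs.core.windows.net/<file system name>/<file name>",
*   new DefaultAzureCredential(),
* );
*
* // rwx for the owner, r-x for the owning group, no access for others.
* await pathClient.setPermissions({
*   owner: { read: true, write: true, execute: true },
*   group: { read: true, write: false, execute: true },
*   other: { read: false, write: false, execute: false },
*   stickyBit: false,
*   extendedAcls: false,
* });
* ```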
*/
setPermissions(permissions: PathPermissions, options?: PathSetPermissionsOptions): Promise<PathSetPermissionsResponse>;
/**
* Returns all user-defined metadata, standard HTTP properties, and system properties
* for the path (directory or file).
*
* WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
* they originally contained uppercase characters. This differs from the metadata keys returned by
* the methods of {@link DataLakeFileSystemClient} that list paths using the `includeMetadata` option, which
* will retain their original casing.
*
* @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties
*
* @param options - Optional. Options when getting path properties.
*/
getProperties(options?: PathGetPropertiesOptions): Promise<PathGetPropertiesResponse>;
/**
* Sets system properties on the path (directory or file).
*
* If no value is provided, or no value is provided for the specified blob HTTP headers,
* those blob HTTP headers without a value will be cleared.
* @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties
*
* @param httpHeaders -
* @param options -
*/
setHttpHeaders(httpHeaders: PathHttpHeaders, options?: PathSetHttpHeadersOptions): Promise<PathSetHttpHeadersResponse>;
/**
* Sets user-defined metadata for the specified path (directory or file) as one or more name-value pairs.
*
* If no option is provided, or no metadata is defined in the parameter, the path
* metadata will be removed.
*
* @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata
*
* @param metadata - Optional. Replace existing metadata with this value.
* If no value provided the existing metadata will be removed.
* @param options - Optional. Options when setting path metadata.
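*
* Example usage (a minimal sketch; the placeholder names and metadata values are assumptions):
*
* ```ts
* import { DataLakePathClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const pathClient = new DataLakePathClient(
*   "https://<account>.dfs.core.windows.net/<file system name>/<file name>",
*   new DefaultAzureCredential(),
* );
*
* // Replace all user-defined metadata on the path with these name-value pairs.
* await pathClient.setMetadata({ project: "alpha", reviewed: "true" });
*
* // Calling it with no argument removes the existing metadata.
* await pathClient.setMetadata();
* ```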
*/
setMetadata(metadata?: Metadata, options?: PathSetMetadataOptions): Promise<PathSetMetadataResponse>;
/**
* Move directory or file within same file system.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param destinationPath - Destination directory path like "directory" or file path "directory/file".
* If the destinationPath is authenticated with SAS, add the SAS to the destination path like "directory/file?sasToken".
* @param options - Optional. Options when moving directory or file.
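*
* Example usage (a minimal sketch; the placeholder names and destination path are assumptions):
*
* ```ts
* import { DataLakePathClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const pathClient = new DataLakePathClient(
*   "https://<account>.dfs.core.windows.net/<file system name>/source-directory/source-file",
*   new DefaultAzureCredential(),
* );
*
* // Rename (move) the file within the same file system.
* await pathClient.move("destination-directory/destination-file");
* ```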
*/
move(destinationPath: string, options?: PathMoveOptions): Promise<PathMoveResponse>;
/**
* Move directory or file to another file system.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param destinationFileSystem - Destination file system like "filesystem".
* @param destinationPath - Destination directory path like "directory" or file path "directory/file"
* If the destinationPath is authenticated with SAS, add the SAS to the destination path like "directory/file?sasToken".
* @param options - Optional. Options when moving directory or file.
*/
move(destinationFileSystem: string, destinationPath: string, options?: PathMoveOptions): Promise<PathMoveResponse>;
}
/**
* A DataLakeDirectoryClient represents a URL to the Azure Storage directory.
*/
export declare class DataLakeDirectoryClient extends DataLakePathClient {
/**
* Create a directory.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, must be "directory" for DataLakeDirectoryClient.
* @param options - Optional. Options when creating directory.
*/
create(resourceType: PathResourceTypeModel, options?: PathCreateOptions): Promise<PathCreateResponse>;
/**
* Create a directory.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param options - Optional. Options when creating directory.
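*
* Example usage (a minimal sketch; the placeholder names are assumptions, and
* `getDirectoryClient` comes from DataLakeFileSystemClient in this package):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const directoryClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getDirectoryClient("<directory name>");
*
* // Create the directory, then a file underneath it.
* await directoryClient.create();
* const fileClient = directoryClient.getFileClient("<file name>");
* await fileClient.create();
* ```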
*/
create(options?: DirectoryCreateOptions): Promise<DirectoryCreateResponse>;
/**
* Create a directory if it doesn't already exist.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, must be "directory" for DataLakeDirectoryClient.
* @param options -
*/
createIfNotExists(resourceType: PathResourceTypeModel, options?: PathCreateIfNotExistsOptions): Promise<PathCreateIfNotExistsResponse>;
/**
* Create a directory if it doesn't already exist.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param options -
*/
createIfNotExists(options?: DirectoryCreateIfNotExistsOptions): Promise<DirectoryCreateIfNotExistsResponse>;
/**
* Creates a {@link DataLakeDirectoryClient} object under current directory.
*
* @param subdirectoryName - Subdirectory name.
*/
getSubdirectoryClient(subdirectoryName: string): DataLakeDirectoryClient;
/**
* Creates a {@link DataLakeFileClient} object under current directory.
*
* @param fileName -
*/
getFileClient(fileName: string): DataLakeFileClient;
/**
* Only available for clients constructed with a shared key credential.
*
* Generates a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
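*
* Example usage (a minimal sketch; the account name and key are placeholders, and
* DataLakeSASPermissions with the expiresOn option are assumed to be accepted by
* DirectoryGenerateSasUrlOptions):
*
* ```ts
* import {
*   DataLakeSASPermissions,
*   DataLakeServiceClient,
*   StorageSharedKeyCredential,
* } from "@azure/storage-file-datalake";
*
* const credential = new StorageSharedKeyCredential("<account>", "<account key>");
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   credential,
* );
* const directoryClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getDirectoryClient("<directory name>");
*
* // Read-only SAS URL for the directory, valid for one hour.
* const sasUrl = await directoryClient.generateSasUrl({
*   permissions: DataLakeSASPermissions.parse("r"),
*   expiresOn: new Date(Date.now() + 60 * 60 * 1000),
* });
* console.log(sasUrl);
* ```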
*/
generateSasUrl(options: DirectoryGenerateSasUrlOptions): Promise<string>;
/**
* Generates string to sign for a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @returns The string to sign for the SAS URI.
*/
generateSasStringToSign(options: DirectoryGenerateSasUrlOptions): string;
/**
* Generates a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the input user delegation key.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`
* @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
*/
generateUserDelegationSasUrl(options: DirectoryGenerateSasUrlOptions, userDelegationKey: UserDelegationKey): Promise<string>;
/**
* Generates string to sign for a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the input user delegation key.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`
* @returns The string to sign for the SAS URI.
*/
generateUserDelegationSasStringToSign(options: DirectoryGenerateSasUrlOptions, userDelegationKey: UserDelegationKey): string;
}
/**
* A DataLakeFileClient represents a URL to the Azure Storage file.
*/
export declare class DataLakeFileClient extends DataLakePathClient {
/**
* pathContextInternal provided by protocol layer.
*/
private pathContextInternal;
/**
* pathContextInternal provided by protocol layer, with its url pointing to the Blob endpoint.
*/
private pathContextInternalToBlobEndpoint;
/**
* blockBlobClientInternal provided by `@azure/storage-blob` package.
*/
private blockBlobClientInternal;
/**
* Creates an instance of DataLakeFileClient from url and credential.
*
* @param url - A Client string pointing to Azure Storage data lake file, such as
* "https://myaccount.dfs.core.windows.net/filesystem/file".
* You can append a SAS if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net/filesystem/directory/file?sasString".
* @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
* @param options - Optional. Options to configure the HTTP pipeline.
*/
constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions);
/**
* Creates an instance of DataLakeFileClient from url and pipeline.
*
* @param url - A Client string pointing to Azure Storage data lake file, such as
* "https://myaccount.dfs.core.windows.net/filesystem/file".
* You can append a SAS if using AnonymousCredential, such as "https://myaccount.dfs.core.windows.net/filesystem/directory/file?sasString".
* @param pipeline - Call newPipeline() to create a default
* pipeline, or provide a customized pipeline.
*/
constructor(url: string, pipeline: Pipeline);
/**
* Create a file.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, must be "file" for DataLakeFileClient.
* @param options - Optional. Options when creating file.
*/
create(resourceType: PathResourceTypeModel, options?: PathCreateOptions): Promise<PathCreateResponse>;
/**
* Create a file.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param options - Optional. Options when creating file.
*/
create(options?: FileCreateOptions): Promise<FileCreateResponse>;
/**
* Create a file if it doesn't already exist.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param resourceType - Resource type, must be "file" for DataLakeFileClient.
* @param options -
*/
createIfNotExists(resourceType: PathResourceTypeModel, options?: PathCreateIfNotExistsOptions): Promise<PathCreateIfNotExistsResponse>;
/**
* Create a file if it doesn't already exist.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create
*
* @param options - Optional. Options when creating file.
*/
createIfNotExists(options?: FileCreateIfNotExistsOptions): Promise<FileCreateIfNotExistsResponse>;
/**
* Downloads a file from the service, including its metadata and properties.
*
* * In Node.js, data is returned in a Readable stream readableStreamBody
* * In browsers, data is returned in a promise contentAsBlob
*
* @see https://learn.microsoft.com/rest/api/storageservices/get-blob
*
* Example usage (Node.js):
*
* ```ts snippet:ReadmeSampleDownloadFile_Node
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const account = "<account>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net`,
* new DefaultAzureCredential(),
* );
*
* const fileSystemName = "<file system name>";
* const fileName = "<file name>";
* const fileSystemClient = datalakeServiceClient.getFileSystemClient(fileSystemName);
* const fileClient = fileSystemClient.getFileClient(fileName);
*
* // Get file content from position 0 to the end
* // In Node.js, get downloaded data by accessing downloadResponse.readableStreamBody
* const downloadResponse = await fileClient.read();
* if (downloadResponse.readableStreamBody) {
* const downloaded = await streamToBuffer(downloadResponse.readableStreamBody);
* console.log("Downloaded file content:", downloaded.toString());
* }
*
* // [Node.js only] A helper method used to read a Node.js readable stream into a Buffer.
* async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
* return new Promise((resolve, reject) => {
* const chunks: Buffer[] = [];
* readableStream.on("data", (data) => {
* chunks.push(data instanceof Buffer ? data : Buffer.from(data));
* });
* readableStream.on("end", () => {
* resolve(Buffer.concat(chunks));
* });
* readableStream.on("error", reject);
* });
* }
* ```
*
* Example usage (browser):
*
* ```ts snippet:ReadmeSampleDownloadFile_Browser
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
*
* const account = "<account>";
* const sas = "<sas token>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net${sas}`,
* );
*
* const fileSystemName = "<file system name>";
* const fileName = "<file name>";
* const fileSystemClient = datalakeServiceClient.getFileSystemClient(fileSystemName);
* const fileClient = fileSystemClient.getFileClient(fileName);
*
* // Get file content from position 0 to the end
* // In browsers, get downloaded data by accessing downloadResponse.contentAsBlob
* const downloadResponse = await fileClient.read();
* if (downloadResponse.contentAsBlob) {
* const blob = await downloadResponse.contentAsBlob;
* const downloaded = await blob.text();
* console.log(`Downloaded file content ${downloaded}`);
* }
* ```
*
* @param offset - Optional. Offset to read file, default value is 0.
* @param count - Optional. How many bytes to read, default will read from offset to the end.
* @param options - Optional. Options when reading file.
*/
read(offset?: number, count?: number, options?: FileReadOptions): Promise<FileReadResponse>;
/**
* Uploads data to be appended to a file. Data can only be appended to a file.
* To apply previously uploaded data to a file, call flush.
*
* @see https://learn.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update
*
* @param body - Content to be uploaded.
* @param offset - Append offset in bytes.
* @param length - Length of content to append in bytes.
* @param options - Optional. Options when appending data.
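*
* Example usage (a minimal sketch; the placeholder names are assumptions). It shows the
* append-then-flush workflow: data is staged with `append` and committed with `flush` at
* the final length:
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* const content = Buffer.from("Hello, Data Lake!");
* await fileClient.create();
* // Stage the data at offset 0.
* await fileClient.append(content, 0, content.length);
* // Commit the staged data; position equals the total length written so far.
* await fileClient.flush(content.length);
* ```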
*/
append(body: HttpRequestBody, offset: number, length: number, options?: FileAppendOptions): Promise<FileAppendResponse>;
/**
* Flushes (writes) previously appended data to a file.
*
* @param position - File position to flush.
* This parameter allows the caller to upload data in parallel and control the order in which it is appended to the file.
* It is required when uploading data to be appended to the file and when flushing previously uploaded data to the file.
* The value must be the position where the data is to be appended. Uploaded data is not immediately flushed, or written,
* to the file. To flush, the previously uploaded data must be contiguous, the position parameter must be specified and
* equal to the length of the file after all data has been written, and there must not be a request entity body included
* with the request.
* @param options - Optional. Options when flushing data.
*/
flush(position: number, options?: FileFlushOptions): Promise<FileFlushResponse>;
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* Uploads a local file to a Data Lake file.
*
* @param filePath - Full path of the local file
* @param options -
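*
* Example usage (Node.js only; a minimal sketch with placeholder names and a placeholder local
* path; `chunkSize` and `maxConcurrency` are assumed members of FileParallelUploadOptions):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // Upload a local file in parallel chunks.
* await fileClient.uploadFile("<path to local file>", {
*   chunkSize: 4 * 1024 * 1024,
*   maxConcurrency: 5,
* });
* ```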
*/
uploadFile(filePath: string, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
/**
* Uploads a Buffer (Node.js), Blob, ArrayBuffer, or ArrayBufferView to a file.
*
* @param data - Buffer (Node.js), Blob, ArrayBuffer, or ArrayBufferView
* @param options -
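*
* Example usage (a minimal sketch; the placeholder names and the uploaded content are assumptions):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // In Node.js, upload a Buffer; in browsers a Blob or ArrayBuffer can be passed instead.
* const data = Buffer.from("Hello, Data Lake!");
* await fileClient.upload(data);
* ```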
*/
upload(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
private uploadSeekableInternal;
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* Uploads a Node.js Readable stream into a Data Lake file.
* This method will try to create a file, then start uploading it chunk by chunk.
* Make sure the potential size of the stream doesn't exceed FILE_MAX_SIZE_BYTES and the
* potential number of chunks doesn't exceed BLOCK_BLOB_MAX_BLOCKS.
*
* PERFORMANCE IMPROVEMENT TIPS:
* * Set the input stream's highWaterMark to the same value as the options.chunkSize
* parameter; this avoids Buffer.concat() operations.
*
* @param stream - Node.js Readable stream.
* @param options -
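*
* Example usage (Node.js only; a minimal sketch with placeholder names and a placeholder local path):
*
* ```ts
* import { createReadStream } from "node:fs";
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* const chunkSize = 4 * 1024 * 1024;
* // Match the stream's highWaterMark to chunkSize to avoid extra Buffer.concat() work.
* const stream = createReadStream("<path to local file>", { highWaterMark: chunkSize });
* await fileClient.uploadStream(stream, { chunkSize });
* ```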
*/
uploadStream(stream: Readable, options?: FileParallelUploadOptions): Promise<FileUploadResponse>;
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* Reads a Data Lake file in parallel to a buffer.
* Offset and count are optional, pass 0 for both to read the entire file.
*
* Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
* gigabytes on 64-bit systems due to limitations of Node.js/V8. For files larger than this size,
* consider {@link readToFile}.
*
* @param buffer - Buffer to be filled; must have a length larger than count
* @param offset - From which position of the Data Lake file to read
* @param count - How much data to be read. Will read to the end when passing undefined
* @param options -
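*
* Example usage (Node.js only; a minimal sketch with placeholder names as assumptions):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // Read the first 256 bytes of the file into a pre-allocated buffer.
* const buffer = Buffer.alloc(1024);
* await fileClient.readToBuffer(buffer, 0, 256);
* console.log(buffer.toString("utf8", 0, 256));
* ```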
*/
readToBuffer(buffer: Buffer, offset?: number, count?: number, options?: FileReadToBufferOptions): Promise<Buffer>;
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME
*
* Reads a Data Lake file in parallel to a buffer.
* Offset and count are optional, pass 0 for both to read the entire file
*
* Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
* gigabytes on 64-bit systems due to limitations of Node.js/V8. For files larger than this size,
* consider {@link readToFile}.
*
* @param offset - From which position of the Data Lake file to read (in bytes)
* @param count - How much data (in bytes) to be read. Will read to the end when passing undefined
* @param options -
*/
readToBuffer(offset?: number, count?: number, options?: FileReadToBufferOptions): Promise<Buffer>;
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* Downloads a Data Lake file to a local file.
* Fails if the given file path already exists.
* Offset and count are optional, pass 0 and undefined respectively to download the entire file.
*
* @param filePath -
* @param offset - From which position of the file to download.
* @param count - How much data to be downloaded. Will download to the end when passing undefined.
* @param options - Options to read Data Lake file.
* @returns The response data for file read operation,
* but with readableStreamBody set to undefined since its
* content is already read and written into a local file
* at the specified path.
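*
* Example usage (Node.js only; a minimal sketch with placeholder names and a placeholder local path):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // Download the whole file to a local path that does not exist yet.
* await fileClient.readToFile("<path to local file>", 0, undefined);
* ```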
*/
readToFile(filePath: string, offset?: number, count?: number, options?: FileReadOptions): Promise<FileReadResponse>;
/**
* Quick query for a JSON or CSV formatted file.
*
* Example usage (Node.js):
*
* ```ts snippet:ReadmeSampleQueryFile_Node
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
*
* const account = "<account>";
* const sas = "<sas token>";
* const datalakeServiceClient = new DataLakeServiceClient(
* `https://${account}.dfs.core.windows.net${sas}`,
* );
*
* const fileSystemName = "<file system name>";
* const fileName = "<file name>";
* const fileSystemClient = datalakeServiceClient.getFileSystemClient(fileSystemName);
* const fileClient = fileSystemClient.getFileClient(fileName);
*
* // Query and convert a file to a string
* const queryResponse = await fileClient.query("select * from BlobStorage");
* if (queryResponse.readableStreamBody) {
* const responseBuffer = await streamToBuffer(queryResponse.readableStreamBody);
* const downloaded = responseBuffer.toString();
* console.log(`Query file content: ${downloaded}`);
* }
*
* async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
* return new Promise((resolve, reject) => {
* const chunks: Buffer[] = [];
* readableStream.on("data", (data) => {
* chunks.push(data instanceof Buffer ? data : Buffer.from(data));
* });
* readableStream.on("end", () => {
* resolve(Buffer.concat(chunks));
* });
* readableStream.on("error", reject);
* });
* }
* ```
*
* @param query -
* @param options -
*/
query(query: string, options?: FileQueryOptions): Promise<FileReadResponse>;
/**
* Sets an expiry time on a file; once that time is met, the file is deleted.
*
* @param mode -
* @param options -
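*
* Example usage (a minimal sketch; the placeholder names are assumptions, and `timeToExpireInMs`
* is assumed to be the FileSetExpiryOptions field used with the "RelativeToNow" mode):
*
* ```ts
* import { DataLakeServiceClient } from "@azure/storage-file-datalake";
* import { DefaultAzureCredential } from "@azure/identity";
*
* const serviceClient = new DataLakeServiceClient(
*   "https://<account>.dfs.core.windows.net",
*   new DefaultAzureCredential(),
* );
* const fileClient = serviceClient
*   .getFileSystemClient("<file system name>")
*   .getFileClient("<file name>");
*
* // Delete the file automatically one day from now.
* await fileClient.setExpiry("RelativeToNow", { timeToExpireInMs: 24 * 60 * 60 * 1000 });
* ```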
*/
setExpiry(mode: FileExpiryMode, options?: FileSetExpiryOptions): Promise<FileSetExpiryResponse>;
/**
* Only available for clients constructed with a shared key credential.
*
* Generates a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
*/
generateSasUrl(options: FileGenerateSasUrlOptions): Promise<string>;
/**
* Only available for clients constructed with a shared key credential.
*
* Generates string to sign for a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the shared key credential of the client.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @returns The string to sign for the SAS URI.
*/
generateSasStringToSign(options: FileGenerateSasUrlOptions): string;
/**
* Generates a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the input user delegation key.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`
* @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
*/
generateUserDelegationSasUrl(options: FileGenerateSasUrlOptions, userDelegationKey: UserDelegationKey): Promise<string>;
/**
* Generates string to sign for a Service Shared Access Signature (SAS) URI based on the client properties
* and parameters passed in. The SAS is signed by the input user delegation key.
*
* @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas
*
* @param options - Optional parameters.
* @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`
* @returns The string to sign for the SAS URI.
*/
generateUserDelegationSasStringToSign(options: FileGenerateSasUrlOptions, userDelegationKey: UserDelegationKey): string;
}
//# sourceMappingURL=clients.d.ts.map