// tdpw: CLI tool for uploading Playwright test reports to the TestDino platform with Azure storage support
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AzureUploadService = exports.AzureStorageClient = void 0;
const files_1 = require("../utils/files");
const fs_1 = require("../utils/fs");
const retry_1 = require("../utils/retry");
/**
* Azure storage client configured with SAS-based authentication
*/
class AzureStorageClient {
sasResponse;
constructor(sasResponse) {
this.sasResponse = sasResponse;
}
/**
* Upload a single file using the SAS token response format
*/
async uploadFile(filePath, blobPath) {
// Pre-validate the file extension before attempting the upload
const allowedTypes = this.sasResponse.uploadInstructions.allowedFileTypes;
// Derive the extension from the base name so extensionless files (and paths
// whose directory names contain dots) yield '' rather than a bogus extension
const baseName = filePath.split(/[\\/]/).pop() || '';
const dotIndex = baseName.lastIndexOf('.');
const fileExtension = dotIndex > 0 ? baseName.slice(dotIndex + 1).toLowerCase() : '';
if (allowedTypes.length > 0 && !allowedTypes.includes(fileExtension)) {
throw new Error(`File type '${fileExtension}' not allowed. Allowed types: ${allowedTypes.join(', ')}`);
}
// Build the complete upload URL from the SAS response
const { baseUrl, pathPrefix } = this.sasResponse.uploadInstructions;
// Combine path prefix with blob path
const fullBlobPath = `${pathPrefix}/${blobPath}`;
// Build the upload URL: strip any query string from baseUrl, then append the
// blob path and the SAS token as the query string
const uploadUrl = `${baseUrl.replace(/\?.*$/, '')}/${fullBlobPath}?${this.sasResponse.sasToken}`;
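// Resulting URL shape (illustrative values only):
//   https://acct.blob.core.windows.net/reports/runs/123/index.html?sv=...&sig=...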
// Read file data
const data = await (0, fs_1.readFileBuffer)(filePath);
// Detect content type
const contentType = (0, files_1.getContentType)(filePath);
// Validate file size
if (data.length > this.sasResponse.uploadInstructions.maxFileSize) {
const sizeMB = Math.round(data.length / (1024 * 1024));
const maxSizeMB = Math.round(this.sasResponse.uploadInstructions.maxFileSize / (1024 * 1024));
throw new Error(`File too large: ${sizeMB}MB (max: ${maxSizeMB}MB)`);
}
// Headers required by Azure Blob Storage's Put Blob operation
const headers = {
'x-ms-blob-type': 'BlockBlob',
'Content-Type': contentType,
'Content-Length': data.length.toString(),
};
// Upload file using PUT request
const response = await fetch(uploadUrl, {
method: 'PUT',
headers,
body: data,
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(`TestDino server upload failed: ${response.status} ${errorText}`);
}
// Return the public URL (without SAS token for public access)
return `${this.sasResponse.containerUrl}/${fullBlobPath}`;
}
}
exports.AzureStorageClient = AzureStorageClient;
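// Usage sketch (illustrative; the field names below are exactly the ones this
// class reads from the SAS response, but a real TestDino API response may carry
// additional fields):
//
//   const client = new AzureStorageClient({
//       sasToken: 'sv=2024-01-01&sig=...',
//       containerUrl: 'https://acct.blob.core.windows.net/reports',
//       uploadInstructions: {
//           baseUrl: 'https://acct.blob.core.windows.net/reports',
//           pathPrefix: 'runs/123',
//           allowedFileTypes: ['html', 'json', 'png', 'webm'],
//           maxFileSize: 50 * 1024 * 1024,
//       },
//   });
//   const publicUrl = await client.uploadFile('./report/index.html', 'index.html');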
/**
 * Service to upload files to Azure Blob Storage using SAS credentials, batching
 * uploads to bound concurrency
 */
class AzureUploadService {
storageClient;
maxConcurrentUploads = 5; // Default batch size; balances throughput against request throttling
constructor(storageClient) {
this.storageClient = storageClient;
}
/**
* Upload an entire directory of files with concurrent uploads and progress tracking
* @param localDir Local directory path
* @param prefix Additional prefix for blob paths (optional)
* @param onProgress Progress callback (optional)
* @param preFilteredPaths Optional pre-filtered file paths (if not provided, will collect all files)
*/
async uploadDirectory(localDir, prefix = '', onProgress, preFilteredPaths) {
// Use provided file paths or collect all files recursively
const filePaths = preFilteredPaths || await (0, files_1.collectFilePaths)(localDir);
const uploadedUrls = [];
let uploadedCount = 0;
// Upload files in batches for optimal performance
const batches = this.createBatches(filePaths, this.maxConcurrentUploads);
for (const batch of batches) {
// Upload batch concurrently
const batchPromises = batch.map(async (filePath) => {
try {
// Compute blob path: prefix + relative path from directory
const relativePath = this.getRelativePath(filePath, localDir);
const blobPath = prefix ? `${prefix}/${relativePath}` : relativePath;
// Upload with retry logic
const uploadUrl = await (0, retry_1.withFileUploadRetry)(async () => this.storageClient.uploadFile(filePath, blobPath), filePath);
uploadedUrls.push(uploadUrl);
uploadedCount++;
// Report progress
if (onProgress) {
onProgress(uploadedCount, filePaths.length, relativePath);
}
return uploadUrl;
}
catch (_error) {
// Warn only for critical files, or for every file when verbose mode is enabled
const isImportantFile = filePath.includes('index.html') || filePath.includes('.json');
if (isImportantFile || process.env.TESTDINO_VERBOSE === 'true') {
// Split on both POSIX and Windows separators when extracting the file name
const fileName = filePath.split(/[\\/]/).pop() || filePath;
console.warn(`⚠️ Skipped: ${fileName}`);
}
return null;
}
});
// Wait for the current batch to finish before starting the next one.
// uploadedUrls is populated inside the map callback above, and failed uploads
// resolve to null, so the results array itself is not needed.
await Promise.all(batchPromises);
}
return uploadedUrls;
}
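// onProgress sketch (illustrative): invoked after each successful upload with
// the running count, the total, and the relative path just uploaded, e.g.
//   (uploaded, total, file) => console.log(`${uploaded}/${total} ${file}`)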
/**
* Get relative path from a local directory, ensuring proper path handling
*/
getRelativePath(filePath, localDir) {
// Normalize paths to handle different OS separators
const normalizedFilePath = filePath.replace(/\\/g, '/');
const normalizedLocalDir = localDir.replace(/\\/g, '/').replace(/\/$/, '');
// Strip the directory prefix only when it anchors the start of the path, so a
// directory name that happens to appear mid-path is never removed
if (normalizedFilePath.startsWith(`${normalizedLocalDir}/`)) {
return normalizedFilePath.slice(normalizedLocalDir.length + 1);
}
// Handle the edge case where the path matches the directory without a trailing /
if (normalizedFilePath.startsWith(normalizedLocalDir)) {
return normalizedFilePath.slice(normalizedLocalDir.length).replace(/^\//, '');
}
return normalizedFilePath;
}
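// e.g. getRelativePath('C:\\reports\\run1\\data\\trace.zip', 'C:/reports/run1')
// → 'data/trace.zip' (backslashes are normalized before the prefix is stripped)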
/**
* Create batches of files for concurrent upload
*/
createBatches(items, batchSize) {
const batches = [];
for (let i = 0; i < items.length; i += batchSize) {
batches.push(items.slice(i, i + batchSize));
}
return batches;
}
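// e.g. createBatches(['a', 'b', 'c', 'd', 'e'], 2) → [['a', 'b'], ['c', 'd'], ['e']]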
/**
* Upload with progress reporting and detailed feedback
*/
async uploadDirectoryWithProgress(localDir, prefix = '') {
const filePaths = await (0, files_1.collectFilePaths)(localDir);
console.log(`📁 Uploading ${filePaths.length} files to TestDino server...`);
let lastProgressTime = Date.now();
const uploadedUrls = await this.uploadDirectory(localDir, prefix, (uploaded, total, currentFile) => {
const now = Date.now();
// Update progress every 500ms to avoid spam
if (now - lastProgressTime > 500) {
const percentage = Math.round((uploaded / total) * 100);
console.log(` 📤 ${percentage}% (${uploaded}/${total}) - ${currentFile}`);
lastProgressTime = now;
}
});
console.log(`✅ ${uploadedUrls.length} files uploaded successfully`);
return uploadedUrls;
}
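// Usage sketch (illustrative; `service` is an AzureUploadService instance):
//   const urls = await service.uploadDirectoryWithProgress('./playwright-report', 'attachments');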
/**
* Upload HTML directory with filtering based on upload configuration
*/
async uploadHtmlDirectoryWithProgress(localDir, prefix = '', config) {
const filePaths = await (0, files_1.collectFilePathsForHtml)(localDir, true, config);
// Create descriptive message based on config
const parts = [];
if (config?.uploadHtml)
parts.push('HTML');
if (config?.uploadImages)
parts.push('images');
if (config?.uploadVideos)
parts.push('videos');
const filterDescription = parts.length > 0 ? parts.join(' + ') : 'filtered';
console.log(`📁 Uploading ${filePaths.length} files (${filterDescription}) to TestDino server...`);
let lastProgressTime = Date.now();
const uploadedUrls = await this.uploadDirectory(localDir, prefix, (uploaded, total, currentFile) => {
const now = Date.now();
// Update progress every 500ms to avoid spam
if (now - lastProgressTime > 500) {
const percentage = Math.round((uploaded / total) * 100);
console.log(` 📤 ${percentage}% (${uploaded}/${total}) - ${currentFile}`);
lastProgressTime = now;
}
}, filePaths); // Pass the filtered file paths
console.log(`✅ ${uploadedUrls.length} HTML report files uploaded successfully`);
return uploadedUrls;
}
}
exports.AzureUploadService = AzureUploadService;
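// End-to-end sketch (illustrative; `sasResponse` is assumed to come from the
// TestDino API, and the config flags mirror the ones read in
// uploadHtmlDirectoryWithProgress):
//
//   const service = new AzureUploadService(new AzureStorageClient(sasResponse));
//   const urls = await service.uploadHtmlDirectoryWithProgress(
//       './playwright-report',
//       'html-report',
//       { uploadHtml: true, uploadImages: true, uploadVideos: false },
//   );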
//# sourceMappingURL=azure.js.map