UNPKG

tdpw

Version:

CLI tool for uploading Playwright test reports to TestDino platform with Azure storage support

921 lines (899 loc) 40.9 kB
"use strict"; /** * Enhanced Upload Service - Complete Integration * Implements the full upload flow: Azure files → JSON + URLs → TestDino API */ Object.defineProperty(exports, "__esModule", { value: true }); exports.UploadService = void 0; const path_1 = require("path"); const parser_1 = require("../core/parser"); const attachments_1 = require("../core/attachments"); const git_1 = require("../collectors/git"); const ci_1 = require("../collectors/ci"); const system_1 = require("../collectors/system"); const api_1 = require("./api"); const sas_1 = require("./sas"); const azure_1 = require("./azure"); const progress_1 = require("../utils/progress"); const retry_1 = require("../utils/retry"); const types_1 = require("../types"); /** * Service to upload Playwright report and metadata to TestDino */ class UploadService { config; apiClient; sasService; constructor(config) { this.config = config; this.apiClient = new api_1.ApiClient(config); this.sasService = new sas_1.SasTokenService(config); } /** * Get report directory from JSON file path */ getReportDirectory(jsonPath) { return (0, path_1.dirname)(jsonPath); } /** * Generate unique Azure blob path for attachment using UUID * This eliminates any possibility of path collisions regardless of directory structure */ createJsonAttachmentPath(attachment) { // Generate a unique ID for this attachment const uniqueId = this.generateUniqueId(); // Get file extension from original path or content type const fileExtension = this.getFileExtension(attachment); // Create clean, unique path: json/attachments/{uniqueId}.{extension} return `json/attachments/${uniqueId}${fileExtension}`; } /** * Generate a unique identifier for attachments * Uses timestamp + random string for uniqueness */ generateUniqueId() { const timestamp = Date.now().toString(36); // Base36 timestamp const random = Math.random().toString(36).substring(2, 8); // 6 random chars return `${timestamp}_${random}`; } /** * Extract file extension from attachment info */ getFileExtension(attachment) { // First try to get extension from original path const pathParts = attachment.originalPath.split('.'); if (pathParts.length > 1) { const extension = pathParts[pathParts.length - 1]; if (extension) { return `.${extension.toLowerCase()}`; } } // Fallback: derive from content type const contentType = attachment.contentType.toLowerCase(); if (contentType.includes('png')) return '.png'; if (contentType.includes('jpeg') || contentType.includes('jpg')) return '.jpg'; if (contentType.includes('gif')) return '.gif'; if (contentType.includes('webp')) return '.webp'; if (contentType.includes('svg')) return '.svg'; if (contentType.includes('webm')) return '.webm'; if (contentType.includes('mp4')) return '.mp4'; if (contentType.includes('zip')) return '.zip'; // Default fallback return '.bin'; } /** * Main upload orchestration method * Flow: Collect Metadata → Upload Azure Files → Send JSON + URLs to API */ async uploadReport(jsonPath, htmlDir, traceDir) { const tracker = (0, progress_1.createProgressTracker)(); try { // Step 1: Parse the base Playwright report tracker.start('Parsing Playwright report...'); const baseReport = await (0, parser_1.parsePlaywrightJson)(jsonPath); tracker.succeed('Report parsed successfully'); // Step 2: Scan for attachments tracker.start('Scanning for attachments...'); const reportDirectory = this.getReportDirectory(jsonPath); const attachmentScanner = new attachments_1.AttachmentScanner(reportDirectory); const attachmentScanResult = await attachmentScanner.scanAttachments(baseReport); // Filter attachments based on configuration const attachmentsToUpload = attachments_1.AttachmentScanner.filterAttachments(attachmentScanResult, this.config); if (attachmentsToUpload.length > 0) { tracker.succeed(`Found ${attachmentsToUpload.length} attachments to upload`); } else { tracker.succeed('No attachments to upload based on current flags'); } // Step 3: Collect all metadata tracker.start('Collecting environment metadata...'); const metadata = await this.collectMetadata(baseReport); tracker.succeed('Metadata collected'); // Step 4: Upload files to Azure with STRICT validation after auto-discovery let azureUploadResult; const shouldUploadToAzure = this.config.uploadImages || this.config.uploadVideos || this.config.uploadHtml || this.config.uploadTraces; if (shouldUploadToAzure) { // STRICT ENFORCEMENT: After auto-discovery, validate we have what user requested this.enforceStrictUploadRequirements(htmlDir, traceDir, attachmentsToUpload); tracker.start('Uploading files to TestDino server...'); azureUploadResult = await this.uploadToAzure(htmlDir, traceDir, attachmentsToUpload); if (azureUploadResult.status === 'uploaded') { tracker.succeed('TestDino server upload completed successfully'); } else if (azureUploadResult.status === 'failed') { tracker.fail('TestDino server upload failed'); throw new Error('Upload failed when uploads were explicitly enabled'); } else { tracker.succeed('TestDino server upload skipped'); } } // Step 5: Build final payload with Azure URLs tracker.start('Uploading to TestDino API...'); // Update attachment paths with Azure URLs or status markers const urlMapping = azureUploadResult?.urlMapping || new Map(); const finalReport = attachments_1.AttachmentScanner.updateAttachmentPaths(baseReport, urlMapping, this.config); const finalPayload = this.buildFinalPayload(finalReport, metadata, azureUploadResult); if (this.config.verbose) { const testCount = Array.isArray(finalPayload.suites) ? finalPayload.suites.length : 0; console.log(`📦 Uploading report: ${testCount} test suites`); } // Step 5: Upload to TestDino API with retry const response = await (0, retry_1.withRetry)(() => this.apiClient.uploadReport(finalPayload), { maxAttempts: 3, baseDelay: 1000 }); tracker.succeed('Report uploaded successfully'); return response; } catch (error) { tracker.fail('Upload failed'); throw error; } } /** * Collect all metadata with ZERO data loss guarantee * CRITICAL: Each collector has fallbacks - never fails completely */ async collectMetadata(baseReport) { const collectionResults = await Promise.allSettled([ this.collectGitMetadataWithFallback(), this.collectCiMetadataWithFallback(), this.collectSystemMetadataWithFallback(), ]); // Extract results, using fallbacks for any failures const gitMeta = collectionResults[0].status === 'fulfilled' ? collectionResults[0].value : this.getGitMetadataFallback(); const ciMeta = collectionResults[1].status === 'fulfilled' ? collectionResults[1].value : this.getCiMetadataFallback(); const systemMeta = collectionResults[2].status === 'fulfilled' ? collectionResults[2].value : this.getSystemMetadataFallback(); // Extract test configuration from the base report const testMeta = this.extractTestMetadata(baseReport); const metadata = { git: gitMeta, ci: ciMeta, system: systemMeta, test: testMeta, }; // Log any collection failures const failures = collectionResults .map((result, index) => ({ result, type: ['git', 'ci', 'system'][index] })) .filter(({ result }) => result.status === 'rejected') .map(({ type, result }) => `${type}: ${result.reason}`); if (failures.length > 0) { console.warn(`⚠️ Metadata collection issues (using fallbacks): ${failures.join(', ')}`); } if (this.config.verbose) { console.log(`📋 Metadata: ${gitMeta.branch || 'unknown branch'}, ${ciMeta.provider || 'local'} environment`); } // STRICT ENFORCEMENT: Validate critical metadata when uploads are enabled this.enforceStrictMetadataRequirements(metadata); return metadata; } /** * Collect Git metadata with comprehensive error handling */ async collectGitMetadataWithFallback() { try { return await new git_1.GitCollector(process.cwd()).getMetadata(); } catch (error) { if (this.config.verbose) { console.warn(`⚠️ Git metadata collection failed: ${error instanceof Error ? error.message : 'unknown error'}`); } return this.getGitMetadataFallback(); } } /** * Collect CI metadata with error handling */ async collectCiMetadataWithFallback() { try { return ci_1.CiCollector.collect(); } catch (error) { if (this.config.verbose) { console.warn(`⚠️ CI metadata collection failed: ${error instanceof Error ? error.message : 'unknown error'}`); } return this.getCiMetadataFallback(); } } /** * Collect System metadata with error handling */ async collectSystemMetadataWithFallback() { try { return system_1.SystemCollector.collect(); } catch (error) { if (this.config.verbose) { console.warn(`⚠️ System metadata collection failed: ${error instanceof Error ? error.message : 'unknown error'}`); } return this.getSystemMetadataFallback(); } } /** * Fallback Git metadata when all collection methods fail */ getGitMetadataFallback() { const env = process.env; return { branch: env.CI_COMMIT_REF_NAME || env.GITHUB_HEAD_REF || env.BRANCH_NAME || 'unknown', commit: { hash: env.CI_COMMIT_SHA || env.GITHUB_SHA || env.GIT_COMMIT || 'unknown', message: env.CI_COMMIT_MESSAGE || '', author: env.CI_COMMIT_AUTHOR || env.GITHUB_ACTOR || env.GIT_AUTHOR_NAME || '', email: env.CI_COMMIT_AUTHOR_EMAIL || env.GIT_AUTHOR_EMAIL || '', timestamp: new Date().toISOString(), }, repository: { name: env.CI_PROJECT_PATH || env.GITHUB_REPOSITORY || 'unknown', url: env.CI_PROJECT_URL || env.GITHUB_REPOSITORY_URL || env.GIT_URL || '', }, pr: { id: env.CI_MERGE_REQUEST_ID || env.GITHUB_PR_NUMBER || '', title: env.CI_MERGE_REQUEST_TITLE || '', url: env.CI_MERGE_REQUEST_URL || '', status: '', }, }; } /** * Fallback CI metadata when collection fails */ getCiMetadataFallback() { const env = process.env; return { provider: 'unknown', pipeline: { id: env.CI_PIPELINE_ID || env.GITHUB_RUN_ID || 'unknown', name: env.CI_PIPELINE_NAME || env.GITHUB_WORKFLOW || 'CI Pipeline', url: env.CI_PIPELINE_URL || '', }, build: { number: env.CI_BUILD_NUMBER || env.GITHUB_RUN_NUMBER || 'unknown', trigger: env.CI_PIPELINE_TRIGGER || env.GITHUB_EVENT_NAME || '', }, environment: { name: env.CI_ENVIRONMENT_NAME || 'local', type: '', os: 'unknown', node: process.version || 'unknown', }, }; } /** * Fallback System metadata when collection fails */ getSystemMetadataFallback() { return { hostname: 'unknown', cpu: { count: 1, model: 'unknown', }, memory: { total: 'unknown', }, os: 'unknown', nodejs: process.version || 'unknown', playwright: 'unknown', }; } /** * STRICT ENFORCEMENT: Validate we have required files/directories after auto-discovery * FAILS HARD if user enabled options but we can't deliver what they requested */ enforceStrictUploadRequirements(htmlDir, traceDir, attachments = []) { const violations = []; // Strict HTML upload validation if (this.config.uploadHtml && !htmlDir) { violations.push('❌ HTML upload enabled (--upload-html) but no HTML report directory found\n' + '💡 Auto-discovery failed - ensure HTML report exists or use --html-report <path>'); } // Strict trace upload validation if (this.config.uploadTraces && !traceDir) { violations.push('❌ Trace upload enabled (--upload-traces) but no trace directory found\n' + '💡 Auto-discovery failed - ensure trace files exist or use --trace-dir <path>'); } // Strict attachment validation if ((this.config.uploadImages || this.config.uploadVideos) && attachments.length === 0) { const enabledTypes = []; if (this.config.uploadImages) enabledTypes.push('images'); if (this.config.uploadVideos) enabledTypes.push('videos'); violations.push(`❌ ${enabledTypes.join(' and ')} upload enabled but no ${enabledTypes.join('/')} found\n` + '💡 Auto-discovery failed - ensure test attachments exist in the report'); } // STRICT FAILURE: If any enabled option cannot be fulfilled, fail hard if (violations.length > 0) { const errorMessage = '🚫 STRICT VALIDATION FAILED - Cannot fulfill explicitly enabled upload options:\n\n' + violations.join('\n\n') + '\n\n💡 Either provide the missing files/directories or remove the corresponding upload flags.'; throw new Error(errorMessage); } // Additional validation: Check directory accessibility if (htmlDir && this.config.uploadHtml) { this.validateDirectoryAccess(htmlDir, 'HTML report directory'); } if (traceDir && this.config.uploadTraces) { this.validateDirectoryAccess(traceDir, 'trace directory'); } } /** * Validate directory is accessible and contains expected content */ validateDirectoryAccess(dirPath, description) { try { // Basic accessibility check would go here // For now, we trust that auto-discovery already validated these if (this.config.verbose) { console.log(`✅ ${description} validated: ${dirPath}`); } } catch (error) { throw new Error(`❌ ${description} validation failed: ${dirPath}\n` + `💡 ${error instanceof Error ? error.message : 'Directory is not accessible'}`); } } /** * STRICT ENFORCEMENT: Validate critical metadata is available * FAILS HARD if essential metadata is missing when uploads are enabled */ enforceStrictMetadataRequirements(metadata) { const isUploadEnabled = this.config.uploadImages || this.config.uploadVideos || this.config.uploadHtml || this.config.uploadTraces; if (!isUploadEnabled) { return; // No strict requirements for JSON-only uploads } const violations = []; // Strict Git metadata validation if (!metadata.git.commit?.hash || metadata.git.commit.hash === 'unknown') { violations.push('❌ Git commit hash missing or unknown\n' + '💡 Required for upload tracking - ensure you\'re in a git repository with commits'); } if (!metadata.git.branch || metadata.git.branch === 'unknown') { violations.push('❌ Git branch information missing or unknown\n' + '💡 Required for upload organization - ensure you\'re on a valid git branch'); } // Strict repository metadata validation if (!metadata.git.repository?.name || metadata.git.repository.name === 'unknown') { violations.push('❌ Repository name missing or unknown\n' + '💡 Required for upload categorization - ensure git remote is configured'); } // STRICT FAILURE: If critical metadata is missing during uploads, fail hard if (violations.length > 0) { const errorMessage = '🚫 STRICT METADATA VALIDATION FAILED - Missing critical information for uploads:\n\n' + violations.join('\n\n') + '\n\n💡 Either fix the git repository setup or disable upload options for JSON-only uploads.'; throw new Error(errorMessage); } if (this.config.verbose) { console.log('✅ Strict metadata validation passed - all critical information available'); } } /* * Standardized error handling for upload operations - UNUSED in simplified version */ /*private handleUploadError( error: unknown, type: 'attachment' | 'html' | 'trace', result: AzureUploadResult, context?: string ): void { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const contextMessage = context ? ` (${context})` : ''; // Determine if this error should be critical based on strict enforcement const isCritical = this.shouldBeStrict(type); const failure: UploadFailure = { type, error: errorMessage, critical: isCritical, }; result.failures.push(failure); // Log appropriately based on criticality if (isCritical) { console.error(`❌ ${type.toUpperCase()} upload failed${contextMessage}: ${errorMessage}`); if (this.config.verbose) { console.error('Stack:', error instanceof Error ? error.stack : 'No stack trace'); } } else { console.warn(`⚠️ ${type.toUpperCase()} upload failed${contextMessage}: ${errorMessage}`); if (this.config.verbose) { console.log('Non-critical failure - continuing with upload'); } } // Update result status based on failure criticality if (isCritical) { result.status = 'failed'; result.error = errorMessage; } else if (result.status !== 'failed') { result.status = 'partial'; // Some uploads succeeded, some failed } }*/ /* * Determine if upload type should be strict based on user configuration - UNUSED */ /*private shouldBeStrict(type: 'attachment' | 'html' | 'trace'): boolean { switch (type) { case 'html': return this.config.uploadHtml; // Critical if user explicitly enabled HTML upload case 'trace': return this.config.uploadTraces; // Critical if user explicitly enabled trace upload case 'attachment': return this.config.uploadImages || this.config.uploadVideos; // Critical if user enabled attachments default: return false; } }*/ /* * Generate comprehensive upload summary for reporting - UNUSED in simplified version */ /*private generateUploadSummary(result: AzureUploadResult): string { const parts = []; if (result.status === 'uploaded') { parts.push('✅ All uploads completed successfully'); } else if (result.status === 'partial') { parts.push('⚠️ Upload completed with some failures'); } else { parts.push('❌ Upload failed'); } // Add details about what succeeded const successes = []; if (result.htmlUrl) successes.push('HTML report'); if (result.traceUrls && result.traceUrls.length > 0) successes.push(`${result.traceUrls.length} trace files`); if (result.attachmentUrls && result.attachmentUrls.size > 0) successes.push(`${result.attachmentUrls.size} attachments`); if (successes.length > 0) { parts.push(`Successfully uploaded: ${successes.join(', ')}`); } // Add failure details if (result.failures.length > 0) { const criticalFailures = result.failures.filter(f => f.critical); const nonCriticalFailures = result.failures.filter(f => !f.critical); if (criticalFailures.length > 0) { parts.push(`Critical failures: ${criticalFailures.map(f => f.type).join(', ')}`); } if (nonCriticalFailures.length > 0) { parts.push(`Non-critical failures: ${nonCriticalFailures.map(f => f.type).join(', ')}`); } } return parts.join('\n'); }*/ /** * Validate and sanitize path prefix from SAS response * CRITICAL: Ensures reliable URL generation even with malformed server responses */ validateAndSanitizePathPrefix(sasResponse) { const pathPrefix = sasResponse?.uploadInstructions && typeof sasResponse.uploadInstructions === 'object' && sasResponse.uploadInstructions !== null ? sasResponse.uploadInstructions.pathPrefix : undefined; if (!pathPrefix || typeof pathPrefix !== 'string') { // Generate fallback path prefix based on current timestamp const now = new Date(); const year = now.getFullYear(); const month = String(now.getMonth() + 1).padStart(2, '0'); const day = String(now.getDate()).padStart(2, '0'); const randomId = Math.random().toString(36).substring(2, 10); const fallbackPrefix = `${year}/${month}/${day}/${randomId}`; if (this.config.verbose) { console.warn(`⚠️ Invalid or missing pathPrefix from server, using fallback: ${fallbackPrefix}`); } return fallbackPrefix; } // Sanitize path prefix to ensure it's safe const sanitized = pathPrefix .replace(/^\/+|\/+$/g, '') // Remove leading/trailing slashes .replace(/\/+/g, '/') // Replace multiple slashes with single slash .replace(/[^a-zA-Z0-9/\-_]/g, ''); // Remove unsafe characters if (!sanitized) { // If sanitization resulted in empty string, use fallback const now = new Date(); const fallbackPrefix = `${now.getFullYear()}/${String(now.getMonth() + 1).padStart(2, '0')}/${String(now.getDate()).padStart(2, '0')}/fallback_${Date.now()}`; if (this.config.verbose) { console.warn(`⚠️ Path prefix sanitization resulted in empty string, using fallback: ${fallbackPrefix}`); } return fallbackPrefix; } if (sanitized !== pathPrefix && this.config.verbose) { console.warn(`⚠️ Path prefix sanitized: '${pathPrefix}' → '${sanitized}'`); } return sanitized; } /** * Validate SAS response completeness and generate safe fallbacks */ validateSasResponse(sasResponse) { const required = [ 'sasToken', 'containerUrl', 'uploadInstructions', 'uploadInstructions.baseUrl', 'uploadInstructions.allowedFileTypes', 'uploadInstructions.maxFileSize' ]; const missing = []; for (const field of required) { const parts = field.split('.'); let current = sasResponse; for (const part of parts) { if (!current || typeof current !== 'object' || current === null || !(part in current)) { missing.push(field); break; } current = current[part]; } } if (missing.length > 0) { throw new Error(`Invalid SAS response from server - missing required fields: ${missing.join(', ')}\n` + 'This indicates a server-side issue. Please contact support.'); } // Validate data types and ranges const uploadInstructions = sasResponse.uploadInstructions; if (typeof uploadInstructions.maxFileSize !== 'number' || uploadInstructions.maxFileSize <= 0) { throw new Error('Invalid maxFileSize in SAS response'); } if (!Array.isArray(uploadInstructions.allowedFileTypes)) { throw new Error('Invalid allowedFileTypes in SAS response'); } } /** * Implement basic upload resume logic for failed uploads * Retries failed uploads with exponential backoff */ /*private async retryFailedUploads( originalResult: AzureUploadResult, sasResponse: unknown, sanitizedPathPrefix: string ): Promise<AzureUploadResult> { const failedUploads = originalResult.failures.filter(f => !f.critical); if (failedUploads.length === 0) { return originalResult; // No non-critical failures to retry } if (this.config.verbose) { console.log(`🔄 Attempting to retry ${failedUploads.length} failed uploads...`); } const retryResult = { ...originalResult }; const retriesSuccessful = 0; // TODO: Increment this when retries succeed // Create a fresh storage client for retries // eslint-disable-next-line @typescript-eslint/no-explicit-any const originalSasResponse = sasResponse as any; // Deep object manipulation requires any type const validatedSasResponse = { ...originalSasResponse, uploadInstructions: { ...originalSasResponse.uploadInstructions, pathPrefix: sanitizedPathPrefix, }, }; const storageClient = new AzureStorageClient(validatedSasResponse); // Retry each failed upload type for (const failure of failedUploads) { try { await new Promise(resolve => setTimeout(resolve, 1000)); // Basic delay between retries if (failure.type === 'attachment') { // TODO: Implement actual retry logic using storageClient // For attachment failures, we'd need access to the original attachment list // This is a simplified implementation - in a real scenario, we'd track which specific attachments failed if (this.config.verbose) { console.log(`⚠️ Skipping attachment retry - would need specific failed attachment tracking`); console.log(`📦 Storage client available for retry: ${storageClient ? 'Yes' : 'No'}`); } // Example retry would be: // const retryUrl = await storageClient.uploadFile(failedAttachment.path, failedAttachment.blobPath); // if (retryUrl) retriesSuccessful++; } // Note: HTML and trace retries would be more complex and require the original directories // This is a basic framework - full implementation would need state tracking } catch (error) { if (this.config.verbose) { console.warn(`⚠️ Retry failed for ${failure.type}: ${error instanceof Error ? error.message : 'Unknown error'}`); } } } if (retriesSuccessful > 0) { console.log(`✅ Successfully retried ${retriesSuccessful} failed uploads`); // Update status if we recovered some uploads if (retryResult.status === 'failed' && retriesSuccessful === failedUploads.length) { retryResult.status = 'uploaded'; } else if (retryResult.status === 'failed' && retriesSuccessful > 0) { retryResult.status = 'partial'; } } return retryResult; }*/ /** * Extract test configuration metadata from Playwright report */ extractTestMetadata(report) { const config = report.config || {}; const projects = config.projects || []; // Get Playwright version let playwrightVersion = 'unknown'; try { // eslint-disable-next-line @typescript-eslint/no-require-imports const pkg = require('playwright/package.json'); playwrightVersion = pkg.version || 'unknown'; } catch { try { // eslint-disable-next-line @typescript-eslint/no-require-imports const pkg = require('@playwright/test/package.json'); playwrightVersion = pkg.version || 'unknown'; } catch { // Keep default 'unknown' } } // Build browser configurations const browsers = projects.map((project) => ({ browserId: project.id || project.name || 'unknown', name: project.name || 'unknown', version: config.version || 'unknown', viewport: '1280x720', // Default, could be extracted from project config headless: true, // Default assumption repeatEach: project.repeatEach || 1, retries: project.retries || 0, testDir: project.testDir || config.rootDir || 'unknown', outputDir: project.outputDir || 'unknown', })); return { framework: { name: 'playwright', version: playwrightVersion, }, config: { browsers, actualWorkers: config.metadata?.actualWorkers || config.workers || 1, timeout: projects[0]?.timeout || config.timeout || 30000, preserveOutput: config.preserveOutput || 'always', reporters: this.extractReporterConfig(config.reporter), grep: config.grep || {}, grepInvert: config.grepInvert || null, fullyParallel: config.fullyParallel || false, forbidOnly: config.forbidOnly || false, projects: projects.length || 0, shard: config.shard || null, }, customTags: [], }; } /** * Extract reporter configuration from Playwright config */ extractReporterConfig(reporters) { if (!Array.isArray(reporters)) return []; return reporters.map((reporter) => { if (Array.isArray(reporter)) { return { name: reporter[0], options: reporter[1] || {}, }; } return { name: reporter, options: {}, }; }); } /** * Upload HTML, trace files, and attachments to Azure storage with proper directory structure */ async uploadToAzure(htmlDir, traceDir, attachments = []) { try { // Request SAS token with retry (ONE TOKEN PER COMMAND) const sasResponse = await (0, retry_1.withRetry)(() => this.sasService.requestSasToken(), { maxAttempts: 3, baseDelay: 1000 }); // Validate SAS response completeness this.validateSasResponse(sasResponse); // Validate and sanitize path prefix const sanitizedPathPrefix = this.validateAndSanitizePathPrefix(sasResponse); if (this.config.verbose) { const expiryMinutes = Math.floor((new Date(sasResponse.expiresAt).getTime() - Date.now()) / 60000); console.log(`🔐 Token acquired (expires in ${expiryMinutes} minutes)`); console.log(`📁 Upload path: ${sasResponse.containerUrl}/${sanitizedPathPrefix}/`); } // Create Azure storage client with validated path prefix const validatedSasResponse = { ...sasResponse, uploadInstructions: { ...sasResponse.uploadInstructions, pathPrefix: sanitizedPathPrefix, }, }; const storageClient = new azure_1.AzureStorageClient(validatedSasResponse); const uploadService = new azure_1.AzureUploadService(storageClient); let htmlUrl = ''; const urlMapping = new Map(); // Upload attachments (images, videos, etc.) and collect URL mappings if (attachments.length > 0) { try { console.log(`📎 Uploading ${attachments.length} attachments...`); // Upload attachments in configurable batches for better performance const batchSize = this.config.batchSize; for (let i = 0; i < attachments.length; i += batchSize) { const batch = attachments.slice(i, i + batchSize); // Upload batch in parallel and collect results const uploadPromises = batch.map(async (attachment) => { try { // Create clean blob path for JSON attachments with directory structure const cleanPath = this.createJsonAttachmentPath(attachment); const uploadedUrl = await storageClient.uploadFile(attachment.absolutePath, cleanPath); if (this.config.verbose) { console.log(` ✅ ${attachment.name}: ${uploadedUrl}`); } return { attachment, uploadedUrl }; } catch (error) { console.warn(`⚠️ Failed to upload attachment ${attachment.name}: ${error}`); return null; } }); const results = await Promise.all(uploadPromises); // Collect successful uploads into URL mapping for (const result of results) { if (result?.uploadedUrl) { urlMapping.set(result.attachment.originalPath, result.uploadedUrl); } } } console.log(`✅ Attachments upload completed`); } catch (error) { console.warn(`⚠️ Attachment upload failed: ${error}`); } } // Upload HTML report if enabled and directory exists const shouldUploadHtmlDir = this.config.uploadHtml && htmlDir; if (shouldUploadHtmlDir) { try { console.log(`📁 Uploading HTML report from: ${htmlDir}`); // Upload directory contents with filtering based on flags const htmlConfig = { uploadImages: this.config.uploadImages || this.config.uploadHtml, uploadVideos: this.config.uploadVideos || this.config.uploadHtml, uploadHtml: this.config.uploadHtml, }; const uploadedUrls = await uploadService.uploadHtmlDirectoryWithProgress(htmlDir, 'html', htmlConfig); // Build HTML URL - find the index.html in uploaded URLs const indexUrl = uploadedUrls.find(url => url.endsWith('index.html')); if (indexUrl) { htmlUrl = indexUrl; } else { // Fallback: construct URL manually using validated path prefix htmlUrl = `${sasResponse.containerUrl}/${sanitizedPathPrefix}/html/index.html`; if (this.config.verbose) { console.log(`🔗 Generated fallback HTML URL: ${htmlUrl}`); } } console.log('✅ HTML report uploaded successfully'); } catch (error) { console.error(`❌ HTML upload failed: ${error}`); // htmlUrl remains empty, which will result in 'failed' status } } // Upload trace files if enabled and directory exists (but don't track in metadata) if (this.config.uploadTraces && traceDir) { try { console.log(`📦 Uploading trace files from: ${traceDir}`); // Upload traces with 'traces' prefix to organize them const traceUrls = await uploadService.uploadDirectoryWithProgress(traceDir, 'traces'); console.log(`✅ ${traceUrls.length} trace files uploaded`); } catch (error) { console.warn(`⚠️ Trace upload failed: ${error}`); } } // Return result based on HTML upload status only if (this.config.uploadHtml) { if (htmlUrl) { return { status: 'uploaded', url: htmlUrl, urlMapping }; } else { return { status: 'failed', url: '', urlMapping }; } } else { return { status: 'disabled', url: '', urlMapping }; } } catch (error) { const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const emptyUrlMapping = new Map(); // If Azure upload fails completely and HTML upload was enabled, return failed if (this.config.uploadHtml) { console.error(`❌ TestDino server upload failed completely: ${errorMessage}`); return { status: 'failed', url: '', urlMapping: emptyUrlMapping }; } else { console.warn(`⚠️ TestDino server upload failed: ${errorMessage}`); return { status: 'disabled', url: '', urlMapping: emptyUrlMapping }; } } } /** * Build the final payload combining base report + metadata + Azure URLs * This must match the exact structure from sample-report.json */ buildFinalPayload(baseReport, metadata, azureUpload) { // Attach Azure upload result to metadata if available if (azureUpload) { metadata.azureUpload = azureUpload; } else { // No Azure upload attempted - use default metadata.azureUpload = { status: 'not-found', url: '', urlMapping: new Map(), }; } // Build the payload EXACTLY matching sample-report.json structure const payload = { config: baseReport.config, suites: baseReport.suites, stats: baseReport.stats, errors: baseReport.errors ?? [], metadata, }; return payload; } /** * Upload with graceful fallback for failed Azure uploads */ async uploadWithFallback(jsonPath, htmlDir, traceDir) { try { // Try full upload first return await this.uploadReport(jsonPath, htmlDir, traceDir); } catch (error) { if (error instanceof types_1.NetworkError && (htmlDir || traceDir)) { console.warn('⚠️ Full upload failed, attempting JSON-only upload...'); // Fallback: try JSON-only upload try { return await this.uploadReport(jsonPath); // No HTML/traces } catch (fallbackError) { console.error('❌ Fallback upload also failed'); throw fallbackError; } } throw error; } } } exports.UploadService = UploadService; //# sourceMappingURL=upload.js.map