// upload-test-manual
// CLI tool to process CSV files and upload the results to ReportPortal,
// generating a JUnit-style XML report as a side artifact.
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');
const { Builder } = require('xml2js');
const { createTestItem, finishTestItem } = require('./rpHelpers');
const { validateCSVStructure } = require('./csvParser');
/**
 * Resolve the input CSV path and a timestamped output XML path for one run.
 *
 * @param {string} filename - CSV file name (relative to the input dir) or an absolute path.
 * @param {object} [options] - Optional overrides: baseDir, inputDir, outputDir.
 * @returns {{csvPath: string, xmlPath: string}} Resolved input/output paths.
 * @throws {Error} If filename is missing or not a string.
 */
const preparePaths = (filename, options = {}) => {
  // Reject anything that is not a non-empty string.
  if (typeof filename !== 'string' || !filename) {
    throw new Error('Filename must be a string');
  }

  const root = options.baseDir || path.join(__dirname, '..', 'data');
  const inFolder = options.inputDir || 'input-csv';
  const outFolder = options.outputDir || 'output-xml';

  // Strip the extension, neutralize unsafe characters, cap at 100 chars.
  let safeName = path.basename(filename, path.extname(filename));
  safeName = safeName.replace(/[^a-zA-Z0-9-_.]/g, '_').substring(0, 100);

  // Absolute inputs are used as-is; relative ones live under the input dir.
  const csvPath = path.isAbsolute(filename)
    ? filename
    : path.join(root, inFolder, filename);
  // Timestamp suffix keeps successive runs from overwriting each other.
  const xmlPath = path.join(root, outFolder, `${safeName}-${Date.now()}.xml`);

  return { csvPath, xmlPath };
};
/**
 * Create dirPath (including missing parents) when it does not already exist.
 *
 * @param {string} dirPath - Directory to guarantee.
 * @throws {Error} Wrapped error when creation fails (permissions, bad path, ...).
 */
const ensureDirExists = (dirPath) => {
  if (fs.existsSync(dirPath)) {
    return; // already there — nothing to do, nothing logged
  }
  try {
    fs.mkdirSync(dirPath, { recursive: true, mode: 0o755 });
  } catch (error) {
    throw new Error(`Failed to create directory ${dirPath}: ${error.message}`);
  }
  console.log(`Created directory: ${dirPath}`);
};
/**
 * Retention cleanup for generated report files: delete files older than
 * maxAgeDays, and keep at most maxFilesToKeep (newest first).
 *
 * @param {string} dirPath - Directory to clean.
 * @param {object} [options] - maxAgeDays, maxFilesToKeep, filePattern, dryRun.
 * @returns {{total: number, deleted: number, kept: number}|null}
 *   Summary counts, or null when an unexpected fs error occurred.
 */
const cleanOldFiles = (dirPath, options = {}) => {
  const config = {
    maxAgeDays: 7,
    maxFilesToKeep: 20,
    filePattern: /\.xml$/i,
    dryRun: false,
    ...options
  };
  try {
    if (!fs.existsSync(dirPath)) return { total: 0, deleted: 0, kept: 0 };

    const files = fs.readdirSync(dirPath)
      .filter((file) => config.filePattern.test(file))
      .map((file) => {
        const fullPath = path.join(dirPath, file);
        return { name: file, path: fullPath, stats: fs.statSync(fullPath) };
      })
      .sort((a, b) => b.stats.mtimeMs - a.stats.mtimeMs); // newest first

    const cutoff = Date.now() - config.maxAgeDays * 86400000;

    // BUG FIX: a file can be both older than the cutoff AND beyond
    // maxFilesToKeep. The original concatenated the two lists, unlinked such a
    // file twice (the second unlink threw ENOENT, aborting into the catch and
    // returning null) and double-counted it in `deleted`. Dedupe by path.
    const toDelete = new Map();
    for (const file of files) {
      if (file.stats.mtimeMs < cutoff) toDelete.set(file.path, file);
    }
    for (const file of files.slice(config.maxFilesToKeep)) {
      toDelete.set(file.path, file);
    }

    for (const file of toDelete.values()) {
      if (!config.dryRun) fs.unlinkSync(file.path);
      console.log(`${config.dryRun ? '[DRY RUN]' : 'Deleted'} old file: ${file.name}`);
    }

    return {
      total: files.length,
      deleted: toDelete.size,
      kept: files.length - toDelete.size
    };
  } catch (error) {
    // Best-effort housekeeping: log and signal failure without throwing.
    console.error(`Error cleaning directory ${dirPath}:`, error.message);
    return null;
  }
};
/**
 * Process a CSV file: for each row, create and finish a ReportPortal test item,
 * then write a JUnit-style XML report summarizing the run.
 *
 * @param {string} csvPath  - Path to the input CSV (must exist).
 * @param {string} xmlPath  - Destination path for the generated XML report.
 * @param {string} launchId - ReportPortal launch the items belong to.
 * @param {object} rpConfig - ReportPortal config passed through to the helpers.
 * @param {object} [options] - suiteName, separator, skipLines, fieldMappings, validationOptions.
 * @returns {Promise<{totalTests: number, testItems: number, xmlPath: string}>}
 * @throws {Error} Synchronously if the CSV file does not exist; rejects on
 *   stream errors or XML generation failure. Row-level errors are counted in
 *   the report's `errors` attribute instead of aborting the run.
 */
const processCSV = async (csvPath, xmlPath, launchId, rpConfig, options = {}) => {
  if (!fs.existsSync(csvPath)) {
    throw new Error(`CSV file not found: ${csvPath}`);
  }
  return new Promise((resolve, reject) => {
    const testItems = [];
    const rows = []; // raw parsed rows, buffered so async work runs after 'end'
    const xmlBuilder = new Builder({
      xmldec: { version: '1.0', encoding: 'UTF-8' },
      headless: true,
      renderOpts: { pretty: true, indent: ' ', newline: '\n' }
    });
    const xmlData = {
      testsuites: {
        testsuite: {
          $: {
            name: options.suiteName || 'CSV Import',
            timestamp: new Date().toISOString(),
            tests: 0,
            failures: 0,
            errors: 0,
            skipped: 0,
            time: 0
          },
          properties: [
            { property: { $: { name: 'launchId', value: launchId } } },
            { property: { $: { name: 'sourceFile', value: path.basename(csvPath) } } }
          ],
          testcase: []
        }
      }
    };
    const fieldMappings = {
      testCaseIdField: 'Test Case ID',
      ...options.fieldMappings
    };
    console.log(`Processing CSV: ${csvPath} with mappings:`, fieldMappings);

    // Handle one parsed row: register it in ReportPortal and add it to the XML
    // tree. Row-level failures increment the error counter but do not abort.
    const processRow = async (data) => {
      try {
        validateCSVStructure(data, options.validationOptions);
        // Ensure Test Case ID exists
        if (!data['Test Case ID']) {
          data['Test Case ID'] = `GEN-${Date.now()}-${Math.floor(Math.random() * 1000)}`;
          console.warn(`Generated Test Case ID for row: ${data['Test Case ID']}`);
        }
        const itemId = await createTestItem(launchId, data, rpConfig, fieldMappings);
        if (itemId) {
          await finishTestItem(itemId, data, rpConfig, fieldMappings);
          testItems.push(itemId);
        }
        // Update XML report
        const testCase = {
          $: {
            name: data['Title'] || 'Unnamed Test',
            classname: data['Test Suite'] || 'Default',
            time: data['Duration'] || '0',
            'test-case-id': data['Test Case ID'] // Include in XML
          }
        };
        // Handle test status
        const status = data['Status'];
        if (status === 'Failed') {
          xmlData.testsuites.testsuite.$.failures++;
          testCase.failure = [{
            _: data['Comment'] || 'No comment',
            $: { message: data['Defect'] || 'Unknown defect' }
          }];
        } else if (status === 'Skipped') {
          xmlData.testsuites.testsuite.$.skipped++;
          testCase.skipped = {};
        }
        xmlData.testsuites.testsuite.$.tests++;
        xmlData.testsuites.testsuite.testcase.push(testCase);
      } catch (error) {
        console.error(`Error processing row:`, error.message);
        xmlData.testsuites.testsuite.$.errors++;
      }
    };

    fs.createReadStream(csvPath)
      .pipe(csv({
        mapHeaders: ({ header }) => header.trim(),
        mapValues: ({ value }) => typeof value === 'string' ? value.trim() : value,
        separator: options.separator || ',',
        skipLines: options.skipLines || 0
      }))
      // BUG FIX: the original did the async ReportPortal calls inside an async
      // 'data' handler. Streams do not await such handlers, so 'end' could fire
      // — writing the XML and resolving — before all rows were processed.
      // Buffer synchronously here; do all async work in 'end'.
      .on('data', (data) => rows.push(data))
      .on('end', async () => {
        try {
          for (const row of rows) {
            await processRow(row); // sequential: keeps report rows in CSV order
          }
          cleanOldFiles(path.dirname(xmlPath)); // synchronous (was pointlessly awaited)
          const xml = xmlBuilder.buildObject(xmlData);
          fs.writeFileSync(xmlPath, xml);
          console.log(`XML report generated: ${xmlPath}`);
          resolve({
            totalTests: xmlData.testsuites.testsuite.$.tests,
            testItems: testItems.length,
            xmlPath
          });
        } catch (error) {
          reject(new Error(`Failed to generate XML: ${error.message}`));
        }
      })
      .on('error', (error) => {
        reject(new Error(`CSV processing error: ${error.message}`));
      });
  });
};
// Exported helpers: path preparation, directory management, the CSV ->
// ReportPortal/XML processing pipeline, and report-file retention cleanup.
module.exports = {
preparePaths,
ensureDirExists,
processCSV,
cleanOldFiles
};