@devicecloud.dev/dcd
Version:
Better cloud maestro testing
283 lines (282 loc) • 11.4 kB
JavaScript
// CommonJS interop boilerplate emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
exports.formatDurationSeconds = exports.writeJSONFile = exports.verifyAdditionalAppFiles = exports.uploadBinaries = exports.uploadBinary = exports.extractAppMetadataIos = exports.extractAppMetadataIosZip = exports.extractAppMetadataAndroid = exports.verifyAppZip = exports.compressFilesFromRelativePath = exports.compressFolderToBlob = exports.toBuffer = void 0;
// CLI framework (progress spinners, logging).
const core_1 = require("@oclif/core");
// required polyfill for node 18
const file_1 = require("@web-std/file");
// APK manifest parsing (android app id extraction).
const AppInfoParser = require("app-info-parser");
// Zip archive creation.
const archiver = require("archiver");
// Binary plist parsing (iOS Info.plist).
const bplist_parser_1 = require("bplist-parser");
const node_crypto_1 = require("node:crypto");
const node_fs_1 = require("node:fs");
const promises_1 = require("node:fs/promises");
const path = require("node:path");
const node_stream_1 = require("node:stream");
// Random-access zip reading (iOS .zip inspection).
const StreamZip = require("node-stream-zip");
// XML plist parsing.
const plist_1 = require("plist");
const cloud_1 = require("./commands/cloud");
const api_gateway_1 = require("./gateways/api-gateway");
const supabase_gateway_1 = require("./gateways/supabase-gateway");
/**
 * Drains an archiver stream into a single Buffer.
 * @param archive - An archiver instance (anything with pipe() and finalize()).
 * @returns Buffer containing the complete archive bytes.
 */
const toBuffer = async (archive) => {
    // Collect every chunk the archive emits, then concatenate once at the end.
    const chunks = [];
    const writable = new node_stream_1.Writable({
        write(chunk, _encoding, callback) {
            chunks.push(chunk);
            callback();
        },
    });
    // Fix: `finalize()` resolving does not guarantee the piped data has fully
    // flushed through the writable yet — wait for 'finish' before concatenating,
    // otherwise the returned buffer can be truncated.
    const flushed = new Promise((resolve, reject) => {
        writable.on('finish', resolve);
        writable.on('error', reject);
    });
    archive.pipe(writable);
    await archive.finalize();
    await flushed;
    return Buffer.concat(chunks);
};
exports.toBuffer = toBuffer;
/**
 * Zips a folder (at max compression) and returns it as a Blob.
 * The folder itself becomes the single top-level entry of the archive.
 * @param sourceDir - Path of the folder to compress.
 * @returns Blob with type 'application/zip'.
 */
const compressFolderToBlob = async (sourceDir) => {
    const archive = archiver('zip', {
        zlib: { level: 9 },
    });
    // Fix: surface archiver errors through the returned promise. Throwing from
    // inside the event handler escaped as an uncaught exception that callers
    // could never catch.
    const failed = new Promise((_resolve, reject) => {
        archive.on('error', reject);
    });
    // Fix: path.basename handles Windows separators too;
    // sourceDir.split('/').pop() only understood forward slashes.
    archive.directory(sourceDir, path.basename(sourceDir));
    const buffer = await Promise.race([(0, exports.toBuffer)(archive), failed]);
    return new Blob([buffer], { type: 'application/zip' });
};
exports.compressFolderToBlob = compressFolderToBlob;
/**
 * Bundles the given files into one zip buffer, storing each entry under its
 * path relative to `commonRoot`.
 * @param basePath - Directory the relative file paths are resolved against.
 * @param files - Relative file paths to include.
 * @param commonRoot - Leading path prefix to strip from every entry name.
 * @returns Buffer containing the zip bytes.
 */
const compressFilesFromRelativePath = async (basePath, files, commonRoot) => {
    const archive = archiver('zip', {
        zlib: { level: 9 },
    });
    // Fix: surface archiver errors through the returned promise instead of
    // throwing inside the event handler (uncatchable by callers).
    const failed = new Promise((_resolve, reject) => {
        archive.on('error', reject);
    });
    for (const file of files) {
        // Fix: strip `commonRoot` only when it is actually a leading prefix;
        // String#replace would have removed a match anywhere in the path.
        const entryName = file.startsWith(commonRoot)
            ? file.slice(commonRoot.length)
            : file;
        archive.file(path.resolve(basePath, file), { name: entryName });
    }
    return Promise.race([(0, exports.toBuffer)(archive), failed]);
};
exports.compressFilesFromRelativePath = compressFilesFromRelativePath;
/**
 * Validates that a zip contains exactly one top-level entry and that it is a
 * .app bundle directory.
 * @param zipPath - Path to the zip file.
 * @throws Error when the layout is not a single top-level .app/ entry.
 */
const verifyAppZip = async (zipPath) => {
    // eslint-disable-next-line import/namespace, new-cap
    const zip = await new StreamZip.async({
        file: zipPath,
        storeEntries: true,
    });
    try {
        const entries = await zip.entries();
        // Entries without a second path segment live at the archive root.
        const topLevelEntries = Object.values(entries).filter((entry) => !entry.name.split('/')[1]);
        if (topLevelEntries.length !== 1 ||
            !topLevelEntries[0].name.endsWith('.app/')) {
            throw new Error('Zip file must contain exactly one entry which is a .app, check the contents of the zip file');
        }
    }
    finally {
        // Fix: always release the underlying file descriptor — previously the
        // handle leaked whenever validation threw.
        await zip.close();
    }
};
exports.verifyAppZip = verifyAppZip;
/**
 * Reads the application id out of an APK.
 * @param appFilePath - Path to the .apk file.
 * @returns { appId, platform: 'android' }
 */
const extractAppMetadataAndroid = async (appFilePath) => {
    // app-info-parser parses the APK manifest; `package` holds the app id.
    const { package: appId } = await new AppInfoParser(appFilePath).parse();
    return { appId, platform: 'android' };
};
exports.extractAppMetadataAndroid = extractAppMetadataAndroid;
/**
 * Parses an Info.plist buffer, auto-detecting XML vs binary plist format.
 * The first byte discriminates: 60 / '<' is XML, 239 (0xEF) is a UTF-8 BOM
 * preceding XML, 98 ('b') is the "bplist" binary magic.
 * @param buffer - Raw plist bytes.
 * @returns Parsed plist object.
 * @throws Error when the first byte matches no known plist flavour.
 */
const parseInfoPlist = async (buffer) => {
    const marker = buffer[0];
    const looksLikeXml = marker === 60 || marker === '<' || marker === 239;
    if (looksLikeXml) {
        return (0, plist_1.parse)(buffer.toString());
    }
    if (marker === 98) {
        return (0, bplist_parser_1.parseBuffer)(buffer)[0];
    }
    throw new Error('Unknown plist buffer type.');
};
/**
 * Extracts the CFBundleIdentifier from a zipped .app bundle.
 * @param appFilePath - Path to the zip containing a .app.
 * @returns { appId, platform: 'ios' }
 */
const extractAppMetadataIosZip = async (appFilePath) => new Promise((resolve, reject) => {
    const zip = new StreamZip({ file: appFilePath });
    zip.on('ready', () => {
        // Sort entries by path depth so the shallowest .app/Info.plist wins
        // (nested frameworks carry their own Info.plists deeper in the tree).
        const entries = Object.values(zip.entries());
        const sortedEntries = entries.sort((a, b) => {
            const aDepth = a.name.split('/').length;
            const bDepth = b.name.split('/').length;
            return aDepth - bDepth;
        });
        const infoPlist = sortedEntries.find((e) => e.name.endsWith('.app/Info.plist'));
        if (!infoPlist) {
            // Fix: close the zip before rejecting — the handle leaked here.
            zip.close();
            reject(new Error('Failed to find info plist'));
            return;
        }
        let buffer;
        try {
            buffer = zip.entryDataSync(infoPlist.name);
        }
        catch (error) {
            // Fix: entryDataSync can throw; previously this escaped the event
            // handler uncaught and the zip was never closed.
            zip.close();
            reject(error);
            return;
        }
        parseInfoPlist(buffer)
            .then((data) => {
            const appId = data.CFBundleIdentifier;
            resolve({ appId, platform: 'ios' });
        })
            .catch(reject)
            // Fix: close on the failure path too, not only on success.
            .finally(() => zip.close());
    });
    zip.on('error', reject);
});
exports.extractAppMetadataIosZip = extractAppMetadataIosZip;
/**
 * Extracts the CFBundleIdentifier from an unzipped .app bundle folder.
 * A .app bundle keeps its Info.plist at the bundle root.
 * @param appFolderPath - Path to the .app folder.
 * @returns { appId, platform: 'ios' }
 */
const extractAppMetadataIos = async (appFolderPath) => {
    const plistPath = path.normalize(path.join(appFolderPath, 'Info.plist'));
    const plistBuffer = await (0, promises_1.readFile)(plistPath);
    const parsed = await parseInfoPlist(plistBuffer);
    return { appId: parsed.CFBundleIdentifier, platform: 'ios' };
};
exports.extractAppMetadataIos = extractAppMetadataIos;
/**
 * Uploads a single app binary (.apk file, .app folder, or zipped .app) and
 * returns the server-side binary id. Unless `ignoreShaCheck` is set, the
 * upload is skipped when the backend already holds a binary with the same
 * sha256. `log` toggles CLI progress output.
 */
const uploadBinary = async (filePath, apiUrl, apiKey, ignoreShaCheck = false, log = true) => {
    if (log) {
        core_1.ux.action.start('Checking and uploading binary', 'Initializing', {
            stdout: true,
        });
    }
    let file;
    if (filePath?.endsWith('.app')) {
        // iOS .app bundles are folders; zip them before uploading.
        const zippedAppBlob = await (0, exports.compressFolderToBlob)(filePath);
        file = new file_1.File([zippedAppBlob], filePath + '.zip');
    }
    else {
        // .apk / .zip files are uploaded as-is with a mime type looked up
        // from the file extension.
        const fileBuffer = await (0, promises_1.readFile)(filePath);
        const binaryBlob = new Blob([new Uint8Array(fileBuffer)], {
            type: cloud_1.mimeTypeLookupByExtension[filePath.split('.').pop()],
        });
        file = new file_1.File([binaryBlob], filePath);
    }
    let sha;
    try {
        sha = await getFileHashFromFile(file);
    }
    catch (error) {
        // Hashing is best-effort: without a sha we only lose deduplication.
        if (log) {
            console.warn('Warning: Failed to get file hash', error);
        }
    }
    if (!ignoreShaCheck && sha) {
        try {
            const { appBinaryId, exists } = await api_gateway_1.ApiGateway.checkForExistingUpload(apiUrl, apiKey, sha);
            if (exists) {
                if (log) {
                    core_1.ux.info(`sha hash matches existing binary with id: ${appBinaryId}, skipping upload. Force upload with --ignore-sha-check`);
                    core_1.ux.action.stop(`Skipping upload.`);
                }
                // Reuse the existing binary instead of uploading again.
                return appBinaryId;
            }
        }
        catch {
            // ignore error
        }
    }
    // NOTE: the destructured `path` shadows the node:path module for the rest
    // of this function — it is the signed storage path returned by the API.
    const { id, message, path, token } = await api_gateway_1.ApiGateway.getBinaryUploadUrl(apiUrl, apiKey, filePath?.endsWith('.apk') ? 'android' : 'ios');
    if (!path)
        throw new Error(message);
    let metadata;
    try {
        // Extract the app id (package name / bundle identifier) so the
        // backend can label the binary; optional on failure.
        metadata = filePath?.endsWith('.apk')
            ? await (0, exports.extractAppMetadataAndroid)(filePath)
            : filePath?.endsWith('.zip')
                ? await (0, exports.extractAppMetadataIosZip)(filePath)
                : await (0, exports.extractAppMetadataIos)(filePath);
    }
    catch {
        if (log) {
            core_1.ux.warn('Failed to extract app metadata, please share with support@devicecloud.dev so we can improve our parsing.');
        }
    }
    // Storage environment is inferred from the API host.
    const env = apiUrl === 'https://api.devicecloud.dev' ? 'prod' : 'dev';
    await supabase_gateway_1.SupabaseGateway.uploadToSignedUrl(env, path, token, file);
    await api_gateway_1.ApiGateway.finaliseUpload(apiUrl, apiKey, id, metadata, path, sha);
    if (log) {
        core_1.ux.action.stop(`\nBinary uploaded with id: ${id}`);
    }
    return id;
};
exports.uploadBinary = uploadBinary;
/**
 * Uploads every app binary concurrently.
 * @returns Array of binary ids, in the same order as `finalAppFiles`.
 */
const uploadBinaries = async (finalAppFiles, apiUrl, apiKey, ignoreShaCheck = false, log = true) => {
    const pendingUploads = finalAppFiles.map((appFile) => (0, exports.uploadBinary)(appFile, apiUrl, apiKey, ignoreShaCheck, log));
    return Promise.all(pendingUploads);
};
exports.uploadBinaries = uploadBinaries;
/**
 * Validates extra app files: only .apk (android) or .app/.zip (iOS) are
 * accepted, and zipped apps also get their internal layout checked.
 * @param appFiles - Optional list of file paths; no-op when empty/undefined.
 * @throws Error on an unsupported extension or an invalid zip layout.
 */
const verifyAdditionalAppFiles = async (appFiles) => {
    if (appFiles?.length) {
        // Fix: the list contained 'apk' without the dot, which accepted any
        // name merely *ending* in "apk" (e.g. "foo.xapk"); require '.apk' to
        // match the error message below.
        if (!appFiles.every((f) => ['.apk', '.app', '.zip'].some((ext) => f.endsWith(ext)))) {
            throw new Error('App file must be a .apk for android or .app/.zip file for iOS');
        }
        await Promise.all(appFiles.map(async (f) => {
            if (f.endsWith('.zip')) {
                await (0, exports.verifyAppZip)(f);
            }
        }));
    }
};
exports.verifyAdditionalAppFiles = verifyAdditionalAppFiles;
/**
 * Computes the sha256 of a File/Blob by streaming its contents.
 * @param file - Any object with a web-stream .stream() method (File/Blob).
 * @returns Hex-encoded sha256 digest.
 */
async function getFileHashFromFile(file) {
    // Fix: drop the explicit-Promise-construction anti-pattern — the original
    // wrapped an async helper in `new Promise` and fired it as a floating
    // promise; a plain await loop has identical behavior and propagates
    // errors naturally.
    const hash = (0, node_crypto_1.createHash)('sha256');
    const reader = file.stream().getReader();
    for (;;) {
        const { done, value } = await reader.read();
        if (done) {
            break;
        }
        hash.update(value);
    }
    return hash.digest('hex');
}
/**
 * Writes JSON data to a file with error handling
 * @param filePath - Path to the output JSON file
 * @param data - Data to be serialized to JSON
 * @param logger - Logger object with log and warn methods
 * @returns true if successful, false if an error occurred
 */
const writeJSONFile = (filePath, data, logger) => {
    try {
        (0, node_fs_1.writeFileSync)(filePath, JSON.stringify(data, null, 2));
        logger.log(`JSON output written to: ${path.resolve(filePath)}`);
        // Fix: the JSDoc promises a boolean but nothing was returned — callers
        // checking the result always saw undefined (falsy).
        return true;
    }
    catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        // Classify the most common failure modes to give actionable hints.
        const isPermissionError = errorMessage.includes('EACCES') || errorMessage.includes('EPERM');
        const isNoSuchFileError = errorMessage.includes('ENOENT');
        logger.warn(`Failed to write JSON output to file: ${filePath}`);
        if (isPermissionError) {
            logger.warn('Permission denied - check file/directory write permissions');
            logger.warn('Try running with appropriate permissions or choose a different output location');
        }
        else if (isNoSuchFileError) {
            logger.warn('Directory does not exist - create the directory first or choose an existing path');
        }
        logger.warn(`Error details: ${errorMessage}`);
        return false;
    }
};
exports.writeJSONFile = writeJSONFile;
/**
 * Formats duration in seconds into a human readable string
 * @param durationSeconds - Duration in seconds
 * @returns Formatted duration string (e.g. "2m 30s" or "45s")
 */
const formatDurationSeconds = (durationSeconds) => {
    // Switch to "Xm Ys" once we cross the one-minute mark; plain "Ns" below it.
    const wholeMinutes = Math.floor(durationSeconds / 60);
    if (wholeMinutes > 0) {
        return `${wholeMinutes}m ${durationSeconds % 60}s`;
    }
    return `${durationSeconds}s`;
};
exports.formatDurationSeconds = formatDurationSeconds;