/*
 * appium-xcuitest-driver
 * Appium driver for iOS using XCUITest for backend.
 */
import _ from 'lodash';
import {fs, tempDir, logger, util} from 'appium/support';
import {SubProcess} from 'teen_process';
import {encodeBase64OrUpload} from '../utils';
import {WDA_BASE_URL} from 'appium-webdriveragent';
import {waitForCondition} from 'asyncbox';
import url from 'url';
import type {XCUITestDriver} from '../driver';
import type {StartRecordingScreenOptions, StopRecordingScreenOptions} from './types';
import type {WDASettings} from 'appium-webdriveragent';
/**
 * Max allowed recording time, in seconds.
 * This value (plus one) is passed to the ffmpeg 'reconnect_delay_max' argument,
 * which is limited to the [0, 4294] range, so it must stay below that limit —
 * this way we can produce a better error message than ffmpeg's own.
 */
const MAX_RECORDING_TIME_SEC = 4200;
// Default recording duration (3 minutes) if no timeLimit is provided
const DEFAULT_RECORDING_TIME_SEC = 60 * 3;
// Default MJPEG server frame rate
const DEFAULT_FPS = 10;
// Default named entry in QUALITY_MAPPING below
const DEFAULT_QUALITY = 'medium';
// Default port on which WDA exposes its MJPEG screenshots stream
const DEFAULT_MJPEG_SERVER_PORT = 9100;
// Default ffmpeg video codec
const DEFAULT_VCODEC = 'mjpeg';
const MP4_EXT = '.mp4';
const FFMPEG_BINARY = 'ffmpeg';
// Dedicated logger so ffmpeg's stderr output is clearly attributed
const ffmpegLogger = logger.getLogger(FFMPEG_BINARY);
// Maps the user-facing quality names to WDA's mjpegServerScreenshotQuality
// percentage values (1..100)
const QUALITY_MAPPING: Record<string, number> = {
low: 10,
medium: 25,
high: 75,
photo: 100,
};
// Per-backend ffmpeg hardware acceleration parameters keyed by the
// 'hardwareAcceleration' option value. See the linked ffmpeg wiki pages.
const HARDWARE_ACCELERATION_PARAMETERS: Record<
string,
{
hwaccel: string;
hwaccelOutputFormat: string;
scaleFilterHWAccel: string;
videoTypeHWAccel: string;
}
> = {
/* https://trac.ffmpeg.org/wiki/HWAccelIntro#VideoToolbox */
videoToolbox: {
hwaccel: 'videotoolbox',
hwaccelOutputFormat: 'videotoolbox_vld',
scaleFilterHWAccel: 'scale_vt',
videoTypeHWAccel: 'h264_videotoolbox',
},
/* https://trac.ffmpeg.org/wiki/HWAccelIntro#CUDANVENCNVDEC */
cuda: {
hwaccel: 'cuda',
hwaccelOutputFormat: 'cuda',
scaleFilterHWAccel: 'scale_cuda',
videoTypeHWAccel: 'h264_nvenc',
},
/* https://trac.ffmpeg.org/wiki/Hardware/AMF */
amf_dx11: {
hwaccel: 'd3d11va',
hwaccelOutputFormat: 'd3d11',
scaleFilterHWAccel: 'scale',
videoTypeHWAccel: 'av1_amf',
},
/* https://trac.ffmpeg.org/wiki/Hardware/QuickSync */
qsv: {
hwaccel: 'qsv',
hwaccelOutputFormat: '',
scaleFilterHWAccel: 'scale_qsv',
videoTypeHWAccel: 'h264_qsv',
},
/* https://trac.ffmpeg.org/wiki/Hardware/VAAPI */
vaapi: {
hwaccel: 'vaapi',
hwaccelOutputFormat: 'vaapi',
scaleFilterHWAccel: 'scale_vaapi',
videoTypeHWAccel: 'h264_vaapi',
},
};
// ffmpeg prints 'frame=...' progress lines to stderr once capturing has begun;
// used to detect successful capture startup
const CAPTURE_START_MARKER = /^\s*frame=/;
/**
 * Wrapper over an ffmpeg subprocess that records the device screen
 * from the MJPEG stream exposed by WebDriverAgent.
 */
export class ScreenRecorder {
  /** Full path to the destination video file */
  private readonly videoPath: string;
  /** Driver logger instance */
  private readonly log: any;
  private readonly opts: ScreenRecorderOptions;
  /** Identifier of the device under test (used for log messages only) */
  private readonly udid: string;
  /** The running ffmpeg process, or null if not started/already stopped */
  private mainProcess: SubProcess | null;
  /** Auto-stop timer armed in start() */
  private timeoutHandler: NodeJS.Timeout | null;

  constructor(udid: string, log: any, videoPath: string, opts: ScreenRecorderOptions = {}) {
    this.videoPath = videoPath;
    this.log = log;
    this.opts = opts;
    this.udid = udid;
    this.mainProcess = null;
    this.timeoutHandler = null;
  }

  /**
   * Starts the ffmpeg capture process and schedules its automatic interruption.
   *
   * @param timeoutMs - The recording is interrupted automatically after this many milliseconds.
   * @throws {Error} If the ffmpeg binary cannot be found in PATH or
   * the capture process dies right after startup.
   */
  async start(timeoutMs: number): Promise<void> {
    try {
      await fs.which(FFMPEG_BINARY);
    } catch {
      throw new Error(
        `'${FFMPEG_BINARY}' binary is not found in PATH. Install it using 'brew install ffmpeg'. ` +
          `Check https://www.ffmpeg.org/download.html for more details.`,
      );
    }

    const {
      hardwareAcceleration,
      remotePort,
      remoteUrl,
      videoFps,
      videoType,
      videoScale,
      videoFilters,
      pixelFormat,
    } = this.opts;

    const args: string[] = [
      '-f',
      'mjpeg',
      // https://github.com/appium/appium/issues/16294
      '-reconnect',
      '1',
      '-reconnect_at_eof',
      '1',
      '-reconnect_streamed',
      '1',
      '-reconnect_delay_max',
      `${timeoutMs / 1000 + 1}`,
    ];
    const {hwaccel, hwaccelOutputFormat, scaleFilterHWAccel, videoTypeHWAccel} =
      HARDWARE_ACCELERATION_PARAMETERS[hardwareAcceleration || ''] ?? {};
    if (hwaccel) {
      args.push('-hwaccel', hwaccel);
    }
    if (hwaccelOutputFormat) {
      args.push('-hwaccel_output_format', hwaccelOutputFormat);
    }
    // Parameter `-r` is optional. See details: https://github.com/appium/appium/issues/12067
    // BUGFIX: only append '-r' when videoFps is actually provided. The previous
    // condition `(videoFps && videoType === 'libx264') || videoTypeHWAccel`
    // appended the literal '-r undefined' whenever a hardware-accelerated codec
    // was selected without an explicit fps value, which made ffmpeg fail.
    if (videoFps && (videoType === 'libx264' || videoTypeHWAccel)) {
      args.push('-r', String(videoFps));
    }
    const {protocol, hostname} = url.parse(remoteUrl || '');
    args.push('-i', `${protocol}//${hostname}:${remotePort}`);
    if (videoFilters || videoScale) {
      args.push('-vf', videoFilters || `${scaleFilterHWAccel || 'scale'}=${videoScale}`);
    }
    // Quicktime compatibility via pixelFormat: 'yuv420p'
    if (pixelFormat) {
      args.push('-pix_fmt', pixelFormat);
    }
    args.push('-vcodec', videoTypeHWAccel || videoType || DEFAULT_VCODEC);
    // Overwrite the destination file if it already exists
    args.push('-y');
    args.push(this.videoPath);

    this.mainProcess = new SubProcess(FFMPEG_BINARY, args);
    let isCaptureStarted = false;
    this.mainProcess.on('line-stderr', (line: string) => {
      if (CAPTURE_START_MARKER.test(line)) {
        // ffmpeg 'frame=' progress lines signal that capture has begun;
        // they are intentionally not forwarded to the logger to reduce noise
        if (!isCaptureStarted) {
          isCaptureStarted = true;
        }
      } else {
        ffmpegLogger.info(line);
      }
    });
    await this.mainProcess.start(0);
    const startupTimeout = 5000;
    try {
      await waitForCondition(() => isCaptureStarted, {
        waitMs: startupTimeout,
        intervalMs: 300,
      });
    } catch {
      // Best effort: capture might still start later, so only warn
      this.log.warn(
        `Screen capture process did not start within ${startupTimeout}ms. Continuing anyway`,
      );
    }
    if (!this.mainProcess.isRunning) {
      throw new Error(
        `The screen capture process '${FFMPEG_BINARY}' died unexpectedly. ` +
          `Check server logs for more details`,
      );
    }
    this.log.info(
      `Starting screen capture on the device '${this.udid}' with command: '${FFMPEG_BINARY} ${args.join(' ')}'. ` +
        `Will timeout in ${timeoutMs}ms`,
    );
    // Arm the auto-stop timer so the recording never outlives timeoutMs
    this.timeoutHandler = setTimeout(async () => {
      if (!(await this.interrupt())) {
        this.log.warn(
          `Cannot finish the active screen recording on the device '${this.udid}' after ${timeoutMs}ms timeout`,
        );
      }
    }, timeoutMs);
  }

  /**
   * Stops the ffmpeg process if it is running and cancels the auto-stop timer.
   *
   * @param force - If true the process is terminated with SIGTERM,
   * otherwise it is gracefully interrupted with SIGINT.
   * @returns true if the process was stopped successfully (or was not running).
   */
  async interrupt(force = false): Promise<boolean> {
    let result = true;

    if (this.timeoutHandler) {
      clearTimeout(this.timeoutHandler);
      this.timeoutHandler = null;
    }

    if (this.mainProcess?.isRunning) {
      const interruptPromise = this.mainProcess.stop(force ? 'SIGTERM' : 'SIGINT');
      this.mainProcess = null;
      try {
        await interruptPromise;
      } catch (e: any) {
        this.log.warn(
          `Cannot ${force ? 'terminate' : 'interrupt'} ${FFMPEG_BINARY}. ` +
            `Original error: ${e.message}`,
        );
        result = false;
      }
    }

    return result;
  }

  /**
   * Gracefully stops the recording.
   *
   * @returns The path to the recorded video file.
   */
  async finish(): Promise<string> {
    await this.interrupt();
    return this.videoPath;
  }

  /**
   * Removes the recorded video file from disk, if it exists.
   */
  async cleanup(): Promise<void> {
    if (await fs.exists(this.videoPath)) {
      await fs.rimraf(this.videoPath);
    }
  }
}
/**
 * Direct Appium to start recording the device screen
 *
 * Record the display of devices running iOS Simulator since Xcode 9 or real devices since iOS 11
 * (ffmpeg utility is required: 'brew install ffmpeg').
 * It records screen activity to a MPEG-4 file. Audio is not recorded with the video file.
 * If screen recording has been already started then the command will stop it forcefully and start a new one.
 * The previously recorded video file will be deleted.
 *
 * @param options - The available options.
 * @returns Base64-encoded content of the recorded media file if
 * any screen recording is currently running or an empty string.
 * @throws {Error} If screen recording has failed to start, or if the
 * timeLimit/videoQuality/videoFps option values are out of their valid ranges.
 */
export async function startRecordingScreen(
  this: XCUITestDriver,
  options: StartRecordingScreenOptions = {},
): Promise<string> {
  const {
    videoType = DEFAULT_VCODEC,
    timeLimit = DEFAULT_RECORDING_TIME_SEC,
    videoQuality = DEFAULT_QUALITY,
    videoFps = DEFAULT_FPS,
    videoFilters,
    videoScale,
    forceRestart,
    pixelFormat,
    hardwareAcceleration,
  } = options;

  let result = '';
  if (!forceRestart) {
    this.log.info(
      `Checking if there is/was a previous screen recording. ` +
        `Set 'forceRestart' option to 'true' if you'd like to skip this step.`,
    );
    // The content of a previous recording (if any) becomes the return value
    result = (await this.stopRecordingScreen(options)) ?? result;
  }

  // Unique temporary destination for the new capture
  const videoPath = await tempDir.path({
    prefix: `appium_${Math.random().toString(16).substring(2, 8)}`,
    suffix: MP4_EXT,
  });

  const wdaBaseUrl = this.opts.wdaBaseUrl || WDA_BASE_URL;
  const screenRecorder = new ScreenRecorder(this.device.udid, this.log, videoPath, {
    remotePort: this.opts.mjpegServerPort || DEFAULT_MJPEG_SERVER_PORT,
    remoteUrl: wdaBaseUrl,
    videoType,
    videoFilters,
    videoScale,
    videoFps: typeof videoFps === 'string' ? parseInt(videoFps, 10) : videoFps,
    pixelFormat,
    hardwareAcceleration,
  });
  if (!(await screenRecorder.interrupt(true))) {
    throw this.log.errorWithException('Unable to stop screen recording process');
  }
  // Dispose of any leftover recording from a previous session
  if (this._recentScreenRecorder) {
    await this._recentScreenRecorder.cleanup();
    this._recentScreenRecorder = null;
  }

  const timeoutSeconds = parseFloat(String(timeLimit));
  if (isNaN(timeoutSeconds) || timeoutSeconds > MAX_RECORDING_TIME_SEC || timeoutSeconds <= 0) {
    throw this.log.errorWithException(
      `The timeLimit value must be in range [1, ${MAX_RECORDING_TIME_SEC}] seconds. ` +
        `The value of '${timeLimit}' has been passed instead.`,
    );
  }

  // Fetch the current WDA settings so we only push values that actually change
  let {mjpegServerScreenshotQuality, mjpegServerFramerate} = (await this.proxyCommand(
    '/appium/settings',
    'GET',
  )) as WDASettings;
  if (videoQuality) {
    const quality = _.isInteger(videoQuality)
      ? (videoQuality as number)
      : QUALITY_MAPPING[_.toLower(String(videoQuality))];
    // BUGFIX: also enforce the numeric range. Previously any integer (e.g. 150)
    // was accepted although the error message below promises the 1..100 range,
    // and the out-of-range value was forwarded to WDA as-is.
    if (!quality || quality < 1 || quality > 100) {
      throw new Error(
        `videoQuality value should be one of ${JSON.stringify(
          _.keys(QUALITY_MAPPING),
        )} or a number in range 1..100. ` + `'${videoQuality}' is given instead`,
      );
    }
    // 'undefined' means the current WDA value is already correct
    mjpegServerScreenshotQuality = mjpegServerScreenshotQuality !== quality ? quality : undefined;
  } else {
    mjpegServerScreenshotQuality = undefined;
  }
  if (videoFps) {
    const fps = parseInt(String(videoFps), 10);
    if (isNaN(fps)) {
      throw new Error(
        `videoFps value should be a valid number in range 1..60. ` +
          `'${videoFps}' is given instead`,
      );
    }
    mjpegServerFramerate = mjpegServerFramerate !== fps ? fps : undefined;
  } else {
    mjpegServerFramerate = undefined;
  }
  if (util.hasValue(mjpegServerScreenshotQuality) || util.hasValue(mjpegServerFramerate)) {
    await this.proxyCommand('/appium/settings', 'POST', {
      settings: {
        mjpegServerScreenshotQuality,
        mjpegServerFramerate,
      },
    });
  }

  try {
    await screenRecorder.start(timeoutSeconds * 1000);
  } catch (e) {
    // Make sure no orphaned ffmpeg process or temp file is left behind
    await screenRecorder.interrupt(true);
    await screenRecorder.cleanup();
    throw e;
  }
  this._recentScreenRecorder = screenRecorder;

  return result;
}
/**
 * Direct Appium to stop screen recording and return the video
 *
 * When an active screen recording process exists, it is finished and the
 * resulting video is returned. When none is running, the most recently
 * recorded file is used instead; if neither exists, an empty string is
 * returned.
 *
 * @param options - The available options.
 * @returns Base64-encoded content of the recorded media
 * file if `remotePath` parameter is empty or null or an empty string.
 * @throws {Error} If there was an error while getting the name of a media
 * file or the file content cannot be uploaded to the remote
 * location.
 */
export async function stopRecordingScreen(
  this: XCUITestDriver,
  options: StopRecordingScreenOptions = {},
): Promise<string | null> {
  const recorder = this._recentScreenRecorder;
  if (!recorder) {
    this.log.info('Screen recording is not running. There is nothing to stop.');
    return '';
  }

  try {
    const recordedVideoPath = await recorder.finish();
    const hasVideoFile = await fs.exists(recordedVideoPath);
    if (!hasVideoFile) {
      throw this.log.errorWithException(
        `The screen recorder utility has failed ` +
          `to store the actual screen recording at '${recordedVideoPath}'`,
      );
    }
    return await encodeBase64OrUpload(recordedVideoPath, options.remotePath, options);
  } finally {
    // Always release the ffmpeg process and the temp file, even on failure
    await recorder.interrupt(true);
    await recorder.cleanup();
    this._recentScreenRecorder = null;
  }
}
/**
 * Options accepted by the {@link ScreenRecorder} constructor.
 */
interface ScreenRecorderOptions {
// Key into HARDWARE_ACCELERATION_PARAMETERS selecting the ffmpeg
// hardware acceleration backend (e.g. 'videoToolbox', 'cuda')
hardwareAcceleration?: string;
// Port of the remote MJPEG server to capture from
remotePort?: number;
// Base URL of the remote MJPEG server (only protocol and hostname are used)
remoteUrl?: string;
// Frame rate passed to ffmpeg's '-r' argument
videoFps?: number;
// ffmpeg video codec name (e.g. 'mjpeg', 'libx264')
videoType?: string;
// Value for the ffmpeg scale filter (used when videoFilters is not set)
videoScale?: string;
// Raw ffmpeg '-vf' filter string; takes precedence over videoScale
videoFilters?: string;
// ffmpeg '-pix_fmt' value, e.g. 'yuv420p' for QuickTime compatibility
pixelFormat?: string;
}