dryrun-ci
DryRun CI - Local GitLab CI/CD pipeline testing tool with Docker execution, performance monitoring, and security sandboxing
764 lines (763 loc) • 29.7 kB
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IntegrationTestingManager = void 0;
const events_1 = require("events");
const execution_1 = require("../types/execution");
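/**
 * IntegrationTestingManager
 *
 * Spins up disposable Docker environments (application services plus backing
 * databases), runs a job's scripts inside a dedicated test-runner container,
 * and tears everything down again. Progress and failures are reported through
 * EventEmitter events ('environment-ready', 'service-started', and so on).
 *
 * Illustrative flow (argument shapes follow the methods below):
 *   const manager = new IntegrationTestingManager();
 *   const env = await manager.createTestEnvironment('api', services, databases, config);
 *   const result = await manager.runIntegrationTests(env.id, job, config);
 *   await manager.destroyTestEnvironment(env.id);
 */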
class IntegrationTestingManager extends events_1.EventEmitter {
constructor() {
super();
this.environments = new Map();
this.runningContainers = new Map();
this.activeNetworks = new Map();
// Failures are reported via the 'docker-error' event; swallow the rejected
// promise here so the constructor does not leave an unhandled rejection.
this.initializeDocker().catch(() => { });
}
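// Connect to the local Docker daemon via dockerode and verify it responds to a ping.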
async initializeDocker() {
try {
const Docker = require('dockerode');
this.docker = new Docker();
await this.docker.ping();
this.emit('docker-ready');
}
catch (error) {
this.emit('docker-error', { error: error instanceof Error ? error.message : 'Unknown error' });
throw new Error('Failed to initialize Docker for integration testing');
}
}
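// Create a full test environment: networks and volumes first, then services in
// dependency order, then wait for health checks and run database initialization.
// On any failure the partially created environment is destroyed before rethrowing.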
async createTestEnvironment(name, services, databases = [], config) {
const environmentId = this.generateEnvironmentId(name);
const environment = {
id: environmentId,
name,
services: [...services, ...this.createDatabaseServices(databases)],
databases,
networks: this.createNetworkDefinitions(services, databases),
volumes: this.createVolumeDefinitions(services, databases),
env: this.createEnvironmentVariables(services, databases),
status: 'creating',
endpoints: []
};
this.environments.set(environmentId, environment);
this.emit('environment-creating', { environmentId, name });
try {
await this.createNetworks(environment);
await this.createVolumes(environment);
await this.startServicesInOrder(environment, config);
await this.waitForServicesReady(environment);
await this.initializeDatabases(environment);
environment.status = 'ready';
this.emit('environment-ready', { environmentId, endpoints: environment.endpoints });
}
catch (error) {
environment.status = 'error';
this.emit('environment-error', {
environmentId,
error: error instanceof Error ? error.message : 'Unknown error'
});
await this.destroyTestEnvironment(environmentId);
throw error;
}
return environment;
}
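// Translate database configs into container service definitions for the supported engines.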
createDatabaseServices(databases) {
return databases.map(db => {
switch (db.type) {
case 'postgresql':
return this.createPostgreSQLService(db);
case 'mysql':
return this.createMySQLService(db);
case 'mongodb':
return this.createMongoDBService(db);
case 'redis':
return this.createRedisService(db);
case 'elasticsearch':
return this.createElasticsearchService(db);
default:
throw new Error(`Unsupported database type: ${db.type}`);
}
});
}
createPostgreSQLService(db) {
return {
name: `postgresql-${db.database}`,
image: `postgres:${db.version}`,
ports: [{ host: db.port, container: 5432, protocol: 'tcp' }],
environment: {
POSTGRES_DB: db.database,
POSTGRES_USER: db.username,
POSTGRES_PASSWORD: db.password,
POSTGRES_INITDB_ARGS: '--auth-host=scram-sha-256'
},
volumes: [
{ host: `postgres-data-${db.database}`, container: '/var/lib/postgresql/data', readOnly: false }
],
healthCheck: {
command: ['pg_isready', '-U', db.username, '-d', db.database],
interval: 10,
timeout: 5,
retries: 5,
startPeriod: 30
},
dependsOn: [],
networks: ['test-network'],
labels: {
'dryrun.service': 'database',
'dryrun.type': 'postgresql'
}
};
}
createMySQLService(db) {
return {
name: `mysql-${db.database}`,
image: `mysql:${db.version}`,
ports: [{ host: db.port, container: 3306, protocol: 'tcp' }],
environment: {
MYSQL_DATABASE: db.database,
MYSQL_USER: db.username,
MYSQL_PASSWORD: db.password,
MYSQL_ROOT_PASSWORD: db.password
},
volumes: [
{ host: `mysql-data-${db.database}`, container: '/var/lib/mysql', readOnly: false }
],
healthCheck: {
command: ['mysqladmin', 'ping', '-h', 'localhost'],
interval: 10,
timeout: 5,
retries: 5,
startPeriod: 30
},
dependsOn: [],
networks: ['test-network'],
labels: {
'dryrun.service': 'database',
'dryrun.type': 'mysql'
}
};
}
createMongoDBService(db) {
return {
name: `mongodb-${db.database}`,
image: `mongo:${db.version}`,
ports: [{ host: db.port, container: 27017, protocol: 'tcp' }],
environment: {
MONGO_INITDB_DATABASE: db.database,
MONGO_INITDB_ROOT_USERNAME: db.username,
MONGO_INITDB_ROOT_PASSWORD: db.password
},
volumes: [
{ host: `mongo-data-${db.database}`, container: '/data/db', readOnly: false }
],
healthCheck: {
command: ['mongo', '--eval', 'db.adminCommand("ping")'],
interval: 10,
timeout: 5,
retries: 5,
startPeriod: 30
},
dependsOn: [],
networks: ['test-network'],
labels: {
'dryrun.service': 'database',
'dryrun.type': 'mongodb'
}
};
}
createRedisService(db) {
return {
name: `redis-${db.database}`,
image: `redis:${db.version}`,
ports: [{ host: db.port, container: 6379, protocol: 'tcp' }],
environment: {},
volumes: [
{ host: `redis-data-${db.database}`, container: '/data', readOnly: false }
],
healthCheck: {
command: ['redis-cli', 'ping'],
interval: 10,
timeout: 5,
retries: 3,
startPeriod: 10
},
dependsOn: [],
networks: ['test-network'],
labels: {
'dryrun.service': 'database',
'dryrun.type': 'redis'
}
};
}
createElasticsearchService(db) {
return {
name: `elasticsearch-${db.database}`,
image: `elasticsearch:${db.version}`,
ports: [
{ host: db.port, container: 9200, protocol: 'tcp' },
{ host: db.port + 100, container: 9300, protocol: 'tcp' }
],
environment: {
'discovery.type': 'single-node',
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
},
volumes: [
{ host: `elasticsearch-data-${db.database}`, container: '/usr/share/elasticsearch/data', readOnly: false }
],
healthCheck: {
command: ['curl', '-f', 'http://localhost:9200/_cluster/health'],
interval: 30,
timeout: 10,
retries: 3,
startPeriod: 60
},
dependsOn: [],
networks: ['test-network'],
labels: {
'dryrun.service': 'database',
'dryrun.type': 'elasticsearch'
}
};
}
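// All services share a single bridge network named 'test-network'.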
createNetworkDefinitions(services, databases) {
return [
{
name: 'test-network',
driver: 'bridge',
options: {
'com.docker.network.bridge.name': 'dryrun-test-br'
}
}
];
}
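// Collect named volumes: one data volume per database plus any non-absolute
// host paths declared by services, de-duplicated.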
createVolumeDefinitions(services, databases) {
const volumes = [];
databases.forEach(db => {
volumes.push(`${db.type}-data-${db.database}`);
});
services.forEach(service => {
service.volumes.forEach(volume => {
if (!volume.host.startsWith('/')) {
volumes.push(volume.host);
}
});
});
return Array.from(new Set(volumes));
}
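// Build environment variables exposing connection details for every database
// (e.g. POSTGRESQL_URL, POSTGRESQL_HOST) and every service (HOST/PORT/URL).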
createEnvironmentVariables(services, databases) {
const env = {};
databases.forEach(db => {
const serviceKey = `${db.type.toUpperCase()}_URL`;
env[serviceKey] = this.getDatabaseConnectionString(db);
env[`${db.type.toUpperCase()}_HOST`] = `${db.type}-${db.database}`;
env[`${db.type.toUpperCase()}_PORT`] = db.port.toString();
env[`${db.type.toUpperCase()}_DATABASE`] = db.database;
env[`${db.type.toUpperCase()}_USERNAME`] = db.username;
env[`${db.type.toUpperCase()}_PASSWORD`] = db.password;
});
services.forEach(service => {
const serviceKey = service.name.toUpperCase().replace(/-/g, '_');
if (service.ports.length > 0) {
env[`${serviceKey}_HOST`] = service.name;
env[`${serviceKey}_PORT`] = service.ports[0].container.toString();
env[`${serviceKey}_URL`] = `http://${service.name}:${service.ports[0].container}`;
}
});
return env;
}
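// Build a connection string for the given database, addressed by its in-network service name.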
getDatabaseConnectionString(db) {
switch (db.type) {
case 'postgresql':
return `postgresql://${db.username}:${db.password}@${db.type}-${db.database}:5432/${db.database}`;
case 'mysql':
return `mysql://${db.username}:${db.password}@${db.type}-${db.database}:3306/${db.database}`;
case 'mongodb':
return `mongodb://${db.username}:${db.password}@${db.type}-${db.database}:27017/${db.database}`;
case 'redis':
return `redis://${db.type}-${db.database}:6379`;
case 'elasticsearch':
return `http://${db.type}-${db.database}:9200`;
default:
return '';
}
}
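// Create the environment's Docker networks; if any creation fails, remove the
// networks created so far before rethrowing.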
async createNetworks(environment) {
const createdNetworks = [];
for (const networkDef of environment.networks) {
try {
const network = await this.docker.createNetwork({
Name: `${environment.id}-${networkDef.name}`,
Driver: networkDef.driver,
Options: networkDef.options,
Labels: {
'dryrun.environment': environment.id,
'dryrun.network': networkDef.name
}
});
createdNetworks.push(network.id);
this.emit('network-created', {
environmentId: environment.id,
networkName: networkDef.name,
networkId: network.id
});
}
catch (error) {
for (const networkId of createdNetworks) {
try {
const network = this.docker.getNetwork(networkId);
await network.remove();
}
catch (cleanupError) {
}
}
throw error;
}
}
this.activeNetworks.set(environment.id, createdNetworks);
}
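// Create named volumes for the environment, tolerating volumes that already exist.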
async createVolumes(environment) {
for (const volumeName of environment.volumes) {
try {
await this.docker.createVolume({
Name: `${environment.id}-${volumeName}`,
Labels: {
'dryrun.environment': environment.id,
'dryrun.volume': volumeName
}
});
this.emit('volume-created', {
environmentId: environment.id,
volumeName
});
}
catch (error) {
if (error instanceof Error && !error.message?.includes('already exists')) {
throw error;
}
}
}
}
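// Start services in topological (dependency) order, failing fast if a declared
// dependency has not been started.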
async startServicesInOrder(environment, config) {
const startedServices = new Set();
const containerIds = [];
const sortedServices = this.topologicalSort(environment.services);
for (const service of sortedServices) {
for (const dependency of service.dependsOn) {
if (!startedServices.has(dependency)) {
throw new Error(`Service ${service.name} depends on ${dependency} which is not started`);
}
}
try {
const containerId = await this.startService(service, environment, config);
containerIds.push(containerId);
startedServices.add(service.name);
this.emit('service-started', {
environmentId: environment.id,
serviceName: service.name,
containerId
});
}
catch (error) {
this.emit('service-start-error', {
environmentId: environment.id,
serviceName: service.name,
error: error instanceof Error ? error.message : 'Unknown error'
});
throw error;
}
}
this.runningContainers.set(environment.id, containerIds);
}
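// Depth-first topological sort of services by their dependsOn edges, with
// circular-dependency detection.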
topologicalSort(services) {
const sorted = [];
const visited = new Set();
const visiting = new Set();
const visit = (service) => {
if (visiting.has(service.name)) {
throw new Error(`Circular dependency detected involving ${service.name}`);
}
if (visited.has(service.name)) {
return;
}
visiting.add(service.name);
for (const depName of service.dependsOn) {
const dependency = services.find(s => s.name === depName);
if (dependency) {
visit(dependency);
}
}
visiting.delete(service.name);
visited.add(service.name);
sorted.push(service);
};
for (const service of services) {
if (!visited.has(service.name)) {
visit(service);
}
}
return sorted;
}
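// Build the container configuration for a service (ports, binds, network,
// security options, health check), pull the image if needed, then start it.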
async startService(service, environment, config) {
const containerConfig = {
Image: service.image,
name: `${environment.id}-${service.name}`,
Env: Object.entries(service.environment).map(([key, value]) => `${key}=${value}`),
ExposedPorts: this.createExposedPorts(service.ports),
HostConfig: {
PortBindings: this.createPortBindings(service.ports),
Binds: this.createBindMounts(service.volumes, environment.id),
NetworkMode: `${environment.id}-${service.networks[0] || 'test-network'}`,
RestartPolicy: { Name: 'unless-stopped' },
SecurityOpt: config.security !== execution_1.SecurityLevel.NONE ? ['no-new-privileges:true'] : [],
ReadonlyRootfs: config.security === execution_1.SecurityLevel.STRICT
},
Labels: {
'dryrun.environment': environment.id,
'dryrun.service': service.name,
...service.labels
},
Healthcheck: service.healthCheck ? {
Test: service.healthCheck.command,
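// Docker expects health-check durations in nanoseconds; values here are configured in seconds.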
Interval: service.healthCheck.interval * 1000000000,
Timeout: service.healthCheck.timeout * 1000000000,
Retries: service.healthCheck.retries,
StartPeriod: service.healthCheck.startPeriod * 1000000000
} : undefined
};
await this.pullImageIfNeeded(service.image);
const container = await this.docker.createContainer(containerConfig);
await container.start();
return container.id;
}
createExposedPorts(ports) {
const exposedPorts = {};
for (const port of ports) {
exposedPorts[`${port.container}/${port.protocol}`] = {};
}
return exposedPorts;
}
createPortBindings(ports) {
const portBindings = {};
for (const port of ports) {
portBindings[`${port.container}/${port.protocol}`] = [
{ HostPort: port.host.toString() }
];
}
return portBindings;
}
createBindMounts(volumes, environmentId) {
return volumes.map(volume => {
const hostPath = volume.host.startsWith('/')
? volume.host
: `${environmentId}-${volume.host}`;
const mode = volume.readOnly ? 'ro' : 'rw';
return `${hostPath}:${volume.container}:${mode}`;
});
}
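// Pull the image only if it is not already present locally.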
async pullImageIfNeeded(image) {
try {
await this.docker.getImage(image).inspect();
}
catch (error) {
this.emit('image-pulling', { image });
await new Promise((resolve, reject) => {
this.docker.pull(image, (err, stream) => {
if (err)
return reject(err);
this.docker.modem.followProgress(stream, (err) => {
if (err)
return reject(err);
resolve();
});
});
});
this.emit('image-pulled', { image });
}
}
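// Poll service health every 5 seconds for up to 5 minutes until every service reports ready.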
async waitForServicesReady(environment) {
const maxWaitTime = 300000;
const checkInterval = 5000;
const startTime = Date.now();
while (Date.now() - startTime < maxWaitTime) {
let allReady = true;
const endpoints = [];
for (const service of environment.services) {
const endpoint = await this.checkServiceHealth(service, environment.id);
endpoints.push(endpoint);
if (!endpoint.ready) {
allReady = false;
}
}
environment.endpoints = endpoints;
if (allReady) {
this.emit('all-services-ready', { environmentId: environment.id });
return;
}
await new Promise(resolve => setTimeout(resolve, checkInterval));
}
throw new Error(`Services did not become ready within ${maxWaitTime / 1000} seconds`);
}
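// Inspect the service's container: use its Docker health check if one is
// defined, otherwise treat a running container as healthy.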
async checkServiceHealth(service, environmentId) {
const endpoint = {
service: service.name,
url: service.ports.length > 0 ? `http://localhost:${service.ports[0].host}` : '',
port: service.ports.length > 0 ? service.ports[0].host : 0,
ready: false,
healthStatus: 'starting'
};
try {
const container = this.docker.getContainer(`${environmentId}-${service.name}`);
const inspect = await container.inspect();
if (inspect.State.Health) {
endpoint.healthStatus = inspect.State.Health.Status === 'healthy' ? 'healthy' : 'unhealthy';
endpoint.ready = endpoint.healthStatus === 'healthy';
}
else if (inspect.State.Running) {
endpoint.healthStatus = 'healthy';
endpoint.ready = true;
}
}
catch (error) {
endpoint.healthStatus = 'unhealthy';
endpoint.ready = false;
}
return endpoint;
}
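// Run any init scripts and seed data for each database in the environment.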
async initializeDatabases(environment) {
for (const db of environment.databases) {
if (db.initScripts.length > 0) {
await this.runDatabaseInitScripts(db, environment.id);
}
if (db.seedData) {
await this.seedDatabase(db, environment.id);
}
}
}
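// Execute each init script inside the database container using the engine's CLI client.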
async runDatabaseInitScripts(db, environmentId) {
const containerName = `${environmentId}-${db.type}-${db.database}`;
for (const script of db.initScripts) {
try {
const container = this.docker.getContainer(containerName);
let execCmd;
switch (db.type) {
case 'postgresql':
execCmd = ['psql', '-U', db.username, '-d', db.database, '-c', script];
break;
case 'mysql':
execCmd = ['mysql', '-u', db.username, `-p${db.password}`, db.database, '-e', script];
break;
case 'mongodb':
execCmd = ['mongo', db.database, '--eval', script];
break;
default:
continue;
}
const exec = await container.exec({
Cmd: execCmd,
AttachStdout: true,
AttachStderr: true
});
// exec.start() resolves with a stream; drain it so the script has actually
// finished before the success event is emitted.
const stream = await exec.start({});
await new Promise((resolve, reject) => {
stream.on('data', () => { });
stream.on('end', resolve);
stream.on('error', reject);
});
this.emit('database-script-executed', {
database: db.database,
script: script.substring(0, 100)
});
}
catch (error) {
this.emit('database-script-error', {
database: db.database,
script: script.substring(0, 100),
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
}
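// Placeholder: seeding currently only emits the 'database-seeded' event without writing any data.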
async seedDatabase(db, environmentId) {
this.emit('database-seeded', { database: db.database });
}
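// Run a job's scripts against a ready environment inside a test-runner
// container and collect pass/fail counts, logs, and artifacts.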
async runIntegrationTests(environmentId, job, config) {
const environment = this.environments.get(environmentId);
if (!environment) {
throw new Error(`Test environment ${environmentId} not found`);
}
if (environment.status !== 'ready') {
throw new Error(`Test environment ${environmentId} is not ready`);
}
const startTime = Date.now();
const result = {
environmentId,
testsPassed: 0,
testsFailed: 0,
testsSkipped: 0,
duration: 0,
services: [],
logs: [],
artifacts: []
};
this.emit('integration-tests-started', { environmentId, jobName: job.name });
try {
const testContainer = await this.createTestRunner(job, environment, config);
await this.executeTestScripts(testContainer, job, result);
await this.collectTestResults(testContainer, result);
await testContainer.remove();
result.duration = Date.now() - startTime;
this.emit('integration-tests-completed', {
environmentId,
result: {
passed: result.testsPassed,
failed: result.testsFailed,
duration: result.duration
}
});
}
catch (error) {
result.testsFailed = 1;
result.logs.push(`Integration test error: ${error instanceof Error ? error.message : 'Unknown error'}`);
this.emit('integration-tests-failed', {
environmentId,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
return result;
}
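// Create and start the test-runner container on the environment's network,
// with the environment's and the job's variables injected.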
async createTestRunner(job, environment, config) {
const image = job.image || 'node:18-alpine';
const containerConfig = {
Image: image,
name: `${environment.id}-test-runner`,
Env: [
...Object.entries(environment.env).map(([key, value]) => `${key}=${value}`),
...Object.entries(job.variables || {}).map(([key, value]) => `${key}=${value}`)
],
// Keep the runner alive so scripts can be exec'd into it; the default command
// of images like node:18-alpine would otherwise exit immediately.
Cmd: ['tail', '-f', '/dev/null'],
HostConfig: {
NetworkMode: `${environment.id}-test-network`,
SecurityOpt: config.security !== execution_1.SecurityLevel.NONE ? ['no-new-privileges:true'] : []
},
Labels: {
'dryrun.environment': environment.id,
'dryrun.role': 'test-runner'
}
};
await this.pullImageIfNeeded(image);
const container = await this.docker.createContainer(containerConfig);
await container.start();
return container;
}
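// Run each script via `sh -c`, capture its output, and count it as passed or
// failed based on the exec exit code.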
async executeTestScripts(container, job, result) {
const scripts = job.script || [];
for (const script of scripts) {
try {
const exec = await container.exec({
Cmd: ['sh', '-c', script],
AttachStdout: true,
AttachStderr: true
});
const stream = await exec.start({});
let output = '';
stream.on('data', (chunk) => {
output += chunk.toString();
});
await new Promise((resolve, reject) => {
stream.on('end', resolve);
stream.on('error', reject);
});
const inspectResult = await exec.inspect();
result.logs.push(`Script: ${script}`);
result.logs.push(output);
if (inspectResult.ExitCode === 0) {
result.testsPassed++;
}
else {
result.testsFailed++;
}
}
catch (error) {
result.testsFailed++;
result.logs.push(`Script error: ${script} - ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
}
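// Collect artifact paths by listing XML/JSON files under /tmp in the test-runner container.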
async collectTestResults(container, result) {
try {
const exec = await container.exec({
Cmd: ['find', '/tmp', '-name', '*.xml', '-o', '-name', '*.json'],
AttachStdout: true
});
const stream = await exec.start({});
let output = '';
stream.on('data', (chunk) => {
output += chunk.toString();
});
await new Promise((resolve, reject) => {
stream.on('end', resolve);
stream.on('error', reject);
});
const artifactPaths = output.trim().split('\n').filter(path => path.length > 0);
result.artifacts = artifactPaths;
}
catch (error) {
}
}
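// Tear down an environment: stop and remove its containers, then its networks
// and volumes, ignoring per-resource cleanup errors.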
async destroyTestEnvironment(environmentId) {
const environment = this.environments.get(environmentId);
if (!environment) {
return;
}
this.emit('environment-destroying', { environmentId });
try {
const containerIds = this.runningContainers.get(environmentId) || [];
for (const containerId of containerIds) {
try {
const container = this.docker.getContainer(containerId);
await container.stop();
await container.remove();
}
catch (error) {
}
}
const networkIds = this.activeNetworks.get(environmentId) || [];
for (const networkId of networkIds) {
try {
const network = this.docker.getNetwork(networkId);
await network.remove();
}
catch (error) {
}
}
for (const volumeName of environment.volumes) {
try {
const volume = this.docker.getVolume(`${environmentId}-${volumeName}`);
await volume.remove();
}
catch (error) {
}
}
this.runningContainers.delete(environmentId);
this.activeNetworks.delete(environmentId);
this.environments.delete(environmentId);
this.emit('environment-destroyed', { environmentId });
}
catch (error) {
this.emit('environment-destroy-error', {
environmentId,
error: error instanceof Error ? error.message : 'Unknown error'
});
throw error;
}
}
async listTestEnvironments() {
return Array.from(this.environments.values());
}
async getTestEnvironment(environmentId) {
return this.environments.get(environmentId) || null;
}
generateEnvironmentId(name) {
const timestamp = Date.now();
const random = Math.random().toString(36).substring(2, 8);
return `test-${name.toLowerCase().replace(/[^a-z0-9]/g, '-')}-${timestamp}-${random}`;
}
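// Destroy all environments (best effort) and detach all event listeners.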
cleanup() {
for (const environmentId of Array.from(this.environments.keys())) {
this.destroyTestEnvironment(environmentId).catch(() => { });
}
this.removeAllListeners();
}
}
exports.IntegrationTestingManager = IntegrationTestingManager;