@ably/cli
Ably CLI for Pub/Sub, Chat and Spaces
import { Args, Flags } from "@oclif/core";
import chalk from "chalk";
import Table from "cli-table3";
import { AblyBaseCommand } from "../../base-command.js";
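// Implements `ably bench publisher`: publishes a stream of test messages to a channel
// and measures publish acknowledgement and echo (round-trip) latency.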
export default class BenchPublisher extends AblyBaseCommand {
static args = {
channel: Args.string({
description: "The channel name to publish to",
required: true,
}),
};
static description = "Run a publisher benchmark test";
static examples = [
"$ ably bench publisher my-channel",
"$ ably bench publisher --messages 5000 --rate 10 my-channel",
"$ ably bench publisher --transport realtime my-channel",
];
static flags = {
...AblyBaseCommand.globalFlags,
"message-size": Flags.integer({
default: 100,
description: "Size of the message payload in bytes",
}),
messages: Flags.integer({
char: "m",
default: 1000,
description: "Number of messages to publish (max 10,000)",
}),
rate: Flags.integer({
char: "r",
default: 15,
description: "Messages per second to publish (max 20)",
}),
transport: Flags.string({
char: "t",
default: "realtime",
description: "Transport to use for publishing",
options: ["rest", "realtime"],
}),
"wait-for-subscribers": Flags.boolean({
default: false,
description: "Wait for subscribers to be present before starting",
}),
};
// Helper function for delays
delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
intervalId = null;
MAX_LOG_LINES = 10; // Buffer for the last 10 logs
messageLogBuffer = [];
realtime = null;
presenceCount = 0;
// Override finally to ensure resources are cleaned up
async finally(err) {
if (this.intervalId) {
clearInterval(this.intervalId);
this.intervalId = null;
}
if (this.realtime && this.realtime.connection.state !== "closed") {
this.realtime.close();
}
return super.finally(err);
}
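// Main entry point: parses args/flags, connects to Ably, runs the publish loop and reports a summary.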
async run() {
const { args, flags } = await this.parse(BenchPublisher);
// Clamp flag values to their allowed ranges
const messageCount = Math.min(flags.messages, 10_000); // cap at 10,000 messages
const messageRate = Math.min(flags.rate, 20); // cap at 20 msg/sec
const messageSize = Math.max(flags["message-size"], 10); // enforce a 10-byte minimum payload
this.realtime = await this.createAblyRealtimeClient(flags);
if (!this.realtime) {
this.error("Failed to create Ably client. Please check your API key and try again.");
return;
}
const client = this.realtime;
// Set up connection state logging
this.setupConnectionStateLogging(client, flags, {
includeUserFriendlyMessages: true
});
let channel = null;
const testId = `test-${Date.now()}`;
const messageTracking = {};
const metrics = {
batchCount: 0,
batchSize: Math.ceil(messageRate / 2),
echoLatencies: [],
errors: 0,
lastBatchTime: Date.now(),
messagesEchoed: 0,
messagesSent: 0,
requestLatencies: [],
startTime: 0, // Will be set before publishing starts
};
try {
channel = client.channels.get(args.channel, { params: { rewind: "1" } });
// Set up channel state logging
this.setupChannelStateLogging(channel, flags, {
includeUserFriendlyMessages: true
});
await this.subscribeToEcho(channel, metrics, messageTracking, flags, args.channel);
await this.enterPresence(channel, testId, messageCount, messageRate, messageSize, flags);
const shouldContinue = await this.checkAndWaitForSubscribers(channel, flags);
if (!shouldContinue) {
this.logCliEvent(flags, "benchmark", "testCancelled", "Benchmark test cancelled by user.");
return; // Exits run method, finally will handle cleanup
}
this.logCliEvent(flags, "benchmark", "startingTest", `Starting benchmark test with ID: ${testId}`, { messageCount, messageRate, messageSize, transport: flags.transport });
if (!this.shouldOutputJson(flags)) {
this.log(`\nStarting benchmark test with ID: ${testId}`);
this.log(`Publishing ${messageCount} messages at ${messageRate} msg/sec using ${flags.transport} transport`);
this.log(`Message size: ${messageSize} bytes\n`);
}
const { intervalId: progressIntervalId, progressDisplay } = this.setupProgressDisplay(flags, metrics, messageCount);
this.intervalId = progressIntervalId; // Assign to class property for finally block
metrics.startTime = Date.now();
await (flags.transport === "rest"
? this.publishMessagesRest(channel, metrics, messageTracking, messageCount, messageRate, messageSize, testId, flags)
: this.publishMessagesRealtime(channel, metrics, messageTracking, messageCount, messageRate, messageSize, testId, flags));
// Wait a bit for remaining echoes
this.logCliEvent(flags, "benchmark", "waitingForEchoes", "Waiting for remaining messages to be echoed back...");
if (!this.shouldOutputJson(flags)) {
this.log("\nWaiting for remaining messages to be echoed back...");
}
await this.delay(3000);
// Clear progress interval if it exists
if (this.intervalId) {
clearInterval(this.intervalId);
this.intervalId = null;
}
this.displaySummary(metrics, flags, metrics.startTime, messageCount, args, testId, progressDisplay);
}
catch (error) {
this.logCliEvent(flags, "benchmark", "testError", `Benchmark failed: ${error instanceof Error ? error.message : String(error)}`, { error: error instanceof Error ? error.stack : String(error) });
this.error(`Benchmark failed: ${error instanceof Error ? error.message : String(error)}`);
}
finally {
// Cleanup managed by the finally method override
if (channel) {
try {
await channel.presence.leave();
this.logCliEvent(flags, "presence", "presenceLeft", "Left presence");
}
catch (leaveError) {
this.logCliEvent(flags, "presence", "leaveError", `Error leaving presence: ${leaveError instanceof Error ? leaveError.message : String(leaveError)}`);
}
}
}
}
// --- Helper Methods ---
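// Appends a timestamped entry to the rolling log buffer shown beneath the progress table.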
addLogToBuffer(logMessage) {
if (this.shouldOutputJson({}))
return; // Don't buffer in JSON mode
this.messageLogBuffer.push(`[${new Date().toLocaleTimeString()}] ${logMessage}`);
if (this.messageLogBuffer.length > this.MAX_LOG_LINES) {
this.messageLogBuffer.shift(); // Remove the oldest log
}
}
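// Checks presence for members with role "subscriber"; with --wait-for-subscribers it blocks until one arrives.
// Returns false if the user cancels when no subscribers are found.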
async checkAndWaitForSubscribers(channel, flags) {
if (flags["wait-for-subscribers"]) {
this.logCliEvent(flags, "benchmark", "waitingForSubscribers", "Waiting for subscribers...");
await new Promise((resolve) => {
const subscriberCheck = (member) => {
if (member.data &&
typeof member.data === "object" &&
"role" in member.data &&
member.data.role === "subscriber") {
this.logCliEvent(flags, "benchmark", "subscriberDetected", `Subscriber detected: ${member.clientId}`, { clientId: member.clientId });
channel.presence.unsubscribe("enter", subscriberCheck);
resolve();
}
};
channel.presence.subscribe("enter", subscriberCheck);
channel.presence
.get()
.then((members) => {
const subscribers = members.filter((m) => m.data &&
typeof m.data === "object" &&
"role" in m.data &&
m.data.role === "subscriber");
if (subscribers.length > 0) {
this.logCliEvent(flags, "benchmark", "subscribersFound", `Found ${subscribers.length} subscribers already present`);
channel.presence.unsubscribe("enter", subscriberCheck);
resolve();
}
})
.catch((error) => {
this.logCliEvent(flags, "presence", "getPresenceError", `Error getting initial presence: ${error instanceof Error ? error.message : String(error)}`);
// Continue waiting
});
});
}
else {
const members = await channel.presence.get();
const subscribers = members.filter((m) => m.data &&
typeof m.data === "object" &&
"role" in m.data &&
m.data.role === "subscriber");
this.logCliEvent(flags, "benchmark", "subscriberCheckComplete", `Found ${subscribers.length} subscribers present`);
if (subscribers.length === 0 && !this.shouldOutputJson(flags)) {
const shouldContinue = await this.interactiveHelper.confirm("No subscribers found. Continue anyway?");
if (!shouldContinue) {
return false; // Indicate cancellation
}
}
}
return true; // Indicate test should continue
}
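// Builds a benchmark payload padded to roughly the requested byte size and records its publish time for echo tracking.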
createPayload(index, size, testId, messageTracking) {
const timestamp = Date.now();
const msgId = `${testId}-${index}`;
messageTracking[msgId] = { publishTime: timestamp };
// Include a `type` field so subscribers can distinguish benchmark payloads
const basePayload = {
type: "message",
index,
msgId,
testId,
timestamp,
};
const basePayloadString = JSON.stringify(basePayload);
const paddingSize = Math.max(0, size - basePayloadString.length - 2);
const padding = paddingSize > 0 ? "a".repeat(paddingSize) : "";
return { ...basePayload, padding };
}
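// Builds the initial (empty) progress table layout.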
createProgressDisplay() {
const table = new Table({
colWidths: [20, 40], // Adjust column widths
head: [chalk.white("Benchmark Progress"), chalk.white("Status")],
style: {
border: [], // No additional styles for the border
head: [], // No additional styles for the header
},
});
table.push(["Messages sent", "0"], ["Messages echoed", "0"], ["Current rate", "0 msg/sec"], ["Echo latency", "0 ms"], // Changed from Request latency
["Progress", "0%"]);
return table;
}
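// Computes aggregate and percentile latency figures and prints the final results as JSON or as tables.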
displaySummary(metrics, flags, startTime, messageCount, args, testId, progressDisplay) {
const totalTime = (Date.now() - startTime) / 1000;
const avgRate = metrics.messagesSent / totalTime;
const avgRequestLatency = metrics.requestLatencies.length > 0
? metrics.requestLatencies.reduce((sum, lat) => sum + lat, 0) /
metrics.requestLatencies.length
: 0;
const avgEchoLatency = metrics.echoLatencies.length > 0
? metrics.echoLatencies.reduce((sum, lat) => sum + lat, 0) /
metrics.echoLatencies.length
: 0;
metrics.requestLatencies.sort((a, b) => a - b);
const reqP50 = metrics.requestLatencies[Math.floor(metrics.requestLatencies.length * 0.5)] || 0;
const reqP90 = metrics.requestLatencies[Math.floor(metrics.requestLatencies.length * 0.9)] || 0;
const reqP95 = metrics.requestLatencies[Math.floor(metrics.requestLatencies.length * 0.95)] || 0;
metrics.echoLatencies.sort((a, b) => a - b);
const echoP50 = metrics.echoLatencies[Math.floor(metrics.echoLatencies.length * 0.5)] || 0;
const echoP90 = metrics.echoLatencies[Math.floor(metrics.echoLatencies.length * 0.9)] || 0;
const echoP95 = metrics.echoLatencies[Math.floor(metrics.echoLatencies.length * 0.95)] || 0;
const summaryData = {
actualRateMsgsPerSec: avgRate,
channel: args.channel,
echoLatencyAvgMs: avgEchoLatency,
echoLatencyP50Ms: echoP50,
echoLatencyP90Ms: echoP90,
echoLatencyP95Ms: echoP95,
errors: metrics.errors,
messageCount,
messagesEchoed: metrics.messagesEchoed,
messagesSent: metrics.messagesSent,
requestLatencyAvgMs: avgRequestLatency,
requestLatencyP50Ms: reqP50,
requestLatencyP90Ms: reqP90,
requestLatencyP95Ms: reqP95,
testId,
totalTimeSeconds: totalTime,
transport: flags.transport,
};
this.logCliEvent(flags, "benchmark", "testCompleted", "Benchmark test completed", summaryData);
if (this.shouldOutputJson(flags)) {
this.log(this.formatJsonOutput(summaryData, flags));
}
else {
if (progressDisplay && this.shouldUseTerminalUpdates()) {
// Skip terminal control in CI/test mode
process.stdout.write("\u001B[2J\u001B[0f");
}
this.log("\n\n" + chalk.green("Benchmark Complete") + "\n");
const summaryTable = new Table({
head: [chalk.white("Metric"), chalk.white("Value")],
style: { border: [], head: [] },
});
summaryTable.push(["Test ID", testId], ["Channel", args.channel], ["Transport", flags.transport], ["Messages sent", `${metrics.messagesSent}/${messageCount}`], [
"Messages echoed",
`${metrics.messagesEchoed}/${metrics.messagesSent}`,
], ["Errors", metrics.errors.toString()], ["Total time", `${totalTime.toFixed(2)} seconds`], ["Actual rate", `${avgRate.toFixed(2)} msg/sec`]);
this.log(summaryTable.toString());
const latencyTable = new Table({
head: [chalk.white("Latency Metric"), chalk.white("Value (ms)")],
style: { border: [], head: [] },
});
latencyTable.push(["Echo Average", avgEchoLatency.toFixed(2)], ["Echo P50", echoP50.toFixed(2)], ["Echo P90", echoP90.toFixed(2)], ["Echo P95", echoP95.toFixed(2)]);
this.log("\nLatency Measurements:");
this.log("• Echo Latency: Round trip time (Publisher → Ably → Publisher)");
this.log(latencyTable.toString());
this.log("\nTest complete. Disconnecting...");
}
}
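// Enters presence as a publisher with the test parameters and logs other members' presence events.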
async enterPresence(channel, testId, messageCount, messageRate, messageSize, flags) {
const presenceData = {
role: "publisher",
testDetails: {
messageCount,
messageRate,
messageSize,
startTime: Date.now(),
transport: flags.transport,
},
testId,
};
channel.presence.subscribe("enter", (member) => {
this.logCliEvent(flags, "presence", "memberEntered", `Member entered presence: ${member.clientId}`, { clientId: member.clientId, data: member.data });
});
channel.presence.subscribe("leave", (member) => {
this.logCliEvent(flags, "presence", "memberLeft", `Member left presence: ${member.clientId}`, { clientId: member.clientId });
});
channel.presence.subscribe("update", (member) => {
this.logCliEvent(flags, "presence", "memberUpdated", `Member updated presence: ${member.clientId}`, { clientId: member.clientId, data: member.data });
});
this.logCliEvent(flags, "presence", "enteringPresence", `Entering presence as publisher with test ID: ${testId}`);
await channel.presence.enter(presenceData);
this.logCliEvent(flags, "presence", "presenceEntered", `Entered presence as publisher with test ID: ${testId}`, { testId });
}
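// Publish loop for the realtime transport: schedules publishes at the requested rate,
// bracketed by "start" and "end" control messages so subscribers can track the test.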
async publishMessagesRealtime(channel, metrics, messageTracking, messageCount, messageRate, messageSize, testId, flags) {
this.logCliEvent(flags, "benchmark", "publishingStart", "Starting to publish messages via Realtime");
// Send a control envelope to mark the start of the benchmark so that
// subscriber logic can initialise its state and rendering immediately.
await channel.publish("benchmark", {
type: "start",
testId,
startTime: Date.now(),
messageCount,
messageRate,
transport: flags.transport,
});
const messagePromises = [];
let i = 0;
const messageDelay = 1000 / messageRate;
await new Promise((resolveOuter) => {
const publishInterval = setInterval(() => {
if (i >= messageCount) {
clearInterval(publishInterval);
this.logCliEvent(flags, "benchmark", "publishLoopComplete", "All messages scheduled for publishing");
Promise.all(messagePromises)
.then(() => {
this.logCliEvent(flags, "benchmark", "allPublishesAcknowledged", "All publish operations acknowledged");
resolveOuter();
})
.catch(() => {
this.logCliEvent(flags, "benchmark", "publishAcknowledgeError", "Error occurred while waiting for publish acknowledgements");
resolveOuter();
});
// After all messages have been published/acknowledged send a final
// control envelope so subscribers finish instantly rather than
// waiting for presence-leave or inactivity watchdogs.
channel
.publish("benchmark", { type: "end", testId })
.catch(() => {
/* non-critical */
});
return;
}
const payload = this.createPayload(i, messageSize, testId, messageTracking);
const msgIndex = i;
const publishStart = Date.now();
const publishPromise = channel
.publish("benchmark", payload)
.then(() => {
const requestLatency = Date.now() - publishStart;
if (messageTracking[payload.msgId]) {
messageTracking[payload.msgId].requestCompleteTime = Date.now();
}
metrics.requestLatencies.push(requestLatency);
const logMsg = `Message ${msgIndex} published (req: ${requestLatency}ms)`;
this.addLogToBuffer(logMsg);
this.logCliEvent(flags, "benchmark", "messagePublished", logMsg, {
msgId: payload.msgId,
requestLatency,
});
})
.catch((error) => {
metrics.errors++;
const errorMsg = `Error publishing message ${msgIndex}: ${error instanceof Error ? error.message : String(error)}`;
this.addLogToBuffer(chalk.red(errorMsg));
this.logCliEvent(flags, "benchmark", "publishError", errorMsg, {
error: error instanceof Error ? error.message : String(error),
msgIndex,
});
});
messagePromises.push(publishPromise);
metrics.messagesSent++;
i++;
}, messageDelay);
});
}
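// Publish loop used when the REST transport flag is selected; mirrors the realtime loop,
// publishing on the same channel at the requested rate with "start"/"end" control messages.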
async publishMessagesRest(channel, metrics, messageTracking, messageCount, messageRate, messageSize, testId, flags) {
this.logCliEvent(flags, "benchmark", "publishingStart", "Starting to publish messages via REST");
// Send start control message
await channel.publish("benchmark", {
type: "start",
testId,
startTime: Date.now(),
messageCount,
messageRate,
transport: flags.transport,
});
const messagePromises = [];
let i = 0;
const messageDelay = 1000 / messageRate;
await new Promise((resolveOuter) => {
const publishInterval = setInterval(() => {
if (i >= messageCount) {
clearInterval(publishInterval);
this.logCliEvent(flags, "benchmark", "publishLoopComplete", "All messages scheduled for publishing");
Promise.all(messagePromises)
.then(() => {
this.logCliEvent(flags, "benchmark", "allPublishesAcknowledged", "All publish operations acknowledged");
resolveOuter();
})
.catch(() => {
this.logCliEvent(flags, "benchmark", "publishAcknowledgeError", "Error occurred while waiting for publish acknowledgements");
resolveOuter();
});
// Send end control message
channel
.publish("benchmark", { type: "end", testId })
.catch(() => {
/* ignore */
});
return;
}
const payload = this.createPayload(i, messageSize, testId, messageTracking);
const msgIndex = i;
const publishStart = Date.now();
const publishPromise = channel
.publish("benchmark", payload)
.then(() => {
const requestLatency = Date.now() - publishStart;
if (messageTracking[payload.msgId]) {
messageTracking[payload.msgId].requestCompleteTime = Date.now();
}
metrics.requestLatencies.push(requestLatency);
const logMsg = `Message ${msgIndex} published (req: ${requestLatency}ms)`;
this.addLogToBuffer(logMsg);
this.logCliEvent(flags, "benchmark", "messagePublished", logMsg, {
msgId: payload.msgId,
requestLatency,
});
})
.catch((error) => {
metrics.errors++;
const errorMsg = `Error publishing message ${msgIndex}: ${error instanceof Error ? error.message : String(error)}`;
this.addLogToBuffer(chalk.red(errorMsg));
this.logCliEvent(flags, "benchmark", "publishError", errorMsg, {
error: error instanceof Error ? error.message : String(error),
msgIndex,
});
});
messagePromises.push(publishPromise);
metrics.messagesSent++;
i++;
}, messageDelay);
});
}
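// Creates the live progress table and a 500ms refresh interval; disabled in JSON or debug output modes.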
setupProgressDisplay(flags, metrics, messageCount) {
if (this.shouldOutputJson(flags) || flags.logLevel === "debug") {
return { intervalId: null, progressDisplay: null };
}
let intervalId = null;
const progressDisplay = new Table({
colWidths: [20, 40],
head: [chalk.white("Benchmark Progress"), chalk.white("Status")],
style: {
border: [],
head: [],
},
});
progressDisplay.push(["Messages sent", "0"], ["Messages echoed", "0"], ["Current rate", "0 msg/sec"], ["Echo latency", "0 ms"], ["Progress", "0%"]);
if (this.shouldUseTerminalUpdates()) {
process.stdout.write("\u001B[2J\u001B[0f");
}
this.log(progressDisplay.toString());
this.log("\n--- Logs (Last 10) ---");
intervalId = setInterval(() => {
if (progressDisplay) {
this.updateProgressAndLogs(metrics, progressDisplay, messageCount);
}
}, 500);
return { intervalId, progressDisplay };
}
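// Subscribes to the channel's "benchmark" messages so echoed copies can be matched back to their publish time.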
async subscribeToEcho(channel, metrics, messageTracking, flags, channelName) {
await channel.subscribe("benchmark", (message) => {
if (!message.data ||
typeof message.data !== "object" ||
!("msgId" in message.data)) {
return; // Not our benchmark message
}
const msgId = message.data.msgId;
const tracker = messageTracking[msgId];
if (tracker && tracker.publishTime) {
const echoLatency = Date.now() - tracker.publishTime;
metrics.echoLatencies.push(echoLatency);
metrics.messagesEchoed++;
this.logCliEvent(flags, "benchmark", "messageEchoReceived", `Echo received for message ${msgId}`, { echoLatency, msgId });
delete messageTracking[msgId];
}
});
this.logCliEvent(flags, "benchmark", "subscribedToEcho", `Subscribed to benchmark messages on channel '${channelName}'`);
}
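// Redraws the progress table (send/echo counts, current rate, echo latency, progress bar) and the recent log buffer.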
updateProgressAndLogs(metrics, displayTable, total) {
const now = Date.now();
const elapsed = now - metrics.lastBatchTime;
// Only update if at least 100ms has passed
if (elapsed < 100)
return;
// Calculate current rate (messages per second in this batch)
const currentRate = metrics.messagesSent > metrics.batchCount
? ((metrics.messagesSent - metrics.batchCount) / elapsed) * 1000
: 0;
// Update batch count and time
metrics.batchCount = metrics.messagesSent;
metrics.lastBatchTime = now;
// Calculate average latency of recent messages (only echo)
const recentEchoLatencies = metrics.echoLatencies.slice(-metrics.batchSize);
const avgEchoLatency = recentEchoLatencies.length > 0
? recentEchoLatencies.reduce((sum, lat) => sum + lat, 0) /
recentEchoLatencies.length
: 0;
// Calculate progress percentage
const progressPercent = Math.min(100, Math.floor((metrics.messagesSent / total) * 100));
// Create progress bar
const progressBarWidth = 30; // Match colWidths - 10 for label
const filledChars = Math.floor((progressPercent / 100) * progressBarWidth);
const progressBar = `[${"=".repeat(filledChars)}${" ".repeat(progressBarWidth - filledChars)}] ${progressPercent}%`;
// Clear console and redraw table and logs
if (this.shouldUseTerminalUpdates()) {
process.stdout.write("\u001B[2J\u001B[0f"); // Clear screen and move cursor to top-left
}
// Recreate table with updated data
const updatedTable = new Table({
colWidths: [20, 40],
head: [chalk.white("Benchmark Progress"), chalk.white("Status")],
style: {
border: [],
head: [],
},
});
// Push data as arrays matching the head order
updatedTable.push(["Messages sent", `${metrics.messagesSent}/${total}`], ["Messages echoed", `${metrics.messagesEchoed}/${metrics.messagesSent}`], ["Current rate", `${currentRate.toFixed(1)} msg/sec`], ["Echo latency", `${avgEchoLatency.toFixed(1)} ms`], ["Progress", progressBar]);
this.log(updatedTable.toString());
this.log("\n--- Logs (Last 10) ---");
for (const log of this.messageLogBuffer) {
this.log(log);
}
}
}