@scrapeless-ai/sdk
Version:
Node SDK for Scrapeless AI
1,626 lines (1,604 loc) • 111 kB
JavaScript
//#region rolldown:runtime
// Cached references to Object intrinsics used by the bundler's CJS/ESM
// interop helpers below (minification-friendly names, and immune to later
// monkey-patching of the globals).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copy every own property of `from` onto `to` as a live getter, skipping
// `except` and any key `to` already owns. Getters keep CJS exports "live"
// when re-exposed as ESM-style bindings; enumerability mirrors the source
// property's descriptor (defaulting to enumerable when no descriptor).
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
key = keys[i];
if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
get: ((k) => from[k]).bind(null, key),
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
});
}
return to;
};
// Wrap a CJS module so it can be consumed like an ESM namespace object:
// members are copied onto a fresh object (prototype preserved), and unless
// the module already marks itself as an ES module (`__esModule`), the module
// itself is additionally exposed as a non-writable `default` export.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
value: mod,
enumerable: true
}) : target, mod));
//#endregion
const dotenv = __toESM(require("dotenv"));
const node_fetch = __toESM(require("node-fetch"));
const winston = __toESM(require("winston"));
require("winston-daily-rotate-file");
const path = __toESM(require("path"));
const fs = __toESM(require("fs"));
const node_fs = __toESM(require("node:fs"));
const form_data = __toESM(require("form-data"));
const node_path = __toESM(require("node:path"));
const uuid = __toESM(require("uuid"));
const puppeteer_core = __toESM(require("puppeteer-core"));
const playwright_core = __toESM(require("playwright-core"));
//#region src/utils/utils.ts
/**
 * Base utility functions collection
 */
/**
 * Pause execution for the given duration.
 * @param ms Number of milliseconds to wait
 * @returns Promise that resolves once the delay has elapsed
 */
const sleep = (ms) => new Promise((done) => {
  setTimeout(done, ms);
});
/**
 * Run an async function repeatedly until it succeeds or the attempt
 * budget is exhausted.
 * @param fn Async function to retry
 * @param options Retry options: maxAttempts (default 3), delay in ms
 *   (default 1000), backoff (exponential, default true), onRetry callback
 *   invoked as (attemptNumber, lastError) before each wait
 * @throws The last error raised by fn once all attempts are spent
 */
const retry = async (fn, options = {}) => {
  const { maxAttempts = 3, delay = 1e3, backoff = true, onRetry = () => {} } = options;
  let lastError = new Error("Retry failed");
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      // No callback / wait after the final failed attempt.
      if (attempt === maxAttempts) break;
      onRetry(attempt, lastError);
      const waitTime = backoff ? delay * 2 ** (attempt - 1) : delay;
      await sleep(waitTime);
    }
  }
  throw lastError;
};
/**
 * Parse a URL string into its components.
 * @param url URL string
 * @returns { protocol, hostname, path, query, fragment } — protocol without
 *   the trailing colon, fragment without the leading "#", query as a plain
 *   object (later duplicate keys win)
 * @throws Error when the string is not a valid URL
 */
const parseUrl = (url) => {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    throw new Error(`Invalid URL: ${url}`);
  }
  return {
    protocol: parsed.protocol.slice(0, -1),
    hostname: parsed.hostname,
    path: parsed.pathname,
    query: Object.fromEntries(parsed.searchParams.entries()),
    fragment: parsed.hash.slice(1)
  };
};
/**
 * Generate a random alphanumeric string (A-Z, a-z, 0-9).
 * Not cryptographically secure — uses Math.random().
 * @param length Desired string length (default 10)
 */
const randomString = (length = 10) => {
  const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  return Array.from({ length }, () => alphabet[Math.floor(Math.random() * alphabet.length)]).join("");
};
/**
 * Detect whether the code is running inside a browser environment
 * (both `window` and `document` globals exist).
 */
const isBrowser = () => typeof window !== "undefined" && typeof document !== "undefined";
/**
 * Format a date using YYYY/MM/DD/HH/mm/ss tokens (local time).
 * @param date Date object or timestamp accepted by the Date constructor
 * @param format Token pattern, default "YYYY-MM-DD HH:mm:ss"
 */
const formatDate = (date, format = "YYYY-MM-DD HH:mm:ss") => {
  const d = new Date(date);
  const pad = (value) => String(value).padStart(2, "0");
  const tokens = {
    YYYY: String(d.getFullYear()),
    MM: pad(d.getMonth() + 1),
    DD: pad(d.getDate()),
    HH: pad(d.getHours()),
    mm: pad(d.getMinutes()),
    ss: pad(d.getSeconds())
  };
  return format.replace(/YYYY|MM|DD|HH|mm|ss/g, (token) => tokens[token]);
};
//#endregion
//#region src/utils/logger.ts
/**
 * Log severity names used throughout the SDK (string-valued enum,
 * compiled from the original TypeScript enum).
 */
let LogLevel = {
  TRACE: "trace",
  DEBUG: "debug",
  INFO: "info",
  WARN: "warn",
  ERROR: "error"
};
// Numeric priorities handed to winston (lower number = more severe).
// NOTE(review): trace (3) is ranked *above* debug (4), so a logger set to
// level "trace" will suppress "debug" messages — confirm this inversion
// of the usual trace<debug ordering is intentional.
const LOG_LEVELS = {
[LogLevel.ERROR]: 0,
[LogLevel.WARN]: 1,
[LogLevel.INFO]: 2,
[LogLevel.TRACE]: 3,
[LogLevel.DEBUG]: 4
};
// Console color assigned to each level, passed to winston's colorizer.
const LEVEL_COLORS = {
[LogLevel.TRACE]: "gray",
[LogLevel.DEBUG]: "cyan",
[LogLevel.INFO]: "green",
[LogLevel.WARN]: "yellow",
[LogLevel.ERROR]: "red"
};
// Palette for per-prefix coloring (consumed by colorizePrefix below).
const PREFIX_COLORS = [
"cyan",
"magenta",
"blue",
"yellow",
"green"
];
/** Default directory for log files, relative to the process cwd. */
const getDefaultLogDir = () => "./logs";
// File-logging rotation/retention settings, overridable via environment.
const LOG_MAX_SIZE = process.env.SCRAPELESS_LOG_MAX_SIZE || "100m";
const LOG_MAX_BACKUPS = parseInt(process.env.SCRAPELESS_LOG_MAX_BACKUPS || "5", 10);
// NOTE(review): LOG_MAX_AGE is parsed here but never passed to the rotate
// transport below — confirm whether it should map to a maxFiles "Nd" value.
const LOG_MAX_AGE = parseInt(process.env.SCRAPELESS_LOG_MAX_AGE || "7", 10);
const LOG_ROOT_DIR = process.env.SCRAPELESS_LOG_ROOT_DIR || getDefaultLogDir();
// Best-effort directory creation at module load; on failure only file
// logging is lost (see the writability check further down).
try {
if (!fs.existsSync(LOG_ROOT_DIR)) fs.mkdirSync(LOG_ROOT_DIR, { recursive: true });
} catch (err) {
console.error(`Failed to create log directory ${LOG_ROOT_DIR}:`, err);
}
/**
 * Wrap text in the ANSI escape codes for the named color.
 * Unknown color names return the text unchanged.
 * @param text Text to colorize
 * @param color One of: gray, red, green, yellow, blue, magenta, cyan, white, reset
 */
const colorize = (text, color) => {
  const codes = {
    gray: "\x1B[90m",
    red: "\x1B[31m",
    green: "\x1B[32m",
    yellow: "\x1B[33m",
    blue: "\x1B[34m",
    magenta: "\x1B[35m",
    cyan: "\x1B[36m",
    white: "\x1B[37m",
    reset: "\x1B[0m"
  };
  const open = codes[color];
  return open ? `${open}${text}${codes.reset}` : text;
};
// File log line format: "timestamp level prefix: message" (no ANSI colors).
const customFormat = winston.default.format.printf(({ level, message, timestamp, prefix }) => {
const prefixStr = prefix ? `${prefix}: ` : "";
return `${timestamp} ${level} ${prefixStr}${message}`;
});
// Console log line format: like customFormat, but with the prefix colored.
const consoleFormat = winston.default.format.printf(({ level, message, timestamp, prefix }) => {
const prefixStr = prefix ? `${colorizePrefix(prefix)}: ` : "";
return `${timestamp} ${level} ${prefixStr}${message}`;
});
/**
 * Color a logger prefix deterministically: hash the prefix string and pick
 * a color from PREFIX_COLORS, so each prefix keeps a stable color across
 * log lines. (Previously the computed index was discarded and every prefix
 * was rendered blue — dead code defeating the evident intent.)
 * @param prefix Logger prefix (may be empty/undefined)
 * @returns ANSI-colored prefix, or "" when no prefix was given
 */
const colorizePrefix = (prefix) => {
	if (!prefix) return "";
	// Simple char-code sum; collisions are acceptable — only stability matters.
	const hash = prefix.split("").reduce((acc, char) => acc + char.charCodeAt(0), 0);
	const colorIndex = hash % PREFIX_COLORS.length;
	return colorize(prefix, PREFIX_COLORS[colorIndex]);
};
// Console transport is always installed; level comes from the environment
// (default "info"), colors from LEVEL_COLORS, ISO-like timestamps.
const transports = [new winston.default.transports.Console({
level: process.env.SCRAPELESS_LOG_LEVEL || LogLevel.INFO,
format: winston.default.format.combine(winston.default.format.colorize({ colors: LEVEL_COLORS }), winston.default.format.timestamp({ format: "YYYY-MM-DDTHH:mm:ssZ" }), consoleFormat)
})];
// Add a daily-rotating file transport only when the log directory is
// writable; otherwise degrade gracefully to console-only logging.
try {
fs.accessSync(LOG_ROOT_DIR, fs.constants.W_OK);
const fileTransport = new winston.default.transports.DailyRotateFile({
level: process.env.SCRAPELESS_LOG_LEVEL || LogLevel.INFO,
filename: path.join(LOG_ROOT_DIR, "scrapeless-%DATE%.log"),
datePattern: "YYYY-MM-DD",
zippedArchive: true,
maxSize: LOG_MAX_SIZE,
maxFiles: LOG_MAX_BACKUPS,
createSymlink: true,
symlinkName: "scrapeless.log"
});
transports.push(fileTransport);
} catch (err) {
console.warn(`Cannot write to log directory ${LOG_ROOT_DIR}, file logging disabled:`, err);
}
// Uncolored timestamped format used as the logger's base (file) format.
const formatWithPrefix = winston.default.format.combine(winston.default.format.timestamp({ format: "YYYY-MM-DDTHH:mm:ssZ" }), customFormat);
// Shared winston logger for the whole SDK; SCRAPELESS_LOG_DISABLED=true
// silences it entirely.
const logger$2 = winston.default.createLogger({
levels: LOG_LEVELS,
level: process.env.SCRAPELESS_LOG_LEVEL || LogLevel.INFO,
format: formatWithPrefix,
transports,
silent: process.env.SCRAPELESS_LOG_DISABLED === "true"
});
// Cache of winston child loggers keyed by prefix string.
const loggerInstances = new Map();
// Return the shared logger, or a cached child logger carrying `prefix` as
// metadata. The silent flag is re-read from the environment on every call
// so SCRAPELESS_LOG_DISABLED can be toggled at runtime; children share the
// parent's silencing since they log through the same transports.
const getLoggerWithPrefix = (prefix) => {
logger$2.silent = process.env.SCRAPELESS_LOG_DISABLED === "true";
if (!prefix) return logger$2;
if (loggerInstances.has(prefix)) return loggerInstances.get(prefix);
const prefixLogger = logger$2.child({ prefix });
loggerInstances.set(prefix, prefixLogger);
return prefixLogger;
};
/**
 * Logging facade over the shared winston logger. Usable both statically
 * (log.info(...), with a process-wide prefix set via setPrefix) and as an
 * instance created by log.withPrefix("MyService") whose methods log with
 * that instance's own prefix. Messages support "{0}"-style positional
 * placeholders (see formatString).
 */
var log = class log {
// Prefix used by the static methods; shared process-wide.
static _prefix = "";
static setPrefix(prefix) {
this._prefix = prefix;
}
static getPrefix() {
return this._prefix;
}
// Create an instance bound to `prefix` without touching the static prefix.
static withPrefix(prefix) {
const newLog = new log();
newLog._prefix = prefix;
return newLog;
}
static getLogger() {
return getLoggerWithPrefix(this._prefix);
}
// Per-instance prefix (set by withPrefix).
_prefix = "";
getLogger() {
return getLoggerWithPrefix(this._prefix);
}
static trace(format, ...args) {
this.getLogger().log(LogLevel.TRACE, this.formatString(format, args));
}
trace(format, ...args) {
this.getLogger().log(LogLevel.TRACE, log.formatString(format, args));
}
static debug(format, ...args) {
this.getLogger().debug(this.formatString(format, args));
}
debug(format, ...args) {
this.getLogger().debug(log.formatString(format, args));
}
static info(format, ...args) {
this.getLogger().info(this.formatString(format, args));
}
info(format, ...args) {
this.getLogger().info(log.formatString(format, args));
}
// log() and print() are aliases for info().
static log(format, ...args) {
this.getLogger().info(this.formatString(format, args));
}
log(format, ...args) {
this.getLogger().info(log.formatString(format, args));
}
static print(format, ...args) {
this.getLogger().info(this.formatString(format, args));
}
print(format, ...args) {
this.getLogger().info(log.formatString(format, args));
}
static warn(format, ...args) {
this.getLogger().log(LogLevel.WARN, this.formatString(format, args));
}
warn(format, ...args) {
this.getLogger().log(LogLevel.WARN, log.formatString(format, args));
}
static error(format, ...args) {
this.getLogger().log(LogLevel.ERROR, this.formatString(format, args));
}
error(format, ...args) {
this.getLogger().log(LogLevel.ERROR, log.formatString(format, args));
}
// Join message and args space-separated, JSON-stringifying non-strings.
// NOTE(review): not referenced within this file — presumably part of the
// public API; verify before removing.
static formatMessage(message, args) {
if (args.length === 0) return typeof message === "string" ? message : JSON.stringify(message);
return [message, ...args].map((arg) => {
if (typeof arg === "string") return arg;
try {
return JSON.stringify(arg);
} catch (e) {
return `[Circular or Non-Serializable Object]`;
}
}).join(" ");
}
// Substitute "{0}", "{1}", ... placeholders with the matching args
// (objects JSON-stringified); any args without a placeholder are appended
// space-separated at the end.
static formatString(format, args) {
if (typeof format !== "string") return String(format) + args.map((arg) => ` ${arg}`).join("");
const usedIndices = new Set();
let result = args.reduce((msg, arg, i) => {
const placeholder = `{${i}}`;
if (msg.includes(placeholder)) {
usedIndices.add(i);
return msg.split(placeholder).join(typeof arg === "object" ? JSON.stringify(arg) : String(arg));
}
return msg;
}, format);
if (usedIndices.size < args.length) {
const unusedArgs = args.filter((_, i) => !usedIndices.has(i));
if (unusedArgs.length <= 0) return result;
result += unusedArgs.map((arg) => {
if (typeof arg === "object") try {
return ` ${JSON.stringify(arg)}`;
} catch (e) {
return ` ${arg}`;
}
return ` ${arg}`;
}).join("");
}
return result;
}
};
//#endregion
//#region src/services/base.ts
// Module-scoped logger for request failures.
const log$2 = log.withPrefix("BaseService.request");
/**
 * Base HTTP service: wraps node-fetch with API-key auth, JSON handling,
 * a request timeout, and unified error reporting. All concrete services
 * in this SDK extend this class.
 */
var BaseService = class {
constructor(apiKey, baseUrl, timeout = 3e4, handleResponse) {
this.apiKey = apiKey;
this.baseUrl = baseUrl;
this.timeout = timeout;
this.handleResponse = handleResponse;
}
/**
 * Perform an HTTP request against the API.
 * @param endpoint Path appended to baseUrl
 * @param method HTTP method (default "GET")
 * @param body Request payload; JSON-stringified unless multipart
 * @param additionalHeaders Extra headers — spread last, so they can
 *   override the defaults, including X-API-Key (except in multipart mode,
 *   where X-API-Key is re-applied)
 * @param responseWithStatus When true resolve with { data, status };
 *   otherwise resolve with data.data (see NOTE below)
 * @throws ScrapelessError when the response status is not ok
 */
async request(endpoint, method = "GET", body, additionalHeaders = {}, responseWithStatus = false) {
const headers = {
"Content-Type": "application/json",
"X-API-Key": this.apiKey,
...additionalHeaders
};
const options = {
method,
headers,
// node-fetch v2-specific option; ignored by WHATWG fetch.
timeout: this.timeout
};
if (body) options.body = JSON.stringify(body);
// Multipart uploads: pass the form body through untouched and let the
// caller-provided headers (from form-data's getHeaders()) carry the
// boundary; only X-API-Key is forced back in.
if (body && additionalHeaders["content-type"]?.startsWith("multipart/form-data;")) {
options.headers = {
...additionalHeaders,
"X-API-Key": this.apiKey
};
options.body = body;
}
const response = await (0, node_fetch.default)(`${this.baseUrl}${endpoint}`, options);
let data;
const contentType = response.headers.get("content-type");
if (contentType?.includes("application/json")) data = await response.json();
else data = await response.text();
if (!response.ok) {
// Build the most specific error message the body offers
// (message > msg > error precedence, since later checks overwrite).
let errorMessage = "";
let errorCode = response.status;
if (typeof data === "object") {
if (data.error) errorMessage = data.error;
if (data.msg) errorMessage = data.msg;
if (data.message) errorMessage = data.message;
if (data.code) errorCode = data.code;
if (data.traceId) if (errorMessage) errorMessage += ` (TraceID: ${data.traceId})`;
else errorMessage = `failed with status ${response.status} (TraceID: ${data.traceId})`;
}
if (!errorMessage) errorMessage = `failed with status ${response.status}`;
errorMessage = `Request ${method} ${this.baseUrl}${endpoint} ${errorMessage}`;
log$2.error(errorMessage);
throw new ScrapelessError(errorMessage, errorCode);
}
if (this.handleResponse) return this.handleResponse(data);
// NOTE(review): when responseWithStatus is false this assumes a JSON
// envelope of shape { data: ... }; a text body would yield undefined.
return responseWithStatus ? {
data,
status: response.status
} : data.data;
}
};
//#endregion
//#region src/services/extension.ts
var ExtensionService = class extends BaseService {
  constructor(apiKey, baseUrl, timeout) {
    super(apiKey, baseUrl, timeout);
  }
  /**
   * Validates the file path and extracts the file name.
   * @param filePath The path to the file.
   * @returns The extracted file name (text after the last "/").
   * @throws Error if the file suffix is not a supported archive type.
   */
  getFileName(filePath) {
    const validSuffixes = [".zip"];
    const suffix = filePath.slice(filePath.lastIndexOf(".")).toLowerCase();
    if (!validSuffixes.includes(suffix)) {
      throw new Error(`Invalid file suffix: ${suffix}. Supported suffixes: ${validSuffixes.join(", ")}`);
    }
    return filePath.slice(filePath.lastIndexOf("/") + 1);
  }
  /** Upload a new browser-extension archive under the given name. */
  async upload(filePath, name) {
    const fileName = this.getFileName(filePath);
    const form = new form_data.default();
    form.append("file", node_fs.default.createReadStream(filePath), fileName);
    form.append("name", name);
    const res = await this.request("/browser/extensions/upload", "POST", form, form.getHeaders(), true);
    return res.data;
  }
  /** Replace an extension's archive and, optionally, its name. */
  async update(extensionId, filePath, name) {
    const fileName = this.getFileName(filePath);
    const form = new form_data.default();
    form.append("file", node_fs.default.createReadStream(filePath), fileName);
    if (name) form.append("name", name);
    const res = await this.request(`/browser/extensions/${extensionId}`, "PUT", form, form.getHeaders(), true);
    return res.data;
  }
  /** Fetch details for a single extension. */
  async get(extensionId) {
    const res = await this.request(`/browser/extensions/${extensionId}`, "GET", undefined, {}, true);
    return res.data;
  }
  /** List all uploaded extensions. */
  async list() {
    const res = await this.request("/browser/extensions/list", "GET", undefined, {}, true);
    return res.data;
  }
  /** Delete an extension by ID. */
  async delete(extensionId) {
    const res = await this.request(`/browser/extensions/${extensionId}`, "DELETE", undefined, {}, true);
    return res.data;
  }
};
//#endregion
//#region src/env.ts
// Module-scoped logger for the environment helpers below.
const log$1 = log.withPrefix("Environment");
/**
 * Scrapeless Actor environment variables
 *
 * This module provides access to environment variables used by the Scrapeless
 * Actor runtime. It defines all supported environment variables and provides
 * utility functions to retrieve them.
 *
 */
/**
 * Enum of environment-variable names recognized by the Actor runtime.
 * Compiled from a TypeScript string enum: keys and values are identical.
 */
let ActorEnv = /* @__PURE__ */ (() => {
  const names = [
    "SCRAPELESS_BASE_API_URL",
    "SCRAPELESS_ACTOR_API_URL",
    "SCRAPELESS_STORAGE_API_URL",
    "SCRAPELESS_BROWSER_API_URL",
    "SCRAPELESS_CRAWL_API_URL",
    "SCRAPELESS_API_KEY",
    "SCRAPELESS_TEAM_ID",
    "SCRAPELESS_ACTOR_ID",
    "SCRAPELESS_RUN_ID",
    "SCRAPELESS_INPUT",
    "SCRAPELESS_DATASET_ID",
    "SCRAPELESS_KV_NAMESPACE_ID",
    "SCRAPELESS_BUCKET_ID",
    "SCRAPELESS_QUEUE_ID",
    "SCRAPELESS_IS_ONLINE"
  ];
  const table = {};
  for (const name of names) table[name] = name;
  return table;
})();
/**
 * Get environment variable
 * @param key Environment variable key (an ActorEnv member name)
 * @returns Value of the environment variable
 * @throws Error if the environment variable is not defined
 */
function getEnv(key) {
  const value = process.env[ActorEnv[key]];
  if (value === undefined) throw new Error(`Environment variable ${key} is not defined`);
  return value;
}
/**
 * Get environment variable with default value
 * @param key Environment variable key (an ActorEnv member name)
 * @param defaultValue Default value to return if the variable is not defined
 * @returns Value of the environment variable or default value
 */
function getEnvWithDefault(key, defaultValue) {
  // process.env values are strings or undefined, so ?? matches the
  // original explicit undefined check exactly.
  return process.env[ActorEnv[key]] ?? defaultValue;
}
/** Trace-log every recognized Actor environment variable and its value. */
function PrintEnv() {
  for (const key of Object.keys(ActorEnv)) {
    log$1.trace(`{0}: {1}`, key, process.env[key]);
  }
}
//#endregion
//#region src/services/browser.ts
// Defaults merged under caller options when building a browser session:
// no session name, 180s TTL, any proxy exit country.
const DEFAULT_BROWSER_OPTIONS = {
session_name: "",
session_ttl: 180,
proxy_country: "ANY"
};
/**
 * Browser session service: builds WebSocket endpoints for remote browser
 * sessions and exposes extension management via `extension`.
 */
var BrowserService = class extends BaseService {
extension;
constructor(apiKey, baseUrl, timeout = 3e4) {
super(apiKey, baseUrl, timeout);
// Extension management always targets the base API host, not the
// browser gateway this service itself points at.
const baseApiURL = getEnvWithDefault("SCRAPELESS_BASE_API_URL", "https://api.scrapeless.com");
this.extension = new ExtensionService(apiKey, baseApiURL, timeout);
}
/**
 * Build browser search params.
 * Accepts both snake_case and camelCase option names; the camelCase name
 * wins when present, falling back to the snake_case value otherwise.
 * Empty/undefined values are dropped, and proxyCountry is removed when an
 * explicit proxyURL is supplied.
 * @param options Browser session configuration
 * @returns URLSearchParams including the API token
 */
buildBrowserSearchParams(options) {
const data = {
...DEFAULT_BROWSER_OPTIONS,
...options
};
const { session_name, session_ttl, session_recording, proxy_country, proxy_url, extension_ids, profile_id, profile_persist } = data;
const { sessionName = session_name, sessionTTL = session_ttl, sessionRecording = session_recording, proxyCountry = proxy_country, proxyURL = proxy_url, fingerprint, extensionIds = extension_ids, profileId = profile_id, profilePersist = profile_persist } = data;
const params = {
token: this.apiKey,
sessionName,
sessionTTL: sessionTTL?.toString(),
sessionRecording: sessionRecording?.toString(),
proxyCountry,
proxyURL,
fingerprint: fingerprint ? JSON.stringify(fingerprint) : void 0,
extensionIds,
profileId,
profilePersist: profilePersist?.toString()
};
if (data.proxyURL) delete params.proxyCountry;
const filteredParams = Object.entries(params).filter(([_, value]) => value !== void 0 && value !== "").reduce((acc, [key, value]) => ({
...acc,
[key]: value
}), {});
return new URLSearchParams(filteredParams);
}
/**
 * Create a browser session (pure URL construction, no network call).
 * @param options Browser session configuration
 * @returns Object containing browserWSEndpoint (ws/wss URL derived from baseUrl)
 */
create(options = {}) {
const search = this.buildBrowserSearchParams(options);
let protocol = "wss";
if (this.baseUrl.startsWith("http://")) protocol = "ws";
// Strip the http(s):// scheme from baseUrl and re-prefix with ws(s)://.
return { browserWSEndpoint: `${protocol}://${this.baseUrl.replace(/^(.*?):\/\//, "")}/api/v2/browser?${search.toString()}` };
}
/**
 * Async version of create method (maintains compatibility but calls the sync method directly)
 */
async createAsync(options = {}) {
return this.create(options);
}
/**
 * Create a browser session via the HTTP API and return its endpoint.
 * NOTE(review): the returned endpoint is hard-coded to
 * wss://browser.scrapeless.com regardless of baseUrl — confirm intended.
 * @param options Browser session configuration
 * @returns Object containing browserWSEndpoint
 * @throws Error when the task creation fails or returns no taskId
 */
async createSession(options = {}) {
const search = this.buildBrowserSearchParams(options);
try {
const task = await this.request(`/api/v2/browser?${search.toString()}`, "GET", void 0, {}, true);
if (!task.data.success) throw new Error(`Failed to create browser session: ${JSON.stringify(task.data)}`);
if (!task.data.taskId) throw new Error("Failed to create browser session: taskId is missing");
return { browserWSEndpoint: `wss://browser.scrapeless.com/browser/${task.data.taskId}?token=${this.apiKey}` };
} catch (error) {
throw new Error(`Failed to create browser session: ${error}`);
}
}
};
//#endregion
//#region src/services/scraping.ts
/**
 * Web scraping service: submits scraping tasks and polls for results.
 */
var ScrapingService = class extends BaseService {
basePath = "/api/v1/scraper";
constructor(apiKey, baseUrl, timeout) {
super(apiKey, baseUrl, timeout);
}
/**
 * Create a scraping request to extract data from websites.
 * Always submits as an asynchronous task — `async: true` here overrides
 * any value supplied by the caller.
 * @param request Scraping request parameters including actor, input, and proxy settings
 * @returns Task ID and status information for the scraping task
 */
async createTask(request) {
const requestWithSync = {
...request,
async: true
};
return await this.request(`${this.basePath}/request`, "POST", requestWithSync, {}, true);
}
/**
 * Get the result of a scraping task
 * @param taskId The ID of the scraping task
 * @returns The scraped data and task status
 */
async getTaskResult(taskId) {
return this.request(`${this.basePath}/result/${taskId}`, "GET", void 0, {}, true);
}
/**
 * Perform a scraping operation and wait for the result.
 * NOTE(review): the `async: false` set here is overridden back to true by
 * createTask, so this always creates an async task and then polls once per
 * second — with no timeout — until the result endpoint returns 200.
 * @param request Scraping request parameters
 * @returns The scraped data result
 */
async scrape(request) {
const requestWithSync = {
...request,
async: false
};
const response = await this.createTask(requestWithSync);
if (response.status === 200) return response.data;
while (true) {
await sleep(1e3);
const result = await this.getTaskResult(response.data.taskId);
if (result.status === 200) return result.data;
}
}
};
//#endregion
//#region src/services/deepserp.ts
/**
 * Deep SERP service (search-engine result scraping). A pure alias of
 * ScrapingService — inherits all task creation/polling behavior unchanged.
 */
var DeepSerpService = class extends ScrapingService {
constructor(apiKey, baseUrl, timeout) {
super(apiKey, baseUrl, timeout);
}
};
//#endregion
//#region src/services/universal.ts
var UniversalService = class extends BaseService {
  basePath = "/api/v2/unlocker";
  constructor(apiKey, baseUrl, timeout) {
    super(apiKey, baseUrl, timeout);
  }
  /**
   * Scrape any website using the Universal Scraping API.
   * @param request Scraping request payload
   * @returns Parsed response data
   */
  async scrape(request) {
    const endpoint = `${this.basePath}/request`;
    return this.request(endpoint, "POST", request);
  }
};
//#endregion
//#region src/services/captcha.ts
var CaptchaService = class extends BaseService {
  basePath = "/api/v1";
  constructor(apiKey, baseUrl, timeout) {
    super(apiKey, baseUrl, timeout);
  }
  /**
   * Create a captcha solving task.
   * @param data Captcha request data
   */
  async captchaCreate(data) {
    return this.request(`${this.basePath}/createTask`, "POST", data, {}, true);
  }
  /**
   * Get captcha solving result.
   * @param taskId Task ID
   */
  async captchaResultGet(taskId) {
    return this.request(`${this.basePath}/getTaskResult/${taskId}`, "GET", undefined, {}, true);
  }
  /**
   * Create a captcha task and poll once per second until it is solved.
   * NOTE: polls indefinitely — there is no timeout.
   * @param data Captcha request data
   */
  async captchaSolver(data) {
    const task = await this.captchaCreate(data);
    for (;;) {
      const result = await this.captchaResultGet(task.data.taskId);
      if (result.data.success) return result.data;
      await sleep(1e3);
    }
  }
};
//#endregion
//#region src/services/proxies.ts
/**
 * ProxiesService provides functionality for working with residential proxies
 */
var ProxiesService = class extends BaseService {
  constructor(apiKey, baseUrl, timeout) {
    super(apiKey, baseUrl, timeout);
  }
  /**
   * Generate a proxy URL string based on the provided configuration.
   * @param proxy Proxy configuration (country, sessionDuration in minutes,
   *   sessionId, gateway host)
   * @returns Formatted proxy URL string with the API key as password
   */
  proxy(proxy) {
    const prefix = "http://CHANNEL-proxy.residential-country_";
    const credentials = `${prefix}${proxy.country}-r_${proxy.sessionDuration}m-s_${proxy.sessionId}:${this.apiKey}`;
    return `${credentials}@${proxy.gateway}`;
  }
  /**
   * Create a new residential proxy with the specified configuration.
   * @param options Proxy configuration options
   * @returns Formatted proxy URL string
   */
  createProxy(options) {
    return this.proxy(options);
  }
  /**
   * Generate a random session ID for use with proxies
   * (base36 timestamp plus a random base36 suffix; not crypto-secure).
   * @returns A random string suitable for use as a session ID
   */
  generateSessionId() {
    const now = Date.now().toString(36);
    const suffix = Math.random().toString(36).substring(2, 10);
    return `${now}-${suffix}`;
  }
};
//#endregion
//#region src/services/actor.ts
/**
 * Actor service class for interacting with the Scrapeless Actor API
 */
var ActorService = class extends BaseService {
  basePath = "/api/v1/actors";
  constructor(apiKey, baseUrl, timeout) {
    super(apiKey, baseUrl, timeout);
  }
  /** Start a run of the given actor. */
  async run(actorId, data) {
    return this.request(`${this.basePath}/${actorId}/runs`, "POST", data);
  }
  /** Fetch information about a single run. */
  async getRunInfo(runId) {
    return this.request(`${this.basePath}/runs/${runId}`);
  }
  /** Abort a running actor run. */
  async abortRun(actorId, runId) {
    return this.request(`${this.basePath}/${actorId}/runs/${runId}`, "DELETE");
  }
  /** Kick off a build of the actor. */
  async build(actorId) {
    return this.request(`${this.basePath}/${actorId}/builds`, "POST");
  }
  /** Fetch the status of an actor build. */
  async getBuildStatus(actorId, buildId) {
    return this.request(`${this.basePath}/${actorId}/builds/${buildId}`);
  }
  /** Abort an in-progress actor build. */
  async abortBuild(actorId, buildId) {
    return this.request(`${this.basePath}/${actorId}/builds/${buildId}`, "DELETE");
  }
  /**
   * Page through the run history.
   * @param params Pagination parameters (page, pageSize, optional desc)
   */
  async getRunList(params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.desc !== undefined) qs.append("desc", params.desc ? "1" : "0");
    return this.request(`${this.basePath}/runs?${qs.toString()}`);
  }
};
//#endregion
//#region src/services/storage/http/dataset.ts
/**
 * Dataset storage service implementation
 */
var DatasetStorage = class extends BaseService {
  basePath = "/api/v1/dataset";
  /**
   * List all available datasets.
   * @param params Pagination plus optional desc/actorId/runId filters
   */
  async listDatasets(params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.desc !== undefined) qs.append("desc", params.desc ? "1" : "0");
    if (params.actorId) qs.append("actorId", params.actorId);
    if (params.runId) qs.append("runId", params.runId);
    return this.request(`${this.basePath}?${qs.toString()}`);
  }
  /** Create a new dataset with the given name. */
  async createDataset(name) {
    return this.request(`${this.basePath}`, "POST", { name });
  }
  /** Rename an existing dataset. */
  async updateDataset(datasetId, name) {
    return this.request(`${this.basePath}/${datasetId}`, "PUT", { name });
  }
  /** Delete a dataset by ID. */
  async delDataset(datasetId) {
    return this.request(`${this.basePath}/${datasetId}`, "DELETE");
  }
  /** Append an array of items to a dataset. */
  async addItems(datasetId, items) {
    return this.request(`${this.basePath}/${datasetId}/items`, "POST", { items });
  }
  /**
   * Fetch items from a dataset.
   * @param params Pagination parameters (page, pageSize, optional desc)
   */
  async getItems(datasetId, params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.desc !== undefined) qs.append("desc", params.desc ? "1" : "0");
    return this.request(`${this.basePath}/${datasetId}/items?${qs.toString()}`);
  }
  /** Fetch a dataset's metadata by ID. */
  async getDataset(datasetId) {
    return this.request(`${this.basePath}/${datasetId}`);
  }
};
//#endregion
//#region src/services/storage/http/kv.ts
/**
 * Key-Value storage service implementation
 */
var KVStorage = class extends BaseService {
  basePath = "/api/v1/kv";
  /**
   * List all available namespaces.
   * @param params Pagination parameters (page, pageSize, optional desc)
   */
  async listNamespaces(params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.desc !== undefined) qs.append("desc", params.desc ? "1" : "0");
    return this.request(`${this.basePath}/namespaces?${qs.toString()}`);
  }
  /** Create a new namespace with the given name. */
  async createNamespace(name) {
    return this.request(`${this.basePath}/namespaces`, "POST", { name });
  }
  /** Fetch a namespace by its ID. */
  async getNamespace(namespaceId) {
    return this.request(`${this.basePath}/${namespaceId}`);
  }
  /** Delete a namespace by its ID. */
  async delNamespace(namespaceId) {
    return this.request(`${this.basePath}/${namespaceId}`, "DELETE");
  }
  /** Rename an existing namespace. */
  async renameNamespace(namespaceId, name) {
    return this.request(`${this.basePath}/${namespaceId}/rename`, "PUT", { name });
  }
  /** List keys stored in a namespace (paginated). */
  async listKeys(namespaceId, params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    return this.request(`${this.basePath}/${namespaceId}/keys?${qs.toString()}`);
  }
  /** Delete a single key from a namespace. */
  async delValue(namespaceId, key) {
    return this.request(`${this.basePath}/${namespaceId}/${key}`, "DELETE");
  }
  /** Bulk-set key/value items (server expects the capitalized "Items" field). */
  async bulkSetValue(namespaceId, data) {
    return this.request(`${this.basePath}/${namespaceId}/bulk`, "POST", { Items: data });
  }
  /** Bulk-delete the given keys from a namespace. */
  async bulkDelValue(namespaceId, keys) {
    return this.request(`${this.basePath}/${namespaceId}/bulk`, "POST", { keys });
  }
  /**
   * Set one key/value pair, with optional expiration.
   * @param data Object with key, value, and optional expiration
   */
  async setValue(namespaceId, data) {
    const { key, value, expiration } = data;
    return this.request(`${this.basePath}/${namespaceId}/key`, "PUT", { key, value, expiration });
  }
  /** Read the value stored under a key. */
  async getValue(namespaceId, key) {
    return this.request(`${this.basePath}/${namespaceId}/${key}`);
  }
};
//#endregion
//#region src/services/storage/http/object.ts
/**
 * Object storage service implementation
 */
var ObjectStorage = class extends BaseService {
  basePath = "/api/v1/object";
  /**
   * List all available buckets.
   * @param params Pagination plus optional desc/actor/runId filters
   */
  async listBuckets(params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.desc !== undefined) qs.append("desc", params.desc ? "1" : "0");
    if (params.actor) qs.append("actor", params.actor);
    if (params.runId) qs.append("runId", params.runId);
    return this.request(`${this.basePath}/buckets?${qs.toString()}`);
  }
  /** Create a new bucket. */
  async createBucket(data) {
    return this.request(`${this.basePath}/buckets`, "POST", data);
  }
  /** Delete a bucket by ID. */
  async deleteBucket(bucketId) {
    return this.request(`${this.basePath}/buckets/${bucketId}`, "DELETE");
  }
  /** Fetch a bucket's metadata. */
  async getBucket(bucketId) {
    return this.request(`${this.basePath}/buckets/${bucketId}`);
  }
  /**
   * List objects in a bucket.
   * @param params Pagination plus optional search filter
   */
  async list(bucketId, params) {
    const qs = new URLSearchParams();
    qs.append("page", String(params.page));
    qs.append("pageSize", String(params.pageSize));
    if (params.search !== undefined) qs.append("search", params.search);
    return this.request(`${this.basePath}/buckets/${bucketId}/objects?${qs.toString()}`);
  }
  /** Download a single object's data. */
  async get(bucketId, objectId) {
    return this.request(`${this.basePath}/buckets/${bucketId}/${objectId}`);
  }
  /**
   * Upload an object to a bucket (multipart form; Content-Type is cleared
   * so the JSON default does not apply).
   * @param data Upload parameters: file, optional actorId/runId
   */
  async put(bucketId, data) {
    const form = new FormData();
    form.append("file", data.file);
    if (data.actorId) form.append("actorId", data.actorId);
    if (data.runId) form.append("runId", data.runId);
    return this.request(`${this.basePath}/buckets/${bucketId}/object`, "POST", form, { "Content-Type": undefined });
  }
  /** Delete an object from a bucket. */
  async delete(bucketId, objectId) {
    return this.request(`${this.basePath}/buckets/${bucketId}/${objectId}`, "DELETE");
  }
};
//#endregion
//#region src/services/storage/http/queue.ts
/**
* Queue storage service implementation
*/
/**
 * Queue storage service backed by the Scrapeless HTTP API.
 */
var QueueStorage = class extends BaseService {
	basePath = "/api/v1/queue";
	/**
	 * List all available queues.
	 * @param params Pagination parameters (`page`, `pageSize`, optional `desc`)
	 * @returns List of queues
	 */
	async list(params) {
		const query = new URLSearchParams({
			page: params.page.toString(),
			pageSize: params.pageSize.toString()
		});
		// desc is transmitted as "1"/"0" and only when explicitly provided.
		if (params.desc !== undefined) query.append("desc", params.desc ? "1" : "0");
		return this.request(`${this.basePath}/queues?${query}`);
	}
	/**
	 * Create a new queue.
	 * @param data Queue creation parameters
	 * @returns The created queue info
	 */
	async create(data) {
		return this.request(`${this.basePath}`, "POST", data);
	}
	/**
	 * Look up a queue by name (and optionally id).
	 * @param name Name of the queue
	 * @param queueId Optional queue id; an empty string is sent when omitted
	 * @returns Queue information
	 */
	async get(name, queueId) {
		const query = new URLSearchParams({ name, id: queueId || "" });
		return this.request(`${this.basePath}?${query}`);
	}
	/**
	 * Update a queue.
	 * @param queueId ID of the queue to update
	 * @param data Queue update parameters
	 */
	async update(queueId, data) {
		return await this.request(`${this.basePath}/${queueId}`, "PUT", data);
	}
	/**
	 * Delete a queue.
	 * @param queueId ID of the queue to delete
	 * @returns Operation result
	 */
	async delete(queueId) {
		return this.request(`${this.basePath}/${queueId}`, "DELETE");
	}
	/**
	 * Push a message onto a queue.
	 * @param queueId ID of the queue
	 * @param params Message parameters
	 * @returns Push response with message ID
	 */
	async push(queueId, params) {
		return this.request(`${this.basePath}/${queueId}/push`, "POST", params);
	}
	/**
	 * Pull messages from a queue.
	 * @param queueId ID of the queue
	 * @param limit Maximum number of records to return (1..100); omitted when falsy
	 * @returns Array of queue messages
	 */
	async pull(queueId, limit) {
		const endpoint = `${this.basePath}/${queueId}/pull`;
		if (!limit) return this.request(endpoint);
		const query = new URLSearchParams({ limit: limit.toString() });
		return this.request(`${endpoint}?${query}`);
	}
	/**
	 * Acknowledge a message in a queue.
	 * @param queueId ID of the queue
	 * @param msgId ID of the message to acknowledge
	 * @returns Operation result
	 */
	async ack(queueId, msgId) {
		return this.request(`${this.basePath}/${queueId}/ack/${msgId}`, "POST");
	}
};
//#endregion
//#region src/services/storage/http/index.ts
/**
* StorageService provides access to all Actor storage services
*/
/**
 * HttpStorageService aggregates all Actor storage services, each built
 * with the same credentials and connection settings.
 */
var HttpStorageService = class extends BaseService {
	/** Dataset storage service for structured data. */
	dataset;
	/** Key-Value storage service for files and data records. */
	kv;
	/** Object storage service for files and binary data. */
	object;
	/** Queue storage service for task queues. */
	queue;
	/**
	 * Create a new StorageService instance.
	 * @param apiKey API key for authentication
	 * @param baseUrl Base URL for the API
	 * @param timeout Request timeout in milliseconds
	 */
	constructor(apiKey, baseUrl, timeout) {
		super(apiKey, baseUrl, timeout);
		const ctorArgs = [apiKey, baseUrl, timeout];
		this.dataset = new DatasetStorage(...ctorArgs);
		this.kv = new KVStorage(...ctorArgs);
		this.object = new ObjectStorage(...ctorArgs);
		this.queue = new QueueStorage(...ctorArgs);
	}
};
//#endregion
//#region src/services/memory.ts
/**
 * Shared base for the local (file-system backed) storage services.
 * All data lives under ./storage relative to the process CWD.
 */
var MemoryService = class {
	// Root folder name for all local storage emulation.
	storagePath = "storage";
	// Absolute form of storagePath, resolved once at construction.
	basePath = path.default.resolve(this.storagePath);
	/** Resolve a sub-path inside the storage root. */
	getStoragePath(subPath) {
		return path.default.join(this.basePath, subPath);
	}
	/**
	 * Slice an in-memory array into a single page of results.
	 * @param items Full result set
	 * @param page 1-based page number (default 1)
	 * @param pageSize Items per page (default 10)
	 */
	paginateArray(items, page = 1, pageSize = 10) {
		const total = items.length;
		const first = (page - 1) * pageSize;
		const last = Math.min(first + pageSize, total);
		return {
			items: items.slice(first, last),
			total,
			page,
			pageSize,
			totalPage: Math.ceil(total / pageSize)
		};
	}
	/** Create a directory (including parents) if it is missing. */
	async mkdir(dirPath) {
		await fs.default.promises.mkdir(dirPath, { recursive: true });
	}
	/** Serialize data as JSON and write it to filePath (UTF-8). */
	async writeJsonFile(filePath, data) {
		await fs.default.promises.writeFile(filePath, JSON.stringify(data), "utf-8");
	}
	/** Read a UTF-8 text file and return its contents. */
	async readFile(metaPath) {
		return fs.default.promises.readFile(metaPath, "utf-8");
	}
	/** Remove a file or directory tree. */
	async rm(target, options) {
		return fs.default.promises.rm(target, options);
	}
	/** Remove a directory. NOTE(review): fs.promises.rmdir is deprecated in modern Node; kept to preserve behavior. */
	async rmDir(target, options) {
		return fs.default.promises.rmdir(target, options);
	}
	/**
	 * Return true when any store folder under dirPath already uses `name`
	 * (read from each folder's metadata.json). Reserved top-level store
	 * folders and the metadata file itself are skipped; unreadable
	 * folders are ignored.
	 */
	async isNameExists(dirPath, name) {
		const reserved = new Set([
			"queues_stores",
			"datasets",
			"kv_stores",
			"objects_stores",
			"metadata.json"
		]);
		const dirEntries = await fs.default.promises.readdir(dirPath, { withFileTypes: true });
		for (const dirEntry of dirEntries) {
			if (!dirEntry.isDirectory() || reserved.has(dirEntry.name)) continue;
			const metaDataPath = path.default.join(dirPath, dirEntry.name, "metadata.json");
			try {
				const raw = await fs.default.promises.readFile(metaDataPath, "utf-8");
				if (JSON.parse(raw).name === name) return true;
			} catch {}
		}
		return false;
	}
};
//#endregion
//#region src/utils/memory.ts
// Seed metadata written into each storage type's "default" store on first
// creation (see createDir in this file). Keys match the on-disk directory
// names used under ./storage.
const metadataStore = {
// Default dataset record; stats are filled in as items are added.
datasets: {
id: "default",
name: "default",
actorId: "default",
runId: "default",
stats: {}
},
// Default key-value namespace record.
kv_stores: {
id: "default",
name: "default",
actorId: "default",
runId: "default"
},
// Default object-store bucket record.
objects_stores: {
id: "default",
name: "default",
description: "default",
actorId: "default",
runId: "default",
size: 0
},
// Default queue record.
queues_stores: {
id: "default",
name: "default",
teamId: "default",
actorId: "default",
runId: "default",
description: "default",
stats: {}
}
};
/**
 * Ensure the local "storage" root directory exists.
 *
 * Uses `mkdir(..., { recursive: true })`, which is idempotent, instead of
 * the previous existsSync-then-mkdir pair — that pair had a check-then-act
 * (TOCTOU) race when two callers initialized storage concurrently.
 * Errors are intentionally swallowed: storage setup is best-effort.
 */
async function createRoot() {
	try {
		const rootPath = node_path.default.resolve("storage");
		await node_fs.default.promises.mkdir(rootPath, { recursive: true });
	} catch {}
}
/**
 * Create storage/<dir> on first use and seed it with a "default" store
 * containing metadata.json (plus an empty INPUT.json for the key-value
 * store). If the directory already exists nothing is touched.
 * Best-effort: all errors are swallowed.
 */
async function createDir(dir) {
	try {
		const storagePath = node_path.default.resolve(`storage/${dir}`);
		if (node_fs.default.existsSync(storagePath)) return;
		const defaultDir = node_path.default.join(storagePath, "default");
		await node_fs.default.promises.mkdir(storagePath, { recursive: true });
		await node_fs.default.promises.mkdir(defaultDir);
		const seed = JSON.stringify({
			...metadataStore[dir],
			createdAt: new Date().toISOString(),
			updatedAt: new Date().toISOString()
		});
		await node_fs.default.promises.writeFile(node_path.default.join(defaultDir, "metadata.json"), seed);
		if (dir === "kv_stores") await node_fs.default.promises.writeFile(node_path.default.join(defaultDir, "INPUT.json"), "");
	} catch {}
}
//#endregion
//#region src/services/storage/memory/dataset.ts
/**
* Dataset storage service implementation (local file system)
*/
/**
 * Dataset storage service implementation (local file system).
 *
 * Each dataset is a folder under storage/datasets/<id> containing a
 * metadata.json plus one zero-padded NNNNNNNN.json file per item.
 */
var LocalDatasetStorage = class extends MemoryService {
	datasetDir = "datasets";
	/**
	 * Constructor: ensure dataset directory exists.
	 */
	constructor() {
		super();
		// Fire-and-forget: createDir is async but seeding is best-effort.
		createDir(this.datasetDir);
	}
	/**
	 * List all datasets with pagination and optional sorting.
	 * @param params Pagination and sorting parameters (page, pageSize, desc)
	 * @returns Paged list of dataset metadata ordered by createdAt
	 */
	async listDatasets(params) {
		const dirPath = this.getStoragePath(this.datasetDir);
		let entries;
		try {
			entries = await fs.default.promises.readdir(dirPath, { withFileTypes: true });
		} catch {
			// Storage root missing or unreadable: present an empty page.
			return {
				items: [],
				total: 0,
				page: params.page || 1,
				pageSize: params.pageSize || 10,
				totalPage: 0
			};
		}
		const allDatasets = [];
		for (const entry of entries) {
			if (!entry.isDirectory()) continue;
			const metaPath = path.default.join(dirPath, entry.name, "metadata.json");
			try {
				allDatasets.push(JSON.parse(await this.readFile(metaPath)));
			} catch {
				// Skip folders without readable metadata.
				continue;
			}
		}
		// Ascending creation order; entries lacking createdAt sort last.
		allDatasets.sort((a, b) => {
			if (!a.createdAt && !b.createdAt) return 0;
			if (!a.createdAt) return 1;
			if (!b.createdAt) return -1;
			return new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime();
		});
		// FIX: desc=true previously skipped sorting entirely (leaving raw
		// filesystem order); it now returns newest-first as the name implies.
		if (params.desc) allDatasets.reverse();
		return this.paginateArray(allDatasets, params.page, params.pageSize);
	}
	/**
	 * Get dataset metadata by datasetId.
	 * @param datasetId Dataset unique identifier
	 * @throws Error when datasetId is empty or the dataset does not exist
	 */
	async getDataset(datasetId) {
		if (!datasetId) throw new Error("datasetId must not be empty");
		const dirPath = this.getStoragePath(path.default.join(this.datasetDir, datasetId));
		const metaPath = path.default.join(dirPath, "metadata.json");
		try {
			return JSON.parse(await this.readFile(metaPath));
		} catch {
			throw new Error("Dataset not found");
		}
	}
	/**
	 * Create a new dataset with a unique name.
	 * @param name Dataset name
	 * @returns The newly created dataset metadata
	 * @throws Error when name is empty, already taken, or creation I/O fails
	 */
	async createDataset(name) {
		// FIX: validation runs before the try block so these specific errors
		// are no longer masked by the generic "Create dataset failed" message.
		if (!name) throw new Error("name must not be empty");
		const exist = await this.isNameExists(this.getStoragePath(this.datasetDir), name);
		if (exist) throw new Error("The name of the dataset already exists");
		try {
			const id = (0, uuid.v4)();
			const dirPath = this.getStoragePath(path.default.join(this.datasetDir, id));
			await this.mkdir(dirPath);
			const now = new Date();
			const meta = {
				id,
				name,
				createdAt: now,
				updatedAt: now,
				fields: [],
				stats: {
					count: 0,
					size: 0
				}
			};
			await this.writeJsonFile(path.default.join(dirPath, "metadata.json"), meta);
			return meta;
		} catch (e) {
			// Only genuine I/O failures land here; keep the original cause.
			throw new Error("Create dataset failed", { cause: e });
		}
	}
	/**
	 * Update dataset name by datasetId.
	 * @param datasetId Dataset unique identifier
	 * @param name New dataset name
	 * @throws Error when either argument is empty or the dataset is missing
	 */
	async updateDataset(datasetId, name) {
		if (!datasetId) throw new Error("datasetId must not be empty");
		if (!name) throw new Error("name must not be empty");
		const dirPath = this.getStoragePath(path.default.join(this.datasetDir, datasetId));
		const metaPath = path.default.join(dirPath, "metadata.json");
		try {
			const meta = JSON.parse(await this.readFile(metaPath));
			meta.name = name;
			meta.updatedAt = new Date();
			await this.writeJsonFile(metaPath, meta);
			return meta;
		} catch {
			throw new Error("Dataset not found");
		}
	}
	/**
	 * Delete a dataset by datasetId (recursive, force).
	 * @param datasetId Dataset unique identifier
	 * @returns { success, message } — filesystem errors are reported, not thrown
	 */
	async delDataset(datasetId) {
		if (!datasetId) throw new Error("datasetId must not be empty");
		const dirPath = this.getStoragePath(path.default.join(this.datasetDir, datasetId));
		try {
			await this.rm(dirPath, {
				recursive: true,
				force: true
			});
			return {
				success: true,
				message: "dataset deleted successfully"
			};
		} catch (e) {
			return {
				success: false,
				message: String(e)
			};
		}
	}
	/**
	 * Add items to a dataset: auto-increment file names, merge field names
	 * (deduplicated), and update count/size stats.
	 * @param datasetId Dataset unique identifier
	 * @param items Array of items to add
	 * @returns { success, message }
	 * @throws Error when writing items or metadata fails
	 */
	async addItems(datasetId, items) {
		if (!datasetId) throw new Error("datasetId must not be empty");
		const dirPath = this.getStoragePath(path.default.join(this.datasetDir, datasetId));
		const metaPath = path.default.join(dirPath, "metadata.json");
		let meta;
		try {
			meta = JSON.parse(await this.readFile(metaPath));
		} catch {
			return {
				success: false,
				message: "Dataset not found"
			};
		}
		try {
			// Find the highest existing NNNNNNNN.json index to continue from.
			const files = await fs.default.promises.readdir(dirPath);
			let maxIndex = 0;
			for (const f of files) {
				if (/^\d{8}\.json$/.test(f)) maxIndex = Math.max(maxIndex, parseInt(f.slice(0, 8), 10));
			}
			const fieldsSet = new Set(meta.fields || []);
			let addedBytes = 0;
			for (let i = 0; i < items.length; i++) {
				const item = items[i];
				for (const key of Object.keys(item)) fieldsSet.add(key);
				const fileName = `${(maxIndex + i + 1).toString().padStart(8, "0")}.json`;
				await this.writeJsonFile(path.default.join(dirPath, fileName), item);
				addedBytes += Buffer.byteLength(JSON.stringify(item));
			}
			meta.fields = Array.from(fieldsSet);
			meta.updatedAt = new Date();
			meta.stats = meta.stats || {
				count: 0,
				size: 0
			};
			meta.stats.count += items.length;
			// FIX: stats.size was declared but never updated; track the
			// serialized byte size of added items.
			meta.stats.size = (meta.stats.size || 0) + addedBytes;
			await this.writeJsonFile(metaPath, meta);
			return {
				success: true,
				message: "Items added"
			};
		} catch (e) {
			throw new Error("Add to dataset failed", { cause: e });
		}
	}
	/**
	 * Get items from a dataset with pagination.
	 * @param datasetId Dataset unique identifier
	 * @param params Pagination parameters
	 * @returns Paged items (an empty page when the dataset cannot be read)
	 */
	async getItems(datasetId, params) {
		if (!datasetId) throw new Error("datasetId must not be empty");
		const dirPath = this.getStoragePath(path.default.join(this.datasetDir, datasetId));
		try {
			// Zero-padded names sort correctly with default lexicographic sort.
			const files = (await fs.default.promises.readdir(dirPath)).filter((f) => /^\d{8}\.json$/.test(f)).sort();
			const items = [];
			for (const file of files) {
				items.push(JSON.parse(await this.readFile(path.default.join(dirPath, file))));
			}
			return this.paginateArray(items, params.page, params.pageSize);
		} catch {
			return {
				items: [],
				total: 0,
				page: params.page || 1,
				pageSize: params.pageSize || 10,
				totalPage: 0
			};
		}
	}
};
//#endregion
//#region src/services/storage/memory/kv.ts
/**
* Key-Value storage service implementation (local file system)
*/
var LocalKVStorage = class extends MemoryService {
kvDir = "kv_stores";
/**
* Constructor: ensure kv directory exists
*/
constructor() {
super();
createDir(this.kvDir);
}
/**
* List all available namespaces
* @param params Pagination parameters
*/
async listNamespaces(params) {
const dirPath = this.getStoragePath(this.kvDir);
let entries;
try {
entries = await fs.default.promises.readdir(dirPath, { withFileTypes: true });
} catch {
return {
items: [],
total: 0,
page: params.page || 1,
pageSize: params.pageSize || 10,
totalPage: 0
};
}
const allNamespaces = [];
for (const entry of entries) {
if (!entry.isDirectory()) continue;
const nsPath = path.default.join(dirPath, entry.name);
const metaPath = path.default.join(nsPath, "metadata.json");
try {
const file = await this.readFile(metaPath);
const meta = JSON.parse(file);
allNamespaces.push({
id: meta.id,
name: meta.name,
createdAt: meta.createdAt,
updatedAt: meta.updatedAt,
actorId: meta.actorId || "",
runId: meta.runId || "",
stats: {
count: 0,
size: 0
}
});
} catch {
continue;
}
}
if (!params.desc) allNamespaces.sort((a, b) => {
if (!a.createdAt && !b.createdAt) return 0;
if (!a.createdAt) return 1;
if (!b.createdAt) return -1;
return new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime();
});
const paged = this.paginateArray(allNamespaces, params.page, params.pageSize);
for (const ns of paged.items) {
const nsPath = path.default.join(dirPath, ns.id);
try {
const files = await fs.default.promises.readdir(nsPath);
let count = 0;
let size = 0;
for (const f of files) {
if (f === "metadata.json" || !f.endsWith(".json")) continue;
count++;
const keyInfo = await this.readFile(path.default.join(nsPath, f));
size += JSON.parse(keyInfo).size;
}
ns.stats = {
c