@dataql/node
DataQL core SDK for unified data management with MongoDB and GraphQL - Production Multi-Cloud Ready
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Data = void 0;
const validate_js_1 = require("./validate.js");
const hashSchema_js_1 = require("./hashSchema.js");
const uuid_1 = require("uuid");
const DocumentScope_js_1 = require("./DocumentScope.js");
const utils_js_1 = require("./utils.js");
const index_js_1 = require("./plugins/index.js");
const config_js_1 = require("./config.js");
function nowISO() {
return new Date().toISOString();
}
function addTimestampsOnCreate(docOrDocs) {
const now = nowISO();
if (Array.isArray(docOrDocs)) {
for (const doc of docOrDocs) {
if (!doc.createdAt)
doc.createdAt = now;
if (!doc.updatedAt)
doc.updatedAt = now;
}
}
else if (docOrDocs && typeof docOrDocs === "object") {
if (!docOrDocs.createdAt)
docOrDocs.createdAt = now;
if (!docOrDocs.updatedAt)
docOrDocs.updatedAt = now;
}
}
function addTimestampsOnUpdate(docOrDocs) {
const now = nowISO();
if (Array.isArray(docOrDocs)) {
for (const doc of docOrDocs) {
doc.updatedAt = now;
}
}
else if (docOrDocs && typeof docOrDocs === "object") {
docOrDocs.updatedAt = now;
}
}
class Data {
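/**
* Create a new DataQL client.
*
* A minimal construction sketch; the token, database name, and connection
* string are illustrative values, not required defaults:
* ```typescript
* const data = new Data({
*   appToken: "your-app-token",
*   env: "dev",                // collection names get the devPrefix ("dev_")
*   dbName: "myapp",
*   connectionString: "mongodb://localhost:27017",
* });
* ```
*/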
constructor(options = {}) {
this.collections = {};
this.schemaHashCache = {}; // collectionName -> hash
this.appToken = options.appToken || "default";
this.env = options?.env || "prod";
this.devPrefix = options?.devPrefix || "dev_";
this.dbName = options?.dbName;
this.connectionString = options?.connectionString;
this.customConnection = options?.customConnection;
this.globalConfig = { ...config_js_1.DEFAULT_GLOBAL_CONFIG, ...options.globalConfig };
this.deviceId = this.getDeviceId();
this.pluginManager = new index_js_1.PluginManager(this);
}
getDeviceId() {
if (typeof window !== "undefined" && window.localStorage) {
let deviceId = window.localStorage.getItem("dataql_device_id");
if (!deviceId) {
deviceId = (0, uuid_1.v4)();
window.localStorage.setItem("dataql_device_id", deviceId);
}
return deviceId;
}
else {
// Node.js: generate per process
if (!this.deviceId) {
this.deviceId = (0, uuid_1.v4)();
}
return this.deviceId;
}
}
async getBestEndpoint() {
// Return cached endpoint if it's still valid (cache for 5 minutes)
if (this.bestEndpointCache &&
Date.now() - this.bestEndpointCache.timestamp < 5 * 60 * 1000) {
return this.bestEndpointCache.endpoint;
}
// If global routing is disabled, return the default endpoint
if (!this.globalConfig.enableGlobalRouting) {
return (0, config_js_1.getWorkerUrl)();
}
try {
// Select the best endpoint based on performance
const bestEndpoint = await (0, config_js_1.selectBestEndpoint)(
    undefined, // Use all endpoints
    5 // Max concurrent tests
);
// Cache the result
this.bestEndpointCache = {
endpoint: bestEndpoint,
timestamp: Date.now(),
};
console.log(`[DataQL] Selected best endpoint: ${bestEndpoint}`);
return bestEndpoint;
}
catch (error) {
console.warn(`[DataQL] Failed to select best endpoint, falling back to default:`, error);
return this.globalConfig.fallbackEndpoint || (0, config_js_1.getWorkerUrl)();
}
}
async fetchWithCustomConnection(input, init) {
const url = input instanceof Request ? input.url : input;
// Priority 1: Use custom connection if provided
if (this.customConnection) {
let options;
if (input instanceof Request) {
// Clone the request to avoid consuming the body
const clonedRequest = input.clone();
options = {
method: clonedRequest.method,
headers: Object.fromEntries(clonedRequest.headers.entries()),
body: clonedRequest.body,
...init,
};
}
else {
options = init || {};
}
console.log("[Data SDK] Using custom connection", {
url,
method: options.method || "GET",
});
return await this.customConnection.request(url, options);
}
// Priority 2: Fallback to regular fetch
const method = input instanceof Request ? input.method : init?.method || "GET";
console.log("[Data SDK] Using standard fetch", { url, method });
return await fetch(input, init);
}
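/**
* Register a collection and return a callable handle.
*
* The returned value is a function (calling it with a filter yields a
* document scope) that also carries the create/insert/createUnique/find/
* update/upsert/delete methods attached below.
*
* A minimal usage sketch; the schema shape and field names are illustrative
* assumptions, not a documented schema format:
* ```typescript
* const users = data.collection("users", { name: "string", email: "string" });
* await users.create({ name: "Ada", email: "ada@example.com" });
* const all = await users.find();
* ```
*/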
collection(name, schema, options) {
const actualName = this.env === "dev" ? `${this.devPrefix}${name}` : name;
this.collections[actualName] = { name: actualName, schema, options };
const self = this;
const ensureSession = async () => {
const schemaHash = await (0, hashSchema_js_1.hashSchema)(schema);
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/session/create`;
let payload = { hash: schemaHash };
// Include dbName if provided
if (self.dbName) {
payload.dbName = self.dbName;
}
// Include MongoDB URI if provided
if (self.connectionString) {
payload.mongoUri = self.connectionString;
}
// Only send schema if not already sent for this collection
if (!self.schemaHashCache[actualName] ||
self.schemaHashCache[actualName] !== schemaHash) {
payload.schema = { [actualName]: schema };
}
let payloadStr = JSON.stringify(payload);
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: payloadStr,
});
let data;
try {
data = await res.json();
}
catch (e) {
throw new Error(`Failed to parse JSON from /session/create. Status: ${res.status}. Response body could not be parsed as JSON.`);
}
if (!data.sessionId) {
console.error("[Data SDK] /session/create missing sessionId. Response:", data, "Status:", res.status);
throw new Error("Failed to get sessionId from worker");
}
// Cache the hash for this collection
self.schemaHashCache[actualName] = schemaHash;
return data.sessionId;
};
// Create a callable function that also has methods attached
const collectionFunction = (filter) => {
return (0, DocumentScope_js_1.createDocumentScope)(actualName, filter, schema, self.fetchWithCustomConnection.bind(self), self.appToken, ensureSession);
};
// Define the create function first so it can be referenced
const createFunction = async (doc) => {
if (Array.isArray(doc)) {
// Ensure each doc has an id
for (const d of doc) {
if (!d.id)
d.id = (0, uuid_1.v4)();
}
addTimestampsOnCreate(doc);
// Bulk create
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/create`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: doc,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
else {
// Ensure doc has an id
if (!doc.id)
doc.id = (0, uuid_1.v4)();
addTimestampsOnCreate(doc);
(0, validate_js_1.validateAgainstSchema)(schema, doc);
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/create`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: doc,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return {
insertedId: data.id || data._id,
result: data,
};
}
};
// Attach collection properties and methods to the function
const props = {
schema,
options,
get $type() {
return undefined;
},
create: createFunction,
// Create unique document - returns existing if found, creates new if not
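// A sketch of the intended call shape (field values illustrative):
//   const res = await users.createUnique({ name: "Ada", email: "ada@example.com" });
//   // res.isExisting is true when a matching document was already present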
async createUnique(doc) {
// Extract comparable fields (excluding ID and subdocument fields)
const comparableFields = (0, utils_js_1.extractComparableFields)(doc, schema);
// Find all existing documents
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const findUrl = `${baseUrl}/data/${actualName}/find`;
const findRes = await self.fetchWithCustomConnection(findUrl, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
}),
});
let existingDocs = await findRes.json();
if (findRes.status !== 200 || existingDocs.error) {
throw new Error(existingDocs.error || "Failed to find existing documents");
}
// Ensure existingDocs is an array
if (!Array.isArray(existingDocs)) {
existingDocs = existingDocs ? [existingDocs] : [];
}
// Check if any existing document matches the comparable fields
for (const existingDoc of existingDocs) {
const existingComparableFields = (0, utils_js_1.extractComparableFields)(existingDoc, schema);
if ((0, utils_js_1.areFieldsMatching)(comparableFields, existingComparableFields)) {
// Found a matching document, return it
return {
insertedId: existingDoc.id || existingDoc._id,
result: existingDoc,
isExisting: true,
};
}
}
// No matching document found, create a new one
const createResult = await createFunction(doc);
return {
insertedId: createResult.insertedId,
result: createResult.result || createResult,
isExisting: false,
};
},
// SQL-style alias for create
async insert(doc) {
return createFunction(doc);
},
async find(filter = {}) {
// For now, only support single filter or empty (backend does not support findMany by array of filters)
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/find`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
...filter,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return [];
if (data.length === 1)
return [data[0]];
return data;
}
if (data == null)
return [];
return [data];
},
async update(filter, update, upsert) {
if (Array.isArray(filter)) {
addTimestampsOnUpdate(filter);
// Bulk update
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/update`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: filter,
upsert,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
else {
addTimestampsOnUpdate(update);
// Single update
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/update`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: { ...filter, ...update },
upsert,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
},
async upsert(doc) {
if (Array.isArray(doc)) {
// Ensure each doc has an id
for (const d of doc) {
if (!d.id)
d.id = (0, uuid_1.v4)();
}
addTimestampsOnCreate(doc);
// Bulk upsert
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/upsert`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: doc,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
else {
// Ensure doc has an id
if (!doc.id)
doc.id = (0, uuid_1.v4)();
addTimestampsOnCreate(doc);
(0, validate_js_1.validateAgainstSchema)(schema, doc);
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/upsert`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: doc,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return {
insertedId: data.id || data._id,
result: data,
};
}
},
async delete(filter) {
if (Array.isArray(filter)) {
// Bulk delete: [{id}, ...]
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/delete`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: filter,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
else {
// Single delete
const sid = await ensureSession();
const baseUrl = await self.getBestEndpoint();
const url = `${baseUrl}/data/${actualName}/delete`;
const res = await self.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${self.appToken}`,
},
body: JSON.stringify({
sessionId: sid,
schema: { [actualName]: schema },
data: filter,
}),
});
let data = await res.json();
if (data.error)
throw new Error(data.error);
if (Array.isArray(data)) {
if (data.length === 0)
return null;
if (data.length === 1)
return data[0];
}
return data;
}
},
};
if ("name" in props)
delete props.name;
Object.assign(collectionFunction, props);
return collectionFunction;
}
// Transaction support
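/**
* Queue several operations and send them to the server as one transaction.
*
* A minimal usage sketch; the collection and field names are illustrative
* and assume those collections were registered via `collection()` first:
* ```typescript
* await data.transaction(async (tx) => {
*   tx.users.create({ id: "u1", name: "Ada" });
*   tx.orders.update({ id: "o1" }, { status: "paid" });
* });
* ```
* Operations are recorded by the proxy and posted to the `/transaction`
* endpoint in a single request after the callback resolves.
*/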
async transaction(fn) {
const operations = [];
const txProxy = {};
// Create transaction proxy objects
for (const [name, collectionInfo] of Object.entries(this.collections)) {
txProxy[name] = {
create: (doc) => {
operations.push({ action: "create", collection: name, data: doc });
},
update: (filter, update) => {
operations.push({
action: "update",
collection: name,
filter,
update,
});
},
upsert: (doc) => {
operations.push({ action: "upsert", collection: name, data: doc });
},
delete: (filter) => {
operations.push({ action: "delete", collection: name, filter });
},
};
}
// Execute the transaction function
const result = await fn(txProxy);
// Send all operations to server
if (operations.length > 0) {
const baseUrl = await this.getBestEndpoint();
const url = `${baseUrl}/transaction`;
const res = await this.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${this.appToken}`,
},
body: JSON.stringify({
operations,
}),
});
const data = await res.json();
if (data.error)
throw new Error(data.error);
}
return result;
}
/**
* Introspect a database and generate DataQL schemas automatically
* Routes through DataQL's infrastructure: SDK → Worker → Lambda → Database
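*
* @example
* A minimal usage sketch; the connection string and option values are
* illustrative:
* ```typescript
* const result = await data.introspect("mongodb://localhost:27017/mydb", {
*   sampleSize: 50,
*   includeCollections: ["users", "orders"],
* });
* if (result.success) {
*   console.log("Discovered:", Object.keys(result.schemas || {}));
* }
* ```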
*/
async introspect(databaseUrl, options) {
try {
console.log(`[DataQL] Starting database introspection via infrastructure...`);
// Route introspection request through DataQL infrastructure
const baseUrl = await this.getBestEndpoint();
const url = `${baseUrl}/introspect`;
const res = await this.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${this.appToken}`,
},
body: JSON.stringify({
databaseUrl,
options: {
databaseName: options?.databaseName,
sampleSize: options?.sampleSize || 100,
maxDepth: options?.maxDepth || 5,
includeIndexes: options?.includeIndexes !== false,
excludeCollections: options?.excludeCollections,
includeCollections: options?.includeCollections,
},
}),
});
const result = await res.json();
if (!res.ok || result.error) {
return {
success: false,
error: result.error || `HTTP ${res.status}: ${res.statusText}`,
};
}
// Auto-register discovered schemas with DataQL
if (result.data?.schemas) {
for (const [collectionName, schema] of Object.entries(result.data.schemas)) {
console.log(`[DataQL] Auto-registering schema for collection: ${collectionName}`);
// Store the schema for later use when collection() is called
const actualName = this.env === "dev"
? `${this.devPrefix}${collectionName}`
: collectionName;
this.collections[actualName] = {
name: actualName,
schema,
introspected: true,
};
}
}
console.log(`[DataQL] Introspection completed successfully!`);
console.log(`[DataQL] Discovered ${Object.keys(result.data?.schemas || {}).length} collections`);
return {
success: true,
data: result.data,
schemas: result.data?.schemas,
};
}
catch (error) {
console.error("[DataQL] Introspection failed:", error);
return {
success: false,
error: error.message || "Unknown introspection error",
};
}
}
/**
* Migrate an entire database to DataQL cloud infrastructure in one click
* Routes through DataQL's infrastructure: SDK → Worker → Lambda → Database
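*
* @example
* A minimal usage sketch; the connection string and option values are
* illustrative:
* ```typescript
* const result = await data.migrateToCloud("mongodb://localhost:27017/mydb", {
*   batchSize: 500,
*   createBackup: true,
* });
* if (result.success) {
*   console.log("Migration ID:", result.migrationId);
* }
* ```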
*/
async migrateToCloud(databaseUrl, options) {
try {
console.log(`[DataQL] Starting cloud migration for: ${databaseUrl}`);
// Route migration request through DataQL infrastructure
const baseUrl = await this.getBestEndpoint();
const url = `${baseUrl}/migrate`;
const res = await this.fetchWithCustomConnection(url, {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${this.appToken}`,
},
body: JSON.stringify({
databaseUrl,
options: {
sampleSize: options?.sampleSize || 100,
maxDepth: options?.maxDepth || 5,
includeIndexes: options?.includeIndexes !== false,
excludeCollections: options?.excludeCollections,
includeCollections: options?.includeCollections,
batchSize: options?.batchSize || 1000,
validateData: options?.validateData !== false,
preserveIds: options?.preserveIds !== false,
createBackup: options?.createBackup !== false,
},
}),
});
const result = await res.json();
if (!res.ok || result.error) {
return {
success: false,
error: result.error || `HTTP ${res.status}: ${res.statusText}`,
};
}
// Auto-register discovered schemas with DataQL
if (result.schemas) {
for (const [collectionName, schema] of Object.entries(result.schemas)) {
console.log(`[DataQL] Auto-registering migrated schema for collection: ${collectionName}`);
// Store the schema for later use when collection() is called
const actualName = this.env === "dev"
? `${this.devPrefix}${collectionName}`
: collectionName;
this.collections[actualName] = {
name: actualName,
schema,
migrated: true,
};
}
}
// Log success details
if (result.success) {
console.log(`[DataQL] ✅ Cloud migration completed successfully!`);
console.log(`[DataQL] 🎯 Migration ID: ${result.migrationId}`);
if (result.cloudDatabase) {
console.log(`[DataQL] ☁️ Cloud database: ${result.cloudDatabase.name} (${result.cloudDatabase.region})`);
console.log(`[DataQL] 🔗 New connection: ${result.cloudDatabase.connectionString}`);
}
if (result.migration) {
console.log(`[DataQL] 📊 Migrated: ${result.migration.collections} collections, ${result.migration.documents} documents`);
console.log(`[DataQL] ⏱️ Duration: ${result.migration.duration}ms`);
}
if (result.schemas) {
console.log(`[DataQL] 📝 Registered ${Object.keys(result.schemas).length} schemas`);
}
if (result.rollback?.supported) {
console.log(`[DataQL] 🔄 Rollback: ${result.rollback.instructions}`);
}
}
return result;
}
catch (error) {
console.error("[DataQL] Cloud migration failed:", error);
return {
success: false,
error: error.message || "Unknown migration error",
};
}
}
/**
* Plugin System Methods
*/
/**
* Register a plugin with DataQL
*
* @param plugin - The plugin to register
* @param config - Plugin configuration
*
* @example
* ```typescript
* // Register an analytics plugin
* await data.registerPlugin(analyticsPlugin, {
* apiKey: "your-api-key",
* endpoint: "https://analytics.example.com"
* });
*
* // Register a database adapter plugin
* await data.registerPlugin(postgresAdapter);
* ```
*/
async registerPlugin(plugin, config) {
await this.pluginManager.register(plugin, config);
}
/**
* Unregister a plugin
*
* @param pluginId - ID of the plugin to unregister
*/
async unregisterPlugin(pluginId) {
await this.pluginManager.unregister(pluginId);
}
/**
* Get a registered plugin
*
* @param pluginId - ID of the plugin to get
*/
getPlugin(pluginId) {
return this.pluginManager.getPlugin(pluginId);
}
/**
* Get all registered plugins
*/
getPlugins() {
return this.pluginManager.getPlugins();
}
/**
* Get plugins by type
*
* @param type - Plugin type to filter by
*/
getPluginsByType(type) {
return this.pluginManager.getPluginsByType(type);
}
/**
* Check if a plugin is registered
*
* @param pluginId - ID of the plugin to check
*/
hasPlugin(pluginId) {
return this.pluginManager.hasPlugin(pluginId);
}
/**
* Initialize all registered plugins
*/
async initializePlugins() {
await this.pluginManager.initializeAll();
}
/**
* Get plugin system statistics
*/
getPluginStats() {
return this.pluginManager.getStats();
}
/**
* Execute a hook with all registered handlers
*
* @param hook - The hook to execute
* @param data - Data to pass to hook handlers
*/
async executeHook(hook, data) {
return this.pluginManager.executeHook(hook, data);
}
/**
* Get extension methods from plugins
*/
getExtensions() {
return this.pluginManager.getExtensions();
}
/**
* Extend the Data class with plugin methods dynamically
*
* This method allows plugins to add new methods to the Data class
* instance, enabling seamless integration of plugin functionality.
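*
* @example
* A hedged sketch; `cachePlugin` and `clearCache` are hypothetical names for
* a plugin and the extension method it exposes:
* ```typescript
* await data.registerPlugin(cachePlugin);
* await data.applyExtensions();
* await data.clearCache(); // method contributed by the plugin
* ```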
*/
async applyExtensions() {
const extensions = this.getExtensions();
for (const [methodName, method] of Object.entries(extensions)) {
if (typeof method === "function") {
// Bind the method to this instance
this[methodName] = method.bind(this);
}
else {
// Add properties directly
this[methodName] = method;
}
}
}
}
exports.Data = Data;