@mochabug/adapt-plugin-toolkit

The API toolkit to facilitate mochabug adapt plugin development

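The file below is the source map shipped with the package's compiled `api` module; its `sourcesContent` embeds the original `src/api.ts`, which documents the exported `ApiBase`, `Store`, `WriteBatchBuilder`, and `BatchReadResult` APIs along with their JSDoc examples. As a quick orientation before the raw file, here is a minimal sketch that strings a few of those documented calls together. It is illustrative only: it assumes these classes are exported from the package root, and that the `api` and `store` instances are supplied by the plugin runtime (the embedded source marks `ApiBase` as not intended for direct construction and the `Store` constructor as internal).

```typescript
// Sketch based on the JSDoc examples embedded in src/api.ts.
// Assumption: these symbols are re-exported from the package root.
import {
  ApiBase,
  Store,
  WriteBatchBuilder
} from '@mochabug/adapt-plugin-toolkit';

// `api` and `store` are assumed to be provided by the plugin runtime;
// they are not constructed directly in plugin code.
async function example(api: ApiBase, store: Store) {
  // Plugin-scoped variable lookup, per the getSystemVariable JSDoc.
  const apiUrl = await api.getSystemVariable<string>('api.url');
  console.log('configured API URL:', apiUrl);

  // Key-value store usage, per the Store.insert / Store.get JSDoc.
  await store.insert('user:123', { name: 'Alice' }, 3600);
  const user = await store.get<{ name: string }>('user:123');
  if (user) {
    console.log(user.data.name, 'etag:', user.etag, 'ttl:', user.ttl);
  }

  // Atomic batched writes, per the WriteBatchBuilder JSDoc.
  const ops = new WriteBatchBuilder()
    .insert('user:1', { name: 'Alice' }, 3600)
    .delete('temp:old')
    .build();
  await store.writeBatch(ops);
}
```

The full, authoritative JSDoc for each of these methods appears in the embedded source that follows.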
{ "version": 3, "sources": ["../../src/api.ts", "../../src/genproto/mochabugapis/adapt/graph/signal_data_pb.ts", "../../src/genproto/buf/validate/validate_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/signal_format_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/jtd_schema_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/vertex_metadata_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/exchange_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/transceiver_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/signal_descriptor_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/receiver_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/signal_binding_pb.ts", "../../src/genproto/mochabugapis/adapt/graph/transmitter_pb.ts", "../../src/genproto/mochabugapis/adapt/runtime/v1/runtime_pb.ts", "../../src/genproto/mochabugapis/adapt/automations/v1/automations_pb.ts", "../../src/genproto/google/api/annotations_pb.ts", "../../src/genproto/google/api/http_pb.ts", "../../src/genproto/google/api/client_pb.ts", "../../src/genproto/google/api/launch_stage_pb.ts", "../../src/genproto/mochabugapis/adapt/runtime/v1/store_pb.ts", "../../src/grpcweb.ts", "../../src/genproto/mochabugapis/adapt/runtime/v1/incoming_pb.ts"], "sourcesContent": ["// Copyright (c) 2023 mochabug AB. All rights reserved.\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use\n// this file except in compliance with the License. You may obtain a copy of the\n// License at http://www.apache.org/licenses/LICENSE-2.0\n// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n// KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED\n// WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,\n// MERCHANTABLITY OR NON-INFRINGEMENT.\n// See the Apache Version 2.0 License for specific language governing permissions\n// and limitations under the License.\n\nimport {\n create,\n enumFromJson,\n fromJson,\n JsonValue,\n toJson\n} from '@bufbuild/protobuf';\nimport {\n FieldMaskSchema,\n timestampDate,\n timestampFromDate,\n ValueJson,\n ValueSchema\n} from '@bufbuild/protobuf/wkt';\nimport { Client, Code, ConnectError, createClient } from '@connectrpc/connect';\nimport {\n SignalData,\n SignalDataSchema\n} from './genproto/mochabugapis/adapt/graph/signal_data_pb';\nimport {\n SignalFormatJson,\n SignalFormatSchema\n} from './genproto/mochabugapis/adapt/graph/signal_format_pb';\nimport {\n VertexMetadataJson,\n VertexMetadataSchema\n} from './genproto/mochabugapis/adapt/graph/vertex_metadata_pb';\nimport {\n ConfiguratorService,\n ExchangeOperation,\n ExecutorService,\n ListIncomingSignalsResponseJson,\n ListIncomingSignalsResponseSchema,\n Namespace,\n NamespaceJson,\n NamespaceSchema,\n PluginService,\n Token\n} from './genproto/mochabugapis/adapt/runtime/v1/runtime_pb';\nimport {\n ConditionalDeleteOp_Precondition,\n ConditionalDeleteOp_PreconditionSchema,\n ConditionalInsertOp_Precondition,\n ConditionalInsertOp_PreconditionSchema,\n GetValue,\n SelectOpJson,\n TimestampRangeSchema,\n WriteOperation,\n WriteOperationSchema\n} from './genproto/mochabugapis/adapt/runtime/v1/store_pb';\nimport { createGrpcWebTransport } from './grpcweb';\nimport {\n ConfiguratorEnvironment,\n Environment,\n ExecutorEnvironment\n} from './types';\n\nexport * from './genproto/mochabugapis/adapt/automations/v1/automations_pb';\nexport * from './genproto/mochabugapis/adapt/graph/exchange_pb';\nexport * from 
'./genproto/mochabugapis/adapt/graph/jtd_schema_pb';\nexport * from './genproto/mochabugapis/adapt/graph/receiver_pb';\nexport * from './genproto/mochabugapis/adapt/graph/signal_binding_pb';\nexport * from './genproto/mochabugapis/adapt/graph/signal_data_pb';\nexport * from './genproto/mochabugapis/adapt/graph/signal_descriptor_pb';\nexport * from './genproto/mochabugapis/adapt/graph/signal_format_pb';\nexport * from './genproto/mochabugapis/adapt/graph/transceiver_pb';\nexport * from './genproto/mochabugapis/adapt/graph/transmitter_pb';\nexport * from './genproto/mochabugapis/adapt/graph/vertex_metadata_pb';\nexport * from './genproto/mochabugapis/adapt/runtime/v1/incoming_pb';\nexport * from './genproto/mochabugapis/adapt/runtime/v1/runtime_pb';\nexport * from './genproto/mochabugapis/adapt/runtime/v1/store_pb';\nexport * from './types';\nexport { Code, ConnectError };\nexport type { ValueJson };\n\n/**\n * Convert camelCase field names to snake_case for protocol buffer field masks\n */\nconst CAMEL_TO_SNAKE_MAP: Record<string, string> = {\n config: 'config',\n metadata: 'metadata',\n configuredServices: 'configured_services'\n};\n\nfunction toSnakeCase(camelCase: string): string {\n return CAMEL_TO_SNAKE_MAP[camelCase] ?? camelCase;\n}\n\n/**\n * Array representing asset directories.\n */\nexport type AssetDirectory = {\n /**\n * Name of the asset directory.\n */\n name: string;\n\n /**\n * Type of the asset (file or directory).\n */\n type: 'file' | 'directory';\n}[];\n\n/**\n * Represents the content of an asset file.\n */\nexport type AssetFile = {\n /**\n * Content of the asset file as a readable stream.\n */\n content: ReadableStream;\n\n /**\n * MIME type of the asset file.\n */\n mime: string;\n};\n\n/**\n * Base class for API interactions. Extended by specific API implementations.\n * Not intended for direct use.\n */\nexport class ApiBase {\n protected env: Environment;\n protected pluginService: Client<typeof PluginService>;\n protected pluginToken: string;\n\n /**\n * Initializes the ApiBase instance with environment settings and a plugin token.\n *\n * @param env - Environment configuration for API communication.\n * @param pluginToken - Token for plugin authentication.\n */\n constructor(env: Environment, pluginToken: string) {\n this.env = env;\n this.pluginToken = pluginToken;\n this.pluginService = createClient(\n PluginService,\n createGrpcWebTransport({\n fetcher: this.env.plugin,\n interceptors: [\n (next) => async (req) => {\n req.header.set('Authorization', `Bearer ${this.pluginToken}`);\n return await next(req);\n }\n ]\n })\n );\n }\n\n /**\n * Retrieves a single plugin-scoped variable by its name.\n * Supports dot notation for nested variable names (e.g., 'api.credentials.key').\n *\n * @param name - The name of the variable (1-100 chars, pattern: ^[_$a-zA-Z][_$a-zA-Z0-9]*(?:\\.[_$a-zA-Z][_$a-zA-Z0-9]*)*$).\n * @returns A promise that resolves with the variable value (undefined if not found).\n *\n * @example\n * ```typescript\n * const apiUrl = await api.getSystemVariable<string>('api.url');\n * const nested = await api.getSystemVariable<string>('config.database.host');\n * ```\n */\n async getSystemVariable<T = ValueJson>(name: string): Promise<T> {\n const response = await this.pluginService.batchGetSystemVariables({\n names: [name]\n });\n const vares = response.items[name];\n return (vares ? 
toJson(ValueSchema, vares) : undefined) as T;\n }\n\n /**\n * Retrieves multiple plugin-scoped variables by their names with full type safety.\n * Supports dot notation for nested variable names.\n *\n * @template T - Record type mapping variable names to their expected types.\n * @param keys - Names of the variables to retrieve (1-100 unique names, each 1-100 chars).\n * @returns A promise that resolves with an object mapping variable names to their typed values.\n * @throws {ConnectError} Code.InvalidArgument if keys array is empty or exceeds 100 items.\n *\n * @example\n * ```typescript\n * const vars = await api.getSystemVariables<{\n * 'api.url': string;\n * 'api.timeout': number;\n * 'api.enabled': boolean;\n * }>('api.url', 'api.timeout', 'api.enabled');\n *\n * // Fully typed!\n * vars['api.url']; // string\n * vars['api.timeout']; // number\n * vars['api.enabled']; // boolean\n * ```\n */\n async getSystemVariables<T extends Record<string, any>>(\n ...keys: Array<keyof T & string>\n ): Promise<T> {\n const response = await this.pluginService.batchGetSystemVariables({\n names: keys\n });\n const res: Record<string, ValueJson> = {};\n for (const [key, value] of Object.entries(response.items)) {\n res[key] = toJson(ValueSchema, value);\n }\n return res as T;\n }\n\n /**\n * Retrieves a single user-scoped variable by its name.\n * Supports dot notation for nested variable names (e.g., 'user.preferences.theme').\n *\n * @param name - The name of the variable (1-100 chars, pattern: ^[_$a-zA-Z][_$a-zA-Z0-9]*(?:\\.[_$a-zA-Z][_$a-zA-Z0-9]*)*$).\n * @returns A promise that resolves with the user variable value (undefined if not found).\n *\n * @example\n * ```typescript\n * const theme = await api.getUserVariable<string>('user.preferences.theme');\n * const email = await api.getUserVariable<string>('user.email');\n * ```\n */\n async getUserVariable<T = ValueJson>(name: string): Promise<T> {\n const response = await this.pluginService.batchGetUserVariables({\n names: [name]\n });\n const vares = response.items[name];\n return (vares ? 
toJson(ValueSchema, vares) : undefined) as T;\n }\n\n /**\n * Retrieves multiple user-scoped variables by their names with full type safety.\n * Supports dot notation for nested variable names.\n *\n * @template T - Record type mapping variable names to their expected types.\n * @param keys - Names of the user variables to retrieve (1-100 unique names, each 1-100 chars).\n * @returns A promise that resolves with an object mapping variable names to their typed values.\n * @throws {ConnectError} Code.InvalidArgument if keys array is empty or exceeds 100 items.\n *\n * @example\n * ```typescript\n * const vars = await api.getUserVariables<{\n * 'user.email': string;\n * 'user.age': number;\n * 'user.verified': boolean;\n * }>('user.email', 'user.age', 'user.verified');\n *\n * // Fully typed!\n * vars['user.email']; // string\n * vars['user.age']; // number\n * vars['user.verified']; // boolean\n * ```\n */\n async getUserVariables<T extends Record<string, any>>(\n ...keys: Array<keyof T & string>\n ): Promise<T> {\n const response = await this.pluginService.batchGetUserVariables({\n names: keys\n });\n const res: Record<string, ValueJson> = {};\n for (const [key, value] of Object.entries(response.items)) {\n res[key] = toJson(ValueSchema, value);\n }\n return res as T;\n }\n\n /**\n * Retrieves a single user OAuth2 bearer token by its name.\n * Returns a Token object containing the access token, type (typically 'Bearer'), and expiration timestamp.\n *\n * @param name - The name of the user token (1-100 chars, pattern: ^[_$a-zA-Z][_$a-zA-Z0-9]*(?:\\.[_$a-zA-Z][_$a-zA-Z0-9]*)*$).\n * @returns A promise that resolves with the Token object (undefined if not found).\n * Token contains: { name, token, type, expires }\n *\n * @example\n * ```typescript\n * const githubToken = await api.getUserToken('github');\n * if (githubToken) {\n * console.log(githubToken.token, githubToken.type, githubToken.expires);\n * }\n * ```\n */\n async getUserToken(name: string): Promise<Token> {\n const response = await this.pluginService.batchGetUserTokens({\n names: [name]\n });\n return response.items[name];\n }\n\n /**\n * Retrieves multiple user OAuth2 bearer tokens by their names.\n * Each Token contains the access token, type (typically 'Bearer'), and expiration timestamp.\n *\n * @param names - Names of the user tokens to retrieve (1-100 unique names, each 1-100 chars).\n * @returns A promise that resolves with an object mapping token names to their Token objects.\n * Each Token contains: { name, token, type, expires }\n * @throws {ConnectError} Code.InvalidArgument if names array is empty or exceeds 100 items.\n *\n * @example\n * ```typescript\n * const tokens = await api.getUserTokens('github', 'google');\n *\n * tokens['github']; // Token | undefined\n * tokens['google']; // Token | undefined\n * ```\n */\n async getUserTokens(...names: string[]): Promise<Record<string, Token>> {\n const response = await this.pluginService.batchGetUserTokens({\n names\n });\n return response.items;\n }\n\n /**\n * Retrieves a single plugin OAuth2 bearer token from a service account by its name.\n * Returns a Token object containing the access token, type (typically 'Bearer'), and expiration timestamp.\n *\n * @param name - The name of the plugin token (1-100 chars, pattern: ^[_$a-zA-Z][_$a-zA-Z0-9]*(?:\\.[_$a-zA-Z][_$a-zA-Z0-9]*)*$).\n * @returns A promise that resolves with the Token object (undefined if not found).\n * Token contains: { name, token, type, expires }\n *\n * @example\n * ```typescript\n * const 
serviceToken = await api.getSystemToken('service_account');\n * if (serviceToken) {\n * console.log(serviceToken.token, serviceToken.expires);\n * }\n * ```\n */\n async getSystemToken(name: string): Promise<Token> {\n const response = await this.pluginService.batchGetSystemTokens({\n names: [name]\n });\n return response.items[name];\n }\n\n /**\n * Retrieves multiple plugin OAuth2 bearer tokens from service accounts by their names.\n * Each Token contains the access token, type (typically 'Bearer'), and expiration timestamp.\n *\n * @param names - Names of the plugin tokens to retrieve (1-100 unique names, each 1-100 chars).\n * @returns A promise that resolves with an object mapping token names to their Token objects.\n * Each Token contains: { name, token, type, expires }\n * @throws {ConnectError} Code.InvalidArgument if names array is empty or exceeds 100 items.\n *\n * @example\n * ```typescript\n * const tokens = await api.getSystemTokens('service_account', 'backup_service');\n *\n * tokens['service_account']; // Token | undefined\n * tokens['backup_service']; // Token | undefined\n * ```\n */\n async getSystemTokens(...names: string[]): Promise<Record<string, Token>> {\n const response = await this.pluginService.batchGetSystemTokens({\n names\n });\n return response.items;\n }\n\n /**\n * Creates a fetch function for user-level HTTP proxy requests.\n *\n * User services are vertex-level configurations where each user can provide\n * their own certificates, API keys, and credentials.\n *\n * @param name - The service name defined in vertex user_services\n * @returns A fetch function configured with HTTP proxy headers\n *\n * @example\n * ```typescript\n * const fetch = api.userHttpProxy(\"stripe_api\");\n * const res = await fetch(\"https://api.stripe.com/v1/charges\");\n * ```\n */\n userHttpProxy(name: string): typeof fetch {\n return (input: string | Request | URL, init?: RequestInit) => {\n const req = new Request(input, init);\n req.headers.set(\n 'X-Mochabug-Adapt-Plugin-HttpProxy-Token',\n `Bearer ${this.pluginToken}`\n );\n req.headers.set('X-Mochabug-Adapt-Plugin-HttpProxy-Path', `user:${name}`);\n req.headers.set('Host', new URL(req.url).host);\n\n return this.env.httpproxy.fetch(req);\n };\n }\n\n /**\n * Creates a fetch function for system-level HTTP proxy requests.\n *\n * System services are plugin-wide configurations shared across all vertices.\n * Configured once by plugin administrators.\n *\n * @param name - The service name defined in manifest system_services\n * @returns A fetch function configured with HTTP proxy headers\n *\n * @example\n * ```typescript\n * const fetch = api.systemHttpProxy(\"internal_api\");\n * const res = await fetch(\"https://internal.example.com/data\");\n * ```\n */\n systemHttpProxy(name: string): typeof fetch {\n return (input: string | Request | URL, init?: RequestInit) => {\n const req = new Request(input, init);\n req.headers.set(\n 'X-Mochabug-Adapt-Plugin-HttpProxy-Token',\n `Bearer ${this.pluginToken}`\n );\n req.headers.set(\n 'X-Mochabug-Adapt-Plugin-HttpProxy-Path',\n `system:${name}`\n );\n req.headers.set('Host', new URL(req.url).host);\n return this.env.httpproxy.fetch(req);\n };\n }\n\n /**\n * Validates an access token to ensure it's authorized for the current plugin and execution context.\n * The authorization check verifies the token has the required scopes for plugin operations.\n *\n * @param token - The access token to validate (max 10000 chars).\n * @returns A promise that resolves when the token is successfully 
validated.\n * @throws {ConnectError} Code.Unauthenticated if the token is invalid or Code.PermissionDenied if unauthorized.\n *\n * @example\n * ```typescript\n * try {\n * await api.authorize(userProvidedToken);\n * // Token is valid, proceed with operations\n * } catch (error) {\n * // Token is invalid or lacks permissions\n * }\n * ```\n */\n async authorize(token: string): Promise<void> {\n await this.pluginService.authorize({ accessToken: token });\n }\n\n /**\n * Get information about configured system services.\n * System services are plugin-wide configurations shared across all vertices.\n *\n * @returns A promise that resolves with a map of configured services where keys are service paths\n * (using dot notation for nested services) and values indicate if the service is configured.\n *\n * @example\n * ```typescript\n * const services = await api.getSystemServices();\n * // Returns { \"http_service\": true, \"oneof_service.option_a\": true, \"grouped.nested\": true }\n * ```\n */\n async getSystemServices(): Promise<{ [key: string]: boolean }> {\n const response = await this.pluginService.getSystemServices({});\n return response.configuredServices;\n }\n\n /**\n * Reads a file from the plugin's assets directory.\n *\n * @param path - The file path relative to the assets directory.\n * @returns A promise that resolves with the file content and MIME type.\n * @throws {ConnectError} If the file is not found or is actually a directory.\n */\n async readFile(path: string): Promise<AssetFile> {\n let url = new URL('https://runtimeapi');\n url.pathname = path;\n const res = await this.env.assets.fetch(url, { method: 'GET' });\n if (!res.ok) {\n throw mapHttpError(res.status);\n }\n\n const contentType = res.headers.get('content-type');\n if (\n !contentType ||\n !contentType.toLowerCase().includes('application/octet-stream')\n ) {\n throw new ConnectError(\n 'Invalid file. Are you reading a directory?',\n Code.InvalidArgument\n );\n }\n\n return { content: res.body as ReadableStream, mime: contentType };\n }\n\n /**\n * Reads a directory listing from the plugin's assets directory.\n *\n * @param path - The directory path relative to the assets directory.\n * @returns A promise that resolves with the directory content as an array of assets.\n * @throws {ConnectError} If the directory is not found or is actually a file.\n */\n async readDirectory(path: string): Promise<AssetDirectory> {\n let url = new URL('https://runtimeapi');\n url.pathname = path;\n const res = await this.env.assets.fetch(url, { method: 'GET' });\n if (!res.ok) {\n throw mapHttpError(res.status);\n }\n\n const contentType = res.headers.get('content-type');\n if (\n !contentType ||\n !contentType.toLowerCase().includes('application/json')\n ) {\n throw new ConnectError(\n 'Invalid directory. 
Are you reading a file?',\n Code.InvalidArgument\n );\n }\n return (await res.json()) as AssetDirectory;\n }\n}\n\n/**\n * Represents the result of a batch read operation from the store.\n * Keys that were not found in the store are not present in the result.\n */\nexport class BatchReadResult {\n readonly result: Record<string, GetValue>;\n\n /**\n * Constructs a BatchReadResult from the store's batch read response.\n * @param result - Map of keys to their retrieved values (keys not found are excluded).\n * @internal\n */\n constructor(result: Record<string, GetValue>) {\n this.result = result;\n }\n\n /**\n * Retrieves the result of a specific read operation by its key as JSON.\n *\n * @param key - The key to retrieve from the batch result.\n * @returns The value and metadata, or undefined if the key was not found in the store.\n * - value: The parsed JSON value\n * - ttl: Remaining TTL in seconds (0 = no expiration)\n * - lastModified: When the value was last modified\n * - etag: Entity tag for concurrency control\n *\n * @example\n * ```typescript\n * const result = await store.readBatch(['user:1', 'user:2', 'user:3']);\n * const user1 = result.get<User>('user:1');\n * if (user1) {\n * console.log(user1.value.name, 'TTL:', user1.ttl);\n * }\n * ```\n */\n get<T = any>(\n key: string\n ):\n | {\n value: T;\n ttl: number;\n lastModified: Date;\n etag: string;\n }\n | undefined {\n const res = this.result[key];\n if (!res) {\n return undefined;\n }\n return {\n value: JSON.parse(new TextDecoder().decode(res.value)) as T,\n ttl: res.metadata!.ttl,\n lastModified: timestampDate(res.metadata!.lastModified!),\n etag: res.metadata!.etag\n };\n }\n\n /**\n * Retrieves the result of a specific read operation by its key as raw binary.\n *\n * @param key - The key to retrieve from the batch result.\n * @returns The raw binary value and metadata, or undefined if the key was not found in the store.\n * - value: The raw binary data as Uint8Array\n * - ttl: Remaining TTL in seconds (0 = no expiration)\n * - lastModified: When the value was last modified\n * - etag: Entity tag for concurrency control\n *\n * @example\n * ```typescript\n * const result = await store.readBatch(['asset:1', 'asset:2']);\n * const asset1 = result.getBinary('asset:1');\n * if (asset1) {\n * console.log('Size:', asset1.value.length, 'bytes');\n * }\n * ```\n */\n getBinary(key: string):\n | {\n ttl: number;\n value: Uint8Array;\n lastModified: Date;\n etag: string;\n }\n | undefined {\n const res = this.result[key];\n if (!res) {\n return undefined;\n }\n\n return {\n ttl: res.metadata!.ttl,\n value: res.value,\n lastModified: timestampDate(res.metadata!.lastModified!),\n etag: res.metadata!.etag\n };\n }\n}\n\n/**\n * Utility class for constructing batch write operations for the KV store.\n * Provides a fluent interface for building write batches.\n *\n * Note: Batch operations do not support preconditions. 
Use Store.insert() and Store.delete()\n * methods directly for conditional writes.\n */\nexport class WriteBatchBuilder {\n private ops: WriteOperation[] = [];\n\n /**\n * Adds an insert operation to the batch for JSON data or raw binary data.\n *\n * @param key - The key to insert (max 4096 characters).\n * @param value - The value to store (will be JSON-serialized unless it's Uint8Array).\n * @param ttl - Optional time-to-live in seconds (0 = no expiration).\n * @returns The current instance of the builder for chaining.\n *\n * @example\n * ```typescript\n * const batch = new WriteBatchBuilder()\n * .insert('user:1', { name: 'Alice' }, 3600)\n * .insert('user:2', { name: 'Bob' })\n * .build();\n * ```\n */\n insert<T = any>(key: string, value: T, ttl?: number): this {\n this.ops.push(\n create(WriteOperationSchema, {\n operation: {\n case: 'insert',\n value: {\n key,\n value: {\n value:\n value instanceof Uint8Array\n ? value\n : new TextEncoder().encode(JSON.stringify(value)),\n ttl\n }\n }\n }\n })\n );\n return this;\n }\n\n /**\n * Adds a delete operation to the batch.\n *\n * @param key - The key to delete (max 4096 characters).\n * @returns The current instance of the builder for chaining.\n *\n * @example\n * ```typescript\n * const batch = new WriteBatchBuilder()\n * .delete('temp:cache:1')\n * .delete('temp:cache:2')\n * .build();\n * ```\n */\n delete(key: string): this {\n this.ops.push(\n create(WriteOperationSchema, {\n operation: {\n case: 'delete',\n value: { key }\n }\n })\n );\n return this;\n }\n\n /**\n * Adds a range delete operation to the batch (lexicographic comparison).\n *\n * Range delete works like SelectOp for defining the range. Not setting any start or end will\n * delete the entire store when this batch is executed.\n *\n * @param start - The start key of the range. If omitted, deletes from the empty key.\n * @param end - The end key of the range. If omitted, deletes to the end.\n * @param startInclusive - Include start (>= vs >). Defaults to `false`.\n * @param endInclusive - Include end (<= vs <). Defaults to `false`.\n * @returns The current instance for method chaining.\n *\n * @example\n * ```typescript\n * // Delete all keys with prefix \"user/\"\n * builder.rangeDelete('user/', 'user/~');\n *\n * // Delete logs for a specific month\n * builder.rangeDelete('logs/2024-01/', 'logs/2024-02/');\n * ```\n */\n rangeDelete(\n start?: string,\n end?: string,\n startInclusive?: boolean,\n endInclusive?: boolean\n ): this {\n this.ops.push(\n create(WriteOperationSchema, {\n operation: {\n case: 'rangeDelete',\n value: {\n start,\n end,\n startInclusive,\n endInclusive\n }\n }\n })\n );\n return this;\n }\n\n /**\n * Finalizes the batch and returns the array of write operations.\n *\n * @returns The array of WriteOperation objects ready to be executed.\n */\n build(): WriteOperation[] {\n return this.ops;\n }\n}\n\n/**\n * Options for lock operations.\n */\nexport interface LockOptions {\n /**\n * Maximum number of retries if the lock is not acquired immediately. Defaults to 5.\n */\n maxRetries?: number;\n\n /**\n * Initial delay before retrying in milliseconds. Defaults to 50ms.\n */\n initialDelay?: number;\n\n /**\n * Maximum delay between retries in milliseconds. 
Defaults to 2000ms.\n */\n maxDelay?: number;\n}\n\n/**\n * Preconditions for conditional insert operations.\n * Used for implementing locks and optimistic concurrency control.\n *\n * IMPORTANT CONSTRAINTS (from proto validation):\n * - failIfExists CANNOT be combined with etagEquals, timestampEquals, or timestampRange\n * - When failIfExists is true, all other fields must be undefined\n * - etagEquals and timestamp conditions can be used together\n */\nexport interface InsertCondition {\n /**\n * Fail the insert if the key already exists.\n * When true, this cannot be combined with any other conditions.\n * Useful for acquiring locks atomically.\n */\n failIfExists?: boolean;\n\n /**\n * Only insert if the current etag matches this value.\n * Cannot be used with failIfExists.\n */\n etagEquals?: string;\n\n /**\n * Only insert if last_modified exactly matches this timestamp.\n * Cannot be used with failIfExists or timestampRange.\n */\n timestampEquals?: Date;\n\n /**\n * Only insert if last_modified falls within this time range.\n * Cannot be used with failIfExists or timestampEquals.\n */\n timestampRange: {\n /**\n * Start\n *\n * @generated from field: google.protobuf.Timestamp start = 1;\n */\n start?: Date;\n\n /**\n * End\n *\n * @generated from field: google.protobuf.Timestamp end = 2;\n */\n end?: Date;\n\n /**\n * Include start (>= vs >)\n *\n * @generated from field: bool start_inclusive = 3;\n */\n startInclusive: boolean;\n\n /**\n * Include end (<= vs <)\n *\n * @generated from field: bool end_inclusive = 4;\n */\n endInclusive: boolean;\n };\n}\n\n/**\n * Preconditions for conditional delete operations.\n * Used for safe deletion with verification.\n *\n * IMPORTANT CONSTRAINTS (from proto validation):\n * - mustExists CANNOT be combined with etagEquals, timestampEquals, or timestampRange\n * - When mustExists is true, all other fields must be undefined\n * - etagEquals and timestamp conditions can be used together\n */\nexport interface DeleteCondition {\n /**\n * Fail the delete if the key doesn't exist.\n * When true, this cannot be combined with any other conditions.\n */\n mustExists?: boolean;\n\n /**\n * Only delete if the current etag matches this value.\n * Cannot be used with mustExists.\n */\n etagEquals?: string;\n\n /**\n * Only delete if last_modified exactly matches this timestamp.\n * Cannot be used with mustExists or timestampRange.\n */\n timestampEquals?: Date;\n\n /**\n * Only delete if last_modified falls within this time range.\n * Cannot be used with mustExists or timestampEquals.\n */\n timestampRange: {\n /**\n * Start\n *\n * @generated from field: google.protobuf.Timestamp start = 1;\n */\n start?: Date;\n\n /**\n * End\n *\n * @generated from field: google.protobuf.Timestamp end = 2;\n */\n end?: Date;\n\n /**\n * Include start (>= vs >)\n *\n * @generated from field: bool start_inclusive = 3;\n */\n startInclusive: boolean;\n\n /**\n * Include end (<= vs <)\n *\n * @generated from field: bool end_inclusive = 4;\n */\n endInclusive: boolean;\n };\n}\n\n/**\n * Metadata returned from store write operations.\n */\nexport interface StoreMetadata {\n /**\n * Remaining time-to-live in seconds.\n * 0 means no expiration (the value will not expire).\n */\n ttl: number;\n\n /**\n * Timestamp when the value was last modified.\n * All operations in a write batch share the same timestamp.\n */\n lastModified: Date;\n\n /**\n * Entity tag for optimistic concurrency control.\n * Changes with each modification of the value.\n */\n etag: string;\n}\n\n/**\n * 
Internal type for store service clients\n * @internal\n */\ntype StoreServiceClient =\n | Client<typeof ExecutorService>\n | Client<typeof ConfiguratorService>;\n\n/**\n * Represents a store in a specific namespace, providing key-value storage operations.\n * Stores can be scoped to different namespaces with different lifetimes:\n * - NAMESPACE_VERTEX: Lives as long as the plugin in the project (vertex scope)\n * - NAMESPACE_PLUGIN: Lives as long as the plugin in the project (plugin-wide scope)\n * - NAMESPACE_VERTEX_INSTANCE: Lives as long as the instance (vertex scope, session storage)\n * - NAMESPACE_PLUGIN_INSTANCE: Lives as long as the instance (plugin-wide scope, session storage)\n */\nexport class Store {\n private service: StoreServiceClient;\n private namespace: Namespace;\n\n /**\n * Constructs a Store instance for a specific namespace.\n * @param service - The gRPC service client (ExecutorService or ConfiguratorService).\n * @param namespace - The namespace determining the store's scope and lifetime.\n * @internal\n */\n constructor(service: StoreServiceClient, namespace: NamespaceJson) {\n this.service = service;\n this.namespace = enumFromJson(NamespaceSchema, namespace);\n }\n\n /**\n * Inserts or updates a key-value pair in the store with JSON data.\n * Supports conditional writes for implementing locks and optimistic concurrency control.\n *\n * @param key - The key to insert (max 4096 characters).\n * @param value - The JSON value to store (will be JSON-serialized).\n * @param ttl - Optional time-to-live in seconds (0 = no expiration).\n * @param condition - Optional preconditions for the write:\n * - failIfExists: Fail if key already exists (cannot be combined with other conditions)\n * - etagEquals: Only write if current etag matches\n * - timestampEquals: Only write if last_modified exactly matches\n * - timestampRange: Only write if last_modified falls within range\n * @returns A promise that resolves with metadata (ttl, lastModified, etag) when the operation completes.\n * @throws {ConnectError} Code.FailedPrecondition if precondition fails, Code.InvalidArgument for invalid inputs.\n *\n * @example\n * ```typescript\n * // Simple insert\n * await store.insert('user:123', { name: 'Alice' }, 3600);\n *\n * // Conditional insert - fail if exists (for locks)\n * await store.insert('lock:resource', { owner: 'process-1' }, 30, { failIfExists: true });\n *\n * // Optimistic concurrency with etag\n * const data = await store.get('counter');\n * await store.insert('counter', data.data + 1, undefined, { etagEquals: data.etag });\n * ```\n */\n async insert<T = any>(\n key: string,\n value: T,\n ttl?: number,\n condition?: InsertCondition\n ): Promise<StoreMetadata> {\n let precondition: ConditionalInsertOp_Precondition | undefined;\n if (condition) {\n precondition = create(ConditionalInsertOp_PreconditionSchema, {});\n if (condition.failIfExists) {\n precondition.failIfExists = true;\n } else {\n if (condition.etagEquals) {\n precondition.etag = condition.etagEquals;\n }\n if (condition.timestampEquals) {\n precondition.timestampCondition = {\n case: 'lastModifiedEquals',\n value: timestampFromDate(condition.timestampEquals)\n };\n } else if (condition.timestampRange) {\n precondition.timestampCondition = {\n case: 'lastModifiedRange',\n value: create(TimestampRangeSchema, {\n start: condition.timestampRange.start\n ? timestampFromDate(condition.timestampRange.start)\n : undefined,\n end: condition.timestampRange.end\n ? 
timestampFromDate(condition.timestampRange.end)\n : undefined,\n startInclusive: condition.timestampRange.startInclusive,\n endInclusive: condition.timestampRange.endInclusive\n })\n };\n }\n }\n }\n\n const res = await this.service.writeStore({\n operation: {\n operation: {\n case: 'insert',\n value: {\n key,\n value: {\n value:\n value instanceof Uint8Array\n ? value\n : new TextEncoder().encode(JSON.stringify(value)),\n ttl\n },\n precondition\n }\n }\n },\n namespace: this.namespace\n });\n\n return {\n ttl: res.metadata!.ttl,\n lastModified: timestampDate(res.metadata!.lastModified!),\n etag: res.metadata!.etag\n };\n }\n\n /**\n * Deletes a key-value pair from the store.\n * Supports conditional deletes for safe removal operations.\n *\n * @param key - The key to delete (max 4096 characters).\n * @param condition - Optional preconditions for the delete:\n * - mustExists: Fail if key doesn't exist (cannot be combined with other conditions)\n * - etagEquals: Only delete if current etag matches\n * - timestampEquals: Only delete if last_modified exactly matches\n * - timestampRange: Only delete if last_modified falls within range\n * @returns A promise that resolves when the operation completes.\n * @throws {ConnectError} Code.FailedPrecondition if precondition fails.\n *\n * @example\n * ```typescript\n * // Simple delete\n * await store.delete('temp:data');\n *\n * // Conditional delete - only if exists\n * await store.delete('lock:resource', { mustExists: true });\n *\n * // Delete with etag verification\n * const data = await store.get('config');\n * await store.delete('config', { etagEquals: data.etag });\n * ```\n */\n async delete(key: string, condition?: DeleteCondition): Promise<void> {\n let precondition: ConditionalDeleteOp_Precondition | undefined;\n if (condition) {\n precondition = create(ConditionalDeleteOp_PreconditionSchema, {});\n if (condition.mustExists) {\n precondition.mustExists = true;\n } else {\n if (condition.etagEquals) {\n precondition.etag = condition.etagEquals;\n }\n if (condition.timestampEquals) {\n precondition.timestampCondition = {\n case: 'lastModifiedEquals',\n value: timestampFromDate(condition.timestampEquals)\n };\n } else if (condition.timestampRange) {\n precondition.timestampCondition = {\n case: 'lastModifiedRange',\n value: create(TimestampRangeSchema, {\n start: condition.timestampRange.start\n ? timestampFromDate(condition.timestampRange.start)\n : undefined,\n end: condition.timestampRange.end\n ? timestampFromDate(condition.timestampRange.end)\n : undefined,\n startInclusive: condition.timestampRange.startInclusive,\n endInclusive: condition.timestampRange.endInclusive\n })\n };\n }\n }\n }\n\n await this.service.writeStore({\n operation: {\n operation: {\n case: 'delete',\n value: {\n key,\n precondition\n }\n }\n },\n namespace: this.namespace\n });\n }\n\n /**\n * Deletes all items within a specified key range from the store (lexicographic comparison).\n *\n * Range delete works like SelectOp for defining the range. Not setting any start or end will\n * delete the entire store.\n *\n * Note: Range delete operations do not support preconditions. For conditional deletes,\n * use the {@link delete} method.\n *\n * @param start - The start key of the range. If omitted, deletes from the empty key.\n * @param end - The end key of the range. If omitted, deletes to the end.\n * @param startInclusive - Include start (>= vs >). Defaults to `false`.\n * @param endInclusive - Include end (<= vs <). 
Defaults to `false`.\n * @returns A promise that resolves when the range delete operation completes.\n *\n * @example\n * ```typescript\n * // Delete all keys with prefix \"user/\"\n * await store.rangeDelete('user/', 'user/~');\n *\n * // Delete logs for a specific month\n * await store.rangeDelete('logs/2024-01/', 'logs/2024-02/');\n *\n * // Delete all items in the store\n * await store.rangeDelete();\n * ```\n */\n async rangeDelete(\n start?: string,\n end?: string,\n startInclusive?: boolean,\n endInclusive?: boolean\n ): Promise<void> {\n await this.service.batchWriteStore({\n namespace: this.namespace,\n operations: [\n {\n operation: {\n case: 'rangeDelete',\n value: {\n start,\n end,\n startInclusive,\n endInclusive\n }\n }\n }\n ]\n });\n }\n\n /**\n * Retrieves a value by its key from the store as JSON.\n *\n * @param key - The key to retrieve (max 4096 characters).\n * @returns A promise that resolves with the retrieved JSON value and metadata, or undefined if key not found.\n * - data: The parsed JSON value\n * - ttl: Remaining TTL in seconds (0 = no expiration)\n * - etag: The ETag for optimistic concurrency control\n * - lastModified: Timestamp of last modification\n *\n * @example\n * ```typescript\n * const user = await store.get<User>('user:123');\n * if (user) {\n * console.log(user.data.name, 'expires in', user.ttl, 'seconds');\n * }\n * ```\n */\n async get<T = any>(\n key: string\n ): Promise<\n | {\n data: T;\n ttl: number;\n etag: string;\n lastModified: Date;\n }\n | undefined\n > {\n const res = await this.service.batchReadStore({\n keys: [key],\n namespace: this.namespace\n });\n\n const resGet = res.items[key];\n if (resGet) {\n return {\n data: JSON.parse(new TextDecoder().decode(resGet.value)) as T,\n ttl: resGet.metadata!.ttl,\n etag: resGet.metadata!.etag,\n lastModified: timestampDate(resGet.metadata!.lastModified!)\n };\n }\n }\n\n /**\n * Retrieves a value by its key from the store as raw binary.\n *\n * @param key - The key to retrieve (max 4096 characters).\n * @returns A promise that resolves with the retrieved binary value and metadata, or undefined if key not found.\n * - data: The raw binary value as Uint8Array\n * - ttl: Remaining TTL in seconds (0 = no expiration)\n * - etag: The ETag for optimistic concurrency control\n * - lastModified: Timestamp of last modification\n *\n * @example\n * ```typescript\n * const asset = await store.getBinary('asset:image.png');\n * if (asset) {\n * console.log('Image size:', asset.data.length, 'bytes');\n * }\n * ```\n */\n async getBinary(key: string): Promise<\n | {\n data: Uint8Array<ArrayBufferLike>;\n ttl: number;\n etag: string;\n lastModified: Date;\n }\n | undefined\n > {\n const res = await this.service.batchReadStore({\n keys: [key],\n namespace: this.namespace\n });\n\n const resGet = res.items[key];\n if (resGet) {\n return {\n data: resGet.value,\n ttl: resGet.metadata!.ttl,\n etag: resGet.metadata!.etag,\n lastModified: timestampDate(resGet.metadata!.lastModified!)\n };\n }\n }\n\n /**\n * Performs a range query on keys using lexicographic comparison, returning parsed JSON data.\n *\n * Range queries allow scanning keys within a specific range. 
For example:\n * - \"user/\" to \"user/~\" - all keys starting with \"user/\"\n * - \"logs/2024-01/\" to \"logs/2024-02/\" - all logs from January 2024\n *\n * Not setting start or end will scan the entire store.\n * Results are always returned in ascending order by key.\n *\n * @param select - The select operation specifying:\n * - start: Optional start key (max 4096 chars). Omit to start from empty key\n * - end: Optional end key (max 4096 chars)\n * - startInclusive: Whether to include start key (>= vs >)\n * - endInclusive: Whether to include end key (<= vs <)\n * - limit: Max results (1-1000)\n * - pageToken: Optional token to continue from previous query\n * @returns A promise that resolves to an object containing:\n * - items: Array of values with metadata (value as JSON type T, TTL, etag, lastModified)\n * - nextToken: Optional pagination token to continue the query\n *\n * @example\n * ```typescript\n * // Get all user keys\n * const result = await store.selectRange<User>({\n * start: 'user/',\n * end: 'user/~',\n * startInclusive: true,\n * endInclusive: false,\n * limit: 100\n * });\n *\n * // Continue pagination\n * if (result.nextToken) {\n * const next = await store.selectRange({\n * start: 'user/',\n * end: 'user/~',\n * startInclusive: true,\n * endInclusive: false,\n * limit: 100,\n * pageToken: result.nextToken\n * });\n * }\n * ```\n */\n async selectRange<T = JsonValue>(\n select: SelectOpJson\n ): Promise<{\n nextToken?: string;\n items: {\n key: string;\n value: T;\n ttl: number;\n etag: string;\n lastModified: Date;\n }[];\n }> {\n const res = await this.service.selectStore({\n namespace: this.namespace,\n operation: select\n });\n\n return {\n nextToken: res.nextToken,\n items: Object.entries(res.items)\n .sort(([keyA], [keyB]) => keyA.localeCompare(keyB))\n .map(([key, val]) => ({\n key,\n value: JSON.parse(new TextDecoder().decode(val.value)) as T,\n ttl: val.metadata!.ttl,\n etag: val.metadata!.etag,\n lastModified: timestampDate(val.metadata!.lastModified!)\n }))\n };\n }\n\n /**\n * Performs a range query on keys using lexicographic comparison, returning raw binary data.\n *\n * Range queries allow scanning keys within a specific range. For example:\n * - \"user/\" to \"user/~\" - all keys starting with \"user/\"\n * - \"logs/2024-01/\" to \"logs/2024-02/\" - all logs from January 2024\n *\n * Not setting start or end will scan the entire store.\n * Results are always returned in ascending order by key.\n *\n * @param select - The select operation specifying:\n * - start: Optional start key (max 4096 chars). 
Omit to start from empty key\n * - end: Optional end key (max 4096 chars)\n * - startInclusive: Whether to include start key (>= vs >)\n * - endInclusive: Whether to include end key (<= vs <)\n * - limit: Max results (1-1000)\n * - pageToken: Optional token to continue from previous query\n * @returns A promise that resolves to an object containing:\n * - items: Array of values with metadata (value as Uint8Array, TTL, etag, lastModified)\n * - nextToken: Optional pagination token to continue the query\n *\n * @example\n * ```typescript\n * // Get all binary assets\n * const result = await store.selectRangeBinary({\n * start: 'assets/',\n * end: 'assets/~',\n * startInclusive: true,\n * endInclusive: false,\n * limit: 50\n * });\n *\n * // Continue pagination\n * if (result.nextToken) {\n * const next = await store.selectRangeBinary({\n * start: 'assets/',\n * end: 'assets/~',\n * startInclusive: true,\n * endInclusive: false,\n * limit: 50,\n * pageToken: result.nextToken\n * });\n * }\n * ```\n */\n async selectRangeBinary(select: SelectOpJson): Promise<{\n nextToken?: string;\n items: {\n value: Uint8Array<ArrayBufferLike>;\n ttl: number;\n etag: string;\n lastModified: Date;\n }[];\n }> {\n const res = await this.service.selectStore({\n namespace: this.namespace,\n operation: select\n });\n\n return {\n nextToken: res.nextToken,\n items: Object.values(res.items).map((val) => ({\n value: val.value,\n ttl: val.metadata!.ttl,\n etag: val.metadata!.etag,\n lastModified: timestampDate(val.metadata!.lastModified!)\n }))\n };\n }\n\n /**\n * Executes a batch of write operations on the store.\n * All operations are atomic - either all succeed or all fail.\n * All operations in a batch share the same timestamp.\n * Interfering operations on the same key have undefined order.\n *\n * Note: Batch operations do not support preconditions. Use the single insert/delete\n * methods for conditional writes.\n *\n * @param ops - The write operations to execute (1-500 operations). 
Use WriteBatchBuilder to construct.\n * @returns A promise that resolves with metadata for each inserted key (deletes don't return metadata).\n * @throws {ConnectError} Code.InvalidArgument if invalid operations or limits exceeded.\n *\n * @example\n * ```typescript\n * const ops = new WriteBatchBuilder()\n * .insert('user:1', { name: 'Alice' }, 3600)\n * .insert('user:2', { name: 'Bob' }, 3600)\n * .delete('temp:old')\n * .build();\n *\n * const metadata = await store.writeBatch(ops);\n * console.log('Insert metadata:', metadata);\n * ```\n */\n async writeBatch(ops: WriteOperation[]): Promise<{\n [key: string]: StoreMetadata;\n }> {\n const response = await this.service.batchWriteStore({\n namespace: this.namespace,\n operations: ops\n });\n\n let result: { [key: string]: StoreMetadata } = {};\n for (const [key, metadata] of Object.entries(response.metadata)) {\n result[key] = {\n ttl: metadata.ttl,\n lastModified: timestampDate(metadata.lastModified!),\n etag: metadata.etag\n };\n }\n return result;\n }\n\n /**\n * Executes a batch of read operations on the store.\n * Keys not found are ignored (not present in the result).\n *\n * @param keys - The keys to read (1-10000 keys, each max 4096 characters).\n * @returns A promise that resolves with a BatchReadResult containing found items.\n *\n * @example\n * ```typescript\n * const result = await store.readBatch(['user:1', 'user:2', 'user:3']);\n * const user1 = result.get<User>('user:1');\n * if (user1) {\n * console.log(user1.value.name);\n * }\n * ```\n */\n async readBatch(keys: string[]): Promise<BatchReadResult> {\n const response = await this.service.batchReadStore({\n namespace: this.namespace,\n keys\n });\n return new BatchReadResult(response.items);\n }\n\n /**\n * Attempts to acquire a lock for a specified duration.\n * Uses an atomic insert operation with failIfExists precondition that fails if the key already exists.\n *\n * @param locker - The unique identifier for the lock (max 4096 chars).\n * @param ttl - Time-to-live in seconds (defaults to 5 seconds if not specified).\n * @returns A promise that resolves with true if the lock was acquired, false if already held.\n *\n * @example\n * ```typescript\n * // Try to acquire lock with defa