nehoid
Version:
Advanced unique ID generation utility with multi-layer encoding, collision detection, and context-aware features
1,302 lines (1,296 loc) • 145 kB
JavaScript
'use strict';
/*---------------------------------------------------------------------------------------------
* Copyright (c) NEHONIX INC. All rights reserved.
* Licensed under the MIT License. See LICENSE in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/**
* Wrapper for nehonix-uri-processor with browser-safe fallbacks
* This prevents Express from being bundled in browser environments
*/
// Try to import processor, but don't fail if it's not available
// Optional native processor. It is only loaded in Node.js (no `window`),
// which keeps bundlers from dragging server-only dependencies into
// browser builds; any load failure simply leaves the fallbacks in charge.
let processor = null;
if (typeof window === 'undefined') {
    try {
        // Resolved lazily at runtime; never statically bundled.
        processor = require('nehonix-uri-processor').__processor__;
    }
    catch (e) {
        // Module unavailable — the pure-JS fallbacks below will be used.
    }
}
// Simple fallback encoders for browser environments
// Minimal pure-JS encoders used when the native processor is absent
// (typically in browser bundles).
const fallbackEncoders = {
    // Prefer the platform btoa; fall back to Node's Buffer.
    base64: (str) => (typeof btoa === 'undefined'
        ? Buffer.from(str).toString('base64')
        : btoa(str)),
    // Lowercase hex, two digits per char code.
    hex: (str) => {
        let out = '';
        for (const ch of str) {
            out += ch.charCodeAt(0).toString(16).padStart(2, '0');
        }
        return out;
    },
    // Identity passthrough.
    rawHex: (str) => str,
    // Big-integer base58 (Bitcoin alphabet: no 0, O, I, l).
    base58: (str) => {
        const ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
        let value = BigInt(0);
        for (const ch of str) {
            value = value * BigInt(256) + BigInt(ch.charCodeAt(0));
        }
        const digits = [];
        while (value > 0) {
            digits.push(ALPHABET[Number(value % BigInt(58))]);
            value = value / BigInt(58);
        }
        // Empty/zero input still yields one digit, matching the original.
        return digits.length ? digits.reverse().join('') : ALPHABET[0];
    }
};
// Pure-JS inverses of fallbackEncoders.
const fallbackDecoders = {
    // Prefer the platform atob; fall back to Node's Buffer.
    base64: (str) => (typeof atob === 'undefined'
        ? Buffer.from(str, 'base64').toString()
        : atob(str)),
    // Consume the string two hex digits at a time.
    hex: (str) => {
        let out = '';
        for (let i = 0; i < str.length; i += 2) {
            out += String.fromCharCode(parseInt(str.slice(i, i + 2), 16));
        }
        return out;
    },
    // Identity passthrough.
    rawHex: (str) => str,
    // Big-integer base58 decode; rejects characters outside the alphabet.
    base58: (str) => {
        const ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
        let value = BigInt(0);
        for (const ch of str) {
            const digit = ALPHABET.indexOf(ch);
            if (digit === -1)
                throw new Error('Invalid base58 character');
            value = value * BigInt(58) + BigInt(digit);
        }
        const bytes = [];
        while (value > 0) {
            bytes.push(Number(value % BigInt(256)));
            value = value / BigInt(256);
        }
        return String.fromCharCode(...bytes.reverse());
    }
};
/**
 * Facade over the optional `nehonix-uri-processor` module.
 *
 * Every method delegates to the real processor when it was loaded (Node.js
 * with the dependency installed) and otherwise uses the lightweight pure-JS
 * fallbacks above, so browser bundles keep working.
 */
const __processor__ = {
    /** Encode with a single named encoding (fallback: identity for unknown names). */
    encode: (input, encoding) => {
        if (processor) {
            return processor.encode(input, encoding);
        }
        const encoder = fallbackEncoders[encoding];
        return encoder ? encoder(input) : input;
    },
    /** Decode with a single named encoding (fallback: identity for unknown names). */
    decode: (input, encoding) => {
        if (processor) {
            return processor.decode(input, encoding);
        }
        const decoder = fallbackDecoders[encoding];
        return decoder ? decoder(input) : input;
    },
    /**
     * Apply `encodings` in sequence. Returns `{ results }` where each entry
     * holds the cumulative encoded value after that stage; callers read
     * `results[results.length - 1].encoded` for the final value. NOTE: an
     * empty `encodings` array therefore yields an empty `results` array.
     */
    encodeMultiple: (input, encodings) => {
        if (processor) {
            return processor.encodeMultiple(input, encodings);
        }
        let result = input;
        const results = encodings.map(encoding => {
            const encoder = fallbackEncoders[encoding];
            result = encoder ? encoder(result) : result;
            return { encoded: result, encoding };
        });
        return { results };
    },
    /** Async variant of encodeMultiple; the fallback simply runs the sync version. */
    encodeMultipleAsync: async (input, encodings) => {
        if (processor) {
            return await processor.encodeMultipleAsync(input, encodings);
        }
        return __processor__.encodeMultiple(input, encodings);
    },
    /**
     * Heuristically detect hex/base64 input and return `{ val }` resolving to
     * the decoded string (or the raw input when detection fails).
     *
     * Fix: the fallback now decodes EAGERLY inside the try/catch. The previous
     * version deferred decoding into the returned closure, so the catch block
     * could never fire and `val()` could throw at the call site on malformed
     * base64.
     */
    autoDetectAndDecode: (input) => {
        if (processor) {
            return processor.autoDetectAndDecode(input);
        }
        const hexRegex = /^[0-9a-f]+$/i;
        const base64Regex = /^[A-Za-z0-9+/]+=*$/;
        if (hexRegex.test(input)) {
            const decoded = fallbackDecoders.hex(input);
            return { val: () => decoded };
        }
        if (base64Regex.test(input) && input.length % 4 === 0) {
            try {
                const decoded = fallbackDecoders.base64(input);
                return { val: () => decoded };
            }
            catch {
                return { val: () => input };
            }
        }
        return { val: () => input };
    }
};
/**
* Core encoding utilities for data transformation and compression.
*
* The Encoder class provides a comprehensive set of encoding and decoding methods
* supporting multiple algorithms including base64, hex, ROT13, compression schemes,
* and specialized encodings. It serves as the foundation for NehoID's transformation
* capabilities and supports both synchronous and asynchronous operations.
*
* @example
* ```typescript
* // Basic encoding and decoding
* const encoded = await Encoder.encode('hello world', 'base64');
* const decoded = Encoder.decode(encoded, 'base64');
*
* // Multiple encodings in sequence
* const multiEncoded = await Encoder.encode('data', ['base64', 'hex']);
* const multiDecoded = Encoder.decode(multiEncoded, ['hex', 'base64']);
*
* // Compression
* const compressed = Encoder.compress('long text to compress', 'gzip');
* const decompressed = Encoder.decompress(compressed, 'gzip');
* ```
*/
class Encoder {
    /**
     * Encode a string by applying one or more encodings in sequence
     * (left-to-right).
     *
     * @param input - The string to encode.
     * @param encodings - A single encoding name or an array of names.
     * @returns Promise resolving to the final encoded string.
     */
    static async encode(input, encodings) {
        const encodingArray = Array.isArray(encodings) ? encodings : [encodings];
        // Robustness fix: an empty encoding list used to crash when reading
        // the last entry of an empty results array; now it is a no-op.
        if (encodingArray.length === 0) {
            return input;
        }
        const enc = await __processor__.encodeMultipleAsync(input, encodingArray);
        return enc.results[enc.results.length - 1].encoded;
    }
    /**
     * Decode a string by reversing one or more encodings. Decodings run in
     * reverse order of the supplied list, mirroring how encode applied them.
     *
     * @param input - The encoded string.
     * @param encodings - A single encoding name or an array (in encode order).
     * @param opt - Optional settings; `opt.autoDetect` tries to detect each
     *   step's encoding instead of using the supplied name (experimental).
     * @returns The decoded string.
     */
    static decode(input, encodings, opt) {
        const encodingArray = Array.isArray(encodings) ? encodings : [encodings];
        let result = input;
        // Bug fix: iterate over a copy — Array.prototype.reverse() mutates
        // its receiver, so the old code reversed the caller's array in place.
        for (const encoding of [...encodingArray].reverse()) {
            if (opt?.autoDetect) {
                result = __processor__.autoDetectAndDecode(result).val();
            }
            else {
                result = __processor__.decode(result, encoding);
            }
        }
        return result;
    }
    /**
     * Compress a string with a simple LZ77-style or LZW ("gzip") scheme.
     *
     * The output is base64 wrapped via btoa, so input is expected to be
     * Latin-1 (char codes <= 255) — code points above 255 make btoa throw.
     * Output is lossless and reversed by decompress().
     *
     * @param input - The string to compress ("" returns "").
     * @param method - "lz77" or "gzip"; any other value returns input unchanged.
     * @returns Base64-encoded compressed data.
     */
    static compress(input, method) {
        if (!input)
            return "";
        switch (method) {
            case "lz77": {
                let compressed = "";
                let i = 0;
                while (i < input.length) {
                    // Find the longest match for the text at `i` within the
                    // previous 255 chars (offset and length each fit in one byte).
                    let maxLength = 0;
                    let maxOffset = 0;
                    const searchLimit = Math.min(i, 255);
                    for (let offset = 1; offset <= searchLimit; offset++) {
                        let length = 0;
                        while (i + length < input.length &&
                            input[i - offset + length] === input[i + length] &&
                            length < 255 // limit match length to one byte
                        ) {
                            length++;
                        }
                        if (length > maxLength) {
                            maxLength = length;
                            maxOffset = offset;
                        }
                    }
                    if (maxLength >= 4) {
                        // Matches of 4+ chars are emitted as <0xff><offset><length>.
                        // (The old comment claimed a trailing <next char>; none is
                        // written, and decompress reads exactly three bytes.)
                        compressed += String.fromCharCode(0xff);
                        compressed += String.fromCharCode(maxOffset);
                        compressed += String.fromCharCode(maxLength);
                        i += maxLength;
                    }
                    else {
                        if (input.charCodeAt(i) === 0xff) {
                            // Literal 0xff is escaped as <0xff><0x00> so it cannot
                            // be mistaken for a match marker.
                            compressed += String.fromCharCode(0xff) + String.fromCharCode(0);
                        }
                        else {
                            compressed += input[i];
                        }
                        i++;
                    }
                }
                return btoa(compressed); // Base64 encode for safe storage
            }
            case "gzip": {
                // LZW dictionary compression (native gzip is unavailable in the
                // browser). Bug fix: use a Map — a plain object also resolved
                // inherited keys, so sequences like "constructor" or "toString"
                // corrupted the output.
                const dictionary = new Map();
                let nextCode = 256; // codes 0-255 are reserved for single chars
                const result = [];
                for (let i = 0; i < 256; i++) {
                    dictionary.set(String.fromCharCode(i), i);
                }
                let currentSequence = "";
                for (let i = 0; i < input.length; i++) {
                    const char = input[i];
                    const newSequence = currentSequence + char;
                    if (dictionary.has(newSequence)) {
                        currentSequence = newSequence;
                    }
                    else {
                        // Output code for the longest known sequence.
                        result.push(dictionary.get(currentSequence));
                        // Grow the dictionary while there is room (16-bit codes).
                        if (nextCode < 65536) {
                            dictionary.set(newSequence, nextCode++);
                        }
                        currentSequence = char;
                    }
                }
                if (currentSequence !== "") {
                    result.push(dictionary.get(currentSequence));
                }
                // Bug fix: codes reach 65535 but btoa rejects characters above
                // 255, so one-char-per-code serialization threw on any input
                // that reused a dictionary entry. Serialize each code as two
                // big-endian bytes; decompress reads byte pairs.
                return btoa(result
                    .map((code) => String.fromCharCode((code >> 8) & 0xff, code & 0xff))
                    .join(""));
            }
            default:
                return input;
        }
    }
    /**
     * Decompress a string produced by compress().
     *
     * @param input - The base64-encoded compressed string ("" returns "").
     * @param method - "lz77" or "gzip"; any other value returns input unchanged.
     * @returns The original string, or the raw input if decoding fails
     *   (failures are logged, not thrown).
     */
    static decompress(input, method) {
        if (!input)
            return "";
        switch (method) {
            case "lz77":
                try {
                    const compressed = atob(input);
                    let decompressed = "";
                    let i = 0;
                    while (i < compressed.length) {
                        if (compressed.charCodeAt(i) === 0xff) {
                            i++;
                            if (i < compressed.length && compressed.charCodeAt(i) === 0) {
                                // <0xff><0x00> is an escaped literal 0xff.
                                decompressed += String.fromCharCode(0xff);
                                i++;
                            }
                            else if (i + 1 < compressed.length) {
                                // <0xff><offset><length>: copy `length` chars from
                                // `offset` back in the output. Copying one char at a
                                // time intentionally supports overlapping matches.
                                const offset = compressed.charCodeAt(i);
                                const length = compressed.charCodeAt(i + 1);
                                i += 2;
                                const start = decompressed.length - offset;
                                for (let j = 0; j < length; j++) {
                                    decompressed += decompressed[start + j];
                                }
                            }
                            // A truncated marker at end-of-data falls through and is
                            // re-read as a literal on the next iteration (malformed
                            // input only; well-formed output never ends mid-marker).
                        }
                        else {
                            decompressed += compressed[i];
                            i++;
                        }
                    }
                    return decompressed;
                }
                catch (e) {
                    console.error("LZ77 decompression error:", e);
                    return input;
                }
            case "gzip":
                try {
                    const compressed = atob(input);
                    // Codes are serialized as big-endian byte pairs (see compress).
                    const codes = [];
                    for (let i = 0; i + 1 < compressed.length; i += 2) {
                        codes.push((compressed.charCodeAt(i) << 8) | compressed.charCodeAt(i + 1));
                    }
                    if (codes.length === 0) {
                        return "";
                    }
                    // Standard LZW decode: rebuild the dictionary as we go.
                    const dictionary = [];
                    for (let i = 0; i < 256; i++) {
                        dictionary[i] = String.fromCharCode(i);
                    }
                    let nextCode = 256;
                    let character = dictionary[codes[0]];
                    let result = character;
                    for (let i = 1; i < codes.length; i++) {
                        const code = codes[i];
                        let entry;
                        if (code < dictionary.length) {
                            entry = dictionary[code];
                        }
                        else if (code === nextCode) {
                            // Special LZW case: the code refers to the sequence
                            // currently being defined.
                            entry = character + character[0];
                        }
                        else {
                            throw new Error("Invalid code");
                        }
                        result += entry;
                        if (nextCode < 65536) {
                            dictionary[nextCode++] = character + entry[0];
                        }
                        character = entry;
                    }
                    return result;
                }
                catch (e) {
                    console.error("Dictionary decompression error:", e);
                    return input;
                }
            default:
                return input;
        }
    }
}
/**
* Advanced encoding pipeline for processing and transforming IDs.
*
* The EncodingPipeline provides a fluent interface for building complex encoding workflows
* that can combine multiple encoding schemes, compression, and metadata preservation.
* It supports both forward encoding and reverse decoding operations, making it ideal
* for secure ID transformations and data serialization.
*
* @example
* ```typescript
* // Basic encoding pipeline
* const pipeline = new EncodingPipeline()
* .addEncoder('base64')
* .addEncoder('urlSafeBase64')
* .addCompression('gzip');
*
* const encoded = pipeline.process('my-sensitive-id');
*
* // Reverse the encoding
* const original = pipeline.reverse(encoded);
* ```
*
* @example
* ```typescript
* // Complex pipeline with metadata
* const securePipeline = new EncodingPipeline()
* .addEncoders(['base64', 'hex', 'rot13'])
* .addCompression('lz77')
* .enableReversibility()
* .addMetadata('version', '1.0')
* .addMetadata('timestamp', Date.now());
*
* const result = securePipeline.process('user-data-123');
* console.log('Config:', securePipeline.getConfig());
* ```
*/
class EncodingPipeline {
    /** Creates an empty pipeline: no encoders, no compression, not reversible. */
    constructor() {
        this.encoders = [];
        this.compressionMethod = "none";
        this.isReversible = false;
        this.metadata = {};
    }
    /**
     * Append a single encoder; encoders run in insertion order.
     * @param encoder - The encoding name to append.
     * @returns this, for chaining.
     */
    addEncoder(encoder) {
        this.encoders.push(encoder);
        return this;
    }
    /**
     * Append several encoders at once, preserving array order.
     * @param encoders - Array of encoding names.
     * @returns this, for chaining.
     */
    addEncoders(encoders) {
        this.encoders.push(...encoders);
        return this;
    }
    /**
     * Select the compression applied after all encoders.
     * @param method - "lz77" or "gzip".
     * @returns this, for chaining.
     */
    addCompression(method) {
        this.compressionMethod = method;
        return this;
    }
    /**
     * Embed the pipeline configuration in processed output so reverse()
     * can undo it later.
     * @returns this, for chaining.
     */
    enableReversibility() {
        this.isReversible = true;
        return this;
    }
    /**
     * Produce compact, irreversible output (the default).
     * @returns this, for chaining.
     */
    disableReversibility() {
        this.isReversible = false;
        return this;
    }
    /**
     * Attach arbitrary metadata, preserved in reversible output.
     * @param key - Metadata key.
     * @param value - Any JSON-serializable value.
     * @returns this, for chaining.
     */
    addMetadata(key, value) {
        this.metadata[key] = value;
        return this;
    }
    /**
     * Run `input` through the configured encoders, then compression. When the
     * pipeline is reversible, a base64-encoded JSON config is prepended as
     * `<config>:<payload>` so reverse() can reconstruct the steps.
     * @param input - The string to process.
     * @returns The fully encoded string.
     */
    process(input) {
        let result = input;
        if (this.encoders.length > 0) {
            const enc = __processor__.encodeMultiple(result, this.encoders);
            result = enc.results[enc.results.length - 1].encoded;
        }
        if (this.compressionMethod !== "none") {
            result = Encoder.compress(result, this.compressionMethod);
        }
        if (this.isReversible) {
            // Short keys keep the prefix small: e=encoders, c=compression, m=metadata.
            const config = {
                e: this.encoders,
                c: this.compressionMethod,
                m: this.metadata,
            };
            const configStr = __processor__.encode(JSON.stringify(config), "base64");
            result = `${configStr}:${result}`;
        }
        return result;
    }
    /**
     * Undo process() on reversible output.
     * @param input - The encoded string produced by a reversible pipeline.
     * @returns The original string, or null when `input` carries no embedded
     *   config or reversal fails (errors are logged).
     */
    reverse(input) {
        if (!input.includes(":")) {
            return null;
        }
        try {
            // Bug fix: `input.split(":", 2)` TRUNCATES in JavaScript — the
            // limit drops everything after the second segment, so any payload
            // containing ':' lost data. Split on the first ':' only.
            const sep = input.indexOf(":");
            const configStr = input.slice(0, sep);
            const content = input.slice(sep + 1);
            const config = JSON.parse(atob(configStr));
            let result = content;
            // Undo compression first (it was applied last).
            if (config.c !== "none") {
                result = Encoder.decompress(result, config.c);
            }
            // Then undo the encoders (Encoder.decode reverses the list itself).
            if (config.e.length > 0) {
                result = Encoder.decode(result, config.e);
            }
            return result;
        }
        catch (e) {
            console.error("Error reversing pipeline:", e);
            return null;
        }
    }
    /**
     * Snapshot of the pipeline settings (arrays/objects are shallow-copied so
     * callers cannot mutate internal state).
     * @returns { encoders, compression, reversible, metadata }
     */
    getConfig() {
        return {
            encoders: [...this.encoders],
            compression: this.compressionMethod,
            reversible: this.isReversible,
            metadata: { ...this.metadata },
        };
    }
}
class Generator {
    /**
     * Random string of `length` characters drawn uniformly from `alphabet`.
     * Uses Math.random — NOT cryptographically secure.
     */
    static generateRandomString(length, alphabet) {
        return Array.from({ length }, () => alphabet[Math.floor(Math.random() * alphabet.length)]).join("");
    }
    /**
     * Generate a segmented random ID.
     *
     * @param options - Partial overrides of DEFAULT_OPTIONS: size, segments,
     *   separator, encoding (single name or array of names), prefix,
     *   includeTimestamp, alphabet, compression, reversible, randomness,
     *   includeChecksum.
     * @returns The generated ID string.
     */
    static generate(options = {}) {
        // DEFAULT_OPTIONS carries every default; the previous inline literal
        // duplicated it and was immediately overwritten, so it was dead code.
        const opts = {
            ...this.DEFAULT_OPTIONS,
            ...options
        };
        const segments = [];
        if (opts.includeTimestamp) {
            // base-36 timestamp keeps IDs roughly sortable by creation time.
            segments.push(Date.now().toString(36));
        }
        for (let i = 0; i < opts.segments; i++) {
            const randomString = this.generateRandomString(opts.size, opts.alphabet);
            let encoded;
            if (Array.isArray(opts.encoding)) {
                // Robustness fix: an empty encoding array used to crash when
                // indexing the last entry of an empty results list; it also
                // no longer issues a pointless encodeMultiple call.
                if (opts.encoding.length > 0) {
                    const multi = __processor__.encodeMultiple(randomString, opts.encoding);
                    encoded = multi.results[multi.results.length - 1].encoded;
                }
                else {
                    encoded = randomString;
                }
            }
            else {
                encoded = __processor__.encode(randomString, opts.encoding);
            }
            segments.push(encoded);
        }
        let id = segments.join(opts.separator);
        if (opts.compression !== "none") {
            id = Encoder.compress(id, opts.compression);
        }
        return opts.prefix ? `${opts.prefix}${opts.separator}${id}` : id;
    }
    /**
     * Generate an ID and validate it with `options.checkFunction` (should
     * resolve true when the candidate is acceptable, e.g. unused). Retries up
     * to `options.maxAttempts` times, backing off between attempts.
     *
     * @throws Error when no acceptable ID is produced.
     */
    static async safe(options) {
        for (let attempt = 1; attempt <= options.maxAttempts; attempt++) {
            const id = this.generate();
            if (await options.checkFunction(id)) {
                return id;
            }
            // Back off before retrying: 2^n ms exponential, 100*n ms linear.
            const delay = options.backoffType === "exponential"
                ? Math.pow(2, attempt)
                : 100 * attempt;
            await new Promise((resolve) => setTimeout(resolve, delay));
        }
        throw new Error(`Failed to generate unique ID after ${options.maxAttempts} attempts`);
    }
    /**
     * Generate `count` IDs in the given format ("standard", "uuid", "nano",
     * or anything else → short). With `ensureUnique` (the default) IDs are
     * regenerated until `count` distinct values exist.
     */
    static batch(options) {
        const { count, format = "standard", ensureUnique = true } = options;
        const makeId = () => {
            switch (format) {
                case "standard":
                    return this.generate();
                case "uuid":
                    return this.uuid();
                case "nano":
                    return this.nano();
                default:
                    return this.short();
            }
        };
        // Bug fix: ensureUnique=false previously still collected into a Set,
        // silently deduplicating and looping until `count` DISTINCT ids
        // existed — the flag had no effect. Honor it now.
        if (!ensureUnique) {
            return Array.from({ length: count }, makeId);
        }
        const ids = new Set();
        while (ids.size < count) {
            ids.add(makeId());
        }
        return Array.from(ids);
    }
    /** RFC-4122-shaped v4 UUID built from Math.random (not crypto-secure). */
    static uuid() {
        return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
            const r = (Math.random() * 16) | 0;
            const v = c === "x" ? r : (r & 0x3) | 0x8;
            return v.toString(16);
        });
    }
    /** Short alphanumeric ID (default 12 chars). */
    static nano(size = 12) {
        return this.generateRandomString(size, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789");
    }
    /** Single-segment URL-safe ID of `length` random chars. */
    static short(length = 8) {
        return this.generate({
            size: length,
            segments: 1,
            encoding: "urlSafeBase64",
        });
    }
    /** Random lowercase hex string (default 32 chars). */
    static hex(length = 32) {
        return this.generateRandomString(length, "0123456789abcdef");
    }
}
// Defaults for Generator.generate(). Kept as a static assignment (rather
// than a class field) to stay compatible with the file's ES2020 style.
Generator.DEFAULT_OPTIONS = {
    size: 8,
    segments: 4,
    separator: "-",
    encoding: "rawHex",
    prefix: "",
    includeTimestamp: false,
    alphabet: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789",
    compression: "none",
    reversible: false,
    randomness: 'fast',
    includeChecksum: false,
};
/* ---------------------------------------------------------------------------------------------
* Copyright (c) NEHONIX INC. All rights reserved.
* Licensed under the MIT License. See LICENSE in the project root for license information.
* -------------------------------------------------------------------------------------------
*/
/**
* Revolutionary features that set NehoID apart from all other ID generation libraries
* @author NEHONIX
* @since 20/05/2025
*/
class NehoIdAdvenced {
/**
* 🌌 QUANTUM-ENTANGLED IDs
* Generate IDs that are quantum mechanically entangled with each other
* When one ID changes state, its entangled partners instantly reflect the change
*/
static quantum(options = {}) {
    // coherenceTime: ms until this ID's registry entry flips to "decoherent".
    const { entanglementGroup = "default", quantumSeed, coherenceTime = 30000, } = options;
    // Derive a pseudo-"quantum state" token (deterministic when seeded) and
    // pair it with a 16-char random base ID.
    const quantumState = NehoIdAdvenced.generateQuantumState(quantumSeed);
    const baseId = Generator.nano(16);
    const quantumId = `q_${quantumState}_${baseId}`;
    // Registry record tracking which IDs this one is "entangled" with.
    const entanglement = {
        id: quantumId,
        entangledWith: [],
        coherenceState: "coherent",
        lastMeasurement: Date.now(),
    };
    // NOTE(review): group membership is tested by substring match on the ID,
    // but generated IDs never embed `entanglementGroup`, so this filter will
    // practically always be empty — confirm the intended grouping mechanism.
    // (quantumRegistry is a Map-like static declared elsewhere in the file.)
    const groupIds = Array.from(NehoIdAdvenced.quantumRegistry.values())
        .filter((e) => e.id.includes(entanglementGroup))
        .map((e) => e.id);
    // Cross-link both directions so existing members reference the new ID too.
    entanglement.entangledWith = groupIds;
    groupIds.forEach((id) => {
        const existing = NehoIdAdvenced.quantumRegistry.get(id);
        if (existing) {
            existing.entangledWith.push(quantumId);
        }
    });
    NehoIdAdvenced.quantumRegistry.set(quantumId, entanglement);
    // After coherenceTime the entry "decoheres" — the state flag changes but
    // the record is never removed from the registry.
    setTimeout(() => {
        const ent = NehoIdAdvenced.quantumRegistry.get(quantumId);
        if (ent) {
            ent.coherenceState = "decoherent";
        }
    }, coherenceTime);
    return quantumId;
}
/**
* 🧬 BIOMETRIC-BASED IDs
* Generate IDs based on unique biological characteristics
*/
static biometric(options) {
const bioHash = NehoIdAdvenced.createBiometricHash(options);
const timestamp = Date.now().toString(36);
const stability = NehoIdAdvenced.calculateBiometricStability(options);
return `bio_${bioHash}_${stability}_${timestamp}`;
}
/**
* ML-PREDICTIVE IDs
* IDs that predict future usage patterns and optimize accordingly
*/
static predictive(options = {}) {
const prediction = NehoIdAdvenced.generateMLPrediction(options);
const baseId = Generator.nano(12);
const confidence = options.confidenceThreshold || 0.8;
return `ml_${prediction.pattern}_${confidence.toString(36)}_${baseId}`;
}
/**
*
* BLOCKCHAIN-VERIFIED IDs
* IDs that are cryptographically verified on a blockchain
*/
static blockchain(options = {}) {
const { networkId = "neho-chain", consensusType = "proof-of-stake" } = options;
// Generate blockchain-style hash
const blockHash = NehoIdAdvenced.generateBlockHash();
const nonce = ++NehoIdAdvenced.blockchainNonce;
const merkleRoot = NehoIdAdvenced.calculateMerkleRoot([blockHash, nonce.toString()]);
return `bc_${networkId}_${merkleRoot}_${nonce.toString(36)}`;
}
/**
* NEURO-COGNITIVE IDs
* IDs based on brain activity patterns and cognitive states
*/
static neuroCognitive(options = {}) {
const { emotionalState = "neutral", cognitiveLoad = 0.5 } = options;
const neuroPattern = NehoIdAdvenced.analyzeNeuralPattern(options);
const cognitiveHash = NehoIdAdvenced.hashCognitiveState(emotionalState, cognitiveLoad);
const brainwaveSignature = NehoIdAdvenced.processBrainwaves(options.brainwavePattern || []);
return `neuro_${neuroPattern}_${cognitiveHash}_${brainwaveSignature}`;
}
/**
* DNA-SEQUENCE IDs
* IDs based on genetic algorithms and DNA-like structures
*/
static dnaSequence(options = {}) {
const { mutationRate = 0.001, generationCount = 1 } = options;
let sequence = NehoIdAdvenced.generateInitialDNASequence();
// Apply genetic algorithm evolution
for (let gen = 0; gen < generationCount; gen++) {
sequence = NehoIdAdvenced.evolveDNASequence(sequence, mutationRate);
}
const checksum = NehoIdAdvenced.calculateDNAChecksum(sequence);
return `dna_${sequence}_g${generationCount}_${checksum}`;
}
/**
* SYNAPTIC-NETWORK IDs
* IDs that mimic neural network synaptic connections
*/
static synaptic(options = {}) {
const { neurotransmitterType = "dopamine", plasticity = 0.7 } = options;
const synapticPattern = NehoIdAdvenced.generateSynapticPattern(options);
const connectionStrength = Math.floor(plasticity * 1000).toString(36);
const neurotransmitterCode = NehoIdAdvenced.encodeNeurotransmitter(neurotransmitterType);
return `syn_${synapticPattern}_${neurotransmitterCode}_${connectionStrength}`;
}
/**
* PROBABILITY-CLOUD IDs
* IDs that exist in multiple probable states simultaneously
*/
static probabilityCloud(states = []) {
const baseId = Generator.nano(8);
const cloudStates = states.length > 0 ? states : ["alpha", "beta", "gamma"];
return cloudStates.map((state, index) => {
const probability = (1 / cloudStates.length).toFixed(3);
return `prob_${state}_${probability}_${baseId}_${index}`;
});
}
/**
* METAMORPHIC IDs
* IDs that change form based on context while maintaining core identity
*/
static metamorphic(baseContext) {
const coreIdentity = Generator.nano(16);
const history = [];
return {
getId: (currentContext) => {
const contextHash = NehoIdAdvenced.hashString(currentContext);
const morphedId = `meta_${coreIdentity}_${contextHash}`;
history.push(morphedId);
return morphedId;
},
getHistory: () => [...history],
};
}
/**
* WAVE-FUNCTION IDs
* IDs based on wave interference patterns
*/
static waveFunction(frequency = 440, amplitude = 1) {
const wavePattern = NehoIdAdvenced.generateWavePattern(frequency, amplitude);
const interference = NehoIdAdvenced.calculateWaveInterference(wavePattern);
const resonance = NehoIdAdvenced.findResonanceFrequency(frequency);
return `wave_${frequency.toString(36)}_${interference}_${resonance}`;
}
// Helper methods for revolutionary features
static generateQuantumState(seed) {
// Simulate quantum state generation
const entropy = seed ? NehoIdAdvenced.hashString(seed) : Math.random().toString(36);
const qubits = entropy.substring(0, 8);
return Buffer.from(qubits)
.toString("base64")
.replace(/[+=\/]/g, "")
.substring(0, 8);
}
static createBiometricHash(options) {
const combined = [
options.fingerprint || "",
options.voicePrint || "",
options.retinalPattern || "",
JSON.stringify(options.keystrokeDynamics || []),
JSON.stringify(options.mouseMovementPattern || []),
].join("");
return NehoIdAdvenced.hashString(combined).substring(0, 16);
}
static calculateBiometricStability(options) {
// Calculate stability score based on biometric data quality
let stability = 1.0;
if (options.keystrokeDynamics && options.keystrokeDynamics.length > 0) {
const variance = NehoIdAdvenced.calculateVariance(options.keystrokeDynamics);
stability *= 1 - Math.min(variance, 0.5);
}
return Math.floor(stability * 1000).toString(36);
}
static generateMLPrediction(options) {
// Simulate ML prediction
const features = options.userBehaviorVector || [0.5, 0.3, 0.8];
const pattern = features.reduce((acc, val) => acc + val, 0).toString(36);
return { pattern: pattern.substring(0, 8) };
}
static generateBlockHash() {
const data = Date.now().toString() + Math.random().toString();
return NehoIdAdvenced.hashString(data).substring(0, 16);
}
static calculateMerkleRoot(data) {
if (data.length === 1)
return NehoIdAdvenced.hashString(data[0]).substring(0, 16);
const newLevel = [];
for (let i = 0; i < data.length; i += 2) {
const left = data[i];
const right = data[i + 1] || left;
newLevel.push(NehoIdAdvenced.hashString(left + right));
}
return NehoIdAdvenced.calculateMerkleRoot(newLevel);
}
static analyzeNeuralPattern(options) {
const brainwaves = options.brainwavePattern || [0.5, 0.3, 0.8, 0.6];
const pattern = brainwaves
.map((w) => Math.floor(w * 16).toString(16))
.join("");
return pattern.substring(0, 8);
}
static hashCognitiveState(emotional, cognitive) {
const combined = emotional + cognitive.toString();
return NehoIdAdvenced.hashString(combined).substring(0, 6);
}
static processBrainwaves(brainwaves) {
if (brainwaves.length === 0)
return "neutral";
const avg = brainwaves.reduce((a, b) => a + b, 0) / brainwaves.length;
return Math.floor(avg * 1000).toString(36);
}
static generateInitialDNASequence() {
const bases = ["A", "T", "G", "C"];
return Array.from({ length: 12 }, () => bases[Math.floor(Math.random() * 4)]).join("");
}
static evolveDNASequence(sequence, mutationRate) {
const bases = ["A", "T", "G", "C"];
return sequence
.split("")
.map((base) => {
if (Math.random() < mutationRate) {
return bases[Math.floor(Math.random() * 4)];
}
return base;
})
.join("");
}
static calculateDNAChecksum(sequence) {
return NehoIdAdvenced.hashString(sequence).substring(0, 4);
}
static calculateStellarPosition(constellation) {
// Simulate stellar position calculation
const hash = NehoIdAdvenced.hashString(constellation + Date.now().toString());
return hash.substring(0, 8);
}
static getCosmicTime() {
// Cosmic time based on universal constants
const cosmicEpoch = Date.now() - new Date("2000-01-01").getTime();
return Math.floor(cosmicEpoch / 1000).toString(36);
}
static getSolarWindData() {
// Simulate solar wind data
const speed = Math.floor(Math.random() * 800 + 300); // 300-1100 km/s
return speed.toString(36);
}
static generateSynapticPattern(options) {
const pathway = options.neuronPathway || "default";
const strength = options.synapticStrength || 0.5;
return NehoIdAdvenced.hashString(pathway + strength.toString()).substring(0, 8);
}
static encodeNeurotransmitter(type) {
const codes = {
dopamine: "DA",
serotonin: "SE",
acetylcholine: "AC",
gaba: "GA",
};
return codes[type] || "XX";
}
static generateWavePattern(frequency, amplitude) {
const pattern = [];
for (let i = 0; i < 10; i++) {
pattern.push(amplitude * Math.sin((2 * Math.PI * frequency * i) / 100));
}
return pattern;
}
static calculateWaveInterference(pattern) {
const interference = pattern.reduce((acc, val, i) => {
return acc + val * Math.cos(i);
}, 0);
return Math.floor(Math.abs(interference) * 1000).toString(36);
}
static findResonanceFrequency(baseFreq) {
// Find harmonic resonance
const harmonics = [1, 2, 3, 5, 7, 11];
const resonance = harmonics[Math.floor(Math.random() * harmonics.length)];
return (baseFreq * resonance).toString(36);
}
static hashString(str) {
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = (hash << 5) - hash + char;
hash = hash & hash; // Convert to 32bit integer
}
return Math.abs(hash).toString(36);
}
static calculateVariance(values) {
const mean = values.reduce((a, b) => a + b, 0) / values.length;
const variance = values.reduce((acc, val) => acc + Math.pow(val - mean, 2), 0) /
values.length;
return variance;
}
/**
* CROSS-DIMENSIONAL IDs
* IDs that exist across multiple dimensions and realities
*/
static crossDimensional(dimensions = ["alpha", "beta", "gamma"]) {
const baseReality = Generator.nano(12);
const dimensionalIds = new Map();
dimensions.forEach((dimension, index) => {
const dimensionalShift = NehoIdAdvenced.hashString(dimension + baseReality);
const dimensionalId = `dim_${dimension}_${dimensionalShift.substring(0, 8)}_${index}`;
dimensionalIds.set(dimension, dimensionalId);
});
return dimensionalIds;
}
/**
* 🎼 HARMONIC-RESONANCE IDs
* IDs based on musical harmony and acoustic resonance
*/
static harmonicResonance(baseNote = "A4", scale = "major") {
const frequencies = NehoIdAdvenced.generateMusicalScale(baseNote, scale);
const harmonics = NehoIdAdvenced.calculateHarmonics(frequencies);
const resonance = NehoIdAdvenced.findOptimalResonance(harmonics);
return `harmonic_${baseNote}_${scale}_${resonance}`;
}
static generateMusicalScale(baseNote, scale) {
// Simplified musical scale generation
const baseFreq = 440; // A4
const intervals = scale === "major" ? [0, 2, 4, 5, 7, 9, 11] : [0, 2, 3, 5, 7, 8, 10];
return intervals.map((interval) => baseFreq * Math.pow(2, interval / 12));
}
static calculateHarmonics(frequencies) {
const harmonicSum = frequencies.reduce((sum, freq) => sum + (freq % 100), 0);
return Math.floor(harmonicSum).toString(36);
}
static findOptimalResonance(harmonics) {
return NehoIdAdvenced.hashString(harmonics).substring(0, 6);
}
}
// Static shared state for NehoIdAdvenced. The consumers of these fields
// live in methods outside this chunk — NOTE(review): presumably the
// quantum/ML/blockchain/cosmic generators read and mutate them; verify
// before changing initial values.
NehoIdAdvenced.quantumRegistry = new Map();
NehoIdAdvenced.mlModel = null; // lazily initialized elsewhere (starts unset)
NehoIdAdvenced.blockchainNonce = 0;
NehoIdAdvenced.cosmicData = {};
/* ---------------------------------------------------------------------------------------------
* Integration file to add advanced features to the main NehoID class
* Copyright (c) NEHONIX INC. All rights reserved.
* Licensed under the MIT License. See LICENSE in the project root for license information.
* -------------------------------------------------------------------------------------------
*/
/**
* Enhanced NehoID class with advanced features
* Provides additional ID generation capabilities
*/
class NehoIDV2 {
/**
* ADVANCED COMBO METHODS
* These combine multiple advanced features for enhanced uniqueness
*/
/**
* Ultimate ID: Combines quantum, biometric, and ML features
*/
static ultimate(options = {}) {
const quantumId = NehoIDV2.quantum({ entanglementGroup: options.quantumGroup });
const biometricHash = options.biometricData
? NehoIDV2.biometric(options.biometricData).split("_")[1]
: "none";
const mlPrediction = NehoIDV2.predictive({
userBehaviorVector: options.mlFeatures,
});
return `ultimate_${quantumId.split("_")[1]}_${biometricHash}_${mlPrediction.split("_")[1]}`;
}
/**
* Neuro-harmonic ID: Combines brain patterns with musical harmony
*/
static neuroHarmonic(emotionalState, baseNote) {
const neuroId = NehoIDV2.neuroCognitive({
emotionalState: emotionalState,
});
const harmonicId = NehoIDV2.harmonicResonance(baseNote);
return `neuro-harmonic_${neuroId.split("_")[1]}_${harmonicId.split("_")[3]}`;
}
/**
* ADAPTIVE ID SYSTEM
* IDs that evolve and adapt over time
*/
static createAdaptiveSystem(baseConfig) {
const adaptiveState = {
generation: 0,
learningRate: 0.1,
evolutionHistory: [],
contextMemory: new Map(),
};
return {
generateNext: (context) => {
adaptiveState.generation++;
// Learn from context
if (context) {
const currentCount = adaptiveState.contextMemory.get(context) || 0;
adaptiveState.contextMemory.set(context, currentCount + 1);
}
// Generate adaptive ID based on learning
const adaptiveness = Math.min(adaptiveState.generation * adaptiveState.learningRate, 1);
const contextInfluence = context
? (adaptiveState.contextMemory.get(context) || 0) * 0.1
: 0;
const adaptiveId = `adaptive_gen${adaptiveState.generation}_${adaptiveness.toFixed(2)}_${contextInfluence.toFixed(2)}_${Generator.nano(8)}`;
adaptiveState.evolutionHistory.push(adaptiveId);
return adaptiveId;
},
getEvolutionHistory: () => adaptiveState.evolutionHistory,
ge