UNPKG

node-red-contrib-knx-ultimate

Version:

Control your KNX and KNX Secure installation via Node-RED! A bunch of KNX nodes, with integrated Philips HUE control, ETS group address importer, and KNX routing between interfaces. Easy to use and highly configurable.

1,319 lines (1,224 loc) 306 kB
// KNX Ultimate AI / Traffic Analyzer const loggerClass = require('./utils/sysLogger') const dptlib = require('knxultimate').dptlib const fs = require('fs') const path = require('path') const { spawn } = require('child_process') const { getRequestAccessToken, normalizeAuthFromAccessTokenQuery } = require('./utils/httpAdminAccessToken') let googleTranslateTTS = null try { googleTranslateTTS = require('google-translate-tts') } catch (error) { googleTranslateTTS = null } const coerceBoolean = (value) => (value === true || value === 'true') let adminEndpointsRegistered = false const aiRuntimeNodes = new Map() const knxAiVueDistDir = path.join(__dirname, 'plugins', 'knxUltimateAI-vue') const sendKnxAiVueIndex = (req, res) => { const entryPath = path.join(knxAiVueDistDir, 'index.html') fs.readFile(entryPath, 'utf8', (error, html) => { if (error || typeof html !== 'string') { res.status(503).type('text/plain').send('KNX AI Vue build not found. Run "npm run knx-ai:build" in the module root.') return } const rawToken = getRequestAccessToken(req) if (!rawToken) { res.type('text/html').send(html) return } const encodedToken = encodeURIComponent(rawToken) const htmlWithToken = html .replace('./assets/app.js', `./assets/app.js?access_token=${encodedToken}`) .replace('./assets/app.css', `./assets/app.css?access_token=${encodedToken}`) res.type('text/html').send(htmlWithToken) }) } const sendStaticFileSafe = ({ rootDir, relativePath, res }) => { const rootPath = path.resolve(rootDir) const requestedPath = String(relativePath || '').replace(/^\/+/, '') const fullPath = path.resolve(rootPath, requestedPath) if (!fullPath.startsWith(rootPath + path.sep) && fullPath !== rootPath) { res.status(403).type('text/plain').send('Forbidden') return } fs.stat(fullPath, (statError, stats) => { if (statError || !stats || !stats.isFile()) { res.status(404).type('text/plain').send('File not found') return } res.sendFile(fullPath, (sendError) => { if (!sendError || res.headersSent) return 
      // Mirror the sendFile error to the client (tail of sendStaticFileSafe).
      res.status(sendError.statusCode || 500).type('text/plain').send(sendError.message || String(sendError))
    })
  })
}

// Google Translate TTS rejects long inputs; requests are chunked to this size.
const GOOGLE_TRANSLATE_MAX_CHARS = 200

// Strips a leading ID3v2 tag from an MP3 buffer so multiple synthesized
// chunks can be concatenated without metadata between audio frames.
// The tag size is stored as a 28-bit sync-safe integer (7 bits per byte).
const stripId3v2 = (buffer) => {
  if (!Buffer.isBuffer(buffer) || buffer.length < 10) return buffer
  if (buffer[0] !== 0x49 || buffer[1] !== 0x44 || buffer[2] !== 0x33) return buffer // 'I','D','3' magic
  const size = ((buffer[6] & 0x7f) << 21) | ((buffer[7] & 0x7f) << 14) | ((buffer[8] & 0x7f) << 7) | (buffer[9] & 0x7f)
  const tagEnd = 10 + size
  if (tagEnd <= 10 || tagEnd >= buffer.length) return buffer // implausible size: leave untouched
  return buffer.subarray(tagEnd)
}

// Splits text into chunks of at most maxLen characters, preferring to break at
// the latest punctuation/whitespace character inside the current window.
const splitGoogleTranslateText = (text, maxLen = GOOGLE_TRANSLATE_MAX_CHARS) => {
  const chunks = []
  let remaining = String(text || '').trim()
  if (!remaining) return chunks
  const breakChars = ['\n', '.', '!', '?', ';', ':', ',', ' ']
  while (remaining.length > maxLen) {
    const window = remaining.slice(0, maxLen + 1)
    let breakAt = -1
    for (const ch of breakChars) {
      const idx = window.lastIndexOf(ch)
      if (idx > breakAt) breakAt = idx
    }
    if (breakAt <= 0) breakAt = maxLen // no natural break found: hard cut
    const cutAt = breakAt === maxLen ? maxLen : breakAt + 1 // keep the break char with its chunk
    const chunk = remaining.slice(0, cutAt).trim()
    if (chunk) chunks.push(chunk)
    remaining = remaining.slice(cutAt).trimStart()
  }
  if (remaining) chunks.push(remaining)
  return chunks
}

// Synthesizes speech via Google Translate TTS, chunking long text and
// concatenating the resulting MP3 buffers. Throws when the optional
// google-translate-tts package is unavailable.
// NOTE: the function body continues on the next source line.
const synthesizeGoogleTranslateSpeech = async ({ text, voice = 'it', slow = false } = {}) => {
  if (!googleTranslateTTS || typeof googleTranslateTTS.synthesize !== 'function') {
    throw new Error('Google Translate TTS is not available')
  }
  const resolvedVoice = typeof voice === 'string' && voice.includes('-') ?
voice.split('-')[0] : String(voice || 'it') const textChunks = splitGoogleTranslateText(text, GOOGLE_TRANSLATE_MAX_CHARS) if (!textChunks.length) return Buffer.from([]) if (textChunks.length === 1) { return await googleTranslateTTS.synthesize({ text: textChunks[0], voice: resolvedVoice, slow: slow === true }) } const buffers = [] for (let i = 0; i < textChunks.length; i += 1) { // Google Translate TTS accepts only short chunks; concatenate the resulting mp3 frames. // eslint-disable-next-line no-await-in-loop const chunkBuffer = await googleTranslateTTS.synthesize({ text: textChunks[i], voice: resolvedVoice, slow: slow === true }) buffers.push(i === 0 ? chunkBuffer : stripId3v2(chunkBuffer)) } return Buffer.concat(buffers) } const sanitizeApiKey = (value) => { if (value === undefined || value === null) return '' let key = String(value).trim() if (key === '') return '' // Node-RED password placeholder when credential is already set if (key === '__PWRD__') return '' // Common copy/paste mistakes key = key.replace(/^authorization:\s*/i, '') key = key.replace(/^bearer\s+/i, '') key = key.replace(/^"(.+)"$/, '$1').replace(/^'(.+)'$/, '$1') // If user pasted a full header line, extract the token-like part const match = key.match(/(sk-[A-Za-z0-9_-]{10,})/) if (match) return match[1] return key } const safeStringify = (value) => { try { if (value === undefined) return '' if (typeof value === 'string') return value return JSON.stringify(value) } catch (error) { return String(value) } } const truncatePromptText = (value, maxChars = 10000) => { const text = String(value || '') const limit = Math.max(256, Number(maxChars) || 0) if (text.length <= limit) return text const marker = '\n...[truncated]' const keep = Math.max(0, limit - marker.length) return text.slice(0, keep) + marker } const compactObjectForPrompt = (value, { preferredKeys = [], maxEntries = 40, formatValue } = {}) => { if (!value || typeof value !== 'object' || Array.isArray(value)) return {} const source = 
value const out = {} const preferred = Array.isArray(preferredKeys) ? preferredKeys.map(key => String(key || '').trim()).filter(Boolean) : [] const preferredSet = new Set(preferred) const keys = [ ...preferred, ...Object.keys(source).filter(key => !preferredSet.has(key)) ] const limit = Math.max(1, Number(maxEntries) || 1) for (const key of keys) { if (!Object.prototype.hasOwnProperty.call(source, key)) continue const raw = source[key] const normalized = typeof formatValue === 'function' ? formatValue(raw, key) : raw out[key] = normalized if (Object.keys(out).length >= limit) break } return out } const takeLastItemsByCharBudget = (items, maxChars = 7000) => { const source = Array.isArray(items) ? items : [] const limit = Math.max(200, Number(maxChars) || 0) const selected = [] let total = 0 for (let i = source.length - 1; i >= 0; i -= 1) { const item = String(source[i] || '') if (!item) continue const next = item.length + (selected.length > 0 ? 1 : 0) if (selected.length > 0 && (total + next) > limit) break selected.push(item) total += next } return selected.reverse() } const buildLlmSummarySnapshot = (summary) => { const s = summary && typeof summary === 'object' ? summary : {} const topGAs = Array.isArray(s.topGAs) ? s.topGAs.slice(0, 30) : [] const topGaKeys = topGAs .map(item => String(item && item.ga ? item.ga : '').trim()) .filter(Boolean) const graph = s.graph && typeof s.graph === 'object' ? { windowSec: Number(s.graph.windowSec || 0), edges: (Array.isArray(s.graph.edges) ? s.graph.edges : []).slice(0, 60), hotEdgesDelta: (Array.isArray(s.graph.hotEdgesDelta) ? s.graph.hotEdgesDelta : []).slice(0, 40), anomalyLifecycle: (Array.isArray(s.graph.anomalyLifecycle) ? s.graph.anomalyLifecycle : []).slice(0, 30) } : {} const flowMapTopology = s.flowMapTopology && typeof s.flowMapTopology === 'object' ? { mode: String(s.flowMapTopology.mode || '').trim(), windowSec: Number(s.flowMapTopology.windowSec || 0), nodes: (Array.isArray(s.flowMapTopology.nodes) ? 
s.flowMapTopology.nodes : []).slice(0, 80).map((node) => ({ id: String(node && node.id ? node.id : '').trim(), displayId: String(node && node.displayId ? node.displayId : '').trim(), kind: String(node && node.kind ? node.kind : '').trim(), subtitle: String(node && node.subtitle ? node.subtitle : '').trim(), payload: compactPayloadForNodeLabel(node && Object.prototype.hasOwnProperty.call(node, 'payload') ? node.payload : '', 36), anomalyCount: Number(node && node.anomalyCount ? node.anomalyCount : 0), lastSeenAtMs: Number(node && node.lastSeenAtMs ? node.lastSeenAtMs : 0) })), edges: (Array.isArray(s.flowMapTopology.edges) ? s.flowMapTopology.edges : []).slice(0, 120).map((edge) => ({ from: String(edge && edge.from ? edge.from : '').trim(), to: String(edge && edge.to ? edge.to : '').trim(), linkType: String(edge && edge.linkType ? edge.linkType : '').trim(), event: String(edge && edge.event ? edge.event : '').trim(), currentWindowCount: Number(edge && edge.currentWindowCount ? edge.currentWindowCount : 0), totalCount: Number(edge && edge.totalCount ? edge.totalCount : 0), delta: Number(edge && edge.delta ? edge.delta : 0), delayMs: Number(edge && edge.delayMs ? edge.delayMs : 0), lastAt: String(edge && edge.lastAt ? edge.lastAt : '').trim() })) } : undefined return { meta: s.meta && typeof s.meta === 'object' ? s.meta : {}, counters: s.counters && typeof s.counters === 'object' ? s.counters : {}, byEvent: s.byEvent && typeof s.byEvent === 'object' ? s.byEvent : {}, topGAs, topSources: (Array.isArray(s.topSources) ? s.topSources : []).slice(0, 20), patterns: (Array.isArray(s.patterns) ? 
s.patterns : []).slice(0, 30), gaLastSeenAt: compactObjectForPrompt(s.gaLastSeenAt, { preferredKeys: topGaKeys, maxEntries: 60 }), gaLastPayload: compactObjectForPrompt(s.gaLastPayload, { preferredKeys: topGaKeys, maxEntries: 60, formatValue: value => compactPayloadForNodeLabel(value, 42) }), flowKnownCount: Number(s.flowKnownCount || 0), busConnection: s.busConnection && typeof s.busConnection === 'object' ? s.busConnection : {}, anomalyLifecycle: (Array.isArray(s.anomalyLifecycle) ? s.anomalyLifecycle : []).slice(-40), graph, flowMapTopology } } const extractJsonFragmentFromText = (value) => { const text = String(value || '').trim() if (!text) throw new Error('Empty AI response') const normalizeCandidate = (input) => String(input || '') .replace(/^\uFEFF/, '') .replace(/^\s*json\s*\n/i, '') .trim() const tryParse = (input) => { const source = normalizeCandidate(input) if (!source) return null try { return JSON.parse(source) } catch (error) { } // Fallback: tolerate comments and trailing commas that some models emit. const relaxed = source .replace(/\/\*[\s\S]*?\*\//g, '') .replace(/^\s*\/\/.*$/gm, '') .replace(/,\s*([}\]])/g, '$1') .trim() if (!relaxed || relaxed === source) return null try { return JSON.parse(relaxed) } catch (error) { return null } } const extractBalancedJsonSlices = (input, maxSlices = 24) => { const source = String(input || '') const out = [] for (let i = 0; i < source.length; i += 1) { const ch = source[i] if (ch !== '{' && ch !== '[') continue const stack = [ch === '{' ? 
'}' : ']'] let inString = false let escaped = false for (let j = i + 1; j < source.length; j += 1) { const current = source[j] if (inString) { if (escaped) { escaped = false continue } if (current === '\\') { escaped = true continue } if (current === '"') inString = false continue } if (current === '"') { inString = true continue } if (current === '{') { stack.push('}') continue } if (current === '[') { stack.push(']') continue } if ((current === '}' || current === ']') && stack.length) { if (current !== stack[stack.length - 1]) break stack.pop() if (!stack.length) { const slice = normalizeCandidate(source.slice(i, j + 1)) if (slice) out.push(slice) i = j break } } } if (out.length >= maxSlices) break } return out } const candidates = [] const seen = new Set() const pushCandidate = (input) => { const normalized = normalizeCandidate(input) if (!normalized || seen.has(normalized)) return seen.add(normalized) candidates.push(normalized) } pushCandidate(text) const fencedRe = /```(?:[a-zA-Z0-9_-]+)?\s*([\s\S]*?)```/g let fenceMatch while ((fenceMatch = fencedRe.exec(text)) !== null) { pushCandidate(fenceMatch[1]) } for (const candidate of candidates) { const direct = tryParse(candidate) if (direct !== null) return direct const objectStart = candidate.indexOf('{') const objectEnd = candidate.lastIndexOf('}') if (objectStart !== -1 && objectEnd !== -1 && objectEnd > objectStart) { const parsedObject = tryParse(candidate.slice(objectStart, objectEnd + 1)) if (parsedObject !== null) return parsedObject } const arrayStart = candidate.indexOf('[') const arrayEnd = candidate.lastIndexOf(']') if (arrayStart !== -1 && arrayEnd !== -1 && arrayEnd > arrayStart) { const parsedArray = tryParse(candidate.slice(arrayStart, arrayEnd + 1)) if (parsedArray !== null) return parsedArray } const balancedSlices = extractBalancedJsonSlices(candidate) for (const slice of balancedSlices) { const parsedSlice = tryParse(slice) if (parsedSlice !== null) return parsedSlice } } const preview = 
  // Tail of extractJsonFragmentFromText: no candidate parsed, report a short preview.
  text.slice(0, 180).replace(/\s+/g, ' ').trim()
  throw new Error(`The LLM response did not contain valid JSON${preview ? ` (preview: ${preview})` : ''}`)
}

// Canonical string form of a value so two payloads can be compared for equality.
const normalizeValueForCompare = (value) => {
  if (value === undefined) return 'undefined'
  if (value === null) return 'null'
  if (Buffer.isBuffer(value)) return `buffer:${value.toString('hex')}`
  if (typeof value === 'object') return safeStringify(value)
  return String(value)
}

const nowMs = () => Date.now()

// Rounds to `digits` decimal places; non-finite input collapses to 0.
const roundTo = (value, digits = 2) => {
  const n = Number(value)
  if (!Number.isFinite(n)) return 0
  const f = 10 ** Math.max(0, Number(digits) || 0)
  return Math.round(n * f) / f
}

// Nearest-rank (floor-index) percentile of the finite numbers in `values`.
// Non-numeric entries are dropped; an empty set yields 0.
const percentileFromArray = (values, percentile = 0.95) => {
  const arr = Array.isArray(values) ? values.filter(v => Number.isFinite(Number(v))).map(v => Number(v)) : []
  if (!arr.length) return 0
  arr.sort((a, b) => a - b)
  const p = Math.max(0, Math.min(1, Number(percentile) || 0))
  if (arr.length === 1) return arr[0]
  const idx = Math.floor((arr.length - 1) * p)
  return arr[idx]
}

const edgeKey = (from, to) => `${from} -> ${to}`

// Classifies an anomaly by how far the observed rate/changes exceed their
// thresholds. Note: when both threshold pairs are present, the changes-based
// ratio overwrites the rate-based one (last assignment wins).
const computeAnomalySeverity = (payload) => {
  const p = payload || {}
  let ratio = 1
  if (p.thresholdPerSec > 0 && p.ratePerSec > 0) ratio = Number(p.ratePerSec) / Number(p.thresholdPerSec)
  if (p.thresholdChanges > 0 && p.changesInWindow > 0) ratio = Number(p.changesInWindow) / Number(p.thresholdChanges)
  if (!Number.isFinite(ratio) || ratio <= 0) ratio = 1
  if (ratio >= 3) return { label: 'critical', score: roundTo(ratio, 2) }
  if (ratio >= 2) return { label: 'high', score: roundTo(ratio, 2) }
  if (ratio >= 1.25) return { label: 'medium', score: roundTo(ratio, 2) }
  return { label: 'low', score: roundTo(ratio, 2) }
}

// Heuristics: did the user ask for a chart (English + Italian keywords), and
// does a reply already contain an SVG (fenced block or inline element)?
const SVG_REQUEST_RE = /\b(svg|chart|graph|plot|diagram|bar|pie|line|grafico|grafici|diagramma|istogramma|torta)\b/i
const SVG_PRESENT_RE = /```svg[\s\S]*?```|<svg[\s>][\s\S]*?<\/svg>/i

// XML/HTML entity escaping (remaining replacements continue on the next source line).
const escapeXml = (value) => String(value || '')
  .replace(/&/g, '&amp;')
  .replace(/</g, '&lt;')
  .replace(/>/g, '&gt;')
  // Tail of escapeXml: remaining entity replacements.
  .replace(/"/g, '&quot;')
  .replace(/'/g, '&apos;')

// Truncates a label to maxLen chars, ending with '..' when cut.
const truncateLabel = (value, maxLen = 14) => {
  const s = String(value || '')
  if (s.length <= maxLen) return s
  return s.slice(0, Math.max(1, maxLen - 2)) + '..'
}

const shouldGenerateSvgChart = (question) => SVG_REQUEST_RE.test(String(question || ''))

// Recursively truncates the fractional part of numbers — including numeric
// strings and JSON-encoded objects/arrays inside strings — so payload labels
// stay compact. Non-finite numbers and unparseable strings pass through as-is.
const stripPayloadDecimals = (value) => {
  if (value === undefined || value === null) return value
  if (typeof value === 'number') {
    if (!Number.isFinite(value)) return value
    return Math.trunc(value)
  }
  if (Array.isArray(value)) return value.map(v => stripPayloadDecimals(v))
  if (typeof value === 'object') {
    const out = {}
    Object.keys(value).forEach((k) => { out[k] = stripPayloadDecimals(value[k]) })
    return out
  }
  if (typeof value === 'string') {
    const s = String(value).trim()
    if (s === '') return ''
    if (/^[+-]?\d+(?:\.\d+)?$/.test(s)) {
      const n = Number(s)
      if (Number.isFinite(n)) return String(Math.trunc(n))
    }
    if ((s.startsWith('{') && s.endsWith('}')) || (s.startsWith('[') && s.endsWith(']'))) {
      try {
        const parsed = JSON.parse(s)
        return safeStringify(stripPayloadDecimals(parsed))
      } catch (error) {
        return s
      }
    }
    return s
  }
  return value
}

// One-line, decimal-free, length-limited rendering of a payload for node labels.
// NOTE: the closing brace of this function sits on the next source line.
const compactPayloadForNodeLabel = (value, maxLen = 28) => {
  const normalizedPayload = stripPayloadDecimals(value)
  let s = normalizeValueForCompare(normalizedPayload)
  s = String(s || '').replace(/\s+/g, ' ').trim()
  if (s.length <= maxLen) return s
  return s.slice(0, Math.max(1, maxLen - 2)) + '..'
}

// Collapses whitespace and normalizes en/em dashes to '-'.
const normalizeAreaText = (value) => String(value || '')
  .replace(/\s+/g, ' ')
  .replace(/[–—]/g, '-')
  .trim()

// URL/ID-safe slug: strip diacritics, keep [a-z0-9] runs joined by '-',
// trim edge dashes, max 80 chars; empty input falls back to 'area'.
const slugifyAreaText = (value) => normalizeAreaText(value)
  .normalize('NFKD')
  .replace(/[\u0300-\u036f]/g, '')
  .toLowerCase()
  .replace(/[^a-z0-9]+/g, '-')
  .replace(/^-+|-+$/g, '')
  .slice(0, 80) || 'area'

// Appends a normalized value to `list` only when non-empty, not yet present,
// and the list is still below maxItems. Mutates `list` in place.
const pushUniqueValue = (list, value, maxItems = 6) => {
  const normalized = normalizeAreaText(value)
  if (!normalized) return
  if (!Array.isArray(list)) return
  if (list.includes(normalized)) return
  if (list.length >= maxItems) return
  list.push(normalized)
}

// Restricts a GA role string to the known set; anything else yields `fallback`.
const normalizeGaRoleValue = (value, fallback = 'auto') => {
  const raw = normalizeAreaText(value).toLowerCase()
  if (['auto', 'command', 'status', 'neutral'].includes(raw)) return raw
  return fallback
}

// Parses an ETS-style label of the form "(Main -> Middle) Device name" into
// its hierarchy parts; labels without the leading "(...)" prefix are treated
// as plain device labels with no group information.
const parseEtsHierarchyLabel = (value) => {
  const raw = normalizeAreaText(value)
  if (!raw) {
    return { raw: '', deviceLabel: '', mainGroup: '', middleGroup: '', hierarchyPath: '' }
  }
  const match = raw.match(/^\(([^()]+)\)\s*(.*)$/)
  if (!match) {
    return { raw, deviceLabel: raw, mainGroup: '', middleGroup: '', hierarchyPath: '' }
  }
  const hierarchy = String(match[1] || '')
    .split('->')
    .map(part => normalizeAreaText(part))
    .filter(Boolean)
  return {
    raw,
    deviceLabel: normalizeAreaText(match[2] || raw),
    mainGroup: hierarchy[0] || '',
    middleGroup: hierarchy[1] || '',
    hierarchyPath: hierarchy.join(' / ')
  }
}

// Keyword rules (English + Italian) mapping ETS labels/DPTs to functional tags.
// NOTE: the 'energy' rule's regex continues on the next source line.
const AREA_TAG_RULES = [
  { tag: 'lighting', pattern: /\b(light|lights|lighting|luce|luci|lamp|dimmer)\b/i },
  { tag: 'hvac', pattern: /\b(hvac|clima|climate|fan\s?coil|fancoil|heating|cooling|thermo|temp|temperature)\b/i },
  { tag: 'shading', pattern: /\b(blind|blinds|shutter|shutters|jalousie|curtain|curtains|tapparella|tapparelle)\b/i },
  { tag: 'presence', pattern: /\b(presence|occupancy|motion|presence detector|pir|presence sensor|presence)\b/i },
  { tag: 'access', pattern: /\b(door|doors|window|windows|access|lock|badge|porta|porte|finestra|finestre)\b/i },
  { tag: 'energy', pattern:
/\b(power|energy|meter|consumption|load|carico|consumo|misura)\b/i } ] const inferAreaTags = ({ mainGroup, middleGroup, deviceLabel, dpt }) => { const text = [mainGroup, middleGroup, deviceLabel, dpt].filter(Boolean).join(' ') const tags = [] AREA_TAG_RULES.forEach((rule) => { if (rule.pattern.test(text)) tags.push(rule.tag) }) return tags } const buildSuggestedAreasFromCsv = (csv) => { const rows = Array.isArray(csv) ? csv : [] const areasById = new Map() let hierarchicalGaCount = 0 let secondaryGroupCount = 0 let mainGroupCount = 0 const ensureArea = ({ id, kind, name, parentName, pathTokens }) => { const key = String(id || '').trim() if (!key) return null if (!areasById.has(key)) { areasById.set(key, { id: key, kind: String(kind || 'area').trim() || 'area', name: normalizeAreaText(name || ''), parentName: normalizeAreaText(parentName || ''), pathTokens: Array.isArray(pathTokens) ? pathTokens.map(token => normalizeAreaText(token)).filter(Boolean) : [], gaSet: new Set(), dptSet: new Set(), tags: new Set(), sampleGAs: [], sampleLabels: [] }) if (kind === 'secondary_group') secondaryGroupCount += 1 if (kind === 'main_group') mainGroupCount += 1 } return areasById.get(key) } const registerAreaRow = ({ areaId, kind, name, parentName, pathTokens, row, parsed }) => { const area = ensureArea({ id: areaId, kind, name, parentName, pathTokens }) if (!area) return const ga = normalizeAreaText(row && row.ga) const dpt = normalizeAreaText(row && row.dpt) if (ga) area.gaSet.add(ga) if (dpt) area.dptSet.add(dpt) pushUniqueValue(area.sampleGAs, ga, 6) pushUniqueValue(area.sampleLabels, parsed && parsed.deviceLabel, 4) inferAreaTags({ mainGroup: parsed && parsed.mainGroup, middleGroup: parsed && parsed.middleGroup, deviceLabel: parsed && parsed.deviceLabel, dpt }).forEach(tag => area.tags.add(tag)) } rows.forEach((row) => { const ga = normalizeAreaText(row && row.ga) if (!ga) return const parsed = parseEtsHierarchyLabel(row && row.devicename) if (parsed.mainGroup || 
parsed.middleGroup) hierarchicalGaCount += 1 if (parsed.mainGroup) { registerAreaRow({ areaId: `main:${slugifyAreaText(parsed.mainGroup)}`, kind: 'main_group', name: parsed.mainGroup, parentName: '', pathTokens: [parsed.mainGroup], row, parsed }) } if (parsed.mainGroup && parsed.middleGroup) { registerAreaRow({ areaId: `secondary:${slugifyAreaText(parsed.mainGroup)}:${slugifyAreaText(parsed.middleGroup)}`, kind: 'secondary_group', name: parsed.middleGroup, parentName: parsed.mainGroup, pathTokens: [parsed.mainGroup, parsed.middleGroup], row, parsed }) } }) const suggested = Array.from(areasById.values()) .map((entry) => { const gaCount = entry.gaSet.size const dptCount = entry.dptSet.size const path = entry.pathTokens.join(' / ') return { id: entry.id, kind: entry.kind, name: entry.name, baseName: entry.name, parentId: entry.kind === 'secondary_group' ? `main:${slugifyAreaText(entry.parentName)}` : '', parentName: entry.parentName, baseParentName: entry.parentName, path, basePath: path, gaCount, dptCount, gaList: Array.from(entry.gaSet.values()).sort(), dptList: Array.from(entry.dptSet.values()).sort(), tags: Array.from(entry.tags.values()).sort(), baseTags: Array.from(entry.tags.values()).sort(), sampleGAs: entry.sampleGAs.slice(0, 6), sampleLabels: entry.sampleLabels.slice(0, 4), description: entry.kind === 'secondary_group' ? `${entry.parentName || 'ETS'} / ${entry.name} (${gaCount} GA)` : `${entry.name} (${gaCount} GA)`, priority: entry.kind === 'secondary_group' ? 2 : 1 } }) .sort((a, b) => { if (b.priority !== a.priority) return b.priority - a.priority if (b.gaCount !== a.gaCount) return b.gaCount - a.gaCount return String(a.path || a.name || '').localeCompare(String(b.path || b.name || '')) }) return { source: rows.length ? 
'ets_csv' : 'none', generatedAt: new Date().toISOString(), totals: { gaCount: rows.length, hierarchicalGaCount, suggestedAreaCount: suggested.length, secondaryGroupCount, mainGroupCount }, suggested } } const buildGaCatalogFromCsv = (csv) => { const rows = Array.isArray(csv) ? csv : [] const byGa = new Map() rows.forEach((row) => { const ga = normalizeAreaText(row && row.ga) if (!ga || byGa.has(ga)) return const parsed = parseEtsHierarchyLabel(row && row.devicename) const dpt = normalizeAreaText(row && row.dpt) const label = normalizeAreaText(parsed.deviceLabel || row.devicename || ga) const roleDetails = inferSignalRoleDetails({ label, dpt }) const tags = inferAreaTags({ mainGroup: parsed.mainGroup, middleGroup: parsed.middleGroup, deviceLabel: label, dpt }) byGa.set(ga, { ga, dpt, label, etsName: normalizeAreaText(row && row.devicename), baseRole: roleDetails.role, baseRoleSource: roleDetails.source, role: roleDetails.role, roleSource: roleDetails.source, roleOverride: 'auto', mainGroup: parsed.mainGroup || '', middleGroup: parsed.middleGroup || '', hierarchyPath: parsed.hierarchyPath || '', tags, valueOptions: getDptValueOptions(dpt) }) }) return Array.from(byGa.values()) .sort((a, b) => { const left = `${a.hierarchyPath} ${a.label} ${a.ga}`.trim() const right = `${b.hierarchyPath} ${b.label} ${b.ga}`.trim() return left.localeCompare(right) }) } const applyGaRoleOverridesToCatalog = ({ catalog, roleOverrides }) => { const rawCatalog = Array.isArray(catalog) ? catalog : [] const overrides = roleOverrides && typeof roleOverrides === 'object' ? roleOverrides : {} return rawCatalog.map((item) => { const ga = String(item && item.ga ? item.ga : '').trim() const overrideRole = normalizeGaRoleValue(overrides[ga], 'auto') return Object.assign({}, item, { role: overrideRole === 'auto' ? normalizeGaRoleValue(item && item.baseRole ? item.baseRole : item && item.role ? item.role : 'neutral', 'neutral') : overrideRole, roleSource: overrideRole === 'auto' ? 
String(item && item.baseRoleSource ? item.baseRoleSource : item && item.roleSource ? item.roleSource : 'unknown_rule') : 'user_override', roleOverride: overrideRole }) }) } const isAmbiguousGaRoleSource = (source) => { const value = normalizeAreaText(source).toLowerCase() return value === 'dpt_rule' || value === 'unknown_rule' } const normalizeGaRoleSuggestionPayload = ({ payload, gaCatalogMap }) => { const parsed = payload && typeof payload === 'object' ? payload : {} const rawRoles = Array.isArray(parsed) ? parsed : Array.isArray(parsed.roles) ? parsed.roles : Array.isArray(parsed.items) ? parsed.items : [] const overrides = {} rawRoles.forEach((entry) => { const ga = normalizeAreaText(entry && (entry.ga || entry.groupAddress || entry.address)) if (!ga || !gaCatalogMap.has(ga)) return const role = normalizeGaRoleValue(entry && entry.role, 'auto') if (role === 'auto') return overrides[ga] = role }) return overrides } const normalizeLanguageCode = (value, fallback = 'en') => { const raw = normalizeAreaText(value).toLowerCase() if (!raw) return fallback const match = raw.match(/^[a-z]{2,3}/) return match ? match[0] : fallback } const extractLanguageCodeFromHeader = (value, fallback = 'en') => { const raw = normalizeAreaText(value) if (!raw) return fallback const first = raw.split(',')[0] || '' return normalizeLanguageCode(first, fallback) } const languageNameFromCode = (value) => { const code = normalizeLanguageCode(value, 'en') const map = { it: 'Italian', en: 'English', de: 'German', fr: 'French', es: 'Spanish', pt: 'Portuguese', nl: 'Dutch' } return map[code] || code } const enrichSuggestedAreasWithSummary = ({ baseSnapshot, summary }) => { const snapshot = baseSnapshot && typeof baseSnapshot === 'object' ? baseSnapshot : buildSuggestedAreasFromCsv([]) const gaLastSeenAt = summary && typeof summary.gaLastSeenAt === 'object' ? summary.gaLastSeenAt : {} const gaLastPayload = summary && typeof summary.gaLastPayload === 'object' ? 
summary.gaLastPayload : {} const analysisWindowSec = Math.max(30, Number(summary && summary.meta && summary.meta.analysisWindowSec) || 0) const activeCutoffMs = nowMs() - (analysisWindowSec * 1000) let activeAreaCount = 0 const suggested = (Array.isArray(snapshot.suggested) ? snapshot.suggested : []).map((area) => { let activeGaCount = 0 let lastSeenAtMs = 0 const recentPayloads = [] ; (Array.isArray(area.sampleGAs) ? area.sampleGAs : []).forEach((ga) => { const ts = new Date(String(gaLastSeenAt[ga] || '')).getTime() if (Number.isFinite(ts) && ts > 0) { lastSeenAtMs = Math.max(lastSeenAtMs, ts) if (ts >= activeCutoffMs) activeGaCount += 1 } if (gaLastPayload[ga] !== undefined) { pushUniqueValue(recentPayloads, `${ga}: ${compactPayloadForNodeLabel(gaLastPayload[ga], 22)}`, 4) } }) if (activeGaCount > 0) activeAreaCount += 1 return Object.assign({}, area, { activeGaCount, activityPct: area.gaCount > 0 ? roundTo((activeGaCount / area.gaCount) * 100, 1) : 0, lastSeenAt: lastSeenAtMs > 0 ? new Date(lastSeenAtMs).toISOString() : '', recentPayloads }) }) return { source: snapshot.source || 'none', generatedAt: new Date().toISOString(), totals: Object.assign({}, snapshot.totals || {}, { activeAreaCount }), suggested } } const buildAreasPromptContext = (areasSnapshot) => { const suggested = Array.isArray(areasSnapshot && areasSnapshot.suggested) ? areasSnapshot.suggested : [] if (!suggested.length) return '' const lines = suggested.slice(0, 12).map((area) => { const tags = Array.isArray(area.tags) && area.tags.length ? ` tags=${area.tags.join(',')}` : '' const activity = area.gaCount > 0 ? 
` active=${Number(area.activeGaCount || 0)}/${Number(area.gaCount || 0)}` : '' return `- ${area.path || area.name} [${area.kind}]${activity}${tags}` }) return [ 'Suggested installation areas derived from ETS hierarchy:', lines.join('\n') ].join('\n') } const ensureDirectorySync = (dirPath) => { const target = String(dirPath || '').trim() if (!target) return false try { fs.mkdirSync(target, { recursive: true }) return true } catch (error) { return false } } const readJsonFileSafe = (filePath, fallbackValue) => { try { if (!fs.existsSync(filePath)) return fallbackValue const raw = fs.readFileSync(filePath, 'utf8') if (!raw || String(raw).trim() === '') return fallbackValue return JSON.parse(raw) } catch (error) { return fallbackValue } } const normalizeAreaOverridePayload = (payload) => { const p = payload && typeof payload === 'object' ? payload : {} const normalized = {} if (Object.prototype.hasOwnProperty.call(p, 'name')) normalized.name = normalizeAreaText(p.name) if (Object.prototype.hasOwnProperty.call(p, 'description')) normalized.description = normalizeAreaText(p.description) if (Object.prototype.hasOwnProperty.call(p, 'deleted')) normalized.deleted = p.deleted === true if (Object.prototype.hasOwnProperty.call(p, 'tags')) { normalized.tags = Array.isArray(p.tags) ? Array.from(new Set(p.tags.map(tag => slugifyAreaText(tag)).filter(Boolean))).slice(0, 12) : [] } if (Object.prototype.hasOwnProperty.call(p, 'gaList')) { normalized.gaList = Array.isArray(p.gaList) ? Array.from(new Set(p.gaList.map(ga => normalizeAreaText(ga)).filter(Boolean))).slice(0, 5000) : [] } return normalized } const normalizeCustomAreaId = (value, fallback = '') => { const raw = normalizeAreaText(value || fallback) const slug = slugifyAreaText(raw) return slug ? `custom:${slug}` : '' } const applyAreaOverridesToSnapshot = ({ snapshot, overrides, gaCatalog }) => { const baseSnapshot = snapshot && typeof snapshot === 'object' ? 
snapshot : buildSuggestedAreasFromCsv([]) const rawOverrides = overrides && typeof overrides === 'object' ? overrides : {} const gaCatalogMap = new Map((Array.isArray(gaCatalog) ? gaCatalog : []).map(item => [String(item && item.ga ? item.ga : '').trim(), item])) const baseAreas = Array.isArray(baseSnapshot.suggested) ? baseSnapshot.suggested : [] const byId = new Map() baseAreas.forEach((area) => { const override = rawOverrides[area.id] && typeof rawOverrides[area.id] === 'object' ? normalizeAreaOverridePayload(rawOverrides[area.id]) : {} if (override.deleted === true) return byId.set(area.id, Object.assign({}, area, { customName: Object.prototype.hasOwnProperty.call(override, 'name') ? override.name : '', customDescription: Object.prototype.hasOwnProperty.call(override, 'description') ? override.description : '', customTags: Object.prototype.hasOwnProperty.call(override, 'tags') ? override.tags : null, customGaList: Object.prototype.hasOwnProperty.call(override, 'gaList') ? override.gaList : null, hasOverride: Object.keys(override).length > 0 })) }) Object.keys(rawOverrides).forEach((overrideId) => { if (byId.has(overrideId)) return const override = normalizeAreaOverridePayload(rawOverrides[overrideId]) if (override.deleted === true) return const customGaList = Array.isArray(override.gaList) ? override.gaList.filter(ga => gaCatalogMap.has(ga)) : [] const inferredTags = new Set(Array.isArray(override.tags) ? override.tags : []) const sampleLabels = [] const dptSet = new Set() customGaList.forEach((ga) => { const item = gaCatalogMap.get(ga) if (!item) return if (item.dpt) dptSet.add(item.dpt) pushUniqueValue(sampleLabels, item.label, 4) ; (Array.isArray(item.tags) ? item.tags : []).forEach(tag => inferredTags.add(tag)) }) const customName = normalizeAreaText(override.name || overrideId.replace(/^custom:/, '')) const isLlmGenerated = String(overrideId || '').startsWith('llm:') byId.set(overrideId, { id: overrideId, kind: isLlmGenerated ? 
'custom_llm' : 'custom_manual', name: customName, baseName: customName, parentId: '', parentName: '', baseParentName: '', path: customName, basePath: customName, gaCount: customGaList.length, dptCount: dptSet.size, gaList: customGaList, dptList: Array.from(dptSet.values()).sort(), tags: Array.from(inferredTags.values()).sort(), baseTags: Array.from(inferredTags.values()).sort(), sampleGAs: customGaList.slice(0, 6), sampleLabels, description: normalizeAreaText(override.description || `${customName} (${customGaList.length} GA)`), priority: 3, customName, customDescription: normalizeAreaText(override.description || ''), customTags: Array.isArray(override.tags) ? override.tags.slice(0, 12) : null, customGaList, hasOverride: true }) }) const resolveAreaName = (area) => normalizeAreaText((area && area.customName) || (area && area.baseName) || (area && area.name)) byId.forEach((area) => { const parentArea = area.parentId ? byId.get(area.parentId) : null const resolvedName = resolveAreaName(area) const resolvedParentName = parentArea ? resolveAreaName(parentArea) : normalizeAreaText(area.baseParentName || area.parentName) const resolvedPath = parentArea ? [normalizeAreaText(parentArea.path || parentArea.name), resolvedName].filter(Boolean).join(' / ') : resolvedName let tags = Array.isArray(area.customTags) ? area.customTags.slice(0, 12) : (Array.isArray(area.baseTags) ? area.baseTags.slice(0, 12) : []) let gaList = Array.isArray(area.gaList) ? area.gaList.slice() : [] let dptList = Array.isArray(area.dptList) ? area.dptList.slice() : [] let sampleGAs = Array.isArray(area.sampleGAs) ? area.sampleGAs.slice(0, 6) : [] let sampleLabels = Array.isArray(area.sampleLabels) ? 
area.sampleLabels.slice(0, 4) : [] if (Array.isArray(area.gaList) && Array.isArray(area.customGaList)) { const filtered = area.customGaList .filter(ga => gaCatalogMap.has(ga)) gaList = filtered const nextDptSet = new Set() const nextLabelSet = [] const inferredTags = new Set() filtered.forEach((ga) => { const item = gaCatalogMap.get(ga) if (!item) return if (item.dpt) nextDptSet.add(item.dpt) pushUniqueValue(nextLabelSet, item.label, 4) ; (Array.isArray(item.tags) ? item.tags : []).forEach(tag => inferredTags.add(tag)) }) dptList = Array.from(nextDptSet.values()).sort() sampleGAs = filtered.slice(0, 6) sampleLabels = nextLabelSet if (!Array.isArray(area.customTags)) tags = Array.from(inferredTags.values()).sort() } const gaCount = gaList.length const dptCount = dptList.length const description = area.customDescription !== '' ? area.customDescription : area.kind === 'secondary_group' ? `${resolvedParentName || 'ETS'} / ${resolvedName} (${gaCount} GA)` : `${resolvedName} (${gaCount} GA)` Object.assign(area, { name: resolvedName, parentName: resolvedParentName, path: resolvedPath, tags, description, gaList, dptList, gaCount, dptCount, sampleGAs, sampleLabels }) }) return Object.assign({}, baseSnapshot, { generatedAt: new Date().toISOString(), suggested: Array.from(byId.values()) }) } const DEFAULT_AREA_PROFILES = [ { id: 'area_diagnostic', builtIn: true, name: 'Control Area', description: 'General read-only diagnostic of the selected area based on ETS structure and current KNX activity.', minActivityPct: 20, maxSilentPct: 60, maxAnomalies: 2, targetTags: [] }, { id: 'lighting_area', builtIn: true, name: 'Lighting Area', description: 'Focus on lighting-oriented areas and highlight low activity or repeated anomalies.', minActivityPct: 15, maxSilentPct: 70, maxAnomalies: 1, targetTags: ['lighting'] }, { id: 'hvac_area', builtIn: true, name: 'HVAC Area', description: 'Focus on HVAC-oriented areas and check whether the related addresses are alive.', minActivityPct: 10, 
maxSilentPct: 80, maxAnomalies: 1, targetTags: ['hvac'] } ] const clampNumber = (value, { min = 0, max = 100, fallback = 0 } = {}) => { const n = Number(value) if (!Number.isFinite(n)) return fallback if (n < min) return min if (n > max) return max return n } const normalizeProfileText = (value, fallback = '') => normalizeAreaText(value || fallback) const normalizeAreaProfilePayload = (payload, fallbackId = '') => { const p = payload && typeof payload === 'object' ? payload : {} const name = normalizeProfileText(p.name, 'Custom Area Profile') const baseId = normalizeAreaText(p.id || fallbackId || name) return { id: slugifyAreaText(baseId), builtIn: false, name, description: normalizeProfileText(p.description), minActivityPct: clampNumber(p.minActivityPct, { min: 0, max: 100, fallback: 20 }), maxSilentPct: clampNumber(p.maxSilentPct, { min: 0, max: 100, fallback: 60 }), maxAnomalies: clampNumber(p.maxAnomalies, { min: 0, max: 999, fallback: 2 }), targetTags: Array.isArray(p.targetTags) ? Array.from(new Set(p.targetTags.map(tag => slugifyAreaText(tag)).filter(Boolean))).slice(0, 12) : [] } } const mergeAreaProfiles = ({ customProfiles }) => { const out = new Map() DEFAULT_AREA_PROFILES.forEach((profile) => { out.set(profile.id, Object.assign({}, profile)) }) ; (Array.isArray(customProfiles) ? customProfiles : []).forEach((profile, index) => { const normalized = normalizeAreaProfilePayload(profile, `custom-${index + 1}`) if (!normalized.id) return out.set(normalized.id, normalized) }) return Array.from(out.values()) } const severityRank = (status) => { const value = String(status || '').toLowerCase() if (value === 'fail') return 3 if (value === 'warn') return 2 if (value === 'pass') return 1 return 0 } const buildAreaProfileReport = ({ area, profile, summary, anomalies, generatedAt }) => { const safeArea = area && typeof area === 'object' ? area : {} const safeProfile = profile && typeof profile === 'object' ? 
profile : {} const safeSummary = summary && typeof summary === 'object' ? summary : {} const gaList = Array.isArray(safeArea.gaList) ? safeArea.gaList.slice() : [] const gaSet = new Set(gaList.map(ga => String(ga || '').trim()).filter(Boolean)) const gaLastSeenAt = safeSummary && typeof safeSummary.gaLastSeenAt === 'object' ? safeSummary.gaLastSeenAt : {} const gaLastPayload = safeSummary && typeof safeSummary.gaLastPayload === 'object' ? safeSummary.gaLastPayload : {} const analysisWindowSec = Math.max(30, Number(safeSummary && safeSummary.meta && safeSummary.meta.analysisWindowSec) || 0) const activeCutoffMs = nowMs() - (analysisWindowSec * 1000) const activeGAs = [] const silentGAs = [] gaList.forEach((ga) => { const ts = new Date(String(gaLastSeenAt[ga] || '')).getTime() if (Number.isFinite(ts) && ts > 0 && ts >= activeCutoffMs) { activeGAs.push(ga) } else { silentGAs.push(ga) } }) const relevantAnomalies = (Array.isArray(anomalies) ? anomalies : []) .filter((entry) => { const ga = String(entry && entry.payload && entry.payload.ga ? entry.payload.ga : '').trim() return ga && gaSet.has(ga) }) .slice(-50) .reverse() const totalGAs = gaList.length const activeGaCount = activeGAs.length const silentGaCount = silentGAs.length const activityPct = totalGAs > 0 ? roundTo((activeGaCount / totalGAs) * 100, 1) : 0 const silentPct = totalGAs > 0 ? roundTo((silentGaCount / totalGAs) * 100, 1) : 0 const tagMismatch = Array.isArray(safeProfile.targetTags) && safeProfile.targetTags.length > 0 ? !safeProfile.targetTags.some(tag => Array.isArray(safeArea.tags) && safeArea.tags.includes(tag)) : false const checks = [ { id: 'scope_match', title: 'Profile scope alignment', status: tagMismatch ? 'warn' : 'pass', message: tagMismatch ? `Area tags ${Array.isArray(safeArea.tags) ? safeArea.tags.join(', ') : 'n/a'} do not match profile focus ${safeProfile.targetTags.join(', ')}.` : 'Area tags are compatible with the selected profile.', metrics: { areaTags: Array.isArray(safeArea.tags) ? 
safeArea.tags : [], targetTags: Array.isArray(safeProfile.targetTags) ? safeProfile.targetTags : [] } }, { id: 'activity', title: 'Area activity', status: activityPct >= Number(safeProfile.minActivityPct || 0) ? 'pass' : (activityPct > 0 ? 'warn' : 'fail'), message: `${activeGaCount}/${totalGAs} GA active in the last ${analysisWindowSec}s.`, metrics: { activeGaCount, totalGAs, activityPct, minActivityPct: Number(safeProfile.minActivityPct || 0) } }, { id: 'silence', title: 'Silent addresses', status: silentPct <= Number(safeProfile.maxSilentPct || 100) ? 'pass' : (silentPct < 100 ? 'warn' : 'fail'), message: `${silentGaCount}/${totalGAs} GA silent in the current analysis window.`, metrics: { silentGaCount, totalGAs, silentPct, maxSilentPct: Number(safeProfile.maxSilentPct || 0) }, sample: silentGAs.slice(0, 10).map(ga => ({ ga, lastPayload: gaLastPayload[ga] || '' })) }, { id: 'anomalies', title: 'Recent anomalies in area', status: relevantAnomalies.length <= Number(safeProfile.maxAnomalies || 0) ? 'pass' : 'warn', message: `${relevantAnomalies.length} recent anomalies match the selected area.`, metrics: { anomalyCount: relevantAnomalies.length, maxAnomalies: Number(safeProfile.maxAnomalies || 0) } } ] const suggestions = [] if (tagMismatch) suggestions.push('Check whether the selected profile is appropriate for this area or add matching tags.') if (activityPct < Number(safeProfile.minActivityPct || 0)) suggestions.push('Run a guided verification on the area or trigger live activity before diagnosing.') if (silentGaCount > 0) suggestions.push('Inspect the silent GA list first: they are the best candidates for missing feedback or dormant devices.') if (relevantAnomalies.length > Number(safeProfile.maxAnomalies || 0)) suggestions.push('Open the anomaly list for this area and correlate the failing GA with the ETS object names.') if (!suggestions.length) suggestions.push('Area looks consistent in read-only mode. 
Continue with a focused active test only if the issue is still reproducible.') const overallStatus = checks .map(check => check.status) .sort((a, b) => severityRank(b) - severityRank(a))[0] || 'pass' return { id: `${safeProfile.id || 'profile'}:${safeArea.id || 'area'}:${Date.now()}`, generatedAt: generatedAt || new Date().toISOString(), mode: 'read_only', overallStatus, source: { type: 'profile', profileId: safeProfile.id || '', areaId: safeArea.id || '' }, profile: { id: safeProfile.id || '', name: safeProfile.name || '', description: safeProfile.description || '', builtIn: safeProfile.builtIn === true }, area: { id: safeArea.id || '', name: safeArea.name || '', path: safeArea.path || safeArea.name || '', tags: Array.isArray(safeArea.tags) ? safeArea.tags : [] }, metrics: { totalGAs, activeGaCount, silentGaCount, activityPct, silentPct, anomalyCount: relevantAnomalies.length, analysisWindowSec }, checks, suggestions, anomalyHighlights: relevantAnomalies.slice(0, 8).map((entry) => ({ at: entry.at || '', type: entry && entry.payload ? entry.payload.type : '', ga: entry && entry.payload ? entry.payload.ga : '', payload: entry && entry.payload ? entry.payload : {} })) } } const parseActuatorPayloadInput = (value) => { if (value === undefined || value === null) return '' if (typeof value !== 'string') return value const raw = value.trim() if (raw === '') return '' if (/^(true|false)$/i.test(raw)) return raw.toLowerCase() === 'true' if (/^[+-]?\d+(?:\.\d+)?$/.test(raw)) return Number(raw) if ((raw.startsWith('{') && raw.endsWith('}')) || (raw.startsWith('[') && raw.endsWith(']'))) { try { return JSON.parse(raw) } catch (error) { return raw } } return raw } const normalizeActuatorTestPresetPayload = (payload, fallbackId = '') => { const p = payload && typeof payload === 'object' ? 
  payload : {}
  const name = normalizeProfileText(p.name, 'Actuator Test')
  const baseId = normalizeAreaText(p.id || fallbackId || name)
  // Legacy single `timeoutMs` acts as the shared default for both specific timeouts below.
  const sharedTimeout = clampNumber(p.timeoutMs, { min: 500, max: 60000, fallback: 5000 })
  return {
    id: slugifyAreaText(baseId),
    name,
    description: normalizeProfileText(p.description),
    commandGA: normalizeAreaText(p.commandGA),
    commandDPT: normalizeAreaText(p.commandDPT),
    // Non-string payloads are serialized so the preset stores a plain string.
    commandPayload: typeof p.commandPayload === 'string' ? p.commandPayload : safeStringify(p.commandPayload),
    statusGA: normalizeAreaText(p.statusGA),
    statusDPT: normalizeAreaText(p.statusDPT),
    // Specific timeouts win over the legacy p.timeoutMs; both clamped to 500ms..60s.
    statusWriteTimeoutMs: clampNumber(p.statusWriteTimeoutMs !== undefined ? p.statusWriteTimeoutMs : p.timeoutMs, { min: 500, max: 60000, fallback: sharedTimeout }),
    statusResponseTimeoutMs: clampNumber(p.statusResponseTimeoutMs !== undefined ? p.statusResponseTimeoutMs : p.timeoutMs, { min: 500, max: 60000, fallback: sharedTimeout })
  }
}

// Normalize the stored custom presets and drop any preset missing the minimum
// required fields (id, command GA and command DPT). Unlike area profiles there
// are no built-in presets to merge with.
const mergeActuatorTestPresets = ({ customPresets }) => {
  return (Array.isArray(customPresets) ?
customPresets : [])
    .map((preset, index) => normalizeActuatorTestPresetPayload(preset, `actuator-${index + 1}`))
    .filter(preset => preset.id && preset.commandGA && preset.commandDPT)
}

// Keyword heuristics used to classify GA labels by role. The word lists mix
// English and Italian terms (e.g. "stato", "luce", "tapparella"), matching the
// ETS naming conventions this module expects to encounter.
const SIGNAL_STATUS_RE = /\b(status|state|feedback|fb|stato|riscontro|indicazione|actual|actual value|current state)\b/i
const SIGNAL_COMMAND_RE = /\b(command|cmd|switch|control|set|setpoint|on\/off|dim|dimmer|move|step|up|down|open|close|toggle|scene|comando|attiva|attivazione|start|stop)\b/i
const SIGNAL_SENSOR_RE = /\b(sensor|misura|measure|actual|temperature|temperatura|humidity|umidita|lux|brightness|illuminance|co2|meter|energy|power|consumption|wind|rain|anemometer|counter)\b/i
// Category rules map a matching pattern to a functional domain id.
const SIGNAL_CATEGORY_RULES = [
  { id: 'lighting', pattern: /\b(light|lights|lighting|luce|luci|lamp|dimmer|dim)\b/i },
  { id: 'hvac', pattern: /\b(hvac|clima|climate|fan\s?coil|fancoil|heating|cooling|thermo|temp|temperature|setpoint|mode)\b/i },
  { id: 'shading', pattern: /\b(blind|blinds|shutter|shutters|jalousie|curtain|curtains|tapparella|tapparelle|venetian)\b/i },
  { id: 'access', pattern: /\b(door|doors|window|windows|lock|unlock|badge|porta|porte|finestra|finestre|serratura)\b/i },
  { id: 'scene', pattern: /\b(scene|scenario|scena)\b/i }
]
// Lowercased, accent-stripped form of a label so the regexes above match
// accented Italian text (NFKD decomposition + removal of combining marks).
const normalizeSignalText = (value) => normalizeAreaText(value)
  .normalize('NFKD')
  .replace(/[\u0300-\u036f]/g, '')
  .toLowerCase()
const ACTION_PATTERN_GROUPS = [ { type: 'on',