entodicton
Version:
1,303 lines (1,218 loc) • 82.9 kB
JavaScript
const { Semantics, Semantic } = require('./src/semantics')
const { Generators, Generator } = require('./src/generators')
const { Config } = require('./src/config')
const { loadInstance, setupArgs, gs, processContext, getObjects, setupProcessB, processContextsB } = require('./src/configHelpers')
const Digraph = require('./src/digraph')
const { project } = require('./src/project')
const { project:project2 } = require('./src/project2')
const fetch = require('node-fetch')
const base64 = require('base-64')
const deepEqual = require('deep-equal')
const _ = require('lodash')
const stringify = require('json-stable-stringify')
const Lines = require('./lines')
const flattens = require('./src/flatten')
const { appendNoDups, updateQueries, safeNoDups, stableId, where, suggestAssociationsFix, suggestAssociationsFixFromSummaries, validProps } = require('./src/helpers')
const runtime = require('./runtime')
const sortJson = runtime.sortJson
const debug = require('./src/debug')
// Resolve the object checks declared in a test config.
// When the checks are an array, they are partitioned per knowledge module:
// km-less checks belong to the module under test, and km-tagged entries pull
// that KM's own object checks from its test config. A non-array value is
// returned unchanged.
const getConfig_getObjectsCheck = (config, testConfig) => {
  // The module under test defaults to the config's own name unless the test
  // config names another module.
  const moduleName = testConfig.testModuleName || config.name
  const checks = (testConfig.checks && testConfig.checks.objects) || []
  if (!Array.isArray(checks)) {
    return checks
  }
  const kmToChecks = {}
  kmToChecks[moduleName] = checks.filter((check) => !check.km)
  for (const check of checks) {
    if (check.km) {
      kmToChecks[check.km] = config.km(check.km).testConfig.checks.objects
    }
  }
  return kmToChecks
}
// Reduce a diff to the list of markers of its elements.
const getSuggestion = (diff) => diff.map(({ marker }) => marker)
// Format a human-readable hint telling the user how to fix a broken
// association, based on the markers extracted from the diff.
// Fix: corrected the typo "interation" -> "iteration" in the hint text.
const getSuggestionMessage = (suggestion) => {
  return `Try adding this to the associations: { context: ${JSON.stringify(getSuggestion(suggestion))}, choose: <indexOfMainElement> },\n If that does not work look at the logs and check when the operators become wrong during an iteration. Deduce the change based on the previous iteration and what operator was applied.`
}
// Pull the context checks out of a test config, defaulting to an empty list.
const getConfig_getContextCheck = (testConfig) => {
  const checks = testConfig.checks || {}
  return checks.context || []
}
// Curried projector: given the context checks, reduce a context down to just
// the checked values (delegates to project2).
const pickContext = (contextChecks) => (context) => project2(context, contextChecks)
// Build, per knowledge module, a projection of that module's objects
// restricted to the values named by the test config's object checks.
// Throws when a check references a KM whose objects cannot be resolved.
const pickObjects = (config, testConfig, getObjects) => {
  const checks = getConfig_getObjectsCheck(config, testConfig)
  const projection = {}
  for (const km of Object.keys(checks)) {
    const objects = getObjects(km)
    if (!objects) {
      throw new Error(`In the checks for ${config.name} the KM ${km} does not exist`)
    }
    const kmChecks = checks[km]
    // Checks carrying match/apply pairs need the richer project2; plain
    // property lists go through project.
    const hasMatchApply = kmChecks && kmChecks.find((check) => check.match && check.apply)
    projection[km] = hasMatchApply ? project2(objects, kmChecks) : project(objects, kmChecks)
  }
  return projection
}
// Deep structural equality via canonical (deep key-sorted) JSON serialization.
const sameJSON = (json1, json2) => {
  const canonical = (json) => JSON.stringify(sortJson(json, { depth: 25 }))
  return canonical(json1) === canonical(json2)
}
// Write both sides of a comparison as deep-sorted JSON (so the diff is
// stable), then open the pair in the user's editor ($EDITOR, default
// vimdiff), blocking until the editor exits.
const vimdiff = (actualJSON, expectedJSON, title) => {
  const path = '.'
  const writeSide = (label, json) => {
    const sorted = sortJson(json, { depth: 25 })
    runtime.fs.writeFileSync(`${path}/${label}.json`, JSON.stringify({ title, [label]: sorted }, 0, 2))
  }
  writeSide('actual', actualJSON)
  writeSide('expected', expectedJSON)
  const editor = runtime.process.env.EDITOR || 'vimdiff'
  console.log(`${editor} ${path}/expected.json ${path}/actual.json`)
  runtime.child_process.execSync(`${editor} ${path}/expected.json ${path}/actual.json`, { stdio: 'inherit' })
}
// True when the two values serialize identically after deep key sorting.
const matching = (actual, expected) => {
  const canon = (value) => stringify(sortJson(value, { depth: 25 }))
  return deepEqual(canon(actual), canon(expected))
}
// Compare the operator choices of a passing run (`right`) against a failing
// run (`wrong`). At the first step where the chosen op differs, return a
// suggested priority ordering: every other candidate op at that step,
// followed by the op the passing run chose (so it wins last). Returns []
// when either side lacks opChoices or no divergence is found.
const analyzeMetaData = (right, wrong) => {
  const rChoices = right.opChoices
  const wChoices = wrong.opChoices
  if (!rChoices || !wChoices) {
    return []
  }
  const sameOp = (a, b) => a[0] === b[0] && a[1] === b[1]
  const limit = Math.min(rChoices.length, wChoices.length)
  for (let i = 0; i < limit; ++i) {
    const rOp = rChoices[i].op
    if (!sameOp(rOp, wChoices[i].op)) {
      const others = rChoices[i].ops.filter((op) => !sameOp(op, rOp))
      return [others.concat([rOp])]
    }
  }
  return []
}
// Run processContext over each context sequentially (order matters — each
// call may append to the shared logs), collecting the rewritten contexts
// and the generated strings.
const processContexts = async (contexts, params) => {
  const logs = []
  const contextsPrime = []
  const generated = []
  for (const context of contexts) {
    const result = await processContext(context, { ...params, logs })
    contextsPrime.push(result.context)
    generated.push(result.generated)
  }
  return { contexts: contextsPrime, generated, logs }
}
// Identity pass-through for objects being saved/compared in tests.
// The original uuid -> sequential-counter renumbering of objects.namespaced
// (meant to make keys stable across runs) was dead code behind `if (true)`,
// so this has always returned its input unchanged; the unreachable branch is
// removed. Kept as a function so existing call sites retain the hook.
const convertToStable = (objects) => objects
// Persist the test list to `fn` as stable JSON. Before writing, each test's
// op choices (and each choice's candidate list) are sorted in place so the
// on-disk representation does not churn between runs.
const writeTestFile = (fn, tests) => {
  for (const test of tests) {
    for (const opChoice of test.metadata.opChoices) {
      opChoice.ops.sort()
    }
    test.metadata.opChoices.sort()
  }
  runtime.fs.writeFileSync(fn, stringify(tests, { space: 2 }), { encoding: 'utf8', flag: 'w+' })
}
// Record a single test case into the test file `fn`: loads any existing
// tests, sorts the associations for stable output, builds the sorted result
// record, then replaces every existing entry for this query (or appends a
// new one) and rewrites the file.
const writeTest = (fn, query, objects, generated, paraphrases, responses, contexts, associations, metadata, config, saveDeveloper, paraphrasesParenthesized, generatedParenthesized, summaries) => {
  let tests = []
  if (runtime.fs.existsSync(fn)) {
    tests = JSON.parse(runtime.fs.readFileSync(fn))
  }
  // Sort each association and the list itself so output order is stable.
  for (const association of associations) {
    association.sort()
  }
  associations.sort()
  const results = sortJson({
    query,
    summaries,
    paraphrases,
    responses,
    contexts,
    objects: convertToStable(objects),
    associations,
    metadata,
    config,
    developerTest: saveDeveloper,
    paraphrasesParenthesized,
    generatedParenthesized
  }, { depth: 25 })
  // Replace all entries for this query in place; append if none matched.
  let wasSet = false
  tests = tests.map((test) => {
    if (test.query === query) {
      wasSet = true
      return results
    }
    return test
  })
  if (!wasSet) {
    tests.push(results)
  }
  writeTestFile(fn, tests)
}
// Return the smallest range covering both r1 and r2: the minimum of the two
// starts through the maximum of the two ends.
// Fix: the original's `r1.end > r2.end` branch assigned `end = r2.end`,
// a no-op that discarded the larger r1.end, so the combined range could be
// truncated to r2's end. It now correctly takes r1.end.
const combineRange = (r1, r2) => {
  const start = r1.start < r2.start ? r1.start : r2.start
  const end = r1.end > r2.end ? r1.end : r2.end
  return { start, end }
}
// True when r1 overlaps the context's range; a context with no range matches
// everything. Two inclusive ranges overlap iff each starts at or before the
// other ends.
// Fix: the original only tested whether one of r1's endpoints fell inside
// r2, so it wrongly returned false when r1 strictly contained r2
// (e.g. r1 = {0,10}, r2 = {3,5}).
const overlaps = (r1, context) => {
  if (!context.range) {
    return true
  }
  const r2 = context.range
  return r1.start <= r2.end && r2.start <= r1.end
}
// POST `data` to the server's /process endpoint, retrying up to `n` times
// on a 504 gateway timeout. Any other non-OK status throws immediately.
// NOTE(review): if every attempt returns 504 the loop falls through and the
// function resolves to undefined — presumably a throw was intended; confirm.
// NOTE(review): the inner `if (n === 0)` can never be true once the loop
// body runs (the loop requires n > 0) — it looks like it meant to detect the
// last attempt (i === n - 1); confirm.
const doWithRetries = async (n, url, queryParams, data) => {
  if (!queryParams) {
    queryParams = ''
  }
  for (let i = 0; i < n; ++i) {
    const result = await fetch(`${url}/process${queryParams}`, {
      method: 'POST',
      body: JSON.stringify(data),
      timeout: 1000 * 60 * 5, // it does not respect this timeout so that's why I have the retries
      headers: {
        mode: 'no-cors',
        'Content-Type': 'application/json'
      }
    })
    if (result.ok) {
      // Round-trips through JSON, detaching the return value from the
      // fetch response body object.
      return JSON.parse(JSON.stringify(await result.json()))
    }
    if (result.status === 504) {
      if (n === 0) {
        throw new Error(`Error ${result.status} - ${result.statusText}`)
      } else {
        continue
      }
    }
    if (result.status >= 500 && result.status < 600) {
      throw new Error(`Error ${result.status} - ${result.statusText}.`)
    } if (result.status >= 404) {
      // NOTE(review): `} if` (not `else if`) above — this branch is only
      // reachable when the 5xx branch did not throw, so in practice this
      // covers statuses 404-499.
      throw new Error(`Error ${result.status} - ${result.statusText} - Trying it connect to ${url}`)
    } else {
      throw new Error(`Error ${result.status} - ${result.statusText}`)
    }
  }
}
// Default error handler: propagate the error unchanged.
const throwErrorHandler = (err) => {
  throw err
}
// Send `query` (which may contain several utterances separated by the
// two-character sequence "\n") to the processing server and merge the
// per-utterance results into a single accumulated response object.
// Options:
//   credentials         - { server, key } to repoint the config's server
//   initializer         - forwarded to setupProcessB
//   commandLineArgs     - may carry checkForLoop, copied into the request
//   writeTests / testsFN / testConfig / saveDeveloper - when writeTests is
//     true, the accumulated response is recorded to testsFN via writeTest
//   isTest / isProcess / isModule - mode flags forwarded to processContextsB
//   rebuildingTemplate  - template-rebuild mode; relaxes some server checks
//   errorHandler        - receives any thrown error (default: rethrow)
// Returns the accumulated response, or undefined if errorHandler swallows
// the error.
const _process = async (config, query, { initializer, commandLineArgs, credentials, writeTests, isProcess, isModule, isTest, saveDeveloper, rebuildingTemplate, testConfig, testsFN, errorHandler = throwErrorHandler } = {}) => {
  if (credentials) {
    config.server(credentials.server, credentials.key)
  }
  const url = config._server
  const queryParams = config._queryParams || ''
  const retries = 2
  writeTests = !!writeTests
  // ensure same start state
  try {
    if (writeTests) {
      await config.rebuild()
      // NOTE(review): `objects` is never used — confirm whether this
      // getObjects call is needed for a side effect or is dead code.
      const objects = getObjects(config.config.objects)(config.uuid)
    }
  } catch (error) {
    throw error
  }
  // Build the request payload; allowDelta lets the server use its cache of
  // this config and receive only the delta.
  let { data, /* generators, semantics, */ hierarchy } = setupProcessB({ config, initializer, allowDelta: true, rebuildingTemplate })
  if (commandLineArgs && commandLineArgs.checkForLoop) {
    data.checkForLoop = commandLineArgs.checkForLoop
  }
  if (rebuildingTemplate) {
    data.errors_ignore_contextual_priorities_non_existant_ops = true
  }
  // Note: splits on a literal backslash-n, not on newline characters.
  let queries = query.split('\\n')
  const summaries = [] // for error
  try {
    // Accumulator merged across all utterances in `queries`.
    const response = {
      hierarchy: [],
      load_cache_time: 0.0,
      logs: [],
      metadata: {
        opChoices: []
      },
      times: 0.0,
      clientSideTimes: 0.0,
      trace: '',
      contexts: [],
      generated: [],
      paraphrases: [],
      paraphrasesParenthesized: [],
      generatedParenthesized: [],
      responses: [],
      associations: [],
      summaries: []
    }
    // Server-side counter carried across utterances so later utterances
    // continue numbering where the previous one stopped.
    let startCounter = 0
    while (true) {
      if (queries.length === 0) {
        break
      }
      config.updateData(data)
      data.utterance = queries[0]
      data.start_counter = startCounter
      let json = await doWithRetries(retries, url, queryParams, data)
      let resetData = false
      // Server lost its cached config: resend the full (non-delta) config
      // and retry this utterance once.
      if (json.code === 'NOT_IN_CACHE') {
        resetData = true
        const setupB = setupProcessB({ config, allowDelta: false })
        data = setupB.data
        hierarchy = setupB.hierarchy
        data.utterance = queries[0]
        json = await doWithRetries(retries, url, queryParams, data)
      }
      if (json.cacheKey) {
        config.cacheKey = json.cacheKey
        // After a full resend, switch back to delta mode for any remaining
        // utterances now that the server has the config cached again.
        if (resetData) {
          if (queries.length > 1) {
            const setupB = setupProcessB({ config, allowDelta: true })
            data = setupB.data
            hierarchy = setupB.hierarchy
          }
        }
      }
      // Normalize the server payload: results -> contexts.
      json.contexts = json.results
      startCounter = json.end_counter + 1
      delete json.results
      if (json.status !== 200) {
        throw json
      } else {
        let clientSideTime
        let start
        if (isTest) {
          start = runtime.performance.performance.now()
        }
        const summary = { summaries: json.summaries, length: json.contexts.length }
        summaries.push(summary)
        // Run the client-side semantics/generators over the returned contexts.
        const { contextsPrime, generatedPrime, paraphrasesPrime, paraphrasesParenthesizedPrime, generatedParenthesizedPrime, responsesPrime } =
                await processContextsB({ isTest, isProcess, isModule, rebuildingTemplate, config, hierarchy, json, commandLineArgs /*, generators, semantics */ })
        if (isTest) {
          const end = runtime.performance.performance.now()
          clientSideTime = end - start
        }
        // Merge this utterance's results into the accumulated response.
        response.associations = json.associations
        response.learned_contextual_priorities = json.learned_contextual_priorities
        response.hierarchy = json.hierarchy
        response.load_cache_time += json.load_cache_time
        appendNoDups(response.logs, json.logs)
        response.memory_free_percent = json.memory_free_percent
        // appendNoDups(response.metadata.associations, json.metadata.associations)
        // appendNoDups(response.metadata.priorities, json.metadata.priorities)
        appendNoDups(response.metadata.opChoices, json.metadata.opChoices)
        response.times += json.times
        // NOTE(review): when isTest is false clientSideTime is undefined, so
        // this accumulates NaN — confirm whether that is acceptable.
        response.clientSideTimes += clientSideTime
        response.trace = response.trace.concat(json.trace)
        response.version = json.version
        response.explain_priorities = json.explain_priorities
        response.contextual_priorities_ambiguities = json.contextual_priorities_ambiguities
        response.rtf_associations = json.rtf_associations
        response.contexts = response.contexts.concat(contextsPrime)
        response.generated = response.generated.concat(generatedPrime)
        response.paraphrases = response.paraphrases.concat(paraphrasesPrime)
        response.paraphrasesParenthesized = response.paraphrasesParenthesized.concat(paraphrasesParenthesizedPrime)
        response.generatedParenthesized = response.generatedParenthesized.concat(generatedParenthesizedPrime)
        response.responses = response.responses.concat(responsesPrime)
        response.summaries.push(summary)
        queries = queries.slice(1)
      }
    }
    // Optionally record the whole run as a saved test, including a
    // name -> uuid map for the loaded KMs.
    if (writeTests) {
      const actualConfig = getConfigForTest(config, testConfig)
      const saveObjects = { ...config.config.objects }
      saveObjects.nameToUUID = {}
      for (const km of config.configs) {
        saveObjects.nameToUUID[km.name] = km.uuid
      }
      writeTest(testsFN, query, saveObjects, response.generated, response.paraphrases, response.responses, response.contexts, response.associations, response.metadata, actualConfig, saveDeveloper, response.paraphrasesParenthesized, response.generatedParenthesized, response.summaries)
    }
    return response
  } catch (error) {
    // Attach context for the handler; the default handler rethrows.
    error.summaries = summaries
    error.query = query
    errorHandler(error)
  }
}
// Extract the slice of the config named by testConfig.include (or .includes)
// so saved tests record only the relevant parts. For words/operators/bridges
// an array value acts as a whitelist (words by literal key, operators by
// pattern, bridges by id); uuids are blanked so saved configs are stable
// across rebuilds. Any other include key is copied through verbatim.
// Returns {} when nothing is included.
const getConfigForTest = (config, testConfig) => {
  const includes = testConfig.include || testConfig.includes
  if (!includes) {
    return {}
  }
  // uuids are instance-specific; blank them for stable comparisons.
  const stripUUID = (value) => Object.assign({}, value, { uuid: undefined })
  const configForTest = {}
  for (const key of Object.keys(includes)) {
    if (key === 'words') {
      const includesWord = Array.isArray(includes.words)
        ? (word) => includes.words.includes(word)
        : () => true
      // TODO handle the uuids the right way
      const literals = config.config.words.literals
      const literalsOut = {}
      for (const word in literals) {
        if (includesWord(word)) {
          literalsOut[word] = literals[word].map(stripUUID)
        }
      }
      configForTest.words = {
        literals: literalsOut,
        patterns: (config.config.words.patterns || []).map(stripUUID),
        hierarchy: (config.config.words.hierarchy || []).map(stripUUID)
      }
    } else if (key === 'operators') {
      const include = Array.isArray(includes.operators)
        ? (operator) => includes.operators.includes(operator.pattern)
        : () => true
      configForTest.operators = config.config.operators.filter(include).map(stripUUID)
    } else if (key === 'bridges') {
      const include = Array.isArray(includes.bridges)
        ? (bridge) => includes.bridges.includes(bridge.id)
        : () => true
      configForTest.bridges = config.config.bridges.filter(include).map(stripUUID)
    } else {
      configForTest[key] = config.config[key]
    }
  }
  return configForTest
}
// Run one saved test case (`expected`) against a freshly rebuilt config and
// compare every recorded artifact: paraphrases, responses, contexts,
// objects, the recorded config slice, and the "checked" projections.
// Returns undefined on success (or when filtered out by args.query); returns
// an { utterance, expected, actual } report on mismatch; rethrows processing
// errors after attaching priority hints derived from the op-choice metadata.
const runTest = async (config, expected, { args, verbose, testConfig, debug, timings = { server: 0, client: 0 } }) => {
  const test = expected.query
  if (args.query && args.query != test) {
    // no run this
    return
  }
  // initialize in between test so state is not preserved since the test was adding without state
  await config.rebuild()
  // Enrich failures with a hint about where op choices first diverge from
  // the expected metadata, then delegate to the default handler.
  const errorHandler = (error) => {
    if (error.metadata) {
      const priorities = analyzeMetaData(expected.metadata, error.metadata)
      if (priorities.length > 0) {
        const log = `Hint, if the results are flakey try adding the specified priorities ${JSON.stringify(priorities)}`
        error.logs.push(log)
      }
    }
    defaultErrorHandler(error)
  }
  // NOTE(review): `objects` is never used — confirm whether this call is
  // needed for a side effect or is dead code.
  const objects = getObjects(config.config.objects)(config.uuid)
  try {
    if (testConfig.initializer) {
      // NOTE(review): this local `args` shadows the destructured `args`
      // parameter above — confirm that is intended.
      const args = {}
      setupArgs(args, config)
      await testConfig.initializer(args)
    }
    const result = await _process(config, test, { errorHandler, isTest: true, isProcess: true, isModule: false })
    result.query = test
    if (debug) {
      defaultInnerProcess(config, errorHandler, result)
    }
    if (verbose) {
      // Log the query alongside its server/client timings and accumulate
      // the totals into the caller-supplied `timings`.
      const widths = [100, 60]
      const lines = new Lines(widths)
      lines.setElement(0, 0, test)
      lines.setElement(0, 1, `time on server: ${result.times.toFixed(2)} client: ${(result.clientSideTimes / 1000).toFixed(2)}`)
      timings.server += result.times
      timings.client += result.clientSideTimes / 1000
      lines.log()
    }
    // nameToUUID is instance-specific, so it is excluded from comparison.
    const expectedObjects = sortJson(convertToStable(expected.objects), { depth: 25 })
    delete expectedObjects.nameToUUID
    const actualObjects = sortJson(convertToStable(config.config.objects), { depth: 25 })
    const failedParaphrases = !matching(result.paraphrases, expected.paraphrases)
    let failedParaphrasesParenthesized = !matching(result.paraphrasesParenthesized, expected.paraphrasesParenthesized)
    let failed_generatedParenthesized = !matching(result.generatedParenthesized, expected.generatedParenthesized)
    // TODO fix the naming conventions: camelcase + use actual instead of result
    const failedResponses = !matching(result.responses, expected.responses)
    const failedContexts = !matching(result.contexts, expected.contexts)
    const failedObjects = !matching(actualObjects, expectedObjects)
    if (args.testNoParenthesized) {
      failedParaphrasesParenthesized = false
      failed_generatedParenthesized = false
    }
    // Project contexts and objects down to only the checked values before
    // diffing, so tests can opt in to comparing a subset.
    const contextChecks = config.getContextChecks()
    const pickedResultContexts = result.contexts.map(pickContext(contextChecks))
    const pickedExpectedContexts = expected.contexts.map(pickContext(contextChecks))
    const failedCheckedContexts = !matching(pickedResultContexts, pickedExpectedContexts)
    // Resolve a KM's objects from the saved test data via its recorded
    // name -> uuid map; defaults to the top-level config's KM.
    const expectedGetObjects = (name) => {
      if (!name) {
        name = config.name
      }
      return expected.objects.namespaced[expected.objects.nameToUUID[name]] || {}
    }
    const expectedChecked = sortJson(pickObjects(config, testConfig, expectedGetObjects), { depth: 25 })
    // Resolve a KM's objects from the live config by name.
    const actualGetObjects = (name) => {
      if (!name) {
        name = config.name
      }
      const km = config.configs.find((km) => km.name === name)
      return config.config.objects.namespaced[km.uuid] || {}
    }
    const actualChecked = sortJson(pickObjects(config, testConfig, actualGetObjects), { depth: 25 })
    // NOTE(review): failedChecked and failedChecks both repeat the
    // failedObjects comparison (actualObjects vs expectedObjects) — they
    // look like they were meant to compare the checked projections; confirm.
    const failedChecked = !matching(actualObjects, expectedObjects)
    const failedChecks = !matching(actualObjects, expectedObjects)
    const failedChecked_objects = !matching(actualChecked, expectedChecked)
    const actualConfig = sortJson(convertToStable(getConfigForTest(config, testConfig)), { depth: 25 })
    const expectedConfig = sortJson(convertToStable(expected.config), { depth: 25 })
    const failedConfig = !matching(actualConfig, expectedConfig)
    const failed = failedChecked_objects || failedParaphrases || failedParaphrasesParenthesized || failed_generatedParenthesized || failedResponses || failedContexts || failedObjects || failedConfig || failedChecked || failedCheckedContexts
    // On failure, attach the same flakiness hint the error path would get.
    if (expected.metadata && result.metadata && failed) {
      const priorities = analyzeMetaData(expected.metadata, result.metadata)
      if (priorities.length > 0) {
        const log = `Hint, if the results are flakey try adding the specified priorities ${JSON.stringify(priorities)}`
        result.logs.push(log)
      }
    }
    if (failed) {
      return {
        utterance: test,
        expected: {
          responses: expected.responses,
          paraphrases: expected.paraphrases,
          paraphrasesParenthesized: expected.paraphrasesParenthesized,
          generatedParenthesized: expected.generatedParenthesized,
          results: expected.contexts,
          checked: expectedChecked,
          checkedContexts: pickedExpectedContexts,
          objects: expectedObjects,
          config: expected.config,
          summaries: expected.summaries
        },
        actual: {
          responses: result.responses,
          paraphrases: result.paraphrases,
          paraphrasesParenthesized: result.paraphrasesParenthesized,
          generatedParenthesized: result.generatedParenthesized,
          results: result.contexts,
          checked: actualChecked,
          checkedContexts: pickedResultContexts,
          objects: actualObjects,
          config: actualConfig,
          summaries: result.summaries
        }
      }
    }
  } catch (error) {
    if (verbose) {
      console.log(test)
    }
    if (error.metadata) {
      const priorities = analyzeMetaData(expected.metadata, error.metadata)
      if (priorities.length > 0) {
        const log = `Hint, if the results are flakey try adding the specified priorities ${JSON.stringify(priorities)}`
        error.logs.push(log)
      }
    }
    throw error
  }
}
// Run each pending test in order, collecting failure reports into `failed`.
// Consumes `tests` destructively via shift (callers pass a copy). When
// stopAtFirstError is set, returns as soon as the first failure is recorded.
const runTestsHelper = async (config, tests, failed, juicyBits) => {
  const { stopAtFirstError } = juicyBits
  while (tests.length > 0) {
    const test = tests.shift()
    const result = await runTest(config, test, juicyBits)
    if (result != null) {
      failed.push(result)
      if (stopAtFirstError) {
        return failed
      }
    }
  }
  return failed
}
// Load the tests from `testFile` and run them all, returning the list of
// failure reports. A copy of the parsed tests is passed down because the
// helper consumes its input.
const runTests = async (config, testFile, juicyBits) => {
  const tests = JSON.parse(runtime.fs.readFileSync(testFile))
  if (juicyBits.verbose) {
    console.log('\n', testFile, '-----------------------------------------------', '\n')
  }
  return runTestsHelper(config, [...tests], [], juicyBits)
}
// Re-run one query against a rebuilt config and write the outcome into the
// test file, including a name -> uuid map for the loaded KMs. `expected` is
// accepted for interface compatibility but not consulted here.
const saveTest = async (testFile, config, test, expected, testConfig, saveDeveloper) => {
  await config.rebuild()
  getObjects(config.config.objects)(config.uuid)
  console.log(test)
  const result = await _process(config, test, { isTest: true, isProcess: true, isModule: true })
  const actualConfig = getConfigForTest(config, testConfig)
  const saveObjects = { ...config.config.objects, nameToUUID: {} }
  for (const km of config.configs) {
    saveObjects.nameToUUID[km.name] = km.uuid
  }
  writeTest(testFile, test, saveObjects, result.generated, result.paraphrases, result.responses, result.contexts, result.associations, result.metadata, actualConfig, saveDeveloper, result.paraphrasesParenthesized, result.generatedParenthesized, result.summaries)
}
// Save each pending test, processing the todo queue back-to-front and
// rebuilding the config around every save so no state leaks from one saved
// test into the next. (Iterative form of the original tail recursion.)
const saveTestsHelper = async (testFile, config, tests, todo, testConfig, saveDeveloper) => {
  while (todo.length > 0) {
    const test = todo.pop()
    await config.rebuild()
    await saveTest(testFile, config, test, tests[test], testConfig, saveDeveloper)
    // initialize in between test so state is not preserved since the test was adding without state
    await config.rebuild()
  }
}
// Regenerate the expected results for every test recorded in `testFile`.
// NOTE(review): `tests` is an array, so the helper's `tests[test]` lookup
// (indexed by query string) yields undefined; saveTest ignores that
// argument, so this is currently harmless — confirm.
const saveTests = (config, testFile, testConfig) => {
  const tests = JSON.parse(runtime.fs.readFileSync(testFile))
  console.log(testFile)
  const todo = tests.map((test) => test.query)
  return saveTestsHelper(testFile, config, tests, todo, testConfig)
}
/*
const showExamples = (testFile) => {
const tests = JSON.parse(fs.readFileSync(testFile))
Object.keys(tests).forEach((test) => console.log(test))
}
*/
// Pretty-print the config's info block. `description` and `section` are
// accepted for interface compatibility but are not used.
const showInfo = (description, section, config) => {
  console.log(JSON.stringify(config.getInfo(), null, 2))
}
/*
const submitBugToAPI = async (subscription_id, subscription_password, config) => {
console.log('********* Submitting bug *********')
const body = { description: config.config.description, config: config.config }
// fetch('http://localhost:5000/bug', {
fetch('https://thinktelligence.com/api/bug', {
method: 'POST',
body: JSON.stringify(body),
headers: {
mode: 'no-cors', // Type of mode of the request
'Content-Type': 'application/json', // request content type
Authorization: 'Basic ' + base64.encode(subscription_id + ':' + subscription_password)
}
}).then((result) => result.json())
.then((json) => {
if (json.status === 404) {
console.log(`Error submitting the bug: ${json.status} ${json.statusText}`)
} else {
console.log(`New bug id id ${json.id}`)
}
})
}
const submitBug = async (subscription_id, subscription_password, config, utterance, retries = 2) => {
// TODO remove these from the config
const properties = ['expected_contexts', 'expected_generated']
properties.forEach((property) => {
if (!config.get('expected_contexts')) {
throw 'Missing property expected_contexts'
}
})
return _process(config, utterance)
.then((responses) => {
let hasError = false
if (!matching(responses.contexts, config.config.expected_contexts)) {
console.log('JSON does not match')
console.log('actual', JSON.stringify(responses.contexts))
console.log('expected', JSON.stringify(config.config.expected_contexts))
hasError = true
}
if (!matching(responses.generated, config.config.expected_generated)) {
console.log('Generated does not match')
console.log('actual', JSON.stringify(responses.generated))
console.log('expected', JSON.stringify(config.config.expected_generated))
hasError = true
}
if (hasError) {
submitBugToAPI(subscription_id, subscription_password, config)
} else {
throw '\n\n *** The expected contexts matched the generated contexts so the bug was not submitted since its working. ***\n\n:'
}
})
.catch((error) => {
console.log('Error', error)
throw error
})
}
*/
// Print every piece of diagnostic information carried on the error — logs,
// trace, config objects, stack, nested errors, query — exit the process when
// the error warrants it (nested/raw errors or an attached query), and always
// rethrow the original error.
const defaultErrorHandler = async (error) => {
  const printLogs = (logs) => {
    console.log('\nlogs: ')
    for (const log of logs) {
      console.log('\n ', log)
    }
  }
  if (error.logs) {
    printLogs(error.logs)
  }
  if (error.trace) {
    console.log('trace: ', error.trace)
  }
  if (error.config) {
    console.log('objects', runtime.util.inspect(error.config.get('objects'), { depth: Infinity, sorted: true }))
  }
  if (error.stack) {
    console.log('error: ')
    console.log(error.stack)
  }
  let doErrorExit = false
  if (error.errors) {
    console.log('error: ')
    for (const e of error.errors) {
      if (e.logs) {
        printLogs(e.logs)
      }
      console.log('\n ', e.error ? e.error : e)
      doErrorExit = true
    }
  }
  if (error.error) {
    console.log('error: ')
    for (const e of error.error) {
      console.log('\n ', e)
      doErrorExit = true
    }
  }
  if (error.query) {
    console.log('query: ', error.query)
    doErrorExit = true
  }
  // process.exit may be absent (e.g. browser-ish runtimes); only call it
  // when it exists and something above marked the error as fatal.
  if (doErrorExit && typeof runtime.process.exit === 'function') {
    runtime.process.exit(-1)
  }
  throw error
}
// Pretty-print the contextual-priority ambiguities: a map of counter ->
// list of choice groups, each group a list of choices, each choice a list
// of elements.
const printContextualPrioritiesAmbiguities = (cpa) => {
  console.log('Contextual Priorities Ambiguities')
  for (const counter of Object.keys(cpa)) {
    console.log(`  Counter ${counter}`)
    for (const choices of cpa[counter]) {
      console.log('    [')
      for (const choice of choices) {
        console.log('      [')
        for (const element of choice) {
          console.log(`        ${JSON.stringify(element)},`)
        }
        console.log('      ],')
      }
      console.log('    ],')
    }
  }
}
// Pretty-print the full outcome of a processing run: errors, loaded KMs,
// learned priorities, logs, ambiguities, trace, objects (diffed against
// global.beforeObjects when set), the checked projections, the contexts,
// and a two-column table of paraphrases vs response strings.
// NOTE(review): the `errorHandler` parameter is accepted but never used.
const defaultInnerProcess = (config, errorHandler, responses) => {
  if (responses.errors) {
    console.log('Errors')
    responses.errors.forEach((error) => console.log(`  ${error}`))
  }
  console.log("KM's loaded (ordered)", config.configs.map((c) => c.name))
  // console.log('This is the global objects from running semantics:\n', config.objects)
  if (!_.isEmpty(responses.learned_contextual_priorities)) {
    console.log('\nThe learned contextual priorties are :\n')
    for (const lcp of responses.learned_contextual_priorities) {
      console.log(`   ${JSON.stringify(lcp)},\n`)
    }
    console.log('\n')
  }
  if (responses.logs) {
    console.log('Logs')
    responses.logs.forEach((log) => console.log(`  ${log}`))
  }
  if (responses.contextual_priorities_ambiguities) {
    printContextualPrioritiesAmbiguities(responses.contextual_priorities_ambiguities)
  }
  console.log(responses.trace)
  if (true) {
    // When a before-snapshot exists, show only the delta; otherwise dump
    // the full objects tree.
    if (global.beforeObjects) {
      console.log('objects', runtime.jsonDiff.diffString(global.beforeObjects, config.get('objects')))
    } else {
      console.log('objects', runtime.util.inspect(config.get('objects'), { depth: Infinity, sorted: true }))
    }
  }
  // NOTE(review): pickEm is never invoked, and it calls
  // getConfig_getObjectCheck (singular "Object"), which is not defined in
  // this file — it would throw ReferenceError if ever called. The checked
  // projection actually shown below uses pickObjects instead. Confirm this
  // is dead code that can be removed.
  const pickEm = () => {
    const picked = {}
    const namespaced = config.get('objects').namespaced
    for (const prop of getConfig_getObjectCheck(config.testConfig)) {
      if (prop.km) {
        /*
        const objects = namespaced[prop.km]]
        picked[prop.km] = {}
        for (let p of c.testConfig.check) {
          if (p.km) {
            continue
          }
          picked[p] = objects[p]
        }
        */
        console.log('TODO implement this if needed')
      } else {
        const objects = namespaced[config.uuid]
        picked[prop] = objects[prop]
      }
    }
    return picked
  }
  if (responses.explain_priorities) {
    console.log('Explain Priorities')
    for (const [inputs, output, reason] of responses.explain_priorities) {
      console.log(`   inputs: ${JSON.stringify(inputs)} output: ${JSON.stringify(output)} reason: ${reason}`)
    }
  }
  // const objects = config.get('objects').namespaced[config.uuid]
  // Resolve a KM's objects by name from the live config (same shape as the
  // actualGetObjects helper used by runTest).
  const actualGetObjects = (name) => {
    if (!name) {
      name = config.name
    }
    const km = config.configs.find((km) => km.name === name)
    return config.config.objects.namespaced[km.uuid] || {}
  }
  const picked = sortJson(pickObjects(config, config.testConfig, actualGetObjects), { depth: 25 })
  if (!_.isEmpty(picked)) {
    console.log('--- Object showing only the checked values ---')
    console.log(JSON.stringify(picked, null, 2))
  }
  const contextChecks = config.getContextChecks()
  const pickedResultContexts = responses.contexts.map(pickContext(contextChecks))
  if (pickedResultContexts.some((context) => Object.keys(context).length > 0)) {
    console.log('--- Contexts showing only the checked values ---')
    console.log(JSON.stringify(pickedResultContexts, null, 2))
  }
  console.log('--- The contexts are ----------')
  console.log(JSON.stringify(sortJson(responses.contexts, { depth: 25 }), null, 2))
  console.log('')
  /*
  console.log('--- The generated strings are ----------')
  for (const response of responses.generated) {
    console.log(response)
  }
  */
  console.log('')
  // NOTE(review): uses the global `process`, not runtime.process as the
  // rest of this file does — confirm that is intended.
  const screen_width = process.stdout.columns
  // || 0 for when running without a console
  const widths = [70, 10, Math.max(80, screen_width - 71 || 0)]
  const lines = new Lines(widths)
  lines.setElement(0, 0, '--- The paraphrases are ----------')
  lines.setElement(0, 2, '--- The response strings are ----------')
  lines.log()
  for (let i = 0; i < responses.paraphrases.length; ++i) {
    // dont show events
    if (responses.contexts[i].hidden) {
      continue
    }
    lines.setElement(0, 0, responses.paraphrases[i])
    if ((responses.responses[i] || []).length > 0) {
      lines.setElement(0, 2, responses.responses[i])
    } else {
      lines.setElement(0, 2, '')
    }
    lines.log()
  }
}
// Curried runner: awaits `promise` (typically a _process call) and
// pretty-prints the responses; failures are tagged with the config and
// routed to the default error handler (which rethrows).
// NOTE(review): the catch path calls defaultErrorHandler rather than the
// caller-supplied `errorHandler` — confirm whether that is intended.
const defaultProcess = ({ config, errorHandler }) => async (promise) => {
  try {
    const responses = await promise
    defaultInnerProcess(config, errorHandler, responses)
  } catch (error) {
    error.config = config
    defaultErrorHandler(error)
  }
}
// builtTemplate saveInstance
const rebuildTemplate = async ({ config, instance, target, previousResultss, rebuild, startOfChanges, template, errorHandler = defaultErrorHandler }) => {
const accumulators = {
resultss: [],
fragments: [],
semantics: [],
associations: [],
summaries: [],
learned_contextual_priorities: []
}
config.fragmentsBeingBuilt = []
const toProperties = (queryStringOrProperties) => {
if (typeof queryStringOrProperties === 'string') {
return { query: queryStringOrProperties }
} else {
return queryStringOrProperties
}
}
const fragmentToTodo = (query, index) => {
const pr = instance.fragments[index]
return Object.assign({}, toProperties(query), { property: 'fragments', previousResults: pr, skipSemantics: false })
}
const looper = async (configs) => {
if (configs.length === 0) {
finish()
return
}
const { property, hierarchy, query: queryOrExtraConfig, previousResults, initializer, skipSemantics } = configs.shift()
// queries are strings or { query: "blah", development: true/false }
// --- handle one todo item from the template build ---
// Three shapes are accepted:
//   1. a query: a bare string, { query }, or { isFragment } -> parsed/processed below
//   2. a function -> treated as an initializer and invoked with the standard args
//   3. anything else -> "extra config" (operators/bridges/words ...) merged into the config
if (typeof queryOrExtraConfig === 'string' || queryOrExtraConfig.query || queryOrExtraConfig.isFragment) {
  let query = queryOrExtraConfig
  const isFragment = queryOrExtraConfig.isFragment
  if (typeof queryOrExtraConfig === 'string') {
    // normalize a bare string to the { query } object form
    query = { query }
  }
  // fragments always run with semantics enabled, regardless of skipSemantics
  config.config.skipSemantics = skipSemantics && !isFragment
  // remember the global flag so it can be restored after this item
  const transitoryMode = global.transitoryMode
  if (isFragment || property === 'fragments') {
    global.transitoryMode = true
  }
  if (hierarchy) {
    for (const edge of hierarchy) {
      if (Array.isArray(edge)) {
        // [child, parent] pair form
        config.addHierarchy(edge[0], edge[1])
      } else {
        // single-object edge form
        config.addHierarchy(edge)
      }
    }
  }
  try {
    let results
    let prMessage = ''
    // Compare the previous run's summaries with the new ones and, when they
    // diverge, print a hint about which association to add to fix the parse.
    const suggestFix = (newSummaries) => {
      if (!previousResults) {
        return
      }
      const suggestion = suggestAssociationsFixFromSummaries(previousResults.summaries, newSummaries)
      if (suggestion.length > 0) {
        console.log(getSuggestionMessage(suggestion))
      }
    }
    if (!rebuild && previousResults && previousResults.query === query.query) {
      // cache hit: reuse the previous results instead of reprocessing the query
      results = previousResults
      prMessage = ' Using previous results. use -rtf for a hard rebuild of everything on the server side.'
      await loadInstance(config, { resultss: [results] })
    } else {
      try {
        results = await _process(config, query.query, { initializer, rebuildingTemplate: true })
      } catch (e) {
        // surface an association suggestion even on failure when summaries are attached to the error
        if (e.summaries && e.summaries.length > 0) {
          suggestFix(e.summaries)
        }
        throw e
      }
    }
    if (config.config.debug) {
      // TODO pass in the error handler like the other ones
      defaultInnerProcess(config, defaultErrorHandler, results)
    }
    if (results.contexts.length > 1) {
      // ambiguous parse: a template query must resolve to exactly one context
      console.log(`query "${query.query}". There is ${results.contexts.length} contexts in the results. Make sure its producing the results that you expect.`)
      suggestFix(results.summaries)
      throw new Error(`query "${query.query}". There is ${results.contexts.length} contexts in the results. Make sure its producing the results that you expect.`)
    } else if (results.paraphrases[0].toLowerCase() !== query.query.toLowerCase()) {
      // warn (but do not fail) when the paraphrase no longer round-trips the query
      console.log(`query "${query.query}". The paraphrase is different from the query "${results.paraphrases[0]}".${prMessage}`)
      // suggestFix(results.summaries)
    } else {
      console.log(`query ${isFragment ? 'fragment' : ''}"${query.query}".${prMessage}`)
    }
    // restore the flags toggled above
    // NOTE(review): global.transitoryMode is NOT restored on the catch path below — confirm intended
    global.transitoryMode = transitoryMode
    config.config.skipSemantics = null
    // annotate the results so they can be written to the instance file and matched on reload
    results.query = query.query
    results.isFragment = isFragment
    results.skipSemantics = skipSemantics
    results.development = query.development
    results.key = { query: query.query, hierarchy }
    accumulators[property].push(results)
    if (isFragment) {
      config.fragmentsBeingBuilt.push({ query: query.query, contexts: results.contexts })
    }
    accumulators.summaries = accumulators.summaries.concat(results.summaries)
    accumulators.associations = accumulators.associations.concat(results.associations)
    accumulators.learned_contextual_priorities = accumulators.learned_contextual_priorities.concat(results.learned_contextual_priorities)
    // recurse to process the remaining todo items
    await looper(configs)
  } catch (e) {
    const error = { errors: [e], query: query.query }
    config.config.skipSemantics = null
    errorHandler(error)
  }
} else if (typeof queryOrExtraConfig === 'function') {
  console.log('calling initialize function')
  const initFunction = queryOrExtraConfig
  const objects = config.get('objects')
  const args = { objects, getObjects: getObjects(objects) }
  setupArgs(args, config, config.logs, config.hierarchy)
  // this watch stuff was for allowing fragment to be made in the template load. that is not a good idea
  // because needs rebuild would need to run the rebuild to know if a rebuild was needed. the fragment creates
  // need to go in the intializer
  // const new_fragments = []
  // config.watchNewFragments(new_fragments)
  await initFunction(args)
  /*
  if (new_fragments.length > 0) {
    configs = configs.concat(new_fragments.map(fragmentToTodo))
  }
  */
  // the raw function is recorded here; it is serialized via toString() when the instance is written
  accumulators[property].push({ apply: queryOrExtraConfig })
  await looper(configs)
} else {
  // extra config is def from a time like operators or bridges or words etc
  // it will just get added to the config
  const extraConfig = queryOrExtraConfig
  console.log('config', extraConfig)
  if (extraConfig.stop) {
    // 'stop' marker: abandon the rest of the todo list
    await looper([])
  } else {
    try {
      // deep clone so the accumulated copy below cannot be mutated through the live config
      config.addInternal(_.cloneDeep(extraConfig), { handleCalculatedProps: true })
    } catch (e) {
      const where = extraConfig.where ? ` ${extraConfig.where}` : ''
      // NOTE(review): the trailing '}' after ${e.stack} looks like a typo in the message — confirm
      throw new Error(`Error processing extra config${where}: ${e.stack}}`)
    }
    accumulators[property].push({ extraConfig: true, ...extraConfig })
    await looper(configs)
  }
}
}
// Finalize a template build: clear in-flight fragment state, write the
// <target>.instance.json file with deterministically ordered contents, and
// create an empty <target>.test.json if one does not exist yet.
const finish = () => {
  config.fragmentsBeingBuilt = []
  const instanceName = `${target}.instance.json`
  console.log(`Writing instance file ${instanceName}`)

  // Sort each association, then the list itself, so output is stable across runs.
  const stabilizeAssociations = (associations) => {
    associations.forEach((association) => association.sort())
    associations.sort()
  }

  // Strip volatile fields (timings, logs, version ...) and sort what remains so
  // the instance file diffs cleanly between rebuilds.
  const stabilizeOutput = (template) => {
    stabilizeAssociations(template.associations)
    const stabilize = (results) => {
      for (const result of results) {
        if (result.apply) {
          // initializer functions are persisted as their source text
          result.apply = result.apply.toString()
        } else if (!result.extraConfig) {
          delete result.load_cache_time
          delete result.times
          delete result.clientSideTimes
          delete result.memory_free_percent
          delete result.logs
          delete result.version
          result.hierarchy.sort()
          stabilizeAssociations(result.associations)
          result.learned_contextual_priorities = safeNoDups(result.learned_contextual_priorities)
        }
      }
    }
    stabilize(template.resultss)
    stabilize(template.fragments)
    return template
  }

  stabilizeOutput(accumulators)
  runtime.fs.writeFileSync(instanceName, JSON.stringify(Object.assign({ configs: template.configs.map(updateQueries) }, accumulators), 0, 2))
  // km tests file
  const testsName = `./${target}.test.json`
  if (!runtime.fs.existsSync(testsName)) {
    console.log(`Writing km file tests file "${testsName}" since it does not exist`)
    runtime.fs.writeFileSync(testsName, JSON.stringify({}, 0, 2))
  }
}
// Build the ordered todo list for the template: initializers first, then the
// configs (queries / extra configs), then fragments, then semantics.
let todo = []
todo = todo.concat((template.initializers || []).map((query) => { return { initializer: true, property: 'resultss', query, skipSemantics: false || query.skipSemantics } }))
/*
todo = todo.concat((template.configs || []).map((query, index) => {
let pr
if (index < startOfChanges || (!startOfChanges && index < previousResultss.length)) {
pr = previousResultss[index]
}
return { property: 'resultss', query, previousResults: pr, skipSemantics: false || query.skipSemantics }
}))
*/
if (template.configs) {
  for (let configsIndex = 0; configsIndex < template.configs.length; ++configsIndex) {
    const query = template.configs[configsIndex]
    // account for things for example associations being added to the config while debugginer
    // (previous results are matched by query text rather than position so inserted
    //  entries do not shift the cache; non-string entries simply never match)
    const pr = previousResultss && previousResultss.find((pr) => pr.query === query)
    todo.push({ property: 'resultss', query, previousResults: pr, skipSemantics: false || query.skipSemantics })
  }
}
todo = todo.concat((template.fragments || []).map(fragmentToTodo))
/*
todo = todo.concat((template.fragments || []).map((query, index) => {
const pr = instance.fragments[index]
return Object.assign({}, toProperties(query), { property: 'fragments', previousResults: pr, skipSemantics: false })
}))
*/
// semantics entries are processed as "<from>\n<to>" queries with semantics disabled
todo = todo.concat((template.semantics || []).map((definition) => {
  return { property: 'semantics', query: `${definition.from}\n${definition.to}`, skipSemantics: true }
}))
await looper([...todo])
}
// Validate the 'template' argument of knowledgeModule: 'checks' must live in
// the 'test' property, and template.template may only carry the allowed keys.
const checkTemplate = (template) => {
  if (!template) return
  if (template.checks) {
    throw new Error("The 'checks' property should be in the 'test' property not the 'template' property")
  }
  validProps(['fragments', 'configs'], template.template, 'template.template')
}
// Validate the 'test' argument of knowledgeModule. It must provide 'name'
// (the path of the '<km>.test.json' file) and 'contents' (its parsed JSON),
// and checks.context must not be a function (defaultContextCheck must be
// called, not passed).
const checkTest = (testConfig) => {
  if (!testConfig) return
  if (!testConfig.name) {
    throw new Error("The 'test' property is missing the 'name' property that contains the name of the '<km>.test.json' file")
  }
  if (!testConfig.contents) {
    throw new Error("The 'test' property is missing the 'contents' property that contains contents of the '<km>.test.json' file")
  }
  const contextCheck = testConfig.checks?.context
  if (typeof contextCheck === 'function') {
    throw new Error(`The 'test.check.context' property should not be a function for ${testConfig.name}. If you are using defaultContextCheck then do this defaultContextCheck().`)
  }
}
// Implementation behind knowledgeModule(). Validates the arguments, builds the
// Config, and then either runs as a CLI process (require.main check further
// down) or acts as an importable module. (Definition continues past this span.)
const knowledgeModuleImpl = async ({
  includes,
  config: configStruct,
  api, apiKMs,
  initializer,
  terminator,
  multiApiInitializer,
  sendObjectsToServer,
  module: moduleFromJSFile,
  description,
  section,
  newWay,
  demo,
  test,
  template,
  errorHandler = defaultErrorHandler,
  process: processResults = defaultProcess,
  stopAtFirstFailure = true,
  ...rest
} = {}) => {
  // reject unknown options early so a typo in the caller does not pass silently
  const unknownArgs = Object.keys(rest)
  if (unknownArgs.length > 0) {
    throw new Error(`Unknown arguments to knowledgeModule: ${unknownArgs.join()}`)
  }
  const testConfig = test
  if (!moduleFromJSFile) {
    throw new Error("'module' is a required parameter. The value should be either 'module' or a lambda that will be called when the file is acting as a module.")
  }
  if (!configStruct) {
    throw new Error("'config' or 'createConfig' is a required parameter. The value should the config that defines the knowledge module.")
  }
// Finish configuring a freshly built Config: validate the name, attach the
// module description, and load the km tests either from testConfig.contents
// (object form) or from the test file on disk (empty list when absent).
const setupConfig = (config) => {
  if (!config.name) {
    throw new Error("config must have 'name' set to the knowledge module name.")
  }
  config.description = description
  if (typeof testConfig === 'object') {
    if (testConfig.contents) {
      config.tests = testConfig.contents
      // repoint the enclosing 'test' variable at the test-file path
      test = testConfig.name
    }
  } else if (runtime.fs && runtime.fs.existsSync(test)) {
    config.tests = JSON.parse(runtime.fs.readFileSync(test))
  } else {
    config.tests = []
  }
  config.setTestConfig(testConfig)
}
// Build and initialize the Config for this knowledge module. Statement order
// matters here: auto-rebuild is stopped before includes/api/initializer are
// applied and restarted once at the end so the config is only rebuilt once.
const createConfig = async () => {
  const config = new Config(configStruct, moduleFromJSFile, _process, apiKMs)
  if (sendObjectsToServer) {
    config.setSendObjectsToServer()
  }
  setupConfig(config)
  // record whether a template was supplied (truthiness coerced to a boolean)
  config.expect_template = !!template
  config.setTerminator(terminator)
  config.stop_auto_rebuild()
  await config.add(...(includes || []))
  if (api) {
    config.setApi(api)
  }
  if (multiApiInitializer) {
    await config.setMultiApi(multiApiInitializer)
  }
  if (initializer) {
    config.initializer(initializer)
  }
  await config.restart_auto_rebuild()
  return config
}
// Remaining required-argument checks for knowledgeModule.
if (!description) {
  throw new Error("'description' is a required parameter. The value should the description of the knowledge module.")
}
if (!testConfig) {
  throw new Error("'test' is a required parameter. The value should the path to the file used to store the tests of the knowledge module and the contents of the file in the form { name: <filePath>, contexts: <json> }.")
}
checkTest(testConfig)
// true when this km file is being run directly (CLI) rather than require()'d
const isProcess = require.main === moduleFromJSFile
if (isProcess) {
  let config
  try {
    const parser = new runtime.ArgumentParser({
      description: 'Entodicton knowledge module'
    })
    // Help text for the DEBUG_* environment variables shown by the CLI.
    const helpDebugWord = 'In order to get a debug break when a specific word is created set the DEBUG_WORD environment variable to the JSON of the association to break on. For example DEBUG_WORD=\'"the"\''
    const helpDebugAssociation = 'In order to get a debug break when a specific association is created set the DEBUG_ASSOCIATION environment variable to the JSON of the association to break on. For example DEBUG_ASSOCIATION=\'[["the", 0], ["mammal", 1]]\''
    const helpDebugHierarchy = 'In order to get a debug break when a specific hierarchy is created set the DEBUG_HIERARCHY environment variable to the JSON of the child-parent pair to break on. For example DEBUG_HIERARCHY=\'[["cat", 1], ["mammel", 1]]\''
    const helpDebugPriority = 'In order to get a debug break when a specific set of priorities is created set set DEBUG_PRIORITY environment variable to the JSON of the priorities that you want to break on. For example DEBUG_PRIORITY=\'[["verb", 0], ["article", 0]]\''
    const helpDebugContextualPriority = 'In order to get a debug break when a specific set of contextual priorities is created set set DEBUG_CONTEXTUAL_PRIORITY environment variable to the JSON of the priorities that you want to break on. For example DEBUG_CONTEXTUAL_PRIORITY=\'{ context: [["verb", 0], ["article", 0], select: 1}\''
    const helpDebugBridge = 'In order to get a debug break when a specific bridge is created set the DEBUG_BRIDGE environment variable to id to break on. For example DEBUG_BRIDGE=\'car\''
    const helpDebugOperator = 'In order to get a debug break when a specific hierarcy is created set the DEBUG_OPERATOR environment variable to debug any config loaded. For example DEBUG_OPERATOR=\'([operator] ([arg]))\''
    // CLI flags for running/saving tests.
    parser.add_argument('-tmn', '--testModuleName', { help: 'When running tests instead of using the current modules tests use the specified modules tests' })
    parser.add_argument('-t', '--test', { action: 'store_true', help: 'Run the tests. Create tests by running with the --query + --save flag' })
    parser.add_argument('-tv', '--testVerbose', { action: 'store_true', help: 'Run the tests in verbose mode. Create tests by running with the --query or --loop with the --save flag' })
    // parser.add_argument('-ttr', '--testToRun', { help: 'Only the specified test will be run' })
    parser.add_argument('-tva', '--testAllVerbose', { action: 'store_true', help: 'Run the tests in verbose mode. All the tests will be run instead of stopping at first failure. Create tests by running with the --query or --loop with the --save flag. if -q is specified the tests will be run for just the specified query.' })
    parser.add_argument('-tnp', '--testNoParenthesized', { action: 'store_true', help: 'Don\' check parenthesized differences for the tests' })
    parser.add_argument('-n', '--count', { help: 'Number of times to run the tests. Default is one. Use this to check for flakey test. If possible the system will print out a message with the word "hint" suggesting how to fix the problem' })
// parser.add_argument('-b', '--build', { help: 'Specify the template file name of the form <kmName>. There should be a file called <baseKmName>.<kmName>.template.json with the queries to run. For example { queries: [...] }. The template file will be run and generate an instantiation called <baseKmName>.<kmName>.instance.json and a file called <kmName>.js that will load the template file (this is file generated only if not already existing) and a test file called <KmName>.tests.json. This ca