
@neuraflash/einstein-lang


A command-line tool for creating and training natural language intent models on Salesforce Einstein

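Usage: the script takes a single action argument (upload, verify-upload, train, train-status, or predict); any option not supplied as a flag is prompted for interactively. The intended flow is upload, then verify-upload until the dataset finishes processing, then train, then train-status until training completes, then predict. The invocations below are illustrative sketches: they assume the package exposes its bin on your PATH as einstein-lang (an assumption; the actual bin name comes from package.json, which is not shown here), and the token, file path, and IDs are placeholders.

    einstein-lang upload -a <AUTH_TOKEN> -f ./intents.csv -n MyDataset
    einstein-lang verify-upload -a <AUTH_TOKEN> -d <DATASET_ID>
    einstein-lang train -a <AUTH_TOKEN> -d <DATASET_ID> -n MyModel
    einstein-lang train-status -a <AUTH_TOKEN> -m <MODEL_ID>
    einstein-lang predict -a <AUTH_TOKEN> -m <MODEL_ID> -p "I want to reset my password"
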
#!/usr/bin/env node --harmony

var co = require('co');
var prompt = require('co-prompt');
var program = require('commander');
var curl = require('curlrequest');

program
  .arguments('<action>')
  .option('-a, --auth <auth>', 'The auth token to use')
  .option('-f, --file <file>', 'The path to the file to upload')
  // The long flag is --datasetname so the parsed value lands on
  // program.datasetname; an option named --name would collide with
  // commander's own program.name.
  .option('-n, --datasetname <datasetname>', 'The name of the dataset (or the model, when training)')
  .option('-d, --dataset <dataset>', 'The dataset id')
  .option('-m, --model <model>', 'The model id')
  .option('-p, --phrase <phrase>', 'The phrase to try prediction on')
  .action(function(action) {
    co(function *() {
      switch (action) {

        // Upload a text-intent dataset file to Einstein Language
        case 'upload':
          var authToken = program.auth || (yield prompt('Authentication Token: '));
          var filePath = program.file || (yield prompt('File Path: '));
          var datasetName = program.datasetname || (yield prompt('Dataset Name: '));
          var options = {
            method: 'POST',
            headers: {
              'Authorization': 'Bearer ' + authToken,
              'Cache-Control': 'no-cache',
              'Content-Type': 'multipart/form-data'
            },
            form: [
              'data=@' + filePath,
              'type=text-intent',
              'name=' + datasetName
            ],
            url: 'https://api.einstein.ai/v2/language/datasets/upload'
          };
          curl.request(options, function(err, res) {
            if (err) { console.error(err); process.exit(1); }
            console.dir(JSON.parse(res), {depth: null, colors: true});
            process.exit();
          });
          break;

        // Check the status of an uploaded dataset
        case 'verify-upload':
          var authToken = program.auth || (yield prompt('Authentication Token: '));
          var datasetId = program.dataset || (yield prompt('Dataset ID: '));
          var options = {
            method: 'GET',
            headers: {
              'Authorization': 'Bearer ' + authToken,
              'Cache-Control': 'no-cache'
            },
            url: 'https://api.einstein.ai/v2/language/datasets/' + datasetId
          };
          curl.request(options, function(err, res) {
            if (err) { console.error(err); process.exit(1); }
            console.dir(JSON.parse(res), {depth: null, colors: true});
            process.exit();
          });
          break;

        // Train a model from an uploaded dataset
        case 'train':
          var authToken = program.auth || (yield prompt('Authentication Token: '));
          var datasetId = program.dataset || (yield prompt('Dataset ID: '));
          var modelName = program.datasetname || (yield prompt('Model Name: '));
          var options = {
            method: 'POST',
            headers: {
              'Authorization': 'Bearer ' + authToken,
              'Cache-Control': 'no-cache',
              'Content-Type': 'multipart/form-data'
            },
            form: [
              'datasetId=' + datasetId,
              'name=' + modelName
            ],
            url: 'https://api.einstein.ai/v2/language/train'
          };
          curl.request(options, function(err, res) {
            if (err) { console.error(err); process.exit(1); }
            console.dir(JSON.parse(res), {depth: null, colors: true});
            process.exit();
          });
          break;

        // Check the status of a training job
        case 'train-status':
          var authToken = program.auth || (yield prompt('Authentication Token: '));
          var modelId = program.model || (yield prompt('Model ID: '));
          var options = {
            method: 'GET',
            headers: {
              'Authorization': 'Bearer ' + authToken,
              'Cache-Control': 'no-cache'
            },
            url: 'https://api.einstein.ai/v2/language/train/' + modelId
          };
          curl.request(options, function(err, res) {
            if (err) { console.error(err); process.exit(1); }
            console.dir(JSON.parse(res), {depth: null, colors: true});
            process.exit();
          });
          break;

        // Run an intent prediction against a trained model
        case 'predict':
          var authToken = program.auth || (yield prompt('Authentication Token: '));
          var modelId = program.model || (yield prompt('Model ID: '));
          var phrase = program.phrase || (yield prompt('Phrase for Prediction: '));
          var options = {
            method: 'POST',
            headers: {
              'Authorization': 'Bearer ' + authToken,
              'Cache-Control': 'no-cache',
              'Content-Type': 'multipart/form-data'
            },
            form: [
              'modelId=' + modelId,
              'document=' + phrase
            ],
            url: 'https://api.einstein.ai/v2/language/intent'
          };
          curl.request(options, function(err, res) {
            if (err) { console.error(err); process.exit(1); }
            console.dir(JSON.parse(res), {depth: null, colors: true});
            process.exit();
          });
          break;

        default:
          console.error(action + ' is not a valid action');
          process.exit(1);
      }
    });
  })
  .parse(process.argv);
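
The curlrequest module drives the system curl binary, so the upload action corresponds to roughly the raw request below. This is an illustrative sketch: the URL, headers, and form fields are taken from the script above, while the token, file path, and dataset name are placeholders. The explicit multipart Content-Type header is omitted because curl sets it (with the boundary) automatically when -F is used.

    curl -X POST https://api.einstein.ai/v2/language/datasets/upload \
      -H "Authorization: Bearer <AUTH_TOKEN>" \
      -H "Cache-Control: no-cache" \
      -F "data=@./intents.csv" \
      -F "type=text-intent" \
      -F "name=MyDataset"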