@handit.ai/ai-wrapper
Version:
🤖 Intelligent AI execution system with built-in tracking, evaluation, and self-improvement capabilities. The complete AI intelligence platform for enterprise applications.
702 lines (694 loc) • 35.5 kB
JavaScript
"use strict";
function _typeof(o) {
  "@babel/helpers - typeof";
  // Pick the implementation once, based on native Symbol support, and memoize
  // it by overwriting _typeof itself; subsequent calls skip this check.
  if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
    _typeof = function (obj) {
      return typeof obj;
    };
  } else {
    _typeof = function (obj) {
      // Polyfilled Symbol instances report typeof "object"; detect them by
      // constructor so they still classify as "symbol".
      return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
    };
  }
  return _typeof(o);
}
// Babel helper: builds a uniform { s: start, n: next, e: record-error,
// f: finish } driver over any iterable or array-like; this is how the
// compiled for...of loop in _formatPromptForOpenAI iterates. f() closes the
// underlying iterator (calls its return()) and rethrows any recorded error.
function _createForOfIteratorHelper(r, e) { var t = "undefined" != typeof Symbol && r[Symbol.iterator] || r["@@iterator"]; if (!t) { if (Array.isArray(r) || (t = _unsupportedIterableToArray(r)) || e && r && "number" == typeof r.length) { t && (r = t); var _n = 0, F = function F() {}; return { s: F, n: function n() { return _n >= r.length ? { done: !0 } : { done: !1, value: r[_n++] }; }, e: function e(r) { throw r; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var o, a = !0, u = !1; return { s: function s() { t = t.call(r); }, n: function n() { var r = t.next(); return a = r.done, r; }, e: function e(r) { u = !0, o = r; }, f: function f() { try { a || null == t["return"] || t["return"](); } finally { if (u) throw o; } } }; }
// Babel helper: converts strings, Maps, Sets, Arguments objects and typed
// arrays into a real Array so the index-based fallback driver above can walk
// them; returns undefined for anything else.
function _unsupportedIterableToArray(r, a) { if (r) { if ("string" == typeof r) return _arrayLikeToArray(r, a); var t = {}.toString.call(r).slice(8, -1); return "Object" === t && r.constructor && (t = r.constructor.name), "Map" === t || "Set" === t ? Array.from(r) : "Arguments" === t || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(t) ? _arrayLikeToArray(r, a) : void 0; } }
function _arrayLikeToArray(r, a) {
  // Copy the first `a` items of the array-like `r` into a real Array.
  // A nullish or oversized `a` is clamped to r.length.
  if (a == null || a > r.length) {
    a = r.length;
  }
  var out = new Array(a);
  for (var i = 0; i < a; i += 1) {
    out[i] = r[i];
  }
  return out;
}
// Babel helper: own enumerable string keys of `e`, plus (optionally
// enumerability-filtered) own symbol keys; used by _objectSpread below.
function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
// Babel helper: compiled object spread ({ ...a, ...b }); copies each source's
// own enumerable properties onto `e`, later sources overwriting earlier keys.
// This is what merges additionalOptions into the provider request payloads.
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
// Babel helper: defines key `r` on `e` as an enumerable/configurable/writable
// property, coercing the key via _toPropertyKey first.
function _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }
// Babel helper: minimal inlined regenerator runtime. _regenerator().m(fn)
// wraps a compiled generator body; .w(stepFn, ...) builds the `_context`
// object whose fields drive the switch-based state machines in the methods
// below: .n = next case label, .p = current try-block position, .v = value
// sent/returned from the last yield (await), .a = abrupt completion
// (return/throw), .f = run finally. The logic is order-sensitive and relies on
// the exact protocol shared with _regeneratorDefine2 — do not hand-edit.
function _regenerator() { /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/babel/babel/blob/main/packages/babel-helpers/LICENSE */ var e, t, r = "function" == typeof Symbol ? Symbol : {}, n = r.iterator || "@@iterator", o = r.toStringTag || "@@toStringTag"; function i(r, n, o, i) { var c = n && n.prototype instanceof Generator ? n : Generator, u = Object.create(c.prototype); return _regeneratorDefine2(u, "_invoke", function (r, n, o) { var i, c, u, f = 0, p = o || [], y = !1, G = { p: 0, n: 0, v: e, a: d, f: d.bind(e, 4), d: function d(t, r) { return i = t, c = 0, u = e, G.n = r, a; } }; function d(r, n) { for (c = r, u = n, t = 0; !y && f && !o && t < p.length; t++) { var o, i = p[t], d = G.p, l = i[2]; r > 3 ? (o = l === n) && (u = i[(c = i[4]) ? 5 : (c = 3, 3)], i[4] = i[5] = e) : i[0] <= d && ((o = r < 2 && d < i[1]) ? (c = 0, G.v = n, G.n = i[1]) : d < l && (o = r < 3 || i[0] > n || n > l) && (i[4] = r, i[5] = n, G.n = l, c = 0)); } if (o || r > 1) return a; throw y = !0, n; } return function (o, p, l) { if (f > 1) throw TypeError("Generator is already running"); for (y && 1 === p && d(p, l), c = p, u = l; (t = c < 2 ? e : u) || !y;) { i || (c ? c < 3 ? (c > 1 && (G.n = -1), d(c, u)) : G.n = u : G.v = u); try { if (f = 2, i) { if (c || (o = "next"), t = i[o]) { if (!(t = t.call(i, u))) throw TypeError("iterator result is not an object"); if (!t.done) return t; u = t.value, c < 2 && (c = 0); } else 1 === c && (t = i["return"]) && t.call(i), c < 2 && (u = TypeError("The iterator does not provide a '" + o + "' method"), c = 1); i = e; } else if ((t = (y = G.n < 0) ? u : r.call(n, G)) !== a) break; } catch (t) { i = e, c = 1, u = t; } finally { f = 1; } } return { value: t, done: y }; }; }(r, o, i), !0), u; } var a = {}; function Generator() {} function GeneratorFunction() {} function GeneratorFunctionPrototype() {} t = Object.getPrototypeOf; var c = [][n] ? 
t(t([][n]())) : (_regeneratorDefine2(t = {}, n, function () { return this; }), t), u = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(c); function f(e) { return Object.setPrototypeOf ? Object.setPrototypeOf(e, GeneratorFunctionPrototype) : (e.__proto__ = GeneratorFunctionPrototype, _regeneratorDefine2(e, o, "GeneratorFunction")), e.prototype = Object.create(u), e; } return GeneratorFunction.prototype = GeneratorFunctionPrototype, _regeneratorDefine2(u, "constructor", GeneratorFunctionPrototype), _regeneratorDefine2(GeneratorFunctionPrototype, "constructor", GeneratorFunction), GeneratorFunction.displayName = "GeneratorFunction", _regeneratorDefine2(GeneratorFunctionPrototype, o, "GeneratorFunction"), _regeneratorDefine2(u), _regeneratorDefine2(u, o, "Generator"), _regeneratorDefine2(u, n, function () { return this; }), _regeneratorDefine2(u, "toString", function () { return "[object Generator]"; }), (_regenerator = function _regenerator() { return { w: i, m: f }; })(); }
// Babel helper: Object.defineProperty shim used by the regenerator runtime;
// when called with a single argument it installs the delegating
// next/throw/return methods on the generator prototype instead.
function _regeneratorDefine2(e, r, n, t) { var i = Object.defineProperty; try { i({}, "", {}); } catch (e) { i = 0; } _regeneratorDefine2 = function _regeneratorDefine(e, r, n, t) { if (r) i ? i(e, r, { value: n, enumerable: !t, configurable: !t, writable: !t }) : e[r] = n;else { var o = function o(r, n) { _regeneratorDefine2(e, r, function (e) { return this._invoke(r, n, e); }); }; o("next", 0), o("throw", 1), o("return", 2); } }, _regeneratorDefine2(e, r, n, t); }
// Babel helper: advances the wrapped generator one step. On throw from the
// generator, rejects the outer promise (via e); when the generator is done,
// resolves it with the final value; otherwise chains the yielded (awaited)
// value through Promise.resolve back into the generator.
function asyncGeneratorStep(n, t, e, r, o, a, c) { try { var i = n[a](c), u = i.value; } catch (n) { return void e(n); } i.done ? t(u) : Promise.resolve(u).then(r, o); }
// Babel helper: compiled async-function wrapper — returns a function that
// runs the generator produced by `n` to completion inside a new Promise,
// preserving `this` and arguments. Every async method below is built on this.
function _asyncToGenerator(n) { return function () { var t = this, e = arguments; return new Promise(function (r, o) { var a = n.apply(t, e); function _next(n) { asyncGeneratorStep(a, r, o, _next, _throw, "next", n); } function _throw(n) { asyncGeneratorStep(a, r, o, _next, _throw, "throw", n); } _next(void 0); }); }; }
function _classCallCheck(a, n) {
  // Guard against invoking a transpiled class constructor without `new`:
  // the instance must already be an instanceof the constructor.
  var calledWithNew = a instanceof n;
  if (!calledWithNew) {
    throw new TypeError("Cannot call a class as a function");
  }
}
// Babel helper: installs an array of { key, value/get/set } descriptors onto
// a prototype or constructor; value descriptors become writable, all become
// enumerable-as-declared and configurable.
function _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, "value" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }
// Babel helper: compiled `class` — attaches prototype members (r) and static
// members (t) to constructor e, then makes e.prototype non-writable.
function _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, "prototype", { writable: !1 }), e; }
// Babel helper: coerces a computed property key to a string or symbol.
function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
// Babel helper: spec-style ToPrimitive with optional hint; honors a
// Symbol.toPrimitive method and throws if it returns a non-primitive.
function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
var _require = require('@handit.ai/node'),
config = _require.config,
fetchPrompts = _require.fetchPrompts,
trackNode = _require.trackNode,
startTracing = _require.startTracing,
endTracing = _require.endTracing;
var OpenAI = require('openai');
var _require2 = require('@google/generative-ai'),
GoogleGenerativeAI = _require2.GoogleGenerativeAI;
var Anthropic = require('@anthropic-ai/sdk');
// AIWrapper: routes agent executions through handit.ai tracing / prompt
// management and whichever LLM providers (OpenAI, Google, Anthropic) were
// configured with API keys.
var AIWrapper = /*#__PURE__*/function () {
// Constructor options: { handitApiKey, openaiApiKey, googleApiKey,
// anthropicApiKey, trackingUrl, performanceUrl, ssoTrackingUrl }.
// Each API key falls back to its corresponding environment variable;
// handitApiKey is mandatory (throws otherwise). Only providers that were
// given a key get a client in this.providers, which _callAIProvider checks.
function AIWrapper() {
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
_ref$handitApiKey = _ref.handitApiKey,
handitApiKey = _ref$handitApiKey === void 0 ? process.env.HANDIT_API_KEY : _ref$handitApiKey,
_ref$openaiApiKey = _ref.openaiApiKey,
openaiApiKey = _ref$openaiApiKey === void 0 ? process.env.OPENAI_API_KEY || null : _ref$openaiApiKey,
_ref$googleApiKey = _ref.googleApiKey,
googleApiKey = _ref$googleApiKey === void 0 ? process.env.GOOGLE_API_KEY || null : _ref$googleApiKey,
_ref$anthropicApiKey = _ref.anthropicApiKey,
anthropicApiKey = _ref$anthropicApiKey === void 0 ? process.env.ANTHROPIC_API_KEY || null : _ref$anthropicApiKey,
_ref$trackingUrl = _ref.trackingUrl,
trackingUrl = _ref$trackingUrl === void 0 ? null : _ref$trackingUrl,
_ref$performanceUrl = _ref.performanceUrl,
performanceUrl = _ref$performanceUrl === void 0 ? null : _ref$performanceUrl,
_ref$ssoTrackingUrl = _ref.ssoTrackingUrl,
ssoTrackingUrl = _ref$ssoTrackingUrl === void 0 ? null : _ref$ssoTrackingUrl;
_classCallCheck(this, AIWrapper);
if (!handitApiKey) {
throw new Error('handitApiKey is required. Provide it directly or set HANDIT_API_KEY environment variable.');
}
// Configure handit
config({
apiKey: handitApiKey,
trackingUrl: trackingUrl,
performanceUrl: performanceUrl,
ssoTrackingUrl: ssoTrackingUrl
});
// Initialize AI providers
this.providers = {};
if (openaiApiKey) {
this.providers.openai = new OpenAI({
apiKey: openaiApiKey
});
}
if (googleApiKey) {
this.providers.google = new GoogleGenerativeAI(googleApiKey);
}
if (anthropicApiKey) {
this.providers.anthropic = new Anthropic({
apiKey: anthropicApiKey
});
}
}
return _createClass(AIWrapper, [{
// executeAgent({ agentName, input, provider = 'openai', model = null,
//                nodeName = 'ai-execution', additionalOptions = {} })
// Full tracked run: startTracing -> fetchPrompts -> provider call ->
// trackNode, with tracing closed in a finally block. Resolves to
// { success: true, output, executionId, prompts } or, on any error,
// { success: false, error, executionId } — errors are reported, not
// rethrown. This is compiled async/await: the switch is a regenerator state
// machine and the table [[1, 7, 9, 11]] maps try (case 1) / catch (case 7) /
// finally (case 9) / end (case 11).
key: "executeAgent",
value: function () {
var _executeAgent = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee(_ref2) {
var agentName, input, _ref2$provider, provider, _ref2$model, model, _ref2$nodeName, nodeName, _ref2$additionalOptio, additionalOptions, executionId, tracingResult, prompts, systemPrompt, userPrompt, fullPrompt, output, _t;
return _regenerator().w(function (_context) {
while (1) switch (_context.n) {
case 0:
// Destructure options and apply defaults.
agentName = _ref2.agentName, input = _ref2.input, _ref2$provider = _ref2.provider, provider = _ref2$provider === void 0 ? 'openai' : _ref2$provider, _ref2$model = _ref2.model, model = _ref2$model === void 0 ? null : _ref2$model, _ref2$nodeName = _ref2.nodeName, nodeName = _ref2$nodeName === void 0 ? 'ai-execution' : _ref2$nodeName, _ref2$additionalOptio = _ref2.additionalOptions, additionalOptions = _ref2$additionalOptio === void 0 ? {} : _ref2$additionalOptio;
executionId = null;
_context.p = 1;
_context.n = 2;
return startTracing({
agentName: agentName
});
case 2:
tracingResult = _context.v;
// startTracing may return nothing; all tracking below is skipped when
// no execution id was produced.
executionId = tracingResult === null || tracingResult === void 0 ? void 0 : tracingResult.executionId;
// Fetch prompts from handit
_context.n = 3;
return fetchPrompts({
agentName: agentName
});
case 3:
prompts = _context.v;
if (!(!prompts || !prompts.length)) {
_context.n = 4;
break;
}
throw new Error("No prompts found for agent: ".concat(agentName));
case 4:
// Get the system prompt (assuming first prompt is system)
systemPrompt = prompts.find(function (p) {
return p.type === 'system';
}) || prompts[0];
// Falls back to prompts[1], which may be undefined; _preparePrompt
// tolerates a missing user prompt.
userPrompt = prompts.find(function (p) {
return p.type === 'user';
}) || prompts[1]; // Prepare the full prompt
fullPrompt = this._preparePrompt(systemPrompt, userPrompt, input); // Call the appropriate AI provider
_context.n = 5;
return this._callAIProvider({
provider: provider,
model: model,
prompt: fullPrompt,
input: input,
additionalOptions: additionalOptions
});
case 5:
output = _context.v;
// Only track the node when tracing yielded an execution id.
if (!executionId) {
_context.n = 6;
break;
}
_context.n = 6;
return trackNode({
input: input,
output: output,
nodeName: nodeName,
agentName: agentName,
nodeType: 'llm',
executionId: executionId
});
case 6:
return _context.a(2, {
success: true,
output: output,
executionId: executionId,
prompts: prompts
});
case 7:
// catch: record the failure as a tracked node (message + stack),
// then resolve with success: false instead of rethrowing.
_context.p = 7;
_t = _context.v;
if (!executionId) {
_context.n = 8;
break;
}
_context.n = 8;
return trackNode({
input: input,
output: {
error: _t.message,
stack: _t.stack
},
nodeName: nodeName,
agentName: agentName,
nodeType: 'llm',
executionId: executionId
});
case 8:
return _context.a(2, {
success: false,
error: _t.message,
executionId: executionId
});
case 9:
// finally: close the trace if one was opened.
_context.p = 9;
if (!executionId) {
_context.n = 10;
break;
}
_context.n = 10;
return endTracing({
executionId: executionId,
agentName: agentName
});
case 10:
return _context.f(9);
case 11:
return _context.a(2);
}
}, _callee, this, [[1, 7, 9, 11]]);
}));
function executeAgent(_x) {
return _executeAgent.apply(this, arguments);
}
return executeAgent;
}()
}, {
// _callAIProvider({ provider, model, prompt, input, additionalOptions })
// Dispatches to _callOpenAI / _callGoogle / _callAnthropic by provider name.
// Throws if the provider was not configured in the constructor (no API key)
// or is not one of the three supported names. `input` is accepted but not
// used here; the prompt already embeds it.
key: "_callAIProvider",
value: function () {
var _callAIProvider2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2(_ref3) {
var provider, model, prompt, input, additionalOptions, _t2;
return _regenerator().w(function (_context2) {
while (1) switch (_context2.n) {
case 0:
provider = _ref3.provider, model = _ref3.model, prompt = _ref3.prompt, input = _ref3.input, additionalOptions = _ref3.additionalOptions;
if (this.providers[provider]) {
_context2.n = 1;
break;
}
throw new Error("AI provider '".concat(provider, "' not configured or not supported"));
case 1:
// Compiled switch(provider): openai -> case 2, google -> case 4,
// anthropic -> case 6, anything else -> case 8 (unsupported).
_t2 = provider;
_context2.n = _t2 === 'openai' ? 2 : _t2 === 'google' ? 4 : _t2 === 'anthropic' ? 6 : 8;
break;
case 2:
_context2.n = 3;
return this._callOpenAI({
model: model,
prompt: prompt,
additionalOptions: additionalOptions
});
case 3:
return _context2.a(2, _context2.v);
case 4:
_context2.n = 5;
return this._callGoogle({
model: model,
prompt: prompt,
additionalOptions: additionalOptions
});
case 5:
return _context2.a(2, _context2.v);
case 6:
_context2.n = 7;
return this._callAnthropic({
model: model,
prompt: prompt,
additionalOptions: additionalOptions
});
case 7:
return _context2.a(2, _context2.v);
case 8:
throw new Error("Unsupported provider: ".concat(provider));
case 9:
return _context2.a(2);
}
}, _callee2, this);
}));
function _callAIProvider(_x2) {
return _callAIProvider2.apply(this, arguments);
}
return _callAIProvider;
}()
}, {
// _callOpenAI({ model = 'gpt-3.5-turbo', prompt, additionalOptions = {} })
// Re-splits the combined "System:/User:" prompt into chat messages and
// returns the first choice's message content. additionalOptions is spread
// after model/messages, so callers can override either.
key: "_callOpenAI",
value: function () {
var _callOpenAI2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee3(_ref4) {
var _ref4$model, model, prompt, _ref4$additionalOptio, additionalOptions, messages, response;
return _regenerator().w(function (_context3) {
while (1) switch (_context3.n) {
case 0:
_ref4$model = _ref4.model, model = _ref4$model === void 0 ? 'gpt-3.5-turbo' : _ref4$model, prompt = _ref4.prompt, _ref4$additionalOptio = _ref4.additionalOptions, additionalOptions = _ref4$additionalOptio === void 0 ? {} : _ref4$additionalOptio;
messages = this._formatPromptForOpenAI(prompt);
_context3.n = 1;
return this.providers.openai.chat.completions.create(_objectSpread({
model: model,
messages: messages
}, additionalOptions));
case 1:
response = _context3.v;
return _context3.a(2, response.choices[0].message.content);
}
}, _callee3, this);
}));
function _callOpenAI(_x3) {
return _callOpenAI2.apply(this, arguments);
}
return _callOpenAI;
}()
}, {
// _callGoogle({ model = 'gemini-pro', prompt, additionalOptions = {} })
// Sends the combined prompt as a single user turn via the Google Generative
// AI SDK and returns result.response.text(). additionalOptions is spread
// into the generateContent request.
key: "_callGoogle",
value: function () {
var _callGoogle2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee4(_ref5) {
var _ref5$model, model, prompt, _ref5$additionalOptio, additionalOptions, genModel, result;
return _regenerator().w(function (_context4) {
while (1) switch (_context4.n) {
case 0:
_ref5$model = _ref5.model, model = _ref5$model === void 0 ? 'gemini-pro' : _ref5$model, prompt = _ref5.prompt, _ref5$additionalOptio = _ref5.additionalOptions, additionalOptions = _ref5$additionalOptio === void 0 ? {} : _ref5$additionalOptio;
genModel = this.providers.google.getGenerativeModel({
model: model
});
_context4.n = 1;
return genModel.generateContent(_objectSpread({
contents: [{
role: 'user',
parts: [{
text: prompt
}]
}]
}, additionalOptions));
case 1:
result = _context4.v;
return _context4.a(2, result.response.text());
}
}, _callee4, this);
}));
function _callGoogle(_x4) {
return _callGoogle2.apply(this, arguments);
}
return _callGoogle;
}()
}, {
// _callAnthropic({ model = 'claude-3-sonnet-20240229', prompt,
//                  additionalOptions = {} })
// Sends the combined prompt as a single user message. max_tokens defaults to
// 1000, but additionalOptions is spread after it, so a caller-supplied
// max_tokens takes precedence. Returns response.content[0].text.
key: "_callAnthropic",
value: function () {
var _callAnthropic2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee5(_ref6) {
var _ref6$model, model, prompt, _ref6$additionalOptio, additionalOptions, response;
return _regenerator().w(function (_context5) {
while (1) switch (_context5.n) {
case 0:
_ref6$model = _ref6.model, model = _ref6$model === void 0 ? 'claude-3-sonnet-20240229' : _ref6$model, prompt = _ref6.prompt, _ref6$additionalOptio = _ref6.additionalOptions, additionalOptions = _ref6$additionalOptio === void 0 ? {} : _ref6$additionalOptio;
_context5.n = 1;
return this.providers.anthropic.messages.create(_objectSpread({
model: model,
max_tokens: additionalOptions.max_tokens || 1000,
messages: [{
role: 'user',
content: prompt
}]
}, additionalOptions));
case 1:
response = _context5.v;
return _context5.a(2, response.content[0].text);
}
}, _callee5, this);
}));
function _callAnthropic(_x5) {
return _callAnthropic2.apply(this, arguments);
}
return _callAnthropic;
}()
}, {
// _preparePrompt(systemPrompt, userPrompt, input)
// Combines handit prompt records into one "System: ...\n\nUser: ..." string.
// {{input}} / {{user_input}} placeholders in the user prompt are replaced
// with `input`; with no user prompt, `input` itself becomes the User line.
// Either prompt argument may be null/undefined.
key: "_preparePrompt",
value: function _preparePrompt(systemPrompt, userPrompt, input) {
var fullPrompt = '';
if (systemPrompt) {
fullPrompt += "System: ".concat(systemPrompt.content, "\n\n");
}
if (userPrompt) {
// Replace placeholders in user prompt
var userContent = userPrompt.content;
userContent = userContent.replace(/\{\{input\}\}/g, input);
userContent = userContent.replace(/\{\{user_input\}\}/g, input);
fullPrompt += "User: ".concat(userContent);
} else {
fullPrompt += "User: ".concat(input);
}
return fullPrompt;
}
}, {
// _formatPromptForOpenAI(prompt)
// Inverse of _preparePrompt: parses a combined "System:/User:" string back
// into OpenAI chat messages [{ role, content }]. Lines starting a new
// "System:"/"User:" section flush the previous message; other non-blank
// lines are appended to the current section (blank lines are dropped).
// If no section markers are found, the whole prompt becomes one user message.
key: "_formatPromptForOpenAI",
value: function _formatPromptForOpenAI(prompt) {
// Try to extract system and user messages from the combined prompt
var lines = prompt.split('\n');
var messages = [];
var currentRole = null;
var currentContent = '';
var _iterator = _createForOfIteratorHelper(lines),
_step;
try {
for (_iterator.s(); !(_step = _iterator.n()).done;) {
var line = _step.value;
if (line.startsWith('System:')) {
if (currentRole && currentContent.trim()) {
messages.push({
role: currentRole,
content: currentContent.trim()
});
}
currentRole = 'system';
currentContent = line.replace('System:', '').trim();
} else if (line.startsWith('User:')) {
if (currentRole && currentContent.trim()) {
messages.push({
role: currentRole,
content: currentContent.trim()
});
}
currentRole = 'user';
currentContent = line.replace('User:', '').trim();
} else if (line.trim()) {
currentContent += '\n' + line;
}
}
// Add the last message
} catch (err) {
_iterator.e(err);
} finally {
_iterator.f();
}
// Flush whatever section was still open when the loop ended.
if (currentRole && currentContent.trim()) {
messages.push({
role: currentRole,
content: currentContent.trim()
});
}
// If no proper format detected, treat as user message
if (messages.length === 0) {
messages.push({
role: 'user',
content: prompt
});
}
return messages;
}
// Smart AI execution methods with full tracking and optimization
}, {
// runSmartAgent: executeAgent pinned to the OpenAI provider
// (default model 'gpt-3.5-turbo', node name 'openai-smart-execution').
key: "runSmartAgent",
value: function () {
var _runSmartAgent = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee6(_ref7) {
var agentName, input, _ref7$model, model, _ref7$nodeName, nodeName, _ref7$additionalOptio, additionalOptions;
return _regenerator().w(function (_context6) {
while (1) switch (_context6.n) {
case 0:
agentName = _ref7.agentName, input = _ref7.input, _ref7$model = _ref7.model, model = _ref7$model === void 0 ? 'gpt-3.5-turbo' : _ref7$model, _ref7$nodeName = _ref7.nodeName, nodeName = _ref7$nodeName === void 0 ? 'openai-smart-execution' : _ref7$nodeName, _ref7$additionalOptio = _ref7.additionalOptions, additionalOptions = _ref7$additionalOptio === void 0 ? {} : _ref7$additionalOptio;
_context6.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'openai',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context6.a(2, _context6.v);
}
}, _callee6, this);
}));
function runSmartAgent(_x6) {
return _runSmartAgent.apply(this, arguments);
}
return runSmartAgent;
}()
}, {
// runOptimizedAgent: executeAgent pinned to Google
// (default model 'gemini-pro', node name 'google-optimized-execution').
key: "runOptimizedAgent",
value: function () {
var _runOptimizedAgent = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee7(_ref8) {
var agentName, input, _ref8$model, model, _ref8$nodeName, nodeName, _ref8$additionalOptio, additionalOptions;
return _regenerator().w(function (_context7) {
while (1) switch (_context7.n) {
case 0:
agentName = _ref8.agentName, input = _ref8.input, _ref8$model = _ref8.model, model = _ref8$model === void 0 ? 'gemini-pro' : _ref8$model, _ref8$nodeName = _ref8.nodeName, nodeName = _ref8$nodeName === void 0 ? 'google-optimized-execution' : _ref8$nodeName, _ref8$additionalOptio = _ref8.additionalOptions, additionalOptions = _ref8$additionalOptio === void 0 ? {} : _ref8$additionalOptio;
_context7.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'google',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context7.a(2, _context7.v);
}
}, _callee7, this);
}));
function runOptimizedAgent(_x7) {
return _runOptimizedAgent.apply(this, arguments);
}
return runOptimizedAgent;
}()
}, {
// runTrackedAgent: executeAgent pinned to Anthropic (default model
// 'claude-3-sonnet-20240229', node name 'anthropic-tracked-execution').
key: "runTrackedAgent",
value: function () {
var _runTrackedAgent = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee8(_ref9) {
var agentName, input, _ref9$model, model, _ref9$nodeName, nodeName, _ref9$additionalOptio, additionalOptions;
return _regenerator().w(function (_context8) {
while (1) switch (_context8.n) {
case 0:
agentName = _ref9.agentName, input = _ref9.input, _ref9$model = _ref9.model, model = _ref9$model === void 0 ? 'claude-3-sonnet-20240229' : _ref9$model, _ref9$nodeName = _ref9.nodeName, nodeName = _ref9$nodeName === void 0 ? 'anthropic-tracked-execution' : _ref9$nodeName, _ref9$additionalOptio = _ref9.additionalOptions, additionalOptions = _ref9$additionalOptio === void 0 ? {} : _ref9$additionalOptio;
_context8.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'anthropic',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context8.a(2, _context8.v);
}
}, _callee8, this);
}));
function runTrackedAgent(_x8) {
return _runTrackedAgent.apply(this, arguments);
}
return runTrackedAgent;
}() // Provider-specific methods with better naming
}, {
// runWithOpenAI: executeAgent pinned to OpenAI (node name 'openai-execution').
key: "runWithOpenAI",
value: function () {
var _runWithOpenAI = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee9(_ref0) {
var agentName, input, _ref0$model, model, _ref0$nodeName, nodeName, _ref0$additionalOptio, additionalOptions;
return _regenerator().w(function (_context9) {
while (1) switch (_context9.n) {
case 0:
agentName = _ref0.agentName, input = _ref0.input, _ref0$model = _ref0.model, model = _ref0$model === void 0 ? 'gpt-3.5-turbo' : _ref0$model, _ref0$nodeName = _ref0.nodeName, nodeName = _ref0$nodeName === void 0 ? 'openai-execution' : _ref0$nodeName, _ref0$additionalOptio = _ref0.additionalOptions, additionalOptions = _ref0$additionalOptio === void 0 ? {} : _ref0$additionalOptio;
_context9.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'openai',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context9.a(2, _context9.v);
}
}, _callee9, this);
}));
function runWithOpenAI(_x9) {
return _runWithOpenAI.apply(this, arguments);
}
return runWithOpenAI;
}()
}, {
// runWithGoogle: executeAgent pinned to Google (node name 'google-execution').
key: "runWithGoogle",
value: function () {
var _runWithGoogle = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee0(_ref1) {
var agentName, input, _ref1$model, model, _ref1$nodeName, nodeName, _ref1$additionalOptio, additionalOptions;
return _regenerator().w(function (_context0) {
while (1) switch (_context0.n) {
case 0:
agentName = _ref1.agentName, input = _ref1.input, _ref1$model = _ref1.model, model = _ref1$model === void 0 ? 'gemini-pro' : _ref1$model, _ref1$nodeName = _ref1.nodeName, nodeName = _ref1$nodeName === void 0 ? 'google-execution' : _ref1$nodeName, _ref1$additionalOptio = _ref1.additionalOptions, additionalOptions = _ref1$additionalOptio === void 0 ? {} : _ref1$additionalOptio;
_context0.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'google',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context0.a(2, _context0.v);
}
}, _callee0, this);
}));
function runWithGoogle(_x0) {
return _runWithGoogle.apply(this, arguments);
}
return runWithGoogle;
}()
}, {
// runWithAnthropic: executeAgent pinned to Anthropic
// (node name 'anthropic-execution').
key: "runWithAnthropic",
value: function () {
var _runWithAnthropic = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee1(_ref10) {
var agentName, input, _ref10$model, model, _ref10$nodeName, nodeName, _ref10$additionalOpti, additionalOptions;
return _regenerator().w(function (_context1) {
while (1) switch (_context1.n) {
case 0:
agentName = _ref10.agentName, input = _ref10.input, _ref10$model = _ref10.model, model = _ref10$model === void 0 ? 'claude-3-sonnet-20240229' : _ref10$model, _ref10$nodeName = _ref10.nodeName, nodeName = _ref10$nodeName === void 0 ? 'anthropic-execution' : _ref10$nodeName, _ref10$additionalOpti = _ref10.additionalOptions, additionalOptions = _ref10$additionalOpti === void 0 ? {} : _ref10$additionalOpti;
_context1.n = 1;
return this.executeAgent({
agentName: agentName,
input: input,
provider: 'anthropic',
model: model,
nodeName: nodeName,
additionalOptions: additionalOptions
});
case 1:
return _context1.a(2, _context1.v);
}
}, _callee1, this);
}));
function runWithAnthropic(_x1) {
return _runWithAnthropic.apply(this, arguments);
}
return runWithAnthropic;
}() // Legacy method names for backward compatibility
}, {
// executeWithOpenAI: legacy alias; forwards the options object unchanged
// to runWithOpenAI.
key: "executeWithOpenAI",
value: function () {
var _executeWithOpenAI = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee10(options) {
return _regenerator().w(function (_context10) {
while (1) switch (_context10.n) {
case 0:
_context10.n = 1;
return this.runWithOpenAI(options);
case 1:
return _context10.a(2, _context10.v);
}
}, _callee10, this);
}));
function executeWithOpenAI(_x10) {
return _executeWithOpenAI.apply(this, arguments);
}
return executeWithOpenAI;
}()
}, {
// executeWithGoogle: legacy alias for runWithGoogle.
key: "executeWithGoogle",
value: function () {
var _executeWithGoogle = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee11(options) {
return _regenerator().w(function (_context11) {
while (1) switch (_context11.n) {
case 0:
_context11.n = 1;
return this.runWithGoogle(options);
case 1:
return _context11.a(2, _context11.v);
}
}, _callee11, this);
}));
function executeWithGoogle(_x11) {
return _executeWithGoogle.apply(this, arguments);
}
return executeWithGoogle;
}()
}, {
// executeWithAnthropic: legacy alias for runWithAnthropic.
key: "executeWithAnthropic",
value: function () {
var _executeWithAnthropic = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee12(options) {
return _regenerator().w(function (_context12) {
while (1) switch (_context12.n) {
case 0:
_context12.n = 1;
return this.runWithAnthropic(options);
case 1:
return _context12.a(2, _context12.v);
}
}, _callee12, this);
}));
function executeWithAnthropic(_x12) {
return _executeWithAnthropic.apply(this, arguments);
}
return executeWithAnthropic;
}() // Method to get available prompts for an agent
}, {
// getPrompts({ agentName }): thin passthrough to handit's fetchPrompts,
// resolving with whatever fetchPrompts returns.
key: "getPrompts",
value: function () {
var _getPrompts = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee13(_ref11) {
var agentName;
return _regenerator().w(function (_context13) {
while (1) switch (_context13.n) {
case 0:
agentName = _ref11.agentName;
_context13.n = 1;
return fetchPrompts({
agentName: agentName
});
case 1:
return _context13.a(2, _context13.v);
}
}, _callee13);
}));
function getPrompts(_x13) {
return _getPrompts.apply(this, arguments);
}
return getPrompts;
}() // Method to manually track a node execution
}, {
// track({ input, output, nodeName, agentName, nodeType = 'llm', executionId }):
// manual trackNode passthrough for results produced outside executeAgent.
key: "track",
value: function () {
var _track = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee14(_ref12) {
var input, output, nodeName, agentName, _ref12$nodeType, nodeType, executionId;
return _regenerator().w(function (_context14) {
while (1) switch (_context14.n) {
case 0:
input = _ref12.input, output = _ref12.output, nodeName = _ref12.nodeName, agentName = _ref12.agentName, _ref12$nodeType = _ref12.nodeType, nodeType = _ref12$nodeType === void 0 ? 'llm' : _ref12$nodeType, executionId = _ref12.executionId;
_context14.n = 1;
return trackNode({
input: input,
output: output,
nodeName: nodeName,
agentName: agentName,
nodeType: nodeType,
executionId: executionId
});
case 1:
return _context14.a(2, _context14.v);
}
}, _callee14);
}));
function track(_x14) {
return _track.apply(this, arguments);
}
return track;
}()
}]);
}(); // Export the class and individual functions for flexibility
// Public surface: the AIWrapper class plus handit's primitives re-exported
// for callers that want direct access without constructing a wrapper.
module.exports = {
AIWrapper: AIWrapper,
// Re-export handit functions for direct access if needed
fetchPrompts: fetchPrompts,
trackNode: trackNode,
startTracing: startTracing,
endTracing: endTracing,
config: config
};