const AIbitat = require("./aibitat");
const AgentPlugins = require("./aibitat/plugins");
const {
  WorkspaceAgentInvocation,
} = require("../../models/workspaceAgentInvocation");
const { WorkspaceChats } = require("../../models/workspaceChats");
const { safeJsonParse } = require("../http");
const { USER_AGENT, WORKSPACE_AGENT } = require("./defaults");
const ImportedPlugin = require("./imported");
const { AgentFlows } = require("../agentFlows");

class AgentHandler {
  #invocationUUID;
  #funcsToLoad = [];
  invocation = null;
  aibitat = null;
  channel = null;
  provider = null;
  model = null;

  constructor({ uuid }) {
    this.#invocationUUID = uuid;
  }
  log(text, ...args) {
    console.log(`\x1b[36m[AgentHandler]\x1b[0m ${text}`, ...args);
  }

  closeAlert() {
    this.log(`End ${this.#invocationUUID}::${this.provider}:${this.model}`);
  }
  async #chatHistory(limit = 10) {
    try {
      const rawHistory = (
        await WorkspaceChats.where(
          {
            workspaceId: this.invocation.workspace_id,
            user_id: this.invocation.user_id || null,
            thread_id: this.invocation.thread_id || null,
            api_session_id: null,
            include: true,
          },
          limit,
          { id: "desc" }
        )
      ).reverse();

      const agentHistory = [];
      rawHistory.forEach((chatLog) => {
        agentHistory.push(
          {
            from: USER_AGENT.name,
            to: WORKSPACE_AGENT.name,
            content: chatLog.prompt,
            state: "success",
          },
          {
            from: WORKSPACE_AGENT.name,
            to: USER_AGENT.name,
            content: safeJsonParse(chatLog.response)?.text || "",
            state: "success",
          }
        );
      });
      return agentHistory;
    } catch (e) {
      this.log("Error loading chat history", e.message);
      return [];
    }
  }
  checkSetup() {
    switch (this.provider) {
      case "openai":
        if (!process.env.OPEN_AI_KEY)
          throw new Error("OpenAI API key must be provided to use agents.");
        break;
      case "anthropic":
        if (!process.env.ANTHROPIC_API_KEY)
          throw new Error("Anthropic API key must be provided to use agents.");
        break;
      case "lmstudio":
        if (!process.env.LMSTUDIO_BASE_PATH)
          throw new Error("LMStudio base path must be provided to use agents.");
        break;
      case "ollama":
        if (!process.env.OLLAMA_BASE_PATH)
          throw new Error("Ollama base path must be provided to use agents.");
        break;
      case "groq":
        if (!process.env.GROQ_API_KEY)
          throw new Error("Groq API key must be provided to use agents.");
        break;
      case "togetherai":
        if (!process.env.TOGETHER_AI_API_KEY)
          throw new Error("TogetherAI API key must be provided to use agents.");
        break;
      case "azure":
        if (!process.env.AZURE_OPENAI_ENDPOINT || !process.env.AZURE_OPENAI_KEY)
          throw new Error(
            "Azure OpenAI API endpoint and key must be provided to use agents."
          );
        break;
      case "koboldcpp":
        if (!process.env.KOBOLD_CPP_BASE_PATH)
          throw new Error(
            "KoboldCPP must have a valid base path to use for the api."
          );
        break;
      case "localai":
        if (!process.env.LOCAL_AI_BASE_PATH)
          throw new Error(
            "LocalAI must have a valid base path to use for the api."
          );
        break;
      case "gemini":
        if (!process.env.GEMINI_API_KEY)
          throw new Error("Gemini API key must be provided to use agents.");
        break;
      case "openrouter":
        if (!process.env.OPENROUTER_API_KEY)
          throw new Error("OpenRouter API key must be provided to use agents.");
        break;
      case "mistral":
        if (!process.env.MISTRAL_API_KEY)
          throw new Error("Mistral API key must be provided to use agents.");
        break;
      case "generic-openai":
        if (!process.env.GENERIC_OPEN_AI_BASE_PATH)
          throw new Error("API base path must be provided to use agents.");
        break;
      case "perplexity":
        if (!process.env.PERPLEXITY_API_KEY)
          throw new Error("Perplexity API key must be provided to use agents.");
        break;
      case "textgenwebui":
        if (!process.env.TEXT_GEN_WEB_UI_BASE_PATH)
          throw new Error(
            "TextGenWebUI API base path must be provided to use agents."
          );
        break;
      case "bedrock":
        if (
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY_ID ||
          !process.env.AWS_BEDROCK_LLM_ACCESS_KEY ||
          !process.env.AWS_BEDROCK_LLM_REGION
        )
          throw new Error(
            "AWS Bedrock Access Keys and region must be provided to use agents."
          );
        break;
      case "fireworksai":
        if (!process.env.FIREWORKS_AI_LLM_API_KEY)
          throw new Error(
            "FireworksAI API Key must be provided to use agents."
          );
        break;
      case "deepseek":
        if (!process.env.DEEPSEEK_API_KEY)
          throw new Error("DeepSeek API Key must be provided to use agents.");
        break;
      case "litellm":
        if (!process.env.LITE_LLM_BASE_PATH)
          throw new Error(
            "LiteLLM API base path and key must be provided to use agents."
          );
        break;
      case "apipie":
        if (!process.env.APIPIE_LLM_API_KEY)
          throw new Error("ApiPie API Key must be provided to use agents.");
        break;
      case "xai":
        if (!process.env.XAI_LLM_API_KEY)
          throw new Error("xAI API Key must be provided to use agents.");
        break;
      case "novita":
        if (!process.env.NOVITA_LLM_API_KEY)
          throw new Error("Novita API Key must be provided to use agents.");
        break;
      case "nvidia-nim":
        if (!process.env.NVIDIA_NIM_LLM_BASE_PATH)
          throw new Error(
            "NVIDIA NIM base path must be provided to use agents."
          );
        break;
      default:
        throw new Error(
          "No workspace agent provider set. Please set your agent provider in the workspace's settings"
        );
    }
  }
  /**
   * Finds the default model for a given provider. If no default model is set for its associated ENV, then
   * it will return a reasonable base model for the provider if one exists.
   * @param {string} provider - The provider to find the default model for.
   * @returns {string|null} The default model for the provider.
   */
  providerDefault(provider = this.provider) {
    switch (provider) {
      case "openai":
        return process.env.OPEN_MODEL_PREF ?? "gpt-4o";
      case "anthropic":
        return process.env.ANTHROPIC_MODEL_PREF ?? "claude-3-sonnet-20240229";
      case "lmstudio":
        return process.env.LMSTUDIO_MODEL_PREF ?? "server-default";
      case "ollama":
        return process.env.OLLAMA_MODEL_PREF ?? "llama3:latest";
      case "groq":
        return process.env.GROQ_MODEL_PREF ?? "llama3-70b-8192";
      case "togetherai":
        return (
          process.env.TOGETHER_AI_MODEL_PREF ??
          "mistralai/Mixtral-8x7B-Instruct-v0.1"
        );
      case "azure":
        return null;
      case "koboldcpp":
        return process.env.KOBOLD_CPP_MODEL_PREF ?? null;
      case "gemini":
        return process.env.GEMINI_MODEL_PREF ?? "gemini-pro";
      case "localai":
        return process.env.LOCAL_AI_MODEL_PREF ?? null;
      case "openrouter":
        return process.env.OPENROUTER_MODEL_PREF ?? "openrouter/auto";
      case "mistral":
        return process.env.MISTRAL_MODEL_PREF ?? "mistral-medium";
      case "generic-openai":
        return process.env.GENERIC_OPEN_AI_MODEL_PREF ?? null;
      case "perplexity":
        return process.env.PERPLEXITY_MODEL_PREF ?? "sonar-small-online";
      case "textgenwebui":
        return null;
      case "bedrock":
        return process.env.AWS_BEDROCK_LLM_MODEL_PREFERENCE ?? null;
      case "fireworksai":
        return process.env.FIREWORKS_AI_LLM_MODEL_PREF ?? null;
      case "deepseek":
        return process.env.DEEPSEEK_MODEL_PREF ?? "deepseek-chat";
      case "litellm":
        return process.env.LITE_LLM_MODEL_PREF ?? null;
      case "apipie":
        return process.env.APIPIE_LLM_MODEL_PREF ?? null;
      case "xai":
        return process.env.XAI_LLM_MODEL_PREF ?? "grok-beta";
      case "novita":
        return process.env.NOVITA_LLM_MODEL_PREF ?? "deepseek/deepseek-r1";
      case "nvidia-nim":
        return process.env.NVIDIA_NIM_LLM_MODEL_PREF ?? null;
      default:
        return null;
    }
  }
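
  // Example (illustrative only, not part of the original file): with the
  // "ollama" provider and no OLLAMA_MODEL_PREF set, providerDefault("ollama")
  // returns the baked-in fallback "llama3:latest"; with OLLAMA_MODEL_PREF set,
  // that value wins. Providers such as "azure" intentionally return null
  // because no reasonable base model can be assumed for them.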
  /**
   * Attempts to find a fallback provider and model to use if the workspace
   * does not have an explicit `agentProvider` and `agentModel` set.
   * 1. Fall back to the workspace `chatProvider` and `chatModel` if they exist.
   * 2. Fall back to the system `LLM_PROVIDER` and try to load the associated default model via ENV params or a base available model.
   * 3. Otherwise, return null - will likely throw an error the user can act on.
   * @returns {object|null} - An object with provider and model keys.
   */
  #getFallbackProvider() {
    // First, fallback to the workspace chat provider and model if they exist
    if (
      this.invocation.workspace.chatProvider &&
      this.invocation.workspace.chatModel
    ) {
      return {
        provider: this.invocation.workspace.chatProvider,
        model: this.invocation.workspace.chatModel,
      };
    }

    // If workspace does not have chat provider and model fallback
    // to system provider and try to load provider default model
    const systemProvider = process.env.LLM_PROVIDER;
    const systemModel = this.providerDefault(systemProvider);
    if (systemProvider && systemModel) {
      return {
        provider: systemProvider,
        model: systemModel,
      };
    }

    return null;
  }
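
  // Example (illustrative only): a workspace with chatProvider "anthropic" and
  // chatModel "claude-3-sonnet-20240229" but no agent settings falls back to
  // that pair; if the workspace has no chat settings either, the handler falls
  // back to LLM_PROVIDER plus providerDefault(LLM_PROVIDER), and only returns
  // null when neither source yields a usable provider/model pair.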
  /**
   * Finds or assumes the model preference value to use for API calls.
   * If multi-model loading is supported, we use the workspace's agent model selection.
   * If not supported, we attempt to fall back to the system provider value for the LLM preference
   * and if that fails - we assume a reasonable base model to exist.
   * @returns {string|null} the model preference value to use in API calls
   */
  #fetchModel() {
    // Provider was not explicitly set for workspace, so we are going to run our fallback logic
    // that will set a provider and model for us to use.
    if (!this.provider) {
      const fallback = this.#getFallbackProvider();
      if (!fallback) throw new Error("No valid provider found for the agent.");
      this.provider = fallback.provider; // re-set the provider to the fallback provider so it is not null.
      return fallback.model; // set its defined model based on fallback logic.
    }

    // The provider was explicitly set, so check if the workspace has an agent model set.
    if (this.invocation.workspace.agentModel)
      return this.invocation.workspace.agentModel;

    // Otherwise, we have no model to use - so guess a default model to use via the provider
    // and its system ENV params and if that fails - we return either a base model or null.
    return this.providerDefault();
  }
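
  // Summary of model resolution (illustrative comment, not in the original file):
  // - no workspace agentProvider: use #getFallbackProvider() (workspace chat
  //   settings, then system LLM_PROVIDER and its default model);
  // - agentProvider set: prefer the workspace agentModel, otherwise fall back
  //   to providerDefault() for that provider, which may be null.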
  #providerSetupAndCheck() {
    this.provider = this.invocation.workspace.agentProvider ?? null; // set provider to workspace agent provider if it exists
    this.model = this.#fetchModel();

    if (!this.provider)
      throw new Error("No valid provider found for the agent.");

    this.log(`Start ${this.#invocationUUID}::${this.provider}:${this.model}`);
    this.checkSetup();
  }
  async #validInvocation() {
    const invocation = await WorkspaceAgentInvocation.getWithWorkspace({
      uuid: String(this.#invocationUUID),
    });
    if (invocation?.closed)
      throw new Error("This agent invocation is already closed");
    this.invocation = invocation ?? null;
  }
  parseCallOptions(args, config = {}, pluginName) {
    const callOpts = {};
    for (const [param, definition] of Object.entries(config)) {
      if (
        definition.required &&
        (!Object.prototype.hasOwnProperty.call(args, param) ||
          args[param] === null)
      ) {
        this.log(
          `'${param}' required parameter for '${pluginName}' plugin is missing. Plugin may not function or crash agent.`
        );
        continue;
      }
      callOpts[param] = Object.prototype.hasOwnProperty.call(args, param)
        ? args[param]
        : definition.default || null;
    }
    return callOpts;
  }
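
  // Example (illustrative only, hypothetical plugin config): given
  // config = { apiKey: { required: true }, limit: { default: 5 } } and
  // args = { limit: 10 }, the missing required `apiKey` is logged and skipped,
  // and the returned callOpts is { limit: 10 }; with no `limit` in args the
  // result would fall back to { limit: 5 }.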
  async #attachPlugins(args) {
    for (const name of this.#funcsToLoad) {
      // Load child plugin
      if (name.includes("#")) {
        const [parent, childPluginName] = name.split("#");
        if (!Object.prototype.hasOwnProperty.call(AgentPlugins, parent)) {
          this.log(
            `${parent} is not a valid plugin. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const childPlugin = AgentPlugins[parent].plugin.find(
          (child) => child.name === childPluginName
        );
        if (!childPlugin) {
          this.log(
            `${parent} does not have child plugin named ${childPluginName}. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const callOpts = this.parseCallOptions(
          args,
          childPlugin?.startupConfig?.params,
          name
        );
        this.aibitat.use(childPlugin.plugin(callOpts));
        this.log(
          `Attached ${parent}:${childPluginName} plugin to Agent cluster`
        );
        continue;
      }

      // Load flow plugin. This is marked by `@@flow_` in the array of functions to load.
      if (name.startsWith("@@flow_")) {
        const uuid = name.replace("@@flow_", "");
        const plugin = AgentFlows.loadFlowPlugin(uuid);
        if (!plugin) {
          this.log(
            `Flow ${uuid} not found in flows directory. Skipping inclusion to agent cluster.`
          );
          continue;
        }
        this.aibitat.use(plugin.plugin());
        this.log(
          `Attached flow ${plugin.name} (${plugin.flowName}) plugin to Agent cluster`
        );
        continue;
      }

      // Load imported plugin. This is marked by `@@` in the array of functions to load.
      // and is the @@hubID of the plugin.
      if (name.startsWith("@@")) {
        const hubId = name.replace("@@", "");
        const valid = ImportedPlugin.validateImportedPluginHandler(hubId);
        if (!valid) {
          this.log(
            `Imported plugin by hubId ${hubId} not found in plugin directory. Skipping inclusion to agent cluster.`
          );
          continue;
        }

        const plugin = ImportedPlugin.loadPluginByHubId(hubId);
        const callOpts = plugin.parseCallOptions();
        this.aibitat.use(plugin.plugin(callOpts));
        this.log(
          `Attached ${plugin.name} (${hubId}) imported plugin to Agent cluster`
        );
        continue;
      }

      // Load single-stage plugin.
      if (!Object.prototype.hasOwnProperty.call(AgentPlugins, name)) {
        this.log(
          `${name} is not a valid plugin. Skipping inclusion to agent cluster.`
        );
        continue;
      }

      const callOpts = this.parseCallOptions(
        args,
        AgentPlugins[name].startupConfig.params
      );
      const AIbitatPlugin = AgentPlugins[name];
      this.aibitat.use(AIbitatPlugin.plugin(callOpts));
      this.log(`Attached ${name} plugin to Agent cluster`);
    }
  }
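
  // Naming conventions handled above (summary; the concrete names are
  // hypothetical examples, not guaranteed plugin ids):
  //   "memory"              -> single-stage plugin from AgentPlugins
  //   "web-browsing#search" -> child plugin "search" of AgentPlugins["web-browsing"]
  //   "@@flow_<uuid>"       -> flow plugin loaded via AgentFlows.loadFlowPlugin(uuid)
  //   "@@<hubId>"           -> imported community plugin loaded by its hubId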
  async #loadAgents() {
    // Default User agent and workspace agent
    this.log(`Attaching user and default agent to Agent cluster.`);
    this.aibitat.agent(USER_AGENT.name, await USER_AGENT.getDefinition());
    this.aibitat.agent(
      WORKSPACE_AGENT.name,
      await WORKSPACE_AGENT.getDefinition(this.provider)
    );

    this.#funcsToLoad = [
      ...((await USER_AGENT.getDefinition())?.functions || []),
      ...((await WORKSPACE_AGENT.getDefinition())?.functions || []),
    ];
  }
  async init() {
    await this.#validInvocation();
    this.#providerSetupAndCheck();
    return this;
  }
  // Default to a null socket rather than a bare `socket` identifier, which is
  // undefined in this scope and would throw if the default were ever evaluated.
  async createAIbitat(args = { socket: null }) {
    this.aibitat = new AIbitat({
      provider: this.provider ?? "openai",
      model: this.model ?? "gpt-4o",
      chats: await this.#chatHistory(20),
      handlerProps: {
        invocation: this.invocation,
        log: this.log,
      },
    });

    // Attach standard websocket plugin for frontend communication.
    this.log(`Attached ${AgentPlugins.websocket.name} plugin to Agent cluster`);
    this.aibitat.use(
      AgentPlugins.websocket.plugin({
        socket: args.socket,
        muteUserReply: true,
        introspection: true,
      })
    );

    // Attach standard chat-history plugin for message storage.
    this.log(
      `Attached ${AgentPlugins.chatHistory.name} plugin to Agent cluster`
    );
    this.aibitat.use(AgentPlugins.chatHistory.plugin());

    // Load required agents (Default + custom)
    await this.#loadAgents();

    // Attach all required plugins for functions to operate.
    await this.#attachPlugins(args);
  }
  startAgentCluster() {
    return this.aibitat.start({
      from: USER_AGENT.name,
      to: this.channel ?? WORKSPACE_AGENT.name,
      content: this.invocation.prompt,
    });
  }
}

module.exports.AgentHandler = AgentHandler;
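
// Typical call sequence (illustrative sketch only; `invocationUuid` and
// `socket` are assumed to be supplied by whatever route handles the agent
// websocket connection - they are not defined in this file):
//
//   const handler = await new AgentHandler({ uuid: invocationUuid }).init();
//   await handler.createAIbitat({ socket });
//   handler.startAgentCluster();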