UNPKG

purchase-mcp-server

Version:

Purchase and budget management server handling requisitions, purchase orders, expenses, budgets, and vendor management with ERP access for data extraction

919 lines 36.5 kB
import { getMongoClient } from "../utils/mongodb.js"; import { config } from "../utils/config.js"; import { logger } from "../utils/logger.js"; import axios from "axios"; import { LLMClient } from "../utils/llm.js"; import { getTypesenseClient } from "../utils/typesense.js"; import { ObjectId } from "mongodb"; export class DocumentToolHandler { constructor() { this.typesenseClient = getTypesenseClient(); this.S3_API_TOKEN = config.s3ApiToken || ''; this.S3_GENERATE_HTML_URL = config.s3GenerateHtmlUrl || ''; } async parseDocumentLink(arguments_) { const { document_link } = arguments_; if (!document_link) { throw new Error("URL is required"); } try { // Call the parse_to_document_link function to process the document const { success, md_content } = await this.parseToDocumentLink(document_link); if (!success || !md_content) { return [{ type: "text", text: `Failed to parse document from URL: ${document_link}`, title: "Document Parsing Error" }]; } // Return the parsed content as TextContent return [{ type: "text", text: md_content, title: `Parsed document from ${document_link}`, format: "markdown" }]; } catch (error) { logger.error(`Error parsing document from URL ${document_link}:`, error); return [{ type: "text", text: `Error parsing document: ${error.message}`, title: "Document Parsing Error" }]; } } async parseToDocumentLink(document_link) { try { // Implement document parsing logic here // This is a placeholder implementation const response = await axios.post(config.llamaParseUrl || '', { url: document_link, api_key: config.llamaApiKey, model: config.vendorModel }); return { success: true, md_content: response.data.content }; } catch (error) { logger.error("Error in parseToDocumentLink:", error); return { success: false }; } } async createUpdateCasefile(arguments_) { const { imo, content, casefile, session_id, user_id } = arguments_; if (!imo) throw new Error("IMO is required"); if (!content) throw new Error("content is required"); if (!casefile) throw new Error("casefile is required"); if (!session_id) throw new Error("session_id is required"); try { const prompt = await this.getPrompt("casefilewriter"); if (!prompt) { throw new Error("Failed to load prompt from database"); } const formatInstructions = ` Respond in the following JSON format: { "content": "<rewritten or cleaned summarized version of the raw content>", "topic": "<short summary of the case>", "flag": "<value of the flag generated by LLM", "importance": "<low/medium/high>" } `.trim(); const systemMessage = `${prompt}\n\n${formatInstructions}`; const userMessage = `Casefile: ${casefile}\n\nRaw Content: ${content}`; const llmClient = new LLMClient(config.openaiApiKey || ''); const result = await llmClient.ask({ query: userMessage, systemPrompt: systemMessage, modelName: "gpt-4", jsonMode: true, temperature: 0 }); // Validate output keys if (!this.validateLLMResponse(result)) { throw new Error(`Invalid LLM response format: ${JSON.stringify(result)}`); } const { topic, content: processedContent, flag, importance } = result; // Get existing links const db = await getMongoClient(config.mongoUri); const linkDocument = await db.db(config.dbName).collection("casefile_data") .findOne({ sessionId: session_id }, { projection: { links: 1, _id: 0 } }); const existingLinks = linkDocument?.links || []; existingLinks.forEach((entry) => delete entry.synergy_link); // Generate HTML content link const contentLink = await this.generateHtmlAndGetFinalLink(processedContent, imo); const links = (contentLink ? [{ link: contentLink, linkHeader: 'Answer Content' }] : []).concat(existingLinks); // Get vessel information const vesselDoc = await db.db(config.dbName).collection("vessels").findOne({ imo }) || {}; const vesselName = vesselDoc?.name || "Unknown Vessel"; // Create casefile document const casefileDoc = { imo, vesselName, topic, content: processedContent, flag, importance, links, sessionId: session_id, userId: user_id, createdAt: new Date(), updatedAt: new Date() }; // Update or insert casefile await db.db(config.dbName).collection("casefiles").updateOne({ imo, sessionId: session_id }, { $set: casefileDoc }, { upsert: true }); return [{ type: "text", text: JSON.stringify(casefileDoc, null, 2), title: "Casefile Updated" }]; } catch (error) { logger.error("Error in createUpdateCasefile:", error); throw error; } } async getPrompt(agentName) { try { const db = await getMongoClient(config.mongoUri); const document = await db.db(config.dbName).collection("mcp_agent_store") .findOne({ name: agentName }, { projection: { answerprompt: 1, _id: 0 } }); return document?.answerprompt || "get the relevant response based on the task"; } catch (error) { logger.error(`Error accessing MongoDB in get_prompt: ${error}`); return null; } } async generateHtmlAndGetFinalLink(body, imo) { const headers = { 'Authorization': `Bearer ${this.S3_API_TOKEN}`, 'Content-Type': 'application/json' }; const currentUnixTime = Math.floor(Date.now() / 1000); const filename = `answer_content_${imo}_${currentUnixTime}`; const payload = { type: "reports", fileName: filename, body }; try { const response = await axios.post(this.S3_GENERATE_HTML_URL, payload, { headers }); return response.data.url; } catch (error) { logger.error("Failed to generate HTML:", error); return null; } } validateLLMResponse(response) { const requiredKeys = ["content", "topic", "flag", "importance"]; return requiredKeys.every(key => key in response); } async googleSearch(args) { const query = args.query; if (!query) { throw new Error("Search query is required"); } const url = "https://api.perplexity.ai/chat/completions"; const headers = { "Content-Type": "application/json", "Authorization": `Bearer ${process.env.PERPLEXITY_API_KEY}` }; const payload = { model: "sonar-reasoning-pro", messages: [ { role: "system", content: "You are an expert assistant helping with reasoning tasks." }, { role: "user", content: query } ], max_tokens: 2000, temperature: 0.2, top_p: 0.9, search_domain_filter: null, return_images: false, return_related_questions: false, search_recency_filter: "week", top_k: 0, stream: false, presence_penalty: 0, frequency_penalty: 1, response_format: null }; try { const response = await axios.post(url, payload, { headers, timeout: 100000 // 100 seconds timeout }); if (response.status === 200) { const result = response.data; const citations = result.citations || []; const content = result.choices[0].message.content; const responseText = `Response: ${content}\n\nCitations: ${JSON.stringify(citations, null, 2)}`; return [{ type: "text", text: responseText }]; } else { const errorText = `Error: ${response.status}, ${response.data}`; return [{ type: "text", text: errorText }]; } } catch (error) { logger.error("Failure to execute the search operation:", error); throw error; } } async findRelevantVendors(args) { const collection = "vendor4"; // Hardcoded as per Python implementation const session_id = args.session_id || "testing"; try { // Ensure all required keys are present (even if undefined) const searchArgs = { vendorName: args.vendorName, service: args.service, locationRegion: args.locationRegion }; // Query vendor search const results = await this.queryVendorSearch(searchArgs); // Get data link const dataLink = await this.getDataLink(results); // Insert data link to MongoDB const linkHeader = "vendor search results"; await this.insertDataLinkToMongoDB(dataLink, linkHeader, session_id); // Format results as JSON const resultsText = JSON.stringify(results, null, 2); // Get artifact data const artifactData = await this.getArtifact("vendor_search", dataLink); return [ { type: "text", text: resultsText, title: "Vendor Search Results", format: "json" }, { type: "text", text: JSON.stringify(artifactData, null, 2), title: "Vendor Search Results", format: "json" } ]; } catch (error) { logger.error(`Error retrieving stats for collection ${collection}:`, error); throw new Error(`Error retrieving collection stats: ${error}`); } } async queryVendorSearch(args) { try { const searchParameters = { q: args.vendorName || '*', query_by: 'vendorName,service,locationRegion', filter_by: this.buildVendorFilter(args), per_page: 250, sort_by: 'vendorName:asc' }; const response = await this.typesenseClient .collections('vendor4') .documents() .search(searchParameters); return response; } catch (error) { logger.error("Error in vendor search:", error); throw error; } } buildVendorFilter(args) { const filters = []; if (args.service) { filters.push(`service:${args.service}`); } if (args.locationRegion) { filters.push(`locationRegion:${args.locationRegion}`); } return filters.join(' && ') || '*'; } async getDataLink(results) { // Implementation depends on your data link generation logic return JSON.stringify(results); } async insertDataLinkToMongoDB(dataLink, linkHeader, sessionId, imo, vesselName) { try { const db = await getMongoClient(config.mongoUri); const collection = db.db(config.dbName).collection("casefile_data"); await collection.updateOne({ sessionId }, { $push: { links: { link: dataLink, linkHeader, imo, vesselName } } }, { upsert: true }); } catch (error) { logger.error("Error inserting data link to MongoDB:", error); throw error; } } async getArtifact(type, dataLink) { // Implementation depends on your artifact generation logic return { type, dataLink, timestamp: new Date().toISOString() }; } async getAllVesselPurchaseRequisitions(args) { const collection = "purchase"; const session_id = args.session_id || "testing"; const imo = args.imo; const start_date = args.start_date; const end_date = args.end_date; if (!imo) { throw new Error("IMO is required"); } try { // Compose filter_by string from inputs const filterParts = [`imo:=${imo}`]; if (start_date) { const startTs = Math.floor(new Date(start_date).getTime() / 1000); filterParts.push(`purchaseRequisitionDate:>=${startTs}`); } if (end_date) { const endTs = Math.floor(new Date(end_date).getTime() / 1000); filterParts.push(`purchaseRequisitionDate:<=${endTs}`); } const filterBy = filterParts.join(" && "); // Build query for export const query = { filter_by: filterBy }; logger.debug("[Typesense Query]", query); const exportResult = await this.typesenseClient .collections(collection) .documents() .export(query); // Process the export result const exportText = typeof exportResult === 'string' ? exportResult : Buffer.from(exportResult).toString('utf-8'); const documents = exportText .split('\n') .filter(line => line.trim()) .map(line => JSON.parse(line)); const vesselName = documents[0]?.vesselName; const imoExtracted = documents[0]?.imo; // Get data link and insert to MongoDB const dataLink = await this.getDataLink(documents); const linkHeader = `Purchase requisition export for IMO ${imo}`; await this.insertDataLinkToMongoDB(dataLink, linkHeader, session_id, imo, vesselName); // Format results const formattedResults = { found: documents.length, out_of: documents.length, page: 1, hits: documents }; // Get artifact data const artifactData = await this.getArtifact("get_all_vessel_purchase_requisitions", dataLink); return [ { type: "text", text: JSON.stringify(formattedResults, null, 2), title: `Exported purchase requisitions for IMO ${imo}`, format: "json" }, { type: "text", text: JSON.stringify(artifactData, null, 2), title: `Purchase requisition export artifact for IMO ${imo}`, format: "json" } ]; } catch (error) { logger.error("Error executing purchase requisition export:", error); throw new Error(`Error exporting purchase requisitions: ${error}`); } } async getVesselExpenseData(args) { const collection = "expense"; const session_id = args.session_id || "testing"; const imo = args.imo; const start_date = args.start_date; const end_date = args.end_date; const excludeFieldsStr = "_id,docId,fleetId,vesselId,fleetManagerId,technicalSuperintendentId"; if (!imo) { throw new Error("IMO is required"); } try { // Compose filter_by string from inputs const filterParts = [`imo:=${imo}`]; if (start_date) { const startTs = Math.floor(new Date(start_date).getTime() / 1000); filterParts.push(`expenseDate:>=${startTs}`); } if (end_date) { const endTs = Math.floor(new Date(end_date).getTime() / 1000); filterParts.push(`expenseDate:<=${endTs}`); } const filterBy = filterParts.join(" && "); // Build query for export const query = { filter_by: filterBy, exclude_fields: excludeFieldsStr }; logger.debug("[Typesense Query]", query); const exportResult = await this.typesenseClient .collections(collection) .documents() .export(query); // Process the export result const exportText = typeof exportResult === 'string' ? exportResult : Buffer.from(exportResult).toString('utf-8'); const documents = exportText .split('\n') .filter(line => line.trim()) .map(line => JSON.parse(line)); const vesselName = documents[0]?.vesselName; const imoExtracted = documents[0]?.imo; // Get data link and insert to MongoDB const dataLink = await this.getDataLink(documents); const linkHeader = `Expense export for IMO ${imo}`; await this.insertDataLinkToMongoDB(dataLink, linkHeader, session_id, imo, vesselName); // Format results const formattedResults = { found: documents.length, out_of: documents.length, page: 1, hits: documents }; // Get artifact data const artifactData = await this.getArtifact("get_vessel_expense_data", dataLink); return [ { type: "text", text: JSON.stringify(formattedResults, null, 2), title: `Exported expense records for IMO ${imo}`, format: "json" }, { type: "text", text: JSON.stringify(artifactData, null, 2), title: `Expense export artifact for IMO ${imo}`, format: "json" } ]; } catch (error) { logger.error("Error executing expense export:", error); throw new Error(`Error exporting expense records: ${error}`); } } async getCompleteVesselBudgetData(args) { const collection = "budget"; const session_id = args.session_id || "testing"; const imo = args.imo; const start_date = args.start_date; const end_date = args.end_date; const excludeFieldsStr = "_id,docId,fleetId,vesselId,fleetManagerId,technicalSuperintendentId"; if (!imo) { throw new Error("IMO is required"); } try { // Compose filter_by string from inputs const filterParts = [`imo:=${imo}`]; if (start_date) { const startTs = Math.floor(new Date(start_date).getTime() / 1000); filterParts.push(`date:>=${startTs}`); } if (end_date) { const endTs = Math.floor(new Date(end_date).getTime() / 1000); filterParts.push(`date:<=${endTs}`); } const filterBy = filterParts.join(" && "); // Build query for export const query = { filter_by: filterBy, exclude_fields: excludeFieldsStr }; logger.debug("[Typesense Query]", query); const exportResult = await this.typesenseClient .collections(collection) .documents() .export(query); // Process the export result const exportText = typeof exportResult === 'string' ? exportResult : Buffer.from(exportResult).toString('utf-8'); const documents = exportText .split('\n') .filter(line => line.trim()) .map(line => JSON.parse(line)); const vesselName = documents[0]?.vesselName; const imoExtracted = documents[0]?.imo; // Get data link and insert to MongoDB const dataLink = await this.getDataLink(documents); const linkHeader = `Budget export for IMO ${imo}`; await this.insertDataLinkToMongoDB(dataLink, linkHeader, session_id, imo, vesselName); // Format results const formattedResults = { found: documents.length, out_of: documents.length, page: 1, hits: documents }; // Get artifact data const artifactData = await this.getArtifact("get_complete_vessel_budget_data", dataLink); return [ { type: "text", text: JSON.stringify(formattedResults, null, 2), title: `Exported budget records for IMO ${imo}`, format: "json" }, { type: "text", text: JSON.stringify(artifactData, null, 2), title: `Budget export artifact for IMO ${imo}`, format: "json" } ]; } catch (error) { logger.error("Error executing budget export:", error); throw new Error(`Error exporting budget records: ${error}`); } } async writeCasefileData(args) { const operation = args.operation; if (!operation) { throw new Error("Operation is required"); } try { switch (operation) { case "write_casefile": return await this.createCasefile(args); case "write_page": return await this.updateCasefile(args); default: throw new Error(`Unsupported operation for writeCasefileData: '${operation}'`); } } catch (error) { logger.error("Error in writeCasefileData:", error); throw error; } } async createCasefile(args) { const { casefileName, casefileSummary, currentStatus, importance = 0, category = "purchase", role, imo } = args; let vesselName = null; let vesselId = null; if (imo) { const vesselInfo = await this.getVesselName(imo); vesselName = vesselInfo.vesselName; vesselId = vesselInfo.vesselId; } const db = await getMongoClient(config.mongoUri); const collection = db.db(config.dbName).collection("casefiles"); const data = { vesselId, imo, vesselName, casefile: casefileName, currentStatus, summary: casefileSummary, originalImportance: importance, importance, category, role, followUp: "", createdAt: new Date(), updatedAt: new Date(), index: [], pages: [] }; logger.info(data); const result = await collection.insertOne(data); logger.info(result); const casefileId = result.insertedId.toString(); const casefileUrl = await this.generateCasefileWeblink(casefileId); await collection.updateOne({ _id: result.insertedId }, { $set: { link: casefileUrl } }); // Typesense update const typesenseData = { ...data }; delete typesenseData.index; delete typesenseData.pages; delete typesenseData._id; typesenseData.id = casefileId; typesenseData.vesselId = vesselId?.toString() || null; try { logger.info(typesenseData); await this.pushToTypesense(typesenseData, "create"); return [{ type: "text", text: `Casefile created with casefile url: ${casefileUrl}`, title: "create casefile" }]; } catch (error) { logger.error("Error creating casefile:", error); throw error; } } async updateCasefile(args) { const { casefile_url, casefileSummary, importance, tags = [], topic, summary, mailId, currentStatus, casefileName, facts, links = [], detailed_report = "" } = args; if (!casefile_url) { throw new Error("Casefile URL is required"); } const db = await getMongoClient(config.mongoUri); const collection = db.db(config.dbName).collection("casefiles"); let casefileId = casefile_url; if (!this.isValidObjectId(casefileId || "")) { casefileId = await this.linkToId(casefile_url || ""); } // Normalize tags const normalizedTags = Array.isArray(tags) ? tags : [tags]; // Process facts and summary const processedSummary = facts ? `${summary} <br> ${facts}` : summary; // Prepare links const processedLinks = [ { link: await this.markdownToHtmlLink(detailed_report) }, ...links.map((link) => ({ link })) ]; // Build update pipeline const updatePipeline = []; // Stage 1: Base field updates const setStage = { updatedAt: new Date() }; if (casefileName !== undefined) setStage.casefile = casefileName; if (currentStatus !== undefined) setStage.currentStatus = currentStatus; if (casefileSummary !== undefined) setStage.summary = casefileSummary; if (importance !== undefined) setStage.importance = importance; if (Object.keys(setStage).length > 1) { updatePipeline.push({ $set: setStage }); } // Stage 2: Ensure arrays exist and compute next page number updatePipeline.push({ $set: { pages: { $ifNull: ["$pages", []] }, index: { $ifNull: ["$index", []] }, _nextPageNum: { $add: [ { $max: [ { $ifNull: [{ $max: "$pages.pagenum" }, 0] }, { $ifNull: [{ $max: "$index.pagenum" }, 0] } ] }, 1 ] } } }); // Stage 3: Update tags if (normalizedTags.length > 0) { updatePipeline.push({ $set: { tags: { $setUnion: [ { $ifNull: ["$tags", []] }, normalizedTags ] } } }); } // Stage 4: Append to pages and index updatePipeline.push({ $set: { pages: { $concatArrays: [ "$pages", [{ pagenum: "$_nextPageNum", summary: processedSummary, createdAt: new Date(), subject: topic, flag: topic, type: "QA_Agent", link: processedLinks, plan_status: "unprocessed" }] ] }, index: { $concatArrays: [ "$index", [{ pagenum: "$_nextPageNum", type: "QA_Agent", createdAt: new Date(), topic, plan_status: "unprocessed" }] ] } } }); // Stage 5: Cleanup updatePipeline.push({ $unset: "_nextPageNum" }); // Execute update await collection.updateOne({ _id: new ObjectId(casefileId) }, updatePipeline); // Typesense update try { const mongoResult = await collection.findOne({ _id: new ObjectId(casefileId) }); const updateFields = { id: casefileId, summary: mongoResult?.summary, originalImportance: mongoResult?.originalImportance, importance: mongoResult?.importance || 0, plan_status: mongoResult?.plan_status, tag: mongoResult?.tag, createdAt: mongoResult?.createdAt, updatedAt: mongoResult?.updatedAt, casefile: mongoResult?.casefile, currentStatus: mongoResult?.currentStatus, vesselId: mongoResult?.vesselId?.toString(), imo: mongoResult?.imo, vesselName: mongoResult?.vesselName, category: mongoResult?.category, conversationTopic: mongoResult?.conversationTopic, role: mongoResult?.role, followUp: mongoResult?.followUp || "" }; logger.info(updateFields); await this.pushToTypesense(updateFields, "upsert"); return [{ type: "text", text: `Casefile updated with casefile url: ${casefile_url}`, title: "update casefile" }]; } catch (error) { logger.error("Error updating casefile:", error); throw error; } } async getVesselName(imo) { const db = await getMongoClient(config.mongoUri); const vessel = await db.db(config.dbName).collection("vessels").findOne({ imo }); return { vesselName: vessel?.name || null, vesselId: vessel?._id?.toString() || null }; } async generateCasefileWeblink(casefileId) { // Use a default URL if config.baseUrl is not available const baseUrl = config.baseUrl || "http://localhost:3000"; return `${baseUrl}/casefile/${casefileId}`; } async linkToId(link) { const db = await getMongoClient(config.mongoUri); const casefile = await db.db(config.dbName).collection("casefiles").findOne({ link }); return casefile?._id?.toString() || ""; } async markdownToHtmlLink(markdown) { // Implement your markdown to HTML link conversion logic here return markdown; } isValidObjectId(id) { return /^[0-9a-fA-F]{24}$/.test(id); } async pushToTypesense(data, action) { // Implement your Typesense push logic here await this.typesenseClient .collections("emailCasefile") .documents() .import([data], { action }); } async getCasefiles(args) { const query = args.query; const imo = args.imo; const minImportance = args.min_importance || 0; const pageSize = args.page_size || 10; const pagination = args.pagination || 1; try { // Build filter_by array const filterBy = []; if (imo) { filterBy.push(`imo:${imo}`); } if (minImportance) { filterBy.push(`importance_score:>${minImportance}`); } filterBy.push("category:purchase"); // Join filter conditions const filterByString = filterBy.join("&&"); // Build Typesense query const typesenseQuery = query ? { q: query, query_by: "embedding_text", per_page: pageSize, exclude_fields: "embedding", prefix: false, filter_by: filterByString, page: pagination } : { q: "*", query_by: "embedding, embedding_text", per_page: pageSize, exclude_fields: "embedding", prefix: false, filter_by: filterByString, page: pagination }; // Execute search const result = await this.typesenseClient .collections("emailCasefile") .documents() .search(typesenseQuery); // Format results const formattedResult = result.hits.map((item) => ({ casefile_id: item.document.id, casefile_name: item.document.casefile, current_status: item.document.currentStatus || "", summary: item.document.summary, importance: item.document.importance_score, casefile_url: item.document.link })); return [{ type: "text", text: JSON.stringify(formattedResult, null, 2), title: "Casefile Search Results" }]; } catch (error) { logger.error("Error searching casefiles:", error); throw error; } } async getCasefilePlan(args) { const casefileUrl = args.casefile_url; try { let casefileId = casefileUrl; if (!this.isValidObjectId(casefileId || "")) { casefileId = await this.linkToId(casefileUrl || ""); } const db = await getMongoClient(config.mongoUri); const collection = db.db(config.dbName).collection("casefiles"); // Check if casefile exists const casefile = await collection.findOne({ _id: new ObjectId(casefileId) }); if (!casefile) { return [{ type: "text", text: `Casefile ${casefileUrl} not found` }]; } // Get the latest entry in casefilePlans array const pipeline = [ { $match: { _id: new ObjectId(casefileId) } }, { $project: { _id: 0, latest_plan: { $arrayElemAt: ["$casefilePlans", -1] } } } ]; const results = await collection.aggregate(pipeline).toArray(); return [{ type: "text", text: JSON.stringify(results, null, 2), title: `Casefile Plans for ${casefileUrl}` }]; } catch (error) { logger.error("Error getting casefile plans:", error); return [{ type: "text", text: `Error getting casefile plans: ${error}` }]; } } } //# sourceMappingURL=documentTools.js.map