UNPKG

@guyycodes/plugin-sdk

Version:

AI-powered plugin scaffolding tool — create full-stack applications with 7+ AI models, 50+ business integrations, and production-ready infrastructure

1,550 lines (1,307 loc) 237 kB
// Integrations.js
// Scaffolds the backend integration layer: category directories, Python
// package markers, and one example integration module per supported service.
const fs = require('fs-extra');
const path = require('path');
const chalk = require('chalk');

/**
 * Create the `integrations/` package tree under the generated server.
 *
 * Ensures the category directories exist, writes `__init__.py` package
 * markers, then emits one example integration module per service and a
 * README for the whole package.
 *
 * @param {string} serverPath - Root directory of the generated server.
 * @returns {Promise<void>} Resolves after every file has been written.
 */
async function createIntegrationsFiles(serverPath) {
  console.log(chalk.blue('🔌 Creating integration files...'));

  // Create main integrations directory
  const integrationsPath = path.join(serverPath, 'integrations');
  fs.ensureDirSync(integrationsPath);

  // One sub-package per business category.
  const categories = [
    'accounting',
    'business_intelligence',
    'collaboration',
    'crm',
    'marketing',
    'model_context_protocol',
    'payments',
    'platforms',
    'scheduling',
    'storefront'
  ];
  categories.forEach((category) => {
    fs.ensureDirSync(path.join(integrationsPath, category));
  });

  // Create __init__.py files.
  // BUG FIX: the original used `categories.forEach(async ...)`, which
  // discards the returned promises, so the success log below could run
  // before the category inits had completed. A for...of awaits each one.
  await createIntegrationsInit(integrationsPath);
  for (const category of categories) {
    await createCategoryInit(path.join(integrationsPath, category));
  }

  // Create integration files
  await createQuickBooksIntegration(path.join(integrationsPath, 'accounting'));
  await createLookerStudioIntegration(path.join(integrationsPath, 'business_intelligence'));
  await createSlackIntegration(path.join(integrationsPath, 'collaboration'));
  await createHubSpotIntegration(path.join(integrationsPath, 'crm'));
  await createMailchimpIntegration(path.join(integrationsPath, 'marketing'));
  await createMcpIntegration(path.join(integrationsPath, 'model_context_protocol'));
  await createSquareIntegration(path.join(integrationsPath, 'payments'));
  await createStripeIntegration(path.join(integrationsPath, 'payments'));
  await createSimplePracticeIntegration(path.join(integrationsPath, 'platforms'));
  await createTherapyNotesIntegration(path.join(integrationsPath, 'platforms'));
  await createCalendlyIntegration(path.join(integrationsPath, 'scheduling'));
  await createShopifyIntegration(path.join(integrationsPath, 'storefront'));

  // Create README
  await createIntegrationsReadme(integrationsPath);

  console.log(chalk.green('✅ Integration files created successfully'));
}

/**
 * Write the top-level `integrations/__init__.py` package marker.
 *
 * @param {string} integrationsPath - Path to the `integrations/` directory.
 */
async function createIntegrationsInit(integrationsPath) {
  const initPy = `"""
Small Business Integrations Package
Provides integration tools for various business platforms and services
"""`;
  fs.writeFileSync(path.join(integrationsPath, '__init__.py'), initPy);
}

/**
 * Write an `__init__.py` package marker for one category directory.
 *
 * @param {string} categoryPath - Path to a single category directory.
 */
async function createCategoryInit(categoryPath) {
  const initPy = `"""
Integration category package
"""`;
  fs.writeFileSync(path.join(categoryPath, '__init__.py'), initPy);
}

/**
 * Write the QuickBooks example integration module.
 *
 * The generated Python module exposes LangChain tools for searching
 * QuickBooks entities and stubs for invoice creation / balance lookup.
 *
 * @param {string} accountingPath - Path to `integrations/accounting/`.
 */
async function createQuickBooksIntegration(accountingPath) {
  const quickbooksPy = `"""
QuickBooks Integration Tool
Example of how to integrate QuickBooks with the agent system
"""
import os
import json
from typing import Dict, Any, Optional
from langchain_core.tools import tool
import aiohttp

# QuickBooks OAuth configuration
QB_CLIENT_ID = os.getenv("QUICKBOOKS_CLIENT_ID")
QB_CLIENT_SECRET = os.getenv("QUICKBOOKS_CLIENT_SECRET")
QB_REDIRECT_URI = os.getenv("QUICKBOOKS_REDIRECT_URI", "http://localhost:3000/quickbooks/callback")
QB_ENVIRONMENT = os.getenv("QUICKBOOKS_ENVIRONMENT", "sandbox")  # 'sandbox' or 'production'
# Realm/company ID issued during OAuth -- required by every QBO endpoint.
# (BUG FIX: the original template referenced an undefined \`company_id\`.)
QB_COMPANY_ID = os.getenv("QUICKBOOKS_COMPANY_ID")

# Base URLs
QB_BASE_URL = "https://sandbox-quickbooks.api.intuit.com" if QB_ENVIRONMENT == "sandbox" else "https://quickbooks.api.intuit.com"


@tool
async def quickbooks_search(query: str, entity_type: str = "all") -> str:
    """
    Search QuickBooks for customers, invoices, payments, or other entities.

    Args:
        query: Search term (e.g., customer name, invoice number)
        entity_type: Type of entity to search ('customer', 'invoice', 'payment', 'all')
    """
    try:
        # In a real implementation, you'd need OAuth token management
        access_token = await get_quickbooks_token()
        if not access_token:
            return json.dumps({
                "error": "QuickBooks not connected. Please authenticate first.",
                "auth_url": "/quickbooks/auth"
            })
        if not QB_COMPANY_ID:
            return json.dumps({
                "error": "QuickBooks company ID not configured. Set QUICKBOOKS_COMPANY_ID."
            })

        headers = {
            "Authorization": f"Bearer {access_token}",
            "Accept": "application/json"
        }

        # QBO's query language has no parameter binding; strip single quotes
        # so user input cannot break out of the LIKE string literal.
        safe_query = query.replace("'", "")

        # Example search endpoints
        search_results = []
        async with aiohttp.ClientSession() as session:
            if entity_type in ["customer", "all"]:
                # Search customers
                customer_query = f"SELECT * FROM Customer WHERE DisplayName LIKE '%{safe_query}%'"
                async with session.get(
                    f"{QB_BASE_URL}/v3/company/{QB_COMPANY_ID}/query",
                    params={"query": customer_query},
                    headers=headers
                ) as resp:
                    if resp.status == 200:
                        data = await resp.json()
                        search_results.extend([{
                            "type": "customer",
                            "id": c["Id"],
                            "name": c["DisplayName"],
                            "balance": c.get("Balance", 0)
                        } for c in data.get("QueryResponse", {}).get("Customer", [])])

            if entity_type in ["invoice", "all"]:
                # Search invoices
                invoice_query = f"SELECT * FROM Invoice WHERE DocNumber LIKE '%{safe_query}%'"
                # Similar API call...

        return json.dumps({
            "query": query,
            "results": search_results,
            "count": len(search_results)
        }, indent=2)

    except Exception as e:
        return json.dumps({
            "error": f"QuickBooks search failed: {str(e)}"
        })


@tool
def quickbooks_create_invoice(customer_id: str, line_items: list, due_date: str = None) -> str:
    """
    Create a new invoice in QuickBooks.

    Args:
        customer_id: QuickBooks customer ID
        line_items: List of items with description, amount, quantity
        due_date: Optional due date (YYYY-MM-DD format)
    """
    # Implementation would create invoice via API
    pass


@tool
def quickbooks_get_balance(customer_id: str) -> str:
    """
    Get customer balance and recent transactions from QuickBooks.

    Args:
        customer_id: QuickBooks customer ID
    """
    # Implementation would fetch customer financial data
    pass


# OAuth helper functions
async def get_quickbooks_token() -> Optional[str]:
    """Get stored QuickBooks access token"""
    # In production, implement proper OAuth token storage/refresh
    return os.getenv("QUICKBOOKS_ACCESS_TOKEN")


# Add to your tools.py:
# from tools.quickbooks_integration import quickbooks_search, quickbooks_create_invoice, quickbooks_get_balance
#
# AVAILABLE_TOOLS = {
#     "web_search": web_search,
#     "calculator": calculator,
#     "quickbooks_search": quickbooks_search,
#     "quickbooks_create_invoice": quickbooks_create_invoice,
#     "quickbooks_get_balance": quickbooks_get_balance,
# }
`;
  fs.writeFileSync(path.join(accountingPath, 'quickbooks_integration.py'), quickbooksPy);
}
/**
 * Write the Looker Studio (Google Data Studio) example integration module.
 *
 * The generated Python module wraps the Google Drive/Sheets APIs (Looker
 * Studio itself exposes only a limited public API) and provides tools for
 * report listing, copying, sharing, data-source helpers, export guidance,
 * dashboard templates, email-delivery scheduling, and BigQuery connection.
 *
 * @param {string} biPath - Path to `integrations/business_intelligence/`.
 */
async function createLookerStudioIntegration(biPath) {
  // NOTE(review): this template imports \`tool\` from a local \`tools\` module
  // while the QuickBooks template uses \`langchain_core.tools\` -- confirm
  // which decorator source the generated project actually provides.
  const lookerPy = `"""
Looker Studio (Google Data Studio) Integration for Small Business Analytics

This module provides integration with Google's Looker Studio API for
managing data sources, reports, and analytics dashboards.

Requirements:
    pip install google-api-python-client google-auth google-auth-oauthlib google-auth-httplib2
"""

import os
import logging
from typing import Dict, List, Optional, Any, Union
from datetime import datetime, timedelta
import json

from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google.oauth2.credentials import Credentials
from google.oauth2 import service_account

from tools import tool

logger = logging.getLogger(__name__)

# Google API configuration
GOOGLE_SERVICE_ACCOUNT_FILE = os.getenv("GOOGLE_SERVICE_ACCOUNT_FILE")
GOOGLE_OAUTH_CREDENTIALS = os.getenv("GOOGLE_OAUTH_CREDENTIALS")

LOOKER_STUDIO_SCOPES = [
    'https://www.googleapis.com/auth/datastudio',
    'https://www.googleapis.com/auth/drive',
    'https://www.googleapis.com/auth/spreadsheets',
    'https://www.googleapis.com/auth/analytics.readonly'
]


def get_credentials():
    """Get Google credentials for API access"""
    if GOOGLE_SERVICE_ACCOUNT_FILE and os.path.exists(GOOGLE_SERVICE_ACCOUNT_FILE):
        return service_account.Credentials.from_service_account_file(
            GOOGLE_SERVICE_ACCOUNT_FILE, scopes=LOOKER_STUDIO_SCOPES
        )
    elif GOOGLE_OAUTH_CREDENTIALS:
        # Parse OAuth credentials from environment variable
        creds_data = json.loads(GOOGLE_OAUTH_CREDENTIALS)
        return Credentials.from_authorized_user_info(creds_data, LOOKER_STUDIO_SCOPES)
    else:
        logger.warning("Google credentials not configured")
        return None


# Initialize Google services
try:
    credentials = get_credentials()
    if credentials:
        drive_service = build('drive', 'v3', credentials=credentials)
        sheets_service = build('sheets', 'v4', credentials=credentials)
        # Note: Looker Studio API is limited, most operations use Drive API
    else:
        drive_service = None
        sheets_service = None
except Exception as e:
    logger.error(f"Failed to initialize Google services: {e}")
    drive_service = None
    sheets_service = None


# --- Report Management ---

@tool
def looker_studio_list_reports(
    folder_id: Optional[str] = None,
    limit: int = 20
) -> Dict[str, Any]:
    """
    List Looker Studio reports accessible to the user

    Args:
        folder_id: Optional folder ID to search within
        limit: Maximum number of reports to return

    Returns:
        Dict containing report information
    """
    try:
        if not drive_service:
            return {"error": "Google Drive not initialized. Please configure credentials."}

        # Looker Studio reports are stored as Google Drive files
        query = "mimeType='application/vnd.google-analytics.data-source' or mimeType='application/vnd.google.datastudio'"
        if folder_id:
            query += f" and '{folder_id}' in parents"

        response = drive_service.files().list(
            q=query,
            pageSize=limit,
            fields="files(id, name, modifiedTime, createdTime, owners, webViewLink, description)"
        ).execute()

        reports = []
        for file in response.get('files', []):
            reports.append({
                "id": file['id'],
                "name": file['name'],
                "created_time": file.get('createdTime'),
                "modified_time": file.get('modifiedTime'),
                "owner": file.get('owners', [{}])[0].get('displayName', 'Unknown'),
                "web_view_link": file.get('webViewLink'),
                "description": file.get('description', '')
            })

        return {
            "reports": reports,
            "count": len(reports)
        }

    except HttpError as e:
        logger.error(f"Google API error: {e}")
        return {"error": f"Google API error: {str(e)}"}


@tool
def looker_studio_copy_report(
    report_id: str,
    new_name: str,
    folder_id: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create a copy of an existing Looker Studio report

    Args:
        report_id: ID of the report to copy
        new_name: Name for the new report
        folder_id: Optional folder to place the copy in

    Returns:
        Dict containing new report information
    """
    try:
        if not drive_service:
            return {"error": "Google Drive not initialized. Please configure credentials."}

        copy_metadata = {
            'name': new_name
        }
        if folder_id:
            copy_metadata['parents'] = [folder_id]

        response = drive_service.files().copy(
            fileId=report_id,
            body=copy_metadata
        ).execute()

        return {
            "id": response['id'],
            "name": response['name'],
            "web_view_link": f"https://lookerstudio.google.com/reporting/{response['id']}",
            "created": True
        }

    except HttpError as e:
        logger.error(f"Google API error: {e}")
        return {"error": f"Google API error: {str(e)}"}


@tool
def looker_studio_share_report(
    report_id: str,
    email: str,
    role: str = "reader",
    send_notification: bool = True,
    message: Optional[str] = None
) -> Dict[str, Any]:
    """
    Share a Looker Studio report with another user

    Args:
        report_id: ID of the report to share
        email: Email address to share with
        role: Permission role (reader, writer, commenter)
        send_notification: Whether to send email notification
        message: Optional message to include in notification

    Returns:
        Dict containing sharing status
    """
    try:
        if not drive_service:
            return {"error": "Google Drive not initialized. Please configure credentials."}

        permission = {
            'type': 'user',
            'role': role,
            'emailAddress': email
        }

        request_body = {}
        if send_notification:
            request_body['sendNotificationEmail'] = True
            if message:
                request_body['emailMessage'] = message

        response = drive_service.permissions().create(
            fileId=report_id,
            body=permission,
            **request_body
        ).execute()

        return {
            "permission_id": response['id'],
            "email": email,
            "role": role,
            "shared": True
        }

    except HttpError as e:
        logger.error(f"Google API error: {e}")
        return {"error": f"Google API error: {str(e)}"}


# --- Data Source Management ---

@tool
def looker_studio_create_sheets_datasource(
    spreadsheet_id: str,
    sheet_name: str,
    datasource_name: str,
    description: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create a data source from a Google Sheets spreadsheet

    Args:
        spreadsheet_id: ID of the Google Sheets spreadsheet
        sheet_name: Name of the sheet to use as data source
        datasource_name: Name for the data source in Looker Studio
        description: Optional description

    Returns:
        Dict containing data source information
    """
    try:
        if not sheets_service:
            return {"error": "Google Sheets not initialized. Please configure credentials."}

        # First, verify the sheet exists and get its structure
        sheet_metadata = sheets_service.spreadsheets().get(
            spreadsheetId=spreadsheet_id
        ).execute()

        sheet_found = False
        for sheet in sheet_metadata.get('sheets', []):
            if sheet['properties']['title'] == sheet_name:
                sheet_found = True
                break

        if not sheet_found:
            return {"error": f"Sheet '{sheet_name}' not found in spreadsheet"}

        # Get data range to understand structure
        range_name = f"{sheet_name}!A1:Z1"
        result = sheets_service.spreadsheets().values().get(
            spreadsheetId=spreadsheet_id,
            range=range_name
        ).execute()

        headers = result.get('values', [[]])[0]

        return {
            "message": "Data source connection ready",
            "spreadsheet_id": spreadsheet_id,
            "sheet_name": sheet_name,
            "datasource_name": datasource_name,
            "fields": headers,
            "note": "Use this data source in Looker Studio by connecting to the Google Sheets file"
        }

    except HttpError as e:
        logger.error(f"Google API error: {e}")
        return {"error": f"Google API error: {str(e)}"}


@tool
def looker_studio_refresh_data_source(
    report_id: str,
    datasource_name: Optional[str] = None
) -> Dict[str, Any]:
    """
    Trigger a refresh of data sources in a report

    Args:
        report_id: ID of the report
        datasource_name: Optional specific data source to refresh

    Returns:
        Dict containing refresh status
    """
    try:
        # Note: Direct data refresh API is limited
        # Reports auto-refresh when viewed or can be scheduled
        return {
            "message": "Data refresh triggered",
            "report_id": report_id,
            "datasource_name": datasource_name,
            "note": "Looker Studio automatically refreshes data when reports are viewed. For scheduled refreshes, configure in the Looker Studio UI."
        }

    except Exception as e:
        logger.error(f"Error: {e}")
        return {"error": f"Error: {str(e)}"}


# --- Analytics & Metrics ---

@tool
def looker_studio_export_report_data(
    report_id: str,
    format: str = "csv",
    page_number: Optional[int] = None
) -> Dict[str, Any]:
    """
    Export data from a Looker Studio report

    Args:
        report_id: ID of the report
        format: Export format (csv, pdf)
        page_number: Specific page to export (for multi-page reports)

    Returns:
        Dict containing export information
    """
    try:
        if not drive_service:
            return {"error": "Google Drive not initialized. Please configure credentials."}

        # Get report metadata
        file_metadata = drive_service.files().get(
            fileId=report_id,
            fields="name, modifiedTime, webViewLink"
        ).execute()

        export_info = {
            "report_id": report_id,
            "report_name": file_metadata.get('name'),
            "format": format,
            "export_url": f"https://lookerstudio.google.com/reporting/{report_id}/page/{page_number or 1}/export",
            "instructions": f"To export as {format.upper()}, visit the report and use File > Download as > {format.upper()}"
        }

        if format == "pdf":
            export_info["pdf_options"] = {
                "layout": "portrait/landscape",
                "size": "letter/A4",
                "include_link": True
            }

        return export_info

    except HttpError as e:
        logger.error(f"Google API error: {e}")
        return {"error": f"Google API error: {str(e)}"}


@tool
def looker_studio_create_calculated_field(
    field_name: str,
    formula: str,
    description: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create a calculated field formula for Looker Studio

    Args:
        field_name: Name for the calculated field
        formula: Formula expression (e.g., "Revenue - Cost")
        description: Optional description of the field

    Returns:
        Dict containing calculated field configuration
    """
    try:
        # Validate basic formula syntax
        valid_functions = [
            "SUM", "AVG", "COUNT", "COUNT_DISTINCT", "MAX", "MIN",
            "CONCAT", "REGEXP_EXTRACT", "REGEXP_REPLACE", "CAST",
            "DATE", "DATETIME", "PARSE_DATE", "FORMAT_DATETIME",
            "CASE", "WHEN", "THEN", "ELSE", "END",
            "IF", "AND", "OR", "NOT", "IN"
        ]

        calculated_field = {
            "name": field_name,
            "formula": formula,
            "description": description or f"Calculated field: {field_name}",
            "type": "calculated",
            "aggregation": "AUTO",
            "example_formulas": [
                "Revenue - Cost",
                "CONCAT(FirstName, ' ', LastName)",
                "CASE WHEN Revenue > 1000 THEN 'High' ELSE 'Low' END",
                "COUNT_DISTINCT(CustomerID)",
                "PARSE_DATE('%Y%m%d', DateString)"
            ],
            "supported_functions": valid_functions
        }

        return {
            "calculated_field": calculated_field,
            "instructions": "Add this calculated field in Looker Studio: Resource > Manage added data sources > Edit > Add a field"
        }

    except Exception as e:
        logger.error(f"Error: {e}")
        return {"error": f"Error: {str(e)}"}


# --- Dashboard Templates ---

@tool
def looker_studio_get_dashboard_templates(
    business_type: str = "general"
) -> Dict[str, Any]:
    """
    Get recommended dashboard templates for different business types

    Args:
        business_type: Type of business (retail, saas, ecommerce, service, general)

    Returns:
        Dict containing dashboard template recommendations
    """
    templates = {
        "retail": {
            "name": "Retail Analytics Dashboard",
            "sections": [
                {
                    "name": "Sales Overview",
                    "metrics": ["Total Revenue", "Units Sold", "Average Order Value", "YoY Growth"],
                    "charts": ["Time series", "Scorecard", "Geo map", "Bar chart by product"]
                },
                {
                    "name": "Inventory",
                    "metrics": ["Stock Levels", "Turnover Rate", "Out of Stock Items", "Reorder Points"],
                    "charts": ["Table", "Heat map", "Gauge chart"]
                },
                {
                    "name": "Customer Analytics",
                    "metrics": ["Customer Count", "Repeat Rate", "Customer Lifetime Value", "Acquisition Cost"],
                    "charts": ["Funnel", "Cohort analysis", "Pie chart by segment"]
                }
            ]
        },
        "saas": {
            "name": "SaaS Metrics Dashboard",
            "sections": [
                {
                    "name": "Revenue Metrics",
                    "metrics": ["MRR", "ARR", "Churn Rate", "Net Revenue Retention"],
                    "charts": ["Time series", "Waterfall chart", "Scorecard with trend"]
                },
                {
                    "name": "User Engagement",
                    "metrics": ["DAU/MAU", "Feature Adoption", "Session Duration", "User Actions"],
                    "charts": ["Line chart", "Heat map", "Histogram"]
                },
                {
                    "name": "Customer Success",
                    "metrics": ["NPS Score", "Support Tickets", "Resolution Time", "Health Score"],
                    "charts": ["Gauge", "Bar chart", "Scatter plot"]
                }
            ]
        },
        "ecommerce": {
            "name": "E-commerce Dashboard",
            "sections": [
                {
                    "name": "Sales Performance",
                    "metrics": ["Revenue", "Conversion Rate", "Cart Abandonment", "AOV"],
                    "charts": ["Time series", "Funnel", "Scorecard"]
                },
                {
                    "name": "Traffic Sources",
                    "metrics": ["Sessions by Source", "Bounce Rate", "Page Views", "User Flow"],
                    "charts": ["Pie chart", "Sankey diagram", "Table"]
                },
                {
                    "name": "Product Performance",
                    "metrics": ["Best Sellers", "Product Views", "Add to Cart Rate", "Revenue by Category"],
                    "charts": ["Bar chart", "Treemap", "Bubble chart"]
                }
            ]
        },
        "service": {
            "name": "Service Business Dashboard",
            "sections": [
                {
                    "name": "Bookings & Revenue",
                    "metrics": ["Bookings", "Revenue", "Utilization Rate", "Average Ticket"],
                    "charts": ["Calendar heat map", "Time series", "Scorecard"]
                },
                {
                    "name": "Client Management",
                    "metrics": ["Active Clients", "Retention Rate", "Client Satisfaction", "Referral Rate"],
                    "charts": ["Donut chart", "Bar chart", "Table with conditional formatting"]
                },
                {
                    "name": "Operations",
                    "metrics": ["Service Delivery Time", "Resource Utilization", "Cost per Service", "Profit Margin"],
                    "charts": ["Gantt chart", "Stacked bar", "Line chart"]
                }
            ]
        }
    }

    # BUG FIX: there is no "general" key and dict.get evaluates its default
    # eagerly, so the original expression raised KeyError for the default
    # business_type of "general". Fall back to the retail template instead.
    template = templates.get(business_type, templates["retail"])

    return {
        "template": template,
        "business_type": business_type,
        "setup_instructions": [
            "1. Create a new Looker Studio report",
            "2. Connect your data sources (Google Sheets, BigQuery, etc.)",
            "3. Add pages for each section",
            "4. Create the recommended charts and scorecards",
            "5. Apply filters for date range and other dimensions",
            "6. Set up scheduled email delivery if needed"
        ],
        "best_practices": [
            "Keep dashboards focused on key metrics",
            "Use consistent color schemes",
            "Add date range controls",
            "Include data freshness indicators",
            "Optimize for mobile viewing"
        ]
    }


# --- Scheduling & Automation ---

@tool
def looker_studio_schedule_email_delivery(
    report_id: str,
    recipients: Union[str, List[str]],
    frequency: str = "weekly",
    day_of_week: Optional[str] = None,
    time: str = "09:00",
    message: Optional[str] = None
) -> Dict[str, Any]:
    """
    Configure scheduled email delivery for a report

    Args:
        report_id: ID of the report
        recipients: Email address(es) to send to
        frequency: Delivery frequency (daily, weekly, monthly)
        day_of_week: Day for weekly delivery (monday-sunday)
        time: Time to send (HH:MM format)
        message: Optional message to include

    Returns:
        Dict containing schedule configuration
    """
    try:
        if isinstance(recipients, str):
            recipients = [recipients]

        schedule_config = {
            "report_id": report_id,
            "recipients": recipients,
            "frequency": frequency,
            "time": time,
            "enabled": True
        }

        if frequency == "weekly" and day_of_week:
            schedule_config["day_of_week"] = day_of_week
        elif frequency == "monthly":
            schedule_config["day_of_month"] = 1  # Default to first of month

        if message:
            schedule_config["message"] = message

        return {
            "schedule": schedule_config,
            "instructions": [
                "To set up email delivery in Looker Studio:",
                "1. Open the report",
                "2. Click Share > Schedule email delivery",
                "3. Add recipients and configure schedule",
                "4. Choose format (PDF attachment or link)",
                "5. Add optional message and save"
            ],
            "note": "Email delivery must be configured within Looker Studio UI"
        }

    except Exception as e:
        logger.error(f"Error: {e}")
        return {"error": f"Error: {str(e)}"}


# --- BigQuery Integration ---

@tool
def looker_studio_connect_bigquery(
    project_id: str,
    dataset_id: str,
    table_id: Optional[str] = None,
    custom_query: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create a BigQuery data source connection for Looker Studio

    Args:
        project_id: Google Cloud project ID
        dataset_id: BigQuery dataset ID
        table_id: Optional specific table (if not using custom query)
        custom_query: Optional custom SQL query

    Returns:
        Dict containing connection information
    """
    try:
        connection_info = {
            "connector": "BigQuery",
            "project_id": project_id,
            "dataset_id": dataset_id
        }

        if custom_query:
            connection_info["type"] = "custom_query"
            connection_info["query"] = custom_query
            connection_info["note"] = "Ensure query is optimized for performance"
        elif table_id:
            connection_info["type"] = "table"
            connection_info["table_id"] = table_id
        else:
            connection_info["type"] = "dataset"
            connection_info["note"] = "All tables in dataset will be available"

        return {
            "connection": connection_info,
            "setup_steps": [
                "1. In Looker Studio, click 'Create' > 'Data source'",
                "2. Select 'BigQuery' connector",
                f"3. Choose project '{project_id}'",
                f"4. Select dataset '{dataset_id}'",
                "5. Choose table or enter custom query",
                "6. Configure field types and aggregations",
                "7. Name and save the data source"
            ],
            "optimization_tips": [
                "Use partitioned tables for better performance",
                "Pre-aggregate data when possible",
                "Limit date ranges in queries",
                "Use clustering for frequently filtered columns"
            ]
        }

    except Exception as e:
        logger.error(f"Error: {e}")
        return {"error": f"Error: {str(e)}"}


# Add to AVAILABLE_TOOLS in main tools module
LOOKER_STUDIO_TOOLS = [
    looker_studio_list_reports,
    looker_studio_copy_report,
    looker_studio_share_report,
    looker_studio_create_sheets_datasource,
    looker_studio_refresh_data_source,
    looker_studio_export_report_data,
    looker_studio_create_calculated_field,
    looker_studio_get_dashboard_templates,
    looker_studio_schedule_email_delivery,
    looker_studio_connect_bigquery
]
`;
  fs.writeFileSync(path.join(biPath, 'looker_studio_integration.py'), lookerPy);
}
Requirements: pip install slack-sdk """ import os import logging from typing import Dict, List, Optional, Any, Union from datetime import datetime, timedelta from slack_sdk import WebClient from slack_sdk.errors import SlackApiError from tools import tool logger = logging.getLogger(__name__) # Initialize Slack client SLACK_BOT_TOKEN = os.getenv("SLACK_BOT_TOKEN") SLACK_USER_TOKEN = os.getenv("SLACK_USER_TOKEN") # For user-specific actions # Use bot token by default slack_client = WebClient(token=SLACK_BOT_TOKEN) if SLACK_BOT_TOKEN else None slack_user_client = WebClient(token=SLACK_USER_TOKEN) if SLACK_USER_TOKEN else None # --- Channel Management --- @tool def slack_list_channels( exclude_archived: bool = True, types: str = "public_channel,private_channel", limit: int = 100 ) -> Dict[str, Any]: """ List all channels in the workspace Args: exclude_archived: Whether to exclude archived channels types: Comma-separated channel types (public_channel, private_channel, mpim, im) limit: Maximum number of channels to return Returns: Dict containing channel information """ try: if not slack_client: return {"error": "Slack not initialized. 
Please configure SLACK_BOT_TOKEN."} response = slack_client.conversations_list( exclude_archived=exclude_archived, types=types, limit=limit ) channels = [] for channel in response.get("channels", []): channels.append({ "id": channel["id"], "name": channel.get("name", "Direct Message"), "is_private": channel.get("is_private", False), "is_archived": channel.get("is_archived", False), "is_general": channel.get("is_general", False), "topic": channel.get("topic", {}).get("value", ""), "purpose": channel.get("purpose", {}).get("value", ""), "num_members": channel.get("num_members", 0), "created": datetime.fromtimestamp(channel.get("created", 0)).isoformat() if channel.get("created") else None }) return { "channels": channels, "count": len(channels) } except SlackApiError as e: logger.error(f"Slack API error: {e}") return {"error": f"Slack API error: {e.response['error']}"} @tool def slack_create_channel( name: str, is_private: bool = False, description: Optional[str] = None ) -> Dict[str, Any]: """ Create a new channel Args: name: Name of the channel (lowercase, no spaces) is_private: Whether the channel should be private description: Channel description/purpose Returns: Dict containing new channel information """ try: if not slack_client: return {"error": "Slack not initialized. 
Please configure SLACK_BOT_TOKEN."} response = slack_client.conversations_create( name=name, is_private=is_private ) channel = response["channel"] channel_id = channel["id"] # Set topic/purpose if provided if description: slack_client.conversations_setPurpose( channel=channel_id, purpose=description ) return { "id": channel_id, "name": channel["name"], "is_private": channel.get("is_private", False), "created": True, "creator": channel.get("creator"), "description": description } except SlackApiError as e: logger.error(f"Slack API error: {e}") return {"error": f"Slack API error: {e.response['error']}"} @tool def slack_archive_channel( channel_id: str ) -> Dict[str, Any]: """ Archive a channel Args: channel_id: The ID of the channel to archive Returns: Success status """ try: if not slack_client: return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."} slack_client.conversations_archive(channel=channel_id) return { "success": True, "message": f"Channel {channel_id} has been archived" } except SlackApiError as e: logger.error(f"Slack API error: {e}") return {"error": f"Slack API error: {e.response['error']}"} # --- Messaging --- @tool def slack_send_message( channel: str, text: str, thread_ts: Optional[str] = None, blocks: Optional[List[Dict]] = None, attachments: Optional[List[Dict]] = None ) -> Dict[str, Any]: """ Send a message to a channel or user Args: channel: Channel ID, channel name (with #), or user ID text: Message text (required even with blocks/attachments as fallback) thread_ts: Thread timestamp to reply in a thread blocks: Block Kit blocks for rich formatting attachments: Legacy attachments for rich content Returns: Dict containing message information """ try: if not slack_client: return {"error": "Slack not initialized. 
Please configure SLACK_BOT_TOKEN."} kwargs = { "channel": channel, "text": text } if thread_ts: kwargs["thread_ts"] = thread_ts if blocks: kwargs["blocks"] = blocks if attachments: kwargs["attachments"] = attachments response = slack_client.chat_postMessage(**kwargs) return { "ts": response["ts"], "channel": response["channel"], "text": text, "thread_ts": thread_ts, "success": True } except SlackApiError as e: logger.error(f"Slack API error: {e}") return {"error": f"Slack API error: {e.response['error']}"} @tool def slack_send_dm( user_id: str, text: str, blocks: Optional[List[Dict]] = None ) -> Dict[str, Any]: """ Send a direct message to a user Args: user_id: User ID to send message to text: Message text blocks: Block Kit blocks for rich formatting Returns: Dict containing message information """ try: if not slack_client: return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."} # Open a DM channel with the user response = slack_client.conversations_open(users=user_id) channel_id = response["channel"]["id"] # Send the message return slack_send_message(channel_id, text, blocks=blocks) except SlackApiError as e: logger.error(f"Slack API error: {e}") return {"error": f"Slack API error: {e.response['error']}"} @tool def slack_update_message( channel: str, ts: str, text: str, blocks: Optional[List[Dict]] = None, attachments: Optional[List[Dict]] = None ) -> Dict[str, Any]: """ Update an existing message Args: channel: Channel ID where the message exists ts: Timestamp of the message to update text: New message text blocks: New Block Kit blocks attachments: New attachments Returns: Dict containing update status """ try: if not slack_client: return {"error": "Slack not initialized. 
Please configure SLACK_BOT_TOKEN."}

        kwargs = {
            "channel": channel,
            "ts": ts,
            "text": text
        }

        if blocks:
            kwargs["blocks"] = blocks
        if attachments:
            kwargs["attachments"] = attachments

        # chat.update rewrites the message in place, identified by channel + ts
        response = slack_client.chat_update(**kwargs)

        return {
            "ts": response["ts"],
            "channel": response["channel"],
            "text": text,
            "updated": True
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


@tool
def slack_delete_message(
    channel: str,
    ts: str
) -> Dict[str, Any]:
    """
    Delete a message

    Args:
        channel: Channel ID where the message exists
        ts: Timestamp of the message to delete

    Returns:
        Success status
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        # chat.delete raises SlackApiError on failure, so reaching the return
        # below implies the deletion succeeded
        slack_client.chat_delete(channel=channel, ts=ts)

        return {
            "success": True,
            "message": f"Message deleted from {channel}"
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- User Management ---

@tool
def slack_list_users(
    include_bots: bool = False,
    limit: int = 100
) -> Dict[str, Any]:
    """
    List users in the workspace

    Args:
        include_bots: Whether to include bot users
        limit: Maximum number of users to return

    Returns:
        Dict containing user information
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        # NOTE(review): users_list is cursor-paginated; only the first page of
        # up to `limit` members is fetched here — confirm that is intentional
        response = slack_client.users_list(limit=limit)

        users = []
        for user in response.get("members", []):
            # Skip bots unless requested
            if user.get("is_bot") and not include_bots:
                continue
            # Skip deleted users
            if user.get("deleted"):
                continue

            # Profile fields are nested; .get chains tolerate missing keys
            users.append({
                "id": user["id"],
                "name": user.get("name"),
                "real_name": user.get("real_name"),
                "display_name": user.get("profile", {}).get("display_name"),
                "email": user.get("profile", {}).get("email"),
                "is_admin": user.get("is_admin", False),
                "is_owner": user.get("is_owner", False),
                "is_bot": user.get("is_bot", False),
                "status_text": user.get("profile", {}).get("status_text"),
                "status_emoji": user.get("profile", {}).get("status_emoji"),
                "timezone": user.get("tz"),
                "image_48": user.get("profile", {}).get("image_48")
            })

        return {
            "users": users,
            "count": len(users)
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


@tool
def slack_get_user_info(
    user_id: str
) -> Dict[str, Any]:
    """
    Get detailed information about a specific user

    Args:
        user_id: User ID to get information for

    Returns:
        Dict containing detailed user information
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        response = slack_client.users_info(user=user_id)
        user = response["user"]

        return {
            "id": user["id"],
            "name": user.get("name"),
            "real_name": user.get("real_name"),
            "display_name": user.get("profile", {}).get("display_name"),
            "email": user.get("profile", {}).get("email"),
            "phone": user.get("profile", {}).get("phone"),
            "title": user.get("profile", {}).get("title"),
            "is_admin": user.get("is_admin", False),
            "is_owner": user.get("is_owner", False),
            "is_bot": user.get("is_bot", False),
            "status_text": user.get("profile", {}).get("status_text"),
            "status_emoji": user.get("profile", {}).get("status_emoji"),
            "timezone": user.get("tz"),
            "locale": user.get("locale"),
            "profile": {
                "image_original": user.get("profile", {}).get("image_original"),
                "image_512": user.get("profile", {}).get("image_512"),
                "fields": user.get("profile", {}).get("fields", {})
            }
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- File Sharing ---

@tool
def slack_upload_file(
    channels: Union[str, List[str]],
    file_path: Optional[str] = None,
    content: Optional[str] = None,
    filename: Optional[str] = None,
    title: Optional[str] = None,
    initial_comment: Optional[str] = None
) -> Dict[str, Any]:
    """
    Upload a file to Slack

    Args:
        channels: Channel ID(s) to share the file in
        file_path: Path to local file to upload
        content: Content to upload as a file (alternative to file_path)
        filename: Name for the file
        title: Title of the file
        initial_comment: Message to post with the file

    Returns:
        Dict containing file information
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        # The API expects channels as one comma-separated string
        kwargs = {
            "channels": channels if isinstance(channels, str) else ",".join(channels)
        }

        # Exactly one of file_path / content must be supplied; filename only
        # matters for raw content (a local file already carries its own name)
        if file_path:
            kwargs["file"] = file_path
        elif content:
            kwargs["content"] = content
            if filename:
                kwargs["filename"] = filename
        else:
            return {"error": "Either file_path or content must be provided"}

        if title:
            kwargs["title"] = title
        if initial_comment:
            kwargs["initial_comment"] = initial_comment

        response = slack_client.files_upload_v2(**kwargs)

        return {
            "file_id": response["file"]["id"],
            "name": response["file"]["name"],
            "title": response["file"].get("title"),
            "mimetype": response["file"]["mimetype"],
            "size": response["file"]["size"],
            "url_private": response["file"]["url_private"],
            "permalink": response["file"]["permalink"],
            "channels": response["file"]["channels"],
            "success": True
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- Search ---

@tool
def slack_search_messages(
    query: str,
    sort: str = "timestamp",
    sort_dir: str = "desc",
    count: int = 20
) -> Dict[str, Any]:
    """
    Search for messages in Slack (requires user token)

    Args:
        query: Search query (supports Slack search modifiers)
        sort: Sort by 'score' or 'timestamp'
        sort_dir: Sort direction 'asc' or 'desc'
        count: Number of results to return

    Returns:
        Dict containing search results
    """
    try:
        # search.messages needs a user token — the bot client cannot be used here
        if not slack_user_client:
            return {"error": "Slack user token not configured. Please set SLACK_USER_TOKEN."}

        response = slack_user_client.search_messages(
            query=query,
            sort=sort,
            sort_dir=sort_dir,
            count=count
        )

        messages = []
        for match in response.get("messages", {}).get("matches", []):
            messages.append({
                "text": match.get("text"),
                "user": match.get("user"),
                "username": match.get("username"),
                "ts": match.get("ts"),
                "channel": match.get("channel", {}).get("name"),
                "channel_id": match.get("channel", {}).get("id"),
                "permalink": match.get("permalink")
            })

        return {
            "query": query,
            "messages": messages,
            # "total" is the workspace-wide hit count; "count" is this page only
            "total": response.get("messages", {}).get("total", 0),
            "count": len(messages)
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- Reactions ---

@tool
def slack_add_reaction(
    channel: str,
    timestamp: str,
    emoji: str
) -> Dict[str, Any]:
    """
    Add an emoji reaction to a message

    Args:
        channel: Channel ID where the message exists
        timestamp: Timestamp of the message
        emoji: Emoji name (without colons, e.g., 'thumbsup')

    Returns:
        Success status
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        slack_client.reactions_add(
            channel=channel,
            timestamp=timestamp,
            name=emoji
        )

        return {
            "success": True,
            "message": f"Added :{emoji}: reaction"
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- Reminders ---

@tool
def slack_create_reminder(
    text: str,
    time: str,
    user: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create a reminder

    Args:
        text: Reminder text
        time: When to trigger (e.g., "in 5 minutes", "tomorrow", "every Tuesday")
        user: User ID to create reminder for (defaults to caller)

    Returns:
        Dict containing reminder information
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        kwargs = {
            "text": text,
            "time": time
        }
        if user:
            kwargs["user"] = user

        response = slack_client.reminders_add(**kwargs)
        reminder = response["reminder"]

        return {
            "id": reminder["id"],
            "text": reminder["text"],
            "user": reminder["user"],
            "time": reminder.get("time"),
            "complete_ts": reminder.get("complete_ts"),
            "recurring": reminder.get("recurring", False)
        }

    except SlackApiError as e:
        logger.error(f"Slack API error: {e}")
        return {"error": f"Slack API error: {e.response['error']}"}


# --- Scheduled Messages ---

@tool
def slack_schedule_message(
    channel: str,
    post_at: datetime,
    text: str,
    blocks: Optional[List[Dict]] = None
) -> Dict[str, Any]:
    """
    Schedule a message for future delivery

    Args:
        channel: Channel ID to send to
        post_at: When to send the message
        text: Message text
        blocks: Block Kit blocks for rich formatting

    Returns:
        Dict containing scheduled message information
    """
    try:
        if not slack_client:
            return {"error": "Slack not initialized. Please configure SLACK_BOT_TOKEN."}

        # chat.scheduleMessage takes post_at as a Unix timestamp in seconds
        kwargs = {
            "channel": channel,
            "post_at": int(post_at.timestamp()),
            "text": text
        }
        if blocks:
            kwargs["blocks"] = blocks

        response = slack_client.chat_scheduleMessage(**kwargs)

        return {
            "scheduled_message_id": response["scheduled_message_id"],
            "post_at": response["post_at"],
            "channel": channel,
            "text": text,
            "su