@podx/cli
Version:
Command-line interface for PODx — advanced Twitter/X scraping and crypto analysis toolkit
857 lines (700 loc) • 29.9 kB
text/typescript
import chalk from 'chalk';
import { execSync } from 'child_process';
import { existsSync, readdirSync, rmSync } from 'fs';
import { promises as fs } from 'fs';
import path from 'path';
import { logger } from '@podx/core';
import ora from 'ora';
import { formatFileSize, formatDuration } from '../utils';
import { Spinner } from '../utils';
export async function runStatus(): Promise<void> {
console.log(chalk.blue.bold('π SYSTEM STATUS π\n'));
try {
// Check environment
console.log(chalk.cyan('π§ Environment:'));
console.log(` Node.js: ${process.version}`);
console.log(` Platform: ${process.platform}`);
console.log(` Architecture: ${process.arch}`);
// Check configuration
console.log(chalk.cyan('\nβοΈ Configuration:'));
const configStatus = checkConfiguration();
console.log(` Credentials: ${configStatus.credentials ? chalk.green('β
Set') : chalk.red('β Missing')}`);
console.log(` Target: ${configStatus.target ? chalk.green('β
Set') : chalk.yellow('β οΈ Default')}`);
// Check data files
console.log(chalk.cyan('\nπ Data Files:'));
const dataStatus = checkDataFiles();
console.log(` Scraped tweets: ${dataStatus.mainTweets ? chalk.green('β
Exists') : chalk.red('β None')}`);
console.log(` Account data: ${dataStatus.accountData ? chalk.green('β
Exists') : chalk.gray('β None')}`);
console.log(` Analysis files: ${dataStatus.analysisFiles ? chalk.green('β
Exists') : chalk.gray('β None')}`);
// Check build status
console.log(chalk.cyan('\nποΈ Build Status:'));
const buildStatus = { typescript: existsSync('dist'), distribution: existsSync('dist') };
console.log(` TypeScript: ${buildStatus.typescript ? chalk.green('β
Compiled') : chalk.red('β Errors')}`);
console.log(` Distribution: ${buildStatus.distribution ? chalk.green('β
Built') : chalk.red('β Missing')}`);
// Performance metrics
console.log(chalk.cyan('\nβ‘ Performance:'));
const perfStatus = checkPerformance();
console.log(` Memory usage: ${perfStatus.memory}`);
console.log(` Uptime: ${perfStatus.uptime}`);
logger.info("Status check completed", { operation: "status_check" });
} catch (error) {
console.log(chalk.red('β Error checking system status'));
logger.error("Status check failed", { operation: "status_check" }, error as Error);
}
}
/**
 * Remove build artifacts and, when `allData` is true, all scraped/analysis
 * data as well.
 *
 * Always removes: dist/, build/, and any *.tsbuildinfo files in the CWD.
 * With `allData`: also removes scraped_data/, data/, and loose .json output
 * files (config files containing "package", "tsconfig", or "convex" in
 * their names are preserved).
 *
 * @param allData - Also delete scraped/analysis data, not just build output.
 * @throws Re-throws any filesystem failure after logging it.
 */
export async function runClean(allData = false): Promise<void> {
  logger.info("Starting cleanup operation", { operation: "cleanup", allData });
  try {
    const spinner = new Spinner('Cleaning up...').start();

    // Always clean build directories.
    for (const buildPath of ['dist', 'build']) {
      if (existsSync(buildPath)) {
        rmSync(buildPath, { recursive: true, force: true });
        spinner.updateMessage(`Removed ${buildPath}`);
      }
    }
    // Fix: the original passed the glob '*.tsbuildinfo' to existsSync(),
    // which never matches, so tsbuildinfo files were never removed.
    // Expand the pattern manually instead.
    for (const file of readdirSync('.').filter(f => f.endsWith('.tsbuildinfo'))) {
      rmSync(file, { force: true });
      spinner.updateMessage(`Removed ${file}`);
    }

    if (allData) {
      // Remove data directories wholesale.
      for (const dataPath of ['scraped_data', 'data']) {
        if (existsSync(dataPath)) {
          rmSync(dataPath, { recursive: true, force: true });
          spinner.updateMessage(`Removed ${dataPath}`);
        }
      }
      // Remove loose JSON output files, preserving project config files.
      const jsonFiles = readdirSync('.').filter(f =>
        f.endsWith('.json') &&
        !f.includes('package') &&
        !f.includes('tsconfig') &&
        !f.includes('convex')
      );
      for (const file of jsonFiles) {
        rmSync(file, { force: true });
        spinner.updateMessage(`Removed ${file}`);
      }
    }

    spinner.succeed('Cleanup completed successfully!');
    logger.info("Cleanup operation completed", { operation: "cleanup", allData });
  } catch (error) {
    logger.error("Cleanup operation failed", { operation: "cleanup" }, error as Error);
    throw error;
  }
}
/**
 * Compile the project by delegating to the `bun run build` package script,
 * then report how many artifacts landed in dist/.
 *
 * @throws Re-throws any build failure after logging it.
 */
export async function runBuild(): Promise<void> {
  logger.info("Starting build operation", { operation: "build" });
  try {
    const spinner = new Spinner('Building project...').start();
    // Compiler output streams straight to the console via stdio: 'inherit'.
    execSync('bun run build', { stdio: 'inherit' });
    spinner.succeed('Build completed successfully!');
    // Summarize what the build produced, if anything.
    if (existsSync('dist')) {
      const artifactCount = readdirSync('dist').length;
      console.log(chalk.gray(` Generated ${artifactCount} files in dist/`));
    }
    logger.info("Build operation completed", { operation: "build" });
  } catch (error) {
    logger.error("Build operation failed", { operation: "build" }, error as Error);
    throw error;
  }
}
// Report whether scraper credentials and a target account are configured via
// environment variables. `credentials` requires both XSERVE_USERNAME and
// XSERVE_PASSWORD; XSERVE_EMAIL is optional and does not affect the result.
function checkConfiguration() {
  const { XSERVE_USERNAME: username, XSERVE_PASSWORD: password } = process.env;
  return {
    credentials: Boolean(username && password),
    target: Boolean(process.env.TARGET_USERNAME)
  };
}
// Probe the working directory for scraper output: the main tweets dump,
// per-account data, and analysis results. Directory flags require the
// directory to exist AND be non-empty.
function checkDataFiles() {
  const hasEntries = (dir: string): boolean =>
    existsSync(dir) && readdirSync(dir).length > 0;
  return {
    mainTweets: existsSync('scraped_tweets.json'),
    accountData: hasEntries('scraped_data'),
    analysisFiles: hasEntries('data/analysis')
  };
}
// Snapshot current process metrics in human-readable form.
function checkPerformance() {
  const { heapUsed } = process.memoryUsage();
  return {
    memory: formatFileSize(heapUsed),
    // process.uptime() returns seconds; formatDuration expects milliseconds.
    uptime: formatDuration(process.uptime() * 1000)
  };
}
import { PodxAPIServer } from '@podx/api';
import { ConvexStorage } from '@podx/core';
import { promises as fs } from 'fs';
/**
 * Start the PODX API server on the given port, print the available
 * endpoints, and block until SIGINT/SIGTERM, at which point the server is
 * stopped and the process exits with code 0.
 *
 * @param port - TCP port the HTTP server listens on.
 * @throws Re-throws any startup failure after logging it.
 */
export async function runPodxAPI(port: number): Promise<void> {
  logger.info("Starting PODX API server", { operation: "api_start", port });
  const spinner = ora(chalk.cyan('π Starting PODX API Server...')).start();
  try {
    const api = new PodxAPIServer();
    // Start the server. (Fix: the returned handle was bound to an unused
    // local `server`; shutdown goes through api.stop(), so it is dropped.)
    api.start(port);
    spinner.succeed(chalk.green(`✅ PODX API server running on port ${port}`));
    console.log(chalk.cyan('\nπ‘ API Endpoints:'));
    console.log(chalk.blue('Health & System:'));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/health`));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/ready`));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/admin/metrics`));
    console.log(chalk.blue('\nPublic Endpoints:'));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/signals/top`));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/tokens/trending`));
    console.log(chalk.blue('\nPremium Endpoints (Auth Required):'));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/signals`));
    console.log(chalk.gray(` GET http://localhost:${port}/api/v1/analysis/market`));
    console.log(chalk.gray(` POST http://localhost:${port}/api/v1/webhooks`));
    console.log(chalk.blue('\nAdmin Endpoints:'));
    console.log(chalk.gray(` POST http://localhost:${port}/api/v1/admin/refresh`));
    console.log(chalk.yellow('\nπ‘ Generate tokens: podx token --tier premium --duration 7d'));
    console.log(chalk.yellow('β οΈ Press Ctrl+C to stop the server'));
    // Graceful shutdown on Ctrl+C or service stop.
    const shutdown = () => {
      logger.info("Shutting down PODX API server", { operation: "api_shutdown" });
      api.stop();
      console.log(chalk.red('\nπ Server stopped'));
      process.exit(0);
    };
    process.on('SIGINT', shutdown);
    process.on('SIGTERM', shutdown);
    // Block forever so the server keeps running until a signal fires.
    await new Promise(() => {});
  } catch (error) {
    spinner.fail(chalk.red('β Failed to start PODX API server'));
    logger.error("Failed to start PODX API server", { operation: "api_start", port }, error as Error);
    throw error;
  }
}
/**
 * Generate a PODX community JWT token and print it along with curl usage
 * examples. Invalid tier/duration values are reported to the console and
 * the function returns without generating anything.
 *
 * @param userId - Identifier embedded in the token.
 * @param tier - Access tier: 'free' | 'premium' | 'admin'.
 * @param duration - Token lifetime: '1h' | '1d' | '7d' | '30d' | 'unlimited'.
 * @param name - Optional human-readable label, printed only.
 * @throws Re-throws any token-generation failure after logging it.
 */
export async function runGenerateToken(
  userId: string = 'podx-user',
  tier: string = 'free',
  duration: string = 'unlimited',
  name?: string
): Promise<void> {
  logger.info("Generating PODX community JWT token", {
    operation: "generate_token",
    userId,
    tier,
    jwtDuration: duration,
    name
  });
  try {
    // Guard clauses: reject unknown tiers/durations before doing any work.
    const validTiers = ['free', 'premium', 'admin'];
    const validDurations = ['1h', '1d', '7d', '30d', 'unlimited'];
    if (!validTiers.includes(tier)) {
      console.log(chalk.red('β Invalid tier. Use: free, premium, or admin'));
      return;
    }
    if (!validDurations.includes(duration)) {
      console.log(chalk.red('β Invalid duration. Use: 1h, 1d, 7d, 30d, or unlimited'));
      return;
    }

    const token = await new PodxAPIServer().generateCommunityToken(
      userId,
      tier as 'free' | 'premium' | 'admin',
      duration as '1h' | '1d' | '7d' | '30d' | 'unlimited'
    );

    const divider = chalk.gray('β'.repeat(60));
    console.log(chalk.blue.bold('\nπ« PODX Community JWT Token Generated'));
    console.log(divider);
    console.log(chalk.cyan(`π€ User ID: ${userId}`));
    console.log(chalk.cyan(`π·οΈ Tier: ${tier}`));
    console.log(chalk.cyan(`β±οΈ Duration: ${duration === 'unlimited' ? 'βΎοΈ Unlimited' : duration}`));
    if (name) {
      console.log(chalk.cyan(`π Name: ${name}`));
    }
    console.log(divider);
    console.log(chalk.yellow(`π Token:\n${token}`));
    console.log(divider);

    // Walk the user through common API calls with the fresh token.
    console.log(chalk.green('\nπ Usage Examples:'));
    console.log(chalk.gray('# Test health endpoint'));
    console.log(chalk.white(`curl http://localhost:3000/api/v1/health`));
    console.log(chalk.gray('\n# Access premium endpoints'));
    console.log(chalk.white(`curl -H "Authorization: Bearer ${token}" \\`));
    console.log(chalk.white(` http://localhost:3000/api/v1/analysis/market`));
    console.log(chalk.gray('\n# Create webhook'));
    console.log(chalk.white(`curl -X POST -H "Authorization: Bearer ${token}" \\`));
    console.log(chalk.white(` -H "Content-Type: application/json" \\`));
    console.log(chalk.white(` -d '{"url":"https://your-webhook.com","events":["signals"]}' \\`));
    console.log(chalk.white(` http://localhost:3000/api/v1/webhooks`));
    console.log(chalk.blue('\nπ‘ Pro Tips:'));
    console.log(chalk.gray('β’ Store this token securely - it grants API access'));
    console.log(chalk.gray('β’ Use environment variables: export PODX_TOKEN="<token>"'));
    console.log(chalk.gray('β’ Generate separate tokens for different applications'));
    if (duration === 'unlimited') {
      console.log(chalk.yellow('β’ β οΈ Unlimited tokens never expire - keep them safe!'));
    }

    logger.info("PODX community JWT token generated successfully", {
      operation: "generate_token",
      userId,
      tier,
      jwtDuration: duration
    });
  } catch (error) {
    console.log(chalk.red('β Failed to generate JWT token'));
    logger.error("Failed to generate JWT token", {
      operation: "generate_token",
      userId,
      tier,
      jwtDuration: duration
    }, error as Error);
    throw error;
  }
}
export async function runConvexSave(inputFile: string, dataType: string): Promise<void> {
logger.info("Starting Convex data save", { operation: "convex_save", inputFile, dataType });
const spinner = ora(chalk.cyan('πΎ Saving data to Convex...')).start();
try {
// Load the data file
const dataContent = await fs.readFile(inputFile, 'utf-8');
const data = JSON.parse(dataContent);
spinner.text = chalk.cyan('π Connecting to Convex...');
const convexClient = new ConvexStorage();
spinner.text = chalk.cyan('π€ Uploading data...');
switch (dataType) {
case 'analysis':
await convexClient.saveAnalysis(data);
break;
case 'tokens':
await convexClient.saveTokenAnalysis(data);
break;
case 'accounts':
await convexClient.saveAccountReputation(data);
break;
default:
throw new Error(`Unknown data type: ${dataType}`);
}
spinner.succeed(chalk.green('β
Data saved to Convex successfully!'));
logger.info("Convex data save completed", {
operation: "convex_save",
inputFile,
dataType,
recordCount: Array.isArray(data) ? data.length : 1
});
} catch (error) {
spinner.fail(chalk.red('β Failed to save data to Convex'));
logger.error("Convex data save failed", {
operation: "convex_save",
inputFile,
dataType
}, error as Error);
throw error;
}
}
/**
 * Run a Twitter search with optional engagement/date filters and write the
 * results to a JSON file (options.output, default 'search_results.json').
 *
 * NOTE(review): options is untyped; fields read here are query, count, mode,
 * minLikes, minRetweets, dateFrom, dateTo, noRetweets, output — confirm
 * against the CLI option parser.
 */
export async function runSearchTweets(options: any): Promise<void> {
  logger.info("Starting Twitter search", {
    operation: "twitter_search",
    query: options.query,
    count: options.count,
    mode: options.mode
  });
  const spinner = ora(chalk.cyan('π Searching Twitter...')).start();
  try {
    const { SearchScraper } = await import('@podx/scraper/scrapers');
    const outputFile = options.output || 'search_results.json';
    spinner.text = chalk.cyan(`π Searching for: "${options.query}"`);
    const results = await new SearchScraper().searchTweets({
      query: options.query,
      maxTweets: options.count || 100,
      searchMode: options.mode || 'Latest',
      filters: {
        minLikes: options.minLikes,
        minRetweets: options.minRetweets,
        dateFrom: options.dateFrom,
        dateTo: options.dateTo,
        excludeRetweets: options.noRetweets
      }
    });
    spinner.text = chalk.cyan('πΎ Saving search results...');
    await fs.writeFile(outputFile, JSON.stringify(results, null, 2));
    spinner.succeed(chalk.green(`✅ Found ${results.length} tweets`));
    // Console summary of what was searched and where it went.
    console.log(chalk.cyan('\nπ Search Results:'));
    console.log(chalk.gray(` Query: "${options.query}"`));
    console.log(chalk.gray(` Results: ${results.length} tweets`));
    console.log(chalk.gray(` Saved to: ${outputFile}`));
    logger.info("Twitter search completed", {
      operation: "twitter_search",
      query: options.query,
      resultCount: results.length
    });
  } catch (error) {
    spinner.fail(chalk.red('β Twitter search failed'));
    logger.error("Twitter search failed", {
      operation: "twitter_search",
      query: options.query
    }, error as Error);
    throw error;
  }
}
/**
 * Scrape tweets from a Twitter list (options.id) and save them to
 * scraped_data/list_<id>_tweets.json under the current working directory.
 */
export async function runListScrape(options: any): Promise<void> {
  logger.info("Starting Twitter list scrape", {
    operation: "list_scrape",
    listId: options.id,
    count: options.count
  });
  const spinner = ora(chalk.cyan('π Scraping Twitter list...')).start();
  try {
    const { SearchScraper } = await import('@podx/scraper');
    const tweets = await new SearchScraper().scrapeList({
      listId: options.id,
      maxTweets: options.count || 100,
      includeRetweets: options.includeRetweets || false
    });
    if (tweets.length === 0) {
      spinner.warn(chalk.yellow('β οΈ No tweets found in list'));
      return;
    }
    // Write output under scraped_data/, creating the directory if needed.
    const outputPath = path.join(process.cwd(), 'scraped_data', `list_${options.id}_tweets.json`);
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    await fs.writeFile(outputPath, JSON.stringify(tweets, null, 2));
    spinner.succeed(chalk.green(`✅ Scraped ${tweets.length} tweets from list ${options.id}`));
    console.log(chalk.gray(`π Saved to: ${outputPath}`));
    logger.info("List scraping completed", {
      operation: "list_scrape",
      listId: options.id,
      tweetsScraped: tweets.length,
      outputFile: outputPath
    });
  } catch (error) {
    spinner.fail(chalk.red('β Twitter list scraping failed'));
    logger.error("Twitter list scraping failed", {
      operation: "list_scrape",
      listId: options.id
    }, error as Error);
    throw error;
  }
}
// Backward-compatible wrappers expected by interactive handler
/** Backward-compatible alias for runSearchTweets, kept for the interactive handler. */
export async function runTwitterSearch(options: any): Promise<void> {
  await runSearchTweets(options);
}
/** Backward-compatible alias for runCryptoSearch, kept for the interactive handler. */
export async function runCryptoTwitterSearch(options: any): Promise<void> {
  await runCryptoSearch(options);
}
/** Placeholder — viewing analysis output from the CLI is not implemented; only logs. */
export async function runViewAnalysis(): Promise<void> {
logger.info('View analysis not implemented; open analysis file manually', { operation: 'view_analysis' });
}
/** Placeholder — the top-signals view is not implemented in the CLI; only logs. */
export async function runTopSignals(): Promise<void> {
logger.info('Top signals view not implemented in CLI', { operation: 'top_signals' });
}
/** Placeholder — the trending-tokens view is not implemented in the CLI; only logs. */
export async function runTrendingTokens(): Promise<void> {
logger.info('Trending tokens view not implemented in CLI', { operation: 'trending_tokens' });
}
/**
 * Search Twitter for crypto content — either specific tokens (options.tokens,
 * accepted as a comma-separated string, an array, or a single value) or a set
 * of general crypto terms — then run the collected tweets through
 * CryptoAnalyzer and save queries, tweets, token mentions, and signals to
 * scraped_data/crypto_search_<timestamp>.json.
 *
 * NOTE(review): options is untyped; fields read here are tokens, count, and
 * minEngagement — confirm against the CLI option parser.
 */
export async function runCryptoSearch(options: any): Promise<void> {
// Normalize options.tokens into a string[] regardless of input shape.
let tokens = [];
if (options.tokens) {
if (typeof options.tokens === 'string') {
tokens = options.tokens.split(',').map((t: string) => t.trim());
} else if (Array.isArray(options.tokens)) {
tokens = options.tokens;
} else {
tokens = [options.tokens];
}
}
logger.info("Starting crypto Twitter search", {
operation: "crypto_search",
tokens: tokens.length > 0 ? tokens : 'general_crypto_search'
});
console.log(chalk.cyan(`π Searching for crypto content: ${tokens.length > 0 ? tokens.join(', ') : 'general crypto terms'}`));
const spinner = ora(chalk.cyan('π Running searches...')).start();
try {
const { SearchScraper } = await import('@podx/scraper');
const searchScraper = new SearchScraper();
// Build crypto-focused search queries: per-token variants when tokens were
// given, otherwise a fixed set of general crypto terms.
const cryptoQueries = [];
if (tokens.length > 0) {
// Each token yields a cashtag query plus bare-symbol variants.
for (const token of tokens) {
cryptoQueries.push(`$${token.toUpperCase()}`);
cryptoQueries.push(`${token.toUpperCase()}`);
if (token.toLowerCase() !== 'sol') { // lowercase 'sol' is too generic a word
cryptoQueries.push(`${token.toLowerCase()}`);
}
}
} else {
// General crypto search terms
cryptoQueries.push('$BTC OR $ETH OR $SOL');
cryptoQueries.push('crypto');
cryptoQueries.push('Bitcoin');
cryptoQueries.push('Ethereum');
cryptoQueries.push('Solana');
}
console.log(chalk.gray(`π Search queries: ${cryptoQueries.join(', ')}`));
const allTweets: any[] = [];
// Budget options.count (default 200) across the queries, but never fewer
// than 50 tweets per query.
const maxTweetsPerQuery = Math.max(50, Math.floor((options.count || 200) / cryptoQueries.length));
console.log(chalk.gray(`π Max tweets per query: ${maxTweetsPerQuery}`));
for (let i = 0; i < cryptoQueries.length; i++) {
const query = cryptoQueries[i];
spinner.text = `Searching ${i + 1}/${cryptoQueries.length}: "${query}"`;
const searchOptions = {
query,
maxTweets: maxTweetsPerQuery,
searchMode: 'Latest' as any,
minLikes: options.minEngagement || 0, // Don't filter by likes by default
includeRetweets: true // Include retweets for more volume
};
// A failed query is logged and skipped so the remaining queries still run.
try {
const tweets = await searchScraper.searchTweets(searchOptions);
allTweets.push(...tweets);
console.log(chalk.green(`β
Found ${tweets.length} tweets for "${query}"`));
} catch (error) {
console.log(chalk.yellow(`β οΈ Search failed for "${query}": ${error}`));
}
// Small delay between searches to avoid hammering the endpoint.
await new Promise(resolve => setTimeout(resolve, 1000));
}
if (allTweets.length === 0) {
spinner.warn(chalk.yellow('β οΈ No crypto content found'));
return;
}
spinner.text = 'Analyzing crypto signals...';
// Analyze the collected tweets for token mentions and trading signals.
const { CryptoAnalyzer } = await import('@podx/scraper');
const analyzer = new CryptoAnalyzer();
// Feed tweets to the analyzer by direct assignment (no loader method used here).
analyzer.tweets = allTweets;
const tokenMentions = analyzer.extractTokenMentions();
const signals = analyzer.generateSignals(tokenMentions);
// Save results under scraped_data/ with a filesystem-safe ISO timestamp.
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
const filename = `crypto_search_${timestamp}.json`;
const outputPath = path.join(process.cwd(), 'scraped_data', filename);
await fs.mkdir(path.dirname(outputPath), { recursive: true });
await fs.writeFile(outputPath, JSON.stringify({
searchQueries: cryptoQueries,
tweets: allTweets,
analysis: {
tokenMentions,
signals
},
metadata: {
generatedAt: new Date().toISOString(),
totalTweets: allTweets.length,
tokensFound: tokenMentions.length,
signalsGenerated: signals.length
}
}, null, 2));
spinner.succeed(chalk.green(`β
Found ${allTweets.length} crypto tweets with ${tokenMentions.length} token mentions`));
console.log(chalk.gray(`π Saved to: ${outputPath}`));
logger.info("Crypto search completed", {
operation: "crypto_search",
tweetsFound: allTweets.length,
tokensFound: tokenMentions.length,
signalsGenerated: signals.length,
outputFile: outputPath
});
} catch (error) {
spinner.fail(chalk.red('β Crypto search failed'));
logger.error("Crypto search failed", { operation: "crypto_search" }, error as Error);
throw error;
}
}
/**
 * Scrape tweets matching one or more hashtags (options.hashtags is a
 * comma-separated list) and write the results plus metadata to a JSON file
 * (options.output, default 'hashtag_search.json').
 */
export async function runHashtagSearch(options: any): Promise<void> {
  const hashtags = options.hashtags.split(',').map((h: string) => h.trim());
  logger.info("Starting hashtag search", { operation: "hashtag_search", hashtags });
  console.log(chalk.cyan(`π·οΈ Searching hashtags: ${hashtags.join(', ')}`));
  const spinner = ora(chalk.cyan('π Running hashtag searches...')).start();
  try {
    const { SearchScraper } = await import('@podx/scraper');
    const maxTweets = parseInt(options.count) || 100;
    const tweets = await new SearchScraper().searchHashtags(hashtags, maxTweets);
    if (tweets.length === 0) {
      spinner.warn(chalk.yellow('β οΈ No tweets found for hashtags'));
      return;
    }
    // Persist results alongside search metadata.
    const outputPath = path.join(process.cwd(), options.output || 'hashtag_search.json');
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    const payload = {
      searchType: 'hashtags',
      hashtags,
      tweets,
      metadata: {
        generatedAt: new Date().toISOString(),
        totalTweets: tweets.length
      }
    };
    await fs.writeFile(outputPath, JSON.stringify(payload, null, 2));
    spinner.succeed(chalk.green(`✅ Found ${tweets.length} tweets with hashtags`));
    console.log(chalk.gray(`π Saved to: ${outputPath}`));
    logger.info("Hashtag search completed", {
      operation: "hashtag_search",
      tweetsFound: tweets.length,
      outputFile: outputPath
    });
  } catch (error) {
    spinner.fail(chalk.red('β Hashtag search failed'));
    logger.error("Hashtag search failed", { operation: "hashtag_search" }, error as Error);
    throw error;
  }
}
/**
 * Scrape tweets mentioning one or more stock/crypto tickers (options.tickers
 * is a comma-separated list) and write the results plus metadata to a JSON
 * file (options.output, default 'ticker_search.json').
 */
export async function runTickerSearch(options: any): Promise<void> {
  const tickers = options.tickers.split(',').map((t: string) => t.trim());
  logger.info("Starting ticker search", { operation: "ticker_search", tickers });
  console.log(chalk.cyan(`π° Searching tickers: ${tickers.join(', ')}`));
  const spinner = ora(chalk.cyan('π Running ticker searches...')).start();
  try {
    const { SearchScraper } = await import('@podx/scraper');
    const maxTweets = parseInt(options.count) || 100;
    const tweets = await new SearchScraper().searchTickers(tickers, maxTweets);
    if (tweets.length === 0) {
      spinner.warn(chalk.yellow('β οΈ No tweets found for tickers'));
      return;
    }
    // Persist results alongside search metadata.
    const outputPath = path.join(process.cwd(), options.output || 'ticker_search.json');
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    const payload = {
      searchType: 'tickers',
      tickers,
      tweets,
      metadata: {
        generatedAt: new Date().toISOString(),
        totalTweets: tweets.length
      }
    };
    await fs.writeFile(outputPath, JSON.stringify(payload, null, 2));
    spinner.succeed(chalk.green(`✅ Found ${tweets.length} tweets with tickers`));
    console.log(chalk.gray(`π Saved to: ${outputPath}`));
    logger.info("Ticker search completed", {
      operation: "ticker_search",
      tweetsFound: tweets.length,
      outputFile: outputPath
    });
  } catch (error) {
    spinner.fail(chalk.red('β Ticker search failed'));
    logger.error("Ticker search failed", { operation: "ticker_search" }, error as Error);
    throw error;
  }
}
/**
 * Fetch replies for a comma-separated list of tweet IDs (options.ids) and
 * save them both grouped per tweet and as a flat list (options.output,
 * default 'replies_search.json').
 */
export async function runReplySearch(options: any): Promise<void> {
  const tweetIds = options.ids.split(',').map((id: string) => id.trim());
  logger.info("Starting reply search", { operation: "reply_search", tweetIds });
  console.log(chalk.cyan(`π¬ Searching replies for ${tweetIds.length} tweets`));
  const spinner = ora(chalk.cyan('π Fetching replies...')).start();
  try {
    const { SearchScraper } = await import('@podx/scraper');
    const maxReplies = parseInt(options.count) || 50;
    const result = await new SearchScraper().searchReplies(tweetIds, maxReplies);
    if (result.replies.length === 0) {
      spinner.warn(chalk.yellow('β οΈ No replies found'));
      return;
    }
    const outputPath = path.join(process.cwd(), options.output || 'replies_search.json');
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    // Maps do not survive JSON.stringify; flatten byTweet into a plain object.
    const repliesByTweet: Record<string, any[]> = Object.fromEntries(result.byTweet);
    await fs.writeFile(outputPath, JSON.stringify({
      searchType: 'replies',
      tweetIds,
      totalReplies: result.replies.length,
      repliesByTweet,
      allReplies: result.replies,
      metadata: {
        generatedAt: new Date().toISOString(),
        tweetsSearched: tweetIds.length,
        totalReplies: result.replies.length
      }
    }, null, 2));
    spinner.succeed(chalk.green(`✅ Found ${result.replies.length} replies across ${tweetIds.length} tweets`));
    console.log(chalk.gray(`π Saved to: ${outputPath}`));
    logger.info("Reply search completed", {
      operation: "reply_search",
      repliesFound: result.replies.length,
      tweetsSearched: tweetIds.length,
      outputFile: outputPath
    });
  } catch (error) {
    spinner.fail(chalk.red('β Reply search failed'));
    logger.error("Reply search failed", { operation: "reply_search" }, error as Error);
    throw error;
  }
}
/**
 * Extract Solana contract mentions from a tweets JSON file and write an
 * analysis report with per-contract mention details.
 *
 * @param inputFile - Path to a JSON array of scraped tweets.
 * @param outputFile - Destination path for the analysis JSON.
 * @throws Re-throws any read/analysis/write failure after logging it.
 */
export async function runSolanaContracts(inputFile: string, outputFile: string): Promise<void> {
  logger.info("Starting Solana contract analysis", { operation: "solana_contracts", inputFile, outputFile });
  const spinner = ora(chalk.cyan('π Analyzing Solana contracts...')).start();
  try {
    // Load tweets from disk.
    const tweetData = await fs.readFile(inputFile, 'utf-8');
    const tweets = JSON.parse(tweetData);
    const { CryptoAnalyzer } = await import('@podx/scraper');
    const analyzer = new CryptoAnalyzer();
    // Prefer the analyzer's own loader when available; otherwise assign the
    // parsed tweets directly.
    if (typeof analyzer.loadTweets === 'function') {
      await analyzer.loadTweets(inputFile);
    } else {
      (analyzer as any).tweets = tweets;
    }
    spinner.text = chalk.cyan('π Extracting Solana contracts...');
    const contractAnalysis = analyzer.analyzeSolanaContracts();
    const analysis = {
      generatedAt: new Date().toISOString(),
      inputFile,
      totalTweets: tweets.length,
      solanaAnalysis: contractAnalysis,
      contractDetails: contractAnalysis.contractMentions.map((mention: any) => ({
        contract: mention.contract,
        mentions: mention.mentions,
        firstSeen: mention.firstSeen,
        lastSeen: mention.lastSeen,
        contexts: mention.contexts.slice(0, 3), // First 3 contexts
        tweetIds: mention.tweets.slice(0, 5).map((t: any) => t.id) // First 5 tweet IDs
      }))
    };
    spinner.text = chalk.cyan('πΎ Saving contract analysis...');
    await fs.writeFile(outputFile, JSON.stringify(analysis, null, 2));
    spinner.succeed(chalk.green(`✅ Found ${contractAnalysis.totalContracts} unique Solana contracts in ${tweets.length} tweets`));
    if (contractAnalysis.totalContracts > 0) {
      console.log(chalk.gray(`π Top contracts:`));
      contractAnalysis.contractMentions.slice(0, 5).forEach((contract: any, i: number) => {
        console.log(chalk.gray(` ${i + 1}. ${contract.contract} (${contract.mentions} mentions)`));
      });
    }
    logger.info("Solana contract analysis completed", {
      operation: "solana_contracts",
      inputFile,
      outputFile,
      tweetCount: tweets.length,
      // Fix: the original referenced an undefined `contracts` variable here,
      // throwing a ReferenceError after every successful analysis.
      contractCount: contractAnalysis.totalContracts
    });
  } catch (error) {
    spinner.fail(chalk.red('β Solana contract analysis failed'));
    logger.error("Solana contract analysis failed", {
      operation: "solana_contracts",
      inputFile,
      outputFile
    }, error as Error);
    throw error;
  }
}
// Simple placeholder until replies scraping is implemented here
/**
 * No-op placeholder for API-based reply scraping; logs a warning and returns.
 *
 * @param tweetId - Tweet whose replies would be scraped.
 * @param maxReplies - Upper bound on replies that would be fetched.
 */
export async function runRepliesScrape(tweetId: string, maxReplies: number): Promise<void> {
logger.warn('runRepliesScrape (api) is a no-op placeholder', {
operation: 'replies_scrape',
tweetId,
maxReplies
});
}