@podx/cli
Version:
💻 Command-line interface for PODx - Advanced Twitter/X scraping and crypto analysis toolkit
886 lines (746 loc) • 33 kB
text/typescript
import chalk from 'chalk';
import type { Tweet, TweetReply } from '@podx/core/types';
import { CryptoAnalyzer } from '@podx/scraper';
import { CommentScraper } from '@podx/scraper';
import { promises as fs } from 'fs';
import { logger } from '@podx/core';
import { Spinner, ProgressBar } from '../utils';
import { formatNumber } from '../utils';
import { scrapeAccount, scrapeWithAnalysis } from '@podx/scraper';
/**
 * Loads tweets from `inputFile`, extracts token mentions and trading signals,
 * and writes the combined report to `outputFile` as pretty-printed JSON.
 * Progress is shown via a spinner; a summary is printed on success.
 * Rethrows any failure after logging it.
 */
export async function runCryptoAnalysis(inputFile: string, outputFile: string): Promise<void> {
  logger.info("Starting crypto analysis", { operation: "crypto_analysis", inputFile, outputFile });
  const spinner = new Spinner('š Analyzing crypto signals...').start();
  try {
    const analyzer = new CryptoAnalyzer();
    await analyzer.loadTweets(inputFile);

    spinner.updateMessage('š Extracting token mentions...');
    const mentions = analyzer.extractTokenMentions();

    spinner.updateMessage('šÆ Generating trading signals...');
    const tradingSignals = analyzer.generateSignals(mentions);

    spinner.updateMessage('š¾ Saving analysis results...');
    const summary = {
      totalTweets: analyzer.getTweets().length,
      totalTokenMentions: mentions.length,
      // Distinct symbols across all mentions.
      uniqueTokens: new Set(mentions.map((mention) => mention.symbol)).size,
      generatedSignals: tradingSignals.length,
      generatedAt: new Date().toISOString()
    };
    const report = {
      summary,
      tokenMentions: mentions,
      signals: tradingSignals
    };
    await fs.writeFile(outputFile, JSON.stringify(report, null, 2));

    spinner.succeed('Crypto analysis completed!');

    // Operator-facing summary.
    console.log(chalk.cyan('\nš Analysis Summary:'));
    console.log(chalk.gray(` Tweets analyzed: ${formatNumber(summary.totalTweets)}`));
    console.log(chalk.gray(` Token mentions: ${formatNumber(summary.totalTokenMentions)}`));
    console.log(chalk.gray(` Unique tokens: ${formatNumber(summary.uniqueTokens)}`));
    console.log(chalk.gray(` Trading signals: ${formatNumber(summary.generatedSignals)}`));

    logger.info("Crypto analysis completed", {
      operation: "crypto_analysis",
      inputFile,
      outputFile,
      tweetCount: summary.totalTweets,
      tokenCount: summary.totalTokenMentions,
      signalCount: summary.generatedSignals
    });
  } catch (error) {
    spinner.fail('Crypto analysis failed');
    logger.error("Crypto analysis failed", { operation: "crypto_analysis", inputFile, outputFile }, error as Error);
    throw error;
  }
}
/**
 * Loads tweets from `inputFile`, profiles the posting accounts, and writes a
 * reputation report (per-bucket counts plus the top ten trusted accounts by
 * average engagement) to `outputFile`. Rethrows any failure after logging it.
 */
export async function runAccountAnalysis(inputFile: string, outputFile: string): Promise<void> {
  logger.info("Starting account analysis", { operation: "account_analysis", inputFile, outputFile });
  const spinner = new Spinner('š„ Analyzing accounts...').start();
  try {
    const analyzer = new CryptoAnalyzer();
    await analyzer.loadTweets(inputFile);

    spinner.updateMessage('š Detecting bot accounts...');
    const profiles = analyzer.getAccountProfiles();

    spinner.updateMessage('š Calculating reputation scores...');
    // Tally how many accounts fall into each reputation bucket.
    const reputationBreakdown: Record<string, number> = {};
    for (const profile of profiles) {
      reputationBreakdown[profile.reputation] = (reputationBreakdown[profile.reputation] ?? 0) + 1;
    }
    // Ten most engaged trusted accounts, highest average engagement first.
    const topTrusted = profiles
      .filter((profile) => profile.reputation === 'trusted')
      .sort((lhs, rhs) => rhs.avgEngagement - lhs.avgEngagement)
      .slice(0, 10);
    const reputationAnalysis = { reputationBreakdown, topTrusted };

    spinner.updateMessage('š¾ Saving account analysis...');
    const summary = {
      totalAccounts: profiles.length,
      suspiciousAccounts: reputationBreakdown['suspicious'] ?? 0,
      botAccounts: reputationBreakdown['bot'] ?? 0,
      trustedAccounts: reputationBreakdown['trusted'] ?? 0,
      generatedAt: new Date().toISOString()
    };
    const analysisResults = {
      summary,
      accountProfiles: profiles,
      reputationAnalysis
    };
    await fs.writeFile(outputFile, JSON.stringify(analysisResults, null, 2));

    spinner.succeed('Account analysis completed!');

    // Operator-facing summary.
    console.log(chalk.cyan('\nš„ Account Analysis Summary:'));
    console.log(chalk.gray(` Total accounts: ${formatNumber(summary.totalAccounts)}`));
    console.log(chalk.gray(` Trusted: ${formatNumber(summary.trustedAccounts)}`));
    console.log(chalk.gray(` Suspicious: ${formatNumber(summary.suspiciousAccounts)}`));
    console.log(chalk.gray(` Bots: ${formatNumber(summary.botAccounts)}`));

    logger.info("Account analysis completed", {
      operation: "account_analysis",
      inputFile,
      outputFile,
      accountCount: summary.totalAccounts,
      trustedCount: summary.trustedAccounts,
      suspiciousCount: summary.suspiciousAccounts,
      botCount: summary.botAccounts
    });
  } catch (error) {
    spinner.fail('Account analysis failed');
    logger.error("Account analysis failed", { operation: "account_analysis", inputFile, outputFile }, error as Error);
    throw error;
  }
}
/**
 * Finds tweets in `inputFile` mentioning `token`, scrapes up to `maxReplies`
 * replies for the first few matching tweets, runs sentiment analysis over the
 * collected replies, and writes the report to `outputFile`.
 *
 * Reply scraping is best-effort: a failure for one tweet is logged and the
 * loop continues. Returns early (no output file) when nothing matches.
 */
export async function runRepliesAnalysis(inputFile: string, token: string, outputFile: string, maxReplies: number): Promise<void> {
  logger.info("Starting replies analysis", {
    operation: "replies_analysis",
    inputFile,
    outputFile,
    token,
    maxReplies
  });
  const spinner = new Spinner(`š¬ Analyzing replies for ${token}...`).start();
  try {
    // Cap how many matching tweets we actually scrape replies for.
    // Hoisted so the loop bound, progress message and saved slice stay in
    // sync (the original repeated the literal 10 in three places).
    const MAX_TWEETS_TO_ANALYZE = 10;

    // Load tweets. NOTE(review): JSON.parse output is trusted to match
    // Tweet[]; no runtime validation is performed here.
    const tweetData = await fs.readFile(inputFile, 'utf-8');
    const tweets: Tweet[] = JSON.parse(tweetData);

    // Case-insensitive substring match against the tweet text.
    const tokenTweets = tweets.filter(tweet =>
      tweet.text?.toLowerCase().includes(token.toLowerCase())
    );
    spinner.updateMessage(`š Found ${tokenTweets.length} tweets mentioning ${token}`);
    if (tokenTweets.length === 0) {
      spinner.updateMessage(`No tweets found mentioning ${token}`);
      spinner.stop();
      return;
    }

    // Scrape replies for the first MAX_TWEETS_TO_ANALYZE matching tweets.
    const commentScraper = new CommentScraper();
    const allReplies: TweetReply[] = [];
    const tweetsToAnalyze = Math.min(tokenTweets.length, MAX_TWEETS_TO_ANALYZE);
    for (let i = 0; i < tweetsToAnalyze; i++) {
      const tweet = tokenTweets[i];
      if (!tweet) continue; // satisfies noUncheckedIndexedAccess
      spinner.updateMessage(`š¬ Scraping replies for tweet ${i + 1}/${tweetsToAnalyze}`);
      try {
        const replies = await commentScraper.scrapeTweetReplies(tweet.id, maxReplies);
        allReplies.push(...replies);
      } catch (error) {
        // Best-effort: log and keep going with the remaining tweets.
        logger.warn(`Failed to scrape replies for tweet ${tweet.id}`, {
          operation: "replies_analysis",
          tweetId: tweet.id,
          error: (error as Error).message
        });
      }
    }

    // Aggregate sentiment over every collected reply.
    spinner.updateMessage('š Analyzing sentiment...');
    const sentimentAnalysis = analyzeReplySentiment(allReplies);

    // Save results.
    const analysisResults = {
      summary: {
        token,
        // BUG FIX: previously reported tokenTweets.length even though only
        // the first MAX_TWEETS_TO_ANALYZE tweets had replies scraped.
        tweetsAnalyzed: tweetsToAnalyze,
        tweetsFound: tokenTweets.length,
        totalReplies: allReplies.length,
        averageSentiment: sentimentAnalysis.averageSentiment,
        sentimentDistribution: sentimentAnalysis.distribution,
        generatedAt: new Date().toISOString()
      },
      tokenTweets: tokenTweets.slice(0, MAX_TWEETS_TO_ANALYZE), // only the analyzed tweets
      replies: allReplies,
      sentimentAnalysis
    };
    await fs.writeFile(outputFile, JSON.stringify(analysisResults, null, 2));
    spinner.succeed('Replies analysis completed!');

    // Display summary.
    console.log(chalk.cyan('\nš¬ Replies Analysis Summary:'));
    console.log(chalk.gray(` Token: ${token}`));
    console.log(chalk.gray(` Tweets analyzed: ${analysisResults.summary.tweetsAnalyzed}`));
    console.log(chalk.gray(` Total replies: ${analysisResults.summary.totalReplies}`));
    // averageSentiment is always a number here, so toFixed is always safe
    // (the old `?.toFixed(2) || 'N/A'` fallback was unreachable).
    console.log(chalk.gray(` Average sentiment: ${analysisResults.summary.averageSentiment.toFixed(2)}`));

    logger.info("Replies analysis completed", {
      operation: "replies_analysis",
      token,
      tweetsAnalyzed: analysisResults.summary.tweetsAnalyzed,
      totalReplies: analysisResults.summary.totalReplies
    });
  } catch (error) {
    spinner.fail('Replies analysis failed');
    logger.error("Replies analysis failed", {
      operation: "replies_analysis",
      inputFile,
      outputFile,
      token
    }, error as Error);
    throw error;
  }
}
/**
 * Aggregates reply sentiment into an average score and a count per label.
 *
 * @param replies - Replies whose optional `sentiment` label and
 *   `sentimentScore` are folded into the result.
 * @returns `averageSentiment` (mean of scores, 0 when there are no replies)
 *   and `distribution` (counts for positive/negative/neutral).
 */
function analyzeReplySentiment(replies: TweetReply[]) {
  const distribution = { positive: 0, negative: 0, neutral: 0 };
  if (replies.length === 0) {
    return { averageSentiment: 0, distribution };
  }
  let totalSentiment = 0;
  for (const reply of replies) {
    const label = reply.sentiment ?? 'neutral';
    // BUG FIX: an unexpected label used to hit `distribution[label]++` on an
    // undefined slot, turning that count into NaN. Unknown labels now count
    // as neutral.
    if (label in distribution) {
      distribution[label as keyof typeof distribution]++;
    } else {
      distribution.neutral++;
    }
    // `??` (not `||`) so an explicit score of 0 is preserved as-is.
    totalSentiment += reply.sentimentScore ?? 0;
  }
  return {
    averageSentiment: totalSentiment / replies.length,
    distribution
  };
}
// Additional imports already handled above
/** Flags selecting which steps runSelectiveDataCollection performs. */
export interface DataCollectionOptions {
  /** Scrape the target account's tweets. */
  tweets: boolean;
  /** Scrape replies for the scraped tweets (requires tweets to exist on disk). */
  replies: boolean;
  /** Run token-mention / trading-signal analysis over the scraped tweets. */
  cryptoAnalysis: boolean;
  /** Run account reputation / bot-detection analysis. */
  accountAnalysis: boolean;
  /** NOTE(review): not consumed anywhere in this module — confirm it is used elsewhere. */
  locationData: boolean;
  /** Persist the analysis results to the Convex database. */
  convexSave: boolean;
}
export async function runSelectiveDataCollection(
targetUsername: string,
dataOptions: DataCollectionOptions,
advancedOptions: {
maxTweets: number;
maxRepliesPerTweet?: number;
tokenFilter?: string;
includeRetweets: boolean;
}
): Promise<void> {
console.log(chalk.blue.bold('\nš STARTING DATA COLLECTION OPERATION š\n'));
const spinner = new Spinner('Initializing scrapers...').start();
try {
const analysisResults: Record<string, unknown> = {};
// Step 1: Scrape tweets if requested
if (dataOptions.tweets) {
spinner.updateMessage(`š¦ Scraping tweets from @${targetUsername}...`);
const scraperConfig = {
targetUsername,
maxTweets: advancedOptions.maxTweets
};
await runScraping(scraperConfig);
spinner.succeed(`Scraped tweets from @${targetUsername}`);
// Load the scraped tweets for further processing
try {
const tweetsData = await fs.readFile('scraped_tweets.json', 'utf-8');
const tweets = JSON.parse(tweetsData);
analysisResults.tweets = {
count: tweets.length,
loaded: true,
file: 'scraped_tweets.json'
};
console.log(chalk.gray(`š¤ Loaded ${tweets.length} tweets for analysis`));
} catch (error) {
console.warn('Failed to load scraped tweets for analysis');
analysisResults.tweets = { count: 0, loaded: false };
}
}
// Step 2: Scrape replies if requested
if (dataOptions.replies) {
spinner.updateMessage('š¬ Scraping replies...');
try {
const { CommentScraper } = await import('@podx/scraper');
const commentScraper = new CommentScraper();
// Load the scraped tweets to get replies for
const tweetsData = await fs.readFile('scraped_tweets.json', 'utf-8');
const tweets = JSON.parse(tweetsData);
const allReplies: any[] = [];
// Get replies for first 10 tweets (to avoid rate limits)
for (const tweet of tweets.slice(0, 10)) {
try {
const replies = await commentScraper.scrapeReplies(tweet.id, 20); // Max 20 replies per tweet
allReplies.push({
originalTweet: tweet,
replies
});
} catch (error) {
console.warn(`Failed to scrape replies for tweet ${tweet.id}`);
}
}
// Save replies
await fs.writeFile('scraped_replies.json', JSON.stringify(allReplies, null, 2));
analysisResults.replies = { totalReplies: allReplies.reduce((sum, t) => sum + t.replies.length, 0) };
spinner.succeed(`Scraped replies for ${allReplies.length} tweets`);
} catch (error) {
spinner.warn('Reply scraping failed - continuing without replies');
console.warn('Reply scraping error:', (error as Error).message);
}
}
// Step 3: Run crypto analysis if requested
if (dataOptions.cryptoAnalysis) {
spinner.updateMessage('š Running crypto analysis...');
const cryptoResults = await runCryptoAnalysisInternal('scraped_tweets.json', 'crypto_analysis.json');
analysisResults.crypto = cryptoResults;
spinner.succeed('Crypto analysis completed');
}
// Step 4: Run account analysis if requested
if (dataOptions.accountAnalysis) {
spinner.updateMessage('š„ Running account analysis...');
const accountResults = await runAccountAnalysisInternal('scraped_tweets.json', 'account_analysis.json');
analysisResults.accounts = accountResults;
spinner.succeed('Account analysis completed');
}
// Step 5: Save to Convex if requested
if (dataOptions.convexSave) {
spinner.updateMessage('š¾ Saving to Convex...');
try {
const { convexStorage } = await import('@podx/core');
// Save analysis results to Convex
if (analysisResults.crypto) {
await convexStorage.saveAnalysis({
generatedAt: new Date().toISOString(),
totalTweets: analysisResults.tweets?.count || 0,
totalAccounts: analysisResults.accounts?.totalAccounts || 0,
tokenMentions: analysisResults.crypto.tokenMentions || [],
signals: analysisResults.crypto.signals || [],
accountProfiles: analysisResults.accounts?.accounts || [],
summary: {
topTokens: analysisResults.crypto.topTokens || [],
signalDistribution: analysisResults.crypto.signalDistribution || {},
accountReputation: analysisResults.accounts?.reputationDistribution || {}
}
});
spinner.succeed('Analysis data saved to Convex database');
} else {
spinner.warn('No analysis data to save to Convex');
}
} catch (error) {
spinner.warn('Convex save failed - data saved locally only');
console.warn('Convex error:', (error as Error).message);
}
}
console.log(chalk.green.bold('\nš DATA COLLECTION COMPLETED! š'));
console.log(chalk.cyan('Summary of collected data:'));
if (dataOptions.tweets) console.log(chalk.green(` ā
Tweets: scraped_data/${targetUsername}/`));
if (dataOptions.replies) console.log(chalk.green(' ā
Replies: scraped'));
if (dataOptions.cryptoAnalysis) console.log(chalk.green(' ā
Crypto Analysis: crypto_analysis.json'));
if (dataOptions.accountAnalysis) console.log(chalk.green(' ā
Account Analysis: account_analysis.json'));
if (dataOptions.convexSave) console.log(chalk.green(' ā
Convex: data saved to database'));
logger.info("Data collection operation completed", {
operation: "data_collection",
user: targetUsername,
options: dataOptions,
advancedOptions
});
} catch (error) {
spinner.fail('Data collection failed');
logger.error("Data collection operation failed", {
operation: "data_collection",
user: targetUsername,
options: dataOptions
}, error as Error);
throw error;
}
}
/**
 * Logs the scraping request, then delegates to scrapeAccount for the actual
 * work. Errors from scrapeAccount propagate to the caller.
 */
export async function runScraping(config: { targetUsername: string, maxTweets: number }): Promise<void> {
  const { targetUsername, maxTweets } = config;
  logger.info("Starting scraping operation", {
    operation: "scraping",
    user: targetUsername,
    maxTweets
  });
  await scrapeAccount(targetUsername, maxTweets);
}
/**
 * Spinner-free variant of runCryptoAnalysis used by the selective-collection
 * pipeline: loads tweets, extracts mentions/signals, writes them to
 * `outputFile`, and returns them for in-memory aggregation.
 *
 * @throws Rethrows (after logging) any load/analysis/write failure.
 */
async function runCryptoAnalysisInternal(inputFile: string, outputFile: string): Promise<{ tokenMentions: any[]; signals: any[] }> {
  const analyzer = new CryptoAnalyzer();
  try {
    await analyzer.loadTweets(inputFile);
    const tokenMentions = analyzer.extractTokenMentions();
    const signals = analyzer.generateSignals(tokenMentions);
    // Save results using the module-level fs import (the old dynamic
    // `import('fs/promises')` needlessly shadowed it).
    await fs.writeFile(outputFile, JSON.stringify({
      tokenMentions,
      signals,
      generatedAt: new Date().toISOString()
    }, null, 2));
    return { tokenMentions, signals };
  } catch (error) {
    logger.error("Crypto analysis failed", { operation: "crypto_analysis", inputFile }, error as Error);
    throw error;
  }
}
/**
 * Spinner-free variant of runAccountAnalysis used by the selective-collection
 * pipeline: loads tweets, builds account profiles, writes them to
 * `outputFile`, and returns the profile array.
 *
 * @throws Rethrows (after logging) any load/analysis/write failure.
 */
async function runAccountAnalysisInternal(inputFile: string, outputFile: string): Promise<any[]> {
  const analyzer = new CryptoAnalyzer();
  try {
    await analyzer.loadTweets(inputFile);
    const accountProfiles = analyzer.getAccountProfiles();
    // Save results using the module-level fs import (the old dynamic
    // `import('fs/promises')` needlessly shadowed it).
    await fs.writeFile(outputFile, JSON.stringify({
      accountProfiles,
      generatedAt: new Date().toISOString()
    }, null, 2));
    return accountProfiles;
  } catch (error) {
    logger.error("Account analysis failed", { operation: "account_analysis", inputFile }, error as Error);
    throw error;
  }
}
/** Input for runBatchScraping. Exactly one of `usernames` / `usernamesFile` must be set. */
export interface BatchScrapingConfig {
  /** Accounts to scrape: an array, or a comma-separated string. */
  usernames?: string | string[];
  /** Path to a newline-separated file of usernames (used when `usernames` is absent). */
  usernamesFile?: string;
  /** Maximum tweets to request per account. */
  maxTweets: number;
  /** How many accounts are scraped in parallel per batch. */
  concurrent: number;
  /** Pause between batches, in seconds (0 disables the delay). */
  delaySeconds: number;
}
/**
 * Scrapes multiple accounts in concurrent batches of `config.concurrent`,
 * with an optional delay between batches, then prints a per-account summary.
 *
 * Usernames come from `config.usernames` (array or comma-separated string)
 * or, failing that, a newline-separated file at `config.usernamesFile`.
 *
 * @throws When neither username source is provided, the file cannot be read,
 *   or no valid usernames remain after trimming. Individual account failures
 *   are recorded and reported, not rethrown.
 */
export async function runBatchScraping(config: BatchScrapingConfig): Promise<void> {
  logger.info("Starting batch scraping operation", {
    operation: "batch_scraping",
    concurrent: config.concurrent,
    maxTweets: config.maxTweets,
    delaySeconds: config.delaySeconds
  });
  let usernames: string[] = [];
  // Parse usernames from input: explicit list takes priority over the file.
  if (config.usernames) {
    usernames = Array.isArray(config.usernames)
      ? config.usernames
      : config.usernames.split(',').map(u => u.trim()).filter(u => u);
  } else if (config.usernamesFile) {
    try {
      const fileContent = await fs.readFile(config.usernamesFile, 'utf-8');
      usernames = fileContent.split('\n').map(u => u.trim()).filter(u => u);
    } catch (error) {
      logger.error("Failed to read usernames file", {
        operation: "batch_scraping",
        file: config.usernamesFile
      }, error as Error);
      throw new Error(`Failed to read usernames file: ${config.usernamesFile}`);
    }
  } else {
    throw new Error('Either --usernames or --file must be provided');
  }
  if (usernames.length === 0) {
    throw new Error('No valid usernames found');
  }
  console.log(chalk.blue.bold(`\nš BATCH SCRAPING ${usernames.length} ACCOUNTS š\n`));
  console.log(chalk.cyan(`Accounts: ${usernames.join(', ')}`));
  console.log(chalk.cyan(`Max tweets per account: ${config.maxTweets}`));
  console.log(chalk.cyan(`Concurrent scrapes: ${config.concurrent}`));
  console.log(chalk.cyan(`Delay between batches: ${config.delaySeconds}s\n`));
  const results: { username: string; success: boolean; error?: string; tweetsCount?: number }[] = [];
  const totalStartTime = Date.now();
  // Process accounts in windows of `config.concurrent`.
  for (let i = 0; i < usernames.length; i += config.concurrent) {
    const batch = usernames.slice(i, i + config.concurrent);
    const batchNumber = Math.floor(i / config.concurrent) + 1;
    const totalBatches = Math.ceil(usernames.length / config.concurrent);
    console.log(chalk.yellow(`\nš¦ Processing batch ${batchNumber}/${totalBatches}: ${batch.join(', ')}`));
    // Process batch concurrently; each task appends its own outcome to
    // `results` (append-only, so concurrent tasks cannot clobber each other).
    const batchPromises = batch.map(async (username) => {
      const spinner = new Spinner(`Scraping @${username}...`).start();
      const startTime = Date.now();
      try {
        await scrapeAccount(username, config.maxTweets, (progress) => {
          spinner.updateMessage(`@${username}: ${progress.count}/${progress.max} tweets`);
        });
        const duration = Date.now() - startTime;
        spinner.succeed(`@${username} completed (${Math.round(duration/1000)}s)`);
        // NOTE(review): tweetsCount records the requested maximum, not the
        // number actually scraped — confirm scrapeAccount never returns fewer.
        results.push({
          username,
          success: true,
          tweetsCount: config.maxTweets
        });
        logger.info("Batch account scraping completed", {
          operation: "batch_scraping",
          user: username,
          duration,
          maxTweets: config.maxTweets
        });
      } catch (error) {
        // Per-account failure: record it and let the rest of the batch finish.
        const errorMsg = error instanceof Error ? error.message : String(error);
        spinner.fail(`@${username} failed: ${errorMsg}`);
        results.push({
          username,
          success: false,
          error: errorMsg
        });
        logger.error("Batch account scraping failed", {
          operation: "batch_scraping",
          user: username
        }, error as Error);
      }
    });
    await Promise.all(batchPromises);
    // Add delay between batches (except for the last batch)
    if (i + config.concurrent < usernames.length && config.delaySeconds > 0) {
      const delaySpinner = new Spinner(`Waiting ${config.delaySeconds}s before next batch...`).start();
      await new Promise(resolve => setTimeout(resolve, config.delaySeconds * 1000));
      delaySpinner.succeed('Ready for next batch');
    }
  }
  const totalDuration = Date.now() - totalStartTime;
  const successCount = results.filter(r => r.success).length;
  const failureCount = results.filter(r => !r.success).length;
  // Final summary
  console.log(chalk.green.bold('\nš BATCH SCRAPING COMPLETED! š'));
  console.log(chalk.cyan(`Total duration: ${Math.round(totalDuration/1000)}s`));
  console.log(chalk.green(`ā
Successful: ${successCount}`));
  if (failureCount > 0) {
    console.log(chalk.red(`ā Failed: ${failureCount}`));
  }
  // Show detailed results
  console.log(chalk.blue.bold('\nš DETAILED RESULTS:'));
  results.forEach(result => {
    if (result.success) {
      console.log(chalk.green(` ā
@${result.username}: ${result.tweetsCount} tweets ā scraped_data/${result.username}/`));
    } else {
      console.log(chalk.red(` ā @${result.username}: ${result.error}`));
    }
  });
  logger.info("Batch scraping operation completed", {
    operation: "batch_scraping",
    totalAccounts: usernames.length,
    successCount,
    failureCount,
    totalDuration,
    results: results.map(r => ({ username: r.username, success: r.success }))
  });
  // Function is declared to return void to satisfy isolated declarations; consumers log results directly.
  return;
}
// Enhanced analysis with bot detection and token shilling monitoring
/**
 * Scrapes @targetUsername with integrated bot/token/reply analysis (via
 * scrapeWithAnalysis) and prints a multi-section report. All analysis flags
 * default to true. When `options.outputFile` is set, the raw analysis object
 * is additionally written there as JSON.
 *
 * @throws Rethrows (after logging) any scraping/analysis failure.
 */
export async function runAdvancedAnalysis(
  targetUsername: string,
  maxTweets: number,
  options: {
    includeBotAnalysis?: boolean;
    includeTokenAnalysis?: boolean;
    analyzeReplies?: boolean;
    outputFile?: string;
  } = {}
): Promise<void> {
  logger.info("Starting advanced analysis", {
    operation: "advanced_analysis",
    targetUsername,
    maxTweets,
    options
  });
  const spinner = new Spinner(`š Starting advanced analysis for @${targetUsername}...`).start();
  try {
    const analysisOptions = {
      // All analysis passes are opt-out (default true).
      includeBotAnalysis: options.includeBotAnalysis ?? true,
      includeTokenAnalysis: options.includeTokenAnalysis ?? true,
      analyzeReplies: options.analyzeReplies ?? true,
      progressCallback: (progress: { count: number; max: number }) => {
        spinner.updateMessage(`š¦ Scraping @${targetUsername}: ${progress.count}/${progress.max} tweets`);
      }
    };
    // Use enhanced scraping with integrated analysis
    const result = await scrapeWithAnalysis(targetUsername, maxTweets, analysisOptions);
    if (result.analysis) {
      // Display comprehensive analysis summary
      spinner.succeed('Advanced analysis completed!');
      console.log(chalk.cyan.bold('\nš ADVANCED ANALYSIS REPORT š'));
      console.log(chalk.cyan(`Target: @${targetUsername}`));
      console.log(chalk.cyan(`Tweets scraped: ${formatNumber(result.tweets.length)}`));
      // Bot Detection Summary — only shown when explicitly requested
      // (NOTE(review): the analysis itself defaults to enabled above, so with
      // no options this section is computed but not printed — confirm intent).
      if (options.includeBotAnalysis) {
        console.log(chalk.yellow.bold('\nš¤ BOT DETECTION ANALYSIS:'));
        console.log(chalk.gray(` Suspicious replies: ${formatNumber(result.analysis.summary.suspiciousReplies)}`));
        console.log(chalk.gray(` Bot probability: ${(result.analysis.summary.botProbability * 100).toFixed(1)}%`));
        console.log(chalk.gray(` Network risk: ${(result.analysis.summary.networkRisk * 100).toFixed(1)}%`));
        if (result.analysis.patterns.genericReplies.length > 0) {
          console.log(chalk.red(` Generic replies detected: ${result.analysis.patterns.genericReplies.length}`));
        }
        if (result.analysis.patterns.coordinatedActivity.length > 0) {
          console.log(chalk.red(` Coordinated activities: ${result.analysis.patterns.coordinatedActivity.length}`));
        }
      }
      // Token Shilling Analysis
      if (options.includeTokenAnalysis && result.analysis.tokenShilling) {
        console.log(chalk.magenta.bold('\nš° TOKEN SHILLING ANALYSIS:'));
        console.log(chalk.gray(` Active shill campaigns: ${formatNumber(result.analysis.tokenShilling.activeShillCampaigns)}`));
        console.log(chalk.gray(` Suspicious tokens tracked: ${formatNumber(result.analysis.tokenShilling.suspiciousTokens.length)}`));
        console.log(chalk.gray(` Bot accounts detected: ${formatNumber(result.analysis.tokenShilling.botAccountsTracked.length)}`));
        console.log(chalk.gray(` Security alerts generated: ${formatNumber(result.analysis.tokenShilling.alerts.length)}`));
        // Show high-risk tokens
        if (result.analysis.pumpDumpAnalysis.riskTokens.length > 0) {
          console.log(chalk.red.bold('\nā ļø HIGH-RISK TOKENS DETECTED:'));
          result.analysis.pumpDumpAnalysis.riskTokens.forEach(token => {
            console.log(chalk.red(` š“ ${token.token} (Risk: ${(token.riskLevel * 100).toFixed(1)}%)`));
          });
        }
        // Show at most the first three alerts to keep the report compact
        if (result.analysis.tokenShilling.alerts.length > 0) {
          console.log(chalk.red.bold('\nšØ SECURITY ALERTS:'));
          result.analysis.tokenShilling.alerts.slice(0, 3).forEach(alert => {
            console.log(chalk.red(` šØ ${alert.alertType.toUpperCase()}: ${alert.tokenSymbol} (${alert.severity})`));
          });
        }
      }
      // Recommendations (top five only)
      if (result.analysis.recommendations.length > 0) {
        console.log(chalk.green.bold('\nš” RECOMMENDATIONS:'));
        result.analysis.recommendations.slice(0, 5).forEach(rec => {
          console.log(chalk.green(` ā ${rec}`));
        });
      }
      // Save analysis to custom file if requested
      if (options.outputFile) {
        await fs.writeFile(options.outputFile, JSON.stringify(result.analysis, null, 2));
        console.log(chalk.blue(`\nš¾ Analysis saved to: ${options.outputFile}`));
      }
      console.log(chalk.blue(`\nš¾ Data files created:`));
      console.log(chalk.gray(` Tweets: ${result.filename}`));
      if (result.analysisFilename) {
        console.log(chalk.gray(` Analysis: ${result.analysisFilename}`));
      }
    } else {
      // Scrape succeeded but no analysis object was produced.
      spinner.succeed(`Tweets scraped successfully (no analysis performed)`);
      console.log(chalk.cyan(`\nš¦ Scraped ${formatNumber(result.tweets.length)} tweets`));
      console.log(chalk.gray(` File: ${result.filename}`));
    }
    logger.info("Advanced analysis completed", {
      operation: "advanced_analysis",
      targetUsername,
      tweetCount: result.tweets.length,
      analysisPerformed: !!result.analysis,
      suspiciousReplies: result.analysis?.summary.suspiciousReplies || 0,
      botProbability: result.analysis?.summary.botProbability || 0,
      alerts: result.analysis?.tokenShilling?.alerts.length || 0
    });
  } catch (error) {
    spinner.fail('Advanced analysis failed');
    logger.error("Advanced analysis failed", {
      operation: "advanced_analysis",
      targetUsername,
      options
    }, error as Error);
    throw error;
  }
}
// Batch analysis with bot detection and token monitoring
/**
 * Runs scrapeWithAnalysis over many accounts in concurrent batches and
 * prints aggregate bot-probability / alert statistics plus a list of
 * high-risk accounts (bot probability > 70% or any security alert).
 *
 * Per-account failures are recorded and summarized, not rethrown.
 *
 * @param usernames - Accounts to analyze (without the @).
 * @param maxTweets - Tweet cap per account.
 * @param options - Analysis toggles (all default true), batch size
 *   (`concurrent`, default 2) and inter-batch delay in seconds (default 10).
 */
export async function runBatchAdvancedAnalysis(
  usernames: string[],
  maxTweets: number,
  options: {
    includeBotAnalysis?: boolean;
    includeTokenAnalysis?: boolean;
    analyzeReplies?: boolean;
    concurrent?: number;
    delay?: number;
  } = {}
): Promise<void> {
  logger.info("Starting batch advanced analysis", {
    operation: "batch_advanced_analysis",
    usernames: usernames.length,
    maxTweets,
    options
  });
  const concurrent = options.concurrent ?? 2;
  const delay = options.delay ?? 10;
  console.log(chalk.blue.bold(`\nš BATCH ADVANCED ANALYSIS - ${usernames.length} ACCOUNTS š\n`));
  console.log(chalk.cyan(`Accounts: ${usernames.join(', ')}`));
  console.log(chalk.cyan(`Max tweets per account: ${maxTweets}`));
  console.log(chalk.cyan(`Bot analysis: ${options.includeBotAnalysis ?? true ? 'enabled' : 'disabled'}`));
  console.log(chalk.cyan(`Token analysis: ${options.includeTokenAnalysis ?? true ? 'enabled' : 'disabled'}`));
  console.log(chalk.cyan(`Reply analysis: ${options.analyzeReplies ?? true ? 'enabled' : 'disabled'}`));
  const results: {
    username: string;
    success: boolean;
    error?: string;
    botProbability?: number;
    suspiciousReplies?: number;
    alerts?: number;
  }[] = [];
  // Process accounts in windows of `concurrent`.
  for (let i = 0; i < usernames.length; i += concurrent) {
    const batch = usernames.slice(i, i + concurrent);
    const batchNumber = Math.floor(i / concurrent) + 1;
    const totalBatches = Math.ceil(usernames.length / concurrent);
    console.log(chalk.yellow(`\nš¦ Processing batch ${batchNumber}/${totalBatches}: ${batch.join(', ')}`));
    // Each task appends its own outcome; append-only so concurrency is safe.
    const batchPromises = batch.map(async (username) => {
      const spinner = new Spinner(`š Analyzing @${username}...`).start();
      try {
        const result = await scrapeWithAnalysis(username, maxTweets, {
          includeBotAnalysis: options.includeBotAnalysis ?? true,
          includeTokenAnalysis: options.includeTokenAnalysis ?? true,
          analyzeReplies: options.analyzeReplies ?? true,
          progressCallback: (progress) => {
            spinner.updateMessage(`@${username}: ${progress.count}/${progress.max} tweets`);
          }
        });
        const botProb = result.analysis?.summary.botProbability || 0;
        const suspiciousReplies = result.analysis?.summary.suspiciousReplies || 0;
        const alerts = result.analysis?.tokenShilling?.alerts.length || 0;
        spinner.succeed(`@${username}: Bot ${(botProb * 100).toFixed(1)}%, ${suspiciousReplies} suspicious, ${alerts} alerts`);
        results.push({
          username,
          success: true,
          botProbability: botProb,
          suspiciousReplies,
          alerts
        });
      } catch (error) {
        const errorMsg = error instanceof Error ? error.message : String(error);
        spinner.fail(`@${username}: ${errorMsg}`);
        results.push({
          username,
          success: false,
          error: errorMsg
        });
      }
    });
    await Promise.all(batchPromises);
    // Add delay between batches (skipped after the final batch)
    if (i + concurrent < usernames.length && delay > 0) {
      const delaySpinner = new Spinner(`Waiting ${delay}s before next batch...`).start();
      await new Promise(resolve => setTimeout(resolve, delay * 1000));
      delaySpinner.succeed('Ready for next batch');
    }
  }
  // Final summary
  const successCount = results.filter(r => r.success).length;
  const failureCount = results.length - successCount;
  // BUG FIX: the old code divided by successCount unconditionally, so a run
  // where every account failed reported an average of NaN. Average only over
  // the accounts that actually produced a score, defaulting to 0.
  const scored = results.filter(r => r.success && r.botProbability !== undefined);
  const avgBotProb = scored.length > 0
    ? scored.reduce((sum, r) => sum + (r.botProbability ?? 0), 0) / scored.length
    : 0;
  const totalAlerts = results.filter(r => r.success)
    .reduce((sum, r) => sum + (r.alerts || 0), 0);
  console.log(chalk.green.bold('\nš BATCH ADVANCED ANALYSIS COMPLETED! š'));
  // BUG FIX: this string was mis-encoded with an embedded newline
  // ("ā\nSuccessful"); restored to a single line with the intended ✅.
  console.log(chalk.cyan(`✅ Successful: ${successCount}`));
  if (failureCount > 0) {
    console.log(chalk.red(`ā Failed: ${failureCount}`));
  }
  console.log(chalk.yellow(`š¤ Average bot probability: ${(avgBotProb * 100).toFixed(1)}%`));
  console.log(chalk.red(`šØ Total security alerts: ${totalAlerts}`));
  // Show high-risk accounts; `?? 0` replaces the old non-null assertions.
  const highRiskAccounts = results.filter(r =>
    r.success && ((r.botProbability ?? 0) > 0.7 || (r.alerts ?? 0) > 0)
  );
  if (highRiskAccounts.length > 0) {
    console.log(chalk.red.bold('\nā ļø HIGH-RISK ACCOUNTS:'));
    highRiskAccounts.forEach(account => {
      console.log(chalk.red(` š“ @${account.username}: Bot ${((account.botProbability ?? 0) * 100).toFixed(1)}%, ${account.alerts ?? 0} alerts`));
    });
  }
  logger.info("Batch advanced analysis completed", {
    operation: "batch_advanced_analysis",
    totalAccounts: usernames.length,
    successCount,
    failureCount,
    avgBotProbability: avgBotProb,
    totalAlerts,
    highRiskAccounts: highRiskAccounts.length
  });
}