
@turingnova/robots


Next.js robots.tsx generator: automatically create and serve robots.txt for Next.js applications.

export interface RobotsConfig {
  userAgent?: string;
  allow?: string[];
  disallow?: string[];
  sitemap?: string;
  crawlDelay?: number;
  host?: string;
  customRules?: string[];
}

export interface RobotsTxtContent {
  content: string;
  tsxContent: string;
}

export interface RobotsRule {
  userAgent: string;
  allow: string[];
  disallow: string[];
  crawlDelay?: number;
}

export interface ParsedRobotsTxt {
  rules: RobotsRule[];
  sitemaps: string[];
  hosts: string[];
  customRules: string[];
}

export declare function generateRobotsTxt(config?: RobotsConfig): string;
export declare function generateRobotsTsx(config?: RobotsConfig): string;
export declare function createRobotsFile(config?: RobotsConfig, outputPath?: string, format?: 'tsx' | 'txt'): void;
export declare function generateRobotsContent(config?: RobotsConfig): RobotsTxtContent;
export declare function parseRobotsTxt(content: string): ParsedRobotsTxt;
export declare function parseRobotsTxtToConfig(content: string): RobotsConfig;
export declare function validateRobotsTxt(content: string): { isValid: boolean; errors: string[]; };
export declare function readRobotsFile(filePath: string): string;
export declare function updateRobotsFile(filePath: string, config: RobotsConfig, format?: 'tsx' | 'txt'): void;
export declare function getDefaultConfig(): RobotsConfig;
export declare function mergeConfigs(base: RobotsConfig, updates: Partial<RobotsConfig>): RobotsConfig;
export declare function formatRobotsTxtForDisplay(content: string): string;
export declare function autoInit(): void;
//# sourceMappingURL=index.d.ts.map
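
A minimal usage sketch based only on the declarations above. The config values (userAgent, allow, disallow, sitemap, crawlDelay) are hypothetical examples, and the exact text emitted by generateRobotsTxt is assumed rather than verified from the package's implementation.

import { generateRobotsTxt, parseRobotsTxt, validateRobotsTxt } from '@turingnova/robots';

// Hypothetical config; field names match the RobotsConfig interface above.
const txt: string = generateRobotsTxt({
  userAgent: '*',
  allow: ['/'],
  disallow: ['/admin', '/api'],
  sitemap: 'https://example.com/sitemap.xml',
  crawlDelay: 10,
});

console.log(txt); // robots.txt content as a string (exact formatting is library-defined)

// Round-trip: parse the generated text back into structured rules and check it.
const parsed = parseRobotsTxt(txt);
console.log(parsed.rules, parsed.sitemaps);

const { isValid, errors } = validateRobotsTxt(txt);
if (!isValid) {
  console.error('robots.txt validation errors:', errors);
}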