# My website is for people, not robots.
# I will make a few exceptions for useful
# or non-commercial services like Internet Archive.

# An empty Disallow means "nothing is disallowed" — full access.
User-agent: ia_archiver
Disallow:

User-agent: search.marginalia.nu
Disallow:

User-agent: WibyBot
Disallow:

# These bots annoy me, so they get nothing.
# Multiple User-agent lines form one group sharing the Disallow below.
User-agent: redditbot
User-agent: AdsBot-Google
User-agent: Amazonbot
User-agent: anthropic-ai
User-agent: AwarioRssBot
User-agent: AwarioSmartBot
User-agent: Bytespider
User-agent: CCBot
User-agent: ChatGPT-User
User-agent: ClaudeBot
User-agent: Claude-Web
User-agent: cohere-ai
User-agent: DataForSeoBot
User-agent: FacebookBot
User-agent: Google-Extended
User-agent: GPTBot
User-agent: ImagesiftBot
User-agent: magpie-crawler
User-agent: omgili
User-agent: omgilibot
User-agent: peer39_crawler
User-agent: peer39_crawler/1.0
User-agent: PerplexityBot
User-agent: YouBot
User-agent: Applebot
User-agent: FireCrawlAgent
User-agent: Twitterbot
User-agent: AhrefsBot
User-agent: AhrefsSiteAudit
User-agent: MJ12bot
User-agent: SiteAuditBot
User-agent: SemrushBot-BA
User-agent: SemrushBot-SI
User-agent: SemrushBot-SWA
User-agent: SemrushBot-CT
User-agent: SplitSignalBot
User-agent: SemrushBot-COUB
User-agent: dotbot
User-agent: rogerbot
User-agent: Screaming Frog SEO Spider
User-agent: cognitiveSEO
User-agent: OnCrawl
Disallow: /

# Everybody else: block the whole site except the front page.
# "Allow: /$" permits the bare root URL (the "$" end-anchor is a
# widely-supported extension, e.g. Googlebot).
User-agent: *
Disallow: /
Allow: /index.html
Allow: /$