From b42ba43c70b8265411bff7842d87b5c50159f073 Mon Sep 17 00:00:00 2001
From: Francis Go
Date: Sun, 29 Dec 2024 16:03:04 +0000
Subject: [PATCH 1/2] Update robots.txt to improve crawler access

---
 public/robots.txt | 16 ++++++++++------
 1 file changed, 10 insertions(+), 6 deletions(-)

diff --git a/public/robots.txt b/public/robots.txt
index b124e3d8..a7ee002c 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -1,14 +1,18 @@
 # See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
 
-User-Agent: Google
+# Disallow everything by default
+User-Agent: *
+Disallow: /
+
+# Allow Google crawler
+User-Agent: Googlebot
 Allow: /
 
+# Allow DuckDuckGo crawler
 User-Agent: DuckDuckBot
 Allow: /
 
-User-Agent: bingbot
-Crawl-delay: 5
+# Allow Bing crawler
+User-Agent: Bingbot
 Allow: /
-
-User-Agent: *
-Disallow: /
+Crawl-delay: 5

From e04bf8a0371b3700ee55634fcd4ccca9dbb8dd7e Mon Sep 17 00:00:00 2001
From: Francis Go
Date: Sun, 5 Jan 2025 13:36:01 +0000
Subject: [PATCH 2/2] Use Googlebot as User-Agent

---
 public/robots.txt | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)

diff --git a/public/robots.txt b/public/robots.txt
index a7ee002c..e5100c7a 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -1,18 +1,14 @@
 # See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
 
-# Disallow everything by default
-User-Agent: *
-Disallow: /
-
-# Allow Google crawler
-User-Agent: Googlebot
+User-Agent: Googlebot
 Allow: /
 
-# Allow DuckDuckGo crawler
 User-Agent: DuckDuckBot
 Allow: /
 
-# Allow Bing crawler
-User-Agent: Bingbot
-Allow: /
+User-Agent: bingbot
 Crawl-delay: 5
+Allow: /
+
+User-Agent: *
+Disallow: /