feat(robots): add configuration to disable web crawlers

This commit is contained in:
Jacky 2025-03-19 09:56:29 +00:00
parent 24205ec07f
commit 1800b0c4cd
No known key found for this signature in database
GPG key ID: 215C21B10DF38B4D

View file

@@ -0,0 +1,26 @@
# Nginx UI Template Start
name = "Disable Robots"
author = "@0xJacky"
description = { en = "Disable Robots", zh_CN = "禁止搜索引擎爬虫"}
# Pipe-separated list of crawler User-Agent substrings. It is substituted
# into the map regex below as {{ .userAgents }}, where it is matched
# case-insensitively against $http_user_agent.
[variables.userAgents]
type = "string"
name = { en = "User Agents", zh_CN = "用户代理"}
value = "Googlebot|Bingbot|Baiduspider|YandexBot|Slurp|DuckDuckBot|Sogou|360Spider|facebot|AhrefsBot|SEMrushBot"
# Nginx UI Template End
# Nginx UI Custom Start
# Classify the request by User-Agent: $is_bad_bot becomes 1 when the
# header matches any configured crawler pattern (the ~* prefix makes the
# regex case-insensitive), and 0 otherwise.
# NOTE(review): map must live in the http context — confirm where this
# template is injected.
map $http_user_agent $is_bad_bot {
    default                1;
    ~*({{ .userAgents }})  1;
}
# Nginx UI Custom End
# Drop flagged crawlers outright: 444 is nginx's non-standard status code
# that closes the connection without sending any response to the client.
if ($is_bad_bot) {
return 444;
}
# Serve an inline robots.txt that disallows all compliant crawlers.
location = /robots.txt {
    # Use default_type so the inline `return` response carries exactly one
    # Content-Type header; the previous `add_header Content-Type ...` added
    # a second Content-Type alongside the one nginx derives from the
    # default MIME type, producing a duplicated header.
    default_type text/plain;
    # Keep the response uncached so policy changes take effect immediately.
    add_header Cache-Control "no-store, no-cache, must-revalidate";
    return 200 "User-agent: *\nDisallow: /\n";
}