Slow down Gitea 404s to mess with scrapers/bots
Seems to have helped quite a lot in dealing with AI scrapers that were using up all available server resources.
@@ -18,6 +18,8 @@ server {
 
     client_max_body_size 121M;
 
+    proxy_intercept_errors on;
+
     location ~ ^/(avatars|repo-avatars)/.*$ {
         proxy_buffers 1024 8k;
         proxy_pass http://_gitea_web;
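
Note: proxy_intercept_errors on; is what lets nginx act on the upstream's status code itself. Without it, Gitea's own 404 response would be passed straight through to the client, and the error_page rule added in the next hunk would never fire. A minimal standalone sketch of the pattern (the location and upstream names here are placeholders):

    location / {
        proxy_pass http://_gitea_web;    # forward to the Gitea backend
        proxy_intercept_errors on;       # let nginx handle upstream 4xx/5xx via error_page
        error_page 404 = @slow_404;      # reroute upstream 404s to the tarpit
    }
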
@@ -52,5 +54,18 @@ server {
         proxy_set_header X-Real-IP $remote_addr;
         proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
         proxy_set_header X-Forwarded-Proto $scheme;
+        error_page 404 = @slow_404;
     }
+
+    # Slow down 404 responses to make scraping random URLs less attractive
+    location @slow_404 {
+        internal;
+        default_type text/plain;
+        content_by_lua_block {
+            ngx.sleep(10)
+            ngx.status = 404
+            ngx.say("Not Found")
+            ngx.exit(ngx.HTTP_NOT_FOUND)
+        }
+    }
 }
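
The content_by_lua_block directive requires nginx built with lua-nginx-module (e.g. OpenResty). ngx.sleep yields the request without blocking the worker process, so the ten-second delay costs the server almost nothing while tying up each scraper connection. A quick way to check the behaviour, using a hypothetical hostname, is to time a request for a nonexistent path; it should return 404 after roughly ten seconds:

    time curl -s -o /dev/null -w '%{http_code}\n' https://git.example.com/no/such/path
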