This has helped quite a lot in dealing with AI scrapers that were using up all available server resources.
72 lines · 2.1 KiB · Plaintext
# Generated by Chef

# Backend pool for the Gitea web application; all locations below
# proxy to this single upstream.
upstream _gitea_web {
    server <%= @upstream_host %>:<%= @upstream_port %>;
}

server {
    server_name <%= @server_name %>;

    # IPv4 listener; bind to a specific address only when one is configured.
    listen <%= "#{node['openresty']['listen_ip']}:" if node['openresty']['listen_ip'] %>443 ssl http2;
    # IPv6 listener must be guarded like the IPv4 one: rendering
    # "listen []:443 ssl http2;" when the attribute is unset would make
    # nginx fail to start.
    <%= "listen [#{node['openresty']['listen_ipv6']}]:443 ssl http2;" if node['openresty']['listen_ipv6'] %>

    ssl_certificate <%= @ssl_cert %>;
    ssl_certificate_key <%= @ssl_key %>;

    access_log <%= node['openresty']['log_dir'] %>/<%= @server_name %>.access.log;
    error_log <%= node['openresty']['log_dir'] %>/<%= @server_name %>.error.log warn;

    add_header Strict-Transport-Security "max-age=31536000";

    client_max_body_size 121M;

    # Needed so upstream 404s are routed through error_page (see location /).
    proxy_intercept_errors on;

    # Shared proxy settings: every location below proxies to the same
    # upstream with the same buffers and headers, so declare them once at
    # server level. (A location inherits proxy_set_header only when it
    # declares none of its own — which holds for every location here.)
    proxy_buffers 1024 8k;
    proxy_set_header Connection $http_connection;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;

    # Avatar images change rarely; let clients cache them.
    location ~ ^/(avatars|repo-avatars)/.*$ {
        proxy_pass http://_gitea_web;
        expires 30d;
    }

    # Docker registry: image-layer uploads can be arbitrarily large, so
    # lift the body-size limit for this path only.
    location /v2/ {
        client_max_body_size 0;
        proxy_pass http://_gitea_web;
    }

    location / {
        proxy_pass http://_gitea_web;
        # Route 404s through the throttled handler below.
        error_page 404 = @slow_404;
    }

    # Slow down 404 responses to make scraping random URLs less attractive.
    location @slow_404 {
        internal;
        default_type text/plain;
        content_by_lua_block {
            ngx.sleep(10)
            ngx.status = 404
            ngx.say("Not Found")
            ngx.exit(ngx.HTTP_NOT_FOUND)
        }
    }
}