From f014330b14b0d9821f0e3d023480d30d73ca6713 Mon Sep 17 00:00:00 2001
From: badbl0cks <4161747+badbl0cks@users.noreply.github.com>
Date: Sun, 8 Feb 2026 12:50:28 -0800
Subject: [PATCH] Add HAProxy PoW challenge, simple bad bot blocking, and
adjust mounts
Replace single deploy/haproxy.cfg with deploy/haproxy/{haproxy.cfg,challenge.html}.
HAProxy now runs a WebCrypto-based proof-of-work challenge using a stick-table,
URI normalization and a challenge backend. docker-compose mounts the haproxy
directory, and the site DB volume switches to ./db for consistency. Update robots.txt.ts to
add a honeypot path for bad bot blocking.
---
deploy/docker-compose.yml | 4 +-
deploy/haproxy.cfg | 43 ----------
deploy/haproxy/challenge.html | 143 ++++++++++++++++++++++++++++++++++
deploy/haproxy/haproxy.cfg | 85 ++++++++++++++++++++
src/pages/robots.txt.ts | 3 +
5 files changed, 233 insertions(+), 45 deletions(-)
delete mode 100644 deploy/haproxy.cfg
create mode 100644 deploy/haproxy/challenge.html
create mode 100644 deploy/haproxy/haproxy.cfg
diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml
index 8cfaefe..f752649 100644
--- a/deploy/docker-compose.yml
+++ b/deploy/docker-compose.yml
@@ -4,7 +4,7 @@ services:
restart: always
container_name: badblocks-personal-site
volumes:
- - /srv/badblocks-personal-site/db:/db
+ - ./db:/db
networks:
- proxynet
env_file:
@@ -70,7 +70,7 @@ services:
- "${PUBLIC_IP}:443:443"
- "${PUBLIC_IP}:8404:8404"
volumes:
- - ./haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg:ro
+ - ./haproxy:/usr/local/etc/haproxy:ro
- ./certs:/certs:ro
restart: always
networks:
diff --git a/deploy/haproxy.cfg b/deploy/haproxy.cfg
deleted file mode 100644
index 4e7fc5e..0000000
--- a/deploy/haproxy.cfg
+++ /dev/null
@@ -1,43 +0,0 @@
-global
- daemon
- log stdout format raw local0 info
- maxconn 2000
-
-defaults
- mode http
- log global
- timeout connect 5s
- timeout client 30s
- timeout server 30s
- timeout check 5s
- retries 3
- option httplog
- option dontlognull
- option redispatch
-
-frontend http
- bind :80
- mode http
-
- http-request redirect scheme https unless { ssl_fc }
-
-frontend https
- bind :443 ssl crt /certs/fullcert.pem
-
- http-response set-header Strict-Transport-Security "max-age=16000000; includeSubDomains; preload;"
- default_backend main
-
-backend main
- balance leastconn
- option httpchk GET /
- http-check expect status 200
-
- server badblocks-personal-site badblocks-personal-site:4321 check resolvers docker resolve-prefer ipv4 init-addr none
-
-resolvers docker
- nameserver dns1 127.0.0.11:53
- resolve_retries 3
- timeout resolve 1s
- timeout retry 1s
- hold valid 10s
- hold obsolete 30s
diff --git a/deploy/haproxy/challenge.html b/deploy/haproxy/challenge.html
new file mode 100644
index 0000000..73eeca0
--- /dev/null
+++ b/deploy/haproxy/challenge.html
@@ -0,0 +1,143 @@
+
+
+
+Challenge Accepted!
+
+
+
+
+
+
diff --git a/deploy/haproxy/haproxy.cfg b/deploy/haproxy/haproxy.cfg
new file mode 100644
index 0000000..c3a6d7b
--- /dev/null
+++ b/deploy/haproxy/haproxy.cfg
@@ -0,0 +1,85 @@
+global
+ daemon
+ log stdout format raw local0 info
+ maxconn 2000
+ # For normalize-uri
+ expose-experimental-directives
+
+defaults
+ mode http
+ log global
+ timeout connect 5s
+ timeout client 30s
+ timeout server 30s
+ timeout check 5s
+ retries 3
+ option httplog
+ option dontlognull
+ option redispatch
+
+frontend http
+ bind :80
+ mode http
+
+ http-request redirect scheme https unless { ssl_fc }
+
+frontend www
+ bind :443 ssl crt /certs/fullcert.pem
+
+ # 2 general purpose tags in this stick-table (name defaults to frontend name, i.e. www)
+ stick-table type ipv6 size 1m expire 2d store gpt(2)
+ http-request track-sc0 src
+ http-request normalize-uri path-merge-slashes
+ http-request normalize-uri path-strip-dot
+ http-request normalize-uri path-strip-dotdot
+
+ # Drop the connection immediately if the requester previously requested the honeypot path
+ http-request silent-drop if { sc_get_gpt(0,0) gt 0 }
+
+ # Protect all paths except /robots.txt, /.well-known/*, and /favicon.ico
+ acl unprotected_path path -m reg ^/(robots\.txt|\.well-known/.*|favicon\.ico|_challenge)$
+ # Matches the default config of Anubis, which triggers the challenge on "Mozilla" user agents
+ acl protected_ua hdr(User-Agent) -m beg Mozilla/
+ # Set stick table index 0 to 1 if request is for honeypot path
+ http-request sc-set-gpt(0,0) 1 if { path -m beg /blokmeplz/ }
+ http-request silent-drop if { path -m beg /blokmeplz/ }
+
+ acl accepted sc_get_gpt(1,0) gt 0
+ http-request return status 200 content-type "text/html; charset=UTF-8" hdr "Cache-control" "max-age=0, no-cache" lf-file /usr/local/etc/haproxy/challenge.html if !unprotected_path protected_ua !accepted
+ use_backend challenge if { path -m beg /_challenge }
+
+ http-response set-header Strict-Transport-Security "max-age=16000000; includeSubDomains; preload"
+ default_backend main
+
+backend challenge
+ mode http
+ option http-buffer-request
+ # The parameter to table must match the stick table used in the frontend.
+ http-request track-sc0 src table www
+ acl challenge_req method POST
+ http-request set-var(txn.tries) req.body_param(tries)
+ http-request set-var(txn.ts) req.body_param(ts)
+ http-request set-var(txn.host) hdr(Host),host_only
+ http-request set-var(txn.hash) src,concat(;,txn.host,),concat(;,txn.ts,),concat(;,txn.tries),sha2,hex
+ acl ts_recent date,neg,add(txn.ts) ge -60
+ # 4 is the difficulty, should match "diff" in challenge.html.
+ acl hash_good var(txn.hash) -m reg 0{4}.*
+ http-request sc-set-gpt(1,0) 1 if challenge_req ts_recent hash_good
+ http-request return status 200 if challenge_req hash_good
+ http-request return status 400 content-type "text/html; charset=UTF-8" hdr "Cache-control" "max-age=0" string "Bad request" if !challenge_req OR !hash_good
+
+backend main
+ mode http
+ balance leastconn
+ option httpchk GET /health
+ http-check expect status 200
+
+ server badblocks-personal-site badblocks-personal-site:4321 check resolvers docker resolve-prefer ipv4 init-addr none
+
+resolvers docker
+ nameserver dns1 127.0.0.11:53
+ resolve_retries 3
+ timeout resolve 1s
+ timeout retry 1s
+ hold valid 10s
+ hold obsolete 30s
diff --git a/src/pages/robots.txt.ts b/src/pages/robots.txt.ts
index cbac251..2d83a78 100644
--- a/src/pages/robots.txt.ts
+++ b/src/pages/robots.txt.ts
@@ -69,6 +69,9 @@ DisallowAITraining: /
Content-Usage: ai=n
Allow: /
+User-Agent: *
+Disallow: /blokmeplz/
+
Sitemap: ${sitemapURL.href}
`;