diff --git a/backup apr 17 2025.7z b/backup apr 17 2025.7z new file mode 100644 index 0000000..b4eafe8 Binary files /dev/null and b/backup apr 17 2025.7z differ diff --git a/checkpoint_service/data/GeoLite2-ASN.mmdb b/checkpoint_service/data/GeoLite2-ASN.mmdb new file mode 100644 index 0000000..a10fe80 Binary files /dev/null and b/checkpoint_service/data/GeoLite2-ASN.mmdb differ diff --git a/checkpoint_service/data/GeoLite2-Country.mmdb b/checkpoint_service/data/GeoLite2-Country.mmdb new file mode 100644 index 0000000..eb2b22d Binary files /dev/null and b/checkpoint_service/data/GeoLite2-Country.mmdb differ diff --git a/checkpoint_service/data/checkpoint_secret.json b/checkpoint_service/data/checkpoint_secret.json new file mode 100644 index 0000000..0d47275 --- /dev/null +++ b/checkpoint_service/data/checkpoint_secret.json @@ -0,0 +1 @@ +{"hmac_secret":"HJ9EY5oN0pO71AUN/2faoYnJhZyTr875iWbw6Nl8rc8=","created_at":"2025-04-28T18:28:59.6204724-05:00","updated_at":"2025-04-28T18:33:55.3259956-05:00"} \ No newline at end of file diff --git a/checkpoint_service/data/checkpoint_tokendb/000001.sst b/checkpoint_service/data/checkpoint_tokendb/000001.sst new file mode 100644 index 0000000..7c33d89 Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/000001.sst differ diff --git a/checkpoint_service/data/checkpoint_tokendb/000002.sst b/checkpoint_service/data/checkpoint_tokendb/000002.sst new file mode 100644 index 0000000..397b55d Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/000002.sst differ diff --git a/checkpoint_service/data/checkpoint_tokendb/000002.vlog b/checkpoint_service/data/checkpoint_tokendb/000002.vlog new file mode 100644 index 0000000..cd0ab81 Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/000002.vlog differ diff --git a/checkpoint_service/data/checkpoint_tokendb/000003.sst b/checkpoint_service/data/checkpoint_tokendb/000003.sst new file mode 100644 index 0000000..c54d7f8 Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/000003.sst differ diff --git a/checkpoint_service/data/checkpoint_tokendb/000003.vlog b/checkpoint_service/data/checkpoint_tokendb/000003.vlog new file mode 100644 index 0000000..cac470d Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/000003.vlog differ diff --git a/checkpoint_service/data/checkpoint_tokendb/DISCARD b/checkpoint_service/data/checkpoint_tokendb/DISCARD new file mode 100644 index 0000000..9e0f96a Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/DISCARD differ diff --git a/checkpoint_service/data/checkpoint_tokendb/KEYREGISTRY b/checkpoint_service/data/checkpoint_tokendb/KEYREGISTRY new file mode 100644 index 0000000..cfde57e --- /dev/null +++ b/checkpoint_service/data/checkpoint_tokendb/KEYREGISTRY @@ -0,0 +1 @@ +~t(ðæ4¥ ø2³Dî•æHello Badger \ No newline at end of file diff --git a/checkpoint_service/data/checkpoint_tokendb/MANIFEST b/checkpoint_service/data/checkpoint_tokendb/MANIFEST new file mode 100644 index 0000000..ac07759 Binary files /dev/null and b/checkpoint_service/data/checkpoint_tokendb/MANIFEST differ diff --git a/checkpoint_service/data/data/GeoLite2-ASN.mmdb b/checkpoint_service/data/data/GeoLite2-ASN.mmdb new file mode 100644 index 0000000..a10fe80 Binary files /dev/null and b/checkpoint_service/data/data/GeoLite2-ASN.mmdb differ diff --git a/checkpoint_service/data/data/GeoLite2-Country.mmdb b/checkpoint_service/data/data/GeoLite2-Country.mmdb new file mode 100644 index 0000000..eb2b22d Binary files 
/dev/null and b/checkpoint_service/data/data/GeoLite2-Country.mmdb differ diff --git a/checkpoint_service/data/data/checkpoint_secret.json b/checkpoint_service/data/data/checkpoint_secret.json new file mode 100644 index 0000000..0d47275 --- /dev/null +++ b/checkpoint_service/data/data/checkpoint_secret.json @@ -0,0 +1 @@ +{"hmac_secret":"HJ9EY5oN0pO71AUN/2faoYnJhZyTr875iWbw6Nl8rc8=","created_at":"2025-04-28T18:28:59.6204724-05:00","updated_at":"2025-04-28T18:33:55.3259956-05:00"} \ No newline at end of file diff --git a/checkpoint_service/data/data/checkpoint_tokendb/000001.sst b/checkpoint_service/data/data/checkpoint_tokendb/000001.sst new file mode 100644 index 0000000..7c33d89 Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/000001.sst differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/000002.sst b/checkpoint_service/data/data/checkpoint_tokendb/000002.sst new file mode 100644 index 0000000..397b55d Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/000002.sst differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/000002.vlog b/checkpoint_service/data/data/checkpoint_tokendb/000002.vlog new file mode 100644 index 0000000..cd0ab81 Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/000002.vlog differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/000003.sst b/checkpoint_service/data/data/checkpoint_tokendb/000003.sst new file mode 100644 index 0000000..c54d7f8 Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/000003.sst differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/000003.vlog b/checkpoint_service/data/data/checkpoint_tokendb/000003.vlog new file mode 100644 index 0000000..cac470d Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/000003.vlog differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/DISCARD b/checkpoint_service/data/data/checkpoint_tokendb/DISCARD new file mode 100644 index 0000000..9e0f96a Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/DISCARD differ diff --git a/checkpoint_service/data/data/checkpoint_tokendb/KEYREGISTRY b/checkpoint_service/data/data/checkpoint_tokendb/KEYREGISTRY new file mode 100644 index 0000000..cfde57e --- /dev/null +++ b/checkpoint_service/data/data/checkpoint_tokendb/KEYREGISTRY @@ -0,0 +1 @@ +~t(ðæ4¥ ø2³Dî•æHello Badger \ No newline at end of file diff --git a/checkpoint_service/data/data/checkpoint_tokendb/MANIFEST b/checkpoint_service/data/data/checkpoint_tokendb/MANIFEST new file mode 100644 index 0000000..ac07759 Binary files /dev/null and b/checkpoint_service/data/data/checkpoint_tokendb/MANIFEST differ diff --git a/checkpoint_service/go.mod b/checkpoint_service/go.mod new file mode 100644 index 0000000..58286e9 --- /dev/null +++ b/checkpoint_service/go.mod @@ -0,0 +1,39 @@ +module checkpoint_service + +go 1.24.1 + +require ( + github.com/BurntSushi/toml v1.5.0 + github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396 + github.com/dgraph-io/badger/v4 v4.7.0 + github.com/gofiber/fiber/v2 v2.52.6 + github.com/mileusna/useragent v1.3.5 + github.com/oschwald/geoip2-golang v1.11.0 +) + +require ( + github.com/andybalholm/brotli v1.1.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + 
github.com/google/flatbuffers v25.2.10+incompatible // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/oschwald/maxminddb-golang v1.13.0 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.51.0 // indirect + github.com/valyala/tcplisten v1.0.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + golang.org/x/net v0.38.0 // indirect + golang.org/x/sys v0.31.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect +) diff --git a/checkpoint_service/go.sum b/checkpoint_service/go.sum new file mode 100644 index 0000000..37cbf65 --- /dev/null +++ b/checkpoint_service/go.sum @@ -0,0 +1,76 @@ +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= +github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396 h1:W2HK1IdCnCGuLUeyizSCkwvBjdj0ZL7mxnJYQ3poyzI= +github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396/go.mod h1:tGWUZLZp9ajsxUOnHmFFLnqnlKXsCn6GReG4jAD59H0= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/badger/v4 v4.7.0 h1:Q+J8HApYAY7UMpL8d9owqiB+odzEc0zn/aqOD9jhc6Y= +github.com/dgraph-io/badger/v4 v4.7.0/go.mod h1:He7TzG3YBy3j4f5baj5B7Zl2XyfNe5bl4Udl0aPemVA= +github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM= +github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/gofiber/fiber/v2 v2.52.6 h1:Rfp+ILPiYSvvVuIPvxrBns+HJp8qGLDnLJawAu27XVI= +github.com/gofiber/fiber/v2 v2.52.6/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw= +github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q= +github.com/google/flatbuffers 
v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mileusna/useragent v1.3.5 h1:SJM5NzBmh/hO+4LGeATKpaEX9+b4vcGg2qXGLiNGDws= +github.com/mileusna/useragent v1.3.5/go.mod h1:3d8TOmwL/5I8pJjyVDteHtgDGcefrFUX4ccGOMKNYYc= +github.com/oschwald/geoip2-golang v1.11.0 h1:hNENhCn1Uyzhf9PTmquXENiWS6AlxAEnBII6r8krA3w= +github.com/oschwald/geoip2-golang v1.11.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo= +github.com/oschwald/maxminddb-golang v1.13.0 h1:R8xBorY71s84yO06NgTmQvqvTvlS/bnYZrrWX1MElnU= +github.com/oschwald/maxminddb-golang v1.13.0/go.mod h1:BU0z8BfFVhi1LQaonTwwGQlsHUEu9pWNdMfmq4ztm0o= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA= +github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g= +github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= +go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod 
h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/checkpoint_service/main.go b/checkpoint_service/main.go new file mode 100644 index 0000000..4d8ec17 --- /dev/null +++ b/checkpoint_service/main.go @@ -0,0 +1,66 @@ +package main + +import ( + "context" + "flag" + "log" + "os" + "os/signal" + "syscall" + "time" + + "checkpoint_service/middleware" + + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/compress" + "github.com/gofiber/fiber/v2/middleware/logger" +) + +func main() { + // Parse command-line flags + port := flag.String("port", "8080", "Port to listen on") + skipCheckpoint := flag.Bool("skip-checkpoint", false, "Skip the checkpoint middleware") + flag.Parse() + + // Create Fiber app + app := fiber.New() + + // Request logging + app.Use(logger.New()) + + // Response compression + app.Use(compress.New()) + + // Load and apply middleware plugins + for _, handler := range middleware.LoadPlugins(*skipCheckpoint) { + app.Use(handler) + } + log.Println("Loaded middleware plugins") + + // API group for proof-of-work endpoints + api := app.Group("/api") + api.Get("/pow/challenge", middleware.GetCheckpointChallengeHandler) + api.Post("/pow/verify", middleware.VerifyCheckpointHandler) + api.Get("/verify", middleware.VerifyCheckpointHandler) + + // Start the server in a goroutine + go func() { + addr := ":" + *port + log.Printf("Checkpoint service starting on %s", addr) + if err := app.Listen(addr); err != nil { + log.Fatalf("Server error: %v", err) + } + }() + + // Graceful shutdown on SIGINT/SIGTERM + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + log.Println("Shutting down server...") + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + if err := app.ShutdownWithContext(ctx); err != nil { + log.Fatalf("Server forced to shutdown: %v", err) + } + log.Println("Server exiting") +} diff --git a/checkpoint_service/middleware/checkpoint.go b/checkpoint_service/middleware/checkpoint.go new file mode 100644 index 0000000..9d6091e --- /dev/null +++ b/checkpoint_service/middleware/checkpoint.go @@ -0,0 +1,1482 @@ +// middleware provides a small proof-of-work puzzle that users solve before +// accessing protected pages or APIs, plus transparent reverse-proxy support. +// It issues HMAC-signed tokens bound to IP/browser, stores them in BadgerDB, +// and automatically cleans up expired data. 
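The package comment above describes the proof-of-work puzzle that verifyProofOfWork (later in this file) enforces: the hex SHA-256 of challenge+salt+nonce must start with Difficulty zero characters. Below is a minimal solver sketch, not part of this change, showing the work a client has to do; the function name and input values are illustrative.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strconv"
	"strings"
)

// solvePoW brute-forces a nonce such that the hex SHA-256 of
// challenge+salt+nonce starts with `difficulty` zero characters,
// mirroring verifyProofOfWork/calculateHash in checkpoint.go.
func solvePoW(challenge, salt string, difficulty int) string {
	prefix := strings.Repeat("0", difficulty)
	for i := 0; ; i++ {
		nonce := strconv.Itoa(i)
		sum := sha256.Sum256([]byte(challenge + salt + nonce))
		if strings.HasPrefix(hex.EncodeToString(sum[:]), prefix) {
			return nonce
		}
	}
}

func main() {
	// Placeholder inputs; real ones come from the /api/pow/challenge endpoint.
	fmt.Println("nonce:", solvePoW("deadbeef", "cafe", 4))
}
```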
+package middleware + +import ( + "context" + "crypto/hmac" + cryptorand "crypto/rand" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "log" + "net" + "net/http" + "net/http/httputil" + "net/url" + "os" + "path/filepath" + "regexp" + "strings" + "sync" + "sync/atomic" + "time" + + "bytes" + "encoding/gob" + + "html/template" + + "github.com/dgraph-io/badger/v4" + "github.com/gofiber/fiber/v2" + "github.com/mileusna/useragent" +) + +// --- Configuration --- + +// Config struct holds all configurable parameters for the Checkpoint middleware +type Config struct { + // General Settings + Difficulty int // Number of leading zeros for PoW hash + TokenExpiration time.Duration // Validity period for issued tokens + CookieName string // Name of the cookie used to store tokens + CookieDomain string // Domain scope for the cookie (e.g., ".example.com" for subdomains) + SaltLength int // Length of the salt used in challenges + + // Rate Limiting & Expiration + MaxAttemptsPerHour int // Max PoW verification attempts per IP per hour + MaxNonceAge time.Duration // Max age for used nonces before cleanup + ChallengeExpiration time.Duration // Time limit for solving a challenge + + // File Paths + SecretConfigPath string // Path to the persistent HMAC secret file + TokenStoreDBPath string // Directory path for the BadgerDB token store + InterstitialPaths []string // Paths to search for the interstitial HTML page + + // Security Settings + CheckPoSTimes bool // Enable Proof-of-Space-Time consistency checks + PoSTimeConsistencyRatio float64 // Allowed ratio between fastest and slowest PoS runs + HTMLCheckpointExclusions []string // Path prefixes to exclude from HTML checkpoint + HTMLCheckpointExcludedExtensions map[string]bool // File extensions to exclude (lowercase, '.') + DangerousQueryPatterns []*regexp.Regexp // Regex patterns to block in query strings + BlockDangerousPathChars bool // Block paths containing potentially dangerous characters (;, `) + // User Agent validation settings + UserAgentValidationExclusions []string // Path prefixes to skip UA validation + UserAgentRequiredPrefixes map[string]string // Path prefix -> required UA prefix + // Note: Binding to IP, User Agent, and Browser Hint is always enabled. 
+ + // Reverse Proxy Settings + ReverseProxyMappings map[string]string // Map of hostname to backend URL (e.g., "app.example.com": "http://127.0.0.1:8080") +} + +var ( + // Global configuration instance + checkpointConfig Config + + // Secret key used for HMAC verification - automatically generated on startup + hmacSecret []byte + // Used nonces to prevent replay attacks - use sync.Map for concurrency + usedNonces sync.Map // map[string]time.Time + // IP-based rate limiting for token generation - use sync.Map for concurrency + ipRateLimit sync.Map // map[string]*atomic.Int64 (or similar atomic counter) + // Challenge parameters store with request IDs - use sync.Map for concurrency + challengeStore sync.Map // map[string]ChallengeParams + // Global token store (now BadgerDB based) + tokenStore *TokenStore + // in-memory cache for the interstitial HTML to avoid repeated disk reads + interstitialContent string + interstitialOnce sync.Once + interstitialLoadErr error + // parsed template for interstitial page + interstitialTmpl *template.Template + interstitialTmplOnce sync.Once + interstitialTmplErr error + // pool for gob encoding buffers to reduce allocations + gobBufferPool = sync.Pool{ + New: func() interface{} { return new(bytes.Buffer) }, + } +) + +// Need atomic package for ipRateLimit counter + +func init() { + // Load complete configuration from checkpoint.toml (required) + var cfg Config + if err := LoadConfig("checkpoint", &cfg); err != nil { + log.Fatalf("Failed to load checkpoint config: %v", err) + } + SetConfig(cfg) + // Register sanitization plugin (cleanup URLs/queries before checkpoint) + RegisterPlugin("sanitize", RequestSanitizationMiddleware) + // Register checkpoint plugin + RegisterPlugin("checkpoint", New) + + // Initialize stores AFTER config is potentially set/loaded + // Ensure tokenStore is initialized before use + var err error + tokenStore, err = NewTokenStore(checkpointConfig.TokenStoreDBPath) + if err != nil { + log.Fatalf("CRITICAL: Failed to initialize TokenStore database: %v", err) + } + + // Initialize secret + _ = initSecret() + + // Start cleanup timer for nonces/ip rates (token cleanup handled by DB TTL) + _ = startCleanupTimer() +} + +// SecretConfig contains configuration for the Checkpoint system (for secret file persistence) +type SecretConfig struct { + HmacSecret []byte `json:"hmac_secret"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +// --- End Configuration --- + +// SetConfig swaps in your custom Config (usually loaded from TOML). +// Do this before using the middleware, ideally at startup. +func SetConfig(cfg Config) { + checkpointConfig = cfg + // Re-initialization of token store path is complex with BadgerDB, recommend restart. + // Other config changes can be applied dynamically if needed. +} + +// --- Token Store (BadgerDB Implementation) --- + +// StoredTokenData holds the relevant information persisted for each token hash. +// This includes binding information needed for verification. +type StoredTokenData struct { + ClientIPHash string // Hash of IP used during issuance + UserAgentHash string // Hash of User Agent used during issuance + BrowserHint string // Browser Hint used during issuance + LastVerified time.Time // Last time this token was successfully validated + ExpiresAt time.Time // Original expiration time of the token (for reference, TTL enforces) +} + +// TokenStore manages persistent storage of verified tokens using BadgerDB. 
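SetConfig above expects a fully populated Config before the middleware runs, normally produced from checkpoint.toml by LoadConfig in init(). Here is a hedged sketch of the equivalent Go literal, useful for seeing which settings the TOML file has to supply; every value is illustrative rather than taken from this change.

```go
package main

import (
	"regexp"
	"time"

	"checkpoint_service/middleware"
)

// exampleConfig shows the shape of the settings checkpoint.toml must provide.
// Every value here is illustrative, not taken from this change.
func exampleConfig() middleware.Config {
	return middleware.Config{
		Difficulty:          4,
		TokenExpiration:     12 * time.Hour,
		CookieName:          "checkpoint_token",
		CookieDomain:        ".example.com",
		SaltLength:          16,
		MaxAttemptsPerHour:  30,
		MaxNonceAge:         2 * time.Hour,
		ChallengeExpiration: 5 * time.Minute,
		SecretConfigPath:    "data/checkpoint_secret.json",
		TokenStoreDBPath:    "data/checkpoint_tokendb",
		InterstitialPaths:   []string{"static/checkpoint.html"},

		CheckPoSTimes:           true,
		PoSTimeConsistencyRatio: 3.0,

		HTMLCheckpointExclusions:         []string{"/static/", "/favicon.ico"},
		HTMLCheckpointExcludedExtensions: map[string]bool{".css": true, ".js": true, ".png": true},
		DangerousQueryPatterns:           []*regexp.Regexp{regexp.MustCompile(`(?i)<script`)},
		BlockDangerousPathChars:          true,
		UserAgentValidationExclusions:    []string{"/api/pow/"},

		ReverseProxyMappings: map[string]string{
			"app.example.com": "http://127.0.0.1:8080",
		},
	}
}

func main() {
	// In this repo the middleware package loads checkpoint.toml in init();
	// this program exists only to show the field shapes.
	middleware.SetConfig(exampleConfig())
}
```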
+type TokenStore struct { + DB *badger.DB +} + +// NewTokenStore initializes and returns a new TokenStore using BadgerDB. +func NewTokenStore(dbPath string) (*TokenStore, error) { + if err := os.MkdirAll(dbPath, 0755); err != nil { + return nil, fmt.Errorf("failed to create token store directory %s: %w", dbPath, err) + } + opts := badger.DefaultOptions(dbPath) + // Tune options for performance if needed (e.g., memory usage) + opts.Logger = nil // Disable default Badger logger unless debugging + db, err := badger.Open(opts) + if err != nil { + return nil, fmt.Errorf("failed to open token store database at %s: %w", dbPath, err) + } + store := &TokenStore{DB: db} + // Start BadgerDB's own value log GC routine (optional but recommended) + go store.runValueLogGC() + return store, nil +} + +// Close closes the BadgerDB database. +// Should be called during graceful shutdown. +func (store *TokenStore) Close() error { + if store.DB != nil { + log.Println("Closing TokenStore database...") + return store.DB.Close() + } + return nil +} + +// runValueLogGC runs BadgerDB's value log garbage collection periodically. +func (store *TokenStore) runValueLogGC() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + for range ticker.C { + again: + err := store.DB.RunValueLogGC(0.7) // Run GC if 70% space can be reclaimed + if err == nil { + goto again // Run GC multiple times if needed + } + if err != badger.ErrNoRewrite { + log.Printf("WARNING: BadgerDB RunValueLogGC error: %v", err) + } + } +} + +// encodeTokenData serializes StoredTokenData using gob. +func encodeTokenData(data *StoredTokenData) ([]byte, error) { + // get a buffer from pool + buf := gobBufferPool.Get().(*bytes.Buffer) + buf.Reset() + enc := gob.NewEncoder(buf) + if err := enc.Encode(data); err != nil { + gobBufferPool.Put(buf) + return nil, fmt.Errorf("failed to gob encode token data: %w", err) + } + // copy out the bytes to avoid retaining large buffer + out := make([]byte, buf.Len()) + copy(out, buf.Bytes()) + buf.Reset() + gobBufferPool.Put(buf) + return out, nil +} + +// decodeTokenData deserializes StoredTokenData using gob. +func decodeTokenData(encoded []byte) (*StoredTokenData, error) { + var data StoredTokenData + // use a reader to avoid extra buffer allocation + reader := bytes.NewReader(encoded) + dec := gob.NewDecoder(reader) + if err := dec.Decode(&data); err != nil { + return nil, fmt.Errorf("failed to gob decode token data: %w", err) + } + return &data, nil +} + +// addToken stores the token data in BadgerDB with a TTL. +func (store *TokenStore) addToken(tokenHash string, data *StoredTokenData) error { + encodedData, err := encodeTokenData(data) + if err != nil { + return err // Error already wrapped + } + + // Calculate TTL based on the token's specific expiration + ttl := time.Until(data.ExpiresAt) + if ttl <= 0 { + log.Printf("Attempted to add already expired token hash %s", tokenHash) + return nil // Don't add already expired tokens + } + + err = store.DB.Update(func(txn *badger.Txn) error { + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) + + if err != nil { + return fmt.Errorf("failed to add token hash %s to DB: %w", tokenHash, err) + } + return nil +} + +// updateTokenVerification updates the LastVerified time for an existing token. 
+func (store *TokenStore) updateTokenVerification(tokenHash string) error { + return store.DB.Update(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + // If token expired or was deleted between check and update, log and ignore. + if err == badger.ErrKeyNotFound { + log.Printf("Token hash %s not found during update verification (likely expired/deleted)", tokenHash) + return nil // Not a critical error in this context + } + return fmt.Errorf("failed to get token %s for update: %w", tokenHash, err) + } + + var storedData *StoredTokenData + err = item.Value(func(val []byte) error { + storedData, err = decodeTokenData(val) + return err + }) + if err != nil { + return fmt.Errorf("failed to decode token %s value for update: %w", tokenHash, err) + } + + // Update LastVerified and re-encode + storedData.LastVerified = time.Now() + encodedData, err := encodeTokenData(storedData) + if err != nil { + return err + } + + // Set the entry again (TTL remains the same based on original ExpiresAt) + ttl := time.Until(storedData.ExpiresAt) + if ttl <= 0 { + return nil + } // Don't update if expired + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) +} + +// lookupTokenData retrieves token data from BadgerDB. +// Returns the data, true if found and not expired, or false otherwise. +// Added context parameter +func (store *TokenStore) lookupTokenData(ctx context.Context, tokenHash string) (*StoredTokenData, bool, error) { + var storedData *StoredTokenData + var found bool + + err := store.DB.View(func(txn *badger.Txn) error { + // Check context cancellation within the transaction + if ctx.Err() != nil { + return ctx.Err() + } + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + if err == badger.ErrKeyNotFound { + return nil // Not found, not an error for lookup + } + return fmt.Errorf("failed to get token hash %s from DB: %w", tokenHash, err) + } + + // Key exists, decode the value + err = item.Value(func(val []byte) error { + // Check context cancellation before decoding + if ctx.Err() != nil { + return ctx.Err() + } + var decodeErr error + storedData, decodeErr = decodeTokenData(val) + return decodeErr + }) + if err != nil { + // If context was cancelled, return that error + if ctx.Err() != nil { + return ctx.Err() + } + // Return actual decoding error + return fmt.Errorf("failed to decode StoredTokenData for hash %s: %w", tokenHash, err) + } + + // Check expiration explicitly just in case TTL mechanism has latency + if time.Now().After(storedData.ExpiresAt) { + log.Printf("Token hash %s found but expired (ExpiresAt: %v)", tokenHash, storedData.ExpiresAt) + storedData = nil // Treat as not found if expired + return nil + } + + found = true + return nil + }) + + if err != nil { + // Don't log here, return the error to the caller (validateToken) + return nil, false, err // Return the actual error + } + + return storedData, found, nil // Success +} + +// --- End Token Store --- + +// CloseTokenStore provides a package-level function to close the global token store. +// This should be called during application shutdown. 
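main.go in this change stops the Fiber app on SIGINT/SIGTERM but does not yet call CloseTokenStore. Below is a sketch of how the existing shutdown block in main.go could be extended so the BadgerDB token store is closed as well; the placement is a suggestion, not something this diff does.

```go
// Sketch of the tail of main(): same shutdown as in this change,
// plus an explicit close of the token store.
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if err := app.ShutdownWithContext(ctx); err != nil {
	log.Fatalf("Server forced to shutdown: %v", err)
}
if err := middleware.CloseTokenStore(); err != nil {
	log.Printf("Error closing token store: %v", err)
}
log.Println("Server exiting")
```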
+func CloseTokenStore() error { + if tokenStore != nil { + return tokenStore.Close() + } + return nil +} + +// loadInterstitialHTML returns the cached interstitial HTML (loads once from disk) +func loadInterstitialHTML() (string, error) { + interstitialOnce.Do(func() { + for _, path := range checkpointConfig.InterstitialPaths { + if data, err := os.ReadFile(path); err == nil { + interstitialContent = string(data) + return + } + } + interstitialLoadErr = fmt.Errorf("could not find checkpoint interstitial HTML at any configured path") + }) + return interstitialContent, interstitialLoadErr +} + +// getInterstitialTemplate parses the cached HTML as a Go template (once) +func getInterstitialTemplate() (*template.Template, error) { + interstitialTmplOnce.Do(func() { + raw, err := loadInterstitialHTML() + if err != nil { + interstitialTmplErr = err + return + } + interstitialTmpl, interstitialTmplErr = template.New("interstitial").Parse(raw) + }) + return interstitialTmpl, interstitialTmplErr +} + +// serveInterstitial serves the challenge page using a Go template for safe interpolation +func serveInterstitial(c *fiber.Ctx) error { + requestID := generateRequestID(c) + c.Status(200) + c.Set("Content-Type", "text/html; charset=utf-8") + tmpl, err := getInterstitialTemplate() + if err != nil { + log.Printf("WARNING: %v", err) + return c.SendString("Security verification required. Please refresh the page.") + } + // prepare data for template + host := c.Hostname() + originalURL, _ := c.Locals("originalURL").(string) + targetPath := c.Path() + if originalURL != "" { + targetPath = originalURL + } + data := struct { + TargetPath string + RequestID string + Host string + FullURL string + }{ + TargetPath: targetPath, + RequestID: requestID, + Host: host, + FullURL: c.BaseURL() + targetPath, + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Printf("ERROR: Interstitial template execution failed: %v", err) + return c.SendString("Security verification required. 
Please refresh the page.") + } + return c.SendString(buf.String()) +} + +// checkPoSTimes ensures that memory proof run times are within the allowed ratio +func checkPoSTimes(times []int64) error { + if len(times) != 3 { + return fmt.Errorf("invalid PoS run times length") + } + minT, maxT := times[0], times[0] + for _, t := range times[1:] { + if t < minT { + minT = t + } + if t > maxT { + maxT = t + } + } + if checkpointConfig.CheckPoSTimes && float64(maxT) > float64(minT)*checkpointConfig.PoSTimeConsistencyRatio { + return fmt.Errorf("PoS run times ('i') are not consistent (ratio %.2f > %.2f)", + float64(maxT)/float64(minT), checkpointConfig.PoSTimeConsistencyRatio) + } + return nil +} + +// getDomainFromHost returns the base domain from a hostname +// For proper cookie sharing in both production and development +func getDomainFromHost(hostname string) string { + // Handle localhost development + if hostname == "localhost" || strings.HasPrefix(hostname, "localhost:") || + hostname == "127.0.0.1" || strings.HasPrefix(hostname, "127.0.0.1:") { + return "" // Use host-only cookies for localhost + } + + // For IP addresses, use host-only cookies + if net.ParseIP(strings.Split(hostname, ":")[0]) != nil { + return "" // IP address - use host-only + } + + parts := strings.Split(hostname, ".") + if len(parts) <= 1 { + return hostname // single word domain - unlikely + } + + // For standard domains, return domain with leading dot + if len(parts) >= 2 { + // Return parent domain for proper cookie sharing + domain := parts[len(parts)-2] + "." + parts[len(parts)-1] + return "." + domain // Leading dot is important + } + + return "" // Fallback to host-only cookie +} + +// issueToken handles token generation, cookie setting, and JSON response +func issueToken(c *fiber.Ctx, token CheckpointToken) error { + // 1. Generate the token hash + tokenHash := calculateTokenHash(token) + + // 2. Create the data to store in the DB + storedData := &StoredTokenData{ + ClientIPHash: token.ClientIP, // Assumes token struct is already populated + UserAgentHash: token.UserAgent, + BrowserHint: token.BrowserHint, + LastVerified: token.LastVerified, + ExpiresAt: token.ExpiresAt, // Store original expiration + } + + // 3. Add to the database + if err := tokenStore.addToken(tokenHash, storedData); err != nil { + log.Printf("ERROR: Failed to store token in DB for hash %s: %v", tokenHash, err) + // Decide if this is fatal or just a warning. For now, log and continue. + // return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to store verification proof"}) + } + + // 4. Sign the token (as before) + token.Signature = "" // Clear signature before marshalling for signing + tokenBytesForSig, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tokenBytesForSig) + + // 5. Prepare final token for cookie + finalBytes, err := json.Marshal(token) + if err != nil { + log.Printf("ERROR: Failed to marshal final token: %v", err) + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to prepare token"}) + } + tokenStr := base64.StdEncoding.EncodeToString(finalBytes) + + // 6. 
Set cookie + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Cookie expires when token expires + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) + + return c.JSON(fiber.Map{"token": tokenStr, "expires_at": token.ExpiresAt}) +} + +// Initialize a secure random secret key or load from persistent storage +func initSecret() bool { + if _, err := os.Stat(checkpointConfig.SecretConfigPath); err == nil { + // Config file exists, try to load it + if loadedSecret := loadSecretFromFile(); loadedSecret != nil { + hmacSecret = loadedSecret + log.Printf("Loaded existing HMAC secret from %s", checkpointConfig.SecretConfigPath) + return true + } + } + + // No config file or loading failed, generate a new secret + hmacSecret = make([]byte, 32) + _, err := cryptorand.Read(hmacSecret) + if err != nil { + // Critical security error - don't continue with insecure random numbers + log.Fatalf("CRITICAL: Could not generate secure random secret: %v", err) + } + + // Ensure data directory exists + if err := os.MkdirAll(filepath.Dir(checkpointConfig.SecretConfigPath), 0755); err != nil { + log.Printf("WARNING: Could not create data directory: %v", err) + return true + } + + // Save the new secret to file + config := SecretConfig{ + HmacSecret: hmacSecret, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + } + + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not save HMAC secret to file: %v", err) + } else { + log.Printf("Created and saved new HMAC secret to %s", checkpointConfig.SecretConfigPath) + } + } + + return true +} + +// loadSecretFromFile loads the HMAC secret from persistent storage +func loadSecretFromFile() []byte { + configBytes, err := os.ReadFile(checkpointConfig.SecretConfigPath) + if err != nil { + log.Printf("ERROR: Could not read secret config file: %v", err) + return nil + } + + var config SecretConfig + if err := json.Unmarshal(configBytes, &config); err != nil { + log.Printf("ERROR: Could not parse secret config file: %v", err) + return nil + } + + if len(config.HmacSecret) < 16 { + log.Printf("ERROR: Secret from file is too short, generating a new one") + return nil + } + + // Update the last loaded time + config.UpdatedAt = time.Now() + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not update HMAC secret file: %v", err) + } + } + + return config.HmacSecret +} + +// Start a timer to periodically clean up the nonce and rate limit maps +func startCleanupTimer() bool { + ticker := 
time.NewTicker(1 * time.Hour) + go func() { + for range ticker.C { + cleanupExpiredData() + cleanupExpiredChallenges() + } + }() + return true +} + +// Clean up expired nonces and rate limit data +func cleanupExpiredData() { + // Clean up used nonces + now := time.Now() + expiredNonceCount := 0 + usedNonces.Range(func(key, value interface{}) bool { + nonce := key.(string) + timestamp := value.(time.Time) + if now.Sub(timestamp) > checkpointConfig.MaxNonceAge { + usedNonces.Delete(nonce) + expiredNonceCount++ + } + return true // continue iteration + }) + if expiredNonceCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired nonces.", expiredNonceCount) + } + + // Reset IP rate limits every hour by deleting all entries + ipRateLimit.Range(func(key, value interface{}) bool { + ipRateLimit.Delete(key) + return true + }) + log.Println("Checkpoint: IP rate limits reset.") +} + +// CheckpointToken represents a validated token +type CheckpointToken struct { + Nonce string `json:"g"` // Nonce + Challenge string `json:"-"` // Derived server-side, not in token + Salt string `json:"-"` // Derived server-side, not in token + Difficulty int `json:"-"` // Derived server-side, not in token + ExpiresAt time.Time `json:"exp"` + ClientIP string `json:"cip,omitempty"` + UserAgent string `json:"ua,omitempty"` + BrowserHint string `json:"bh,omitempty"` + Entropy string `json:"ent,omitempty"` + Created time.Time `json:"crt"` + LastVerified time.Time `json:"lvf,omitempty"` + Signature string `json:"sig,omitempty"` + TokenFormat int `json:"fmt"` +} + +// ChallengeParams stores parameters for a challenge +type ChallengeParams struct { + Challenge string `json:"challenge"` // Base64 encoded + Salt string `json:"salt"` // Base64 encoded + Difficulty int `json:"difficulty"` + ExpiresAt time.Time `json:"expires_at"` + ClientIP string `json:"-"` + PoSSeed string `json:"pos_seed"` // Hex encoded +} + +// isExcludedHTMLPath checks if a path should be excluded from the HTML checkpoint. +// Exclusions happen based on configured prefixes or file extensions. +func isExcludedHTMLPath(path string) bool { + // 1. Check path prefixes + for _, prefix := range checkpointConfig.HTMLCheckpointExclusions { + if strings.HasPrefix(path, prefix) { + return true // Excluded by prefix + } + } + + // 2. Check file extension using the set + ext := strings.ToLower(filepath.Ext(path)) + if ext != "" { + if _, exists := checkpointConfig.HTMLCheckpointExcludedExtensions[ext]; exists { + return true // Excluded by file extension + } + } + + // 3. If not excluded by prefix or extension, it needs the checkpoint + return false +} + +// DirectProxy returns a handler that simply forwards the request/response to targetURL. +// Headers, status codes, and body are passed through without modification. 
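New() calls DirectProxy internally for hosts listed in ReverseProxyMappings, but the handler can also be mounted on a route directly. A small illustrative use, with a placeholder route and target URL:

```go
// Illustrative only: forward everything under /legacy to another local service.
app.All("/legacy/*", middleware.DirectProxy("http://127.0.0.1:9090"))
```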
+func DirectProxy(targetURL string) fiber.Handler { + target, err := url.Parse(targetURL) + if err != nil { + return func(c *fiber.Ctx) error { + log.Printf("ERROR: Invalid target URL %s: %v", targetURL, err) + return fiber.ErrBadGateway + } + } + + proxy := httputil.NewSingleHostReverseProxy(target) + + // Set up custom director to properly map headers + originalDirector := proxy.Director + proxy.Director = func(req *http.Request) { + originalDirector(req) + + // Add X-Forwarded headers + req.Header.Set("X-Forwarded-Host", req.Host) + req.Header.Set("X-Forwarded-Proto", "http") // Update to https when needed + + if v := req.Header.Get("X-Forwarded-For"); v != "" { + req.Header.Set("X-Forwarded-For", v+", "+req.RemoteAddr) + } else { + req.Header.Set("X-Forwarded-For", req.RemoteAddr) + } + } + + return func(c *fiber.Ctx) error { + // Create proxy request + proxyReq, err := http.NewRequest( + string(c.Method()), + target.String()+c.Path(), + bytes.NewReader(c.Body()), + ) + if err != nil { + log.Printf("ERROR: Failed to create proxy request: %v", err) + return fiber.ErrBadGateway + } + + // Copy all headers from the Fiber context to the proxy request + c.Request().Header.VisitAll(func(key, value []byte) { + proxyReq.Header.Set(string(key), string(value)) + }) + + // Execute the proxy request + proxyRes, err := http.DefaultClient.Do(proxyReq) + if err != nil { + log.Printf("ERROR: Proxy request failed: %v", err) + return fiber.ErrBadGateway + } + defer proxyRes.Body.Close() + + // Copy all headers from the proxy response to Fiber's response + for key, values := range proxyRes.Header { + for _, value := range values { + c.Response().Header.Add(key, value) + } + } + + // Set the status code + c.Status(proxyRes.StatusCode) + + // Copy the body + body, err := io.ReadAll(proxyRes.Body) + if err != nil { + log.Printf("ERROR: Failed to read proxy response body: %v", err) + return fiber.ErrBadGateway + } + + return c.Send(body) + } +} + +// isBlockedBot checks concurrently if the User-Agent indicates a known bot +// or doesn't have a standard browser prefix. +// It returns true as soon as one check decides to block. 
+func isBlockedBot(userAgent string) bool { + if userAgent == "" { + // Empty User-Agent is suspicious, block it + log.Printf("INFO: UA blocked - empty user agent") + return true + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() // Ensure context is cancelled eventually + + resultChan := make(chan bool, 2) // Buffered channel for results + + // Goroutine 1: Library-based bot check + go func() { + ua := useragent.Parse(userAgent) + shouldBlock := ua.Bot + if shouldBlock { + log.Printf("INFO: UA blocked by library (Bot detected: %s): %s", ua.Name, userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Goroutine 2: Prefix check + go func() { + // Standard browser User-Agent prefixes + standardPrefixes := []string{"Mozilla/", "Opera/", "DuckDuckGo/", "Dart/"} + hasStandardPrefix := false + + for _, prefix := range standardPrefixes { + if strings.HasPrefix(userAgent, prefix) { + hasStandardPrefix = true + break + } + } + + // Block if it does NOT have a standard prefix + shouldBlock := !hasStandardPrefix + + if shouldBlock { + log.Printf("INFO: UA blocked by prefix check (doesn't have standard prefix): %s", userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Wait for results and decide + result1 := <-resultChan + if result1 { + cancel() // Found a reason to block, cancel the other check + return true + } + + // First check didn't block, wait for the second result + result2 := <-resultChan + // cancel() is deferred, so it will run anyway, ensuring cleanup + return result2 // Block if the second check decided to block +} + +// New gives you a Fiber handler that does the POW challenge (HTML/API) or proxies requests. 
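In this service New is wired in indirectly, via RegisterPlugin("checkpoint", New) and LoadPlugins in main.go. A minimal sketch of using it directly on a Fiber app, assuming the package's checkpoint.toml and data paths are in place:

```go
// Sketch: applying the checkpoint middleware and PoW endpoints directly,
// instead of going through RegisterPlugin/LoadPlugins as main.go does.
app := fiber.New()
app.Use(middleware.New())
app.Get("/api/pow/challenge", middleware.GetCheckpointChallengeHandler)
app.Post("/api/pow/verify", middleware.VerifyCheckpointHandler)
```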
+func New() fiber.Handler { + return func(c *fiber.Ctx) error { + host := c.Hostname() + targetURL, useProxy := checkpointConfig.ReverseProxyMappings[host] + path := c.Path() + + // --- User-Agent Validation --- + // Only check User-Agent if path is not in exclusion list + skipUA := false + for _, prefix := range checkpointConfig.UserAgentValidationExclusions { + if strings.HasPrefix(path, prefix) { + skipUA = true + break + } + } + + if !skipUA { + // First check required UA prefixes for specific paths + for p, required := range checkpointConfig.UserAgentRequiredPrefixes { + if strings.HasPrefix(path, p) { + ua := c.Get("User-Agent") + if !strings.HasPrefix(ua, required) { + log.Printf("INFO: UA blocked by required prefix %s: %s", required, ua) + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + break + } + } + + // Then do general bot check for all non-excluded paths + userAgent := c.Get("User-Agent") + if isBlockedBot(userAgent) { + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + } + + // Handle any API endpoints + if strings.HasPrefix(path, "/api") { + // Always serve PoW endpoints locally (challenge & verify) + if strings.HasPrefix(path, "/api/pow/") || strings.HasPrefix(path, "/api/verify") { + log.Printf("API checkpoint endpoint %s - handling locally", path) + return c.Next() + } + // Other API paths: skip checkpoint + if useProxy { + // Proxy to backend for proxied hosts + log.Printf("API proxying endpoint %s to %s", path, targetURL) + return DirectProxy(targetURL)(c) + } + log.Printf("API endpoint %s - bypassing checkpoint", path) + return c.Next() + } + + // --- Reverse Proxy Logic --- + if useProxy { + // Check for existing valid token cookie + tokenCookie := c.Cookies(checkpointConfig.CookieName) + log.Printf("Proxy: Checking token for host %s, path %s, cookie present: %v", + host, path, tokenCookie != "") + + // Check if this is an excluded path (API endpoints, etc) + if isExcludedHTMLPath(path) { + log.Printf("Excluded path %s for proxied host %s - proxying without token check", path, host) + + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } + + valid, err := validateToken(tokenCookie, c) + + if err != nil { + // Log validation errors but treat as invalid for proxying + log.Printf("Error validating token for proxied host %s, path %s: %v", host, path, err) + } + + if valid { + log.Printf("Valid token found for proxied host %s, path %s - forwarding request", host, path) + // Token is valid, proxy the request + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } else { + // Add debug logging + log.Printf("No valid token for proxied host %s, path %s - serving interstitial", host, path) + + // Save the original full URL for potential redirection after verification + c.Locals("originalURL", c.OriginalURL()) + + // No valid token, serve the interstitial challenge page. 
+ return serveInterstitial(c) + } + } + + // --- Standard HTML/Static/API Logic (No Proxy Mapping) --- + // Skip checkpoint for excluded paths (e.g., static assets, API endpoints handled separately) + if isExcludedHTMLPath(path) { + return c.Next() + } + + // --- Path needs checkpoint (potential HTML page) --- + tokenCookie := c.Cookies(checkpointConfig.CookieName) + if tokenCookie != "" { + valid, err := validateToken(tokenCookie, c) + if err != nil { + // Log validation errors but still serve interstitial for safety + log.Printf("Error validating token for path %s: %v", path, err) + // Fall through to serve interstitial + } else if valid { + // Token is valid, proceed to the requested page/handler + return c.Next() + } + // If token was present but invalid/expired, fall through to serve interstitial + } + + // No valid token found, serve the interstitial challenge page. + return serveInterstitial(c) + } +} + +// generateRequestID creates a unique ID for this verification request +func generateRequestID(c *fiber.Ctx) string { + challenge, salt := generateChallenge() + // Generate PoS seed + posSeedBytes := make([]byte, 32) + if n, err := cryptorand.Read(posSeedBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate PoS seed: %v", err) + } else if n != len(posSeedBytes) { + log.Fatalf("CRITICAL: Short read generating PoS seed: read %d bytes", n) + } + posSeed := hex.EncodeToString(posSeedBytes) + // Generate request ID + randBytes := make([]byte, 16) + if n, err := cryptorand.Read(randBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate request ID: %v", err) + } else if n != len(randBytes) { + log.Fatalf("CRITICAL: Short read generating request ID: read %d bytes", n) + } + requestID := hex.EncodeToString(randBytes) + + // Base64-encode the hex challenge and salt for storage + encodedChallenge := base64.StdEncoding.EncodeToString([]byte(challenge)) + encodedSalt := base64.StdEncoding.EncodeToString([]byte(salt)) + params := ChallengeParams{ + Challenge: encodedChallenge, + Salt: encodedSalt, + Difficulty: checkpointConfig.Difficulty, + ExpiresAt: time.Now().Add(checkpointConfig.ChallengeExpiration), + ClientIP: getRealIP(c), + PoSSeed: posSeed, + } + challengeStore.Store(requestID, params) + return requestID +} + +func cleanupExpiredChallenges() { + now := time.Now() + expiredChallengeCount := 0 + challengeStore.Range(func(key, value interface{}) bool { + id := key.(string) + params := value.(ChallengeParams) + if now.After(params.ExpiresAt) { + challengeStore.Delete(id) + expiredChallengeCount++ + } + return true // continue iteration + }) + if expiredChallengeCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired challenges.", expiredChallengeCount) + } +} + +// GetCheckpointChallengeHandler serves challenge parameters via API +func GetCheckpointChallengeHandler(c *fiber.Ctx) error { + requestID := c.Query("id") + if requestID == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Missing request ID"}) + } + + // Apply rate limiting to challenge generation + clientIP := getRealIP(c) + val, _ := ipRateLimit.LoadOrStore(clientIP, new(atomic.Int64)) + ipCounter := val.(*atomic.Int64) + attempts := ipCounter.Add(1) // Increment and get new value + + // Limit to a reasonable number of challenge requests per hour (using the same MaxAttemptsPerHour config) + if attempts > int64(checkpointConfig.MaxAttemptsPerHour) { + return c.Status(fiber.StatusTooManyRequests).JSON(fiber.Map{"error": "Too many challenge requests. 
Please try again later."}) + } + + val, exists := challengeStore.Load(requestID) + if !exists { + return c.Status(fiber.StatusNotFound).JSON(fiber.Map{"error": "Challenge not found or expired"}) + } + params := val.(ChallengeParams) + + if clientIP != params.ClientIP { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + decoySeedBytes := make([]byte, 8) + cryptorand.Read(decoySeedBytes) + decoySeed := hex.EncodeToString(decoySeedBytes) + decoyFields := make([]map[string]interface{}, 0) + decoyFieldCount := 2 + int(decoySeedBytes[0])%3 + for i := 0; i < decoyFieldCount; i++ { + nameLen := 5 + int(decoySeedBytes[i%8])%8 + valLen := 8 + int(decoySeedBytes[(i+1)%8])%24 + name := randomHexString(nameLen) + val := randomHexString(valLen) + decoyFields = append(decoyFields, map[string]interface{}{name: val}) + } + return c.JSON(fiber.Map{ + "a": params.Challenge, // challenge + "b": params.Salt, // salt + "c": params.Difficulty, // difficulty + "d": params.PoSSeed, // pos_seed + "e": decoySeed, // decoy_seed + "f": decoyFields, // decoy_fields + }) +} + +func randomHexString(n int) string { + b := make([]byte, (n+1)/2) + if m, err := cryptorand.Read(b); err != nil { + log.Fatalf("CRITICAL: Failed to generate random hex string: %v", err) + } else if m != len(b) { + log.Fatalf("CRITICAL: Short read generating random hex string: read %d bytes", m) + } + s := hex.EncodeToString(b) + if len(s) < n { + log.Fatalf("CRITICAL: Random hex string too short: got %d hex chars, want %d", len(s), n) + } + return s[:n] +} + +func getFullClientIP(c *fiber.Ctx) string { + ip := getRealIP(c) + if ip == "" { + return "unknown" + } + h := sha256.Sum256([]byte(ip)) + return hex.EncodeToString(h[:8]) +} + +func hashUserAgent(userAgent string) string { + if userAgent == "" { + return "" + } + hash := sha256.Sum256([]byte(userAgent)) + return hex.EncodeToString(hash[:8]) +} + +func extractBrowserFingerprint(c *fiber.Ctx) string { + headers := []string{ + c.Get("Sec-CH-UA"), c.Get("Sec-CH-UA-Platform"), c.Get("Sec-CH-UA-Mobile"), + c.Get("Sec-CH-UA-Platform-Version"), c.Get("Sec-CH-UA-Arch"), c.Get("Sec-CH-UA-Model"), + } + var validHeaders []string + for _, h := range headers { + if h != "" { + validHeaders = append(validHeaders, h) + } + } + if len(validHeaders) == 0 { + return "" + } + fingerprint := strings.Join(validHeaders, "|") + hash := sha256.Sum256([]byte(fingerprint)) + return hex.EncodeToString(hash[:12]) +} + +func validateToken(tokenStr string, c *fiber.Ctx) (bool, error) { + // Explicitly handle missing token case first. + if tokenStr == "" { + return false, nil // No token cookie found, definitely not valid. + } + + // 1. Decode the token string from the cookie + tokenBytes, err := base64.StdEncoding.DecodeString(tokenStr) + if err != nil { + // Invalid Base64 encoding - treat as invalid token, not a system error + return false, nil + } + + // Check for empty byte slice after decoding + if len(tokenBytes) == 0 { + // Decoded to empty - treat as invalid token + return false, nil + } + + // 2. Unmarshal + var token CheckpointToken + if err := json.Unmarshal(tokenBytes, &token); err != nil { + // Invalid JSON structure - treat as invalid token + return false, nil // Error seen in logs comes from here, now returns nil error + } + + // 3. Basic expiration check based on ExpiresAt field in the token itself + // Note: Return nil error for expired token, it's just invalid. 
+ if time.Now().After(token.ExpiresAt) { + return false, nil // Token itself says it's expired + } + + // 4. Check token signature first (Format 2+) + if token.TokenFormat < 2 { + return false, nil // Old format not supported/secure - invalid + } + if !verifyTokenSignature(token, tokenBytes) { + return false, nil // Invalid signature - invalid + } + + // 5. Calculate the token hash to look up in the database + tokenHash := calculateTokenHash(token) + + // 6. Look up the token data in BadgerDB + storedData, found, dbErr := tokenStore.lookupTokenData(c.Context(), tokenHash) + if dbErr != nil { + // Actual DB error during lookup - THIS is a real error to return + return false, fmt.Errorf("token DB lookup failed: %w", dbErr) + } + if !found { + // Token hash not found in DB or explicitly expired according to DB record + return false, nil + } + + // 7. *** CRITICAL: Verify bindings against stored data and current request *** + // Compare Client IP Hash + currentPartialIP := getFullClientIP(c) + if storedData.ClientIPHash != currentPartialIP { + return false, nil // IP mismatch - invalid + } + + // Compare User Agent Hash + currentUserAgent := hashUserAgent(c.Get("User-Agent")) + if storedData.UserAgentHash != currentUserAgent { + return false, nil // User agent mismatch - invalid + } + + // Compare Browser Hint + currentBrowserHint := extractBrowserFingerprint(c) + // Only enforce if hint was stored AND current hint is available + if storedData.BrowserHint != "" && currentBrowserHint != "" && storedData.BrowserHint != currentBrowserHint { + return false, nil // Browser hint mismatch - invalid + } + + // 8. All checks passed! Token is valid and bound correctly. + // Update LastVerified time in the database (best effort, log errors) + if err := tokenStore.updateTokenVerification(tokenHash); err != nil { + log.Printf("WARNING: Failed to update token verification time for hash %s: %v", tokenHash, err) + } + + // Refresh the cookie with potentially updated ExpiresAt (if sliding window desired) or just LastVerified. + // For simplicity, we'll just refresh with the same ExpiresAt for now. 
+ token.LastVerified = time.Now() + updateTokenCookie(c, token) // Resign and set cookie + + return true, nil +} + +func updateTokenCookie(c *fiber.Ctx, token CheckpointToken) { + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + // Recompute signature because LastVerified might have changed + token.Signature = "" + tempBytes, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tempBytes) // Compute signature on token WITHOUT old signature + + finalTokenBytes, err := json.Marshal(token) // Marshal again with new signature + if err != nil { + log.Printf("Error marshaling token for cookie update: %v", err) + return + } + tokenStr := base64.StdEncoding.EncodeToString(finalTokenBytes) + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Use original expiration + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) +} + +func verifyProofOfWork(challenge, salt, nonce string, difficulty int) bool { + inputStr := challenge + salt + nonce + hash := calculateHash(inputStr) + prefix := strings.Repeat("0", difficulty) + return strings.HasPrefix(hash, prefix) +} + +func calculateHash(input string) string { + hash := sha256.Sum256([]byte(input)) + return hex.EncodeToString(hash[:]) +} + +func computeTokenSignature(token CheckpointToken, tokenBytes []byte) string { + tokenCopy := token + tokenCopy.Signature = "" // Ensure signature field is empty for signing + tokenToSign, _ := json.Marshal(tokenCopy) + h := hmac.New(sha256.New, hmacSecret) + h.Write(tokenToSign) + return hex.EncodeToString(h.Sum(nil)) +} + +func verifyTokenSignature(token CheckpointToken, tokenBytes []byte) bool { + if token.Signature == "" { + return false + } + expectedSignature := computeTokenSignature(token, tokenBytes) + return hmac.Equal([]byte(token.Signature), []byte(expectedSignature)) +} + +// VerifyCheckpointHandler verifies the challenge solution +func VerifyCheckpointHandler(c *fiber.Ctx) error { + clientIP := getRealIP(c) + + var req CheckpointVerifyRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request format"}) + } + + // Challenge lookup + challengeVal, challengeExists := challengeStore.Load(req.RequestID) + if !challengeExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid or expired request ID"}) + } + params := challengeVal.(ChallengeParams) + + if clientIP != params.ClientIP { // Check against IP stored with challenge + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + + decodedChallenge := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Challenge); err == nil { + decodedChallenge = string(decoded) + } else { + return 
c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode challenge"}) + } + decodedSalt := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Salt); err == nil { + decodedSalt = string(decoded) + } else { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode salt"}) + } + + if req.Nonce == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Nonce ('g') required"}) + } + + // --- Nonce Check --- + nonceKey := req.Nonce + decodedChallenge + _, nonceExists := usedNonces.Load(nonceKey) + if nonceExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "This solution has already been used"}) + } + // --- End Nonce Check --- + + if !verifyProofOfWork(decodedChallenge, decodedSalt, req.Nonce, params.Difficulty) { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid proof-of-work solution"}) + } + + // --- Store Used Nonce (only after PoW is verified) --- + usedNonces.Store(nonceKey, time.Now()) + // --- End Store Used Nonce --- + + // Validate PoS hashes and times if provided + if len(req.PoSHashes) == 3 && len(req.PoSTimes) == 3 { + if req.PoSHashes[0] != req.PoSHashes[1] || req.PoSHashes[1] != req.PoSHashes[2] { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "PoS hashes ('h') do not match"}) + } + if len(req.PoSHashes[0]) != 64 { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS hash ('h') length"}) + } + if err := checkPoSTimes(req.PoSTimes); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": err.Error()}) + } + } else if checkpointConfig.CheckPoSTimes && (len(req.PoSHashes) != 0 || len(req.PoSTimes) != 0) { + // If PoS checking is enabled, but incorrect number of hashes/times provided + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS data provided"}) + } + + // Challenge is valid, remove it from store + challengeStore.Delete(req.RequestID) + + entropyBytes := make([]byte, 8) + _, err := cryptorand.Read(entropyBytes) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to generate secure token entropy"}) + } + entropy := hex.EncodeToString(entropyBytes) + + // *** Gather current binding info for the new token *** + now := time.Now() + expiresAt := now.Add(checkpointConfig.TokenExpiration) + browserHint := extractBrowserFingerprint(c) + clientIPHash := getFullClientIP(c) + userAgentHash := hashUserAgent(c.Get("User-Agent")) + + token := CheckpointToken{ + Nonce: req.Nonce, + ExpiresAt: expiresAt, + ClientIP: clientIPHash, + UserAgent: userAgentHash, + BrowserHint: browserHint, + Entropy: entropy, + Created: now, + LastVerified: now, + TokenFormat: 2, + } + + // Add a response header indicating success for the proxy + c.Set("X-Checkpoint-Status", "success") + log.Printf("Successfully verified challenge for IP %s, issuing token", clientIP) + + // Issue token (handles DB storage, signing, cookie setting) + return issueToken(c, token) +} + +// Renamed request struct +type CheckpointVerifyRequest struct { + RequestID string `json:"request_id"` + Nonce string `json:"g"` + PoSHashes []string `json:"h"` + PoSTimes []int64 `json:"i"` + DecoyHashes []string `json:"j"` + DecoyTimes []int64 `json:"k"` + DecoyFields []map[string]interface{} `json:"l"` +} + +func generateChallenge() (string, string) { + randomBytes := make([]byte, 16) + _, err := cryptorand.Read(randomBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed 
to generate secure random challenge: %v", err) + } + saltBytes := make([]byte, checkpointConfig.SaltLength) + _, err = cryptorand.Read(saltBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed to generate secure random salt: %v", err) + } + salt := hex.EncodeToString(saltBytes) + return hex.EncodeToString(randomBytes), salt +} + +// calculateTokenHash calculates a unique hash for storing the token status +// IMPORTANT: This hash is now used as the key in the database. +func calculateTokenHash(token CheckpointToken) string { + // Hash relevant fields that identify this specific verification instance + // Using Nonce, Entropy, and Creation time ensures uniqueness per issuance. + data := fmt.Sprintf("%s:%s:%d", + token.Nonce, + token.Entropy, + token.Created.UnixNano()) + hash := sha256.Sum256([]byte(data)) + return hex.EncodeToString(hash[:]) +} + +// RequestSanitizationMiddleware spots malicious patterns (SQLi, XSS, path traversal) +// and returns 403 immediately to keep your app safe. +func RequestSanitizationMiddleware() fiber.Handler { + return func(c *fiber.Ctx) error { + // Check URL path for directory traversal + path := c.Path() + if strings.Contains(path, "../") || strings.Contains(path, "..\\") { + log.Printf("Security block: Directory traversal attempt in path: %s from IP: %s", path, getRealIP(c)) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + + // Check query parameters for malicious patterns + query := c.Request().URI().QueryString() + if len(query) > 0 { + queryStr := string(query) + + // Check for dangerous characters if configured + if checkpointConfig.BlockDangerousPathChars { + if strings.Contains(queryStr, ";") || strings.Contains(queryStr, "\\") || strings.Contains(queryStr, "`") { + log.Printf("Security block: Dangerous character in query from IP: %s, Query: %s", getRealIP(c), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + + // Check for configured attack patterns + for _, pattern := range checkpointConfig.DangerousQueryPatterns { + if pattern.MatchString(queryStr) { + log.Printf("Security block: Malicious pattern match in query from IP: %s, Pattern: %s, Query: %s", + getRealIP(c), pattern.String(), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + } + + return c.Next() + } +} diff --git a/checkpoint_service/middleware/config/checkpoint.toml b/checkpoint_service/middleware/config/checkpoint.toml new file mode 100644 index 0000000..1baf766 --- /dev/null +++ b/checkpoint_service/middleware/config/checkpoint.toml @@ -0,0 +1,77 @@ +# ----------------------------------------------------------------------------- +# Checkpoint Middleware Configuration (checkpoint.toml) +# +# All durations are parsed via time.ParseDuration (e.g. "24h"). +# Arrays and tables map directly to the Config struct fields. 
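+# ReverseProxyMappings may also be set here when the middleware should front a backend; for example +# (illustrative, not enabled in this file): +# [ReverseProxyMappings] +# "app.example.com" = "http://127.0.0.1:8080"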
+# ----------------------------------------------------------------------------- + +# === GENERAL SETTINGS === +# Number of leading zeros required in PoW hash +Difficulty = 4 +# Validity period for issued tokens +TokenExpiration = "24h" +# Name of the cookie used to store the checkpoint token +CookieName = "checkpoint_token" +# Domain attribute for the cookie; empty = host-only (localhost) +CookieDomain = "" +# Length of the random salt in bytes for challenges +SaltLength = 16 + +# === RATE LIMITING & EXPIRATION === +# Max PoW verification attempts per IP per hour +MaxAttemptsPerHour = 10 +# Max age for used nonces before cleanup +MaxNonceAge = "24h" +# Time allowed for solving a challenge +ChallengeExpiration = "5m" + +# === PERSISTENCE PATHS === +# File where HMAC secret is stored +SecretConfigPath = "./data/checkpoint_secret.json" +# Directory for BadgerDB token store +TokenStoreDBPath = "./data/checkpoint_tokendb" +# Ordered fallback paths for interstitial HTML +InterstitialPaths = [ + "./public/static/pow-interstitial.html", + "./develop/static/pow-interstitial.html" +] + +# === SECURITY SETTINGS === +# Enable Proof-of-Space-Time consistency checks +CheckPoSTimes = true +# Allowed ratio between slowest and fastest PoS runs +PoSTimeConsistencyRatio = 1.35 + +# === HTML CHECKPOINT EXCLUSIONS === +# Path prefixes to skip PoW interstitial +HTMLCheckpointExclusions = ["/api"] +# File extensions to skip PoW check +HTMLCheckpointExcludedExtensions = { ".jpg" = true, ".jpeg" = true, ".png" = true, ".gif" = true, ".svg" = true, ".webp" = true, ".ico" = true, ".bmp" = true, ".tif" = true, ".tiff" = true, ".mp4" = true, ".webm" = true, ".css" = true, ".js" = true, ".mjs" = true, ".woff" = true, ".woff2" = true, ".ttf" = true, ".otf" = true, ".eot" = true, ".json" = true, ".xml" = true, ".txt" = true, ".pdf" = true, ".map" = true, ".wasm" = true } + +# === QUERY SANITIZATION === +# Regex patterns (case-insensitive) to block in query strings +DangerousQueryPatterns = [ + "(?i)union\\s+select", + "(?i)drop\\s+table", + "(?i)insert\\s+into", + "(?i) 0 { + blockType = "asn_name_group" + blockValue = groupName + log.Printf("INFO: Blocking IP %s based on %s: %s (ASN: %d, Org: '%s')", ipStr, blockType, blockValue, clientASN, asnOrg) + customPage = asnGroupBlockPages[groupName] + // No need to unlock here, defer handles it + return cacheAndReturnBlockResult(ipStr, blockType, blockValue, customPage, asnOrg) + } + } + // RUnlock happens via defer + } + } else if asnErr != nil && !strings.Contains(asnErr.Error(), "cannot be found in the database") { + // Log errors other than "not found" + log.Printf("WARNING: GeoIP ASN lookup error for IP %s: %v", ipStr, asnErr) + } + + // --- Cache the result before returning --- // + computedEntry := blockCacheEntry{ + blocked: false, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + ipBlockCacheMutex.Lock() + ipBlockCache[ipStr] = computedEntry + ipBlockCacheMutex.Unlock() + return false, "", "", "", "" // Not blocked +} + +// Helper function to cache block results +func cacheAndReturnBlockResult(ipStr string, blockType string, blockValue string, customPage string, asnOrgName string) (bool, string, string, string, string) { + // Create the cache entry + computedEntry := blockCacheEntry{ + blocked: true, + blockType: blockType, + blockValue: blockValue, + customPage: customPage, + asnOrgName: asnOrgName, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + + // Use a separate defer+recover to ensure we don't crash the entire server + // if there's any issue with the 
cache + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("RECOVERED from panic while caching result: %v", r) + } + }() + + ipBlockCacheMutex.Lock() + defer ipBlockCacheMutex.Unlock() // Use defer to ensure unlock happens + ipBlockCache[ipStr] = computedEntry + }() + + return true, blockType, blockValue, customPage, asnOrgName +} + +// buildASNNameMatchers creates Aho-Corasick matchers for faster ASN name checking +func buildASNNameMatchers() { + // Acquire write lock before modifying the global map + asnNameMatchersMutex.Lock() + defer asnNameMatchersMutex.Unlock() + + // Clear any existing matchers first + asnNameMatchers = make(map[string]*ahocorasick.Matcher) + + for groupName, nameList := range blockedASNNames { + // Skip if the name list is empty + if len(nameList) == 0 { + log.Printf("Skipping matcher build for empty group: %s", groupName) + continue + } + + // Convert names to lowercase byte slices for case-insensitive matching + dict := make([][]byte, 0, len(nameList)) + for _, name := range nameList { + if name != "" { + dict = append(dict, []byte(strings.ToLower(name))) + } + } + + // Only create a matcher if we have patterns + if len(dict) > 0 { + // Use a recovery mechanism in case the matcher creation fails + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("PANIC while building Aho-Corasick matcher for group %s: %v", groupName, r) + // Ensure the entry for this group is nil if creation failed + asnNameMatchers[groupName] = nil + } + }() + + // This assignment happens under the write lock + asnNameMatchers[groupName] = ahocorasick.NewMatcher(dict) + log.Printf("Built Aho-Corasick matcher for ASN name group: %s (%d patterns)", groupName, len(dict)) + }() + } else { + log.Printf("No valid patterns found for ASN name group: %s", groupName) + } + } + // Unlock happens via defer +} + +// ReloadGeoIPDatabases closes and reopens the GeoIP database readers +// to load updated database files. Safe to call while the server is running. +func ReloadGeoIPDatabases() { + // Close existing readers if they're open + if geoipCountryReader != nil { + geoipCountryReader.Close() + geoipCountryReader = nil + } + if geoipASNReader != nil { + geoipASNReader.Close() + geoipASNReader = nil + } + + // Re-initialize the readers + initGeoIP() + log.Printf("GeoIP databases reloaded") +} + +// getRealIP gets the real client IP when behind a reverse proxy +// It checks X-Forwarded-For header first, then falls back to c.IP() +func getRealIP(c *fiber.Ctx) string { + // Check X-Forwarded-For header first + if xff := c.Get("X-Forwarded-For"); xff != "" { + // X-Forwarded-For can contain multiple IPs (client, proxy1, proxy2, ...) 
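+ // e.g. "203.0.113.7, 10.0.0.2, 10.0.0.3" (illustrative addresses)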
+ // The first one is the original client IP + ips := strings.Split(xff, ",") + if len(ips) > 0 { + // Get the first IP and trim whitespace + clientIP := strings.TrimSpace(ips[0]) + // Validate it's a real IP + if net.ParseIP(clientIP) != nil { + log.Printf("Using X-Forwarded-For IP: %s (original: %s)", clientIP, c.IP()) + return clientIP + } + } + } + + // Also check for custom Remote-Addr header that might be set by some proxies + if remoteAddr := c.Get("$remote_addr"); remoteAddr != "" { + // Validate it's a real IP + if net.ParseIP(remoteAddr) != nil { + log.Printf("Using $remote_addr IP: %s (original: %s)", remoteAddr, c.IP()) + return remoteAddr + } + } + + // Fallback to default IP + return c.IP() +} diff --git a/checkpoint_service/middleware/middleware/checkpoint.go b/checkpoint_service/middleware/middleware/checkpoint.go new file mode 100644 index 0000000..9d6091e --- /dev/null +++ b/checkpoint_service/middleware/middleware/checkpoint.go @@ -0,0 +1,1482 @@ +// middleware provides a small proof-of-work puzzle that users solve before +// accessing protected pages or APIs, plus transparent reverse-proxy support. +// It issues HMAC-signed tokens bound to IP/browser, stores them in BadgerDB, +// and automatically cleans up expired data. +package middleware + +import ( + "context" + "crypto/hmac" + cryptorand "crypto/rand" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "log" + "net" + "net/http" + "net/http/httputil" + "net/url" + "os" + "path/filepath" + "regexp" + "strings" + "sync" + "sync/atomic" + "time" + + "bytes" + "encoding/gob" + + "html/template" + + "github.com/dgraph-io/badger/v4" + "github.com/gofiber/fiber/v2" + "github.com/mileusna/useragent" +) + +// --- Configuration --- + +// Config struct holds all configurable parameters for the Checkpoint middleware +type Config struct { + // General Settings + Difficulty int // Number of leading zeros for PoW hash + TokenExpiration time.Duration // Validity period for issued tokens + CookieName string // Name of the cookie used to store tokens + CookieDomain string // Domain scope for the cookie (e.g., ".example.com" for subdomains) + SaltLength int // Length of the salt used in challenges + + // Rate Limiting & Expiration + MaxAttemptsPerHour int // Max PoW verification attempts per IP per hour + MaxNonceAge time.Duration // Max age for used nonces before cleanup + ChallengeExpiration time.Duration // Time limit for solving a challenge + + // File Paths + SecretConfigPath string // Path to the persistent HMAC secret file + TokenStoreDBPath string // Directory path for the BadgerDB token store + InterstitialPaths []string // Paths to search for the interstitial HTML page + + // Security Settings + CheckPoSTimes bool // Enable Proof-of-Space-Time consistency checks + PoSTimeConsistencyRatio float64 // Allowed ratio between fastest and slowest PoS runs + HTMLCheckpointExclusions []string // Path prefixes to exclude from HTML checkpoint + HTMLCheckpointExcludedExtensions map[string]bool // File extensions to exclude (lowercase, '.') + DangerousQueryPatterns []*regexp.Regexp // Regex patterns to block in query strings + BlockDangerousPathChars bool // Block paths containing potentially dangerous characters (;, `) + // User Agent validation settings + UserAgentValidationExclusions []string // Path prefixes to skip UA validation + UserAgentRequiredPrefixes map[string]string // Path prefix -> required UA prefix + // Note: Binding to IP, User Agent, and Browser Hint is always enabled. 
+ + // Reverse Proxy Settings + ReverseProxyMappings map[string]string // Map of hostname to backend URL (e.g., "app.example.com": "http://127.0.0.1:8080") +} + +var ( + // Global configuration instance + checkpointConfig Config + + // Secret key used for HMAC verification - automatically generated on startup + hmacSecret []byte + // Used nonces to prevent replay attacks - use sync.Map for concurrency + usedNonces sync.Map // map[string]time.Time + // IP-based rate limiting for token generation - use sync.Map for concurrency + ipRateLimit sync.Map // map[string]*atomic.Int64 (or similar atomic counter) + // Challenge parameters store with request IDs - use sync.Map for concurrency + challengeStore sync.Map // map[string]ChallengeParams + // Global token store (now BadgerDB based) + tokenStore *TokenStore + // in-memory cache for the interstitial HTML to avoid repeated disk reads + interstitialContent string + interstitialOnce sync.Once + interstitialLoadErr error + // parsed template for interstitial page + interstitialTmpl *template.Template + interstitialTmplOnce sync.Once + interstitialTmplErr error + // pool for gob encoding buffers to reduce allocations + gobBufferPool = sync.Pool{ + New: func() interface{} { return new(bytes.Buffer) }, + } +) + +// Need atomic package for ipRateLimit counter + +func init() { + // Load complete configuration from checkpoint.toml (required) + var cfg Config + if err := LoadConfig("checkpoint", &cfg); err != nil { + log.Fatalf("Failed to load checkpoint config: %v", err) + } + SetConfig(cfg) + // Register sanitization plugin (cleanup URLs/queries before checkpoint) + RegisterPlugin("sanitize", RequestSanitizationMiddleware) + // Register checkpoint plugin + RegisterPlugin("checkpoint", New) + + // Initialize stores AFTER config is potentially set/loaded + // Ensure tokenStore is initialized before use + var err error + tokenStore, err = NewTokenStore(checkpointConfig.TokenStoreDBPath) + if err != nil { + log.Fatalf("CRITICAL: Failed to initialize TokenStore database: %v", err) + } + + // Initialize secret + _ = initSecret() + + // Start cleanup timer for nonces/ip rates (token cleanup handled by DB TTL) + _ = startCleanupTimer() +} + +// SecretConfig contains configuration for the Checkpoint system (for secret file persistence) +type SecretConfig struct { + HmacSecret []byte `json:"hmac_secret"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +// --- End Configuration --- + +// SetConfig swaps in your custom Config (usually loaded from TOML). +// Do this before using the middleware, ideally at startup. +func SetConfig(cfg Config) { + checkpointConfig = cfg + // Re-initialization of token store path is complex with BadgerDB, recommend restart. + // Other config changes can be applied dynamically if needed. +} + +// --- Token Store (BadgerDB Implementation) --- + +// StoredTokenData holds the relevant information persisted for each token hash. +// This includes binding information needed for verification. +type StoredTokenData struct { + ClientIPHash string // Hash of IP used during issuance + UserAgentHash string // Hash of User Agent used during issuance + BrowserHint string // Browser Hint used during issuance + LastVerified time.Time // Last time this token was successfully validated + ExpiresAt time.Time // Original expiration time of the token (for reference, TTL enforces) +} + +// TokenStore manages persistent storage of verified tokens using BadgerDB. 
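+// Layout: the key is the token hash from calculateTokenHash, the value is gob-encoded StoredTokenData, +// and each entry's Badger TTL is set to the time remaining until the token's ExpiresAt, so expired +// entries age out without a separate cleanup pass.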
+type TokenStore struct { + DB *badger.DB +} + +// NewTokenStore initializes and returns a new TokenStore using BadgerDB. +func NewTokenStore(dbPath string) (*TokenStore, error) { + if err := os.MkdirAll(dbPath, 0755); err != nil { + return nil, fmt.Errorf("failed to create token store directory %s: %w", dbPath, err) + } + opts := badger.DefaultOptions(dbPath) + // Tune options for performance if needed (e.g., memory usage) + opts.Logger = nil // Disable default Badger logger unless debugging + db, err := badger.Open(opts) + if err != nil { + return nil, fmt.Errorf("failed to open token store database at %s: %w", dbPath, err) + } + store := &TokenStore{DB: db} + // Start BadgerDB's own value log GC routine (optional but recommended) + go store.runValueLogGC() + return store, nil +} + +// Close closes the BadgerDB database. +// Should be called during graceful shutdown. +func (store *TokenStore) Close() error { + if store.DB != nil { + log.Println("Closing TokenStore database...") + return store.DB.Close() + } + return nil +} + +// runValueLogGC runs BadgerDB's value log garbage collection periodically. +func (store *TokenStore) runValueLogGC() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + for range ticker.C { + again: + err := store.DB.RunValueLogGC(0.7) // Run GC if 70% space can be reclaimed + if err == nil { + goto again // Run GC multiple times if needed + } + if err != badger.ErrNoRewrite { + log.Printf("WARNING: BadgerDB RunValueLogGC error: %v", err) + } + } +} + +// encodeTokenData serializes StoredTokenData using gob. +func encodeTokenData(data *StoredTokenData) ([]byte, error) { + // get a buffer from pool + buf := gobBufferPool.Get().(*bytes.Buffer) + buf.Reset() + enc := gob.NewEncoder(buf) + if err := enc.Encode(data); err != nil { + gobBufferPool.Put(buf) + return nil, fmt.Errorf("failed to gob encode token data: %w", err) + } + // copy out the bytes to avoid retaining large buffer + out := make([]byte, buf.Len()) + copy(out, buf.Bytes()) + buf.Reset() + gobBufferPool.Put(buf) + return out, nil +} + +// decodeTokenData deserializes StoredTokenData using gob. +func decodeTokenData(encoded []byte) (*StoredTokenData, error) { + var data StoredTokenData + // use a reader to avoid extra buffer allocation + reader := bytes.NewReader(encoded) + dec := gob.NewDecoder(reader) + if err := dec.Decode(&data); err != nil { + return nil, fmt.Errorf("failed to gob decode token data: %w", err) + } + return &data, nil +} + +// addToken stores the token data in BadgerDB with a TTL. +func (store *TokenStore) addToken(tokenHash string, data *StoredTokenData) error { + encodedData, err := encodeTokenData(data) + if err != nil { + return err // Error already wrapped + } + + // Calculate TTL based on the token's specific expiration + ttl := time.Until(data.ExpiresAt) + if ttl <= 0 { + log.Printf("Attempted to add already expired token hash %s", tokenHash) + return nil // Don't add already expired tokens + } + + err = store.DB.Update(func(txn *badger.Txn) error { + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) + + if err != nil { + return fmt.Errorf("failed to add token hash %s to DB: %w", tokenHash, err) + } + return nil +} + +// updateTokenVerification updates the LastVerified time for an existing token. 
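+// The entry is decoded, stamped, and rewritten with a TTL recomputed from the stored ExpiresAt; a key +// that has already expired or been deleted is treated as a no-op rather than an error.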
+func (store *TokenStore) updateTokenVerification(tokenHash string) error { + return store.DB.Update(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + // If token expired or was deleted between check and update, log and ignore. + if err == badger.ErrKeyNotFound { + log.Printf("Token hash %s not found during update verification (likely expired/deleted)", tokenHash) + return nil // Not a critical error in this context + } + return fmt.Errorf("failed to get token %s for update: %w", tokenHash, err) + } + + var storedData *StoredTokenData + err = item.Value(func(val []byte) error { + storedData, err = decodeTokenData(val) + return err + }) + if err != nil { + return fmt.Errorf("failed to decode token %s value for update: %w", tokenHash, err) + } + + // Update LastVerified and re-encode + storedData.LastVerified = time.Now() + encodedData, err := encodeTokenData(storedData) + if err != nil { + return err + } + + // Set the entry again (TTL remains the same based on original ExpiresAt) + ttl := time.Until(storedData.ExpiresAt) + if ttl <= 0 { + return nil + } // Don't update if expired + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) +} + +// lookupTokenData retrieves token data from BadgerDB. +// Returns the data, true if found and not expired, or false otherwise. +// Added context parameter +func (store *TokenStore) lookupTokenData(ctx context.Context, tokenHash string) (*StoredTokenData, bool, error) { + var storedData *StoredTokenData + var found bool + + err := store.DB.View(func(txn *badger.Txn) error { + // Check context cancellation within the transaction + if ctx.Err() != nil { + return ctx.Err() + } + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + if err == badger.ErrKeyNotFound { + return nil // Not found, not an error for lookup + } + return fmt.Errorf("failed to get token hash %s from DB: %w", tokenHash, err) + } + + // Key exists, decode the value + err = item.Value(func(val []byte) error { + // Check context cancellation before decoding + if ctx.Err() != nil { + return ctx.Err() + } + var decodeErr error + storedData, decodeErr = decodeTokenData(val) + return decodeErr + }) + if err != nil { + // If context was cancelled, return that error + if ctx.Err() != nil { + return ctx.Err() + } + // Return actual decoding error + return fmt.Errorf("failed to decode StoredTokenData for hash %s: %w", tokenHash, err) + } + + // Check expiration explicitly just in case TTL mechanism has latency + if time.Now().After(storedData.ExpiresAt) { + log.Printf("Token hash %s found but expired (ExpiresAt: %v)", tokenHash, storedData.ExpiresAt) + storedData = nil // Treat as not found if expired + return nil + } + + found = true + return nil + }) + + if err != nil { + // Don't log here, return the error to the caller (validateToken) + return nil, false, err // Return the actual error + } + + return storedData, found, nil // Success +} + +// --- End Token Store --- + +// CloseTokenStore provides a package-level function to close the global token store. +// This should be called during application shutdown. 
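+// Typical use from the host application (illustrative; assumes this package is imported as "middleware"): +//   defer middleware.CloseTokenStore() +// or call it from a shutdown/signal handler before exiting.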
+func CloseTokenStore() error { + if tokenStore != nil { + return tokenStore.Close() + } + return nil +} + +// loadInterstitialHTML returns the cached interstitial HTML (loads once from disk) +func loadInterstitialHTML() (string, error) { + interstitialOnce.Do(func() { + for _, path := range checkpointConfig.InterstitialPaths { + if data, err := os.ReadFile(path); err == nil { + interstitialContent = string(data) + return + } + } + interstitialLoadErr = fmt.Errorf("could not find checkpoint interstitial HTML at any configured path") + }) + return interstitialContent, interstitialLoadErr +} + +// getInterstitialTemplate parses the cached HTML as a Go template (once) +func getInterstitialTemplate() (*template.Template, error) { + interstitialTmplOnce.Do(func() { + raw, err := loadInterstitialHTML() + if err != nil { + interstitialTmplErr = err + return + } + interstitialTmpl, interstitialTmplErr = template.New("interstitial").Parse(raw) + }) + return interstitialTmpl, interstitialTmplErr +} + +// serveInterstitial serves the challenge page using a Go template for safe interpolation +func serveInterstitial(c *fiber.Ctx) error { + requestID := generateRequestID(c) + c.Status(200) + c.Set("Content-Type", "text/html; charset=utf-8") + tmpl, err := getInterstitialTemplate() + if err != nil { + log.Printf("WARNING: %v", err) + return c.SendString("Security verification required. Please refresh the page.") + } + // prepare data for template + host := c.Hostname() + originalURL, _ := c.Locals("originalURL").(string) + targetPath := c.Path() + if originalURL != "" { + targetPath = originalURL + } + data := struct { + TargetPath string + RequestID string + Host string + FullURL string + }{ + TargetPath: targetPath, + RequestID: requestID, + Host: host, + FullURL: c.BaseURL() + targetPath, + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Printf("ERROR: Interstitial template execution failed: %v", err) + return c.SendString("Security verification required. 
Please refresh the page.") + } + return c.SendString(buf.String()) +} + +// checkPoSTimes ensures that memory proof run times are within the allowed ratio +func checkPoSTimes(times []int64) error { + if len(times) != 3 { + return fmt.Errorf("invalid PoS run times length") + } + minT, maxT := times[0], times[0] + for _, t := range times[1:] { + if t < minT { + minT = t + } + if t > maxT { + maxT = t + } + } + if checkpointConfig.CheckPoSTimes && float64(maxT) > float64(minT)*checkpointConfig.PoSTimeConsistencyRatio { + return fmt.Errorf("PoS run times ('i') are not consistent (ratio %.2f > %.2f)", + float64(maxT)/float64(minT), checkpointConfig.PoSTimeConsistencyRatio) + } + return nil +} + +// getDomainFromHost returns the base domain from a hostname +// For proper cookie sharing in both production and development +func getDomainFromHost(hostname string) string { + // Handle localhost development + if hostname == "localhost" || strings.HasPrefix(hostname, "localhost:") || + hostname == "127.0.0.1" || strings.HasPrefix(hostname, "127.0.0.1:") { + return "" // Use host-only cookies for localhost + } + + // For IP addresses, use host-only cookies + if net.ParseIP(strings.Split(hostname, ":")[0]) != nil { + return "" // IP address - use host-only + } + + parts := strings.Split(hostname, ".") + if len(parts) <= 1 { + return hostname // single word domain - unlikely + } + + // For standard domains, return domain with leading dot + if len(parts) >= 2 { + // Return parent domain for proper cookie sharing + domain := parts[len(parts)-2] + "." + parts[len(parts)-1] + return "." + domain // Leading dot is important + } + + return "" // Fallback to host-only cookie +} + +// issueToken handles token generation, cookie setting, and JSON response +func issueToken(c *fiber.Ctx, token CheckpointToken) error { + // 1. Generate the token hash + tokenHash := calculateTokenHash(token) + + // 2. Create the data to store in the DB + storedData := &StoredTokenData{ + ClientIPHash: token.ClientIP, // Assumes token struct is already populated + UserAgentHash: token.UserAgent, + BrowserHint: token.BrowserHint, + LastVerified: token.LastVerified, + ExpiresAt: token.ExpiresAt, // Store original expiration + } + + // 3. Add to the database + if err := tokenStore.addToken(tokenHash, storedData); err != nil { + log.Printf("ERROR: Failed to store token in DB for hash %s: %v", tokenHash, err) + // Decide if this is fatal or just a warning. For now, log and continue. + // return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to store verification proof"}) + } + + // 4. Sign the token (as before) + token.Signature = "" // Clear signature before marshalling for signing + tokenBytesForSig, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tokenBytesForSig) + + // 5. Prepare final token for cookie + finalBytes, err := json.Marshal(token) + if err != nil { + log.Printf("ERROR: Failed to marshal final token: %v", err) + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to prepare token"}) + } + tokenStr := base64.StdEncoding.EncodeToString(finalBytes) + + // 6. 
Set cookie + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Cookie expires when token expires + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) + + return c.JSON(fiber.Map{"token": tokenStr, "expires_at": token.ExpiresAt}) +} + +// Initialize a secure random secret key or load from persistent storage +func initSecret() bool { + if _, err := os.Stat(checkpointConfig.SecretConfigPath); err == nil { + // Config file exists, try to load it + if loadedSecret := loadSecretFromFile(); loadedSecret != nil { + hmacSecret = loadedSecret + log.Printf("Loaded existing HMAC secret from %s", checkpointConfig.SecretConfigPath) + return true + } + } + + // No config file or loading failed, generate a new secret + hmacSecret = make([]byte, 32) + _, err := cryptorand.Read(hmacSecret) + if err != nil { + // Critical security error - don't continue with insecure random numbers + log.Fatalf("CRITICAL: Could not generate secure random secret: %v", err) + } + + // Ensure data directory exists + if err := os.MkdirAll(filepath.Dir(checkpointConfig.SecretConfigPath), 0755); err != nil { + log.Printf("WARNING: Could not create data directory: %v", err) + return true + } + + // Save the new secret to file + config := SecretConfig{ + HmacSecret: hmacSecret, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + } + + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not save HMAC secret to file: %v", err) + } else { + log.Printf("Created and saved new HMAC secret to %s", checkpointConfig.SecretConfigPath) + } + } + + return true +} + +// loadSecretFromFile loads the HMAC secret from persistent storage +func loadSecretFromFile() []byte { + configBytes, err := os.ReadFile(checkpointConfig.SecretConfigPath) + if err != nil { + log.Printf("ERROR: Could not read secret config file: %v", err) + return nil + } + + var config SecretConfig + if err := json.Unmarshal(configBytes, &config); err != nil { + log.Printf("ERROR: Could not parse secret config file: %v", err) + return nil + } + + if len(config.HmacSecret) < 16 { + log.Printf("ERROR: Secret from file is too short, generating a new one") + return nil + } + + // Update the last loaded time + config.UpdatedAt = time.Now() + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not update HMAC secret file: %v", err) + } + } + + return config.HmacSecret +} + +// Start a timer to periodically clean up the nonce and rate limit maps +func startCleanupTimer() bool { + ticker := 
time.NewTicker(1 * time.Hour) + go func() { + for range ticker.C { + cleanupExpiredData() + cleanupExpiredChallenges() + } + }() + return true +} + +// Clean up expired nonces and rate limit data +func cleanupExpiredData() { + // Clean up used nonces + now := time.Now() + expiredNonceCount := 0 + usedNonces.Range(func(key, value interface{}) bool { + nonce := key.(string) + timestamp := value.(time.Time) + if now.Sub(timestamp) > checkpointConfig.MaxNonceAge { + usedNonces.Delete(nonce) + expiredNonceCount++ + } + return true // continue iteration + }) + if expiredNonceCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired nonces.", expiredNonceCount) + } + + // Reset IP rate limits every hour by deleting all entries + ipRateLimit.Range(func(key, value interface{}) bool { + ipRateLimit.Delete(key) + return true + }) + log.Println("Checkpoint: IP rate limits reset.") +} + +// CheckpointToken represents a validated token +type CheckpointToken struct { + Nonce string `json:"g"` // Nonce + Challenge string `json:"-"` // Derived server-side, not in token + Salt string `json:"-"` // Derived server-side, not in token + Difficulty int `json:"-"` // Derived server-side, not in token + ExpiresAt time.Time `json:"exp"` + ClientIP string `json:"cip,omitempty"` + UserAgent string `json:"ua,omitempty"` + BrowserHint string `json:"bh,omitempty"` + Entropy string `json:"ent,omitempty"` + Created time.Time `json:"crt"` + LastVerified time.Time `json:"lvf,omitempty"` + Signature string `json:"sig,omitempty"` + TokenFormat int `json:"fmt"` +} + +// ChallengeParams stores parameters for a challenge +type ChallengeParams struct { + Challenge string `json:"challenge"` // Base64 encoded + Salt string `json:"salt"` // Base64 encoded + Difficulty int `json:"difficulty"` + ExpiresAt time.Time `json:"expires_at"` + ClientIP string `json:"-"` + PoSSeed string `json:"pos_seed"` // Hex encoded +} + +// isExcludedHTMLPath checks if a path should be excluded from the HTML checkpoint. +// Exclusions happen based on configured prefixes or file extensions. +func isExcludedHTMLPath(path string) bool { + // 1. Check path prefixes + for _, prefix := range checkpointConfig.HTMLCheckpointExclusions { + if strings.HasPrefix(path, prefix) { + return true // Excluded by prefix + } + } + + // 2. Check file extension using the set + ext := strings.ToLower(filepath.Ext(path)) + if ext != "" { + if _, exists := checkpointConfig.HTMLCheckpointExcludedExtensions[ext]; exists { + return true // Excluded by file extension + } + } + + // 3. If not excluded by prefix or extension, it needs the checkpoint + return false +} + +// DirectProxy returns a handler that simply forwards the request/response to targetURL. +// Headers, status codes, and body are passed through without modification. 
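+// Typical registration (illustrative): app.All("/*", DirectProxy("http://127.0.0.1:3000")); in this +// package it is invoked per request as DirectProxy(targetURL)(c) from the checkpoint handler.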
+func DirectProxy(targetURL string) fiber.Handler { + target, err := url.Parse(targetURL) + if err != nil { + return func(c *fiber.Ctx) error { + log.Printf("ERROR: Invalid target URL %s: %v", targetURL, err) + return fiber.ErrBadGateway + } + } + + proxy := httputil.NewSingleHostReverseProxy(target) + + // Set up custom director to properly map headers + originalDirector := proxy.Director + proxy.Director = func(req *http.Request) { + originalDirector(req) + + // Add X-Forwarded headers + req.Header.Set("X-Forwarded-Host", req.Host) + req.Header.Set("X-Forwarded-Proto", "http") // Update to https when needed + + if v := req.Header.Get("X-Forwarded-For"); v != "" { + req.Header.Set("X-Forwarded-For", v+", "+req.RemoteAddr) + } else { + req.Header.Set("X-Forwarded-For", req.RemoteAddr) + } + } + + // NOTE: the handler below performs its own round trip with http.DefaultClient, so the Director + // configured above (and the X-Forwarded-* headers it sets) is not applied to the outgoing request. + return func(c *fiber.Ctx) error { + // Create proxy request + proxyReq, err := http.NewRequest( + string(c.Method()), + target.String()+c.OriginalURL(), // forward path and query string, not just the path + bytes.NewReader(c.Body()), + ) + if err != nil { + log.Printf("ERROR: Failed to create proxy request: %v", err) + return fiber.ErrBadGateway + } + + // Copy all headers from the Fiber context to the proxy request + c.Request().Header.VisitAll(func(key, value []byte) { + proxyReq.Header.Set(string(key), string(value)) + }) + + // Execute the proxy request + proxyRes, err := http.DefaultClient.Do(proxyReq) + if err != nil { + log.Printf("ERROR: Proxy request failed: %v", err) + return fiber.ErrBadGateway + } + defer proxyRes.Body.Close() + + // Copy all headers from the proxy response to Fiber's response + for key, values := range proxyRes.Header { + for _, value := range values { + c.Response().Header.Add(key, value) + } + } + + // Set the status code + c.Status(proxyRes.StatusCode) + + // Copy the body + body, err := io.ReadAll(proxyRes.Body) + if err != nil { + log.Printf("ERROR: Failed to read proxy response body: %v", err) + return fiber.ErrBadGateway + } + + return c.Send(body) + } +} + +// isBlockedBot checks concurrently if the User-Agent indicates a known bot +// or doesn't have a standard browser prefix. +// It returns true as soon as one check decides to block.
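+// e.g. an empty User-Agent or "curl/8.4.0" is blocked (no standard browser prefix), while a typical +// "Mozilla/5.0 ..." value passes unless the useragent library classifies it as a bot (illustrative values).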
+func isBlockedBot(userAgent string) bool { + if userAgent == "" { + // Empty User-Agent is suspicious, block it + log.Printf("INFO: UA blocked - empty user agent") + return true + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() // Ensure context is cancelled eventually + + resultChan := make(chan bool, 2) // Buffered channel for results + + // Goroutine 1: Library-based bot check + go func() { + ua := useragent.Parse(userAgent) + shouldBlock := ua.Bot + if shouldBlock { + log.Printf("INFO: UA blocked by library (Bot detected: %s): %s", ua.Name, userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Goroutine 2: Prefix check + go func() { + // Standard browser User-Agent prefixes + standardPrefixes := []string{"Mozilla/", "Opera/", "DuckDuckGo/", "Dart/"} + hasStandardPrefix := false + + for _, prefix := range standardPrefixes { + if strings.HasPrefix(userAgent, prefix) { + hasStandardPrefix = true + break + } + } + + // Block if it does NOT have a standard prefix + shouldBlock := !hasStandardPrefix + + if shouldBlock { + log.Printf("INFO: UA blocked by prefix check (doesn't have standard prefix): %s", userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Wait for results and decide + result1 := <-resultChan + if result1 { + cancel() // Found a reason to block, cancel the other check + return true + } + + // First check didn't block, wait for the second result + result2 := <-resultChan + // cancel() is deferred, so it will run anyway, ensuring cleanup + return result2 // Block if the second check decided to block +} + +// New gives you a Fiber handler that does the POW challenge (HTML/API) or proxies requests. 
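+// Per request it: validates the User-Agent (unless the path is excluded), serves /api/pow/ and /api/verify +// locally, proxies or bypasses other /api paths, skips excluded static paths, and otherwise either forwards +// the request / calls c.Next() when the token cookie validates or serves the PoW interstitial.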
+func New() fiber.Handler { + return func(c *fiber.Ctx) error { + host := c.Hostname() + targetURL, useProxy := checkpointConfig.ReverseProxyMappings[host] + path := c.Path() + + // --- User-Agent Validation --- + // Only check User-Agent if path is not in exclusion list + skipUA := false + for _, prefix := range checkpointConfig.UserAgentValidationExclusions { + if strings.HasPrefix(path, prefix) { + skipUA = true + break + } + } + + if !skipUA { + // First check required UA prefixes for specific paths + for p, required := range checkpointConfig.UserAgentRequiredPrefixes { + if strings.HasPrefix(path, p) { + ua := c.Get("User-Agent") + if !strings.HasPrefix(ua, required) { + log.Printf("INFO: UA blocked by required prefix %s: %s", required, ua) + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + break + } + } + + // Then do general bot check for all non-excluded paths + userAgent := c.Get("User-Agent") + if isBlockedBot(userAgent) { + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + } + + // Handle any API endpoints + if strings.HasPrefix(path, "/api") { + // Always serve PoW endpoints locally (challenge & verify) + if strings.HasPrefix(path, "/api/pow/") || strings.HasPrefix(path, "/api/verify") { + log.Printf("API checkpoint endpoint %s - handling locally", path) + return c.Next() + } + // Other API paths: skip checkpoint + if useProxy { + // Proxy to backend for proxied hosts + log.Printf("API proxying endpoint %s to %s", path, targetURL) + return DirectProxy(targetURL)(c) + } + log.Printf("API endpoint %s - bypassing checkpoint", path) + return c.Next() + } + + // --- Reverse Proxy Logic --- + if useProxy { + // Check for existing valid token cookie + tokenCookie := c.Cookies(checkpointConfig.CookieName) + log.Printf("Proxy: Checking token for host %s, path %s, cookie present: %v", + host, path, tokenCookie != "") + + // Check if this is an excluded path (API endpoints, etc) + if isExcludedHTMLPath(path) { + log.Printf("Excluded path %s for proxied host %s - proxying without token check", path, host) + + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } + + valid, err := validateToken(tokenCookie, c) + + if err != nil { + // Log validation errors but treat as invalid for proxying + log.Printf("Error validating token for proxied host %s, path %s: %v", host, path, err) + } + + if valid { + log.Printf("Valid token found for proxied host %s, path %s - forwarding request", host, path) + // Token is valid, proxy the request + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } else { + // Add debug logging + log.Printf("No valid token for proxied host %s, path %s - serving interstitial", host, path) + + // Save the original full URL for potential redirection after verification + c.Locals("originalURL", c.OriginalURL()) + + // No valid token, serve the interstitial challenge page. 
+ return serveInterstitial(c) + } + } + + // --- Standard HTML/Static/API Logic (No Proxy Mapping) --- + // Skip checkpoint for excluded paths (e.g., static assets, API endpoints handled separately) + if isExcludedHTMLPath(path) { + return c.Next() + } + + // --- Path needs checkpoint (potential HTML page) --- + tokenCookie := c.Cookies(checkpointConfig.CookieName) + if tokenCookie != "" { + valid, err := validateToken(tokenCookie, c) + if err != nil { + // Log validation errors but still serve interstitial for safety + log.Printf("Error validating token for path %s: %v", path, err) + // Fall through to serve interstitial + } else if valid { + // Token is valid, proceed to the requested page/handler + return c.Next() + } + // If token was present but invalid/expired, fall through to serve interstitial + } + + // No valid token found, serve the interstitial challenge page. + return serveInterstitial(c) + } +} + +// generateRequestID creates a unique ID for this verification request +func generateRequestID(c *fiber.Ctx) string { + challenge, salt := generateChallenge() + // Generate PoS seed + posSeedBytes := make([]byte, 32) + if n, err := cryptorand.Read(posSeedBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate PoS seed: %v", err) + } else if n != len(posSeedBytes) { + log.Fatalf("CRITICAL: Short read generating PoS seed: read %d bytes", n) + } + posSeed := hex.EncodeToString(posSeedBytes) + // Generate request ID + randBytes := make([]byte, 16) + if n, err := cryptorand.Read(randBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate request ID: %v", err) + } else if n != len(randBytes) { + log.Fatalf("CRITICAL: Short read generating request ID: read %d bytes", n) + } + requestID := hex.EncodeToString(randBytes) + + // Base64-encode the hex challenge and salt for storage + encodedChallenge := base64.StdEncoding.EncodeToString([]byte(challenge)) + encodedSalt := base64.StdEncoding.EncodeToString([]byte(salt)) + params := ChallengeParams{ + Challenge: encodedChallenge, + Salt: encodedSalt, + Difficulty: checkpointConfig.Difficulty, + ExpiresAt: time.Now().Add(checkpointConfig.ChallengeExpiration), + ClientIP: getRealIP(c), + PoSSeed: posSeed, + } + challengeStore.Store(requestID, params) + return requestID +} + +func cleanupExpiredChallenges() { + now := time.Now() + expiredChallengeCount := 0 + challengeStore.Range(func(key, value interface{}) bool { + id := key.(string) + params := value.(ChallengeParams) + if now.After(params.ExpiresAt) { + challengeStore.Delete(id) + expiredChallengeCount++ + } + return true // continue iteration + }) + if expiredChallengeCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired challenges.", expiredChallengeCount) + } +} + +// GetCheckpointChallengeHandler serves challenge parameters via API +func GetCheckpointChallengeHandler(c *fiber.Ctx) error { + requestID := c.Query("id") + if requestID == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Missing request ID"}) + } + + // Apply rate limiting to challenge generation + clientIP := getRealIP(c) + val, _ := ipRateLimit.LoadOrStore(clientIP, new(atomic.Int64)) + ipCounter := val.(*atomic.Int64) + attempts := ipCounter.Add(1) // Increment and get new value + + // Limit to a reasonable number of challenge requests per hour (using the same MaxAttemptsPerHour config) + if attempts > int64(checkpointConfig.MaxAttemptsPerHour) { + return c.Status(fiber.StatusTooManyRequests).JSON(fiber.Map{"error": "Too many challenge requests. 
Please try again later."}) + } + + val, exists := challengeStore.Load(requestID) + if !exists { + return c.Status(fiber.StatusNotFound).JSON(fiber.Map{"error": "Challenge not found or expired"}) + } + params := val.(ChallengeParams) + + if clientIP != params.ClientIP { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + decoySeedBytes := make([]byte, 8) + cryptorand.Read(decoySeedBytes) + decoySeed := hex.EncodeToString(decoySeedBytes) + decoyFields := make([]map[string]interface{}, 0) + decoyFieldCount := 2 + int(decoySeedBytes[0])%3 + for i := 0; i < decoyFieldCount; i++ { + nameLen := 5 + int(decoySeedBytes[i%8])%8 + valLen := 8 + int(decoySeedBytes[(i+1)%8])%24 + name := randomHexString(nameLen) + val := randomHexString(valLen) + decoyFields = append(decoyFields, map[string]interface{}{name: val}) + } + return c.JSON(fiber.Map{ + "a": params.Challenge, // challenge + "b": params.Salt, // salt + "c": params.Difficulty, // difficulty + "d": params.PoSSeed, // pos_seed + "e": decoySeed, // decoy_seed + "f": decoyFields, // decoy_fields + }) +} + +func randomHexString(n int) string { + b := make([]byte, (n+1)/2) + if m, err := cryptorand.Read(b); err != nil { + log.Fatalf("CRITICAL: Failed to generate random hex string: %v", err) + } else if m != len(b) { + log.Fatalf("CRITICAL: Short read generating random hex string: read %d bytes", m) + } + s := hex.EncodeToString(b) + if len(s) < n { + log.Fatalf("CRITICAL: Random hex string too short: got %d hex chars, want %d", len(s), n) + } + return s[:n] +} + +func getFullClientIP(c *fiber.Ctx) string { + ip := getRealIP(c) + if ip == "" { + return "unknown" + } + h := sha256.Sum256([]byte(ip)) + return hex.EncodeToString(h[:8]) +} + +func hashUserAgent(userAgent string) string { + if userAgent == "" { + return "" + } + hash := sha256.Sum256([]byte(userAgent)) + return hex.EncodeToString(hash[:8]) +} + +func extractBrowserFingerprint(c *fiber.Ctx) string { + headers := []string{ + c.Get("Sec-CH-UA"), c.Get("Sec-CH-UA-Platform"), c.Get("Sec-CH-UA-Mobile"), + c.Get("Sec-CH-UA-Platform-Version"), c.Get("Sec-CH-UA-Arch"), c.Get("Sec-CH-UA-Model"), + } + var validHeaders []string + for _, h := range headers { + if h != "" { + validHeaders = append(validHeaders, h) + } + } + if len(validHeaders) == 0 { + return "" + } + fingerprint := strings.Join(validHeaders, "|") + hash := sha256.Sum256([]byte(fingerprint)) + return hex.EncodeToString(hash[:12]) +} + +func validateToken(tokenStr string, c *fiber.Ctx) (bool, error) { + // Explicitly handle missing token case first. + if tokenStr == "" { + return false, nil // No token cookie found, definitely not valid. + } + + // 1. Decode the token string from the cookie + tokenBytes, err := base64.StdEncoding.DecodeString(tokenStr) + if err != nil { + // Invalid Base64 encoding - treat as invalid token, not a system error + return false, nil + } + + // Check for empty byte slice after decoding + if len(tokenBytes) == 0 { + // Decoded to empty - treat as invalid token + return false, nil + } + + // 2. Unmarshal + var token CheckpointToken + if err := json.Unmarshal(tokenBytes, &token); err != nil { + // Invalid JSON structure - treat as invalid token + return false, nil // Error seen in logs comes from here, now returns nil error + } + + // 3. Basic expiration check based on ExpiresAt field in the token itself + // Note: Return nil error for expired token, it's just invalid. 
+ if time.Now().After(token.ExpiresAt) { + return false, nil // Token itself says it's expired + } + + // 4. Check token signature first (Format 2+) + if token.TokenFormat < 2 { + return false, nil // Old format not supported/secure - invalid + } + if !verifyTokenSignature(token, tokenBytes) { + return false, nil // Invalid signature - invalid + } + + // 5. Calculate the token hash to look up in the database + tokenHash := calculateTokenHash(token) + + // 6. Look up the token data in BadgerDB + storedData, found, dbErr := tokenStore.lookupTokenData(c.Context(), tokenHash) + if dbErr != nil { + // Actual DB error during lookup - THIS is a real error to return + return false, fmt.Errorf("token DB lookup failed: %w", dbErr) + } + if !found { + // Token hash not found in DB or explicitly expired according to DB record + return false, nil + } + + // 7. *** CRITICAL: Verify bindings against stored data and current request *** + // Compare Client IP Hash + currentPartialIP := getFullClientIP(c) + if storedData.ClientIPHash != currentPartialIP { + return false, nil // IP mismatch - invalid + } + + // Compare User Agent Hash + currentUserAgent := hashUserAgent(c.Get("User-Agent")) + if storedData.UserAgentHash != currentUserAgent { + return false, nil // User agent mismatch - invalid + } + + // Compare Browser Hint + currentBrowserHint := extractBrowserFingerprint(c) + // Only enforce if hint was stored AND current hint is available + if storedData.BrowserHint != "" && currentBrowserHint != "" && storedData.BrowserHint != currentBrowserHint { + return false, nil // Browser hint mismatch - invalid + } + + // 8. All checks passed! Token is valid and bound correctly. + // Update LastVerified time in the database (best effort, log errors) + if err := tokenStore.updateTokenVerification(tokenHash); err != nil { + log.Printf("WARNING: Failed to update token verification time for hash %s: %v", tokenHash, err) + } + + // Refresh the cookie with potentially updated ExpiresAt (if sliding window desired) or just LastVerified. + // For simplicity, we'll just refresh with the same ExpiresAt for now. 
+ token.LastVerified = time.Now() + updateTokenCookie(c, token) // Resign and set cookie + + return true, nil +} + +func updateTokenCookie(c *fiber.Ctx, token CheckpointToken) { + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + // Recompute signature because LastVerified might have changed + token.Signature = "" + tempBytes, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tempBytes) // Compute signature on token WITHOUT old signature + + finalTokenBytes, err := json.Marshal(token) // Marshal again with new signature + if err != nil { + log.Printf("Error marshaling token for cookie update: %v", err) + return + } + tokenStr := base64.StdEncoding.EncodeToString(finalTokenBytes) + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Use original expiration + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) +} + +func verifyProofOfWork(challenge, salt, nonce string, difficulty int) bool { + inputStr := challenge + salt + nonce + hash := calculateHash(inputStr) + prefix := strings.Repeat("0", difficulty) + return strings.HasPrefix(hash, prefix) +} + +func calculateHash(input string) string { + hash := sha256.Sum256([]byte(input)) + return hex.EncodeToString(hash[:]) +} + +func computeTokenSignature(token CheckpointToken, tokenBytes []byte) string { + tokenCopy := token + tokenCopy.Signature = "" // Ensure signature field is empty for signing + tokenToSign, _ := json.Marshal(tokenCopy) + h := hmac.New(sha256.New, hmacSecret) + h.Write(tokenToSign) + return hex.EncodeToString(h.Sum(nil)) +} + +func verifyTokenSignature(token CheckpointToken, tokenBytes []byte) bool { + if token.Signature == "" { + return false + } + expectedSignature := computeTokenSignature(token, tokenBytes) + return hmac.Equal([]byte(token.Signature), []byte(expectedSignature)) +} + +// VerifyCheckpointHandler verifies the challenge solution +func VerifyCheckpointHandler(c *fiber.Ctx) error { + clientIP := getRealIP(c) + + var req CheckpointVerifyRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request format"}) + } + + // Challenge lookup + challengeVal, challengeExists := challengeStore.Load(req.RequestID) + if !challengeExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid or expired request ID"}) + } + params := challengeVal.(ChallengeParams) + + if clientIP != params.ClientIP { // Check against IP stored with challenge + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + + decodedChallenge := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Challenge); err == nil { + decodedChallenge = string(decoded) + } else { + return 
c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode challenge"}) + } + decodedSalt := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Salt); err == nil { + decodedSalt = string(decoded) + } else { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode salt"}) + } + + if req.Nonce == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Nonce ('g') required"}) + } + + // --- Nonce Check --- + nonceKey := req.Nonce + decodedChallenge + _, nonceExists := usedNonces.Load(nonceKey) + if nonceExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "This solution has already been used"}) + } + // --- End Nonce Check --- + + if !verifyProofOfWork(decodedChallenge, decodedSalt, req.Nonce, params.Difficulty) { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid proof-of-work solution"}) + } + + // --- Store Used Nonce (only after PoW is verified) --- + usedNonces.Store(nonceKey, time.Now()) + // --- End Store Used Nonce --- + + // Validate PoS hashes and times if provided + if len(req.PoSHashes) == 3 && len(req.PoSTimes) == 3 { + if req.PoSHashes[0] != req.PoSHashes[1] || req.PoSHashes[1] != req.PoSHashes[2] { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "PoS hashes ('h') do not match"}) + } + if len(req.PoSHashes[0]) != 64 { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS hash ('h') length"}) + } + if err := checkPoSTimes(req.PoSTimes); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": err.Error()}) + } + } else if checkpointConfig.CheckPoSTimes && (len(req.PoSHashes) != 0 || len(req.PoSTimes) != 0) { + // If PoS checking is enabled, but incorrect number of hashes/times provided + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS data provided"}) + } + + // Challenge is valid, remove it from store + challengeStore.Delete(req.RequestID) + + entropyBytes := make([]byte, 8) + _, err := cryptorand.Read(entropyBytes) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to generate secure token entropy"}) + } + entropy := hex.EncodeToString(entropyBytes) + + // *** Gather current binding info for the new token *** + now := time.Now() + expiresAt := now.Add(checkpointConfig.TokenExpiration) + browserHint := extractBrowserFingerprint(c) + clientIPHash := getFullClientIP(c) + userAgentHash := hashUserAgent(c.Get("User-Agent")) + + token := CheckpointToken{ + Nonce: req.Nonce, + ExpiresAt: expiresAt, + ClientIP: clientIPHash, + UserAgent: userAgentHash, + BrowserHint: browserHint, + Entropy: entropy, + Created: now, + LastVerified: now, + TokenFormat: 2, + } + + // Add a response header indicating success for the proxy + c.Set("X-Checkpoint-Status", "success") + log.Printf("Successfully verified challenge for IP %s, issuing token", clientIP) + + // Issue token (handles DB storage, signing, cookie setting) + return issueToken(c, token) +} + +// Renamed request struct +type CheckpointVerifyRequest struct { + RequestID string `json:"request_id"` + Nonce string `json:"g"` + PoSHashes []string `json:"h"` + PoSTimes []int64 `json:"i"` + DecoyHashes []string `json:"j"` + DecoyTimes []int64 `json:"k"` + DecoyFields []map[string]interface{} `json:"l"` +} + +func generateChallenge() (string, string) { + randomBytes := make([]byte, 16) + _, err := cryptorand.Read(randomBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed 
to generate secure random challenge: %v", err) + } + saltBytes := make([]byte, checkpointConfig.SaltLength) + _, err = cryptorand.Read(saltBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed to generate secure random salt: %v", err) + } + salt := hex.EncodeToString(saltBytes) + return hex.EncodeToString(randomBytes), salt +} + +// calculateTokenHash calculates a unique hash for storing the token status +// IMPORTANT: This hash is now used as the key in the database. +func calculateTokenHash(token CheckpointToken) string { + // Hash relevant fields that identify this specific verification instance + // Using Nonce, Entropy, and Creation time ensures uniqueness per issuance. + data := fmt.Sprintf("%s:%s:%d", + token.Nonce, + token.Entropy, + token.Created.UnixNano()) + hash := sha256.Sum256([]byte(data)) + return hex.EncodeToString(hash[:]) +} + +// RequestSanitizationMiddleware spots malicious patterns (SQLi, XSS, path traversal) +// and returns 403 immediately to keep your app safe. +func RequestSanitizationMiddleware() fiber.Handler { + return func(c *fiber.Ctx) error { + // Check URL path for directory traversal + path := c.Path() + if strings.Contains(path, "../") || strings.Contains(path, "..\\") { + log.Printf("Security block: Directory traversal attempt in path: %s from IP: %s", path, getRealIP(c)) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + + // Check query parameters for malicious patterns + query := c.Request().URI().QueryString() + if len(query) > 0 { + queryStr := string(query) + + // Check for dangerous characters if configured + if checkpointConfig.BlockDangerousPathChars { + if strings.Contains(queryStr, ";") || strings.Contains(queryStr, "\\") || strings.Contains(queryStr, "`") { + log.Printf("Security block: Dangerous character in query from IP: %s, Query: %s", getRealIP(c), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + + // Check for configured attack patterns + for _, pattern := range checkpointConfig.DangerousQueryPatterns { + if pattern.MatchString(queryStr) { + log.Printf("Security block: Malicious pattern match in query from IP: %s, Pattern: %s, Query: %s", + getRealIP(c), pattern.String(), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + } + + return c.Next() + } +} diff --git a/checkpoint_service/middleware/middleware/config/checkpoint.toml b/checkpoint_service/middleware/middleware/config/checkpoint.toml new file mode 100644 index 0000000..1baf766 --- /dev/null +++ b/checkpoint_service/middleware/middleware/config/checkpoint.toml @@ -0,0 +1,77 @@ +# ----------------------------------------------------------------------------- +# Checkpoint Middleware Configuration (checkpoint.toml) +# +# All durations are parsed via time.ParseDuration (e.g. "24h"). +# Arrays and tables map directly to the Config struct fields. 
+# ----------------------------------------------------------------------------- + +# === GENERAL SETTINGS === +# Number of leading zeros required in PoW hash +Difficulty = 4 +# Validity period for issued tokens +TokenExpiration = "24h" +# Name of the cookie used to store the checkpoint token +CookieName = "checkpoint_token" +# Domain attribute for the cookie; empty = host-only (localhost) +CookieDomain = "" +# Length of the random salt in bytes for challenges +SaltLength = 16 + +# === RATE LIMITING & EXPIRATION === +# Max PoW verification attempts per IP per hour +MaxAttemptsPerHour = 10 +# Max age for used nonces before cleanup +MaxNonceAge = "24h" +# Time allowed for solving a challenge +ChallengeExpiration = "5m" + +# === PERSISTENCE PATHS === +# File where HMAC secret is stored +SecretConfigPath = "./data/checkpoint_secret.json" +# Directory for BadgerDB token store +TokenStoreDBPath = "./data/checkpoint_tokendb" +# Ordered fallback paths for interstitial HTML +InterstitialPaths = [ + "./public/static/pow-interstitial.html", + "./develop/static/pow-interstitial.html" +] + +# === SECURITY SETTINGS === +# Enable Proof-of-Space-Time consistency checks +CheckPoSTimes = true +# Allowed ratio between slowest and fastest PoS runs +PoSTimeConsistencyRatio = 1.35 + +# === HTML CHECKPOINT EXCLUSIONS === +# Path prefixes to skip PoW interstitial +HTMLCheckpointExclusions = ["/api"] +# File extensions to skip PoW check +HTMLCheckpointExcludedExtensions = { ".jpg" = true, ".jpeg" = true, ".png" = true, ".gif" = true, ".svg" = true, ".webp" = true, ".ico" = true, ".bmp" = true, ".tif" = true, ".tiff" = true, ".mp4" = true, ".webm" = true, ".css" = true, ".js" = true, ".mjs" = true, ".woff" = true, ".woff2" = true, ".ttf" = true, ".otf" = true, ".eot" = true, ".json" = true, ".xml" = true, ".txt" = true, ".pdf" = true, ".map" = true, ".wasm" = true } + +# === QUERY SANITIZATION === +# Regex patterns (case-insensitive) to block in query strings +DangerousQueryPatterns = [ + "(?i)union\\s+select", + "(?i)drop\\s+table", + "(?i)insert\\s+into", + "(?i) 0 { + blockType = "asn_name_group" + blockValue = groupName + log.Printf("INFO: Blocking IP %s based on %s: %s (ASN: %d, Org: '%s')", ipStr, blockType, blockValue, clientASN, asnOrg) + customPage = asnGroupBlockPages[groupName] + // No need to unlock here, defer handles it + return cacheAndReturnBlockResult(ipStr, blockType, blockValue, customPage, asnOrg) + } + } + // RUnlock happens via defer + } + } else if asnErr != nil && !strings.Contains(asnErr.Error(), "cannot be found in the database") { + // Log errors other than "not found" + log.Printf("WARNING: GeoIP ASN lookup error for IP %s: %v", ipStr, asnErr) + } + + // --- Cache the result before returning --- // + computedEntry := blockCacheEntry{ + blocked: false, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + ipBlockCacheMutex.Lock() + ipBlockCache[ipStr] = computedEntry + ipBlockCacheMutex.Unlock() + return false, "", "", "", "" // Not blocked +} + +// Helper function to cache block results +func cacheAndReturnBlockResult(ipStr string, blockType string, blockValue string, customPage string, asnOrgName string) (bool, string, string, string, string) { + // Create the cache entry + computedEntry := blockCacheEntry{ + blocked: true, + blockType: blockType, + blockValue: blockValue, + customPage: customPage, + asnOrgName: asnOrgName, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + + // Use a separate defer+recover to ensure we don't crash the entire server + // if there's any issue with the 
cache + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("RECOVERED from panic while caching result: %v", r) + } + }() + + ipBlockCacheMutex.Lock() + defer ipBlockCacheMutex.Unlock() // Use defer to ensure unlock happens + ipBlockCache[ipStr] = computedEntry + }() + + return true, blockType, blockValue, customPage, asnOrgName +} + +// buildASNNameMatchers creates Aho-Corasick matchers for faster ASN name checking +func buildASNNameMatchers() { + // Acquire write lock before modifying the global map + asnNameMatchersMutex.Lock() + defer asnNameMatchersMutex.Unlock() + + // Clear any existing matchers first + asnNameMatchers = make(map[string]*ahocorasick.Matcher) + + for groupName, nameList := range blockedASNNames { + // Skip if the name list is empty + if len(nameList) == 0 { + log.Printf("Skipping matcher build for empty group: %s", groupName) + continue + } + + // Convert names to lowercase byte slices for case-insensitive matching + dict := make([][]byte, 0, len(nameList)) + for _, name := range nameList { + if name != "" { + dict = append(dict, []byte(strings.ToLower(name))) + } + } + + // Only create a matcher if we have patterns + if len(dict) > 0 { + // Use a recovery mechanism in case the matcher creation fails + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("PANIC while building Aho-Corasick matcher for group %s: %v", groupName, r) + // Ensure the entry for this group is nil if creation failed + asnNameMatchers[groupName] = nil + } + }() + + // This assignment happens under the write lock + asnNameMatchers[groupName] = ahocorasick.NewMatcher(dict) + log.Printf("Built Aho-Corasick matcher for ASN name group: %s (%d patterns)", groupName, len(dict)) + }() + } else { + log.Printf("No valid patterns found for ASN name group: %s", groupName) + } + } + // Unlock happens via defer +} + +// ReloadGeoIPDatabases closes and reopens the GeoIP database readers +// to load updated database files. Safe to call while the server is running. +func ReloadGeoIPDatabases() { + // Close existing readers if they're open + if geoipCountryReader != nil { + geoipCountryReader.Close() + geoipCountryReader = nil + } + if geoipASNReader != nil { + geoipASNReader.Close() + geoipASNReader = nil + } + + // Re-initialize the readers + initGeoIP() + log.Printf("GeoIP databases reloaded") +} + +// getRealIP gets the real client IP when behind a reverse proxy +// It checks X-Forwarded-For header first, then falls back to c.IP() +func getRealIP(c *fiber.Ctx) string { + // Check X-Forwarded-For header first + if xff := c.Get("X-Forwarded-For"); xff != "" { + // X-Forwarded-For can contain multiple IPs (client, proxy1, proxy2, ...) 
+ // The first one is the original client IP + ips := strings.Split(xff, ",") + if len(ips) > 0 { + // Get the first IP and trim whitespace + clientIP := strings.TrimSpace(ips[0]) + // Validate it's a real IP + if net.ParseIP(clientIP) != nil { + log.Printf("Using X-Forwarded-For IP: %s (original: %s)", clientIP, c.IP()) + return clientIP + } + } + } + + // Also check for custom Remote-Addr header that might be set by some proxies + if remoteAddr := c.Get("$remote_addr"); remoteAddr != "" { + // Validate it's a real IP + if net.ParseIP(remoteAddr) != nil { + log.Printf("Using $remote_addr IP: %s (original: %s)", remoteAddr, c.IP()) + return remoteAddr + } + } + + // Fallback to default IP + return c.IP() +} diff --git a/checkpoint_service/middleware/middleware/plugin.go b/checkpoint_service/middleware/middleware/plugin.go new file mode 100644 index 0000000..6305f0b --- /dev/null +++ b/checkpoint_service/middleware/middleware/plugin.go @@ -0,0 +1,47 @@ +// Package middleware contains a simple plugin system for Fiber middleware. +// Register plugins by name and factory, then main.go will load them automatically. +package middleware + +import ( + "path/filepath" + + "github.com/BurntSushi/toml" + "github.com/gofiber/fiber/v2" +) + +// Plugin holds a plugin's name and a function that makes its handler. +type Plugin struct { + Name string + Factory func() fiber.Handler +} + +// registry stores every plugin we've registered. +var registry []Plugin + +// RegisterPlugin tags a plugin with a name and a factory so we can use it in the app. +func RegisterPlugin(name string, factory func() fiber.Handler) { + registry = append(registry, Plugin{Name: name, Factory: factory}) +} + +// LoadPlugins returns the handler functions for each plugin. +// If skipCheckpoint is true, it skips the plugin named "checkpoint". +func LoadPlugins(skipCheckpoint bool) []fiber.Handler { + var handlers []fiber.Handler + for _, p := range registry { + if skipCheckpoint && p.Name == "checkpoint" { + continue + } + handlers = append(handlers, p.Factory()) + } + return handlers +} + +// LoadConfig loads the TOML file at middleware/config/[name].toml +// and decodes it into the struct you provide. +func LoadConfig(name string, v interface{}) error { + path := filepath.Join("middleware", "config", name+".toml") + if _, err := toml.DecodeFile(path, v); err != nil { + return err + } + return nil +} diff --git a/checkpoint_service/middleware/plugin.go b/checkpoint_service/middleware/plugin.go new file mode 100644 index 0000000..6305f0b --- /dev/null +++ b/checkpoint_service/middleware/plugin.go @@ -0,0 +1,47 @@ +// Package middleware contains a simple plugin system for Fiber middleware. +// Register plugins by name and factory, then main.go will load them automatically. +package middleware + +import ( + "path/filepath" + + "github.com/BurntSushi/toml" + "github.com/gofiber/fiber/v2" +) + +// Plugin holds a plugin's name and a function that makes its handler. +type Plugin struct { + Name string + Factory func() fiber.Handler +} + +// registry stores every plugin we've registered. +var registry []Plugin + +// RegisterPlugin tags a plugin with a name and a factory so we can use it in the app. +func RegisterPlugin(name string, factory func() fiber.Handler) { + registry = append(registry, Plugin{Name: name, Factory: factory}) +} + +// LoadPlugins returns the handler functions for each plugin. +// If skipCheckpoint is true, it skips the plugin named "checkpoint". 
+func LoadPlugins(skipCheckpoint bool) []fiber.Handler { + var handlers []fiber.Handler + for _, p := range registry { + if skipCheckpoint && p.Name == "checkpoint" { + continue + } + handlers = append(handlers, p.Factory()) + } + return handlers +} + +// LoadConfig loads the TOML file at middleware/config/[name].toml +// and decodes it into the struct you provide. +func LoadConfig(name string, v interface{}) error { + path := filepath.Join("middleware", "config", name+".toml") + if _, err := toml.DecodeFile(path, v); err != nil { + return err + } + return nil +} diff --git a/data/GeoLite2-ASN.mmdb b/data/GeoLite2-ASN.mmdb new file mode 100644 index 0000000..a10fe80 Binary files /dev/null and b/data/GeoLite2-ASN.mmdb differ diff --git a/data/GeoLite2-Country.mmdb b/data/GeoLite2-Country.mmdb new file mode 100644 index 0000000..eb2b22d Binary files /dev/null and b/data/GeoLite2-Country.mmdb differ diff --git a/data/checkpoint_secret.json b/data/checkpoint_secret.json new file mode 100644 index 0000000..0d47275 --- /dev/null +++ b/data/checkpoint_secret.json @@ -0,0 +1 @@ +{"hmac_secret":"HJ9EY5oN0pO71AUN/2faoYnJhZyTr875iWbw6Nl8rc8=","created_at":"2025-04-28T18:28:59.6204724-05:00","updated_at":"2025-04-28T18:33:55.3259956-05:00"} \ No newline at end of file diff --git a/data/checkpoint_tokendb/000001.sst b/data/checkpoint_tokendb/000001.sst new file mode 100644 index 0000000..7c33d89 Binary files /dev/null and b/data/checkpoint_tokendb/000001.sst differ diff --git a/data/checkpoint_tokendb/000002.sst b/data/checkpoint_tokendb/000002.sst new file mode 100644 index 0000000..397b55d Binary files /dev/null and b/data/checkpoint_tokendb/000002.sst differ diff --git a/data/checkpoint_tokendb/000002.vlog b/data/checkpoint_tokendb/000002.vlog new file mode 100644 index 0000000..cd0ab81 Binary files /dev/null and b/data/checkpoint_tokendb/000002.vlog differ diff --git a/data/checkpoint_tokendb/000003.sst b/data/checkpoint_tokendb/000003.sst new file mode 100644 index 0000000..c54d7f8 Binary files /dev/null and b/data/checkpoint_tokendb/000003.sst differ diff --git a/data/checkpoint_tokendb/000003.vlog b/data/checkpoint_tokendb/000003.vlog new file mode 100644 index 0000000..cac470d Binary files /dev/null and b/data/checkpoint_tokendb/000003.vlog differ diff --git a/data/checkpoint_tokendb/DISCARD b/data/checkpoint_tokendb/DISCARD new file mode 100644 index 0000000..9e0f96a Binary files /dev/null and b/data/checkpoint_tokendb/DISCARD differ diff --git a/data/checkpoint_tokendb/KEYREGISTRY b/data/checkpoint_tokendb/KEYREGISTRY new file mode 100644 index 0000000..cfde57e --- /dev/null +++ b/data/checkpoint_tokendb/KEYREGISTRY @@ -0,0 +1 @@ +~t(ðæ4¥ ø2³Dî•æHello Badger \ No newline at end of file diff --git a/data/checkpoint_tokendb/MANIFEST b/data/checkpoint_tokendb/MANIFEST new file mode 100644 index 0000000..ac07759 Binary files /dev/null and b/data/checkpoint_tokendb/MANIFEST differ diff --git a/develop/css/docs.css b/develop/css/docs.css new file mode 100644 index 0000000..de20abf --- /dev/null +++ b/develop/css/docs.css @@ -0,0 +1,466 @@ +/* + * Documentation Pages Specific Styling + * Complements u.css without overriding its styles + */ + +:root { + --background-color: #121212; + --card-gradient-start: #1e1e1e; + --card-gradient-end: #333; + --header-background: #262626; + --text-color: #fff; + --accent-color: #4285F4; + --subtext-color: #ccc; + --code-background: #2c2c2c; + --border-color: #444; + --note-background: rgba(33, 150, 243, 0.15); + --note-border: #2196F3; + --warning-background: 
rgba(255, 193, 7, 0.15); + --warning-border: #FFC107; + --security-background: rgba(29, 39, 30, 0.7); + --security-border: #4CAF50; + --overlay-background: rgba(0, 0, 0, 0.85); +} + +/* Reset */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +body { + background: var(--background-color); + color: var(--text-color); + line-height: 1.6; + padding: 0; + margin: 0; +} + +/* Documentation Layout */ +.container { + max-width: 1000px; + margin: 0 auto; + padding: 1rem 2rem; + padding-top: 0.5rem; +} + +/* Documentation Typography */ +h1, h2, h3, h4 { + color: var(--accent-color); + font-weight: 600; + text-align: center; +} + +h1 { + margin-top: 0.5em; + border-bottom: 2px solid var(--border-color); + padding-bottom: 0.3em; + font-size: 2.2rem; + margin-bottom: 1.5rem; +} + +h2 { + border-bottom: 1px solid var(--border-color); + padding-bottom: 0.3em; + font-size: 1.8rem; + margin-top: 2.5rem; +} + +h3 { + font-size: 1.4rem; + margin-top: 0.7rem; + margin-bottom: 0.7rem; +} + +p, ul, ol { + margin: 1em 0; + font-size: 1.05rem; + line-height: 1.7; +} + +ol { + padding-left: 2.5rem; +} + +ul { + padding-left: 2rem; +} + +li { + margin-bottom: 0.5rem; +} + +ol li { + padding-left: 0.5rem; +} + +ol li ul { + margin-top: 0.5rem; + margin-bottom: 1rem; +} + +a { + color: var(--accent-color); + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +/* Code Formatting */ +code { + font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; + background-color: var(--code-background); + padding: 0.2em 0.4em; + border-radius: 3px; + font-size: 0.9em; + text-wrap: nowrap; +} + +pre { + background-color: var(--code-background); + border-radius: 5px; + padding: 1.2rem; + overflow: auto; + margin: 1.5em 0; + border: 1px solid var(--border-color); +} + +pre code { + background-color: transparent; + padding: 0; + font-size: 0.95rem; + line-height: 1.5; +} + +/* Diagram/Example Sections */ +.diagram, .example { + text-align: center; + margin: 35px 0; + padding: 25px; + background-color: rgba(30, 30, 30, 0.5); + border-radius: 8px; + border: 1px solid var(--border-color); + box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1); +} + +.diagram img, .example img { + max-width: 100%; + height: auto; + cursor: pointer; + transition: opacity 0.2s ease; +} + +.diagram img:hover, .example img:hover { + opacity: 0.85; +} + +.example h3 { + text-align: left; + margin-top: 0; + margin-bottom: 15px; +} + +/* Tables */ +table { + border-collapse: collapse; + width: 100%; + margin: 25px 0; + background-color: rgba(30, 30, 30, 0.5); + border-radius: 5px; + overflow: hidden; +} + +.table-container { + width: 100%; + overflow-x: auto; + margin: 25px 0; + border-radius: 5px; + border: 1px solid var(--border-color); + background-color: rgba(30, 30, 30, 0.5); +} + +.table-container table { + margin: 0; + border: none; +} + +th, td { + border: 1px solid var(--border-color); + padding: 12px 16px; + text-align: left; +} + +th { + background-color: var(--header-background); + font-weight: 600; +} + +tr:nth-child(even) { + background-color: rgba(40, 40, 40, 0.5); +} + +/* Callout Boxes */ +.note, .warning, .security { + padding: 18px 22px; + margin: 1.8rem 0; + border-radius: 5px; + border-left: 4px solid; +} + +.note { + background-color: var(--note-background); + border-color: var(--note-border); +} + +.warning { + background-color: var(--warning-background); + border-color: var(--warning-border); +} + +.security { + background-color: var(--security-background); + border-color: 
var(--security-border); + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15); +} + +.security h3 { + color: #6FCF7C; + margin-top: 0; +} + +.security ul { + margin-bottom: 0; +} + +/* Footer */ +footer { + text-align: center; + padding: 20px 0; + border-top: 1px solid var(--border-color); + color: var(--subtext-color); + font-size: 0.9rem; +} + +/* Table of Contents */ +.toc { + background-color: rgba(30, 30, 30, 0.5); + border-radius: 8px; + padding: 20px; + margin: 20px 0 30px 0; + border: 1px solid var(--border-color); +} + +.toc h2 { + margin-top: 0; + text-align: center; + border-bottom: 1px solid var(--border-color); + padding-bottom: 10px; + margin-bottom: 15px; + color: var(--accent-color); +} + +.toc ul { + list-style-type: none; + padding-left: 0; + margin: 0; + display: flex; + flex-wrap: wrap; + gap: 10px; + justify-content: center; +} + +.toc li { + margin-bottom: 8px; + flex: 0 0 auto; +} + +.toc a { + display: block; + padding: 5px 15px; + border-radius: 4px; + transition: background-color 0.2s ease; + background-color: rgba(20, 20, 20, 0.5); + white-space: nowrap; +} + +.toc a:hover { + background-color: rgba(50, 50, 50, 0.5); + text-decoration: none; +} + +/* Code Examples */ +.code-example { + position: relative; +} + +.code-label { + position: absolute; + top: -12px; + right: 10px; + background-color: var(--accent-color); + color: white; + font-size: 0.8rem; + padding: 2px 8px; + border-radius: 4px; +} + +/* Button Group Styles (from lazy-video.html) */ +.lv-btn-group { + display: flex; + justify-content: space-between; + align-items: center; + gap: 18px; + margin: 36px 0 10px 0; +} + +.lv-btn { + display: inline-flex; + align-items: center; + padding: 10px 24px; + border-radius: 8px; + font-weight: 700; + font-size: 1.05rem; + text-decoration: none; + transition: background 0.18s, color 0.18s, border 0.18s; + cursor: pointer; +} + +.lv-btn-primary { + border: none; + background: var(--accent-color, #4285F4); + color: #fff; +} + +.lv-btn-primary:hover { + background: #2563eb; /* Slightly darker blue for hover */ +} + +.lv-btn-outline { + background: transparent; + color: #fff; + border: 1.5px solid var(--accent-color, #4285F4); +} + +.lv-btn-outline:hover { + background: rgba(66,133,244,0.08); /* Subtle background on hover */ +} + +.lv-size-info { + font-size: 1rem; + color: #888; + background: rgba(0,0,0,0.05); + border-radius: 4px; + padding: 7px 18px; +} + +.lv-size-info span { + color: var(--accent-color, #4285F4); + font-weight: 600; +} + +/* Feature Cards */ +.feature-card { + background-color: rgba(40, 40, 40, 0.5); + border-radius: 8px; + padding: 20px; + border: 1px solid var(--border-color); + margin-bottom: 15px; +} + +.feature-card h3 { + color: var(--accent-color); + margin-top: 0; + text-align: left; + border-bottom: 1px solid rgba(255, 255, 255, 0.1); + padding-bottom: 10px; +} + +/* Section Styling */ +.section { + scroll-margin-top: 20px; +} + +/* Version Note in Footer */ +.doc-version-note { + text-align: center; + color: #888; + font-size: 0.98rem; + margin-bottom: 10px; +} + +/* Lazy Video Component Styling */ +lazy-video { + display: block; + margin: 30px auto; + max-width: 600px; +} + +.url-patterns { + list-style: none; + padding: 0; + margin: 0; +} + +.url-patterns li { + padding: 3px 0; +} + +.url-patterns code { + background: rgba(0,0,0,0.05); + padding: 2px 5px; + border-radius: 3px; + font-size: 0.9em; + font-family: monospace; +} + +/* Responsive Adjustments */ +@media (max-width: 768px) { + .container { + padding: 1rem; + } + + h1 { + font-size: 
1.8rem; + } + + h2 { + font-size: 1.5rem; + } + + h3 { + font-size: 1.2rem; + } + + .toc ul { + flex-direction: column; + align-items: stretch; + } + + .toc a { + text-align: center; + white-space: normal; + } + + /* Mobile styles for button group */ + .lv-btn-group { + flex-direction: column !important; + gap: 14px; + margin: 30px 0; + align-items: stretch; + width: 100%; + } + + .lv-btn { + justify-content: center; + text-align: center; + width: 100%; + } + + .lv-size-info { + text-align: center; + width: 100%; + } +} \ No newline at end of file diff --git a/develop/css/lightbox.css b/develop/css/lightbox.css new file mode 100644 index 0000000..82f97f8 --- /dev/null +++ b/develop/css/lightbox.css @@ -0,0 +1,144 @@ +.lightbox { + display: none; + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.85); + z-index: 1000; + justify-content: center; + align-items: center; + padding: 0; +} + +.lightbox.active { + display: flex; +} + +.lightbox-content { + position: relative; + width: 90%; + max-width: 1200px; + height: 85vh; + background-color: #121212; + padding: 20px; + border-radius: 10px; + box-shadow: 0 5px 30px rgba(0, 0, 0, 0.3); + display: flex; + flex-direction: column; + overflow: hidden; +} + +.lightbox-img-container { + flex: 1; + overflow: hidden; + position: relative; + display: flex; + justify-content: center; + align-items: center; +} + +.lightbox-img { + display: block; + max-width: 100%; + max-height: 100%; + object-fit: contain; + cursor: grab; + border-radius: 5px; + transform-origin: center center; + user-select: none; + will-change: transform; +} + +.lightbox-img.grabbing { + cursor: grabbing; +} + +.lightbox-close { + position: absolute; + top: 10px; + right: 10px; + width: 32px; + height: 32px; + background-color: #9B59B6; + color: white; + border-radius: 50%; + text-align: center; + line-height: 32px; + cursor: pointer; + font-weight: bold; + font-size: 18px; + z-index: 1010; + box-shadow: 0 2px 5px rgba(0, 0, 0, 0.3); +} + +.lightbox-caption { + margin-top: 15px; + text-align: center; + color: #ccc; + font-size: 0.9rem; +} + +.zoom-controls { + display: flex; + align-items: center; + justify-content: center; + margin-top: 15px; + padding: 10px 0; + border-top: 1px solid #444; +} + +.zoom-label { + margin-right: 10px; + font-size: 0.9rem; + color: #ccc; +} + +.zoom-slider { + -webkit-appearance: none; + width: 70%; + height: 6px; + border-radius: 3px; + background: #444; + outline: none; +} + +.zoom-slider::-webkit-slider-thumb { + -webkit-appearance: none; + appearance: none; + width: 18px; + height: 18px; + border-radius: 50%; + background: #9B59B6; + cursor: pointer; +} + +.zoom-slider::-moz-range-thumb { + width: 18px; + height: 18px; + border-radius: 50%; + background: #9B59B6; + cursor: pointer; + border: none; +} + +.zoom-value { + margin-left: 10px; + font-size: 0.9rem; + min-width: 40px; + color: #ccc; +} + +@media (max-width: 768px), (max-width: 1024px) and (orientation: landscape) { + .lightbox-content { + width: 100%; + height: 100%; + padding: 15px; + border-radius: 0; + } + + .zoom-controls { + display: flex; + } +} \ No newline at end of file diff --git a/develop/css/u.css b/develop/css/u.css new file mode 100644 index 0000000..1854a5f --- /dev/null +++ b/develop/css/u.css @@ -0,0 +1,120 @@ +/* Reset */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +/* Common Font Declarations */ +@font-face { + font-family: Poppins; + src: url(/webfonts/Poppins-Regular.woff2) format("woff2"); + 
font-weight: 400; + font-style: normal; + font-display: swap; +} + +@font-face { + font-family: Poppins; + src: url(/webfonts/Poppins-SemiBold.woff2) format("woff2"); + font-weight: 600; + font-style: normal; + font-display: swap; +} + +body { + font-family: Poppins, sans-serif; +} + +/* External Link Indicator */ +a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"])::after, +a[target="_blank"]:not([href^="mailto:"]):not([href^="tel:"])::after { + content: ""; + display: inline-block; + width: 1.1em; + height: 1.1em; + margin-left: .25em; + margin-bottom: .25em; + vertical-align: middle; + background-color: currentColor; + -webkit-mask: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgZmlsbD0ibm9uZSIgc3Ryb2tlPSJjdXJyZW50Q29sb3IiIHN0cm9rZS13aWR0aD0iMS43NSIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIj48cGF0aCBkPSJNMTIgNkg2YTIgMiAwIDAgMC0yIDJ2MTBhMiAyIDAgMCAwIDIgMmgxMGEyIDIgMCAwIDAgMi0ydi02bS03IDFsOS05bS01IDBoNXY1Ii8+PC9zdmc+") no-repeat center; + mask: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgZmlsbD0ibm9uZSIgc3Ryb2tlPSJjdXJyZW50Q29sb3IiIHN0cm9rZS13aWR0aD0iMS43NSIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIj48cGF0aCBkPSJNMTIgNkg2YTIgMiAwIDAgMC0yIDJ2MTBhMiAyIDAgMCAwIDIgMmgxMGEyIDIgMCAwIDAgMi0ydi02bS03IDFsOS05bS01IDBoNXY1Ii8+PC9zdmc+") no-repeat center; + -webkit-mask-size: contain; + mask-size: contain; +} + +@media (hover:hover) and (pointer:fine) { + a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"]), + a[target="_blank"]:not([href^="mailto:"]):not([href^="tel:"]) { + position: relative; + } + + a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"])::before, + a[target="_blank"]:not([href^="mailto:"]):not([href^="tel:"])::before { + content: "Opens in new tab"; + position: absolute; + top: 50%; + left: 100%; + transform: translateY(-50%); + margin-left: 5px; + background-color: rgba(25, 25, 25, .9); + color: #fff; + padding: 5px 8px; + border-radius: 4px; + font-size: 12px; + white-space: nowrap; + opacity: 0; + pointer-events: none; + transition: opacity .18s ease-in-out; + z-index: 10; + } + + a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"]):hover::before, + a[target="_blank"]:not([href^="mailto:"]):not([href^="tel:"]):hover::before { + transition-delay: 60ms; + opacity: 1; + } +} + +/* Scrollbar Styling */ +::-webkit-scrollbar { + width: 4px; + height: 4px; +} + +::-webkit-scrollbar-track { + background: #2d2d2d; + border-radius: 5px; +} + +::-webkit-scrollbar-thumb { + background: #4d9cfa; + border-radius: 5px; +} + +::-webkit-scrollbar-thumb:hover { + background: #3971a3; +} + +/* Base HTML behaviors */ +html { + scroll-behavior: smooth; +} + +@view-transition { + navigation: auto; +} + +::view-transition-old(root), +::view-transition-new(root) { + animation-duration: 0.44s; /* Increase duration to 0.44 seconds */ +} + +/* Disable view transitions for users with reduced motion preference enabled */ +@media (prefers-reduced-motion) { + ::view-transition-group(*), + ::view-transition-old(*), + ::view-transition-new(*) { + animation: none !important; + } + } \ No newline at end of file diff --git a/develop/html/ai-san.html b/develop/html/ai-san.html new file mode 100644 index 0000000..f404b81 --- /dev/null +++ b/develop/html/ai-san.html @@ -0,0 +1,224 @@ + + + + + + + Text Cleaner - caileb.com + + + + + + + + + + + +
+
+

Text Cleaner

+

Convert formatted text to clean plain text

+
+ +
+ + + +
+ + +
+ + + +
+
+ + + + diff --git a/develop/html/checkpoint.html b/develop/html/checkpoint.html new file mode 100644 index 0000000..1486a0d --- /dev/null +++ b/develop/html/checkpoint.html @@ -0,0 +1,743 @@ + + + + + + + Checkpoint Documentation + + + + + + + + + + + + + + + + + +
+
+

Disclaimer: Some internal fields and implementation details are omitted here for security reasons.

+
+

Checkpoint Protection System

+ + + +
+

Overview

+

Checkpoint Protection asks visitors to solve a quick puzzle before letting them through, cutting down on automated traffic while keeping the experience smooth for real users.

+
  • No account or personal data needed
  • Privacy-focused and lightweight
  • Blocks bots and scripts effectively
  • Works seamlessly in modern browsers
+
+ +
+

How It Works

+

When you navigate to a protected page, the middleware checks for a valid token cookie (__Host-checkpoint_token).

+
  1. If the token is present, the server verifies its signature and confirms it's bound to your device.
  2. Missing or invalid tokens trigger an interstitial page with a request ID.
  3. The browser fetches challenge data from /api/pow/challenge?id=REQUEST_ID. This payload includes a random challenge, salt, difficulty, and hidden parameters.
  4. The client runs two proofs in parallel:
     • Proof of Work: finds a nonce such that SHA‑256(challenge + salt + nonce) meets the difficulty.
     • Proof of Space: allocates and hashes large memory buffers to confirm resource availability.
  5. Results are sent to /api/pow/verify along with the request ID.
  6. On success, the server issues a signed token (valid for 24h) and sets it as a cookie for future visits.
+ +
+

Checkpoint Protection Flow

+ Checkpoint Protection Flow Diagram +
+
+ +
+

Challenge Generation

+

+ Challenges are generated using cryptographically secure random bytes combined with a salt for additional entropy: +

+
+ Go +
func generateChallenge() (string, string) {
+    // Generate a random challenge
+    randomBytes := make([]byte, 16)
+    _, err := cryptorand.Read(randomBytes)
+    if err != nil {
+        log.Fatalf("CRITICAL: Failed to generate secure random challenge: %v", err)
+    }
+    
+    // Generate a random salt for additional entropy
+    saltBytes := make([]byte, saltLength)
+    _, err = cryptorand.Read(saltBytes)
+    if err != nil {
+        log.Fatalf("CRITICAL: Failed to generate secure random salt: %v", err)
+    }
+    
+    return hex.EncodeToString(randomBytes), hex.EncodeToString(saltBytes)
+}
+
+
+

+ Security Note: The system uses Go's crypto/rand package for secure random number generation, ensuring challenges cannot be predicted even by sophisticated attackers. +

+
+ +

Challenge Parameters

+

+ Challenges are stored with a unique request ID and include parameters for verification: +

+
+ Go +
type ChallengeParams struct {
+    Challenge  string    `json:"challenge"` // Base64 encoded
+    Salt       string    `json:"salt"`      // Base64 encoded
+    Difficulty int       `json:"difficulty"`
+    ExpiresAt  time.Time `json:"expires_at"`
+    ClientIP   string    `json:"-"`
+    PoSSeed    string    `json:"pos_seed"` // Hex encoded
+}
+
+ +

+ When a client requests a challenge, the parameters are delivered in an obfuscated format to prevent automated analysis: +

+
+ JSON +
{
+    "a": "base64-encoded-challenge",
+    "b": "base64-encoded-salt",
+    "c": 4,
+    "d": "hex-encoded-pos-seed"
+}
+
+
+ +
+

Proof Verification

+

+ The system performs a two-step verification process: +

+ +

1. Computational Proof (Proof of Work)

+

+ Verification checks that the hash of the challenge, salt, and nonce combination has the required number of leading zeros: +

+
+ Go +
func verifyProofOfWork(challenge, salt, nonce string, difficulty int) bool {
+    input := challenge + salt + nonce
+    hash := calculateHash(input)
+    
+    // Check if the hash has the required number of leading zeros
+    prefix := strings.Repeat("0", difficulty)
+    return strings.HasPrefix(hash, prefix)
+}
+
+func calculateHash(input string) string {
+    hash := sha256.Sum256([]byte(input))
+    return hex.EncodeToString(hash[:])
+}
+
+ +

2. Memory Proof (Proof of Space)

+

+ In addition to the computational work, clients must prove they can allocate and manipulate significant memory resources: +

+
  • Clients allocate between 48 MB and 160 MB of memory (size determined by the PoS seed)
  • The client divides the memory into 4-8 chunks and performs deterministic filling operations
  • The process is run three times, hashing the entire buffer each time
  • The resulting hashes and execution times are submitted for verification
+

+ The server verifies: +

+
  • All three hashes are identical (proving deterministic execution)
  • Each hash is 64 characters (valid SHA-256)
  • Execution times are consistent (within 20% variation; a sketch of this check follows below)
+
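The exact tolerance is configurable (PoSTimeConsistencyRatio in checkpoint.toml). Purely as an illustration of the checks listed above, and not the service's actual checkPoSTimes implementation, a validation helper could look like this:

Go
// Sketch only: enforce the documented PoS checks — three identical SHA-256
// hex digests and execution times within a bounded slow/fast ratio.
// Function and parameter names here are illustrative assumptions.
package main

import (
	"errors"
	"fmt"
)

func validatePoSResult(hashes []string, times []int64, maxRatio float64) error {
	if len(hashes) != 3 || len(times) != 3 {
		return errors.New("expected exactly three PoS runs")
	}
	for _, h := range hashes {
		if len(h) != 64 { // hex-encoded SHA-256
			return errors.New("invalid PoS hash length")
		}
		if h != hashes[0] {
			return errors.New("PoS hashes do not match across runs")
		}
	}
	minT, maxT := times[0], times[0]
	for _, t := range times[1:] {
		if t < minT {
			minT = t
		}
		if t > maxT {
			maxT = t
		}
	}
	if minT <= 0 {
		return errors.New("non-positive PoS timing")
	}
	if float64(maxT)/float64(minT) > maxRatio {
		return fmt.Errorf("PoS timings too inconsistent: %d-%d ms", minT, maxT)
	}
	return nil
}

func main() {
	h := "0f343b0931126a20f133d67c2b018a3b1e92d02e6a4a1f6f5e1f4ed02cbf5b40"
	err := validatePoSResult([]string{h, h, h}, []int64{900, 1000, 1100}, 1.35)
	fmt.Println("valid:", err == nil)
}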
+

+ The dual-verification approach makes the system resistant to specialized hardware acceleration. While the computational proof can be solved by ASICs or GPUs, the memory proof is specifically designed to be inefficient on such hardware. +

+
+
+ +
+

Token Structure

+

+ Checkpoint tokens contain various fields for security and binding: +

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Field        | Description                    | Purpose
Nonce        | The solution to the challenge  | Verification proof
ExpiresAt    | Token expiration timestamp     | Enforces time-limited access (24 hours)
ClientIP     | Hashed full client IP          | Device binding (first 8 bytes of SHA-256)
UserAgent    | Hashed user agent              | Browser binding
BrowserHint  | Derived from Sec-CH-UA headers | Additional client identity verification
Entropy      | Random data                    | Prevents token prediction/correlation
Created      | Token creation timestamp       | Token age tracking
LastVerified | Last verification timestamp    | Token usage tracking
Signature    | HMAC signature                 | Prevents token forgery
TokenFormat  | Version number                 | Backward compatibility support
+
+
+ Go +
type CheckpointToken struct {
+    Nonce        string    `json:"g"` // Nonce
+    ExpiresAt    time.Time `json:"exp"`
+    ClientIP     string    `json:"cip,omitempty"`
+    UserAgent    string    `json:"ua,omitempty"`
+    BrowserHint  string    `json:"bh,omitempty"`
+    Entropy      string    `json:"ent,omitempty"`
+    Created      time.Time `json:"crt"`
+    LastVerified time.Time `json:"lvf,omitempty"`
+    Signature    string    `json:"sig,omitempty"`
+    TokenFormat  int       `json:"fmt"`
+}
+
+ +

Token Security

+

+ Every token is cryptographically signed using HMAC-SHA256 with a server-side secret: +

+
+ Go +
func computeTokenSignature(token CheckpointToken, tokenBytes []byte) string {
+    tokenCopy := token
+    tokenCopy.Signature = "" // Ensure signature field is empty for signing
+    tokenToSign, _ := json.Marshal(tokenCopy)
+    h := hmac.New(sha256.New, hmacSecret)
+    h.Write(tokenToSign)
+    return hex.EncodeToString(h.Sum(nil))
+}
+
+func verifyTokenSignature(token CheckpointToken, tokenBytes []byte) bool {
+    if token.Signature == "" {
+        return false
+    }
+    expectedSignature := computeTokenSignature(token, tokenBytes)
+    return hmac.Equal([]byte(token.Signature), []byte(expectedSignature))
+}
+
+ +

Token Storage

+

+ Successfully verified tokens are stored in a persistent store for faster validation: +

+
+ Go +
// TokenStore manages persistent storage of verified tokens
+type TokenStore struct {
+    VerifiedTokens map[string]time.Time `json:"verified_tokens"`
+    Mutex          sync.RWMutex         `json:"-"`
+    FilePath       string               `json:"-"`
+}
+
+// Each token is identified by a unique hash
+func calculateTokenHash(token CheckpointToken) string {
+    data := fmt.Sprintf("%s:%s:%d",
+        token.Nonce,              // Use nonce as part of the key
+        token.Entropy,            // Use entropy as part of the key
+        token.Created.UnixNano()) // Use creation time
+    hash := sha256.Sum256([]byte(data))
+    return hex.EncodeToString(hash[:])
+}
+
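While the struct above describes the storage model, the data directory committed in this diff (./data/checkpoint_tokendb with .sst/.vlog files) is a BadgerDB store, and validateToken looks tokens up through tokenStore.lookupTokenData. A rough sketch of what a Badger-backed store and lookup could look like; the key layout, value format, and TTL choice here are assumptions, not the service's actual schema:

Go
// Illustrative BadgerDB usage only; the service's real token schema is not
// documented here. Only the Badger data directory path comes from the diff.
package main

import (
	"log"
	"time"

	badger "github.com/dgraph-io/badger/v4"
)

func main() {
	db, err := badger.Open(badger.DefaultOptions("./data/checkpoint_tokendb"))
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	tokenHash := "ab12..." // output of calculateTokenHash (placeholder)

	// Persist the verification record with a TTL matching the token lifetime.
	if err := db.Update(func(txn *badger.Txn) error {
		e := badger.NewEntry([]byte("tok:"+tokenHash), []byte(time.Now().Format(time.RFC3339))).
			WithTTL(24 * time.Hour)
		return txn.SetEntry(e)
	}); err != nil {
		log.Fatal(err)
	}

	// Look the record up again during validation.
	if err := db.View(func(txn *badger.Txn) error {
		item, err := txn.Get([]byte("tok:" + tokenHash))
		if err == badger.ErrKeyNotFound {
			log.Println("token not found or expired")
			return nil
		}
		if err != nil {
			return err
		}
		return item.Value(func(v []byte) error {
			log.Printf("token last verified at %s", v)
			return nil
		})
	}); err != nil {
		log.Fatal(err)
	}
}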
+
+ +
+

Security Features

+ +
+

Anti-Forgery Protections

+
  • HMAC Signatures: Each token is cryptographically signed using HMAC-SHA256 to prevent tampering
  • Token Binding: Tokens are bound to client properties (hashed full IP, hashed user agent, browser client hints); see the sketch below
  • Random Entropy: Each token contains unique entropy to prevent token prediction or correlation
  • Format Versioning: Tokens include a format version to support evolving security requirements
+
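The binding values are truncated digests rather than raw identifiers. A simplified sketch of the derivation, mirroring the middleware's getFullClientIP and hashUserAgent helpers but taking plain strings instead of the Fiber request context:

Go
// Bindings are the first 8 bytes of a SHA-256 digest, hex encoded, so raw IPs
// and User-Agent strings never need to be stored alongside the token.
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func hashClientIP(ip string) string {
	if ip == "" {
		return "unknown"
	}
	sum := sha256.Sum256([]byte(ip))
	return hex.EncodeToString(sum[:8])
}

func hashUserAgent(ua string) string {
	if ua == "" {
		return ""
	}
	sum := sha256.Sum256([]byte(ua))
	return hex.EncodeToString(sum[:8])
}

func main() {
	stored := hashClientIP("203.0.113.7")  // bound at token issuance
	current := hashClientIP("203.0.113.7") // recomputed on each request
	fmt.Println("IP binding matches:", stored == current)
	fmt.Println("UA binding:", hashUserAgent("Mozilla/5.0"))
}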
+ +
+

Replay Prevention

+
  • Nonce Tracking: Used nonces are tracked to prevent replay attacks (see the sketch below)
  • Expiration Times: All tokens and challenges have expiration times
  • Token Cleanup: Expired tokens are automatically purged from the system
  • Challenge Invalidation: Challenges are immediately invalidated after successful verification
+
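Nonce tracking follows the usedNonces pattern visible in the verification handler: a solution is keyed by nonce plus challenge and rejected if it has been seen before. A minimal sketch, with the 24-hour cleanup window taken from maxNonceAge:

Go
// Minimal replay guard with a sync.Map, along the lines of the middleware's
// usedNonces store. Keys combine the nonce with the decoded challenge.
package main

import (
	"fmt"
	"sync"
	"time"
)

var usedNonces sync.Map // key: nonce + challenge, value: time first seen

// markNonceUsed returns false if this nonce/challenge pair was already spent.
func markNonceUsed(nonce, challenge string) bool {
	key := nonce + challenge
	if _, exists := usedNonces.Load(key); exists {
		return false // replay attempt
	}
	usedNonces.Store(key, time.Now())
	return true
}

// cleanupNonces drops entries older than maxAge (the service uses 24 hours).
func cleanupNonces(maxAge time.Duration) {
	cutoff := time.Now().Add(-maxAge)
	usedNonces.Range(func(k, v interface{}) bool {
		if t, ok := v.(time.Time); ok && t.Before(cutoff) {
			usedNonces.Delete(k)
		}
		return true
	})
}

func main() {
	fmt.Println(markNonceUsed("42", "abc")) // true: first use is accepted
	fmt.Println(markNonceUsed("42", "abc")) // false: replay is rejected
	cleanupNonces(24 * time.Hour)
}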
+ +
+

Rate Limiting

+
  • IP-Based Limits: Maximum verification attempts per hour (default: 10); an illustrative limiter sketch follows below
  • Request ID Binding: Challenge parameters are bound to the requesting IP
  • Challenge Expiration: Challenges expire after 5 minutes to prevent stockpiling
+
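The limiter itself is not shown on this page. Purely as an illustration, a fixed-window counter per IP capped at maxAttemptsPerHour might look like this (type and method names are assumptions):

Go
// Illustrative fixed-window rate limiter; not the service's actual implementation.
package main

import (
	"fmt"
	"sync"
	"time"
)

type attemptWindow struct {
	count       int
	windowStart time.Time
}

type RateLimiter struct {
	mu       sync.Mutex
	attempts map[string]*attemptWindow
	limit    int
}

func NewRateLimiter(limit int) *RateLimiter {
	return &RateLimiter{attempts: make(map[string]*attemptWindow), limit: limit}
}

// Allow reports whether ip may make another verification attempt this hour.
func (r *RateLimiter) Allow(ip string) bool {
	r.mu.Lock()
	defer r.mu.Unlock()
	w, ok := r.attempts[ip]
	if !ok || time.Since(w.windowStart) > time.Hour {
		r.attempts[ip] = &attemptWindow{count: 1, windowStart: time.Now()}
		return true
	}
	if w.count >= r.limit {
		return false
	}
	w.count++
	return true
}

func main() {
	rl := NewRateLimiter(10) // matches the documented maxAttemptsPerHour default
	fmt.Println(rl.Allow("198.51.100.1"))
}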
+ +
+

Advanced Verification

+
  • Proof of Space: Memory-intensive operations prevent GPU/ASIC acceleration
  • Browser Fingerprinting: Secure client-hint headers verify legitimate browsers
  • Challenge Obfuscation: Challenges are encoded and structured to resist automated analysis
  • Persistent Secret: The system uses a persistent HMAC secret stored securely on disk
+
+
+ +
+

Configuration Options

+

+ The Checkpoint system can be configured through these constants: +

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Constant            | Description                                  | Default
Difficulty          | Number of leading zeros required in the hash | 4
TokenExpiration     | Duration for which a token is valid          | 24 hours
CookieName          | The cookie name storing the issued token     | __Host-checkpoint_token
maxAttemptsPerHour  | Rate limit for verification attempts         | 10
saltLength          | Length of the random salt in bytes           | 16
maxNonceAge         | Time before nonces are cleaned up            | 24 hours
challengeExpiration | Time before a challenge expires              | 5 minutes
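For scale, each additional leading hex zero multiplies the expected client work by 16: difficulty 4 corresponds to roughly 16^4 = 65,536 hash attempts on average, while difficulty 5 already pushes that to about one million.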
+
+
+

+ Warning: Increasing the Difficulty significantly increases the computational work required by clients. + A value that's too high may result in poor user experience, especially on mobile devices. +

+
+
+ Go +
const (
+    // Difficulty defines the number of leading zeros required in hash
+    Difficulty = 4
+    // TokenExpiration sets token validity period
+    TokenExpiration = 24 * time.Hour
+    // CookieName defines the cookie name for tokens
+    CookieName = "__Host-checkpoint_token"
+    // Max verification attempts per IP per hour
+    maxAttemptsPerHour = 10
+    // Salt length for additional entropy
+    saltLength = 16
+)
+
+
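The checkpoint.toml shipped alongside the middleware mirrors these constants (Difficulty, TokenExpiration, CookieName, SaltLength, and so on) and can be decoded through the plugin package's LoadConfig helper. A sketch of wiring that up; the settings struct and the import path are assumptions, and only LoadConfig plus the TOML keys come from the repository:

Go
// Load checkpoint.toml through middleware.LoadConfig. The CheckpointSettings
// struct is illustrative; durations are stored as strings (e.g. "24h") and
// parsed explicitly with time.ParseDuration.
package main

import (
	"log"
	"time"

	"example.com/site/middleware" // placeholder import path
)

type CheckpointSettings struct {
	Difficulty         int
	TokenExpiration    string
	CookieName         string
	CookieDomain       string
	SaltLength         int
	MaxAttemptsPerHour int
}

func main() {
	var cfg CheckpointSettings
	if err := middleware.LoadConfig("checkpoint", &cfg); err != nil {
		log.Fatalf("failed to load checkpoint.toml: %v", err)
	}
	ttl, err := time.ParseDuration(cfg.TokenExpiration)
	if err != nil {
		log.Fatalf("invalid TokenExpiration: %v", err)
	}
	log.Printf("difficulty=%d cookie=%s token ttl=%v", cfg.Difficulty, cfg.CookieName, ttl)
}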
+ +
+

Middleware Integration

+

+ The Checkpoint system provides a middleware handler that automatically protects HTML routes while bypassing API routes and static assets: +

+ +

HTMLCheckpointMiddleware

+

+ This middleware is optimized for HTML routes, with smart content-type detection and automatic exclusions for static assets and API endpoints. +

+
+ Go +
// HTMLCheckpointMiddleware handles challenges specifically for HTML pages
+func HTMLCheckpointMiddleware() fiber.Handler {
+    return func(c *fiber.Ctx) error {
+        // Allow certain paths to bypass verification
+        path := c.Path()
+        if path == "/video-player" || path == "/video-player.html" || strings.HasPrefix(path, "/videos/") {
+            return c.Next()
+        }
+        if strings.HasPrefix(path, "/api") {
+            return c.Next()
+        }
+        if path == "/favicon.ico" || (strings.Contains(path, ".") && !strings.HasSuffix(path, ".html")) {
+            return c.Next()
+        }
+        
+        // Only apply to HTML routes
+        isHtmlRoute := strings.HasSuffix(path, ".html") || path == "/" ||
+            (len(path) > 0 && !strings.Contains(path, "."))
+        if !isHtmlRoute {
+            return c.Next()
+        }
+
+        token := c.Cookies(CookieName)
+        if token != "" {
+            valid, err := validateToken(token, c)
+            if err == nil && valid {
+                return c.Next()
+            }
+        }
+        return serveInterstitial(c)
+    }
+}
+
+ +

Usage in Application

+
+ Go +
// Enable HTML checkpoint protection for all routes
+app.Use(middleware.HTMLCheckpointMiddleware())
+
+// API group with verification endpoints
+api := app.Group("/api")
+
+// Verification endpoints
+api.Post("/pow/verify", middleware.VerifyCheckpointHandler)
+api.Get("/pow/challenge", middleware.GetCheckpointChallengeHandler)
+
+// Example protected API endpoint
+api.Get("/protected", func(c *fiber.Ctx) error {
+    // Access is already verified by cookie presence
+    return c.JSON(fiber.Map{
+        "message": "You have accessed the protected endpoint!",
+        "time":    time.Now(),
+    })
+})
+
+
+ +
+

Client-Side Implementation

+

+ The client-side implementation is handled by the interstitial page and its associated JavaScript: +

+
  1. Client attempts to access a protected resource
  2. Server serves the interstitial page with a request ID
  3. JavaScript fetches challenge parameters from /api/pow/challenge?id=REQUEST_ID
  4. Two verification stages run in parallel:
     • Computational proof: Using Web Workers to find a valid nonce
     • Memory proof: Allocating and manipulating memory buffers
  5. Results are submitted to the /api/pow/verify endpoint
  6. On success, the server sets a cookie and redirects to the original URL
+ +

Web Worker Implementation

+

+ Computational proof is handled by Web Workers to avoid freezing the UI: +

+
+ JavaScript +
function workerFunction() {
+    self.onmessage = function(e) {
+        const { type, data } = e.data;
+
+        if (type === 'pow') {
+            // PoW calculation
+            const { challenge, salt, startNonce, endNonce, target, batchId } = data;
+            let count = 0;
+            let solution = null;
+            
+            processNextNonce(startNonce);
+    
+            function processNextNonce(nonce) {
+                const input = String(challenge) + String(salt) + nonce.toString();
+                const msgBuffer = new TextEncoder().encode(input);
+                
+                crypto.subtle.digest('SHA-256', msgBuffer)
+                    .then(hashBuffer => {
+                        const hashArray = Array.from(new Uint8Array(hashBuffer));
+                        const result = hashArray.map(b => 
+                            b.toString(16).padStart(2, '0')).join('');
+                        
+                        count++;
+            
+                        if (result.startsWith(target)) {
+                            solution = { nonce: nonce.toString(), found: true };
+                            self.postMessage({
+                                type: 'pow_result',
+                                solution: solution,
+                                count: count,
+                                batchId: batchId
+                            });
+                            return;
+                        }
+                        
+                        if (nonce < endNonce && !solution) {
+                            setTimeout(() => processNextNonce(nonce + 1), 0);
+                        } else if (!solution) {
+                            self.postMessage({
+                                type: 'pow_result',
+                                solution: null,
+                                count: count,
+                                batchId: batchId
+                            });
+                        }
+                    });
+            }
+        }
+    };
+}
+
+ +

Memory Proof Implementation

+

+ The memory proof allocates and manipulates large buffers to verify client capabilities: +

+
+ JavaScript +
async function runProofOfSpace(seedHex, isDecoy) {
+    // Deterministic memory size (48MB to 160MB) based on seed
+    const minMB = 48, maxMB = 160;
+    let seedInt = parseInt(seedHex.slice(0, 8), 16);
+    const CHUNK_MB = minMB + (seedInt % (maxMB - minMB + 1));
+    const CHUNK_SIZE = CHUNK_MB * 1024 * 1024;
+    
+    // Chunk memory for controlled allocation
+    const chunkCount = 4 + (seedInt % 5); // 4-8 chunks
+    const chunkSize = Math.floor(CHUNK_SIZE / chunkCount);
+    
+    // Run the proof multiple times to verify consistency
+    const runs = 3;
+    const hashes = [];
+    const times = [];
+    
+    // For each run...
+    for (let r = 0; r < runs; r++) {
+        // Generate deterministic chunk order
+        let prng = seededPRNG(seedHex + r.toString(16));
+        let order = Array.from({length: chunkCount}, (_, i) => i);
+        for (let i = order.length - 1; i > 0; i--) {
+            const j = prng() % (i + 1);
+            [order[i], order[j]] = [order[j], order[i]];
+        }
+        
+        // Allocate and fill memory buffer
+        let t0 = performance.now();
+        let buf = new ArrayBuffer(CHUNK_SIZE);
+        let view = new Uint8Array(buf);
+        
+        // Fill buffer with deterministic pattern
+        for (let c = 0; c < chunkCount; c++) {
+            let chunkIdx = order[c];
+            let start = chunkIdx * chunkSize;
+            let end = (chunkIdx + 1) * chunkSize;
+            for (let i = start; i < end; i += 4096) {
+                view[i] = prng() & 0xFF;
+            }
+        }
+        
+        // Hash the entire buffer
+        let hashBuf = await crypto.subtle.digest('SHA-256', view);
+        let t2 = performance.now();
+        
+        // Convert hash to hex string
+        let hashHex = Array.from(new Uint8Array(hashBuf))
+            .map(b => b.toString(16).padStart(2, '0')).join('');
+        
+        // Store results
+        hashes.push(hashHex);
+        times.push(Math.round(t2 - t0));
+        
+        // Clean up
+        buf = null; view = null;
+    }
+    
+    return { hashes, times };
+}
+
+ +
+

The client-side implementation is designed to be difficult to reverse-engineer. The obfuscated API responses, minimal logging, and anti-debugging measures are intended to hinder automated circumvention.

+
+
+ +
+

API Endpoints

+

+ The Checkpoint system exposes two primary API endpoints: +

+ +

1. Challenge Endpoint

+

+ Retrieves challenge parameters for a verification request: +

+
+ HTTP +
GET /api/pow/challenge?id=REQUEST_ID
+
+Response:
+{
+    "a": "base64-encoded-challenge",
+    "b": "base64-encoded-salt",
+    "c": 4,
+    "d": "hex-encoded-pos-seed"
+}
+
+ +

2. Verification Endpoint

+

+ Accepts proof solutions and issues tokens when valid: +

+
+ HTTP +
POST /api/pow/verify
+
+Request:
+{
+    "request_id": "unique-request-id",
+    "g": "nonce-solution",
+    "h": ["pos-hash1", "pos-hash2", "pos-hash3"],
+    "i": [time1, time2, time3]
+}
+
+Response:
+{
+    "token": "base64-encoded-token",
+    "expires_at": "2025-04-17T18:57:48Z"
+}
+
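Putting the two endpoints together, a headless client fetches the challenge, brute-forces a nonce, and posts the solution. The sketch below covers only the Proof-of-Work half and assumes the request ID has already been obtained from the interstitial page; the host name is a placeholder:

Go
// Rough end-to-end client sketch for the two endpoints above (PoW only; the
// Proof-of-Space half is omitted). Field names follow the obfuscated JSON
// shown in this section.
package main

import (
	"bytes"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"net/http"
	"strconv"
	"strings"
)

type challengeResp struct {
	Challenge  string `json:"a"` // base64
	Salt       string `json:"b"` // base64
	Difficulty int    `json:"c"`
}

func solve(challenge, salt string, difficulty int) string {
	prefix := strings.Repeat("0", difficulty)
	for n := 0; ; n++ {
		nonce := strconv.Itoa(n)
		sum := sha256.Sum256([]byte(challenge + salt + nonce))
		if strings.HasPrefix(hex.EncodeToString(sum[:]), prefix) {
			return nonce
		}
	}
}

func main() {
	base := "https://example.com" // placeholder host
	requestID := "REQUEST_ID"     // supplied by the interstitial page

	resp, err := http.Get(base + "/api/pow/challenge?id=" + requestID)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var ch challengeResp
	if err := json.NewDecoder(resp.Body).Decode(&ch); err != nil {
		panic(err)
	}

	// The server hashes the *decoded* challenge and salt, so decode first.
	rawChallenge, _ := base64.StdEncoding.DecodeString(ch.Challenge)
	rawSalt, _ := base64.StdEncoding.DecodeString(ch.Salt)
	nonce := solve(string(rawChallenge), string(rawSalt), ch.Difficulty)

	body, _ := json.Marshal(map[string]interface{}{
		"request_id": requestID,
		"g":          nonce,
	})
	verify, err := http.Post(base+"/api/pow/verify", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer verify.Body.Close()
	fmt.Println("verify status:", verify.Status)
}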
+ +
+

+ Backwards Compatibility: The older endpoint /api/verify is maintained for compatibility with existing clients. +

+
+
+ +
+
These docs reflect version 2.0 of the Checkpoint Protection System.
+

Last updated: Wednesday, April 16, 2025

+
+
+ + + \ No newline at end of file diff --git a/develop/html/index.html b/develop/html/index.html new file mode 100644 index 0000000..32aa174 --- /dev/null +++ b/develop/html/index.html @@ -0,0 +1,213 @@ + + + + + + + + caileb.com + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/develop/html/integrity-demo.html b/develop/html/integrity-demo.html new file mode 100644 index 0000000..83bc7cd --- /dev/null +++ b/develop/html/integrity-demo.html @@ -0,0 +1,550 @@ + + + + + + + Integrity Checker - caileb.com + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+

Auto-Integrity Hash Demo

+ +
+

This is a live demonstration of automatic SRI hash generation.

+

The server automatically adds integrity hashes to all external resources when the site is built, so no manual work is required.

+

If you view the source code of this page, you'll see all external CSS and JavaScript files have integrity and crossorigin attributes that were added automatically during build.

+

This security feature protects against compromised CDNs and ensures resources haven't been tampered with.

+
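For reference, an SRI value is simply a Base64-encoded digest of the file, prefixed with the hash algorithm. A build step could compute it roughly like this (a sketch using Node.js, not the site's actual build script; the file path is hypothetical):

JavaScript
import { createHash } from 'node:crypto';
import { readFileSync } from 'node:fs';

// Compute an integrity attribute value for a local copy of a CDN asset
function sriHash(filePath) {
    const digest = createHash('sha384').update(readFileSync(filePath)).digest('base64');
    return `sha384-${digest}`;
}

// e.g. <script src="..." integrity="sha384-..." crossorigin="anonymous"></script>
console.log(sriHash('vendor/jquery.min.js'));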
+ +

External Scripts Working

+

These demos confirm that the external scripts are loaded and working correctly with their integrity hashes:

+ +
+
+

jQuery Demo

+

jQuery provides DOM manipulation and animation capabilities.

+
Running jQuery test...
+
+ +
+

Lodash Demo

+

Lodash provides utility functions for common programming tasks.

+
Running Lodash test...
+
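The checks themselves are simple; something along these lines would do (element IDs here are illustrative, not the page's real ones):

JavaScript
// Hypothetical versions of the demo checks
if (window.jQuery) {
    $('#jquery-demo').text('jQuery ' + $.fn.jquery + ' loaded via SRI-verified CDN').fadeIn();
}
if (window._) {
    document.querySelector('#lodash-demo').textContent =
        'Lodash loaded: ' + _.capitalize('integrity check passed');
}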
+
+ +
+
+

Bootstrap Components

+

Bootstrap provides responsive UI components.

+
+
+ This is a Bootstrap alert component +
+
+
75%
+
+
+
+ +
+

Quicklink Demo

+

Quicklink prefetches links that are in the viewport.

+ +
+
+ +

Monitored Resources

+

The following resources have integrity checks automatically applied during build:

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Resource Type | Location | Integrity Added?
Stylesheet | /css/u.css | No (Local)
Stylesheet | Bootstrap CSS (CDN) | Yes (External)
Preloaded Script | /js/u.js | No (Local)
Preloaded Script | jQuery (CDN) | Yes (External)
Preloaded Script | Lodash (CDN) | Yes (External)
Script | Quicklink (CDN) | Yes (External)
+
+
+ + + + + + + + + + + \ No newline at end of file diff --git a/develop/html/kb.html b/develop/html/kb.html new file mode 100644 index 0000000..c2ae846 --- /dev/null +++ b/develop/html/kb.html @@ -0,0 +1,398 @@ + + + + + + + + Caileb's Knowledgebase + + + + + + + + + + + + + +
+
+

Fail2ban

+
+ + +
+
+

Step 1: Install Fail2ban (Debian/Ubuntu)

+

First, install Fail2ban by running:

+
+ sudo apt install fail2ban +
+
+
+

Step 2: Navigate to the Fail2ban Directory

+

Change to the Fail2ban configuration directory:

+
+ cd /etc/fail2ban/ +
+
+
+

Step 3: Copy the Example Configuration File

+

Copy the example configuration file as a base for your custom configuration:

+
+ sudo cp jail.conf jail.local +
+
+
+

Step 4: Create a New Filter

+

Navigate to the filter.d directory and create a new filter file:

+
+ cd filter.d/ +
+
+ sudo nano nginx-4xx.conf +
+
+
+

Step 5: Define the Filter to Block Repeated 4xx Errors

+

Add the following content to the nginx-4xx.conf file:

+
+ [Definition]
failregex = ^<HOST>.*"(GET|POST|HEAD|CONNECT).*" (404|444|403|400) .*
ignoreregex =
+
+
+
+

Step 6: Edit the Jail Configuration to Use the New Filter

+

Go back to the previous directory and edit jail.local:

+
+ cd .. +
+
+ sudo nano jail.local +
+

Add the following section:

+
+ #
# Repeated 4xx errors (Nginx)
#
[nginx-4xx]
enabled = true
port = http,https
logpath = /var/log/nginx/access.log
maxretry = 4
# Trusted addresses go in ignoreip (the filter's ignoreregex expects a log-line pattern, not IP ranges)
ignoreip = 127.0.0.1/8 10.0.0.0/8 172.16.0.0/12 192.168.0.0/16
+
+
+
+

Step 7: Restart Fail2ban for the Changes to Take Effect

+

Restart the Fail2ban service:

+
+ sudo systemctl restart fail2ban +
+
+
+

Step 8: Check the Filter Status

+

Verify the filter is working:

+
+ sudo fail2ban-client status nginx-4xx +
+

OR

+

For a prettified output:

+
+ sudo fail2ban-client get nginx-4xx banip | tr ' ' '\n' +
+
+
+
+
+ +
+

Node PM2

+

Restart

+
+                pm2 restart caileb.com
+            
+
+ +
+

FFmpeg

+

Highest quality AV1

+
+ ffmpeg -i input -c:v av1_nvenc -preset p7 -cq 1 -b:v 0 -qmin 1 -qmax 5 -rc-lookahead 250 -spatial-aq 1 -aq-strength 15 -refs 16 -temporal-aq 1 -c:a flac -compression_level 8 highest_quality.mkv +
+

Standard compression

+
+ ffmpeg -i input -vf "mpdecimate" -fps_mode vfr -c:v av1_nvenc -preset p7 -cq 30 -b:v 0 -maxrate 18.5M -bufsize 25M -g 240 -keyint_min 24 -rc vbr -c:a libopus -b:a 128k compressed.webm +
+

Extreme compression

+
+ ffmpeg -i input -vf "mpdecimate,scale=-1:1080" -fps_mode vfr -c:v av1_nvenc -preset p7 -rc vbr -b:v 6M -maxrate 12M -bufsize 18M -g 300 -keyint_min 34 -c:a libopus -b:a 96k compressed.webm +
+

Rocket.Chat

+
+ ffmpeg -i input -vf "mpdecimate,scale=-1:1440" -fps_mode vfr -c:v av1_nvenc -preset p7 -rc vbr -b:v 8M -maxrate 15M -bufsize 22M -g 270 -keyint_min 28 -c:a libopus -b:a 112k rocket_chat.webm +
+
+ +
+

Useful HTML Stuffs

+

Make iFrames/Images Lazy Load MDN

+

Replace FitVids or other similar JS libraries with CSS' aspect-ratio MDN

+
+ +
+

Malware Removal

+
    +
  1. Malwarebytes Free
     Easy-to-use tool that quickly detects and removes a broad range of malware.
  2. Emsisoft Emergency Kit
     Utilizes Bitdefender's engine on top of their own for a strong all-in-one cleanup.
  3. Sophos Scan & Clean
     Portable scanner with effective heuristic analysis for detecting malware.
  4. Kaspersky Virus Removal Tool
     Efficiently finds and removes stubborn malware threats.
+
+
+ + + + + diff --git a/develop/html/lazy-video.html b/develop/html/lazy-video.html new file mode 100644 index 0000000..0b0f625 --- /dev/null +++ b/develop/html/lazy-video.html @@ -0,0 +1,664 @@ + + + + + + + Lazy Video Component - Multi-Platform Video Embedding + + + + + + + + + + + + + + + + + +
+

Lazy Video Docs

+ + + +
+

Overview

+

+ Embedding videos with standard <iframe> tags can dramatically slow down your site and consume large amounts of data. Each iframe loads the full video player and related resources immediately, even if the user never interacts with it. On pages with several videos, this can add hundreds of megabytes to the initial page load, resulting in a sluggish and costly experience, especially for users on mobile devices or limited networks. +

+

How Lazy Video Helps

+

+ The <lazy-video> component solves this by loading only a lightweight thumbnail and play button at first. The actual video player is loaded only when the user clicks play (or when the video scrolls into view if autoload is enabled). This keeps your pages fast, responsive, and bandwidth-friendly. +

+
+ View Source + Download + ~17.0kB / 6.0kB (Gzip) +
+
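+ Conceptually, autoload boils down to watching the element with an IntersectionObserver and swapping in the real player once it nears the viewport. A simplified sketch of that idea (attribute names below are placeholders; the shipped component additionally handles thumbnails, per-service embed URLs, and fallbacks): +

+ JavaScript +
+// Simplified illustration only
+const io = new IntersectionObserver((entries, obs) => {
+    for (const entry of entries) {
+        if (!entry.isIntersecting) continue;
+        const el = entry.target;
+        const iframe = document.createElement('iframe');
+        iframe.src = el.dataset.embedUrl;            // e.g. the resolved YouTube/Bitchute embed URL
+        iframe.allow = 'autoplay; encrypted-media; picture-in-picture';
+        iframe.allowFullscreen = true;
+        el.replaceChildren(iframe);                  // replace the thumbnail and play button
+        obs.unobserve(el);
+    }
+}, { rootMargin: '200px' });
+
+document.querySelectorAll('lazy-video[autoload]').forEach(el => io.observe(el));
+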
+ +
+

Basic Usage

+

+ To get started, include the script on your page and use the custom element as shown below: +

+
+ HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Till We Have Faces by Silent Planet">
+</lazy-video>
+
+
+

+ Always add a title for accessibility and better alt text on thumbnails. +

+
+
+ +
+

Officially Supported Platforms

+
+ + + + + + + + + + + + + + + + +
Platform | URL Pattern | Notes
YouTube | youtube.com/embed/ID, youtube.com/watch?v=ID, youtu.be/ID | Full support for thumbnails and parameters.
Bitchute | bitchute.com/video/ID/, bitchute.com/embed/ID/ | Custom thumbnails are only needed if autoload is disabled.
+
+
+ +
+

Attributes

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Attribute | Description | Default
src | Video embed URL (required) | N/A
title | Video title | "Video"
width | Width in pixels or percent | 100% (responsive)
height | Height in pixels | 16:9 ratio
thumbnail | Custom thumbnail URL | Auto-detected per platform
thumbnail-quality | YouTube thumbnail quality (default, hq, mq, sd, maxres) | Auto (maxres on desktop, hq on mobile)
service | Force a specific service (youtube, bitchute) | Auto-detected
sandbox | Extra security for the iframe. Restricts what the embedded player can do. See MDN for details. | allow-scripts allow-same-origin allow-popups allow-forms allow-presentation
no-cookie | Use youtube-nocookie.com for YouTube (privacy-friendly) | true
autoload | Load video when scrolled into view | false (YouTube), true (Bitchute)
hide-title | Hide the video title bar | false
align | Set alignment (left, right, center) | center
container-fit | Make video fill the container (FitVids style) | false
+
+
+

+ Warning: Using autoload with many videos on one page can impact performance as users scroll. Use with care! +

+
+
+

+ Note: With container-fit, the component overrides max-width to 100% and sets max-height to auto, making it fill its container while keeping the aspect ratio. +

+
+
+ +
+

Styling & CSS Variables

+

+ You can customize the look of <lazy-video> using CSS variables: +

+
+ CSS +
lazy-video {
+    --lv-max-width: 600px;
+    --lv-border-radius: 8px;
+    --lv-play-button-color: #f00;
+    --lv-play-button-bg: rgba(0, 0, 0, 0.7);
+    --lv-show-title: none;
+}
+
+
+

Available CSS Variables

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CSS Variable | Description | Default
--lv-max-width | Maximum width of the video | 560px
--lv-aspect-ratio | Aspect ratio | 16 / 9
--lv-display | Display type | block
--lv-position | CSS position | relative
--lv-border-radius | Border radius for the container | 0
--lv-margin | Container margin | 0 auto
--lv-margin-left | Margin for left alignment | 0
--lv-margin-right | Margin for right alignment | 0 0 0 auto
--lv-margin-center | Margin for center alignment | 0 auto
--lv-align | Set alignment (left, right, center) | center
--lv-background | Background color | #000
--lv-thumbnail-opacity | Thumbnail opacity | 0.85
--lv-thumbnail-hover-opacity | Opacity on hover | 1
--lv-thumbnail-object-fit | Object-fit for thumbnail | cover
--lv-play-button-width | Play button width | 68px
--lv-play-button-height | Play button height | 48px
--lv-play-button-bg | Play button background | rgba(33, 33, 33, 0.8)
--lv-play-button-bg-hover | Play button hover background | rgba(230, 33, 23, 1)
--lv-play-button-color | Play button arrow color | rgba(255, 255, 255, 0.9)
--lv-play-button-radius | Play button border radius | 8px
--lv-play-button-arrow-size | Play button arrow size | 12px 0 12px 20px
--lv-title-padding | Title bar padding | 10px 12px
--lv-title-bg | Title background | rgba(0, 0, 0, 0.75)
--lv-title-color | Title text color | white
--lv-title-font-family | Title font family | Roboto, Arial, sans-serif
--lv-title-font-size | Title font size | 18px
--lv-title-font-weight | Title font weight | 500
--lv-title-line-height | Title line height | 1.2
--lv-focus-outline | Focus outline | 2px solid #4285F4
--lv-focus-outline-offset | Focus outline offset | 2px
--lv-show-title | Show/hide title bar (use 'none' to hide) | block
--lv-timestamp-right | Timestamp right position | 10px
--lv-timestamp-bottom | Timestamp bottom position | 10px
--lv-timestamp-bg | Timestamp background | rgba(0, 0, 0, 0.7)
--lv-timestamp-color | Timestamp text color | white
--lv-timestamp-padding | Timestamp padding | 2px 6px
--lv-timestamp-radius | Timestamp border radius | 3px
--lv-timestamp-font-size | Timestamp font size | 12px
--lv-timestamp-font-family | Timestamp font family | system-ui, sans-serif
--lv-loading-bg | Loading background | rgba(0,0,0,0.7)
--lv-loading-color | Loading text color | white
--lv-loading-font-family | Loading font family | system-ui, sans-serif
--lv-fallback-bg | Fallback background | #1a1a1a
--lv-fallback-color | Fallback text color | white
--lv-fallback-font-family | Fallback font family | system-ui, sans-serif
--lv-fallback-font-size | Fallback font size | 14px
+
+
+ +
+

Examples

+

YouTube Embed with Custom Size

+
+ HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Till We Have Faces by Silent Planet"
+    width="50%"
+    height="260px"
+    thumbnail-quality="maxres">
+</lazy-video>
+
+
+ +
+

Bitchute with Autoload Off

+
+ HTML +
<lazy-video 
+    src="https://www.bitchute.com/video/zSfeNPF-OpY"
+    title="Trump Assassination Attempt Documents LOCKED Away. What are they Hiding?"
+    autoload="false"
+    thumbnail="https://static-3.bitchute.com/live/cover_images/nDPZqzyLkFKW/zSfeNPF-OpY_640x360.jpg">
+</lazy-video>
+
+
+ + +
+

+ With autoload="false" on Bitchute, users need to click twice: once to load the player, and again to play. This saves bandwidth but may be less convenient. +

+
+
+

Bitchute with Autoload

+
+ HTML +
<lazy-video 
+    src="https://www.bitchute.com/video/zSfeNPF-OpY"
+    title="Trump Assassination Attempt Documents LOCKED Away. What are they Hiding?">
+</lazy-video>
+
+
+ + +
+

Responsive Container (FitVids Style)

+
+ HTML +
<div style="max-width: 100%; width: 100%;">
+    <lazy-video 
+        src="https://www.youtube.com/embed/wPr3kws2prM"
+        title="Responsive container example"
+        container-fit="true">
+    </lazy-video>
+</div>
+
+
+
+ + +
+
+

+ container-fit="true" makes the video fill its parent container while keeping the aspect ratio. Great for fluid layouts. +

+
+
+

YouTube with Hidden Title Bar

+
+ HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Hidden title example"
+    hide-title>
+</lazy-video>
+
+
+ + +
+

Global Title Control with CSS

+
+ CSS +
/* Hide titles for all videos */
+lazy-video {
+    --lv-show-title: none;
+}
+
+/* Hide titles for a group */
+.article-videos lazy-video {
+    --lv-show-title: none;
+}
+
+

Global Alignment Control with CSS

+
+ CSS +
/* Set alignment for all videos */
+lazy-video {
+    --lv-align: left;
+}
+
+/* Responsive alignment */
+@media (max-width: 768px) {
+    lazy-video {
+        --lv-align: center;
+    }
+}
+
+/* Different alignments for different contexts */
+.sidebar lazy-video {
+    --lv-align: right;
+}
+
+
+ +
+

Converting Existing iframes

+

+ You can convert existing video iframes to <lazy-video> by simply changing the tag name. +

+

Standard YouTube iframe:

+
+ HTML +
<iframe 
+    src="https://www.youtube.com/embed/wPr3kws2prM?start=30&rel=0&controls=0" 
+    width="560" 
+    height="315" 
+    title="Till We Have Faces by Silent Planet" 
+    frameborder="0" 
+    allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" 
+    allowfullscreen>
+</iframe>
+
+
+

Converted to <lazy-video> (just change the tag):

+
+ HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM?start=30&rel=0&controls=0" 
+    width="560" 
+    height="315" 
+    title="Till We Have Faces by Silent Planet" 
+    frameborder="0" 
+    allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" 
+    allowfullscreen>
+</lazy-video>
+
+
+ +
+

Security & Privacy

+

+ <lazy-video> is built with modern web security and privacy best practices: +

+
    +
  • All embedded iframes use the credentialless attribute. This helps prevent credential leaks and keeps third-party content isolated from your site's cookies and storage.
  • The sandbox attribute is set by default, restricting what the embedded player can do and reducing risk from third-party content.
  • For YouTube, the youtube-nocookie.com domain is used by default, so no tracking cookies are set unless the user interacts with the video.
+
+

+ Note: You can override the sandbox attribute if you need to enable additional features, but the default is designed for maximum safety. +

+
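+ Put together, the defaults above roughly correspond to an iframe built like this (a sketch only; VIDEO_ID is a placeholder and the component derives the actual embed URL per service): +

+ JavaScript +
+const iframe = document.createElement('iframe');
+iframe.src = 'https://www.youtube-nocookie.com/embed/VIDEO_ID';   // no-cookie default for YouTube
+iframe.setAttribute('credentialless', '');                        // isolate third-party content
+iframe.setAttribute('sandbox',
+    'allow-scripts allow-same-origin allow-popups allow-forms allow-presentation'); // default sandbox
+iframe.allow = 'accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; fullscreen; web-share';
+iframe.allowFullscreen = true;
+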
+
+ +
+

Browser Support

+

+ Works in all modern browsers (Chrome, Firefox, Safari, Edge). Uses standard web component APIs. For IE11 or older, use the custom-elements polyfill. +

+
+ +
+

Breaking Change

+

+ April 3, 2025: The old <lazy-youtube> element is no longer supported. Please update any code to use <lazy-video> instead. +

+
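+ If older markup still contains <lazy-youtube> tags, updating the HTML is the proper fix, but a small runtime shim along these lines can bridge the gap in the meantime (illustrative only, not part of the library): +

+ JavaScript +
+// Rewrite leftover <lazy-youtube> elements to <lazy-video>, preserving attributes and children
+document.querySelectorAll('lazy-youtube').forEach(old => {
+    const el = document.createElement('lazy-video');
+    Array.from(old.attributes).forEach(a => el.setAttribute(a.name, a.value));
+    el.append(...old.childNodes);
+    old.replaceWith(el);
+});
+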
+ +
+
These docs reflect the latest release version of @lv.js.
+

Last updated: Friday, April 11th, 2025

+
+
+ + \ No newline at end of file diff --git a/develop/js/c.js b/develop/js/c.js new file mode 100644 index 0000000..bdbf3f4 --- /dev/null +++ b/develop/js/c.js @@ -0,0 +1,396 @@ +// Web Worker Script for hash computation +function workerFunction() { + self.onmessage = function(e) { + const { type, data } = e.data; + + if (type === 'pow') { + // PoW calculation + const { challenge, salt, startNonce, endNonce, target, batchId } = data; + let count = 0; + let solution = null; + + processNextNonce(startNonce); + + function processNextNonce(nonce) { + const input = String(challenge) + String(salt) + nonce.toString(); + const msgBuffer = new TextEncoder().encode(input); + + crypto.subtle.digest('SHA-256', msgBuffer) + .then(hashBuffer => { + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const result = hashArray.map(b => + b.toString(16).padStart(2, '0')).join(''); + + count++; + + if (result.startsWith(target)) { + solution = { nonce: nonce.toString(), found: true }; + self.postMessage({ + type: 'pow_result', + solution: solution, + count: count, + batchId: batchId + }); + return; + } + + if (count % 1000 === 0) { + self.postMessage({ + type: 'progress', + count: count, + batchId: batchId + }); + } + + if (nonce < endNonce && !solution) { + setTimeout(() => processNextNonce(nonce + 1), 0); + } else if (!solution) { + self.postMessage({ + type: 'pow_result', + solution: null, + count: count, + batchId: batchId + }); + } + }) + .catch(err => { + self.postMessage({ + type: 'error', + error: 'Crypto API error: ' + err.message + }); + }); + } + } else { + // Handle other message types if needed in the future + self.postMessage({ type: 'error', error: 'Unknown message type: ' + type }); + } + }; +} +const workerCode = "(" + workerFunction.toString() + ")()"; + +// Proof-of-Space Worker script with buffer pooling +function posWorkerFunction() { + self.onmessage = async function(e) { + const { type, seedHex, isDecoy } = e.data; + if (type === 'pos') { + const minMB = 48, maxMB = 160; + let seedInt = parseInt(seedHex.slice(0, 8), 16); + if (isNaN(seedInt)) seedInt = Math.floor(Math.random() * (maxMB - minMB + 1)); + const CHUNK_MB = isDecoy + ? (minMB + ((seedInt * 3 + 17) % (maxMB - minMB + 1))) + : (minMB + (seedInt % (maxMB - minMB + 1))); + const CHUNK_SIZE = CHUNK_MB * 1024 * 1024; + const chunkCount = 4 + (seedInt % 5); + const chunkSize = Math.floor(CHUNK_SIZE / chunkCount); + const FILL_STEP_4K = 4096, FILL_STEP_1K = 1024; + const FILL_STEP_SWITCH = 35 * 1024 * 1024; + const runs = 3; + + // Pre-allocate buffers + // Removed baseBuf as cpuBase calculation is unused + const mainBuf = new ArrayBuffer(CHUNK_SIZE); + const view = new Uint8Array(mainBuf); + const pressureBuf = new ArrayBuffer(16 * 1024 * 1024); + const pressureView = new Uint8Array(pressureBuf); + + // Removed CPU baseline calculation as it's unused upstream + + const hashes = []; + const times = []; + for (let r = 0; r < runs; r++) { + const prng = seededPRNG(seedHex + r.toString(16)); + // generate deterministic chunk order + const order = Array.from({ length: chunkCount }, (_, i) => i); + for (let i = order.length - 1; i > 0; i--) { + const j = prng() % (i + 1); + [order[i], order[j]] = [order[j], order[i]]; + } + + // fill view + const t0 = performance.now(); + for (let c = 0; c < chunkCount; c++) { + const idx = order[c]; + const start = idx * chunkSize; + const end = (idx === chunkCount - 1) ? CHUNK_SIZE : start + chunkSize; + const step = (start < FILL_STEP_SWITCH) ? 
FILL_STEP_4K : FILL_STEP_1K; + for (let i = start; i < end; i += step) view[i] = prng() & 0xFF; + } + const hashBuf = await crypto.subtle.digest('SHA-256', view); + const t2 = performance.now(); + hashes.push(Array.from(new Uint8Array(hashBuf)).map(b => b.toString(16).padStart(2, '0')).join('')); + times.push(Math.round(t2 - t0)); + + // memory pressure + for (let i = 0; i < pressureView.length; i += 4096) pressureView[i] = prng() & 0xFF; + } + + // Removed cpuBase from postMessage + self.postMessage({ type: 'pos_result', hashes, times }); + } + }; + function seededPRNG(seedHex) { + const s = []; + for (let i = 0; i < 4; i++) s[i] = parseInt(seedHex.substr(i * 8, 8), 16) >>> 0; + function rotl(x, k) { return ((x << k) | (x >>> (32 - k))) >>> 0; } + return function() { + const t = s[1] << 9; + let r = (s[0] * 5) >>> 0; + r = rotl(r, 7) * 9 >>> 0; + const tmp = s[0] ^ s[2]; + s[2] ^= s[1]; s[1] ^= s[3]; s[0] ^= s[1]; + s[3] ^= tmp; s[2] ^= t; s[3] = rotl(s[3], 11); + return r >>> 0; + }; + } +} +const posWorkerCode = "(" + posWorkerFunction.toString() + ")()"; + +// Main verification script +document.addEventListener('DOMContentLoaded', function() { + setTimeout(initVerification, 650); + + function initVerification() { + const dataEl = document.getElementById('verification-data'); + const targetPath = dataEl.getAttribute('data-target'); + const requestID = dataEl.getAttribute('data-request-id'); + + startVerification(); + + async function startVerification() { + try { + const challengeResponse = await fetch('/api/pow/challenge?id=' + encodeURIComponent(requestID), { + method: 'GET', + headers: { 'Accept': 'application/json' } + }); + + if (!challengeResponse.ok) { + throw new Error('Failed to get challenge parameters'); + } + + const challengeData = await challengeResponse.json(); + + // Extract real and decoy seeds using obfuscated keys + const realPosSeed = challengeData.d; // 'd' is pos_seed + const decoySeed = challengeData.e || (Math.random().toString(16).slice(2, 18)); // 'e' is decoy_seed + const decoyFields = challengeData.f || []; // 'f' is decoy_fields + + const verifier = new Verifier(challengeData, targetPath, requestID, decoySeed, decoyFields); + verifier.start(); + } catch (error) { + showError('Verification setup failed: ' + error.message); + } + } + + function createWorker() { + const blob = new Blob([workerCode], { type: 'text/javascript' }); + return new Worker(URL.createObjectURL(blob)); + } + + function createPosWorker() { + const blob = new Blob([posWorkerCode], { type: 'text/javascript' }); + return new Worker(URL.createObjectURL(blob)); + } + + function showError(message) { + const container = document.querySelector('.container'); + container.classList.add('error'); + container.classList.remove('success'); + + // Let CSS pseudo-elements render the ring and X on the existing spinner + const spinnerEl = document.querySelector('.spinner'); + + const statusEl = document.getElementById('status'); + statusEl.style.display = 'inline-block'; + statusEl.textContent = ''; // Keep this behavior as original, even if odd + statusEl.classList.add('error'); + statusEl.classList.remove('success'); + + const spinnerContainer = document.querySelector('.spinner-container'); + let errorDetails = document.getElementById('error-details'); + + if (!errorDetails) { + errorDetails = document.createElement('div'); + errorDetails.id = 'error-details'; + errorDetails.className = 'error-details'; + spinnerContainer.appendChild(errorDetails); + } + + // Hide error details to match success state 
layout + errorDetails.style.display = 'none'; // Keep this behavior as original + } + + function showSuccess() { + document.querySelector('.container').classList.add('success'); + document.getElementById('status').textContent = 'Redirecting'; + } + + function Verifier(params, targetPath, requestID, decoySeed, decoyFields) { + const workers = []; + const activeBatches = {}; + let powSolution = null; + let isRunning = false; + + const cpuCount = navigator.hardwareConcurrency || 4; + const workerCount = Math.max(1, Math.floor(cpuCount * 0.8)); + + const REDIRECT_DELAY = 1488; + + this.start = function() { + setTimeout(findProofOfWork, 100); + }; + + async function findProofOfWork() { + try { + isRunning = true; + + let decodedChallenge, decodedSalt; + try { + decodedChallenge = atob(params.a); + decodedSalt = atob(params.b); + } catch (e) { + throw new Error(`Failed to decode challenge/salt: ${e.message}`); + } + const target = '0'.repeat(params.c); + + for (let i = 0; i < workerCount; i++) { + const worker = createWorker(); + // Pass only 'e.data' as workerId parameter was unused + worker.onmessage = e => handleWorkerMessage(e.data); + worker.onerror = error => { + // Silently handle worker errors + }; + workers.push(worker); + } + + const totalRange = Number.MAX_SAFE_INTEGER; + const rangePerWorker = Math.floor(totalRange / workerCount); + + for (let i = 0; i < workers.length; i++) { + const startNonce = i * rangePerWorker; + const endNonce = (i === workers.length - 1) ? + totalRange : (i + 1) * rangePerWorker - 1; + + const workerId = `pow-worker-${i}`; + + activeBatches[workerId] = { + // Keep workerId here as it's used as key in activeBatches + workerId: i, + startNonce, + endNonce + }; + + workers[i].postMessage({ + type: 'pow', + data: { + challenge: decodedChallenge, + salt: decodedSalt, + startNonce, + endNonce, + target, + batchId: workerId + } + }); + } + + } catch (error) { + terminateWorkers(); + showError(error.message); + } + } + + // Removed unused 'workerId' parameter + function handleWorkerMessage(data) { + if (!isRunning) return; + + if (data.type === 'pow_result') { + if (activeBatches[data.batchId]) { + delete activeBatches[data.batchId]; + + if (data.solution && data.solution.found) { + if (!powSolution) { + powSolution = data.solution; + proofOfWorkFound(powSolution); + } + } + } + } else if (data.type === 'error') { + showError('Compatibility error: ' + data.error); + terminateWorkers(); + } + } + + async function proofOfWorkFound(solution) { + isRunning = false; + terminateWorkers(); + try { + // PoS via Worker + const posResult = await new Promise(res => { + const w = createPosWorker(); + w.onmessage = e => { if (e.data.type==='pos_result') { res(e.data); w.terminate(); }}; + w.postMessage({ type:'pos', seedHex: params.d, isDecoy:false }); + }); + const decoyResult = await new Promise(res => { + const w = createPosWorker(); + w.onmessage = e => { if (e.data.type==='pos_result') { res(e.data); w.terminate(); }}; + w.postMessage({ type:'pos', seedHex: decoySeed, isDecoy:true }); + }); + // Submit results + await submitSolution({ requestID, g: solution.nonce, + h: posResult.hashes, i: posResult.times, + j: decoyResult.hashes, k: decoyResult.times, + l: decoyFields }); + } catch (error) { + showError(error.message); + } + } + + function terminateWorkers() { + workers.forEach(worker => worker.terminate()); + } + + async function submitSolution(solutionData) { + try { + const response = await fetch('/api/pow/verify', { + method: 'POST', + headers: { + 'Content-Type': 
'application/json' + }, + body: JSON.stringify({ + request_id: solutionData.requestID, // Keep descriptive + g: solutionData.g, // Nonce + h: solutionData.h, // Real PoS Hashes + i: solutionData.i, // Real PoS Times + j: solutionData.j, // Decoy Hashes + k: solutionData.k, // Decoy Times + l: solutionData.l // Decoy Fields + }) + }); + + if (!response.ok) { + let errorMsg = `Verification failed: ${response.statusText}`; + try { + const errorData = await response.json(); + if (errorData && errorData.error) { + errorMsg += ` - ${errorData.error}`; + } else { + const text = await response.text(); + errorMsg += ` - Response: ${text}`; + } + } catch (parseError) { + // Silent catch + } + showError(errorMsg); + return; + } + + showSuccess(); + setTimeout(() => { + window.location.href = targetPath; + }, REDIRECT_DELAY); + } catch (error) { + showError('Verification failed. Please refresh the page.'); + } + } + } + } +}); \ No newline at end of file diff --git a/develop/js/cc.js b/develop/js/cc.js new file mode 100644 index 0000000..3e75777 --- /dev/null +++ b/develop/js/cc.js @@ -0,0 +1,858 @@ +/** + * Credit Card Tracker + * Tracks reward categories, spending limits, and payment history for credit cards + */ + +document.addEventListener('DOMContentLoaded', () => { + // DOM Elements + const cardsContainer = document.getElementById('cards-container'); + const emptyState = document.getElementById('empty-state'); + const searchInput = document.getElementById('search-input'); + const addCardBtn = document.getElementById('add-card-btn'); + const addFirstCardBtn = document.getElementById('add-first-card-btn'); + const cardModal = document.getElementById('card-modal'); + const closeModalBtn = document.getElementById('close-modal'); + const cardForm = document.getElementById('card-form'); + const modalTitle = document.getElementById('modal-title'); + const cardIdInput = document.getElementById('card-id'); + const addCategoryBtn = document.getElementById('add-category-btn'); + const categoriesContainer = document.getElementById('categories-container'); + + // Payment Modal Elements + const paymentModal = document.getElementById('payment-modal'); + const closePaymentModalBtn = document.getElementById('close-payment-modal'); + const paymentForm = document.getElementById('payment-form'); + const paymentCardId = document.getElementById('payment-card-id'); + const paymentCardName = document.getElementById('payment-card-name'); + const paymentCategory = document.getElementById('payment-category'); + const paymentAmount = document.getElementById('payment-amount'); + const paymentDate = document.getElementById('payment-date'); + + // Create Payment History Modal - Will be added to DOM later + const paymentHistoryModal = document.createElement('div'); + paymentHistoryModal.className = 'modal'; + paymentHistoryModal.id = 'payment-history-modal'; + + // Initialize cards from localStorage + let cards = loadCards(); + + // Display cards or empty state + renderCards(); + + // Set today's date as default for payment date + paymentDate.valueAsDate = new Date(); + + // Event Listeners + addCardBtn.addEventListener('click', () => openAddCardModal()); + addFirstCardBtn.addEventListener('click', () => openAddCardModal()); + closeModalBtn.addEventListener('click', () => closeModal(cardModal)); + closePaymentModalBtn.addEventListener('click', () => closeModal(paymentModal)); + cardForm.addEventListener('submit', handleCardFormSubmit); + addCategoryBtn.addEventListener('click', () => addCategoryField('', '', '')); + 
paymentForm.addEventListener('submit', handlePaymentFormSubmit); + searchInput.addEventListener('input', handleSearch); + + // Global event delegation for dynamically added card buttons + cardsContainer.addEventListener('click', handleCardActions); + + // Check for monthly resets on page load + checkMonthlyResets(); + + // Initialize payment history modal + initPaymentHistoryModal(); + + /** + * Initialize payment history modal + */ + function initPaymentHistoryModal() { + paymentHistoryModal.innerHTML = ` + + `; + + document.body.appendChild(paymentHistoryModal); + + // Add event listener to close button + document.getElementById('close-history-modal').addEventListener('click', () => { + closeModal(paymentHistoryModal); + }); + } + + /** + * Handle card action buttons via event delegation + */ + function handleCardActions(e) { + const target = e.target; + + // Find closest card element + const cardElement = target.closest('.credit-card'); + if (!cardElement) return; + + const cardId = cardElement.dataset.id; + const card = cards.find(c => c.id === cardId); + if (!card) return; + + // Payment button + if (target.closest('.payment-btn')) { + e.stopPropagation(); + openPaymentModal(card); + return; + } + + // Edit button + if (target.closest('.edit-btn')) { + e.stopPropagation(); + openEditCardModal(card); + return; + } + + // Delete button + if (target.closest('.delete-btn')) { + e.stopPropagation(); + deleteCard(cardId); + return; + } + + // View payment history (clicking on a category item) + const categoryItem = target.closest('.category-item'); + if (categoryItem && categoryItem.dataset.categoryName) { + const categoryName = categoryItem.dataset.categoryName; + const category = card.categories.find(c => c.name === categoryName); + if (category) { + openPaymentHistoryModal(card, category); + } + } + } + + /** + * Load cards from localStorage + */ + function loadCards() { + const storedCards = localStorage.getItem('creditCards'); + return storedCards ? 
JSON.parse(storedCards) : []; + } + + /** + * Save cards to localStorage + */ + function saveCards() { + localStorage.setItem('creditCards', JSON.stringify(cards)); + } + + /** + * Render all cards or empty state + */ + function renderCards() { + // Clear cards container except for the empty state + Array.from(cardsContainer.children).forEach(child => { + if (!child.classList.contains('empty-state')) { + child.remove(); + } + }); + + // Show empty state if no cards + if (cards.length === 0) { + emptyState.style.display = 'block'; + return; + } + + // Hide empty state and render cards + emptyState.style.display = 'none'; + + // Filter cards if search input has value + let filteredCards = cards; + const searchTerm = searchInput.value.toLowerCase().trim(); + + if (searchTerm) { + filteredCards = cards.filter(card => { + const nameMatch = card.name.toLowerCase().includes(searchTerm); + const bankMatch = card.bank.toLowerCase().includes(searchTerm); + const categoryMatch = card.categories.some(cat => + cat.name.toLowerCase().includes(searchTerm) + ); + + return nameMatch || bankMatch || categoryMatch; + }); + } + + // Render filtered cards + filteredCards.forEach(card => { + const cardElement = createCardElement(card); + cardsContainer.insertBefore(cardElement, emptyState); + }); + } + + /** + * Create a card element + */ + function createCardElement(card) { + const cardElement = document.createElement('div'); + cardElement.className = 'credit-card'; + cardElement.dataset.id = card.id; + + // Calculate days until cycle resets + const today = new Date(); + const currentDay = today.getDate(); + const cycleDay = parseInt(card.statementDate); + + let daysUntilReset; + if (currentDay === cycleDay) { + daysUntilReset = 0; + } else if (currentDay < cycleDay) { + daysUntilReset = cycleDay - currentDay; + } else { + // Calculate days until next month's cycle date + const lastDayOfMonth = new Date(today.getFullYear(), today.getMonth() + 1, 0).getDate(); + daysUntilReset = (lastDayOfMonth - currentDay) + cycleDay; + } + + // Create card HTML + cardElement.innerHTML = ` +
+
+

${card.name}

+
${card.bank}${card.lastDigits ? ` •••• ${card.lastDigits}` : ''}
+
+
+ + + +
+
+
+
+ Statement Cycle: + Day ${card.statementDate} +
+
+ Cycle Resets: + ${daysUntilReset === 0 ? 'Today' : `In ${daysUntilReset} day${daysUntilReset !== 1 ? 's' : ''}`} +
+ ${card.expiryDate ? ` +
+ Expires: + ${formatExpiryDate(card.expiryDate)} +
+ ` : ''} +
+
+ ${card.categories.map(category => { + const payments = category.payments; // Assumed to be an array now + const spent = payments.reduce((total, p) => total + parseFloat(p.amount), 0); + const hasLimit = category.limit > 0; + const percentUsed = hasLimit ? (spent / category.limit) * 100 : 0; + const isNearLimit = percentUsed >= 75 && percentUsed < 100; + const isAtLimit = percentUsed >= 100; + const hasPayments = payments.length > 0; + + // Calculate cash back amounts + const cashbackEarned = (spent * category.rate / 100).toFixed(2); + const maxCashback = hasLimit ? (category.limit * category.rate / 100).toFixed(2) : 0; + const cashbackDisplay = hasLimit ? ` ($${cashbackEarned}/$${maxCashback})` : ` ($${cashbackEarned})`; + + return ` +
+
+ ${category.name}: ${category.rate}% + ${hasLimit ? ` + $${spent.toFixed(2)} / $${category.limit.toFixed(2)}${cashbackDisplay} + ` : `$${spent.toFixed(2)}${cashbackDisplay}`} +
+ ${hasLimit ? ` +
+
+
+ ` : ''} + ${hasPayments ? `` : ''} +
+ `; + }).join('')} +
+ `; + + return cardElement; + } + + /** + * Format expiry date (YYYY-MM to MM/YYYY) + */ + function formatExpiryDate(dateString) { + const [year, month] = dateString.split('-'); + return `${month}/${year}`; + } + + /** + * Format a date for display (YYYY-MM-DD to MM/DD/YYYY) + */ + function formatDate(dateString) { + const date = new Date(dateString); + return date.toLocaleDateString('en-US', { + month: '2-digit', + day: '2-digit', + year: 'numeric' + }); + } + + /** + * Open modal to add a new card + */ + function openAddCardModal() { + // Reset form + cardForm.reset(); + cardIdInput.value = ''; + modalTitle.textContent = 'Add New Card'; + + // Clear existing category fields and add one empty one + clearCategoryFields(); + + // Open modal + openModal(cardModal); + } + + /** + * Open modal to edit an existing card + */ + function openEditCardModal(card) { + // Set form values + cardForm.reset(); + cardIdInput.value = card.id; + document.getElementById('card-name').value = card.name; + document.getElementById('card-bank').value = card.bank; + document.getElementById('last-digits').value = card.lastDigits || ''; + document.getElementById('expiry-date').value = card.expiryDate || ''; + document.getElementById('statement-date').value = card.statementDate; + + modalTitle.textContent = 'Edit Card'; + + // Clear existing category fields + clearCategoryFields(); + + // Add category fields for existing categories + if (card.categories.length === 0) { + } else { + card.categories.forEach(category => { + addCategoryField(category.name, category.rate, category.limit); + }); + } + + // Open modal + openModal(cardModal); + } + + /** + * Open modal to record a payment + */ + function openPaymentModal(card) { + paymentCardId.value = card.id; + paymentCardName.textContent = card.name; + + // Clear and populate category dropdown + paymentCategory.innerHTML = ''; + card.categories.forEach(category => { + if (!category) return; + const option = document.createElement('option'); + option.value = category.name; + option.textContent = `${category.name} (${category.rate}%)`; + + // Add info about limit if one exists + if (category.limit > 0) { + const payments = category.payments; // Assumed to be an array now + const spent = payments.reduce((total, p) => total + parseFloat(p.amount), 0); + const remaining = Math.max(0, category.limit - spent); + option.textContent += ` - $${remaining.toFixed(2)} remaining`; + } + + paymentCategory.appendChild(option); + }); + + // Reset other fields + paymentAmount.value = ''; + paymentDate.valueAsDate = new Date(); + document.getElementById('payment-note').value = ''; + + // Set modal title and button text for new payment + paymentModal.querySelector('.modal-title').textContent = 'Record Payment'; + paymentModal.querySelector('.save-btn').textContent = 'Record Payment'; + + // Open modal + openModal(paymentModal); + } + + /** + * Open payment history modal for a category + */ + function openPaymentHistoryModal(card, category) { + if (!card || !category) return; + const container = document.getElementById('payment-history-container'); + const modalTitle = paymentHistoryModal.querySelector('.modal-title'); + + modalTitle.textContent = `${card.name} - ${category.name} Payments`; + container.innerHTML = ''; // Clear previous content + + const payments = category.payments; // Assumed to be an array now + + if (payments.length === 0) { + container.innerHTML = ` +
+

No payment history

+

No payments have been recorded for this category yet.

+
+ `; + } else { + const sortedPayments = [...payments].sort((a, b) => + new Date(b.date) - new Date(a.date) + ); + + const historyHtml = ` +
+
+ Date + Amount + Note + Actions +
+ ${sortedPayments.map(payment => ` +
+ ${formatDate(payment.date)} + $${parseFloat(payment.amount).toFixed(2)} + ${payment.note || '-'} +
+ + +
+
+ `).join('')} +
+ `; + + container.innerHTML = historyHtml; + + // Add event listeners for edit and delete payment buttons + container.querySelectorAll('.edit-payment-btn').forEach(btn => { + btn.addEventListener('click', e => { + const paymentId = e.target.closest('.payment-history-item').dataset.paymentId; + const payment = payments.find(p => p.id === paymentId); + if (payment) { + editPayment(card.id, category.name, payment); + } + }); + }); + + container.querySelectorAll('.delete-payment-btn').forEach(btn => { + btn.addEventListener('click', e => { + const paymentId = e.target.closest('.payment-history-item').dataset.paymentId; + const payment = payments.find(p => p.id === paymentId); + if (payment) { + deletePayment(card.id, category.name, payment.id); + } + }); + }); + } + + // Open the modal + openModal(paymentHistoryModal); + } + + /** + * Edit a payment + */ + function editPayment(cardId, categoryName, payment) { + // Set up payment modal for editing + paymentCardId.value = cardId; + const card = cards.find(c => c.id === cardId); + if (!card) return; + + paymentCardName.textContent = card.name; + + // Populate category dropdown (with only the selected category) + paymentCategory.innerHTML = ''; + const category = card.categories.find(c => c.name === categoryName); + if (!category) return; + + const option = document.createElement('option'); + option.value = category.name; + option.textContent = `${category.name} (${category.rate}%)`; + paymentCategory.appendChild(option); + + // Set payment details + paymentAmount.value = payment.amount; + paymentDate.value = payment.date; + document.getElementById('payment-note').value = payment.note || ''; + + // Add payment ID to the form + const paymentIdInput = document.getElementById('payment-id') || document.createElement('input'); + paymentIdInput.type = 'hidden'; + paymentIdInput.id = 'payment-id'; + paymentIdInput.value = payment.id; + if (!document.getElementById('payment-id')) { + paymentForm.appendChild(paymentIdInput); + } + + // Change modal title and button text + paymentModal.querySelector('.modal-title').textContent = 'Edit Payment'; + paymentModal.querySelector('.save-btn').textContent = 'Update Payment'; + + // Close history modal and open payment modal with a slight delay for better transition + closeModal(paymentHistoryModal); + setTimeout(() => { + openModal(paymentModal); + }, 300); + } + + /** + * Delete a payment + */ + function deletePayment(cardId, categoryName, paymentId) { + if (!confirm('Are you sure you want to delete this payment?')) return; + + const cardIndex = cards.findIndex(c => c.id === cardId); + if (cardIndex === -1) return; + + const categoryIndex = cards[cardIndex].categories.findIndex(c => c.name === categoryName); + if (categoryIndex === -1) return; + + // Remove the payment from the array + const payments = cards[cardIndex].categories[categoryIndex].payments; // Assumed to be array + const paymentIndex = payments.findIndex(p => p.id === paymentId); + if (paymentIndex === -1) return; + + // Get payment amount for confirmation + const amount = payments[paymentIndex].amount; + + // Remove the payment + payments.splice(paymentIndex, 1); + + // Save changes + saveCards(); + + // Close the history modal + closeModal(paymentHistoryModal); + + // Show confirmation toast + showToast(`Payment of $${amount.toFixed(2)} has been deleted`); + + // Re-render cards + renderCards(); + } + + /** + * Open a modal + */ + function openModal(modal) { + modal.classList.add('active'); + document.body.style.overflow = 'hidden'; // Prevent 
background scrolling + + // Focus first input in modal for better accessibility + setTimeout(() => { + const firstInput = modal.querySelector('input:not([type="hidden"])'); + if (firstInput) firstInput.focus(); + }, 100); + } + + /** + * Close a modal + */ + function closeModal(modal) { + modal.classList.remove('active'); + document.body.style.overflow = ''; // Restore scrolling + } + + /** + * Clear all category fields + */ + function clearCategoryFields() { + categoriesContainer.innerHTML = ''; + } + + /** + * Add a category field to the form + */ + function addCategoryField(name = '', rate = '', limit = '') { + const categoryField = document.createElement('div'); + categoryField.className = 'category-inputs'; + categoryField.innerHTML = ` + + + + + `; + + // Add event listener to remove button + categoryField.querySelector('.remove-category').addEventListener('click', function() { + // Allow removing the last category field + categoryField.remove(); + }); + + categoriesContainer.appendChild(categoryField); + } + + /** + * Handle card form submission + */ + function handleCardFormSubmit(e) { + e.preventDefault(); + + // Get form values + const cardId = cardIdInput.value || generateId(); + const isEditing = !!cardIdInput.value; + const name = document.getElementById('card-name').value; + const bank = document.getElementById('card-bank').value; + const lastDigits = document.getElementById('last-digits').value; + const expiryDate = document.getElementById('expiry-date').value; + const statementDate = document.getElementById('statement-date').value; + + // Get categories + const categories = []; + const categoryInputs = categoriesContainer.querySelectorAll('.category-inputs'); + + categoryInputs.forEach(input => { + const catName = input.querySelector('.category-name').value.trim(); + const catRate = input.querySelector('.category-rate').value.trim(); + const catLimit = input.querySelector('.category-limit').value.trim(); + + // Only add this category if any reward detail is provided + if (!catName && !catRate && !catLimit) { + return; + } + + // Determine payments: Use existing if editing and found, otherwise default to empty array + let payments = []; + const existingCardData = isEditing ? cards.find(c => c.id === cardId) : null; + if (isEditing && existingCardData && existingCardData.categories) { + const existingCategoryData = existingCardData.categories.find(c => c.name === catName); + if (existingCategoryData && Array.isArray(existingCategoryData.payments)) { + payments = existingCategoryData.payments; + } + } + + categories.push({ + name: catName, + rate: catRate ? parseFloat(catRate) : 0, + limit: catLimit ? parseFloat(catLimit) : null, + payments: payments // Ensured to be an array + }); + }); + + // Create card object + const card = { + id: cardId, + name, + bank, + lastDigits, + expiryDate, + statementDate, + categories, // categories array now guaranteed to have .payments arrays + createdAt: new Date().toISOString() + }; + + // Update or add card + const existingCardIndex = isEditing ? cards.findIndex(c => c.id === cardId) : -1; + + if (existingCardIndex !== -1) { + // Preserve archived payments if editing + card.archivedPayments = cards[existingCardIndex].archivedPayments; + cards[existingCardIndex] = card; + } else { + cards.push(card); + } + + // Save and render cards + saveCards(); + renderCards(); + + // Show feedback toast + showToast(isEditing ? 
'Card updated successfully' : 'Card added successfully'); + + // Close modal + closeModal(cardModal); + } + + /** + * Handle payment form submission + */ + function handlePaymentFormSubmit(e) { + e.preventDefault(); + + // Get form values + const cardId = paymentCardId.value; + const categoryName = paymentCategory.value; + const amount = parseFloat(paymentAmount.value); + const date = paymentDate.value; + const note = document.getElementById('payment-note').value; + const paymentId = document.getElementById('payment-id')?.value; + + // Find card and category + const cardIndex = cards.findIndex(c => c.id === cardId); + if (cardIndex === -1) return; + + const categoryIndex = cards[cardIndex].categories.findIndex(c => c.name === categoryName); + if (categoryIndex === -1) return; + + // Check if editing existing payment or adding new one + const payments = cards[cardIndex].categories[categoryIndex].payments; // Assumed to be array + + if (paymentId) { + // Find the payment + const paymentIndex = payments.findIndex(p => p.id === paymentId); + if (paymentIndex !== -1) { + // Update payment + payments[paymentIndex] = { + id: paymentId, + amount, + date, + note, + createdAt: payments[paymentIndex].createdAt, // Keep original creation date + updatedAt: new Date().toISOString() + }; + + // Show toast + showToast('Payment updated successfully'); + } + } else { + // Add new payment + const payment = { + id: generateId(), + amount, + date, + note, + createdAt: new Date().toISOString() + }; + + payments.push(payment); + + // Show toast + showToast(`Payment of $${amount.toFixed(2)} recorded`); + } + + // Save and render cards + saveCards(); + renderCards(); + + // Remove payment ID if it exists + if (document.getElementById('payment-id')) { + document.getElementById('payment-id').remove(); + } + + // Reset modal title + paymentModal.querySelector('.modal-title').textContent = 'Record Payment'; + + // Close modal + closeModal(paymentModal); + } + + /** + * Delete a card + */ + function deleteCard(cardId) { + if (!confirm('Are you sure you want to delete this card?')) return; + + const cardName = cards.find(card => card.id === cardId)?.name || 'Card'; + cards = cards.filter(card => card.id !== cardId); + saveCards(); + renderCards(); + + // Show feedback toast + showToast(`${cardName} has been deleted`); + } + + /** + * Show a toast notification + */ + function showToast(message) { + // Remove existing toast if any + const existingToast = document.querySelector('.toast'); + if (existingToast) { + existingToast.remove(); + } + + // Create toast element + const toast = document.createElement('div'); + toast.className = 'toast'; + toast.textContent = message; + document.body.appendChild(toast); + + // Trigger animation + setTimeout(() => toast.classList.add('show'), 10); + + // Auto remove after 3 seconds + setTimeout(() => { + toast.classList.remove('show'); + setTimeout(() => toast.remove(), 300); + }, 3000); + } + + /** + * Handle search + */ + function handleSearch() { + renderCards(); + } + + /** + * Generate a unique ID + */ + function generateId() { + return Date.now().toString(36) + Math.random().toString(36).substr(2, 5); + } + + /** + * Check for monthly cycle resets + */ + function checkMonthlyResets() { + const today = new Date(); + const lastCheck = localStorage.getItem('lastCycleCheck'); + + // If we haven't checked today + if (!lastCheck || new Date(lastCheck).toDateString() !== today.toDateString()) { + const currentDay = today.getDate(); + + // Check each card for cycle reset + 
cards.forEach(card => { + const cycleDay = parseInt(card.statementDate); + + // If today is the cycle reset day + if (currentDay === cycleDay) { + // Mark payments as from previous cycle + card.categories.forEach(category => { + // Only archive if there are payments + if (category.payments.length > 0) { + // Create an archive if none exists + if (!card.archivedPayments) { + card.archivedPayments = []; + } + + // Archive current cycle payments + const cycleData = { + date: today.toISOString(), + categories: [{ + name: category.name, + rate: category.rate, + payments: [...category.payments] + }] + }; + + // Add to archived payments + card.archivedPayments.push(cycleData); + + // Clear current payments + category.payments = []; + } + }); + } + }); + + // Save changes + saveCards(); + + // Update last check date + localStorage.setItem('lastCycleCheck', today.toISOString()); + } + } +}); \ No newline at end of file diff --git a/develop/js/docs.js b/develop/js/docs.js new file mode 100644 index 0000000..2050af4 --- /dev/null +++ b/develop/js/docs.js @@ -0,0 +1,48 @@ +import { highlightElement } from 'https://unpkg.com/@speed-highlight/core@1.2.7/dist/index.js'; + +document.addEventListener('DOMContentLoaded', () => { + // Initialize Syntax Highlighting + initSyntaxHighlighting(); +}); + +/** + * Initialize syntax highlighting using Speed Highlight JS + * This is a reusable function that applies syntax highlighting to code blocks + */ +async function initSyntaxHighlighting() { + try { + // Get all code blocks + const codeBlocks = document.querySelectorAll('.code-example pre code'); + + codeBlocks.forEach(block => { + // Determine language from code-label + let lang = 'html'; // Default to HTML + const example = block.closest('.code-example'); + if (example) { + const label = example.querySelector('.code-label'); + if (label) { + const labelText = label.textContent.trim().toLowerCase(); + if (labelText === 'css') lang = 'css'; + if (labelText === 'js' || labelText === 'javascript') lang = 'js'; + if (labelText === 'go' || labelText === 'golang') lang = 'go'; + if (labelText === 'json') lang = 'json'; + if (labelText === 'http') lang = 'http'; + } + } + + // Create a new element to hold the highlighted code + const highlighted = document.createElement('div'); + highlighted.className = `shj-lang-${lang}`; + highlighted.textContent = block.textContent; + + // Replace the pre with our new element + const pre = block.parentElement; + pre.parentNode.replaceChild(highlighted, pre); + + // Apply highlighting directly + highlightElement(highlighted, lang); + }); + } catch (error) { + console.warn('Syntax highlighting failed to initialize:', error); + } +} \ No newline at end of file diff --git a/develop/js/lightbox.js b/develop/js/lightbox.js new file mode 100644 index 0000000..51eead0 --- /dev/null +++ b/develop/js/lightbox.js @@ -0,0 +1,427 @@ +/** + * EasyLightbox - A simple, lightweight lightbox for images + */ +(function() { + // Default options + const defaultOptions = { + selector: '.lightbox-img, #flowDiagram', // Images that should trigger lightbox + captionAttribute: 'data-caption', // Attribute to retrieve caption from + zoomable: true, // Whether to enable zoom controls + maxZoom: 300, // Maximum zoom percentage + minZoom: 100, // Minimum zoom percentage + closeOnEsc: true, // Close on escape key + closeOnOutsideClick: true // Close when clicking outside image + }; + + // Create global object + window.EasyLightbox = { + options: { ...defaultOptions }, + + // Initialize with custom options + 
init: function(customOptions = {}) { + // Merge default options with custom options + this.options = { ...defaultOptions, ...customOptions }; + + // Create lightbox container if it doesn't exist + this._createLightbox(); + + // Initialize listeners for all matching elements + this._initImageListeners(); + + return this; + }, + + // Create the lightbox HTML structure if it doesn't exist + _createLightbox: function() { + // Check if lightbox already exists + if (document.getElementById("imageLightbox")) { + return; + } + + // Create lightbox container + const lightbox = document.createElement("div"); + lightbox.id = "imageLightbox"; + lightbox.className = "lightbox"; + + // Create lightbox content with simplified HTML + lightbox.innerHTML = ` + + `; + + // Add lightbox CSS link if not already present + if (!document.getElementById("lightbox-styles")) { + const link = document.createElement("link"); + link.id = "lightbox-styles"; + link.rel = "stylesheet"; + link.href = "/css/lightbox.css"; + document.head.appendChild(link); + } + + // Add to document + document.body.appendChild(lightbox); + + // Cache DOM elements + this.elements = { + lightbox: lightbox, + lightboxImg: document.getElementById("lightboxImg"), + lightboxCaption: document.getElementById("lightboxCaption"), + lightboxClose: document.getElementById("lightboxClose"), + zoomSlider: document.getElementById("zoomSlider"), + zoomValue: document.getElementById("zoomValue") + }; + + // Initialize event handlers inside the lightbox + this._initLightboxHandlers(); + }, + + // Initialize listeners for images that should open the lightbox + _initImageListeners: function() { + const images = document.querySelectorAll(this.options.selector); + const self = this; + + images.forEach(img => { + // *** FIX: Skip the actual lightbox image itself *** + if (img.id === "lightboxImg") return; + + // Skip if already initialized + if (img.dataset.lightboxInitialized) return; + + img.dataset.lightboxInitialized = "true"; + img.style.cursor = "pointer"; + + img.addEventListener("click", function() { + let caption = this.getAttribute(self.options.captionAttribute); + if (this.id === "flowDiagram" || !caption) { + caption = "Basic POW Flow Diagram"; + } + self.open(this, caption); + }); + }); + + // Special handling for flowDiagram if not caught by selector + const flowDiagram = document.getElementById("flowDiagram"); + if (flowDiagram && !flowDiagram.dataset.lightboxInitialized) { + flowDiagram.dataset.lightboxInitialized = "true"; + flowDiagram.style.cursor = "pointer"; + flowDiagram.addEventListener("click", function() { + self.open(this, "Basic POW Flow Diagram"); + }); + } + }, + + // Initialize lightbox event handlers for zooming, closing, etc. 
+ _initLightboxHandlers: function() { + const self = this; + const elements = this.elements; + + let isDragging = false; + let startX, startY, startPanX, startPanY; + let panX = 0, + panY = 0; + + // Add zoom slider handler + if (this.options.zoomable && elements.zoomSlider) { + elements.zoomSlider.addEventListener("input", function() { + const value = this.value; + elements.zoomValue.textContent = value + "%"; + updateTransform(); + }); + } + + // Add close button handler + if (elements.lightboxClose) { + elements.lightboxClose.addEventListener("click", function(e) { + e.preventDefault(); + e.stopPropagation(); + self.close(); + }); + } + + // Add outside click handler + if (this.options.closeOnOutsideClick) { + elements.lightbox.addEventListener("click", function(e) { + if (e.target === elements.lightbox) { + self.close(); + } + }); + } + + // Add escape key handler + if (this.options.closeOnEsc) { + document.addEventListener("keydown", function(e) { + if (e.key === "Escape" && elements.lightbox.classList.contains("active")) { + self.close(); + } + }); + } + + // Add drag handlers for panning when zoomed + if (elements.lightboxImg) { + elements.lightboxImg.addEventListener("mousedown", startDrag); + elements.lightboxImg.addEventListener("touchstart", startDrag); + } + + function startDrag(e) { + // Only allow dragging when zoomed in + if (!self.options.zoomable || parseInt(elements.zoomSlider.value) <= 100) + return; + + e.preventDefault(); + + if (e.type === "touchstart") { + startX = e.touches[0].clientX; + startY = e.touches[0].clientY; + } else { + startX = e.clientX; + startY = e.clientY; + } + + startPanX = panX; + startPanY = panY; + isDragging = true; + + elements.lightboxImg.classList.add("grabbing"); + + document.addEventListener("mousemove", doDrag); + document.addEventListener("touchmove", doDrag); + document.addEventListener("mouseup", stopDrag); + document.addEventListener("touchend", stopDrag); + document.addEventListener("mouseleave", stopDrag); + } + + function doDrag(e) { + if (!isDragging) return; + + // Prevent default scroll/zoom behavior on touch devices + e.preventDefault(); + + let clientX, clientY; + if (e.type === "touchmove") { + // Ensure there's a touch point + if (e.touches.length === 0) return; + clientX = e.touches[0].clientX; + clientY = e.touches[0].clientY; + } else { + clientX = e.clientX; + clientY = e.clientY; + } + + const deltaX = clientX - startX; + const deltaY = clientY - startY; + + panX = startPanX + deltaX; + panY = startPanY + deltaY; + + // Apply the transform immediately for live dragging + updateTransform(); + } + + function stopDrag() { + if (!isDragging) return; + + isDragging = false; + elements.lightboxImg.classList.remove("grabbing"); + + document.removeEventListener("mousemove", doDrag); + document.removeEventListener("touchmove", doDrag); + document.removeEventListener("mouseup", stopDrag); + document.removeEventListener("touchend", stopDrag); + document.removeEventListener("mouseleave", stopDrag); + } + + function updateTransform() { + if (!self.options.zoomable) return; + const scale = parseInt(elements.zoomSlider.value) / 100; + elements.lightboxImg.style.transform = `scale(${scale}) translate(${ + panX / scale + }px, ${panY / scale}px)`; + } + + // Prevent scrolling on mobile when interacting with the lightbox + const isMobile = window.matchMedia( + "(max-width: 768px), (max-width: 1024px) and (orientation: landscape)" + ).matches; + if (isMobile && elements.lightboxImg) { + elements.lightboxImg.addEventListener("touchmove", 
function(e) { + if (e.touches.length > 1) { + e.preventDefault(); + } + }); + } + }, + + // Open the lightbox with a specific image + open: async function(imageElement, caption) { + if (!imageElement || !this.elements) return; + + const elements = this.elements; + let panX = 0, panY = 0; + + // Remove any previous SVG + if (elements.lightboxImg && elements.lightboxImg.parentNode) { + elements.lightboxImg.style.display = ''; + const prevSvg = elements.lightboxImg.parentNode.querySelector('svg.injected-svg'); + if (prevSvg) prevSvg.remove(); + } + + const src = imageElement.src || imageElement.getAttribute("data-fullsize") || ""; + const isSVG = src.toLowerCase().endsWith('.svg'); + + // Helper for zoom slider value + function getZoom() { + return elements.zoomSlider ? parseInt(elements.zoomSlider.value) / 100 : 1; + } + + // Helper to update SVG transform + function updateSVGTransform(svg, svgPanX, svgPanY, scale) { + svg.style.transform = `scale(${scale}) translate(${svgPanX/scale}px, ${svgPanY/scale}px)`; + } + + if (isSVG) { + elements.lightboxImg.style.display = 'none'; + try { + const resp = await fetch(src); + let svgText = await resp.text(); + const tempDiv = document.createElement('div'); + tempDiv.innerHTML = svgText; + const svg = tempDiv.querySelector('svg'); + if (svg) { + svg.classList.add('injected-svg'); + svg.style.transformOrigin = 'center center'; + svg.style.maxWidth = '100%'; + svg.style.maxHeight = '100%'; + svg.style.display = 'block'; + svg.style.cursor = 'grab'; + svg.style.userSelect = 'none'; + svg.removeAttribute('width'); + svg.removeAttribute('height'); + elements.lightboxImg.parentNode.appendChild(svg); + + // Set default zoom to 1.0x (100%) + if (elements.zoomSlider) { + elements.zoomSlider.value = 100; + elements.zoomValue.textContent = '100%'; + } + let svgPanX = 0, svgPanY = 0; + let isDragging = false, startX, startY, startPanX = 0, startPanY = 0; + let currentScale = getZoom(); + updateSVGTransform(svg, svgPanX, svgPanY, currentScale); + + // Drag logic for SVG + svg.addEventListener('mousedown', startDrag); + svg.addEventListener('touchstart', startDrag); + function startDrag(e) { + e.preventDefault(); + isDragging = true; + svg.classList.add('grabbing'); + if (e.type === 'touchstart') { + startX = e.touches[0].clientX; + startY = e.touches[0].clientY; + } else { + startX = e.clientX; + startY = e.clientY; + } + startPanX = svgPanX; + startPanY = svgPanY; + document.addEventListener('mousemove', doDrag); + document.addEventListener('touchmove', doDrag); + document.addEventListener('mouseup', stopDrag); + document.addEventListener('touchend', stopDrag); + document.addEventListener('mouseleave', stopDrag); + } + function doDrag(e) { + if (!isDragging) return; + e.preventDefault(); + let clientX, clientY; + if (e.type === 'touchmove') { + if (e.touches.length === 0) return; + clientX = e.touches[0].clientX; + clientY = e.touches[0].clientY; + } else { + clientX = e.clientX; + clientY = e.clientY; + } + const deltaX = clientX - startX; + const deltaY = clientY - startY; + svgPanX = startPanX + deltaX; + svgPanY = startPanY + deltaY; + updateSVGTransform(svg, svgPanX, svgPanY, getZoom()); + } + function stopDrag() { + if (!isDragging) return; + isDragging = false; + svg.classList.remove('grabbing'); + document.removeEventListener('mousemove', doDrag); + document.removeEventListener('touchmove', doDrag); + document.removeEventListener('mouseup', stopDrag); + document.removeEventListener('touchend', stopDrag); + document.removeEventListener('mouseleave', 
stopDrag); + } + // Zoom slider controls SVG scale + if (elements.zoomSlider) { + elements.zoomSlider.oninput = function() { + currentScale = getZoom(); + elements.zoomValue.textContent = Math.round(currentScale * 100) + '%'; + updateSVGTransform(svg, svgPanX, svgPanY, currentScale); + }; + } + } + } catch (e) { + elements.lightboxImg.style.display = ''; + } + } else { + elements.lightboxImg.src = src; + elements.lightboxImg.style.display = ''; + elements.lightboxImg.style.transform = "scale(1) translate(0px, 0px)"; + if (this.options.zoomable && elements.zoomSlider) { + elements.zoomSlider.value = 100; + elements.zoomValue.textContent = "100%"; + } + } + + const captionText = + caption || + imageElement.getAttribute(this.options.captionAttribute) || + imageElement.alt || + imageElement.getAttribute("title") || + ""; + elements.lightboxCaption.textContent = captionText; + + elements.lightbox.classList.add("active"); + document.body.style.overflow = "hidden"; + }, + + // Close the lightbox + close: function() { + if (!this.elements) return; + + this.elements.lightbox.classList.remove("active"); + document.body.style.overflow = ""; + } + }; + + // Auto-initialize on load + if (document.readyState === "loading") { + document.addEventListener("DOMContentLoaded", function() { + window.EasyLightbox.init(); + }); + } else { + window.EasyLightbox.init(); + } +})(); \ No newline at end of file diff --git a/develop/js/lv.js b/develop/js/lv.js new file mode 100644 index 0000000..6a068fd --- /dev/null +++ b/develop/js/lv.js @@ -0,0 +1,949 @@ +// Shared resources +const THUMBNAIL_CACHE = new Map(); +const THUMBNAIL_REGISTRY = new Map(); +const VIDEO_SERVICES = new Map(); + +// Common constants +const DEFAULT_ALLOW = "accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; fullscreen; web-share"; +const DEFAULT_SANDBOX = "allow-scripts allow-same-origin allow-popups allow-forms allow-presentation"; + +// Efficient image checking with modern fetch API +async function checkImage(url) { + if (THUMBNAIL_CACHE.has(url)) return THUMBNAIL_CACHE.get(url); + + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), 2000); + + try { + const response = await fetch(url, { method: 'HEAD', signal: controller.signal }); + clearTimeout(timeoutId); + const valid = response.ok; + THUMBNAIL_CACHE.set(url, valid); + return valid; + } catch { + clearTimeout(timeoutId); + THUMBNAIL_CACHE.set(url, false); + return false; + } +} + +// Helper for parsing URLs safely +function parseUrl(url) { + try { return new URL(url); } catch { return null; } +} + +/** + * Service Provider base class - each video service extends this + */ +class VideoServiceProvider { + constructor() { + this.name = 'generic'; + } + + canHandle(url) { return false; } + getVideoId(url) { return null; } + getEmbedUrl(videoId, params, element) { return ''; } + + getThumbnailUrls(videoId, quality, element) { + const customThumbnail = element.getAttribute("thumbnail"); + return customThumbnail ? 
[customThumbnail] : []; + } + + parseParams() { return {}; } + + getIframeAttributes(element) { + return { + frameborder: element.getAttribute("frameborder") || "0", + allow: element.getAttribute("allow") || DEFAULT_ALLOW, + sandbox: element.getAttribute("sandbox") || DEFAULT_SANDBOX + }; + } + + getDefaults() { return { autoload: false }; } +} + +/** + * YouTube service provider + */ +class YouTubeProvider extends VideoServiceProvider { + constructor() { + super(); + this.name = 'youtube'; + this.THUMBNAIL_QUALITIES = { + maxres: 'maxresdefault.jpg', + sd: 'sddefault.jpg', + hq: 'hqdefault.jpg', + mq: 'mqdefault.jpg', + default: 'default.jpg' + }; + this.URL_PATTERNS = [ + /youtube\.com\/embed\/([a-zA-Z0-9_-]{11})/, + /youtube\.com\/watch\?v=([a-zA-Z0-9_-]{11})/, + /youtu\.be\/([a-zA-Z0-9_-]{11})/ + ]; + } + + canHandle(url) { + return url && /youtube\.com|youtu\.be/.test(url); + } + + getVideoId(url) { + if (!url) return null; + + const parsedUrl = parseUrl(url); + if (parsedUrl) { + // Path-based ID extraction (/embed/ID or youtu.be/ID) + if (parsedUrl.pathname.startsWith("/embed/") || parsedUrl.hostname === "youtu.be") { + const parts = parsedUrl.pathname.split("/"); + return parts[parts.length > 2 ? 2 : 1]; + } + + // Query-based ID extraction (?v=ID) + const videoId = parsedUrl.searchParams.get("v"); + if (videoId) return videoId; + } + + // Fallback to regex matching + for (const pattern of this.URL_PATTERNS) { + const match = url.match(pattern); + if (match?.[1]) return match[1]; + } + + return null; + } + + getEmbedUrl(videoId, params = {}, element) { + // Determine domain based on cookie preference + const useNoCookie = element.getAttribute("no-cookie") !== "false"; + const domain = useNoCookie ? "youtube-nocookie.com" : "youtube.com"; + + // Build URL with parameters + let url = `https://www.${domain}/embed/${videoId}?autoplay=1`; + + // Add parameters + for (const [key, value] of Object.entries(params)) { + if (key !== 'autoplay' && key && value) { + url += `&${key}=${encodeURIComponent(value)}`; + } + } + + return url; + } + + getThumbnailUrls(videoId, quality, element) { + // Check for custom thumbnail first + const customThumbnail = element.getAttribute("thumbnail"); + if (customThumbnail) return [customThumbnail]; + + const baseUrl = `https://img.youtube.com/vi/${videoId}`; + const urls = []; + + // Choose quality based on device and user preference + if (quality && this.THUMBNAIL_QUALITIES[quality]) { + urls.push(`${baseUrl}/${this.THUMBNAIL_QUALITIES[quality]}`); + } else if (window.matchMedia("(max-width: 767px)").matches) { + urls.push(`${baseUrl}/${this.THUMBNAIL_QUALITIES.hq}`); + } else { + urls.push(`${baseUrl}/${this.THUMBNAIL_QUALITIES.maxres}`); + } + + // Only add fallbacks if they're different from what we already have + if (!urls.includes(`${baseUrl}/${this.THUMBNAIL_QUALITIES.hq}`)) { + urls.push(`${baseUrl}/${this.THUMBNAIL_QUALITIES.hq}`); + } + + if (!urls.includes(`${baseUrl}/${this.THUMBNAIL_QUALITIES.default}`)) { + urls.push(`${baseUrl}/${this.THUMBNAIL_QUALITIES.default}`); + } + + return urls; + } + + parseParams(url) { + const params = {}; + const parsedUrl = parseUrl(url); + if (!parsedUrl) return params; + + // Extract parameters from URL + for (const [key, value] of parsedUrl.searchParams.entries()) { + params[key] = value; + } + + // Handle YouTube-specific parameters + if (params.t || params.start) params.start = params.t || params.start; + if (params.list) params.playlist = params.list; + + return params; + } +} + +/** + * Bitchute service 
provider + */ +class BitchuteProvider extends VideoServiceProvider { + constructor() { + super(); + this.name = 'bitchute'; + this.URL_PATTERNS = [ + /bitchute\.com\/video\/([a-zA-Z0-9_-]+)/, + /bitchute\.com\/embed\/([a-zA-Z0-9_-]+)/ + ]; + } + + canHandle(url) { + return url && /bitchute\.com/.test(url); + } + + getVideoId(url) { + if (!url) return null; + + const parsedUrl = parseUrl(url); + if (parsedUrl) { + // Extract from path segments + const segments = parsedUrl.pathname.split('/').filter(Boolean); + for (let i = 0; i < segments.length - 1; i++) { + if ((segments[i] === "embed" || segments[i] === "video") && i + 1 < segments.length) { + return segments[i + 1]; + } + } + } + + // Fallback to regex matching + for (const pattern of this.URL_PATTERNS) { + const match = url.match(pattern); + if (match?.[1]) return match[1]; + } + + return null; + } + + getEmbedUrl(videoId) { + return `https://www.bitchute.com/embed/${videoId}/`; + } + + // Use parent class implementations for other methods + + getDefaults() { + return { autoload: true }; + } +} + +// Register service providers +VIDEO_SERVICES.set('youtube', new YouTubeProvider()); +VIDEO_SERVICES.set('bitchute', new BitchuteProvider()); + +class LazyVideo extends HTMLElement { + // Observable attributes + static get observedAttributes() { + return [ + "src", "title", "width", "height", "thumbnail-quality", + "no-cookie", "autoload", "frameborder", "allow", "loading", + "hide-title", "thumbnail", "service", "align", "container-fit" + ]; + } + + // CSS styles definition + static get styles() { + return ` + :host { + --lv-aspect-ratio: 16 / 9; + display: var(--lv-display, block); + position: var(--lv-position, relative); + width: var(--lv-width, 100%); + max-width: var(--lv-max-width, 560px); + aspect-ratio: var(--lv-aspect-ratio); + background: var(--lv-background, #000); + overflow: var(--lv-overflow, hidden); + border-radius: var(--lv-border-radius, 0); + margin: var(--lv-margin, 0 auto); + } + + :host([container-fit]) { + max-width: 100% !important; + max-height: auto !important; + width: 100%; + margin: 0; + } + + /* Alignment control through attribute */ + :host([align="left"]) { margin: var(--lv-margin-left, 0); } + :host([align="right"]) { margin: var(--lv-margin-right, 0 0 0 auto); } + :host([align="center"]) { margin: var(--lv-margin-center, 0 auto); } + + /* Alignment classes for CSS variable-based alignment */ + :host(.lv-align-left) { margin: var(--lv-margin-left, 0); } + :host(.lv-align-right) { margin: var(--lv-margin-right, 0 0 0 auto); } + :host(.lv-align-center) { margin: var(--lv-margin-center, 0 auto); } + + :host([hide-title]), :host(:where(:not([hide-title]))) { + --lv-show-title: var(--lv-show-title, 1); + } + + :host([hide-title]) [part="title-bar"] { + display: none; + } + :host([style*="height"]) { aspect-ratio: auto; } + + [part="placeholder"], [part="iframe"] { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + border: none; + } + + [part="placeholder"] { + cursor: pointer; + background: var(--lv-placeholder-bg, #000); + } + + [part="placeholder"]:focus { + outline: var(--lv-focus-outline, 2px solid #4285F4); + outline-offset: var(--lv-focus-outline-offset, 2px); + } + + [part="thumbnail"] { + width: 100%; + height: 100%; + object-fit: var(--lv-thumbnail-object-fit, cover); + opacity: var(--lv-thumbnail-opacity, 0.85); + } + + [part="placeholder"]:hover [part="thumbnail"], + [part="placeholder"]:focus [part="thumbnail"] { + opacity: var(--lv-thumbnail-hover-opacity, 1); + } + + 
[part="title-bar"] { + position: absolute; + top: 0; + left: 0; + width: 100%; + padding: var(--lv-title-padding, 10px 12px); + background: var(--lv-title-bg, rgba(0, 0, 0, 0.75)); + color: var(--lv-title-color, white); + font-family: var(--lv-title-font-family, Roboto, Arial, sans-serif); + font-size: var(--lv-title-font-size, 18px); + font-weight: var(--lv-title-font-weight, 500); + line-height: var(--lv-title-line-height, 1.2); + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; + z-index: 2; + box-sizing: border-box; + display: var(--lv-show-title, block); + } + + [part="play-button"] { + position: absolute; + top: 50%; + left: 50%; + transform: translate(-50%, -50%); + width: var(--lv-play-button-width, 68px); + height: var(--lv-play-button-height, 48px); + background: var(--lv-play-button-bg, rgba(33, 33, 33, 0.8)); + border-radius: var(--lv-play-button-radius, 8px); + } + + [part="play-button"]::after { + content: ''; + position: absolute; + top: 50%; + left: 55%; + transform: translate(-50%, -50%); + border-style: solid; + border-width: var(--lv-play-button-arrow-size, 12px 0 12px 20px); + border-color: transparent transparent transparent var(--lv-play-button-color, rgba(255, 255, 255, 0.9)); + } + + [part="placeholder"]:hover [part="play-button"] { + background: var(--lv-play-button-bg-hover, rgba(230, 33, 23, 1)); + } + + [part="timestamp"] { + position: absolute; + right: var(--lv-timestamp-right, 10px); + bottom: var(--lv-timestamp-bottom, 10px); + background: var(--lv-timestamp-bg, rgba(0, 0, 0, 0.7)); + color: var(--lv-timestamp-color, white); + padding: var(--lv-timestamp-padding, 2px 6px); + border-radius: var(--lv-timestamp-radius, 3px); + font-size: var(--lv-timestamp-font-size, 12px); + font-family: var(--lv-timestamp-font-family, system-ui, sans-serif); + } + + [part="iframe"] { + opacity: 0; + animation: fadeIn 0.3s ease forwards; + } + + @keyframes fadeIn { to { opacity: 1; } } + + [part="loading"], [part="fallback-thumbnail"] { + display: flex; + align-items: center; + justify-content: center; + position: absolute; + width: 100%; + height: 100%; + } + + [part="loading"] { + background: var(--lv-loading-bg, rgba(0,0,0,0.7)); + color: var(--lv-loading-color, white); + font-family: var(--lv-loading-font-family, system-ui, sans-serif); + } + + [part="fallback-thumbnail"] { + background: var(--lv-fallback-bg, #1a1a1a); + color: var(--lv-fallback-color, white); + font-family: var(--lv-fallback-font-family, system-ui, sans-serif); + font-size: var(--lv-fallback-font-size, 14px); + } + `; + } + + constructor() { + super(); + this.attachShadow({ mode: "open" }); + this._loaded = false; + this._placeholder = null; + this._observer = null; + this._handlers = new Map(); + this._videoService = null; + } + + connectedCallback() { + if (!this.isConnected) return; + + if (!this._loaded && !this._placeholder) { + this._createPlaceholder(); + } + + // Setup autoloading if needed + if (this._getServiceOption('autoload')) { + this._setupObserver(); + } + + // Check for alignment from CSS variables + this._updateAlignmentFromCSS(); + + // Set up mutation observer for style changes + this._setupStyleObserver(); + } + + disconnectedCallback() { + this._cleanupObserver(); + this._cleanupEventHandlers(); + + // Clean up style observer + if (this._styleObserver) { + this._styleObserver.disconnect(); + this._styleObserver = null; + } + + // Cancel any animation frames + if (this._styleFrameId) { + cancelAnimationFrame(this._styleFrameId); + } + } + + 
attributeChangedCallback(name, oldValue, newValue) { + if (!this.isConnected) return; + + switch (name) { + case "src": + if (oldValue !== newValue && newValue !== null) { + this._loaded = false; + this._createPlaceholder(); + } + break; + case "width": + case "height": + this._updateStyles(); + break; + case "autoload": + newValue === "true" || newValue === "" ? this._setupObserver() : this._cleanupObserver(); + break; + case "thumbnail": + if (oldValue !== newValue) { + this._updateThumbnail(); + } + break; + case "service": + if (oldValue !== newValue) { + this._loaded = false; + this._createPlaceholder(); + } + break; + } + } + + _getServiceProvider(url) { + // Check for explicit service attribute first + const serviceName = this.getAttribute("service"); + if (serviceName && VIDEO_SERVICES.has(serviceName)) { + return VIDEO_SERVICES.get(serviceName); + } + + // Auto-detect from URL + if (url) { + for (const provider of VIDEO_SERVICES.values()) { + if (provider.canHandle(url)) { + return provider; + } + } + } + + // Default to YouTube if nothing else matches + return VIDEO_SERVICES.get('youtube'); + } + + _getServiceOption(option) { + // First check if attribute exists + if (this.hasAttribute(option)) { + const value = this.getAttribute(option); + // Handle boolean attributes + return value === "" || value === "true" || value !== "false"; + } + + // Then check service defaults + if (this._videoService?.getDefaults()[option] !== undefined) { + return this._videoService.getDefaults()[option]; + } + + return false; + } + + _cleanupObserver() { + if (this._observer) { + this._observer.disconnect(); + this._observer = null; + } + } + + _cleanupEventHandlers() { + this._handlers.forEach((handler, key) => { + const [element, event] = key.split('|'); + if (element && element.removeEventListener) { + element.removeEventListener(event, handler); + } + }); + this._handlers.clear(); + } + + _setupObserver() { + if (!window.IntersectionObserver) return; + + this._cleanupObserver(); + + this._observer = new IntersectionObserver(entries => { + if (entries[0].isIntersecting && !this._loaded) { + this._loadVideo(); + this._cleanupObserver(); + } + }, { + rootMargin: "300px", + threshold: 0.1 + }); + + this._observer.observe(this); + } + + _updateThumbnail() { + const img = this._placeholder?.querySelector('[part="thumbnail"]'); + if (!img) return; + + const customThumbnail = this.getAttribute("thumbnail"); + if (customThumbnail) { + img.src = customThumbnail; + // Remove any fallback thumbnail if present + const fallback = this._placeholder.querySelector('[part="fallback-thumbnail"]'); + if (fallback) fallback.remove(); + return; + } + + // Get service thumbnails + const videoId = this._placeholder.dataset.videoId; + if (videoId && this._videoService) { + const thumbnailQuality = this.getAttribute("thumbnail-quality"); + const thumbnailUrls = this._videoService.getThumbnailUrls(videoId, thumbnailQuality, this); + + if (thumbnailUrls.length > 0) { + this._loadThumbnail(thumbnailUrls, img); + } else { + this._createFallbackThumbnail(); + } + } + } + + _createFallbackThumbnail() { + if (!this._placeholder || this._placeholder.querySelector('[part="fallback-thumbnail"]')) { + return; // Already exists or no placeholder + } + + const fallback = document.createElement('div'); + fallback.setAttribute('part', 'fallback-thumbnail'); + + // Service-specific branding + if (this._videoService) { + const serviceName = this._videoService.name; + fallback.innerHTML = ` +
+
${serviceName.charAt(0).toUpperCase() + serviceName.slice(1)}
+
Click to play video
+
+ `; + } else { + fallback.textContent = 'No thumbnail available'; + } + + this._placeholder.appendChild(fallback); + } + + async _createPlaceholder() { + const src = this.getAttribute("src"); + + // Determine service provider & video ID + this._videoService = this._getServiceProvider(src); + const videoId = this._videoService?.getVideoId(src); + + if (!videoId) { + this.shadowRoot.innerHTML = ` + +

Error: Can't find video ID. Check the 'src' attribute.

+ `; + return; + } + + // Get parameters and create elements + this._videoParams = this._videoService.parseParams(src); + const title = this.getAttribute("title") || "Video"; + + // Build Shadow DOM + const style = document.createElement("style"); + style.textContent = LazyVideo.styles; + + const placeholder = this._buildPlaceholder(videoId, title); + + this.shadowRoot.innerHTML = ''; + this.shadowRoot.append(style, placeholder); + this._updateStyles(); + } + + _buildPlaceholder(videoId, title) { + // Create placeholder container + const placeholder = document.createElement("div"); + placeholder.setAttribute("part", "placeholder"); + placeholder.setAttribute("role", "button"); + placeholder.setAttribute("aria-label", `Play: ${title}`); + placeholder.setAttribute("tabindex", "0"); + placeholder.dataset.videoId = videoId; + placeholder.dataset.service = this._videoService.name; + this._placeholder = placeholder; + + // Create thumbnail image + const thumbnailQuality = this.getAttribute("thumbnail-quality"); + const thumbnailUrls = this._videoService.getThumbnailUrls(videoId, thumbnailQuality, this); + + // Add thumbnail image + const img = document.createElement("img"); + img.setAttribute("part", "thumbnail"); + img.alt = `Thumbnail for ${title}`; + img.loading = "lazy"; + img.decoding = "async"; + img.fetchPriority = "low"; + img.style.backgroundColor = "#111"; + placeholder.appendChild(img); + + // Start thumbnail loading process + if (thumbnailUrls.length > 0) { + this._setupThumbnailObserver(img, thumbnailUrls); + } else { + this._createFallbackThumbnail(); + } + + // Add title bar if not disabled + if (!this.hasAttribute("hide-title")) { + const titleBar = document.createElement("div"); + titleBar.setAttribute("part", "title-bar"); + titleBar.textContent = title; + placeholder.appendChild(titleBar); + } + + // Add play button + const playButton = document.createElement("div"); + playButton.setAttribute("part", "play-button"); + placeholder.appendChild(playButton); + + // Add timestamp if present in params + const startTime = parseInt(this._videoParams.start || this._videoParams.t, 10); + if (!isNaN(startTime) && startTime > 0) { + const timestamp = document.createElement("div"); + timestamp.setAttribute("part", "timestamp"); + timestamp.textContent = this._formatTime(startTime); + placeholder.appendChild(timestamp); + } + + // Set up interaction handlers + const handleInteraction = (e) => { + if (e.type === "click" || e.key === "Enter" || e.key === " ") { + if (e.type !== "click") e.preventDefault(); + this._loadVideo(); + } + }; + + placeholder.addEventListener("click", handleInteraction); + placeholder.addEventListener("keydown", handleInteraction); + + // Track handlers for cleanup + this._handlers.set(`${placeholder}|click`, handleInteraction); + this._handlers.set(`${placeholder}|keydown`, handleInteraction); + + return placeholder; + } + + _setupThumbnailObserver(imgElement, urls) { + if (!window.IntersectionObserver) { + this._loadThumbnail(urls, imgElement); + return; + } + + this._thumbnailLoadAttempted = false; + + const observer = new IntersectionObserver(async (entries) => { + if (entries[0].isIntersecting && !this._thumbnailLoadAttempted) { + this._thumbnailLoadAttempted = true; + + try { + await this._loadThumbnail(urls, imgElement); + } catch { + this._thumbnailLoadAttempted = false; + } finally { + observer.disconnect(); + } + } + }, { + rootMargin: "300px", + threshold: 0.1 + }); + + observer.observe(imgElement); + } + + async _loadThumbnail(urls, imgElement) { + // 
Custom thumbnails bypass validation + if (urls.length === 1 && urls[0] === this.getAttribute("thumbnail")) { + imgElement.src = urls[0]; + return true; + } + + // Cache key for shared thumbnails + const videoId = this._placeholder?.dataset?.videoId; + const service = this._placeholder?.dataset?.service; + const cacheKey = videoId && service ? `${service}:${videoId}` : null; + + // Try to use cached result + if (cacheKey && THUMBNAIL_REGISTRY.has(cacheKey)) { + try { + const bestUrl = await THUMBNAIL_REGISTRY.get(cacheKey); + if (bestUrl) { + imgElement.src = bestUrl; + return true; + } + } catch { + THUMBNAIL_REGISTRY.delete(cacheKey); + } + } + + // Find best thumbnail + let bestUrl = null; + + // Try parallel loading first + try { + const results = await Promise.all( + urls.map(url => checkImage(url) + .then(valid => ({ url, valid })) + .catch(() => ({ valid: false })) + ) + ); + + const bestResult = results.find(result => result.valid); + if (bestResult) bestUrl = bestResult.url; + } catch { + // Try sequential loading if parallel fails + for (const url of urls) { + try { + if (await checkImage(url)) { + bestUrl = url; + break; + } + } catch {} + } + } + + // Set the best URL or create fallback + if (bestUrl) { + imgElement.src = bestUrl; + if (cacheKey) THUMBNAIL_REGISTRY.set(cacheKey, Promise.resolve(bestUrl)); + return true; + } else { + this._createFallbackThumbnail(); + if (cacheKey) THUMBNAIL_REGISTRY.set(cacheKey, Promise.resolve(null)); + return false; + } + } + + _formatTime(seconds) { + const hours = Math.floor(seconds / 3600); + const minutes = Math.floor((seconds % 3600) / 60); + const secs = seconds % 60; + + return hours > 0 + ? `${hours}:${minutes.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}` + : `${minutes}:${secs.toString().padStart(2, "0")}`; + } + + _updateStyles() { + const width = this.getAttribute("width"); + const height = this.getAttribute("height"); + + // Helper to check if a value already includes a CSS unit + const hasUnit = (value) => value && /[a-z%$]/.test(value); + + if (width) { + this.style.setProperty("width", hasUnit(width) ? width : `${width}px`); + } else { + this.style.removeProperty("width"); + } + + if (height) { + this.style.setProperty("height", hasUnit(height) ? 
height : `${height}px`); + } else { + this.style.removeProperty("height"); + } + + // For aspect ratio, use numeric values from width/height if possible + if (width && height) { + const numericWidth = parseFloat(width); + const numericHeight = parseFloat(height); + + if (!isNaN(numericWidth) && !isNaN(numericHeight)) { + this.style.setProperty("--lv-aspect-ratio", `${numericWidth} / ${numericHeight}`); + } + } + } + + _loadVideo() { + if (this._loaded || !this._placeholder) return; + + // Create loading indicator + const loading = document.createElement("div"); + loading.setAttribute("part", "loading"); + loading.textContent = "Loading..."; + this.shadowRoot.appendChild(loading); + + const videoId = this._placeholder.dataset.videoId; + const title = this.getAttribute("title") || "Video"; + + // Get the service if not already set + if (!this._videoService) { + const serviceName = this._placeholder.dataset.service; + this._videoService = VIDEO_SERVICES.get(serviceName) || VIDEO_SERVICES.get('youtube'); + } + + // Get embed URL and create iframe + const url = this._videoService.getEmbedUrl(videoId, this._videoParams, this); + + // Create iframe + const iframe = document.createElement("iframe"); + iframe.setAttribute("part", "iframe"); + iframe.loading = "lazy"; + iframe.src = url; + iframe.title = title; + + // Add credentialless attribute for enhanced security + iframe.setAttribute("credentialless", ""); + + // Add service-specific attributes + const iframeAttrs = this._videoService.getIframeAttributes(this); + for (const [name, value] of Object.entries(iframeAttrs)) { + iframe.setAttribute(name, value); + } + + // Handle loading indicator removal + const loadHandler = () => loading.parentNode?.removeChild(loading); + iframe.addEventListener("load", loadHandler, { once: true }); + this._handlers.set(`${iframe}|load`, loadHandler); + + // Replace placeholder with iframe + this._placeholder.replaceWith(iframe); + this._loaded = true; + this._placeholder = null; + + // Notify that video is loaded + this.dispatchEvent(new CustomEvent("video-loaded", { + bubbles: true, + detail: { videoId, service: this._videoService.name } + })); + } + + _setupStyleObserver() { + if (this._styleObserver) return; + + // Create a MutationObserver to watch for style attribute changes + this._styleObserver = new MutationObserver(() => { + this._updateAlignmentFromCSS(); + }); + + this._styleObserver.observe(this, { + attributes: true, + attributeFilter: ['style'] + }); + + // Also observe document/body style changes that might affect CSS variables + if (window.getComputedStyle) { + // Use requestAnimationFrame to limit performance impact + let frameId; + const checkStyles = () => { + frameId = requestAnimationFrame(() => { + this._updateAlignmentFromCSS(); + frameId = requestAnimationFrame(checkStyles); + }); + }; + + checkStyles(); + + // Store the frame ID for cleanup + this._styleFrameId = frameId; + } + } + + _updateAlignmentFromCSS() { + if (this.hasAttribute('container-fit')) return; + + // Get computed style + const computedStyle = window.getComputedStyle(this); + const alignValue = computedStyle.getPropertyValue('--lv-align').trim(); + + // Remove existing alignment classes + this.classList.remove('lv-align-left', 'lv-align-right', 'lv-align-center'); + + // Add appropriate class based on the CSS variable + if (alignValue === 'left') { + this.classList.add('lv-align-left'); + } else if (alignValue === 'right') { + this.classList.add('lv-align-right'); + } else if (alignValue === 'center') { + 
this.classList.add('lv-align-center'); + } + } +} + +// Register the component +if (document.readyState === "loading") { + document.addEventListener("DOMContentLoaded", () => customElements.define("lazy-video", LazyVideo)); +} else { + customElements.define("lazy-video", LazyVideo); +} \ No newline at end of file diff --git a/develop/js/u.js b/develop/js/u.js new file mode 100644 index 0000000..7a48867 --- /dev/null +++ b/develop/js/u.js @@ -0,0 +1,132 @@ +// Smooth scroll to ID +window.addEventListener("load", function () { setTimeout(() => { if (window.location.hash) { let t = window.location.hash.substring(1), o = document.getElementById(t); o && o.scrollIntoView({ behavior: "smooth", block: "start" }) } }, 135) }); + +// No card hover on touch +("ontouchstart" in window || navigator.maxTouchPoints > 0) && window.addEventListener("touchstart", function t() { document.body.classList.add("no-hover"), window.removeEventListener("touchstart", t, !1) }, !1); + +// Auto-add target="_blank" and secure rel (noopener & noreferrer) to external links, +// except those with the "eel" class +(() => { let e = document.baseURI, t = document.querySelectorAll("a[href]:not(.eel)"), r = window.location.hostname; for (let l = 0, n = t.length; l < n; l++) { let o = t[l]; try { let b = new URL(o.getAttribute("href"), e); if (b.hostname !== r) { "_blank" !== o.getAttribute("target") && o.setAttribute("target", "_blank"); let a = o.getAttribute("rel") || ""; /\bnoopener\b/.test(a) || (a += " noopener"), /\bnoreferrer\b/.test(a) || (a += " noreferrer"), o.setAttribute("rel", a.trim()) } } catch (i) { } } })(); + +// Switch to JPG for devices that don't support WebP +!async function () { await async function () { return new Promise((function (n) { const e = new Image; e.onload = function () { n(1 === e.width && 1 === e.height) }, e.onerror = function () { n(!1) }, e.src = "data:image/webp;base64,UklGRhYAAABXRUJQVlA4TAoAAAAvAAAAAEX/I/of" })) }() || document.querySelectorAll('img[src$=".webp"]').forEach((function (n) { n.src = n.src.replace(/\.webp$/i, ".jpg") })) }() + +// Link redirect animation +document.addEventListener('DOMContentLoaded', function () { + // Create and inject CSS for the animation + const style = document.createElement('style'); + style.textContent = ` + .link-arrow-container { + position: absolute; + pointer-events: none; + z-index: 9999; + width: 20px; + height: 20px; + right: 0px; + opacity: 0; + transform: translateX(-5px); + transition: transform 0.1s ease-out, opacity 0.1s ease-out; + /* Vertical alignment handled by parent flex settings */ + } + .link-arrow-container.animate { + opacity: 1; + transform: translateX(5px); + } + .link-arrow-container svg { + width: 100%; + height: 100%; + fill: currentColor; + display: block; + } + a[href]:not(.no-arrow-padding):not(a[target="_blank"]) { + position: relative; + padding-right: 24px; + display: inline-flex; + align-items: center; + } + `; + document.head.appendChild(style); + + // SVG arrow icon data + const svgArrow = ``; + + // Add arrow containers to eligible links on load + document.querySelectorAll('a[href]').forEach(link => { + const href = link.getAttribute('href'); + // Skip links that open in new tabs, are anchors, or javascript calls + if (link.getAttribute('target') === '_blank' || href.startsWith('#') || href.startsWith('javascript:')) { + return; + } + + // Create and append arrow container + const arrowContainer = document.createElement('div'); + arrowContainer.className = 'link-arrow-container'; + arrowContainer.innerHTML = 
svgArrow; + link.appendChild(arrowContainer); + }); + + // Delegated click listener on the body + document.body.addEventListener('click', function (e) { + // Find the closest ancestor link + const link = e.target.closest('a[href]'); + + // If no link was clicked, or checks fail, do nothing + if (!link) return; + + const href = link.getAttribute('href'); + if (link.getAttribute('target') === '_blank' || href.startsWith('#') || href.startsWith('javascript:')) { + return; + } + + // Skip if modifier keys are pressed + if (e.ctrlKey || e.metaKey || e.shiftKey) return; + + // Find the arrow container within this link + const arrowContainer = link.querySelector('.link-arrow-container'); + if (!arrowContainer) return; // Should exist, but safety check + + // Prevent default navigation + e.preventDefault(); + + // Animate the arrow + arrowContainer.classList.add('animate'); + + // Navigate after a delay + setTimeout(() => { + window.location.href = href; + }, 100); + }); + + // Reset animation state on page show (handles bfcache) + window.addEventListener('pageshow', function (event) { + if (event.persisted) { + document.querySelectorAll('.link-arrow-container.animate').forEach(arrow => { + arrow.classList.remove('animate'); + }); + } + }); + +}); + +// Quicklink 2.3.0 +!function (e, n) { "object" == typeof exports && "undefined" != typeof module ? n(exports) : "function" == typeof define && define.amd ? define(["exports"], n) : n(e.quicklink = {}) }(this, function (e) { function n(e) { return new Promise(function (n, r, t) { (t = new XMLHttpRequest).open("GET", e, t.withCredentials = !0), t.onload = function () { 200 === t.status ? n() : r() }, t.send() }) } var r, t = (r = document.createElement("link")).relList && r.relList.supports && r.relList.supports("prefetch") ? function (e) { return new Promise(function (n, r, t) { (t = document.createElement("link")).rel = "prefetch", t.href = e, t.onload = n, t.onerror = r, document.head.appendChild(t) }) } : n, o = window.requestIdleCallback || function (e) { var n = Date.now(); return setTimeout(function () { e({ didTimeout: !1, timeRemaining: function () { return Math.max(0, 50 - (Date.now() - n)) } }) }, 1) }, i = new Set, c = new Set, u = !1; function a(e) { if (e) { if (e.saveData) return new Error("Save-Data is enabled"); if (/2g/.test(e.effectiveType)) return new Error("network conditions are poor") } return !0 } function s(e, r, o) { var s = a(navigator.connection); return s instanceof Error ? Promise.reject(new Error("Cannot prefetch, " + s.message)) : (c.size > 0 && !u && console.warn("[Warning] You are using both prefetching and prerendering on the same document"), Promise.all([].concat(e).map(function (e) { if (!i.has(e)) return i.add(e), (r ? function (e) { return window.fetch ? fetch(e, { credentials: "include" }) : n(e) } : t)(new URL(e, location.href).toString()) }))) } function f(e, n) { var r = a(navigator.connection); if (r instanceof Error) return Promise.reject(new Error("Cannot prerender, " + r.message)); if (!HTMLScriptElement.supports("speculationrules")) return s(e), Promise.reject(new Error("This browser does not support the speculation rules API. 
Falling back to prefetch.")); if (document.querySelector('script[type="speculationrules"]')) return Promise.reject(new Error("Speculation Rules is already defined and cannot be altered.")); for (var t = 0, o = [].concat(e); t < o.length; t += 1) { var f = o[t]; if (window.location.origin !== new URL(f, window.location.href).origin) return Promise.reject(new Error("Only same origin URLs are allowed: " + f)); c.add(f) } i.size > 0 && !u && console.warn("[Warning] You are using both prefetching and prerendering on the same document"); var l = function (e) { var n = document.createElement("script"); n.type = "speculationrules", n.text = '{"prerender":[{"source": "list","urls": ["' + Array.from(e).join('","') + '"]}]}'; try { document.head.appendChild(n) } catch (e) { return e } return !0 }(c); return !0 === l ? Promise.resolve() : Promise.reject(l) } e.listen = function (e) { if (e || (e = {}), window.IntersectionObserver) { var n = function (e) { e = e || 1; var n = [], r = 0; function t() { r < e && n.length > 0 && (n.shift()(), r++) } return [function (e) { n.push(e) > 1 || t() }, function () { r--, t() }] }(e.throttle || 1 / 0), r = n[0], t = n[1], a = e.limit || 1 / 0, l = e.origins || [location.hostname], d = e.ignores || [], h = e.delay || 0, p = [], m = e.timeoutFn || o, w = "function" == typeof e.hrefFn && e.hrefFn, g = e.prerender || !1; u = e.prerenderAndPrefetch || !1; var v = new IntersectionObserver(function (n) { n.forEach(function (n) { if (n.isIntersecting) p.push((n = n.target).href), function (e, n) { n ? setTimeout(e, n) : e() }(function () { -1 !== p.indexOf(n.href) && (v.unobserve(n), (u || g) && c.size < 1 ? f(w ? w(n) : n.href).catch(function (n) { if (!e.onError) throw n; e.onError(n) }) : i.size < a && !g && r(function () { s(w ? w(n) : n.href, e.priority).then(t).catch(function (n) { t(), e.onError && e.onError(n) }) })) }, h); else { var o = p.indexOf((n = n.target).href); o > -1 && p.splice(o) } }) }, { threshold: e.threshold || 0 }); return m(function () { (e.el || document).querySelectorAll("a").forEach(function (e) { l.length && !l.includes(e.hostname) || function e(n, r) { return Array.isArray(r) ? r.some(function (r) { return e(n, r) }) : (r.test || r).call(r, n.href, n) }(e, d) || v.observe(e) }) }, { timeout: e.timeout || 2e3 }), function () { i.clear(), v.disconnect() } } }, e.prefetch = s, e.prerender = f }); + +quicklink.listen({ + origins: [], + ignores: [ + // Don't prefetch URL fragments from my own site + uri => uri.includes('caileb.com') && uri.includes('#'), + // Don't prefetch hosted services + uri => uri.includes('gallery.caileb.com'), + uri => uri.includes('jellyfin.caileb.com'), + uri => uri.includes('archive.caileb.com'), + uri => uri.includes('music.caileb.com'), + // Don't prefetch API's + /\/api\/?/, + /^api\./, + // Don't prefetch these file types + uri => /\.(zip|tar|7z|rar|js|apk|xapk|woff2|tff|otf|pdf|mp3|mp4|wav|exe|msi|bat|deb|rpm|bin|dmg|iso|csv|log|sql|xml|key|odp|ods|pps|ppt|xls|doc|jpg|jpeg|jpe|jif|jfif|jfi|png|gif|webp|tif|psd|raw|arw|cr2|nrw|k25|bmp|dib|heif|heic|ind|indd|indt|jp2|j2k|jpf|jpx|jpm|mj2|svg|ai|eps)$/i.test(uri), + // Don't prefetch these protocols + uri => /^(http|file|ftp|mailto|tel):/i.test(uri), + ] +}); \ No newline at end of file diff --git a/develop/static/datacenter-block.html b/develop/static/datacenter-block.html new file mode 100644 index 0000000..1450bf1 --- /dev/null +++ b/develop/static/datacenter-block.html @@ -0,0 +1,10 @@ + + + + + Blocked +

Connections from datacenter IP ranges are blocked due to spam

+

Detected as: {{.ASNName}}

+ + + \ No newline at end of file diff --git a/develop/static/default-block.html b/develop/static/default-block.html new file mode 100644 index 0000000..2f350a7 --- /dev/null +++ b/develop/static/default-block.html @@ -0,0 +1,10 @@ + + + + + + + Blocked +

Access Blocked

+ + \ No newline at end of file diff --git a/develop/static/error.html b/develop/static/error.html new file mode 100644 index 0000000..ebfe646 --- /dev/null +++ b/develop/static/error.html @@ -0,0 +1,22 @@ + + + + + + + Error + + + + + + + + + + + +

Something appears to have gone wrong.

+ + + \ No newline at end of file diff --git a/develop/static/india-block.html b/develop/static/india-block.html new file mode 100644 index 0000000..1046ddb --- /dev/null +++ b/develop/static/india-block.html @@ -0,0 +1,11 @@ + + + + + + + Access Restricted + Access Restricted + + + \ No newline at end of file diff --git a/develop/static/pow-interstitial.html b/develop/static/pow-interstitial.html new file mode 100644 index 0000000..e63058f --- /dev/null +++ b/develop/static/pow-interstitial.html @@ -0,0 +1,437 @@ + + + + + + + Security Checkpoint + + + + + + + + + + + + +
+
+

Security Checkpoint

+

Verifying your browser to protect this site from automated abuse. This may take a few seconds...

+
+
+
Redirecting
+
+
+
+ + +
+ + diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..33d8d1d --- /dev/null +++ b/go.mod @@ -0,0 +1,44 @@ +module caileb + +go 1.24.1 + +require ( + github.com/BurntSushi/toml v0.4.1 + github.com/andybalholm/brotli v1.1.1 + github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396 + github.com/dgraph-io/badger/v4 v4.7.0 + github.com/gofiber/fiber/v2 v2.52.6 + github.com/gofiber/template/html/v2 v2.1.3 + github.com/mileusna/useragent v1.3.5 + github.com/oschwald/geoip2-golang v1.11.0 + github.com/tdewolff/minify/v2 v2.22.4 +) + +require ( + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/gofiber/template v1.8.3 // indirect + github.com/gofiber/utils v1.1.0 // indirect + github.com/google/flatbuffers v25.2.10+incompatible // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/klauspost/compress v1.18.0 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/oschwald/maxminddb-golang v1.13.0 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/tdewolff/parse/v2 v2.7.21 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.51.0 // indirect + github.com/valyala/tcplisten v1.0.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.35.0 // indirect + go.opentelemetry.io/otel/metric v1.35.0 // indirect + go.opentelemetry.io/otel/trace v1.35.0 // indirect + golang.org/x/net v0.38.0 // indirect + golang.org/x/sys v0.32.0 // indirect + google.golang.org/protobuf v1.36.6 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..abf9867 --- /dev/null +++ b/go.sum @@ -0,0 +1,90 @@ +github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw= +github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396 h1:W2HK1IdCnCGuLUeyizSCkwvBjdj0ZL7mxnJYQ3poyzI= +github.com/cloudflare/ahocorasick v0.0.0-20240916140611-054963ec9396/go.mod h1:tGWUZLZp9ajsxUOnHmFFLnqnlKXsCn6GReG4jAD59H0= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgraph-io/badger/v4 v4.7.0 h1:Q+J8HApYAY7UMpL8d9owqiB+odzEc0zn/aqOD9jhc6Y= +github.com/dgraph-io/badger/v4 v4.7.0/go.mod h1:He7TzG3YBy3j4f5baj5B7Zl2XyfNe5bl4Udl0aPemVA= +github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINAEJdWGOM= +github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38= +github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= 
+github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/gofiber/fiber/v2 v2.52.6 h1:Rfp+ILPiYSvvVuIPvxrBns+HJp8qGLDnLJawAu27XVI= +github.com/gofiber/fiber/v2 v2.52.6/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw= +github.com/gofiber/template v1.8.3 h1:hzHdvMwMo/T2kouz2pPCA0zGiLCeMnoGsQZBTSYgZxc= +github.com/gofiber/template v1.8.3/go.mod h1:bs/2n0pSNPOkRa5VJ8zTIvedcI/lEYxzV3+YPXdBvq8= +github.com/gofiber/template/html/v2 v2.1.3 h1:n1LYBtmr9C0V/k/3qBblXyMxV5B0o/gpb6dFLp8ea+o= +github.com/gofiber/template/html/v2 v2.1.3/go.mod h1:U5Fxgc5KpyujU9OqKzy6Kn6Qup6Tm7zdsISR+VpnHRE= +github.com/gofiber/utils v1.1.0 h1:vdEBpn7AzIUJRhe+CiTOJdUcTg4Q9RK+pEa0KPbLdrM= +github.com/gofiber/utils v1.1.0/go.mod h1:poZpsnhBykfnY1Mc0KeEa6mSHrS3dV0+oBWyeQmb2e0= +github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q= +github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mileusna/useragent v1.3.5 h1:SJM5NzBmh/hO+4LGeATKpaEX9+b4vcGg2qXGLiNGDws= +github.com/mileusna/useragent v1.3.5/go.mod h1:3d8TOmwL/5I8pJjyVDteHtgDGcefrFUX4ccGOMKNYYc= +github.com/oschwald/geoip2-golang v1.11.0 h1:hNENhCn1Uyzhf9PTmquXENiWS6AlxAEnBII6r8krA3w= +github.com/oschwald/geoip2-golang v1.11.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo= +github.com/oschwald/maxminddb-golang v1.13.0 h1:R8xBorY71s84yO06NgTmQvqvTvlS/bnYZrrWX1MElnU= +github.com/oschwald/maxminddb-golang v1.13.0/go.mod h1:BU0z8BfFVhi1LQaonTwwGQlsHUEu9pWNdMfmq4ztm0o= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod 
h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tdewolff/minify/v2 v2.22.4 h1:0/8K2fheOuYr5B4e5oCE1hGBVX6DQHLP0EGzdsDlYeg= +github.com/tdewolff/minify/v2 v2.22.4/go.mod h1:K/R8TT7aivpcU8QCNUU1UdR6etfnFPr7L11TO/X7shk= +github.com/tdewolff/parse/v2 v2.7.21 h1:OCuPFtGr4mXdnfKikQlUb0n654ROJANhBqCk+wioJ/A= +github.com/tdewolff/parse/v2 v2.7.21/go.mod h1:I7TXO37t3aSG9SlPUBefAhgIF8nt7yYUwVGgETIoBcA= +github.com/tdewolff/test v1.0.11 h1:FdLbwQVHxqG16SlkGveC0JVyrJN62COWTRyUFzfbtBE= +github.com/tdewolff/test v1.0.11/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.51.0 h1:8b30A5JlZ6C7AS81RsWjYMQmrZG6feChmgAolCl1SqA= +github.com/valyala/fasthttp v1.51.0/go.mod h1:oI2XroL+lI7vdXyYoQk03bXBThfFl2cVdIA3Xl7cH8g= +github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8= +github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= +go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y= +go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= +go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= +go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs= +go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/main.go b/main.go new file mode 100644 index 0000000..e620e74 --- /dev/null +++ b/main.go @@ -0,0 +1,604 @@ +package main + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "context" + "flag" + "fmt" + "io" + "log" + "net/http" + "os" + "os/exec" + "os/signal" + "path/filepath" + "strconv" + "strings" + "syscall" + "time" + + "caileb/middleware" + "caileb/utils" + + "github.com/andybalholm/brotli" + "github.com/gofiber/fiber/v2" + "github.com/gofiber/fiber/v2/middleware/compress" + 
"github.com/gofiber/fiber/v2/middleware/logger" + "github.com/gofiber/template/html/v2" +) + +// getEnvInt tries to read an integer from the environment. +// If it's missing or invalid, it returns the fallback. +func getEnvInt(key string, fallback int) int { + if value, exists := os.LookupEnv(key); exists { + if intVal, err := strconv.Atoi(value); err == nil { + return intVal + } + } + return fallback +} + +// getEnvBool reads a boolean from the environment. +// Returns fallback if missing; accepts "true" or "1" as true. +func getEnvBool(key string, fallback bool) bool { + if value, exists := os.LookupEnv(key); exists { + return strings.ToLower(value) == "true" || value == "1" + } + return fallback +} + +// validatePathParam rejects path params with unsafe characters (../, slashes, etc.). +func validatePathParam(paramName string) fiber.Handler { + return func(c *fiber.Ctx) error { + param := c.Params(paramName) + + // Clean and validate path + cleanedParam := filepath.Clean(param) + + // Security checks + if cleanedParam != param || strings.Contains(param, "..") || + strings.Contains(param, "/") || strings.Contains(param, "\\") { + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + + // Alphanumeric validation + validChars := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-." + for _, char := range param { + if !strings.ContainsRune(validChars, char) { + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + + return c.Next() + } +} + +// customCompression adds Brotli or gzip compression depending on client support. +func customCompression() fiber.Handler { + return func(c *fiber.Ctx) error { + // Check Accept-Encoding header + acceptEncoding := c.Get("Accept-Encoding") + + // Set Vary header + c.Append("Vary", "Accept-Encoding") + + // Process after the handler has been executed + if err := c.Next(); err != nil { + return err + } + + // Only compress if response is successful and has body + if c.Response().StatusCode() != 200 || len(c.Response().Body()) < 1024 { + return nil + } + + // Check content type for compressibility + contentType := string(c.Response().Header.ContentType()) + if !isCompressible(contentType) { + return nil + } + + body := c.Response().Body() + + // Apply compression based on client support + if strings.Contains(acceptEncoding, "br") { + // Brotli compression + var buf bytes.Buffer + writer := brotli.NewWriterLevel(&buf, 7) + + if _, err := writer.Write(body); err != nil { + return nil // Skip compression on error + } + + if err := writer.Close(); err != nil { + return nil // Skip compression on error + } + + compressed := buf.Bytes() + + // Only use compression if it's actually smaller + if len(compressed) < len(body) { + c.Response().Header.Set("Content-Encoding", "br") + c.Response().SetBodyRaw(compressed) + } + } else if strings.Contains(acceptEncoding, "gzip") { + // Let the built-in gzip middleware handle it + c.Response().Header.Set("Content-Encoding", "gzip") + } + + return nil + } +} + +// isCompressible returns true for common types that benefit from compression. +func isCompressible(contentType string) bool { + compressibleTypes := []string{ + "text/", "application/json", "application/javascript", + "application/xml", "image/svg", "font/", + "application/wasm", "application/xhtml", "application/rss", + } + + for _, t := range compressibleTypes { + if strings.Contains(contentType, t) { + return true + } + } + + return false +} + +// main parses flags, sets up middleware/plugins, and starts the server. 
+// It also handles graceful shutdown signals. +func main() { + // Parse command line flags + prodMode := flag.Bool("p", false, "Run in production mode") + devMode := flag.Bool("d", false, "Run in development mode") + daemonMode := flag.Bool("b", false, "Run as a daemon (background process)") + port := flag.String("port", "1488", "Port to listen on") + skipPOW := flag.Bool("skip-pow", false, "Skip proof-of-work protection") + flag.Parse() + + // Handle daemon mode - fork a new process and exit the parent + if *daemonMode && os.Getenv("_DAEMON_CHILD") != "1" { + // Prepare the command to run this program again as a child + cmd := exec.Command(os.Args[0], os.Args[1:]...) + cmd.Env = append(os.Environ(), "_DAEMON_CHILD=1") + cmd.Start() + + log.Printf("Server started in daemon mode with PID: %d\n", cmd.Process.Pid) + // Write the PID to a file for later reference + pidFile := "server.pid" + if err := os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0644); err != nil { + log.Printf("Warning: Could not write PID file: %v\n", err) + } + + // Exit the parent process + os.Exit(0) + } + + // Set environment based on flags + if *prodMode { + os.Setenv("APP_ENV", "production") + } else if *devMode { + os.Setenv("APP_ENV", "development") + } else { + // Default to production mode if no mode is specified + os.Setenv("APP_ENV", "production") + } + + // Configure minification options from environment variables + opts := utils.DefaultMinifierOptions() + opts.MaxWorkers = getEnvInt("MINIFY_WORKERS", opts.MaxWorkers) + opts.SkipUnchanged = getEnvBool("MINIFY_SKIP_UNCHANGED", opts.SkipUnchanged) + opts.RemoveComments = getEnvBool("MINIFY_REMOVE_COMMENTS", opts.RemoveComments) + opts.KeepConditionalComments = getEnvBool("MINIFY_KEEP_CONDITIONAL_COMMENTS", opts.KeepConditionalComments) + opts.KeepSpecialComments = getEnvBool("MINIFY_KEEP_SPECIAL_COMMENTS", opts.KeepSpecialComments) + + // Minify assets from develop to public + log.Println("Minifying assets from /develop to /public directories...") + if err := utils.MinifyAssetsWithOptions(opts); err != nil { + log.Fatalf("Failed to minify assets: %v", err) + } + + // Setup the template engine + engine := html.New("./public/static", ".html") + engine.Reload(os.Getenv("APP_ENV") != "production") // Enable reloading in development mode + + // Create a new Fiber app with a custom error handler (serving error.html). 
+ app := fiber.New(fiber.Config{ + ErrorHandler: func(c *fiber.Ctx, err error) error { + code := fiber.StatusInternalServerError + if e, ok := err.(*fiber.Error); ok { + code = e.Code + } + return c.Status(code).SendFile(filepath.Join("public", "html", "error.html")) + }, + StrictRouting: true, // Enable strict routing for better path validation + Views: engine, // Set the template engine + ProxyHeader: "X-Forwarded-For", // Trust X-Forwarded-For header + EnableTrustedProxyCheck: true, // Enable proxy checking + TrustedProxies: []string{"127.0.0.1", "::1"}, // Add your NAS IP here + }) + + // Logger middleware only in development mode + if os.Getenv("APP_ENV") != "production" { + // API routes: log method, path and latency only (no status) + app.Use(logger.New(logger.Config{ + Format: "${time} ${method} ${path} - ${latency}", + TimeFormat: "2006-01-02T15:04:05", + TimeZone: "Local", + Next: func(c *fiber.Ctx) bool { + // skip this logger for non-API paths + return !strings.HasPrefix(c.Path(), "/api") + }, + })) + // Non-API routes: log full details including status + app.Use(logger.New(logger.Config{ + Format: "${time} ${status} | ${method} ${path} - ${latency}", + TimeFormat: "2006-01-02T15:04:05", + TimeZone: "Local", + Next: func(c *fiber.Ctx) bool { + // skip this logger for API paths + return strings.HasPrefix(c.Path(), "/api") + }, + })) + log.Printf("Logger middleware enabled (%s mode)\n", os.Getenv("APP_ENV")) + } + + // Force text/html content type for HTML files for better compression detection + app.Use(func(c *fiber.Ctx) error { + path := c.Path() + // Only set content type for HTML files + if strings.HasSuffix(path, ".html") || path == "/" || (len(path) > 0 && !strings.Contains(path, ".")) { + c.Set("Content-Type", "text/html; charset=utf-8") + } else if strings.HasSuffix(path, ".json") { + c.Set("Content-Type", "application/json") + } + return c.Next() + }) + + // Built-in compression for non-Brotli clients + app.Use(compress.New(compress.Config{ + Level: 7, + Next: func(c *fiber.Ctx) bool { + // Skip if client accepts Brotli + return strings.Contains(c.Get("Accept-Encoding"), "br") + }, + })) + + // Custom Brotli compression for supported clients + app.Use(customCompression()) + + // Security headers middleware (improves site security) + app.Use(func(c *fiber.Ctx) error { + c.Set("X-Frame-Options", "SAMEORIGIN") + c.Set("X-Content-Type-Options", "nosniff") + c.Set("Referrer-Policy", "strict-origin-when-cross-origin") + c.Set("X-Permitted-Cross-Domain-Policies", "none") + c.Set("Cross-Origin-Opener-Policy", "same-origin") + c.Set("Strict-Transport-Security", "max-age=31536000; includeSubDomains; preload") + //c.Set("Content-Security-Policy", "base-uri 'self'; default-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-inline' blob: *.caileb.com; img-src * data:; font-src 'self'; worker-src 'self' blob:; frame-src *.youtube.com *.youtube-nocookie.com *.bitchute.com *.rumble.com rumble.com; connect-src 'self' *.youtube.com *.youtube-nocookie.com *.ytimg.com *.bitchute.com *.rumble.com *.caileb.com;") + return c.Next() + }) + + // Serve static files from the public directory with optimized caching + // This should be before HTML POW middleware to correctly handle static files + app.Static("/", "./public", fiber.Static{ + Compress: true, // Enable compression for static files + ByteRange: true, // Enable byte range requests + CacheDuration: 24 * time.Hour, + MaxAge: 86400, + }) + + // Special handler for favicon.ico to ensure it's properly 
served + app.Get("/favicon.ico", func(c *fiber.Ctx) error { + return c.SendFile("./public/favicon.ico", false) + }) + + // Load and apply registered middleware plugins + for _, handler := range middleware.LoadPlugins(*skipPOW) { + app.Use(handler) + } + log.Println("Loaded middleware plugins") + + // API group with POW protection + api := app.Group("/api") + + // Endpoint to verify POW solutions and issue tokens + api.Post("/pow/verify", middleware.VerifyCheckpointHandler) + + // Challenge endpoint for secure POW parameters + api.Get("/pow/challenge", middleware.GetCheckpointChallengeHandler) + + // Backwards compatibility for existing clients + api.Get("/verify", middleware.VerifyCheckpointHandler) + + // Homepage route: serve index.html from public/html/ with compression + app.Get("/", func(c *fiber.Ctx) error { + c.Set("Content-Type", "text/html; charset=utf-8") + c.Response().Header.Add("Vary", "Accept-Encoding") + return c.SendFile(filepath.Join("public", "html", "index.html")) + }) + + // Dynamic page route using the validation middleware + app.Get("/:page", validatePathParam("page"), func(c *fiber.Ctx) error { + page := c.Params("page") + c.Set("Content-Type", "text/html; charset=utf-8") + c.Response().Header.Add("Vary", "Accept-Encoding") + return c.SendFile(filepath.Join("public", "html", page+".html")) + }) + + // Catch-all: serve a 404 error page for unmatched routes + app.Use(func(c *fiber.Ctx) error { + c.Set("Content-Type", "text/html; charset=utf-8") + c.Response().Header.Add("Vary", "Accept-Encoding") + return c.Status(404).SendFile(filepath.Join("public", "html", "error.html")) + }) + + // Start the server + go func() { + addr := ":" + *port + log.Printf("Server starting on %s in %s mode\n", addr, os.Getenv("APP_ENV")) + if err := app.Listen(addr); err != nil { + log.Fatalf("Server error: %v", err) + } + }() + + // Start the GeoIP database update routine + go startGeoIPUpdateRoutine() + + // If running as daemon child, no need to wait for signals in foreground + if os.Getenv("_DAEMON_CHILD") == "1" { + // In daemon mode, we still need to wait for signals + // but we can close stdout/stderr + if f, err := os.OpenFile("/dev/null", os.O_RDWR, 0); err == nil { + // Redirect stdout/stderr to /dev/null for true daemon behavior + os.Stdout = f + os.Stderr = f + // Don't close f as it's now used by os.Stdout and os.Stderr + } + } + + // Graceful shutdown handling + quit := make(chan os.Signal, 1) + signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM) + <-quit + + log.Println("Shutting down server...") + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + if err := app.ShutdownWithContext(ctx); err != nil { + log.Fatalf("Server forced to shutdown: %v", err) + } + + // Close the token store database + if err := middleware.CloseTokenStore(); err != nil { + log.Printf("Error closing token store: %v", err) + } + + log.Println("Server exiting") +} + +// startGeoIPUpdateRoutine starts a goroutine that updates GeoIP databases daily +func startGeoIPUpdateRoutine() { + // Start immediately after server startup to ensure databases are fresh + updateGeoIPDatabases() + + // Then schedule daily updates + ticker := time.NewTicker(24 * time.Hour) + go func() { + for range ticker.C { + updateGeoIPDatabases() + } + }() +} + +// updateGeoIPDatabases downloads the latest GeoLite2 Country and ASN databases +func updateGeoIPDatabases() { + // MaxMind account credentials + accountID := "1015174" + licenseKey := "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" + + // 
Database paths and URLs + databases := []struct { + name string + url string + destFile string + }{ + { + name: "GeoLite2-Country", + url: "https://download.maxmind.com/geoip/databases/GeoLite2-Country/download?suffix=tar.gz", + destFile: "./data/GeoLite2-Country.mmdb", + }, + { + name: "GeoLite2-ASN", + url: "https://download.maxmind.com/geoip/databases/GeoLite2-ASN/download?suffix=tar.gz", + destFile: "./data/GeoLite2-ASN.mmdb", + }, + } + + // Ensure data directory exists + if err := os.MkdirAll("./data", 0755); err != nil { + log.Printf("ERROR: Failed to create data directory: %v", err) + return + } + + // Create HTTP client that follows redirects + client := &http.Client{ + Timeout: 5 * time.Minute, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + // MaxMind uses Cloudflare R2 for redirects, follow them + if len(via) >= 10 { + return fmt.Errorf("too many redirects") + } + // Add basic auth to the redirected request if needed + if req.URL.Host != "mm-prod-geoip-databases.a2649acb697e2c09b632799562c076f2.r2.cloudflarestorage.com" { + req.SetBasicAuth(accountID, licenseKey) + } + return nil + }, + } + + // Download and process each database + for _, db := range databases { + log.Printf("Checking for updates to %s...", db.name) + + // First, check if an update is needed via HEAD request + headReq, err := http.NewRequest("HEAD", db.url, nil) + if err != nil { + log.Printf("ERROR: Failed to create HEAD request for %s: %v", db.name, err) + continue + } + headReq.SetBasicAuth(accountID, licenseKey) + + headResp, err := client.Do(headReq) + if err != nil { + log.Printf("ERROR: Failed to make HEAD request for %s: %v", db.name, err) + continue + } + headResp.Body.Close() + + // Check if file exists and get its modification time + updateNeeded := true + if fileInfo, err := os.Stat(db.destFile); err == nil { + lastModified := headResp.Header.Get("Last-Modified") + if lastModified != "" { + remoteTime, err := time.Parse(time.RFC1123, lastModified) + if err == nil { + // Only update if remote file is newer + if !remoteTime.After(fileInfo.ModTime()) { + log.Printf("No update needed for %s, local copy is current", db.name) + updateNeeded = false + } + } + } + } + + if !updateNeeded { + continue + } + + // Download the database + log.Printf("Downloading %s...", db.name) + req, err := http.NewRequest("GET", db.url, nil) + if err != nil { + log.Printf("ERROR: Failed to create request for %s: %v", db.name, err) + continue + } + req.SetBasicAuth(accountID, licenseKey) + + resp, err := client.Do(req) + if err != nil { + log.Printf("ERROR: Failed to download %s: %v", db.name, err) + continue + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + log.Printf("ERROR: Failed to download %s: HTTP %d", db.name, resp.StatusCode) + continue + } + + // Create a temporary file to store the downloaded archive + tempFile, err := os.CreateTemp("", "geoip-*.tar.gz") + if err != nil { + log.Printf("ERROR: Failed to create temp file for %s: %v", db.name, err) + continue + } + defer os.Remove(tempFile.Name()) + + // Copy the response body to the temporary file + _, err = io.Copy(tempFile, resp.Body) + if err != nil { + log.Printf("ERROR: Failed to save downloaded %s: %v", db.name, err) + tempFile.Close() + continue + } + tempFile.Close() + + // Extract the .mmdb file from the tar.gz archive + extracted, err := extractMMDBFromTarGz(tempFile.Name(), db.name) + if err != nil { + log.Printf("ERROR: Failed to extract %s: %v", db.name, err) + continue + } + + // Move the extracted file to 
the destination + err = os.Rename(extracted, db.destFile) + if err != nil { + log.Printf("ERROR: Failed to move %s to destination: %v", db.name, err) + os.Remove(extracted) // Clean up + continue + } + + log.Printf("Successfully updated %s", db.name) + } + + // Reload the databases in the middleware + middleware.ReloadGeoIPDatabases() +} + +// extractMMDBFromTarGz extracts the .mmdb file from a tar.gz archive +func extractMMDBFromTarGz(tarGzPath, dbName string) (string, error) { + file, err := os.Open(tarGzPath) + if err != nil { + return "", err + } + defer file.Close() + + gzr, err := gzip.NewReader(file) + if err != nil { + return "", err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + + // Create a temporary directory for extraction + tempDir, err := os.MkdirTemp("", "geoip-extract-") + if err != nil { + return "", err + } + + // Find and extract the .mmdb file + var mmdbPath string + for { + header, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + os.RemoveAll(tempDir) + return "", err + } + + // Look for the .mmdb file in the archive + if strings.HasSuffix(header.Name, ".mmdb") && strings.Contains(header.Name, dbName) { + // Extract to temporary directory + mmdbPath = filepath.Join(tempDir, filepath.Base(header.Name)) + outFile, err := os.Create(mmdbPath) + if err != nil { + os.RemoveAll(tempDir) + return "", err + } + + if _, err := io.Copy(outFile, tr); err != nil { + outFile.Close() + os.RemoveAll(tempDir) + return "", err + } + outFile.Close() + break + } + } + + if mmdbPath == "" { + os.RemoveAll(tempDir) + return "", fmt.Errorf("no .mmdb file found in archive for %s", dbName) + } + + return mmdbPath, nil +} diff --git a/middleware/checkpoint.go b/middleware/checkpoint.go new file mode 100644 index 0000000..9d6091e --- /dev/null +++ b/middleware/checkpoint.go @@ -0,0 +1,1482 @@ +// middleware provides a small proof-of-work puzzle that users solve before +// accessing protected pages or APIs, plus transparent reverse-proxy support. +// It issues HMAC-signed tokens bound to IP/browser, stores them in BadgerDB, +// and automatically cleans up expired data. 
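// For illustration only: the contract a solver has to satisfy is the same one
// verifyProofOfWork enforces further down in this file – find a nonce such that
// hex(SHA-256(challenge+salt+nonce)) has `difficulty` leading zeros, where challenge
// and salt are the base64-decoded values that VerifyCheckpointHandler later checks
// against. The real solver runs client-side in the interstitial page; this Go sketch
// (the function name and brute-force loop are illustrative, not part of the
// middleware API) only documents the scheme:
//
//	func solveChallengeSketch(challenge, salt string, difficulty int) string {
//		prefix := strings.Repeat("0", difficulty)
//		for i := 0; ; i++ {
//			nonce := strconv.Itoa(i)
//			sum := sha256.Sum256([]byte(challenge + salt + nonce))
//			if strings.HasPrefix(hex.EncodeToString(sum[:]), prefix) {
//				return nonce // first nonce meeting the difficulty target
//			}
//		}
//	}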
+package middleware + +import ( + "context" + "crypto/hmac" + cryptorand "crypto/rand" + "crypto/sha256" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "log" + "net" + "net/http" + "net/http/httputil" + "net/url" + "os" + "path/filepath" + "regexp" + "strings" + "sync" + "sync/atomic" + "time" + + "bytes" + "encoding/gob" + + "html/template" + + "github.com/dgraph-io/badger/v4" + "github.com/gofiber/fiber/v2" + "github.com/mileusna/useragent" +) + +// --- Configuration --- + +// Config struct holds all configurable parameters for the Checkpoint middleware +type Config struct { + // General Settings + Difficulty int // Number of leading zeros for PoW hash + TokenExpiration time.Duration // Validity period for issued tokens + CookieName string // Name of the cookie used to store tokens + CookieDomain string // Domain scope for the cookie (e.g., ".example.com" for subdomains) + SaltLength int // Length of the salt used in challenges + + // Rate Limiting & Expiration + MaxAttemptsPerHour int // Max PoW verification attempts per IP per hour + MaxNonceAge time.Duration // Max age for used nonces before cleanup + ChallengeExpiration time.Duration // Time limit for solving a challenge + + // File Paths + SecretConfigPath string // Path to the persistent HMAC secret file + TokenStoreDBPath string // Directory path for the BadgerDB token store + InterstitialPaths []string // Paths to search for the interstitial HTML page + + // Security Settings + CheckPoSTimes bool // Enable Proof-of-Space-Time consistency checks + PoSTimeConsistencyRatio float64 // Allowed ratio between fastest and slowest PoS runs + HTMLCheckpointExclusions []string // Path prefixes to exclude from HTML checkpoint + HTMLCheckpointExcludedExtensions map[string]bool // File extensions to exclude (lowercase, '.') + DangerousQueryPatterns []*regexp.Regexp // Regex patterns to block in query strings + BlockDangerousPathChars bool // Block paths containing potentially dangerous characters (;, `) + // User Agent validation settings + UserAgentValidationExclusions []string // Path prefixes to skip UA validation + UserAgentRequiredPrefixes map[string]string // Path prefix -> required UA prefix + // Note: Binding to IP, User Agent, and Browser Hint is always enabled. 
+ + // Reverse Proxy Settings + ReverseProxyMappings map[string]string // Map of hostname to backend URL (e.g., "app.example.com": "http://127.0.0.1:8080") +} + +var ( + // Global configuration instance + checkpointConfig Config + + // Secret key used for HMAC verification - automatically generated on startup + hmacSecret []byte + // Used nonces to prevent replay attacks - use sync.Map for concurrency + usedNonces sync.Map // map[string]time.Time + // IP-based rate limiting for token generation - use sync.Map for concurrency + ipRateLimit sync.Map // map[string]*atomic.Int64 (or similar atomic counter) + // Challenge parameters store with request IDs - use sync.Map for concurrency + challengeStore sync.Map // map[string]ChallengeParams + // Global token store (now BadgerDB based) + tokenStore *TokenStore + // in-memory cache for the interstitial HTML to avoid repeated disk reads + interstitialContent string + interstitialOnce sync.Once + interstitialLoadErr error + // parsed template for interstitial page + interstitialTmpl *template.Template + interstitialTmplOnce sync.Once + interstitialTmplErr error + // pool for gob encoding buffers to reduce allocations + gobBufferPool = sync.Pool{ + New: func() interface{} { return new(bytes.Buffer) }, + } +) + +// Need atomic package for ipRateLimit counter + +func init() { + // Load complete configuration from checkpoint.toml (required) + var cfg Config + if err := LoadConfig("checkpoint", &cfg); err != nil { + log.Fatalf("Failed to load checkpoint config: %v", err) + } + SetConfig(cfg) + // Register sanitization plugin (cleanup URLs/queries before checkpoint) + RegisterPlugin("sanitize", RequestSanitizationMiddleware) + // Register checkpoint plugin + RegisterPlugin("checkpoint", New) + + // Initialize stores AFTER config is potentially set/loaded + // Ensure tokenStore is initialized before use + var err error + tokenStore, err = NewTokenStore(checkpointConfig.TokenStoreDBPath) + if err != nil { + log.Fatalf("CRITICAL: Failed to initialize TokenStore database: %v", err) + } + + // Initialize secret + _ = initSecret() + + // Start cleanup timer for nonces/ip rates (token cleanup handled by DB TTL) + _ = startCleanupTimer() +} + +// SecretConfig contains configuration for the Checkpoint system (for secret file persistence) +type SecretConfig struct { + HmacSecret []byte `json:"hmac_secret"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +// --- End Configuration --- + +// SetConfig swaps in your custom Config (usually loaded from TOML). +// Do this before using the middleware, ideally at startup. +func SetConfig(cfg Config) { + checkpointConfig = cfg + // Re-initialization of token store path is complex with BadgerDB, recommend restart. + // Other config changes can be applied dynamically if needed. +} + +// --- Token Store (BadgerDB Implementation) --- + +// StoredTokenData holds the relevant information persisted for each token hash. +// This includes binding information needed for verification. +type StoredTokenData struct { + ClientIPHash string // Hash of IP used during issuance + UserAgentHash string // Hash of User Agent used during issuance + BrowserHint string // Browser Hint used during issuance + LastVerified time.Time // Last time this token was successfully validated + ExpiresAt time.Time // Original expiration time of the token (for reference, TTL enforces) +} + +// TokenStore manages persistent storage of verified tokens using BadgerDB. 
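// A minimal sketch of tweaking the TOML-loaded settings at startup, mirroring what
// init() above already does (the hostname and backend URL are placeholders, and the
// LoadConfig signature is inferred from its use in init()):
//
//	var cfg Config
//	if err := LoadConfig("checkpoint", &cfg); err != nil {
//		log.Fatalf("load checkpoint config: %v", err)
//	}
//	cfg.ReverseProxyMappings = map[string]string{
//		"app.example.com": "http://127.0.0.1:8080", // placeholder mapping
//	}
//	SetConfig(cfg)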
+type TokenStore struct { + DB *badger.DB +} + +// NewTokenStore initializes and returns a new TokenStore using BadgerDB. +func NewTokenStore(dbPath string) (*TokenStore, error) { + if err := os.MkdirAll(dbPath, 0755); err != nil { + return nil, fmt.Errorf("failed to create token store directory %s: %w", dbPath, err) + } + opts := badger.DefaultOptions(dbPath) + // Tune options for performance if needed (e.g., memory usage) + opts.Logger = nil // Disable default Badger logger unless debugging + db, err := badger.Open(opts) + if err != nil { + return nil, fmt.Errorf("failed to open token store database at %s: %w", dbPath, err) + } + store := &TokenStore{DB: db} + // Start BadgerDB's own value log GC routine (optional but recommended) + go store.runValueLogGC() + return store, nil +} + +// Close closes the BadgerDB database. +// Should be called during graceful shutdown. +func (store *TokenStore) Close() error { + if store.DB != nil { + log.Println("Closing TokenStore database...") + return store.DB.Close() + } + return nil +} + +// runValueLogGC runs BadgerDB's value log garbage collection periodically. +func (store *TokenStore) runValueLogGC() { + ticker := time.NewTicker(5 * time.Minute) + defer ticker.Stop() + for range ticker.C { + again: + err := store.DB.RunValueLogGC(0.7) // Run GC if 70% space can be reclaimed + if err == nil { + goto again // Run GC multiple times if needed + } + if err != badger.ErrNoRewrite { + log.Printf("WARNING: BadgerDB RunValueLogGC error: %v", err) + } + } +} + +// encodeTokenData serializes StoredTokenData using gob. +func encodeTokenData(data *StoredTokenData) ([]byte, error) { + // get a buffer from pool + buf := gobBufferPool.Get().(*bytes.Buffer) + buf.Reset() + enc := gob.NewEncoder(buf) + if err := enc.Encode(data); err != nil { + gobBufferPool.Put(buf) + return nil, fmt.Errorf("failed to gob encode token data: %w", err) + } + // copy out the bytes to avoid retaining large buffer + out := make([]byte, buf.Len()) + copy(out, buf.Bytes()) + buf.Reset() + gobBufferPool.Put(buf) + return out, nil +} + +// decodeTokenData deserializes StoredTokenData using gob. +func decodeTokenData(encoded []byte) (*StoredTokenData, error) { + var data StoredTokenData + // use a reader to avoid extra buffer allocation + reader := bytes.NewReader(encoded) + dec := gob.NewDecoder(reader) + if err := dec.Decode(&data); err != nil { + return nil, fmt.Errorf("failed to gob decode token data: %w", err) + } + return &data, nil +} + +// addToken stores the token data in BadgerDB with a TTL. +func (store *TokenStore) addToken(tokenHash string, data *StoredTokenData) error { + encodedData, err := encodeTokenData(data) + if err != nil { + return err // Error already wrapped + } + + // Calculate TTL based on the token's specific expiration + ttl := time.Until(data.ExpiresAt) + if ttl <= 0 { + log.Printf("Attempted to add already expired token hash %s", tokenHash) + return nil // Don't add already expired tokens + } + + err = store.DB.Update(func(txn *badger.Txn) error { + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) + + if err != nil { + return fmt.Errorf("failed to add token hash %s to DB: %w", tokenHash, err) + } + return nil +} + +// updateTokenVerification updates the LastVerified time for an existing token. 
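// An illustrative round-trip through the store (the path, key and binding hashes are
// placeholders); lookupTokenData is defined just below:
//
//	store, err := NewTokenStore("./data/checkpoint_tokendb_example") // placeholder path
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer store.Close()
//
//	data := &StoredTokenData{
//		ClientIPHash:  "0011223344556677", // placeholder binding hashes
//		UserAgentHash: "8899aabbccddeeff",
//		LastVerified:  time.Now(),
//		ExpiresAt:     time.Now().Add(24 * time.Hour),
//	}
//	_ = store.addToken("example-token-hash", data)
//
//	got, found, _ := store.lookupTokenData(context.Background(), "example-token-hash")
//	// found is true (and got carries the bindings) until the TTL derived from ExpiresAt lapses.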
+func (store *TokenStore) updateTokenVerification(tokenHash string) error { + return store.DB.Update(func(txn *badger.Txn) error { + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + // If token expired or was deleted between check and update, log and ignore. + if err == badger.ErrKeyNotFound { + log.Printf("Token hash %s not found during update verification (likely expired/deleted)", tokenHash) + return nil // Not a critical error in this context + } + return fmt.Errorf("failed to get token %s for update: %w", tokenHash, err) + } + + var storedData *StoredTokenData + err = item.Value(func(val []byte) error { + storedData, err = decodeTokenData(val) + return err + }) + if err != nil { + return fmt.Errorf("failed to decode token %s value for update: %w", tokenHash, err) + } + + // Update LastVerified and re-encode + storedData.LastVerified = time.Now() + encodedData, err := encodeTokenData(storedData) + if err != nil { + return err + } + + // Set the entry again (TTL remains the same based on original ExpiresAt) + ttl := time.Until(storedData.ExpiresAt) + if ttl <= 0 { + return nil + } // Don't update if expired + e := badger.NewEntry([]byte(tokenHash), encodedData).WithTTL(ttl) + return txn.SetEntry(e) + }) +} + +// lookupTokenData retrieves token data from BadgerDB. +// Returns the data, true if found and not expired, or false otherwise. +// Added context parameter +func (store *TokenStore) lookupTokenData(ctx context.Context, tokenHash string) (*StoredTokenData, bool, error) { + var storedData *StoredTokenData + var found bool + + err := store.DB.View(func(txn *badger.Txn) error { + // Check context cancellation within the transaction + if ctx.Err() != nil { + return ctx.Err() + } + item, err := txn.Get([]byte(tokenHash)) + if err != nil { + if err == badger.ErrKeyNotFound { + return nil // Not found, not an error for lookup + } + return fmt.Errorf("failed to get token hash %s from DB: %w", tokenHash, err) + } + + // Key exists, decode the value + err = item.Value(func(val []byte) error { + // Check context cancellation before decoding + if ctx.Err() != nil { + return ctx.Err() + } + var decodeErr error + storedData, decodeErr = decodeTokenData(val) + return decodeErr + }) + if err != nil { + // If context was cancelled, return that error + if ctx.Err() != nil { + return ctx.Err() + } + // Return actual decoding error + return fmt.Errorf("failed to decode StoredTokenData for hash %s: %w", tokenHash, err) + } + + // Check expiration explicitly just in case TTL mechanism has latency + if time.Now().After(storedData.ExpiresAt) { + log.Printf("Token hash %s found but expired (ExpiresAt: %v)", tokenHash, storedData.ExpiresAt) + storedData = nil // Treat as not found if expired + return nil + } + + found = true + return nil + }) + + if err != nil { + // Don't log here, return the error to the caller (validateToken) + return nil, false, err // Return the actual error + } + + return storedData, found, nil // Success +} + +// --- End Token Store --- + +// CloseTokenStore provides a package-level function to close the global token store. +// This should be called during application shutdown. 
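// The server's shutdown path in main.go calls middleware.CloseTokenStore() right after
// app.ShutdownWithContext, so the BadgerDB handle is released before the process exits.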
+func CloseTokenStore() error { + if tokenStore != nil { + return tokenStore.Close() + } + return nil +} + +// loadInterstitialHTML returns the cached interstitial HTML (loads once from disk) +func loadInterstitialHTML() (string, error) { + interstitialOnce.Do(func() { + for _, path := range checkpointConfig.InterstitialPaths { + if data, err := os.ReadFile(path); err == nil { + interstitialContent = string(data) + return + } + } + interstitialLoadErr = fmt.Errorf("could not find checkpoint interstitial HTML at any configured path") + }) + return interstitialContent, interstitialLoadErr +} + +// getInterstitialTemplate parses the cached HTML as a Go template (once) +func getInterstitialTemplate() (*template.Template, error) { + interstitialTmplOnce.Do(func() { + raw, err := loadInterstitialHTML() + if err != nil { + interstitialTmplErr = err + return + } + interstitialTmpl, interstitialTmplErr = template.New("interstitial").Parse(raw) + }) + return interstitialTmpl, interstitialTmplErr +} + +// serveInterstitial serves the challenge page using a Go template for safe interpolation +func serveInterstitial(c *fiber.Ctx) error { + requestID := generateRequestID(c) + c.Status(200) + c.Set("Content-Type", "text/html; charset=utf-8") + tmpl, err := getInterstitialTemplate() + if err != nil { + log.Printf("WARNING: %v", err) + return c.SendString("Security verification required. Please refresh the page.") + } + // prepare data for template + host := c.Hostname() + originalURL, _ := c.Locals("originalURL").(string) + targetPath := c.Path() + if originalURL != "" { + targetPath = originalURL + } + data := struct { + TargetPath string + RequestID string + Host string + FullURL string + }{ + TargetPath: targetPath, + RequestID: requestID, + Host: host, + FullURL: c.BaseURL() + targetPath, + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Printf("ERROR: Interstitial template execution failed: %v", err) + return c.SendString("Security verification required. 
Please refresh the page.") + } + return c.SendString(buf.String()) +} + +// checkPoSTimes ensures that memory proof run times are within the allowed ratio +func checkPoSTimes(times []int64) error { + if len(times) != 3 { + return fmt.Errorf("invalid PoS run times length") + } + minT, maxT := times[0], times[0] + for _, t := range times[1:] { + if t < minT { + minT = t + } + if t > maxT { + maxT = t + } + } + if checkpointConfig.CheckPoSTimes && float64(maxT) > float64(minT)*checkpointConfig.PoSTimeConsistencyRatio { + return fmt.Errorf("PoS run times ('i') are not consistent (ratio %.2f > %.2f)", + float64(maxT)/float64(minT), checkpointConfig.PoSTimeConsistencyRatio) + } + return nil +} + +// getDomainFromHost returns the base domain from a hostname +// For proper cookie sharing in both production and development +func getDomainFromHost(hostname string) string { + // Handle localhost development + if hostname == "localhost" || strings.HasPrefix(hostname, "localhost:") || + hostname == "127.0.0.1" || strings.HasPrefix(hostname, "127.0.0.1:") { + return "" // Use host-only cookies for localhost + } + + // For IP addresses, use host-only cookies + if net.ParseIP(strings.Split(hostname, ":")[0]) != nil { + return "" // IP address - use host-only + } + + parts := strings.Split(hostname, ".") + if len(parts) <= 1 { + return hostname // single word domain - unlikely + } + + // For standard domains, return domain with leading dot + if len(parts) >= 2 { + // Return parent domain for proper cookie sharing + domain := parts[len(parts)-2] + "." + parts[len(parts)-1] + return "." + domain // Leading dot is important + } + + return "" // Fallback to host-only cookie +} + +// issueToken handles token generation, cookie setting, and JSON response +func issueToken(c *fiber.Ctx, token CheckpointToken) error { + // 1. Generate the token hash + tokenHash := calculateTokenHash(token) + + // 2. Create the data to store in the DB + storedData := &StoredTokenData{ + ClientIPHash: token.ClientIP, // Assumes token struct is already populated + UserAgentHash: token.UserAgent, + BrowserHint: token.BrowserHint, + LastVerified: token.LastVerified, + ExpiresAt: token.ExpiresAt, // Store original expiration + } + + // 3. Add to the database + if err := tokenStore.addToken(tokenHash, storedData); err != nil { + log.Printf("ERROR: Failed to store token in DB for hash %s: %v", tokenHash, err) + // Decide if this is fatal or just a warning. For now, log and continue. + // return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to store verification proof"}) + } + + // 4. Sign the token (as before) + token.Signature = "" // Clear signature before marshalling for signing + tokenBytesForSig, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tokenBytesForSig) + + // 5. Prepare final token for cookie + finalBytes, err := json.Marshal(token) + if err != nil { + log.Printf("ERROR: Failed to marshal final token: %v", err) + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to prepare token"}) + } + tokenStr := base64.StdEncoding.EncodeToString(finalBytes) + + // 6. 
Set cookie + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Cookie expires when token expires + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) + + return c.JSON(fiber.Map{"token": tokenStr, "expires_at": token.ExpiresAt}) +} + +// Initialize a secure random secret key or load from persistent storage +func initSecret() bool { + if _, err := os.Stat(checkpointConfig.SecretConfigPath); err == nil { + // Config file exists, try to load it + if loadedSecret := loadSecretFromFile(); loadedSecret != nil { + hmacSecret = loadedSecret + log.Printf("Loaded existing HMAC secret from %s", checkpointConfig.SecretConfigPath) + return true + } + } + + // No config file or loading failed, generate a new secret + hmacSecret = make([]byte, 32) + _, err := cryptorand.Read(hmacSecret) + if err != nil { + // Critical security error - don't continue with insecure random numbers + log.Fatalf("CRITICAL: Could not generate secure random secret: %v", err) + } + + // Ensure data directory exists + if err := os.MkdirAll(filepath.Dir(checkpointConfig.SecretConfigPath), 0755); err != nil { + log.Printf("WARNING: Could not create data directory: %v", err) + return true + } + + // Save the new secret to file + config := SecretConfig{ + HmacSecret: hmacSecret, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + } + + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not save HMAC secret to file: %v", err) + } else { + log.Printf("Created and saved new HMAC secret to %s", checkpointConfig.SecretConfigPath) + } + } + + return true +} + +// loadSecretFromFile loads the HMAC secret from persistent storage +func loadSecretFromFile() []byte { + configBytes, err := os.ReadFile(checkpointConfig.SecretConfigPath) + if err != nil { + log.Printf("ERROR: Could not read secret config file: %v", err) + return nil + } + + var config SecretConfig + if err := json.Unmarshal(configBytes, &config); err != nil { + log.Printf("ERROR: Could not parse secret config file: %v", err) + return nil + } + + if len(config.HmacSecret) < 16 { + log.Printf("ERROR: Secret from file is too short, generating a new one") + return nil + } + + // Update the last loaded time + config.UpdatedAt = time.Now() + if configBytes, err := json.Marshal(config); err == nil { + if err := os.WriteFile(checkpointConfig.SecretConfigPath, configBytes, 0600); err != nil { + log.Printf("WARNING: Could not update HMAC secret file: %v", err) + } + } + + return config.HmacSecret +} + +// Start a timer to periodically clean up the nonce and rate limit maps +func startCleanupTimer() bool { + ticker := 
time.NewTicker(1 * time.Hour) + go func() { + for range ticker.C { + cleanupExpiredData() + cleanupExpiredChallenges() + } + }() + return true +} + +// Clean up expired nonces and rate limit data +func cleanupExpiredData() { + // Clean up used nonces + now := time.Now() + expiredNonceCount := 0 + usedNonces.Range(func(key, value interface{}) bool { + nonce := key.(string) + timestamp := value.(time.Time) + if now.Sub(timestamp) > checkpointConfig.MaxNonceAge { + usedNonces.Delete(nonce) + expiredNonceCount++ + } + return true // continue iteration + }) + if expiredNonceCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired nonces.", expiredNonceCount) + } + + // Reset IP rate limits every hour by deleting all entries + ipRateLimit.Range(func(key, value interface{}) bool { + ipRateLimit.Delete(key) + return true + }) + log.Println("Checkpoint: IP rate limits reset.") +} + +// CheckpointToken represents a validated token +type CheckpointToken struct { + Nonce string `json:"g"` // Nonce + Challenge string `json:"-"` // Derived server-side, not in token + Salt string `json:"-"` // Derived server-side, not in token + Difficulty int `json:"-"` // Derived server-side, not in token + ExpiresAt time.Time `json:"exp"` + ClientIP string `json:"cip,omitempty"` + UserAgent string `json:"ua,omitempty"` + BrowserHint string `json:"bh,omitempty"` + Entropy string `json:"ent,omitempty"` + Created time.Time `json:"crt"` + LastVerified time.Time `json:"lvf,omitempty"` + Signature string `json:"sig,omitempty"` + TokenFormat int `json:"fmt"` +} + +// ChallengeParams stores parameters for a challenge +type ChallengeParams struct { + Challenge string `json:"challenge"` // Base64 encoded + Salt string `json:"salt"` // Base64 encoded + Difficulty int `json:"difficulty"` + ExpiresAt time.Time `json:"expires_at"` + ClientIP string `json:"-"` + PoSSeed string `json:"pos_seed"` // Hex encoded +} + +// isExcludedHTMLPath checks if a path should be excluded from the HTML checkpoint. +// Exclusions happen based on configured prefixes or file extensions. +func isExcludedHTMLPath(path string) bool { + // 1. Check path prefixes + for _, prefix := range checkpointConfig.HTMLCheckpointExclusions { + if strings.HasPrefix(path, prefix) { + return true // Excluded by prefix + } + } + + // 2. Check file extension using the set + ext := strings.ToLower(filepath.Ext(path)) + if ext != "" { + if _, exists := checkpointConfig.HTMLCheckpointExcludedExtensions[ext]; exists { + return true // Excluded by file extension + } + } + + // 3. If not excluded by prefix or extension, it needs the checkpoint + return false +} + +// DirectProxy returns a handler that simply forwards the request/response to targetURL. +// Headers, status codes, and body are passed through without modification. 
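// For illustration, with the defaults shipped in middleware/config/checkpoint.toml
// ("/api" as an excluded prefix and the usual static extensions excluded):
//
//	isExcludedHTMLPath("/api/pow/challenge") // true  – excluded by the "/api" prefix
//	isExcludedHTMLPath("/static/app.css")    // true  – excluded by the ".css" extension
//	isExcludedHTMLPath("/about")             // false – extensionless page, checkpoint applies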
+func DirectProxy(targetURL string) fiber.Handler { + target, err := url.Parse(targetURL) + if err != nil { + return func(c *fiber.Ctx) error { + log.Printf("ERROR: Invalid target URL %s: %v", targetURL, err) + return fiber.ErrBadGateway + } + } + + proxy := httputil.NewSingleHostReverseProxy(target) + + // Set up custom director to properly map headers + originalDirector := proxy.Director + proxy.Director = func(req *http.Request) { + originalDirector(req) + + // Add X-Forwarded headers + req.Header.Set("X-Forwarded-Host", req.Host) + req.Header.Set("X-Forwarded-Proto", "http") // Update to https when needed + + if v := req.Header.Get("X-Forwarded-For"); v != "" { + req.Header.Set("X-Forwarded-For", v+", "+req.RemoteAddr) + } else { + req.Header.Set("X-Forwarded-For", req.RemoteAddr) + } + } + + return func(c *fiber.Ctx) error { + // Create proxy request + proxyReq, err := http.NewRequest( + string(c.Method()), + target.String()+c.Path(), + bytes.NewReader(c.Body()), + ) + if err != nil { + log.Printf("ERROR: Failed to create proxy request: %v", err) + return fiber.ErrBadGateway + } + + // Copy all headers from the Fiber context to the proxy request + c.Request().Header.VisitAll(func(key, value []byte) { + proxyReq.Header.Set(string(key), string(value)) + }) + + // Execute the proxy request + proxyRes, err := http.DefaultClient.Do(proxyReq) + if err != nil { + log.Printf("ERROR: Proxy request failed: %v", err) + return fiber.ErrBadGateway + } + defer proxyRes.Body.Close() + + // Copy all headers from the proxy response to Fiber's response + for key, values := range proxyRes.Header { + for _, value := range values { + c.Response().Header.Add(key, value) + } + } + + // Set the status code + c.Status(proxyRes.StatusCode) + + // Copy the body + body, err := io.ReadAll(proxyRes.Body) + if err != nil { + log.Printf("ERROR: Failed to read proxy response body: %v", err) + return fiber.ErrBadGateway + } + + return c.Send(body) + } +} + +// isBlockedBot checks concurrently if the User-Agent indicates a known bot +// or doesn't have a standard browser prefix. +// It returns true as soon as one check decides to block. 
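// Example wiring for a Fiber route (the path and backend URL are placeholders):
//
//	app.All("/legacy/*", DirectProxy("http://127.0.0.1:9000"))
//
// Two things worth noting about the implementation above: the actual round trip is made
// with http.DefaultClient (the httputil.ReverseProxy and its Director are constructed but
// never used to serve the request), and the upstream URL is built from
// target.String()+c.Path(), so the original query string is not forwarded.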
+func isBlockedBot(userAgent string) bool { + if userAgent == "" { + // Empty User-Agent is suspicious, block it + log.Printf("INFO: UA blocked - empty user agent") + return true + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() // Ensure context is cancelled eventually + + resultChan := make(chan bool, 2) // Buffered channel for results + + // Goroutine 1: Library-based bot check + go func() { + ua := useragent.Parse(userAgent) + shouldBlock := ua.Bot + if shouldBlock { + log.Printf("INFO: UA blocked by library (Bot detected: %s): %s", ua.Name, userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Goroutine 2: Prefix check + go func() { + // Standard browser User-Agent prefixes + standardPrefixes := []string{"Mozilla/", "Opera/", "DuckDuckGo/", "Dart/"} + hasStandardPrefix := false + + for _, prefix := range standardPrefixes { + if strings.HasPrefix(userAgent, prefix) { + hasStandardPrefix = true + break + } + } + + // Block if it does NOT have a standard prefix + shouldBlock := !hasStandardPrefix + + if shouldBlock { + log.Printf("INFO: UA blocked by prefix check (doesn't have standard prefix): %s", userAgent) + } + select { + case resultChan <- shouldBlock: + case <-ctx.Done(): // Don't send if context is cancelled + } + }() + + // Wait for results and decide + result1 := <-resultChan + if result1 { + cancel() // Found a reason to block, cancel the other check + return true + } + + // First check didn't block, wait for the second result + result2 := <-resultChan + // cancel() is deferred, so it will run anyway, ensuring cleanup + return result2 // Block if the second check decided to block +} + +// New gives you a Fiber handler that does the POW challenge (HTML/API) or proxies requests. 
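// For illustration, with the two concurrent checks above:
//
//	isBlockedBot("")           // true  – empty User-Agent
//	isBlockedBot("curl/8.0.1") // true  – lacks a standard browser prefix
//	isBlockedBot("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")
//	                           // true  – has the Mozilla/ prefix but the useragent library flags it as a bot
//	isBlockedBot("Mozilla/5.0 (X11; Linux x86_64; rv:126.0) Gecko/20100101 Firefox/126.0")
//	                           // false – standard prefix and not recognised as a bot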
+func New() fiber.Handler { + return func(c *fiber.Ctx) error { + host := c.Hostname() + targetURL, useProxy := checkpointConfig.ReverseProxyMappings[host] + path := c.Path() + + // --- User-Agent Validation --- + // Only check User-Agent if path is not in exclusion list + skipUA := false + for _, prefix := range checkpointConfig.UserAgentValidationExclusions { + if strings.HasPrefix(path, prefix) { + skipUA = true + break + } + } + + if !skipUA { + // First check required UA prefixes for specific paths + for p, required := range checkpointConfig.UserAgentRequiredPrefixes { + if strings.HasPrefix(path, p) { + ua := c.Get("User-Agent") + if !strings.HasPrefix(ua, required) { + log.Printf("INFO: UA blocked by required prefix %s: %s", required, ua) + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + break + } + } + + // Then do general bot check for all non-excluded paths + userAgent := c.Get("User-Agent") + if isBlockedBot(userAgent) { + if strings.HasPrefix(path, "/api") { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{ + "error": "Access denied for automated clients.", + "reason": "useragent", + }) + } + return c.Status(fiber.StatusForbidden).SendString("Access denied for automated clients.") + } + } + + // Handle any API endpoints + if strings.HasPrefix(path, "/api") { + // Always serve PoW endpoints locally (challenge & verify) + if strings.HasPrefix(path, "/api/pow/") || strings.HasPrefix(path, "/api/verify") { + log.Printf("API checkpoint endpoint %s - handling locally", path) + return c.Next() + } + // Other API paths: skip checkpoint + if useProxy { + // Proxy to backend for proxied hosts + log.Printf("API proxying endpoint %s to %s", path, targetURL) + return DirectProxy(targetURL)(c) + } + log.Printf("API endpoint %s - bypassing checkpoint", path) + return c.Next() + } + + // --- Reverse Proxy Logic --- + if useProxy { + // Check for existing valid token cookie + tokenCookie := c.Cookies(checkpointConfig.CookieName) + log.Printf("Proxy: Checking token for host %s, path %s, cookie present: %v", + host, path, tokenCookie != "") + + // Check if this is an excluded path (API endpoints, etc) + if isExcludedHTMLPath(path) { + log.Printf("Excluded path %s for proxied host %s - proxying without token check", path, host) + + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } + + valid, err := validateToken(tokenCookie, c) + + if err != nil { + // Log validation errors but treat as invalid for proxying + log.Printf("Error validating token for proxied host %s, path %s: %v", host, path, err) + } + + if valid { + log.Printf("Valid token found for proxied host %s, path %s - forwarding request", host, path) + // Token is valid, proxy the request + // Direct transparent proxy (preserves all headers/content types) + return DirectProxy(targetURL)(c) + } else { + // Add debug logging + log.Printf("No valid token for proxied host %s, path %s - serving interstitial", host, path) + + // Save the original full URL for potential redirection after verification + c.Locals("originalURL", c.OriginalURL()) + + // No valid token, serve the interstitial challenge page. 
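// In outline, the client side of this interstitial works as follows: the page rendered by
// serveInterstitial embeds the request ID minted by generateRequestID, the browser fetches
// the puzzle parameters from GET /api/pow/challenge?id=<request_id>
// (GetCheckpointChallengeHandler), solves the puzzle, and POSTs the solution – request_id
// plus the "g"/"h"/"i" fields of CheckpointVerifyRequest – to /api/pow/verify
// (VerifyCheckpointHandler), which stores the proof and sets the token cookie.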
+ return serveInterstitial(c) + } + } + + // --- Standard HTML/Static/API Logic (No Proxy Mapping) --- + // Skip checkpoint for excluded paths (e.g., static assets, API endpoints handled separately) + if isExcludedHTMLPath(path) { + return c.Next() + } + + // --- Path needs checkpoint (potential HTML page) --- + tokenCookie := c.Cookies(checkpointConfig.CookieName) + if tokenCookie != "" { + valid, err := validateToken(tokenCookie, c) + if err != nil { + // Log validation errors but still serve interstitial for safety + log.Printf("Error validating token for path %s: %v", path, err) + // Fall through to serve interstitial + } else if valid { + // Token is valid, proceed to the requested page/handler + return c.Next() + } + // If token was present but invalid/expired, fall through to serve interstitial + } + + // No valid token found, serve the interstitial challenge page. + return serveInterstitial(c) + } +} + +// generateRequestID creates a unique ID for this verification request +func generateRequestID(c *fiber.Ctx) string { + challenge, salt := generateChallenge() + // Generate PoS seed + posSeedBytes := make([]byte, 32) + if n, err := cryptorand.Read(posSeedBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate PoS seed: %v", err) + } else if n != len(posSeedBytes) { + log.Fatalf("CRITICAL: Short read generating PoS seed: read %d bytes", n) + } + posSeed := hex.EncodeToString(posSeedBytes) + // Generate request ID + randBytes := make([]byte, 16) + if n, err := cryptorand.Read(randBytes); err != nil { + log.Fatalf("CRITICAL: Failed to generate request ID: %v", err) + } else if n != len(randBytes) { + log.Fatalf("CRITICAL: Short read generating request ID: read %d bytes", n) + } + requestID := hex.EncodeToString(randBytes) + + // Base64-encode the hex challenge and salt for storage + encodedChallenge := base64.StdEncoding.EncodeToString([]byte(challenge)) + encodedSalt := base64.StdEncoding.EncodeToString([]byte(salt)) + params := ChallengeParams{ + Challenge: encodedChallenge, + Salt: encodedSalt, + Difficulty: checkpointConfig.Difficulty, + ExpiresAt: time.Now().Add(checkpointConfig.ChallengeExpiration), + ClientIP: getRealIP(c), + PoSSeed: posSeed, + } + challengeStore.Store(requestID, params) + return requestID +} + +func cleanupExpiredChallenges() { + now := time.Now() + expiredChallengeCount := 0 + challengeStore.Range(func(key, value interface{}) bool { + id := key.(string) + params := value.(ChallengeParams) + if now.After(params.ExpiresAt) { + challengeStore.Delete(id) + expiredChallengeCount++ + } + return true // continue iteration + }) + if expiredChallengeCount > 0 { + log.Printf("Checkpoint: Cleaned up %d expired challenges.", expiredChallengeCount) + } +} + +// GetCheckpointChallengeHandler serves challenge parameters via API +func GetCheckpointChallengeHandler(c *fiber.Ctx) error { + requestID := c.Query("id") + if requestID == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Missing request ID"}) + } + + // Apply rate limiting to challenge generation + clientIP := getRealIP(c) + val, _ := ipRateLimit.LoadOrStore(clientIP, new(atomic.Int64)) + ipCounter := val.(*atomic.Int64) + attempts := ipCounter.Add(1) // Increment and get new value + + // Limit to a reasonable number of challenge requests per hour (using the same MaxAttemptsPerHour config) + if attempts > int64(checkpointConfig.MaxAttemptsPerHour) { + return c.Status(fiber.StatusTooManyRequests).JSON(fiber.Map{"error": "Too many challenge requests. 
Please try again later."}) + } + + val, exists := challengeStore.Load(requestID) + if !exists { + return c.Status(fiber.StatusNotFound).JSON(fiber.Map{"error": "Challenge not found or expired"}) + } + params := val.(ChallengeParams) + + if clientIP != params.ClientIP { + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + decoySeedBytes := make([]byte, 8) + cryptorand.Read(decoySeedBytes) + decoySeed := hex.EncodeToString(decoySeedBytes) + decoyFields := make([]map[string]interface{}, 0) + decoyFieldCount := 2 + int(decoySeedBytes[0])%3 + for i := 0; i < decoyFieldCount; i++ { + nameLen := 5 + int(decoySeedBytes[i%8])%8 + valLen := 8 + int(decoySeedBytes[(i+1)%8])%24 + name := randomHexString(nameLen) + val := randomHexString(valLen) + decoyFields = append(decoyFields, map[string]interface{}{name: val}) + } + return c.JSON(fiber.Map{ + "a": params.Challenge, // challenge + "b": params.Salt, // salt + "c": params.Difficulty, // difficulty + "d": params.PoSSeed, // pos_seed + "e": decoySeed, // decoy_seed + "f": decoyFields, // decoy_fields + }) +} + +func randomHexString(n int) string { + b := make([]byte, (n+1)/2) + if m, err := cryptorand.Read(b); err != nil { + log.Fatalf("CRITICAL: Failed to generate random hex string: %v", err) + } else if m != len(b) { + log.Fatalf("CRITICAL: Short read generating random hex string: read %d bytes", m) + } + s := hex.EncodeToString(b) + if len(s) < n { + log.Fatalf("CRITICAL: Random hex string too short: got %d hex chars, want %d", len(s), n) + } + return s[:n] +} + +func getFullClientIP(c *fiber.Ctx) string { + ip := getRealIP(c) + if ip == "" { + return "unknown" + } + h := sha256.Sum256([]byte(ip)) + return hex.EncodeToString(h[:8]) +} + +func hashUserAgent(userAgent string) string { + if userAgent == "" { + return "" + } + hash := sha256.Sum256([]byte(userAgent)) + return hex.EncodeToString(hash[:8]) +} + +func extractBrowserFingerprint(c *fiber.Ctx) string { + headers := []string{ + c.Get("Sec-CH-UA"), c.Get("Sec-CH-UA-Platform"), c.Get("Sec-CH-UA-Mobile"), + c.Get("Sec-CH-UA-Platform-Version"), c.Get("Sec-CH-UA-Arch"), c.Get("Sec-CH-UA-Model"), + } + var validHeaders []string + for _, h := range headers { + if h != "" { + validHeaders = append(validHeaders, h) + } + } + if len(validHeaders) == 0 { + return "" + } + fingerprint := strings.Join(validHeaders, "|") + hash := sha256.Sum256([]byte(fingerprint)) + return hex.EncodeToString(hash[:12]) +} + +func validateToken(tokenStr string, c *fiber.Ctx) (bool, error) { + // Explicitly handle missing token case first. + if tokenStr == "" { + return false, nil // No token cookie found, definitely not valid. + } + + // 1. Decode the token string from the cookie + tokenBytes, err := base64.StdEncoding.DecodeString(tokenStr) + if err != nil { + // Invalid Base64 encoding - treat as invalid token, not a system error + return false, nil + } + + // Check for empty byte slice after decoding + if len(tokenBytes) == 0 { + // Decoded to empty - treat as invalid token + return false, nil + } + + // 2. Unmarshal + var token CheckpointToken + if err := json.Unmarshal(tokenBytes, &token); err != nil { + // Invalid JSON structure - treat as invalid token + return false, nil // Error seen in logs comes from here, now returns nil error + } + + // 3. Basic expiration check based on ExpiresAt field in the token itself + // Note: Return nil error for expired token, it's just invalid. 
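// validateToken deliberately reports malformed, expired, unsigned or mis-bound tokens as
// (false, nil): only genuine database failures come back as a non-nil error, so callers can
// tell "serve the interstitial again" apart from "something is wrong with the token store".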
+ if time.Now().After(token.ExpiresAt) { + return false, nil // Token itself says it's expired + } + + // 4. Check token signature first (Format 2+) + if token.TokenFormat < 2 { + return false, nil // Old format not supported/secure - invalid + } + if !verifyTokenSignature(token, tokenBytes) { + return false, nil // Invalid signature - invalid + } + + // 5. Calculate the token hash to look up in the database + tokenHash := calculateTokenHash(token) + + // 6. Look up the token data in BadgerDB + storedData, found, dbErr := tokenStore.lookupTokenData(c.Context(), tokenHash) + if dbErr != nil { + // Actual DB error during lookup - THIS is a real error to return + return false, fmt.Errorf("token DB lookup failed: %w", dbErr) + } + if !found { + // Token hash not found in DB or explicitly expired according to DB record + return false, nil + } + + // 7. *** CRITICAL: Verify bindings against stored data and current request *** + // Compare Client IP Hash + currentPartialIP := getFullClientIP(c) + if storedData.ClientIPHash != currentPartialIP { + return false, nil // IP mismatch - invalid + } + + // Compare User Agent Hash + currentUserAgent := hashUserAgent(c.Get("User-Agent")) + if storedData.UserAgentHash != currentUserAgent { + return false, nil // User agent mismatch - invalid + } + + // Compare Browser Hint + currentBrowserHint := extractBrowserFingerprint(c) + // Only enforce if hint was stored AND current hint is available + if storedData.BrowserHint != "" && currentBrowserHint != "" && storedData.BrowserHint != currentBrowserHint { + return false, nil // Browser hint mismatch - invalid + } + + // 8. All checks passed! Token is valid and bound correctly. + // Update LastVerified time in the database (best effort, log errors) + if err := tokenStore.updateTokenVerification(tokenHash); err != nil { + log.Printf("WARNING: Failed to update token verification time for hash %s: %v", tokenHash, err) + } + + // Refresh the cookie with potentially updated ExpiresAt (if sliding window desired) or just LastVerified. + // For simplicity, we'll just refresh with the same ExpiresAt for now. 
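// Keeping ExpiresAt unchanged means the token is not a sliding window: it hard-expires
// TokenExpiration after issuance (24h with the shipped checkpoint.toml) no matter how
// recently it was re-verified; only LastVerified is refreshed here.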
+ token.LastVerified = time.Now() + updateTokenCookie(c, token) // Resign and set cookie + + return true, nil +} + +func updateTokenCookie(c *fiber.Ctx, token CheckpointToken) { + // Determine if we're serving on HTTPS or HTTP + isSecure := true + // Check if we're in development mode using non-secure connection + if strings.HasPrefix(c.Protocol(), "http") && !strings.HasPrefix(c.BaseURL(), "https") { + isSecure = false // Running on http:// (dev mode) + } + + // Get domain for cookie - either from config or auto-detect + cookieDomain := checkpointConfig.CookieDomain + if cookieDomain == "" { + // Auto-detect - for development convenience + cookieDomain = getDomainFromHost(c.Hostname()) + } + + // Set SameSite based on domain - use Lax for cross-subdomain + sameSite := "Strict" + if cookieDomain != "" { + sameSite = "Lax" // Lax allows subdomain sharing better than Strict + } + + // Recompute signature because LastVerified might have changed + token.Signature = "" + tempBytes, _ := json.Marshal(token) + token.Signature = computeTokenSignature(token, tempBytes) // Compute signature on token WITHOUT old signature + + finalTokenBytes, err := json.Marshal(token) // Marshal again with new signature + if err != nil { + log.Printf("Error marshaling token for cookie update: %v", err) + return + } + tokenStr := base64.StdEncoding.EncodeToString(finalTokenBytes) + c.Cookie(&fiber.Cookie{ + Name: checkpointConfig.CookieName, + Value: tokenStr, + Expires: token.ExpiresAt, // Use original expiration + Path: "/", + Domain: cookieDomain, + HTTPOnly: true, + SameSite: sameSite, + Secure: isSecure, // Only set Secure in HTTPS environments + }) +} + +func verifyProofOfWork(challenge, salt, nonce string, difficulty int) bool { + inputStr := challenge + salt + nonce + hash := calculateHash(inputStr) + prefix := strings.Repeat("0", difficulty) + return strings.HasPrefix(hash, prefix) +} + +func calculateHash(input string) string { + hash := sha256.Sum256([]byte(input)) + return hex.EncodeToString(hash[:]) +} + +func computeTokenSignature(token CheckpointToken, tokenBytes []byte) string { + tokenCopy := token + tokenCopy.Signature = "" // Ensure signature field is empty for signing + tokenToSign, _ := json.Marshal(tokenCopy) + h := hmac.New(sha256.New, hmacSecret) + h.Write(tokenToSign) + return hex.EncodeToString(h.Sum(nil)) +} + +func verifyTokenSignature(token CheckpointToken, tokenBytes []byte) bool { + if token.Signature == "" { + return false + } + expectedSignature := computeTokenSignature(token, tokenBytes) + return hmac.Equal([]byte(token.Signature), []byte(expectedSignature)) +} + +// VerifyCheckpointHandler verifies the challenge solution +func VerifyCheckpointHandler(c *fiber.Ctx) error { + clientIP := getRealIP(c) + + var req CheckpointVerifyRequest + if err := c.BodyParser(&req); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid request format"}) + } + + // Challenge lookup + challengeVal, challengeExists := challengeStore.Load(req.RequestID) + if !challengeExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid or expired request ID"}) + } + params := challengeVal.(ChallengeParams) + + if clientIP != params.ClientIP { // Check against IP stored with challenge + return c.Status(fiber.StatusForbidden).JSON(fiber.Map{"error": "IP address mismatch for challenge"}) + } + + decodedChallenge := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Challenge); err == nil { + decodedChallenge = string(decoded) + } else { + return 
c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode challenge"}) + } + decodedSalt := "" + if decoded, err := base64.StdEncoding.DecodeString(params.Salt); err == nil { + decodedSalt = string(decoded) + } else { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to decode salt"}) + } + + if req.Nonce == "" { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Nonce ('g') required"}) + } + + // --- Nonce Check --- + nonceKey := req.Nonce + decodedChallenge + _, nonceExists := usedNonces.Load(nonceKey) + if nonceExists { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "This solution has already been used"}) + } + // --- End Nonce Check --- + + if !verifyProofOfWork(decodedChallenge, decodedSalt, req.Nonce, params.Difficulty) { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid proof-of-work solution"}) + } + + // --- Store Used Nonce (only after PoW is verified) --- + usedNonces.Store(nonceKey, time.Now()) + // --- End Store Used Nonce --- + + // Validate PoS hashes and times if provided + if len(req.PoSHashes) == 3 && len(req.PoSTimes) == 3 { + if req.PoSHashes[0] != req.PoSHashes[1] || req.PoSHashes[1] != req.PoSHashes[2] { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "PoS hashes ('h') do not match"}) + } + if len(req.PoSHashes[0]) != 64 { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS hash ('h') length"}) + } + if err := checkPoSTimes(req.PoSTimes); err != nil { + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": err.Error()}) + } + } else if checkpointConfig.CheckPoSTimes && (len(req.PoSHashes) != 0 || len(req.PoSTimes) != 0) { + // If PoS checking is enabled, but incorrect number of hashes/times provided + return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{"error": "Invalid PoS data provided"}) + } + + // Challenge is valid, remove it from store + challengeStore.Delete(req.RequestID) + + entropyBytes := make([]byte, 8) + _, err := cryptorand.Read(entropyBytes) + if err != nil { + return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{"error": "Failed to generate secure token entropy"}) + } + entropy := hex.EncodeToString(entropyBytes) + + // *** Gather current binding info for the new token *** + now := time.Now() + expiresAt := now.Add(checkpointConfig.TokenExpiration) + browserHint := extractBrowserFingerprint(c) + clientIPHash := getFullClientIP(c) + userAgentHash := hashUserAgent(c.Get("User-Agent")) + + token := CheckpointToken{ + Nonce: req.Nonce, + ExpiresAt: expiresAt, + ClientIP: clientIPHash, + UserAgent: userAgentHash, + BrowserHint: browserHint, + Entropy: entropy, + Created: now, + LastVerified: now, + TokenFormat: 2, + } + + // Add a response header indicating success for the proxy + c.Set("X-Checkpoint-Status", "success") + log.Printf("Successfully verified challenge for IP %s, issuing token", clientIP) + + // Issue token (handles DB storage, signing, cookie setting) + return issueToken(c, token) +} + +// Renamed request struct +type CheckpointVerifyRequest struct { + RequestID string `json:"request_id"` + Nonce string `json:"g"` + PoSHashes []string `json:"h"` + PoSTimes []int64 `json:"i"` + DecoyHashes []string `json:"j"` + DecoyTimes []int64 `json:"k"` + DecoyFields []map[string]interface{} `json:"l"` +} + +func generateChallenge() (string, string) { + randomBytes := make([]byte, 16) + _, err := cryptorand.Read(randomBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed 
to generate secure random challenge: %v", err) + } + saltBytes := make([]byte, checkpointConfig.SaltLength) + _, err = cryptorand.Read(saltBytes) + if err != nil { + log.Fatalf("CRITICAL: Failed to generate secure random salt: %v", err) + } + salt := hex.EncodeToString(saltBytes) + return hex.EncodeToString(randomBytes), salt +} + +// calculateTokenHash calculates a unique hash for storing the token status +// IMPORTANT: This hash is now used as the key in the database. +func calculateTokenHash(token CheckpointToken) string { + // Hash relevant fields that identify this specific verification instance + // Using Nonce, Entropy, and Creation time ensures uniqueness per issuance. + data := fmt.Sprintf("%s:%s:%d", + token.Nonce, + token.Entropy, + token.Created.UnixNano()) + hash := sha256.Sum256([]byte(data)) + return hex.EncodeToString(hash[:]) +} + +// RequestSanitizationMiddleware spots malicious patterns (SQLi, XSS, path traversal) +// and returns 403 immediately to keep your app safe. +func RequestSanitizationMiddleware() fiber.Handler { + return func(c *fiber.Ctx) error { + // Check URL path for directory traversal + path := c.Path() + if strings.Contains(path, "../") || strings.Contains(path, "..\\") { + log.Printf("Security block: Directory traversal attempt in path: %s from IP: %s", path, getRealIP(c)) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + + // Check query parameters for malicious patterns + query := c.Request().URI().QueryString() + if len(query) > 0 { + queryStr := string(query) + + // Check for dangerous characters if configured + if checkpointConfig.BlockDangerousPathChars { + if strings.Contains(queryStr, ";") || strings.Contains(queryStr, "\\") || strings.Contains(queryStr, "`") { + log.Printf("Security block: Dangerous character in query from IP: %s, Query: %s", getRealIP(c), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + + // Check for configured attack patterns + for _, pattern := range checkpointConfig.DangerousQueryPatterns { + if pattern.MatchString(queryStr) { + log.Printf("Security block: Malicious pattern match in query from IP: %s, Pattern: %s, Query: %s", + getRealIP(c), pattern.String(), queryStr) + return c.Status(fiber.StatusForbidden).SendString("Forbidden") + } + } + } + + return c.Next() + } +} diff --git a/middleware/config/checkpoint.toml b/middleware/config/checkpoint.toml new file mode 100644 index 0000000..1baf766 --- /dev/null +++ b/middleware/config/checkpoint.toml @@ -0,0 +1,77 @@ +# ----------------------------------------------------------------------------- +# Checkpoint Middleware Configuration (checkpoint.toml) +# +# All durations are parsed via time.ParseDuration (e.g. "24h"). +# Arrays and tables map directly to the Config struct fields. 
+# ----------------------------------------------------------------------------- + +# === GENERAL SETTINGS === +# Number of leading zeros required in PoW hash +Difficulty = 4 +# Validity period for issued tokens +TokenExpiration = "24h" +# Name of the cookie used to store the checkpoint token +CookieName = "checkpoint_token" +# Domain attribute for the cookie; empty = host-only (localhost) +CookieDomain = "" +# Length of the random salt in bytes for challenges +SaltLength = 16 + +# === RATE LIMITING & EXPIRATION === +# Max PoW verification attempts per IP per hour +MaxAttemptsPerHour = 10 +# Max age for used nonces before cleanup +MaxNonceAge = "24h" +# Time allowed for solving a challenge +ChallengeExpiration = "5m" + +# === PERSISTENCE PATHS === +# File where HMAC secret is stored +SecretConfigPath = "./data/checkpoint_secret.json" +# Directory for BadgerDB token store +TokenStoreDBPath = "./data/checkpoint_tokendb" +# Ordered fallback paths for interstitial HTML +InterstitialPaths = [ + "./public/static/pow-interstitial.html", + "./develop/static/pow-interstitial.html" +] + +# === SECURITY SETTINGS === +# Enable Proof-of-Space-Time consistency checks +CheckPoSTimes = true +# Allowed ratio between slowest and fastest PoS runs +PoSTimeConsistencyRatio = 1.35 + +# === HTML CHECKPOINT EXCLUSIONS === +# Path prefixes to skip PoW interstitial +HTMLCheckpointExclusions = ["/api"] +# File extensions to skip PoW check +HTMLCheckpointExcludedExtensions = { ".jpg" = true, ".jpeg" = true, ".png" = true, ".gif" = true, ".svg" = true, ".webp" = true, ".ico" = true, ".bmp" = true, ".tif" = true, ".tiff" = true, ".mp4" = true, ".webm" = true, ".css" = true, ".js" = true, ".mjs" = true, ".woff" = true, ".woff2" = true, ".ttf" = true, ".otf" = true, ".eot" = true, ".json" = true, ".xml" = true, ".txt" = true, ".pdf" = true, ".map" = true, ".wasm" = true } + +# === QUERY SANITIZATION === +# Regex patterns (case-insensitive) to block in query strings +DangerousQueryPatterns = [ + "(?i)union\\s+select", + "(?i)drop\\s+table", + "(?i)insert\\s+into", + "(?i) 0 { + blockType = "asn_name_group" + blockValue = groupName + log.Printf("INFO: Blocking IP %s based on %s: %s (ASN: %d, Org: '%s')", ipStr, blockType, blockValue, clientASN, asnOrg) + customPage = asnGroupBlockPages[groupName] + // No need to unlock here, defer handles it + return cacheAndReturnBlockResult(ipStr, blockType, blockValue, customPage, asnOrg) + } + } + // RUnlock happens via defer + } + } else if asnErr != nil && !strings.Contains(asnErr.Error(), "cannot be found in the database") { + // Log errors other than "not found" + log.Printf("WARNING: GeoIP ASN lookup error for IP %s: %v", ipStr, asnErr) + } + + // --- Cache the result before returning --- // + computedEntry := blockCacheEntry{ + blocked: false, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + ipBlockCacheMutex.Lock() + ipBlockCache[ipStr] = computedEntry + ipBlockCacheMutex.Unlock() + return false, "", "", "", "" // Not blocked +} + +// Helper function to cache block results +func cacheAndReturnBlockResult(ipStr string, blockType string, blockValue string, customPage string, asnOrgName string) (bool, string, string, string, string) { + // Create the cache entry + computedEntry := blockCacheEntry{ + blocked: true, + blockType: blockType, + blockValue: blockValue, + customPage: customPage, + asnOrgName: asnOrgName, + expiresAt: time.Now().Add(ipBlockCacheTTL), + } + + // Use a separate defer+recover to ensure we don't crash the entire server + // if there's any issue with the 
cache + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("RECOVERED from panic while caching result: %v", r) + } + }() + + ipBlockCacheMutex.Lock() + defer ipBlockCacheMutex.Unlock() // Use defer to ensure unlock happens + ipBlockCache[ipStr] = computedEntry + }() + + return true, blockType, blockValue, customPage, asnOrgName +} + +// buildASNNameMatchers creates Aho-Corasick matchers for faster ASN name checking +func buildASNNameMatchers() { + // Acquire write lock before modifying the global map + asnNameMatchersMutex.Lock() + defer asnNameMatchersMutex.Unlock() + + // Clear any existing matchers first + asnNameMatchers = make(map[string]*ahocorasick.Matcher) + + for groupName, nameList := range blockedASNNames { + // Skip if the name list is empty + if len(nameList) == 0 { + log.Printf("Skipping matcher build for empty group: %s", groupName) + continue + } + + // Convert names to lowercase byte slices for case-insensitive matching + dict := make([][]byte, 0, len(nameList)) + for _, name := range nameList { + if name != "" { + dict = append(dict, []byte(strings.ToLower(name))) + } + } + + // Only create a matcher if we have patterns + if len(dict) > 0 { + // Use a recovery mechanism in case the matcher creation fails + func() { + defer func() { + if r := recover(); r != nil { + log.Printf("PANIC while building Aho-Corasick matcher for group %s: %v", groupName, r) + // Ensure the entry for this group is nil if creation failed + asnNameMatchers[groupName] = nil + } + }() + + // This assignment happens under the write lock + asnNameMatchers[groupName] = ahocorasick.NewMatcher(dict) + log.Printf("Built Aho-Corasick matcher for ASN name group: %s (%d patterns)", groupName, len(dict)) + }() + } else { + log.Printf("No valid patterns found for ASN name group: %s", groupName) + } + } + // Unlock happens via defer +} + +// ReloadGeoIPDatabases closes and reopens the GeoIP database readers +// to load updated database files. Safe to call while the server is running. +func ReloadGeoIPDatabases() { + // Close existing readers if they're open + if geoipCountryReader != nil { + geoipCountryReader.Close() + geoipCountryReader = nil + } + if geoipASNReader != nil { + geoipASNReader.Close() + geoipASNReader = nil + } + + // Re-initialize the readers + initGeoIP() + log.Printf("GeoIP databases reloaded") +} + +// getRealIP gets the real client IP when behind a reverse proxy +// It checks X-Forwarded-For header first, then falls back to c.IP() +func getRealIP(c *fiber.Ctx) string { + // Check X-Forwarded-For header first + if xff := c.Get("X-Forwarded-For"); xff != "" { + // X-Forwarded-For can contain multiple IPs (client, proxy1, proxy2, ...) 
+ // The first one is the original client IP + ips := strings.Split(xff, ",") + if len(ips) > 0 { + // Get the first IP and trim whitespace + clientIP := strings.TrimSpace(ips[0]) + // Validate it's a real IP + if net.ParseIP(clientIP) != nil { + log.Printf("Using X-Forwarded-For IP: %s (original: %s)", clientIP, c.IP()) + return clientIP + } + } + } + + // Also check for custom Remote-Addr header that might be set by some proxies + if remoteAddr := c.Get("$remote_addr"); remoteAddr != "" { + // Validate it's a real IP + if net.ParseIP(remoteAddr) != nil { + log.Printf("Using $remote_addr IP: %s (original: %s)", remoteAddr, c.IP()) + return remoteAddr + } + } + + // Fallback to default IP + return c.IP() +} diff --git a/middleware/plugin.go b/middleware/plugin.go new file mode 100644 index 0000000..6305f0b --- /dev/null +++ b/middleware/plugin.go @@ -0,0 +1,47 @@ +// Package middleware contains a simple plugin system for Fiber middleware. +// Register plugins by name and factory, then main.go will load them automatically. +package middleware + +import ( + "path/filepath" + + "github.com/BurntSushi/toml" + "github.com/gofiber/fiber/v2" +) + +// Plugin holds a plugin's name and a function that makes its handler. +type Plugin struct { + Name string + Factory func() fiber.Handler +} + +// registry stores every plugin we've registered. +var registry []Plugin + +// RegisterPlugin tags a plugin with a name and a factory so we can use it in the app. +func RegisterPlugin(name string, factory func() fiber.Handler) { + registry = append(registry, Plugin{Name: name, Factory: factory}) +} + +// LoadPlugins returns the handler functions for each plugin. +// If skipCheckpoint is true, it skips the plugin named "checkpoint". +func LoadPlugins(skipCheckpoint bool) []fiber.Handler { + var handlers []fiber.Handler + for _, p := range registry { + if skipCheckpoint && p.Name == "checkpoint" { + continue + } + handlers = append(handlers, p.Factory()) + } + return handlers +} + +// LoadConfig loads the TOML file at middleware/config/[name].toml +// and decodes it into the struct you provide. 
+func LoadConfig(name string, v interface{}) error { + path := filepath.Join("middleware", "config", name+".toml") + if _, err := toml.DecodeFile(path, v); err != nil { + return err + } + return nil +} diff --git a/public/css/docs.css b/public/css/docs.css new file mode 100644 index 0000000..770bcff --- /dev/null +++ b/public/css/docs.css @@ -0,0 +1 @@ +:root{--background-color:#121212;--card-gradient-start:#1e1e1e;--card-gradient-end:#333;--header-background:#262626;--text-color:#fff;--accent-color:#4285F4;--subtext-color:#ccc;--code-background:#2c2c2c;--border-color:#444;--note-background:rgba(33, 150, 243, 0.15);--note-border:#2196F3;--warning-background:rgba(255, 193, 7, 0.15);--warning-border:#FFC107;--security-background:rgba(29, 39, 30, 0.7);--security-border:#4CAF50;--overlay-background:rgba(0, 0, 0, 0.85)}*{margin:0;padding:0;box-sizing:border-box}body{background:var(--background-color);color:var(--text-color);line-height:1.6;padding:0;margin:0}.container{max-width:1e3px;margin:0 auto;padding:1rem 2rem;padding-top:.5rem}h1,h2,h3,h4{color:var(--accent-color);font-weight:600;text-align:center}h1{margin-top:.5em;border-bottom:2px solid var(--border-color);padding-bottom:.3em;font-size:2.2rem;margin-bottom:1.5rem}h2{border-bottom:1px solid var(--border-color);padding-bottom:.3em;font-size:1.8rem;margin-top:2.5rem}h3{font-size:1.4rem;margin-top:.7rem;margin-bottom:.7rem}p,ul,ol{margin:1em 0;font-size:1.05rem;line-height:1.7}ol{padding-left:2.5rem}ul{padding-left:2rem}li{margin-bottom:.5rem}ol li{padding-left:.5rem}ol li ul{margin-top:.5rem;margin-bottom:1rem}a{color:var(--accent-color);text-decoration:none}a:hover{text-decoration:underline}code{font-family:sfmono-regular,Consolas,liberation mono,Menlo,Courier,monospace;background-color:var(--code-background);padding:.2em .4em;border-radius:3px;font-size:.9em;text-wrap:nowrap}pre{background-color:var(--code-background);border-radius:5px;padding:1.2rem;overflow:auto;margin:1.5em 0;border:1px solid var(--border-color)}pre code{background-color:initial;padding:0;font-size:.95rem;line-height:1.5}.diagram,.example{text-align:center;margin:35px 0;padding:25px;background-color:rgba(30,30,30,.5);border-radius:8px;border:1px solid var(--border-color);box-shadow:0 2px 10px rgba(0,0,0,.1)}.diagram img,.example img{max-width:100%;height:auto;cursor:pointer;transition:opacity .2s ease}.diagram img:hover,.example img:hover{opacity:.85}.example h3{text-align:left;margin-top:0;margin-bottom:15px}table{border-collapse:collapse;width:100%;margin:25px 0;background-color:rgba(30,30,30,.5);border-radius:5px;overflow:hidden}.table-container{width:100%;overflow-x:auto;margin:25px 0;border-radius:5px;border:1px solid var(--border-color);background-color:rgba(30,30,30,.5)}.table-container table{margin:0;border:none}th,td{border:1px solid var(--border-color);padding:12px 16px;text-align:left}th{background-color:var(--header-background);font-weight:600}tr:nth-child(even){background-color:rgba(40,40,40,.5)}.note,.warning,.security{padding:18px 22px;margin:1.8rem 0;border-radius:5px;border-left:4px solid}.note{background-color:var(--note-background);border-color:var(--note-border)}.warning{background-color:var(--warning-background);border-color:var(--warning-border)}.security{background-color:var(--security-background);border-color:var(--security-border);box-shadow:0 2px 8px rgba(0,0,0,.15)}.security h3{color:#6fcf7c;margin-top:0}.security ul{margin-bottom:0}footer{text-align:center;padding:20px 0;border-top:1px solid 
var(--border-color);color:var(--subtext-color);font-size:.9rem}.toc{background-color:rgba(30,30,30,.5);border-radius:8px;padding:20px;margin:20px 0 30px;border:1px solid var(--border-color)}.toc h2{margin-top:0;text-align:center;border-bottom:1px solid var(--border-color);padding-bottom:10px;margin-bottom:15px;color:var(--accent-color)}.toc ul{list-style-type:none;padding-left:0;margin:0;display:flex;flex-wrap:wrap;gap:10px;justify-content:center}.toc li{margin-bottom:8px;flex:none}.toc a{display:block;padding:5px 15px;border-radius:4px;transition:background-color .2s ease;background-color:rgba(20,20,20,.5);white-space:nowrap}.toc a:hover{background-color:rgba(50,50,50,.5);text-decoration:none}.code-example{position:relative}.code-label{position:absolute;top:-12px;right:10px;background-color:var(--accent-color);color:#fff;font-size:.8rem;padding:2px 8px;border-radius:4px}.lv-btn-group{display:flex;justify-content:space-between;align-items:center;gap:18px;margin:36px 0 10px}.lv-btn{display:inline-flex;align-items:center;padding:10px 24px;border-radius:8px;font-weight:700;font-size:1.05rem;text-decoration:none;transition:background .18s,color .18s,border .18s;cursor:pointer}.lv-btn-primary{border:none;background:var(--accent-color,#4285F4);color:#fff}.lv-btn-primary:hover{background:#2563eb}.lv-btn-outline{background:0 0;color:#fff;border:1.5px solid var(--accent-color,#4285F4)}.lv-btn-outline:hover{background:rgba(66,133,244,8%)}.lv-size-info{font-size:1rem;color:#888;background:rgba(0,0,0,5%);border-radius:4px;padding:7px 18px}.lv-size-info span{color:var(--accent-color,#4285F4);font-weight:600}.feature-card{background-color:rgba(40,40,40,.5);border-radius:8px;padding:20px;border:1px solid var(--border-color);margin-bottom:15px}.feature-card h3{color:var(--accent-color);margin-top:0;text-align:left;border-bottom:1px solid rgba(255,255,255,.1);padding-bottom:10px}.section{scroll-margin-top:20px}.doc-version-note{text-align:center;color:#888;font-size:.98rem;margin-bottom:10px}lazy-video{display:block;margin:30px auto;max-width:600px}.url-patterns{list-style:none;padding:0;margin:0}.url-patterns li{padding:3px 0}.url-patterns code{background:rgba(0,0,0,5%);padding:2px 5px;border-radius:3px;font-size:.9em;font-family:monospace}@media(max-width:768px){.container{padding:1rem}h1{font-size:1.8rem}h2{font-size:1.5rem}h3{font-size:1.2rem}.toc ul{flex-direction:column;align-items:stretch}.toc a{text-align:center;white-space:normal}.lv-btn-group{flex-direction:column!important;gap:14px;margin:30px 0;align-items:stretch;width:100%}.lv-btn{justify-content:center;text-align:center;width:100%}.lv-size-info{text-align:center;width:100%}} \ No newline at end of file diff --git a/public/css/lightbox.css b/public/css/lightbox.css new file mode 100644 index 0000000..16d0b93 --- /dev/null +++ b/public/css/lightbox.css @@ -0,0 +1 @@ +.lightbox{display:none;position:fixed;top:0;left:0;width:100%;height:100%;background-color:rgba(0,0,0,.85);z-index:1000;justify-content:center;align-items:center;padding:0}.lightbox.active{display:flex}.lightbox-content{position:relative;width:90%;max-width:1200px;height:85vh;background-color:#121212;padding:20px;border-radius:10px;box-shadow:0 5px 30px rgba(0,0,0,.3);display:flex;flex-direction:column;overflow:hidden}.lightbox-img-container{flex:1;overflow:hidden;position:relative;display:flex;justify-content:center;align-items:center}.lightbox-img{display:block;max-width:100%;max-height:100%;object-fit:contain;cursor:grab;border-radius:5px;transform-origin:center 
center;user-select:none;will-change:transform}.lightbox-img.grabbing{cursor:grabbing}.lightbox-close{position:absolute;top:10px;right:10px;width:32px;height:32px;background-color:#9b59b6;color:#fff;border-radius:50%;text-align:center;line-height:32px;cursor:pointer;font-weight:700;font-size:18px;z-index:1010;box-shadow:0 2px 5px rgba(0,0,0,.3)}.lightbox-caption{margin-top:15px;text-align:center;color:#ccc;font-size:.9rem}.zoom-controls{display:flex;align-items:center;justify-content:center;margin-top:15px;padding:10px 0;border-top:1px solid #444}.zoom-label{margin-right:10px;font-size:.9rem;color:#ccc}.zoom-slider{-webkit-appearance:none;width:70%;height:6px;border-radius:3px;background:#444;outline:none}.zoom-slider::-webkit-slider-thumb{-webkit-appearance:none;appearance:none;width:18px;height:18px;border-radius:50%;background:#9b59b6;cursor:pointer}.zoom-slider::-moz-range-thumb{width:18px;height:18px;border-radius:50%;background:#9b59b6;cursor:pointer;border:none}.zoom-value{margin-left:10px;font-size:.9rem;min-width:40px;color:#ccc}@media(max-width:768px),(max-width:1024px) and (orientation:landscape){.lightbox-content{width:100%;height:100%;padding:15px;border-radius:0}.zoom-controls{display:flex}} \ No newline at end of file diff --git a/public/css/u.css b/public/css/u.css new file mode 100644 index 0000000..fd676ec --- /dev/null +++ b/public/css/u.css @@ -0,0 +1,2 @@ +*{margin:0;padding:0;box-sizing:border-box}@font-face{font-family:Poppins;src:url(/webfonts/Poppins-Regular.woff2)format("woff2");font-weight:400;font-style:normal;font-display:swap}@font-face{font-family:Poppins;src:url(/webfonts/Poppins-SemiBold.woff2)format("woff2");font-weight:600;font-style:normal;font-display:swap}body{font-family:Poppins,sans-serif}a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"])::after,a[target=_blank]:not([href^="mailto:"]):not([href^="tel:"])::after{content:"";display:inline-block;width:1.1em;height:1.1em;margin-left:.25em;margin-bottom:.25em;vertical-align:middle;background-color:currentColor;-webkit-mask:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgZmlsbD0ibm9uZSIgc3Ryb2tlPSJjdXJyZW50Q29sb3IiIHN0cm9rZS13aWR0aD0iMS43NSIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIj48cGF0aCBkPSJNMTIgNkg2YTIgMiAwIDAgMC0yIDJ2MTBhMiAyIDAgMCAwIDIgMmgxMGEyIDIgMCAwIDAgMi0ydi02bS03IDFsOS05bS01IDBoNXY1Ii8+PC9zdmc+)no-repeat center;mask:url(data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIyNCIgaGVpZ2h0PSIyNCIgZmlsbD0ibm9uZSIgc3Ryb2tlPSJjdXJyZW50Q29sb3IiIHN0cm9rZS13aWR0aD0iMS43NSIgc3Ryb2tlLWxpbmVjYXA9InJvdW5kIiBzdHJva2UtbGluZWpvaW49InJvdW5kIj48cGF0aCBkPSJNMTIgNkg2YTIgMiAwIDAgMC0yIDJ2MTBhMiAyIDAgMCAwIDIgMmgxMGEyIDIgMCAwIDAgMi0ydi02bS03IDFsOS05bS01IDBoNXY1Ii8+PC9zdmc+)no-repeat center;-webkit-mask-size:contain;mask-size:contain}@media(hover:hover) and (pointer:fine){a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"]),a[target=_blank]:not([href^="mailto:"]):not([href^="tel:"]){position:relative}a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"])::before,a[target=_blank]:not([href^="mailto:"]):not([href^="tel:"])::before{content:"Opens in new tab";position:absolute;top:50%;left:100%;transform:translateY(-50%);margin-left:5px;background-color:rgba(25,25,25,.9);color:#fff;padding:5px 8px;border-radius:4px;font-size:12px;white-space:nowrap;opacity:0;pointer-events:none;transition:opacity .18s 
ease-in-out;z-index:10}a[rel~=external]:not([href^="mailto:"]):not([href^="tel:"]):hover::before,a[target=_blank]:not([href^="mailto:"]):not([href^="tel:"]):hover::before{transition-delay:60ms;opacity:1}}::-webkit-scrollbar{width:4px;height:4px}::-webkit-scrollbar-track{background:#2d2d2d;border-radius:5px}::-webkit-scrollbar-thumb{background:#4d9cfa;border-radius:5px}::-webkit-scrollbar-thumb:hover{background:#3971a3}html{scroll-behavior:smooth}@view-transition{navigation: auto; +}::view-transition-old(root),::view-transition-new(root){animation-duration:.44s}@media(prefers-reduced-motion){::view-transition-group(*),::view-transition-old(*),::view-transition-new(*){animation:none!important}} \ No newline at end of file diff --git a/public/css/u.css.fiber.gz b/public/css/u.css.fiber.gz new file mode 100644 index 0000000..6a7207c Binary files /dev/null and b/public/css/u.css.fiber.gz differ diff --git a/public/html/ai-san.html b/public/html/ai-san.html new file mode 100644 index 0000000..1055381 --- /dev/null +++ b/public/html/ai-san.html @@ -0,0 +1,97 @@ + + + + +Text Cleaner - caileb.com + + + + + + + + + +
+
+

Text Cleaner

+

Convert formatted text to clean plain text +

+
+ + +
+ + +
+ + +
+
+ diff --git a/public/html/checkpoint.html b/public/html/checkpoint.html new file mode 100644 index 0000000..ace692c --- /dev/null +++ b/public/html/checkpoint.html @@ -0,0 +1,635 @@ + + + + +Checkpoint Documentation + + + + + + + + + + + + + + +
+
+

Disclaimer: Some internal fields and implementation details are omitted here for security reasons. +

+

Checkpoint Protection System

+ +
+

Overview

+

Checkpoint Protection asks visitors to solve a quick puzzle before letting them through, cutting down on automated traffic while keeping the experience smooth for real users. +

    +
  • No account or personal data needed +
  • Privacy-focused and lightweight +
  • Blocks bots and scripts effectively +
  • Works seamlessly in modern browsers +
+
+
+

How It Works

+

When you navigate to a protected page, the middleware checks for a valid token cookie (__Host-checkpoint_token). +

    +
  1. If the token is present, the server verifies its signature and confirms it's bound to your device (a minimal sketch of this check follows the list). +
  2. Missing or invalid tokens trigger an interstitial page with a request ID. +
  3. The browser fetches challenge data from /api/pow/challenge?id=REQUEST_ID. This payload includes a random challenge, salt, difficulty, and hidden parameters. +
  4. The client runs two proofs in parallel: +
      +
    • Proof of Work: finds a nonce such that SHA‑256(challenge + salt + nonce) meets the difficulty. +
    • Proof of Space: allocates and hashes large memory buffers to confirm resource availability. +
    +
  5. Results are sent to /api/pow/verify along with the request ID. +
  6. On success, the server issues a signed token (valid for 24h) and sets it as a cookie for future visits. +
+
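+For illustration, here is a minimal sketch of the token check from step 1. The struct is a reduced subset of the documented token fields, and validateTokenSketch and its parameters are hypothetical helpers for this example, not the production implementation. +
+Go
+import (
+    "crypto/hmac"
+    "crypto/sha256"
+    "encoding/base64"
+    "encoding/hex"
+    "encoding/json"
+    "time"
+)
+
+// tokenClaims mirrors only the documented fields needed for this sketch.
+type tokenClaims struct {
+    ExpiresAt time.Time `json:"exp"`
+    ClientIP  string    `json:"cip,omitempty"`
+    UserAgent string    `json:"ua,omitempty"`
+    Signature string    `json:"sig,omitempty"`
+}
+
+// validateTokenSketch checks expiry, the HMAC signature, and device binding.
+// The caller supplies the server secret and the hashes derived from the request.
+func validateTokenSketch(cookie string, secret []byte, ipHash, uaHash string) bool {
+    raw, err := base64.StdEncoding.DecodeString(cookie)
+    if err != nil {
+        return false
+    }
+    var claims tokenClaims
+    if err := json.Unmarshal(raw, &claims); err != nil {
+        return false
+    }
+    if time.Now().After(claims.ExpiresAt) {
+        return false // expired token
+    }
+    // Recompute the signature over the token with an empty signature field.
+    sig := claims.Signature
+    claims.Signature = ""
+    unsigned, _ := json.Marshal(claims)
+    mac := hmac.New(sha256.New, secret)
+    mac.Write(unsigned)
+    if !hmac.Equal([]byte(sig), []byte(hex.EncodeToString(mac.Sum(nil)))) {
+        return false // forged or tampered token
+    }
+    // Device binding: the stored hashes must match the current request.
+    return claims.ClientIP == ipHash && claims.UserAgent == uaHash
+}
+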
+

Checkpoint Protection Flow

+Checkpoint Protection Flow Diagram +
+
+
+

Challenge Generation

+

+Challenges are generated using cryptographically secure random bytes combined with a salt for additional entropy: +

+Go +
func generateChallenge() (string, string) {
+    // Generate a random challenge
+    randomBytes := make([]byte, 16)
+    _, err := cryptorand.Read(randomBytes)
+    if err != nil {
+        log.Fatalf("CRITICAL: Failed to generate secure random challenge: %v", err)
+    }
+    
+    // Generate a random salt for additional entropy
+    saltBytes := make([]byte, saltLength)
+    _, err = cryptorand.Read(saltBytes)
+    if err != nil {
+        log.Fatalf("CRITICAL: Failed to generate secure random salt: %v", err)
+    }
+    
+    return hex.EncodeToString(randomBytes), hex.EncodeToString(saltBytes)
+}
+
+
+

+Security Note: The system uses Go's crypto/rand package for secure random number generation, ensuring challenges cannot be predicted even by sophisticated attackers. +

+

Challenge Parameters

+

+Challenges are stored with a unique request ID and include parameters for verification: +

+Go +
type ChallengeParams struct {
+    Challenge  string    `json:"challenge"` // Base64 encoded
+    Salt       string    `json:"salt"`      // Base64 encoded
+    Difficulty int       `json:"difficulty"`
+    ExpiresAt  time.Time `json:"expires_at"`
+    ClientIP   string    `json:"-"`
+    PoSSeed    string    `json:"pos_seed"` // Hex encoded
+}
+
+

+When a client requests a challenge, the parameters are delivered in an obfuscated format to prevent automated analysis: +

+JSON +
{
+    "a": "base64-encoded-challenge",
+    "b": "base64-encoded-salt",
+    "c": 4,
+    "d": "hex-encoded-pos-seed"
+}
+
+
+
+

Proof Verification

+

+The system performs a two-step verification process: +

1. Computational Proof (Proof of Work)

+

+Verification checks that the hash of the challenge, salt, and nonce combination has the required number of leading zeros: +

+Go +
func verifyProofOfWork(challenge, salt, nonce string, difficulty int) bool {
+    input := challenge + salt + nonce
+    hash := calculateHash(input)
+    
+    // Check if the hash has the required number of leading zeros
+    prefix := strings.Repeat("0", difficulty)
+    return strings.HasPrefix(hash, prefix)
+}
+
+func calculateHash(input string) string {
+    hash := sha256.Sum256([]byte(input))
+    return hex.EncodeToString(hash[:])
+}
+
+

2. Memory Proof (Proof of Space)

+

+In addition to the computational work, clients must prove they can allocate and manipulate significant memory resources: +

    +
  • Clients allocate between 48MB and 160MB of memory (size determined by the PoS seed) +
  • The client divides the memory into 4-8 chunks and performs deterministic filling operations +
  • The process is run three times, hashing the entire buffer each time +
  • The resulting hashes and execution times are submitted for verification +
+

+The server verifies: +

    +
  • All three hashes are identical (proving deterministic execution) +
  • Each hash is 64 characters (valid SHA-256) +
  • Execution times are consistent (by default the slowest run may be at most 1.35× the fastest); a minimal sketch of this check follows the list +
+
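+As a rough sketch of that server-side check (the function name and error text are illustrative; the allowed ratio is configurable and defaults to 1.35): +
+Go
+import "errors"
+
+// checkPoSResults validates three identical SHA-256 hex hashes and verifies
+// that the slowest run is no more than maxRatio times the fastest run.
+func checkPoSResults(hashes []string, times []int64, maxRatio float64) error {
+    if len(hashes) != 3 || len(times) != 3 {
+        return errors.New("expected exactly three PoS runs")
+    }
+    if hashes[0] != hashes[1] || hashes[1] != hashes[2] {
+        return errors.New("PoS hashes do not match")
+    }
+    if len(hashes[0]) != 64 {
+        return errors.New("invalid PoS hash length")
+    }
+    fastest, slowest := times[0], times[0]
+    for _, t := range times[1:] {
+        if t < fastest {
+            fastest = t
+        }
+        if t > slowest {
+            slowest = t
+        }
+    }
+    if fastest <= 0 {
+        return errors.New("invalid PoS timing")
+    }
+    if float64(slowest)/float64(fastest) > maxRatio {
+        return errors.New("inconsistent PoS execution times")
+    }
+    return nil
+}
+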
+

+The dual-verification approach makes the system resistant to specialized hardware acceleration. While the computational proof can be solved by ASICs or GPUs, the memory proof is specifically designed to be inefficient on such hardware. +

+
+
+

Token Structure

+

+Checkpoint tokens contain various fields for security and binding: +

+ + + + + + + + + + + + +
Field +Description +Purpose +
Nonce +The solution to the challenge +Verification proof +
ExpiresAt +Token expiration timestamp +Enforces time-limited access (24 hours) +
ClientIP +Hashed full client IP +Device binding (first 8 bytes of SHA-256) +
UserAgent +Hashed user agent +Browser binding +
BrowserHint +Derived from Sec-CH-UA headers +Additional client identity verification +
Entropy +Random data +Prevents token prediction/correlation +
Created +Token creation timestamp +Token age tracking +
LastVerified +Last verification timestamp +Token usage tracking +
Signature +HMAC signature +Prevents token forgery +
TokenFormat +Version number +Backward compatibility support +
+
+
+Go +
type CheckpointToken struct {
+    Nonce        string    `json:"g"` // Nonce
+    ExpiresAt    time.Time `json:"exp"`
+    ClientIP     string    `json:"cip,omitempty"`
+    UserAgent    string    `json:"ua,omitempty"`
+    BrowserHint  string    `json:"bh,omitempty"`
+    Entropy      string    `json:"ent,omitempty"`
+    Created      time.Time `json:"crt"`
+    LastVerified time.Time `json:"lvf,omitempty"`
+    Signature    string    `json:"sig,omitempty"`
+    TokenFormat  int       `json:"fmt"`
+}
+
+

Token Security

+

+Every token is cryptographically signed using HMAC-SHA256 with a server-side secret: +

+Go +
func computeTokenSignature(token CheckpointToken, tokenBytes []byte) string {
+    tokenCopy := token
+    tokenCopy.Signature = "" // Ensure signature field is empty for signing
+    tokenToSign, _ := json.Marshal(tokenCopy)
+    h := hmac.New(sha256.New, hmacSecret)
+    h.Write(tokenToSign)
+    return hex.EncodeToString(h.Sum(nil))
+}
+
+func verifyTokenSignature(token CheckpointToken, tokenBytes []byte) bool {
+    if token.Signature == "" {
+        return false
+    }
+    expectedSignature := computeTokenSignature(token, tokenBytes)
+    return hmac.Equal([]byte(token.Signature), []byte(expectedSignature))
+}
+
+

Token Storage

+

+Successfully verified tokens are stored in a persistent store for faster validation: +

+Go +
// TokenStore manages persistent storage of verified tokens
+type TokenStore struct {
+    VerifiedTokens map[string]time.Time `json:"verified_tokens"`
+    Mutex          sync.RWMutex         `json:"-"`
+    FilePath       string               `json:"-"`
+}
+
+// Each token is identified by a unique hash
+func calculateTokenHash(token CheckpointToken) string {
+    data := fmt.Sprintf("%s:%s:%d",
+        token.Nonce,              // Use nonce as part of the key
+        token.Entropy,            // Use entropy as part of the key
+        token.Created.UnixNano()) // Use creation time
+    hash := sha256.Sum256([]byte(data))
+    return hex.EncodeToString(hash[:])
+}
+
+
+
+

Security Features

+
+

Anti-Forgery Protections

+
    +
  • HMAC Signatures: Each token is cryptographically signed using HMAC-SHA256 to prevent tampering +
  • Token Binding: Tokens are bound to client properties (hashed full IP, hashed user agent, browser client hints) +
  • Random Entropy: Each token contains unique entropy to prevent token prediction or correlation +
  • Format Versioning: Tokens include a format version to support evolving security requirements +
+
+
+

Replay Prevention

+
    +
  • Nonce Tracking: Used nonces are tracked to prevent replay attacks +
  • Expiration Times: All tokens and challenges have expiration times +
  • Token Cleanup: Expired tokens and aged-out nonces are automatically purged from the system (a minimal sweep sketch follows this list) +
  • Challenge Invalidation: Challenges are immediately invalidated after successful verification +
+
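+A minimal sketch of such a periodic sweep over the used-nonce map (the function name and interval are assumptions for this example): +
+Go
+import (
+    "sync"
+    "time"
+)
+
+// startNonceCleanup removes entries older than maxAge from a map of
+// nonce key -> time the nonce was used, checking once per interval.
+func startNonceCleanup(usedNonces *sync.Map, maxAge, interval time.Duration) {
+    go func() {
+        ticker := time.NewTicker(interval)
+        defer ticker.Stop()
+        for range ticker.C {
+            cutoff := time.Now().Add(-maxAge)
+            usedNonces.Range(func(key, value interface{}) bool {
+                if usedAt, ok := value.(time.Time); ok && usedAt.Before(cutoff) {
+                    usedNonces.Delete(key)
+                }
+                return true // keep iterating
+            })
+        }
+    }()
+}
+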
+
+

Rate Limiting

+
    +
  • IP-Based Limits: Maximum verification attempts per IP per hour (default: 10); a minimal counter sketch follows this list +
  • Request ID Binding: Challenge parameters are bound to the requesting IP +
  • Challenge Expiration: Challenges expire after 5 minutes to prevent stockpiling +
+
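+A minimal fixed-window illustration of a per-IP hourly counter (the type and method names are assumptions, not the production limiter): +
+Go
+import (
+    "sync"
+    "time"
+)
+
+// hourlyLimiter allows at most max attempts per IP within the current hour.
+type hourlyLimiter struct {
+    mu     sync.Mutex
+    counts map[string]int
+    window time.Time
+    max    int
+}
+
+func newHourlyLimiter(max int) *hourlyLimiter {
+    return &hourlyLimiter{counts: map[string]int{}, window: time.Now().Truncate(time.Hour), max: max}
+}
+
+// Allow reports whether ip may make another verification attempt.
+func (l *hourlyLimiter) Allow(ip string) bool {
+    l.mu.Lock()
+    defer l.mu.Unlock()
+    now := time.Now().Truncate(time.Hour)
+    if now.After(l.window) {
+        l.counts = map[string]int{} // new hour, reset all counters
+        l.window = now
+    }
+    if l.counts[ip] >= l.max {
+        return false
+    }
+    l.counts[ip]++
+    return true
+}
+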
+
+

Advanced Verification

+
    +
  • Proof of Space: Memory-intensive operations prevent GPU/ASIC acceleration +
  • Browser Fingerprinting: Secure client-hint (Sec-CH-UA) headers help verify legitimate browsers (a rough sketch follows this list) +
  • Challenge Obfuscation: Challenges are encoded and structured to resist automated analysis +
  • Persistent Secret: The system uses a persistent HMAC secret stored securely on disk +
+
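+As a rough illustration of deriving a browser hint from client-hint headers (this is not the actual extractBrowserFingerprint implementation; the header set and hashing are assumptions): +
+Go
+import (
+    "crypto/sha256"
+    "encoding/hex"
+
+    "github.com/gofiber/fiber/v2"
+)
+
+// browserHintSketch combines a few Sec-CH-UA headers and hashes them into a
+// short, non-reversible hint that could be stored in the token.
+func browserHintSketch(c *fiber.Ctx) string {
+    raw := c.Get("Sec-CH-UA") + "|" + c.Get("Sec-CH-UA-Platform") + "|" + c.Get("Sec-CH-UA-Mobile")
+    if raw == "||" {
+        return "" // the browser sent no client hints
+    }
+    sum := sha256.Sum256([]byte(raw))
+    return hex.EncodeToString(sum[:8])
+}
+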
+
+
+

Configuration Options

+

+The Checkpoint system can be configured through these constants: +

+ + + + + + + + + +
Constant +Description +Default +
Difficulty +Number of leading zeros required in the hash +4 +
TokenExpiration +Duration for which a token is valid +24 hours +
CookieName +The name of the cookie storing the issued token +__Host-checkpoint_token +
maxAttemptsPerHour +Rate limit for verification attempts +10 +
saltLength +Length of the random salt in bytes +16 +
maxNonceAge +Time before nonces are cleaned up +24 hours +
challengeExpiration +Time before a challenge expires +5 minutes +
+
+
+

+Warning: Increasing the Difficulty significantly increases the computational work required by clients. +A value that's too high may result in poor user experience, especially on mobile devices. +

+
+Go +
const (
+    // Difficulty defines the number of leading zeros required in hash
+    Difficulty = 4
+    // TokenExpiration sets token validity period
+    TokenExpiration = 24 * time.Hour
+    // CookieName defines the cookie name for tokens
+    CookieName = "__Host-checkpoint_token"
+    // Max verification attempts per IP per hour
+    maxAttemptsPerHour = 10
+    // Salt length for additional entropy
+    saltLength = 16
+)
+
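+The repository also ships these settings in middleware/config/checkpoint.toml. Below is a minimal sketch of reading that file with the LoadConfig helper from plugin.go; the struct is an illustrative subset (assumed to live inside the middleware package), and the real Config type may differ. +
+Go
+import (
+    "log"
+    "time"
+)
+
+// checkpointSettings is an illustrative subset of the keys in checkpoint.toml.
+type checkpointSettings struct {
+    Difficulty         int
+    TokenExpiration    string // e.g. "24h", parsed with time.ParseDuration
+    CookieName         string
+    MaxAttemptsPerHour int
+    SaltLength         int
+}
+
+func loadCheckpointSettings() (checkpointSettings, time.Duration, error) {
+    var cfg checkpointSettings
+    // LoadConfig reads middleware/config/checkpoint.toml into the struct.
+    if err := LoadConfig("checkpoint", &cfg); err != nil {
+        return cfg, 0, err
+    }
+    exp, err := time.ParseDuration(cfg.TokenExpiration)
+    if err != nil {
+        return cfg, 0, err
+    }
+    log.Printf("checkpoint: difficulty=%d token lifetime=%s", cfg.Difficulty, exp)
+    return cfg, exp, nil
+}
+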
+
+
+

Middleware Integration

+

+The Checkpoint system provides a middleware handler that automatically protects HTML routes while bypassing API routes and static assets: +

HTMLCheckpointMiddleware

+

+This middleware is optimized for HTML routes, with smart content-type detection and automatic exclusions for static assets and API endpoints. +

+Go +
// HTMLCheckpointMiddleware handles challenges specifically for HTML pages
+func HTMLCheckpointMiddleware() fiber.Handler {
+    return func(c *fiber.Ctx) error {
+        // Allow certain paths to bypass verification
+        path := c.Path()
+        if path == "/video-player" || path == "/video-player.html" || strings.HasPrefix(path, "/videos/") {
+            return c.Next()
+        }
+        if strings.HasPrefix(path, "/api") {
+            return c.Next()
+        }
+        if path == "/favicon.ico" || (strings.Contains(path, ".") && !strings.HasSuffix(path, ".html")) {
+            return c.Next()
+        }
+        
+        // Only apply to HTML routes
+        isHtmlRoute := strings.HasSuffix(path, ".html") || path == "/" ||
+            (len(path) > 0 && !strings.Contains(path, "."))
+        if !isHtmlRoute {
+            return c.Next()
+        }
+
+        token := c.Cookies(CookieName)
+        if token != "" {
+            valid, err := validateToken(token, c)
+            if err == nil && valid {
+                return c.Next()
+            }
+        }
+        return serveInterstitial(c)
+    }
+}
+
+

Usage in Application

+
+Go +
// Enable HTML checkpoint protection for all routes
+app.Use(middleware.HTMLCheckpointMiddleware())
+
+// API group with verification endpoints
+api := app.Group("/api")
+
+// Verification endpoints
+api.Post("/pow/verify", middleware.VerifyCheckpointHandler)
+api.Get("/pow/challenge", middleware.GetCheckpointChallengeHandler)
+
+// Example protected API endpoint
+api.Get("/protected", func(c *fiber.Ctx) error {
+    // Access is already verified by cookie presence
+    return c.JSON(fiber.Map{
+        "message": "You have accessed the protected endpoint!",
+        "time":    time.Now(),
+    })
+})
+
+
+
+

Client-Side Implementation

+

+The client-side implementation is handled by the interstitial page and its associated JavaScript: +

    +
  1. Client attempts to access a protected resource +
  2. Server serves the interstitial page with a request ID +
  3. JavaScript fetches challenge parameters from /api/pow/challenge?id=REQUEST_ID +
  4. Two verification stages run in parallel: +
      +
    • Computational proof: Using Web Workers to find a valid nonce +
    • Memory proof: Allocating and manipulating memory buffers +
    +
  5. Results are submitted to /api/pow/verify endpoint +
  6. On success, the server sets a cookie and redirects to the original URL +
+

Web Worker Implementation

+

+Computational proof is handled by Web Workers to avoid freezing the UI: +

+JavaScript +
function workerFunction() {
+    self.onmessage = function(e) {
+        const { type, data } = e.data;
+
+        if (type === 'pow') {
+            // PoW calculation
+            const { challenge, salt, startNonce, endNonce, target, batchId } = data;
+            let count = 0;
+            let solution = null;
+            
+            processNextNonce(startNonce);
+    
+            function processNextNonce(nonce) {
+                const input = String(challenge) + String(salt) + nonce.toString();
+                const msgBuffer = new TextEncoder().encode(input);
+                
+                crypto.subtle.digest('SHA-256', msgBuffer)
+                    .then(hashBuffer => {
+                        const hashArray = Array.from(new Uint8Array(hashBuffer));
+                        const result = hashArray.map(b => 
+                            b.toString(16).padStart(2, '0')).join('');
+                        
+                        count++;
+            
+                        if (result.startsWith(target)) {
+                            solution = { nonce: nonce.toString(), found: true };
+                            self.postMessage({
+                                type: 'pow_result',
+                                solution: solution,
+                                count: count,
+                                batchId: batchId
+                            });
+                            return;
+                        }
+                        
+                        if (nonce < endNonce && !solution) {
+                            setTimeout(() => processNextNonce(nonce + 1), 0);
+                        } else if (!solution) {
+                            self.postMessage({
+                                type: 'pow_result',
+                                solution: null,
+                                count: count,
+                                batchId: batchId
+                            });
+                        }
+                    });
+            }
+        }
+    };
+}
+
+

Memory Proof Implementation

+

+The memory proof allocates and manipulates large buffers to verify client capabilities: +

+JavaScript +
async function runProofOfSpace(seedHex, isDecoy) {
+    // Deterministic memory size (48MB to 160MB) based on seed
+    const minMB = 48, maxMB = 160;
+    let seedInt = parseInt(seedHex.slice(0, 8), 16);
+    const CHUNK_MB = minMB + (seedInt % (maxMB - minMB + 1));
+    const CHUNK_SIZE = CHUNK_MB * 1024 * 1024;
+    
+    // Chunk memory for controlled allocation
+    const chunkCount = 4 + (seedInt % 5); // 4-8 chunks
+    const chunkSize = Math.floor(CHUNK_SIZE / chunkCount);
+    
+    // Run the proof multiple times to verify consistency
+    const runs = 3;
+    const hashes = [];
+    const times = [];
+    
+    // For each run...
+    for (let r = 0; r < runs; r++) {
+        // Generate deterministic chunk order
+        let prng = seededPRNG(seedHex + r.toString(16));
+        let order = Array.from({length: chunkCount}, (_, i) => i);
+        for (let i = order.length - 1; i > 0; i--) {
+            const j = prng() % (i + 1);
+            [order[i], order[j]] = [order[j], order[i]];
+        }
+        
+        // Allocate and fill memory buffer
+        let t0 = performance.now();
+        let buf = new ArrayBuffer(CHUNK_SIZE);
+        let view = new Uint8Array(buf);
+        
+        // Fill buffer with deterministic pattern
+        for (let c = 0; c < chunkCount; c++) {
+            let chunkIdx = order[c];
+            let start = chunkIdx * chunkSize;
+            let end = (chunkIdx + 1) * chunkSize;
+            for (let i = start; i < end; i += 4096) {
+                view[i] = prng() & 0xFF;
+            }
+        }
+        
+        // Hash the entire buffer
+        let hashBuf = await crypto.subtle.digest('SHA-256', view);
+        let t2 = performance.now();
+        
+        // Convert hash to hex string
+        let hashHex = Array.from(new Uint8Array(hashBuf))
+            .map(b => b.toString(16).padStart(2, '0')).join('');
+        
+        // Store results
+        hashes.push(hashHex);
+        times.push(Math.round(t2 - t0));
+        
+        // Clean up
+        buf = null; view = null;
+    }
+    
+    return { hashes, times };
+}
+
+
+

+The client-side implementation is designed to be difficult to reverse-engineer. The obfuscated API responses, minimal logging, and anti-debugging measures prevent automated circumvention. +

+
+
+

API Endpoints

+

+The Checkpoint system exposes two primary API endpoints: +

1. Challenge Endpoint

+

+Retrieves challenge parameters for a verification request: +

+HTTP +
GET /api/pow/challenge?id=REQUEST_ID
+
+Response:
+{
+    "a": "base64-encoded-challenge",
+    "b": "base64-encoded-salt",
+    "c": 4,
+    "d": "hex-encoded-pos-seed"
+}
+
+

2. Verification Endpoint

+

+Accepts proof solutions and issues tokens when valid: +

+HTTP +
POST /api/pow/verify
+
+Request:
+{
+    "request_id": "unique-request-id",
+    "g": "nonce-solution",
+    "h": ["pos-hash1", "pos-hash2", "pos-hash3"],
+    "i": [time1, time2, time3]
+}
+
+Response:
+{
+    "token": "base64-encoded-token",
+    "expires_at": "2025-04-17T18:57:48Z"
+}
+
+
+

+Backwards Compatibility: The older endpoint /api/verify is maintained for compatibility with existing clients. +

+
+
+
These docs reflect version 2.0 of the Checkpoint Protection System.
+

Last updated: Wednesday, April 16, 2025 +

+
diff --git a/public/html/index.html b/public/html/index.html new file mode 100644 index 0000000..4e1ca07 --- /dev/null +++ b/public/html/index.html @@ -0,0 +1,41 @@ + + + + +caileb.com + + + + + + + + + + + + + + + + + diff --git a/public/html/integrity-demo.html b/public/html/integrity-demo.html new file mode 100644 index 0000000..c754d86 --- /dev/null +++ b/public/html/integrity-demo.html @@ -0,0 +1,190 @@ + + + + +Integrity Checker - caileb.com + + + + + + + + + + + + + + + +
+

Auto-Integrity Hash Demo

+
+

This is a live demonstration of automatic SRI hash generation. +

The server automatically adds integrity hashes to all external resources when the site is built - no manual work required. +

If you view the source code of this page, you'll see all external CSS and JavaScript files have integrity and crossorigin attributes that were added automatically during build. +

This security feature protects against compromised CDNs and ensures resources haven't been tampered with. +

+
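+For reference, an SRI value is just a base64-encoded SHA-384 digest of the resource bytes with an algorithm prefix. The sketch below is a rough illustration of how a build step could compute one; it is not the site's actual build code. +
+Go
+import (
+    "crypto/sha512"
+    "encoding/base64"
+    "fmt"
+    "io"
+    "net/http"
+)
+
+// sriHashSketch downloads a resource and returns an integrity value like
+// "sha384-...", suitable for an integrity attribute.
+func sriHashSketch(url string) (string, error) {
+    resp, err := http.Get(url)
+    if err != nil {
+        return "", err
+    }
+    defer resp.Body.Close()
+    h := sha512.New384() // SRI commonly uses SHA-384
+    if _, err := io.Copy(h, resp.Body); err != nil {
+        return "", err
+    }
+    return fmt.Sprintf("sha384-%s", base64.StdEncoding.EncodeToString(h.Sum(nil))), nil
+}
+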

External Scripts Working

+

These demos confirm that the external scripts are loaded and working correctly with their integrity hashes: +

+
+

jQuery Demo

+

jQuery provides DOM manipulation and animation capabilities. +

Running jQuery test...
+
+
+

Lodash Demo

+

Lodash provides utility functions for common programming tasks. +

Running Lodash test...
+
+
+
+
+

Bootstrap Components

+

Bootstrap provides responsive UI components. +

+
+This is a Bootstrap alert component +
+
+
75%
+
+
+
+
+

Quicklink Demo

+

Quicklink prefetches links that are in the viewport. +

+
+
+

Monitored Resources

+

The following resources have integrity checks automatically applied during build: +

+ + + + + + + + + + +
Resource Type +Location +Integrity Added? +
Stylesheet +/css/u.css +No (Local) +
Stylesheet +Bootstrap CSS (CDN) +Yes (External) +
Preloaded Script +/js/u.js +No (Local) +
Preloaded Script +jQuery (CDN) +Yes (External) +
Preloaded Script +Lodash (CDN) +Yes (External) +
Script +Quicklink (CDN) +Yes (External) +
+
+
+ + + + diff --git a/public/html/kb.html b/public/html/kb.html new file mode 100644 index 0000000..87d8d17 --- /dev/null +++ b/public/html/kb.html @@ -0,0 +1,175 @@ + + + + +Caileb's Knowledgebase + + + + + + + + + + +
+
+

Fail2ban

+
+ + +
+
+

Step 1: Install Fail2ban (Debian/Ubuntu)

+

First, install Fail2ban by running: +

+sudo apt install fail2ban +
+
+
+

Step 2: Navigate to the Fail2ban Directory

+

Change to the Fail2ban configuration directory: +

+cd /etc/fail2ban/ +
+
+
+

Step 3: Copy the Example Configuration File

+

Copy the example configuration file as a base for your custom configuration: +

+sudo cp jail.conf jail.local +
+
+
+

Step 4: Create a New Filter

+

Navigate to the filter.d directory and create a new filter file: +

+cd filter.d/ +
+
+sudo nano nginx-4xx.conf +
+
+
+

Step 5: Define the Filter to Block Repeated 4xx Errors

+

Add the following content to the nginx-4xx.conf file: +

+[Definition]
failregex = ^<HOST>.*"(GET|POST|HEAD|CONNECT).*" (404|444|403|400) .*
# ignoreregex stays empty; whitelist private ranges with ignoreip in jail.local (see Step 6)
ignoreregex =

+
+
+
+

Step 6: Edit the Jail Configuration to Use the New Filter

+

Go back to the previous directory and edit jail.local: +

+cd .. +
+
+sudo nano jail.local +
+

Add the following section: +

+#
# Repeated 4xx errors (Nginx)
#
[nginx-4xx]
enabled = true
port = http,https
logpath = /var/log/nginx/access.log
maxretry = 4
ignoreip = 127.0.0.1 127.0.0.0/8 10.0.0.0/8 172.16.0.0/12 192.168.0.0/16

+
+
+
+

Step 7: Restart Fail2ban for the Changes to Take Effect

+

Restart the Fail2ban service: +

+sudo systemctl restart fail2ban +
+
+
+

Step 8: Check the Filter Status

+

Verify the filter is working: +

+sudo fail2ban-client status nginx-4xx +
+

OR

+

For a prettified output: +

+sudo fail2ban-client get nginx-4xx banip | tr ' ' '\n' +
+
+
+
+
+
+

Node PM2

+

Restart +

+                pm2 restart caileb.com
+            
+
+
+

FFmpeg

+

Highest quality AV1 +

+ffmpeg -i input -c:v av1_nvenc -preset p7 -cq 1 -b:v 0 -qmin 1 -qmax 5 -rc-lookahead 250 -spatial-aq 1 -aq-strength 15 -refs 16 -temporal-aq 1 -c:a flac -compression_level 8 highest_quality.mkv +
+

Standard compression +

+ffmpeg -i input -vf "mpdecimate" -fps_mode vfr -c:v av1_nvenc -preset p7 -cq 30 -b:v 0 -maxrate 18.5M -bufsize 25M -g 240 -keyint_min 24 -rc vbr -c:a libopus -b:a 128k compressed.webm +
+

Extreme compression +

+ffmpeg -i input -vf "mpdecimate,scale=-1:1080" -fps_mode vfr -c:v av1_nvenc -preset p7 -rc vbr -b:v 6M -maxrate 12M -bufsize 18M -g 300 -keyint_min 34 -c:a libopus -b:a 96k compressed.webm +
+

Rocket.Chat +

+ffmpeg -i input -vf "mpdecimate,scale=-1:1440" -fps_mode vfr -c:v av1_nvenc -preset p7 -rc vbr -b:v 8M -maxrate 15M -bufsize 22M -g 270 -keyint_min 28 -c:a libopus -b:a 112k rocket_chat.webm +
+
+
+

Useful HTML Stuffs

+

Make iFrames/Images Lazy Load MDN +

Replace FitVids or other similar JS libraries with CSS' aspect-ratio MDN +

+
+

Malware Removal

+
    +
  1. +Malwarebytes Free +

    Easy-to-use tool that quickly detects and removes a broad range of malware. +

  2. +Emsisoft Emergency Kit +

    Utilizes Bitdefender's engine on top of their own for a strong all-in-one cleanup. +

  3. +Sophos Scan & Clean +

    Portable scanner with effective heuristic analysis for detecting malware. +

  4. +Kaspersky Virus Removal Tool +

    Efficiently finds and removes stubborn malware threats. +

+
+
+ diff --git a/public/html/lazy-video.html b/public/html/lazy-video.html new file mode 100644 index 0000000..2bf0edf --- /dev/null +++ b/public/html/lazy-video.html @@ -0,0 +1,550 @@ + + + + +Lazy Video Component - Multi-Platform Video Embedding + + + + + + + + + + + + + + +
+

Lazy Video Docs

+ +
+

Overview

+

+Embedding videos with standard <iframe> tags can dramatically slow down your site and consume large amounts of data. Each iframe loads the full video player and related resources immediately, even if the user never interacts with it. On pages with several videos, this can add hundreds of megabytes to the initial page load, resulting in a sluggish and costly experience, especially for users on mobile devices or limited networks. +

How Lazy Video Helps

+

+The <lazy-video> component solves this by loading only a lightweight thumbnail and play button at first. The actual video player is loaded only when the user clicks play (or when the video scrolls into view if autoload is enabled). This keeps your pages fast, responsive, and bandwidth-friendly. +

+View Source +Download +~17.0kB / 6.0kB (Gzip) +
+
+
+

Basic Usage

+

+To get started, include the script on your page and use the custom element as shown below: +

+HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Till We Have Faces by Silent Planet">
+</lazy-video>
+
+
+

+Always add a title for accessibility and better alt text on thumbnails. +

+
+
+

Officially Supported Platforms

+
+ + + + +
Platform +URL Pattern +Notes +
YouTube + +
    +
  • youtube.com/embed/ID +
  • youtube.com/watch?v=ID +
  • youtu.be/ID +
+
Full support for thumbnails and parameters. +
Bitchute + +
    +
  • bitchute.com/video/ID/ +
  • bitchute.com/embed/ID/ +
+
Custom thumbnails are only needed if autoload is disabled. +
+
+
+
+

Attributes

+
+ + + + + + + + + + + + + + + +
Attribute +Description +Default +
src +Video embed URL (required) +N/A +
title +Video title +"Video" +
width +Width in pixels or percent +100% (responsive) +
height +Height in pixels +16:9 ratio +
thumbnail +Custom thumbnail URL +Auto-detected per platform +
thumbnail-quality +YouTube thumbnail quality (default, hq, mq, sd, maxres) +Auto (maxres on desktop, hq on mobile) +
service +Force a specific service (youtube, bitchute) +Auto-detected +
sandbox +Extra security for the iframe. Restricts what the embedded player can do. See MDN for details. +allow-scripts allow-same-origin allow-popups allow-forms allow-presentation +
no-cookie +Use youtube-nocookie.com for YouTube (privacy-friendly) +true +
autoload +Load video when scrolled into view +false (YouTube), true (Bitchute) +
hide-title +Hide the video title bar +false +
align +Set alignment (left, right, center) +center +
container-fit +Make video fill the container (FitVids style) +false +
+
+
+

+Warning: Using autoload with many videos on one page can impact performance as users scroll. Use with care! +

+
+

+Note: With container-fit, the component overrides max-width to 100% and sets max-height to auto, making it fill its container while keeping the aspect ratio. +

+
+
+

Styling & CSS Variables

+

+You can customize the look of <lazy-video> using CSS variables: +

+CSS +
lazy-video {
+    --lv-max-width: 600px;
+    --lv-border-radius: 8px;
+    --lv-play-button-color: #f00;
+    --lv-play-button-bg: rgba(0, 0, 0, 0.7);
+    --lv-show-title: none;
+}
+
+
+

Available CSS Variables

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CSS Variable +Description +Default +
--lv-max-width +Maximum width of the video +560px +
--lv-aspect-ratio +Aspect ratio +16 / 9 +
--lv-display +Display type +block +
--lv-position +CSS position +relative +
--lv-border-radius +Border radius for the container +0 +
--lv-margin +Container margin +0 auto +
--lv-margin-left +Margin for left alignment +0 +
--lv-margin-right +Margin for right alignment +0 0 0 auto +
--lv-margin-center +Margin for center alignment +0 auto +
--lv-align +Set alignment (left, right, center) +center +
--lv-background +Background color +#000 +
--lv-thumbnail-opacity +Thumbnail opacity +0.85 +
--lv-thumbnail-hover-opacity +Opacity on hover +1 +
--lv-thumbnail-object-fit +Object-fit for thumbnail +cover +
--lv-play-button-width +Play button width +68px +
--lv-play-button-height +Play button height +48px +
--lv-play-button-bg +Play button background +rgba(33, 33, 33, 0.8) +
--lv-play-button-bg-hover +Play button hover background +rgba(230, 33, 23, 1) +
--lv-play-button-color +Play button arrow color +rgba(255, 255, 255, 0.9) +
--lv-play-button-radius +Play button border radius +8px +
--lv-play-button-arrow-size +Play button arrow size +12px 0 12px 20px +
--lv-title-padding +Title bar padding +10px 12px +
--lv-title-bg +Title background +rgba(0, 0, 0, 0.75) +
--lv-title-color +Title text color +white +
--lv-title-font-family +Title font family +Roboto, Arial, sans-serif +
--lv-title-font-size +Title font size +18px +
--lv-title-font-weight +Title font weight +500 +
--lv-title-line-height +Title line height +1.2 +
--lv-focus-outline +Focus outline +2px solid #4285F4 +
--lv-focus-outline-offset +Focus outline offset +2px +
--lv-show-title +Show/hide title bar (use 'none' to hide) +block +
--lv-timestamp-right +Timestamp right position +10px +
--lv-timestamp-bottom +Timestamp bottom position +10px +
--lv-timestamp-bg +Timestamp background +rgba(0, 0, 0, 0.7) +
--lv-timestamp-color +Timestamp text color +white +
--lv-timestamp-padding +Timestamp padding +2px 6px +
--lv-timestamp-radius +Timestamp border radius +3px +
--lv-timestamp-font-size +Timestamp font size +12px +
--lv-timestamp-font-family +Timestamp font family +system-ui, sans-serif +
--lv-loading-bg +Loading background +rgba(0,0,0,0.7) +
--lv-loading-color +Loading text color +white +
--lv-loading-font-family +Loading font family +system-ui, sans-serif +
--lv-fallback-bg +Fallback background +#1a1a1a +
--lv-fallback-color +Fallback text color +white +
--lv-fallback-font-family +Fallback font family +system-ui, sans-serif +
--lv-fallback-font-size +Fallback font size +14px +
+
+
+
+

Examples

+

YouTube Embed with Custom Size

+
+HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Till We Have Faces by Silent Planet"
+    width="50%"
+    height="260px"
+    thumbnail-quality="maxres">
+</lazy-video>
+
+
+ +
+

Bitchute with Autoload Off

+
+HTML +
<lazy-video 
+    src="https://www.bitchute.com/video/zSfeNPF-OpY"
+    title="Trump Assassination Attempt Documents LOCKED Away. What are they Hiding?"
+    autoload="false"
+    thumbnail="https://static-3.bitchute.com/live/cover_images/nDPZqzyLkFKW/zSfeNPF-OpY_640x360.jpg">
+</lazy-video>
+
+
+ + +
+

+With autoload="false" on Bitchute, users need to click twice: once to load the player, and again to play. This saves bandwidth but may be less convenient. +

+
+

Bitchute with Autoload

+
+HTML +
<lazy-video 
+    src="https://www.bitchute.com/video/zSfeNPF-OpY"
+    title="Trump Assassination Attempt Documents LOCKED Away. What are they Hiding?">
+</lazy-video>
+
+
+ + +
+

Responsive Container (FitVids Style)

+
+HTML +
<div style="max-width: 100%; width: 100%;">
+    <lazy-video 
+        src="https://www.youtube.com/embed/wPr3kws2prM"
+        title="Responsive container example"
+        container-fit="true">
+    </lazy-video>
+</div>
+
+
+
+ + +
+
+

+container-fit="true" makes the video fill its parent container while keeping the aspect ratio. Great for fluid layouts. +

+
+

YouTube with Hidden Title Bar

+
+HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM"
+    title="Hidden title example"
+    hide-title>
+</lazy-video>
+
+
+ + +
+

Global Title Control with CSS

+
+CSS +
/* Hide titles for all videos */
+lazy-video {
+    --lv-show-title: none;
+}
+
+/* Hide titles for a group */
+.article-videos lazy-video {
+    --lv-show-title: none;
+}
+
+

Global Alignment Control with CSS

+
+CSS +
/* Set alignment for all videos */
+lazy-video {
+    --lv-align: left;
+}
+
+/* Responsive alignment */
+@media (max-width: 768px) {
+    lazy-video {
+        --lv-align: center;
+    }
+}
+
+/* Different alignments for different contexts */
+.sidebar lazy-video {
+    --lv-align: right;
+}
+
+
+
+

Converting Existing iframes

+

+You can convert existing video iframes to <lazy-video> by simply changing the tag name. +

Standard YouTube iframe: +

+HTML +
<iframe 
+    src="https://www.youtube.com/embed/wPr3kws2prM?start=30&rel=0&controls=0" 
+    width="560" 
+    height="315" 
+    title="Till We Have Faces by Silent Planet" 
+    frameborder="0" 
+    allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" 
+    allowfullscreen>
+</iframe>
+
+
+

Converted to <lazy-video> (just change the tag): +

+HTML +
<lazy-video 
+    src="https://www.youtube.com/embed/wPr3kws2prM?start=30&rel=0&controls=0" 
+    width="560" 
+    height="315" 
+    title="Till We Have Faces by Silent Planet" 
+    frameborder="0" 
+    allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" 
+    allowfullscreen>
+</lazy-video>
+
+
+
+

Security & Privacy

+

+<lazy-video> is built with modern web security and privacy best practices: +

    +
  • +All embedded iframes use the credentialless attribute. This helps prevent credential leaks and keeps third-party content isolated from your site's cookies and storage. +
  • +The sandbox attribute is set by default, restricting what the embedded player can do and reducing risk from third-party content. +
  • +For YouTube, the youtube-nocookie.com domain is used by default, so no tracking cookies are set unless the user interacts with the video (see the opt-out example just after this list). +
+
+
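
If you need the regular youtube.com embed domain instead, the component reads a no-cookie attribute. A sketch, assuming no-cookie="false" is the opt-out value:

HTML
<lazy-video 
    src="https://www.youtube.com/embed/wPr3kws2prM"
    title="Till We Have Faces by Silent Planet"
    no-cookie="false">
</lazy-video>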

+Note: You can override the sandbox attribute if you need to enable additional features, but the default is designed for maximum safety. +
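
For example, to supply your own sandbox token list (the tokens below are illustrative; keep the list as tight as your embed allows):

HTML
<lazy-video 
    src="https://www.youtube.com/embed/wPr3kws2prM"
    title="Custom sandbox example"
    sandbox="allow-scripts allow-same-origin allow-popups allow-presentation">
</lazy-video>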

+
+
+

Browser Support

+

+Works in all modern browsers (Chrome, Firefox, Safari, Edge). Uses standard web component APIs. For IE11 or older, use the custom-elements polyfill. +
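
A sketch of loading a polyfill ahead of the component for legacy browsers; the CDN URL is illustrative, and the /js/lv.js path assumes this repo's layout:

HTML
<!-- Load a custom elements polyfill first, then the component -->
<script src="https://unpkg.com/@webcomponents/custom-elements"></script>
<script src="/js/lv.js"></script>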

+
+

Breaking Change

+

+April 3, 2025: The old <lazy-youtube> element is no longer supported. Please update any code to use <lazy-video> instead. +
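
A minimal before/after sketch of the rename (the attributes shown on the old element are assumptions; carry over whatever your existing markup uses):

HTML
<!-- Before (no longer supported) -->
<lazy-youtube 
    src="https://www.youtube.com/embed/wPr3kws2prM"
    title="Till We Have Faces by Silent Planet">
</lazy-youtube>

<!-- After -->
<lazy-video 
    src="https://www.youtube.com/embed/wPr3kws2prM"
    title="Till We Have Faces by Silent Planet">
</lazy-video>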

+
+
These docs reflect the latest release version of @lv.js.
+

Last updated: Friday, April 11th, 2025 +

+
diff --git a/public/images/Basic-POW-Overview.excalidraw.svg b/public/images/Basic-POW-Overview.excalidraw.svg new file mode 100644 index 0000000..de7c870 --- /dev/null +++ b/public/images/Basic-POW-Overview.excalidraw.svg @@ -0,0 +1,2 @@ +RequestCheck for tokenHas tokenNo tokenPresent challengeExpired?YesNoValidate w/ serverValidInvalidPass Request \ No newline at end of file diff --git a/public/images/Basic-POW-Overview.excalidraw.svg.fiber.gz b/public/images/Basic-POW-Overview.excalidraw.svg.fiber.gz new file mode 100644 index 0000000..3cc0d26 Binary files /dev/null and b/public/images/Basic-POW-Overview.excalidraw.svg.fiber.gz differ diff --git a/public/images/copy.svg b/public/images/copy.svg new file mode 100644 index 0000000..27ed44f --- /dev/null +++ b/public/images/copy.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/favi.png b/public/images/favi.png new file mode 100644 index 0000000..6887f26 Binary files /dev/null and b/public/images/favi.png differ diff --git a/public/images/favi.svg b/public/images/favi.svg new file mode 100644 index 0000000..ba46e64 --- /dev/null +++ b/public/images/favi.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/public/images/favi.svg.fiber.gz b/public/images/favi.svg.fiber.gz new file mode 100644 index 0000000..0571c21 Binary files /dev/null and b/public/images/favi.svg.fiber.gz differ diff --git a/public/images/india-block.jpg b/public/images/india-block.jpg new file mode 100644 index 0000000..a9519f4 Binary files /dev/null and b/public/images/india-block.jpg differ diff --git a/public/images/logos/Source/immich.svg b/public/images/logos/Source/immich.svg new file mode 100644 index 0000000..08e60a4 --- /dev/null +++ b/public/images/logos/Source/immich.svg @@ -0,0 +1,29 @@ + + + + + + + + + + + + diff --git a/public/images/logos/Source/jellyfin.svg b/public/images/logos/Source/jellyfin.svg new file mode 100644 index 0000000..d4d7f01 --- /dev/null +++ b/public/images/logos/Source/jellyfin.svg @@ -0,0 +1,24 @@ + + + + + + + + + + icon-transparent + + + + + diff --git a/public/images/logos/Source/linkwarden.png b/public/images/logos/Source/linkwarden.png new file mode 100644 index 0000000..ac6d7e9 Binary files /dev/null and b/public/images/logos/Source/linkwarden.png differ diff --git a/public/images/logos/Source/navidrome.svg b/public/images/logos/Source/navidrome.svg new file mode 100644 index 0000000..cae50d7 --- /dev/null +++ b/public/images/logos/Source/navidrome.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/logos/Source/portainer.svg b/public/images/logos/Source/portainer.svg new file mode 100644 index 0000000..af7ef28 --- /dev/null +++ b/public/images/logos/Source/portainer.svg @@ -0,0 +1,16 @@ + + + + + + + + diff --git a/public/images/logos/immich.svg b/public/images/logos/immich.svg new file mode 100644 index 0000000..ecf098d --- /dev/null +++ b/public/images/logos/immich.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/logos/immich.svg.fiber.gz b/public/images/logos/immich.svg.fiber.gz new file mode 100644 index 0000000..2439c7d Binary files /dev/null and b/public/images/logos/immich.svg.fiber.gz differ diff --git a/public/images/logos/jellyfin.svg b/public/images/logos/jellyfin.svg new file mode 100644 index 0000000..5e74815 --- /dev/null +++ b/public/images/logos/jellyfin.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/logos/jellyfin.svg.fiber.gz b/public/images/logos/jellyfin.svg.fiber.gz new file mode 100644 
index 0000000..7a20f3f Binary files /dev/null and b/public/images/logos/jellyfin.svg.fiber.gz differ diff --git a/public/images/logos/linkwarden.jpg b/public/images/logos/linkwarden.jpg new file mode 100644 index 0000000..ef865c9 Binary files /dev/null and b/public/images/logos/linkwarden.jpg differ diff --git a/public/images/logos/linkwarden.webp b/public/images/logos/linkwarden.webp new file mode 100644 index 0000000..8ef0afd Binary files /dev/null and b/public/images/logos/linkwarden.webp differ diff --git a/public/images/logos/navidrome.svg b/public/images/logos/navidrome.svg new file mode 100644 index 0000000..df1dd1a --- /dev/null +++ b/public/images/logos/navidrome.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/logos/navidrome.svg.fiber.gz b/public/images/logos/navidrome.svg.fiber.gz new file mode 100644 index 0000000..4ccc072 Binary files /dev/null and b/public/images/logos/navidrome.svg.fiber.gz differ diff --git a/public/images/logos/portainer.svg b/public/images/logos/portainer.svg new file mode 100644 index 0000000..3821cd4 --- /dev/null +++ b/public/images/logos/portainer.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/images/logos/portainer.svg.fiber.gz b/public/images/logos/portainer.svg.fiber.gz new file mode 100644 index 0000000..041e490 Binary files /dev/null and b/public/images/logos/portainer.svg.fiber.gz differ diff --git a/public/js/c.js b/public/js/c.js new file mode 100644 index 0000000..813da2f --- /dev/null +++ b/public/js/c.js @@ -0,0 +1 @@ +function workerFunction(){self.onmessage=function(e){const{type:t,data:s}=e.data;if(t==="pow"){const{challenge:i,salt:a,startNonce:r,endNonce:c,target:l,batchId:o}=s;let e=0,t=null;n(r);function n(s){const r=String(i)+String(a)+s.toString(),d=(new TextEncoder).encode(r);crypto.subtle.digest("SHA-256",d).then(i=>{const a=Array.from(new Uint8Array(i)),r=a.map(e=>e.toString(16).padStart(2,"0")).join("");if(e++,r.startsWith(l)){t={nonce:s.toString(),found:!0},self.postMessage({type:"pow_result",solution:t,count:e,batchId:o});return}e%1e3===0&&self.postMessage({type:"progress",count:e,batchId:o}),sn(s+1),0):t||self.postMessage({type:"pow_result",solution:null,count:e,batchId:o})}).catch(e=>{self.postMessage({type:"error",error:"Crypto API error: "+e.message})})}}else self.postMessage({type:"error",error:"Unknown message type: "+t})}}const workerCode="("+workerFunction.toString()+")()";function posWorkerFunction(){self.onmessage=async function(t){const{type:s,seedHex:n,isDecoy:o}=t.data;if(s==="pos"){const s=48,a=160;let t=parseInt(n.slice(0,8),16);t!=t&&(t=Math.floor(Math.random()*(a-s+1)));const b=o?s+(t*3+17)%(a-s+1):s+t%(a-s+1),r=b*1024*1024,i=4+t%5,l=Math.floor(r/i),m=4096,f=1024,p=35*1024*1024,g=3,v=new ArrayBuffer(r),c=new Uint8Array(v),j=new ArrayBuffer(16*1024*1024),d=new Uint8Array(j),u=[],h=[];for(let s=0;st);for(let e=t.length-1;e>0;e--){const n=o()%(e+1);[t[e],t[n]]=[t[n],t[e]]}const a=performance.now();for(let e=0;ee.toString(16).padStart(2,"0")).join("")),h.push(Math.round(b-a));for(let e=0;e>>0;function n(e,t){return(e<>>32-t)>>>0}return function(){const s=t[1]<<9;let e=t[0]*5>>>0;e=n(e,7)*9>>>0;const o=t[0]^t[2];return t[2]^=t[1],t[1]^=t[3],t[0]^=t[1],t[3]^=o,t[2]^=s,t[3]=n(t[3],11),e>>>0}}}const posWorkerCode="("+posWorkerFunction.toString()+")()";document.addEventListener("DOMContentLoaded",function(){setTimeout(e,650);function e(){const t=document.getElementById("verification-data"),o=t.getAttribute("data-target"),n=t.getAttribute("data-request-id");i();async 
function i(){try{const t=await fetch("/api/pow/challenge?id="+encodeURIComponent(n),{method:"GET",headers:{Accept:"application/json"}});if(!t.ok)throw new Error("Failed to get challenge parameters");const e=await t.json(),r=e.d,s=e.e||Math.random().toString(16).slice(2,18),i=e.f||[],a=new c(e,o,n,s,i);a.start()}catch(t){e("Verification setup failed: "+t.message)}}function a(){const e=new Blob([workerCode],{type:"text/javascript"});return new Worker(URL.createObjectURL(e))}function s(){const e=new Blob([posWorkerCode],{type:"text/javascript"});return new Worker(URL.createObjectURL(e))}function e(){const s=document.querySelector(".container");s.classList.add("error"),s.classList.remove("success");const i=document.querySelector(".spinner"),n=document.getElementById("status");n.style.display="inline-block",n.textContent="",n.classList.add("error"),n.classList.remove("success");const o=document.querySelector(".spinner-container");let t=document.getElementById("error-details");t||(t=document.createElement("div"),t.id="error-details",t.className="error-details",o.appendChild(t)),t.style.display="none"}function r(){document.querySelector(".container").classList.add("success"),document.getElementById("status").textContent="Redirecting"}function c(t,n,o,i,c){const l=[],d={};let u=null,h=!1;const p=navigator.hardwareConcurrency||4,f=Math.max(1,Math.floor(p*.8)),g=1488;this.start=function(){setTimeout(v,100)};async function v(){try{h=!0;let e,n;try{e=atob(t.a),n=atob(t.b)}catch(e){throw new Error(`Failed to decode challenge/salt: ${e.message}`)}const i="0".repeat(t.c);for(let t=0;tb(e.data),e.onerror=e=>{},l.push(e)}const s=Number.MAX_SAFE_INTEGER,o=Math.floor(s/f);for(let t=0;t{const n=s();n.onmessage=t=>{t.data.type==="pos_result"&&(e(t.data),n.terminate())},n.postMessage({type:"pos",seedHex:t.d,isDecoy:!1})}),a=await new Promise(e=>{const t=s();t.onmessage=n=>{n.data.type==="pos_result"&&(e(n.data),t.terminate())},t.postMessage({type:"pos",seedHex:i,isDecoy:!0})});await y({requestID:o,g:n.nonce,h:e.hashes,i:e.times,j:a.hashes,k:a.times,l:c})}catch(t){e(t.message)}}function m(){l.forEach(e=>e.terminate())}async function y(t){try{const s=await fetch("/api/pow/verify",{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({request_id:t.requestID,g:t.g,h:t.h,i:t.i,j:t.j,k:t.k,l:t.l})});if(!s.ok){let t=`Verification failed: ${s.statusText}`;try{const e=await s.json();if(e&&e.error)t+=` - ${e.error}`;else{const e=await s.text();t+=` - Response: ${e}`}}catch{}e(t);return}r(),setTimeout(()=>{window.location.href=n},g)}catch{e("Verification failed. 
Please refresh the page.")}}}}}) \ No newline at end of file diff --git a/public/js/c.js.fiber.gz b/public/js/c.js.fiber.gz new file mode 100644 index 0000000..3e40adc Binary files /dev/null and b/public/js/c.js.fiber.gz differ diff --git a/public/js/cc.js b/public/js/cc.js new file mode 100644 index 0000000..80d1f14 --- /dev/null +++ b/public/js/cc.js @@ -0,0 +1,87 @@ +document.addEventListener("DOMContentLoaded",()=>{const m=document.getElementById("cards-container"),f=document.getElementById("empty-state"),O=document.getElementById("search-input"),U=document.getElementById("add-card-btn"),$=document.getElementById("add-first-card-btn"),l=document.getElementById("card-modal"),H=document.getElementById("close-modal"),v=document.getElementById("card-form"),b=document.getElementById("modal-title"),u=document.getElementById("card-id"),P=document.getElementById("add-category-btn"),g=document.getElementById("categories-container"),t=document.getElementById("payment-modal"),R=document.getElementById("close-payment-modal"),C=document.getElementById("payment-form"),p=document.getElementById("payment-card-id"),w=document.getElementById("payment-card-name"),c=document.getElementById("payment-category"),h=document.getElementById("payment-amount"),d=document.getElementById("payment-date"),n=document.createElement("div");n.className="modal",n.id="payment-history-modal";let e=M();o(),d.valueAsDate=new Date,U.addEventListener("click",()=>x()),$.addEventListener("click",()=>x()),H.addEventListener("click",()=>s(l)),R.addEventListener("click",()=>s(t)),v.addEventListener("submit",I),P.addEventListener("click",()=>y("","","")),C.addEventListener("submit",B),O.addEventListener("input",W),m.addEventListener("click",L),K(),E();function E(){n.innerHTML=` + + `,document.body.appendChild(n),document.getElementById("close-history-modal").addEventListener("click",()=>{s(n)})}function L(t){const n=t.target,i=n.closest(".credit-card");if(!i)return;const a=i.dataset.id,s=e.find(e=>e.id===a);if(!s)return;if(n.closest(".payment-btn")){t.stopPropagation(),T(s);return}if(n.closest(".edit-btn")){t.stopPropagation(),F(s);return}if(n.closest(".delete-btn")){t.stopPropagation(),V(a);return}const o=n.closest(".category-item");if(o&&o.dataset.categoryName){const t=o.dataset.categoryName,e=s.categories.find(e=>e.name===t);e&&z(s,e)}}function M(){const e=localStorage.getItem("creditCards");return e?JSON.parse(e):[]}function i(){localStorage.setItem("creditCards",JSON.stringify(e))}function o(){if(Array.from(m.children).forEach(e=>{e.classList.contains("empty-state")||e.remove()}),e.length===0){f.style.display="block";return}f.style.display="none";let n=e;const t=O.value.toLowerCase().trim();t&&(n=e.filter(e=>{const n=e.name.toLowerCase().includes(t),s=e.bank.toLowerCase().includes(t),o=e.categories.some(e=>e.name.toLowerCase().includes(t));return n||s||o})),n.forEach(e=>{const t=k(e);m.insertBefore(t,f)})}function k(e){const n=document.createElement("div");n.className="credit-card",n.dataset.id=e.id;const i=new Date,s=i.getDate(),o=parseInt(e.statementDate);let t;if(s===o)t=0;else if(s +
+

${e.name}

+
${e.bank}${e.lastDigits?` •••• ${e.lastDigits}`:""}
+
+
+ + + +
+ +
+
+ Statement Cycle: + Day ${e.statementDate} +
+
+ Cycle Resets: + ${t===0?"Today":`In ${t} day${t!==1?"s":""}`} +
+ ${e.expiryDate?` +
+ Expires: + ${A(e.expiryDate)} +
+ `:""} +
+
+ ${e.categories.map(e=>{const n=e.payments,s=n.reduce((e,t)=>e+parseFloat(t.amount),0),t=e.limit>0,o=t?s/e.limit*100:0,c=o>=75&&o<100,l=o>=100,i=n.length>0,a=(s*e.rate/100).toFixed(2),d=t?(e.limit*e.rate/100).toFixed(2):0,r=t?` ($${a}/$${d})`:` ($${a})`;return` +
+
+ ${e.name}: ${e.rate}% + ${t?` + $${s.toFixed(2)} / $${e.limit.toFixed(2)}${r} + `:`$${s.toFixed(2)}${r}`} +
+ ${t?` +
+
+
+ `:""} + ${i?``:""} +
+ `}).join("")} +
+ `,n}function A(e){const[t,n]=e.split("-");return`${n}/${t}`}function S(e){const t=new Date(e);return t.toLocaleDateString("en-US",{month:"2-digit",day:"2-digit",year:"numeric"})}function x(){v.reset(),u.value="",b.textContent="Add New Card",j(),a(l)}function F(e){v.reset(),u.value=e.id,document.getElementById("card-name").value=e.name,document.getElementById("card-bank").value=e.bank,document.getElementById("last-digits").value=e.lastDigits||"",document.getElementById("expiry-date").value=e.expiryDate||"",document.getElementById("statement-date").value=e.statementDate,b.textContent="Edit Card",j(),e.categories.length===0||e.categories.forEach(e=>{y(e.name,e.rate,e.limit)}),a(l)}function T(e){p.value=e.id,w.textContent=e.name,c.innerHTML="",e.categories.forEach(e=>{if(!e)return;const t=document.createElement("option");if(t.value=e.name,t.textContent=`${e.name} (${e.rate}%)`,e.limit>0){const n=e.payments,s=n.reduce((e,t)=>e+parseFloat(t.amount),0),o=Math.max(0,e.limit-s);t.textContent+=` - $${o.toFixed(2)} remaining`}c.appendChild(t)}),h.value="",d.valueAsDate=new Date,document.getElementById("payment-note").value="",t.querySelector(".modal-title").textContent="Record Payment",t.querySelector(".save-btn").textContent="Record Payment",a(t)}function z(e,t){if(!e||!t)return;const s=document.getElementById("payment-history-container"),i=n.querySelector(".modal-title");i.textContent=`${e.name} - ${t.name} Payments`,s.innerHTML="";const o=t.payments;if(o.length===0)s.innerHTML=` +
+

No payment history

+

No payments have been recorded for this category yet.

+
+ `;else{const n=[...o].sort((e,t)=>new Date(t.date)-new Date(e.date)),i=` +
+
+ Date + Amount + Note + Actions +
+ ${n.map(e=>` +
+ ${S(e.date)} + $${parseFloat(e.amount).toFixed(2)} + ${e.note||"-"} +
+ + +
+
+ `).join("")} +
+ `;s.innerHTML=i,s.querySelectorAll(".edit-payment-btn").forEach(n=>{n.addEventListener("click",n=>{const i=n.target.closest(".payment-history-item").dataset.paymentId,s=o.find(e=>e.id===i);s&&D(e.id,t.name,s)})}),s.querySelectorAll(".delete-payment-btn").forEach(n=>{n.addEventListener("click",n=>{const i=n.target.closest(".payment-history-item").dataset.paymentId,s=o.find(e=>e.id===i);s&&N(e.id,t.name,s.id)})})}a(n)}function D(o,i,r){p.value=o;const m=e.find(e=>e.id===o);if(!m)return;w.textContent=m.name,c.innerHTML="";const l=m.categories.find(e=>e.name===i);if(!l)return;const f=document.createElement("option");f.value=l.name,f.textContent=`${l.name} (${l.rate}%)`,c.appendChild(f),h.value=r.amount,d.value=r.date,document.getElementById("payment-note").value=r.note||"";const u=document.getElementById("payment-id")||document.createElement("input");u.type="hidden",u.id="payment-id",u.value=r.id,document.getElementById("payment-id")||C.appendChild(u),t.querySelector(".modal-title").textContent="Edit Payment",t.querySelector(".save-btn").textContent="Update Payment",s(n),setTimeout(()=>{a(t)},300)}function N(t,a,c){if(!confirm("Are you sure you want to delete this payment?"))return;const l=e.findIndex(e=>e.id===t);if(l===-1)return;const h=e[l].categories.findIndex(e=>e.name===a);if(h===-1)return;const d=e[l].categories[h].payments,u=d.findIndex(e=>e.id===c);if(u===-1)return;const m=d[u].amount;d.splice(u,1),i(),s(n),r(`Payment of $${m.toFixed(2)} has been deleted`),o()}function a(e){e.classList.add("active"),document.body.style.overflow="hidden",setTimeout(()=>{const t=e.querySelector('input:not([type="hidden"])');t&&t.focus()},100)}function s(e){e.classList.remove("active"),document.body.style.overflow=""}function j(){g.innerHTML=""}function y(e="",t="",n=""){const s=document.createElement("div");s.className="category-inputs",s.innerHTML=` + + + + + `,s.querySelector(".remove-category").addEventListener("click",function(){s.remove()}),g.appendChild(s)}function I(t){t.preventDefault();const a=u.value||_(),n=!!u.value,m=document.getElementById("card-name").value,f=document.getElementById("card-bank").value,p=document.getElementById("last-digits").value,v=document.getElementById("expiry-date").value,b=document.getElementById("statement-date").value,h=[],j=g.querySelectorAll(".category-inputs");j.forEach(t=>{const s=t.querySelector(".category-name").value.trim(),o=t.querySelector(".category-rate").value.trim(),i=t.querySelector(".category-limit").value.trim();if(!s&&!o&&!i)return;let c=[];const r=n?e.find(e=>e.id===a):null;if(n&&r&&r.categories){const e=r.categories.find(e=>e.name===s);e&&Array.isArray(e.payments)&&(c=e.payments)}h.push({name:s,rate:o?parseFloat(o):0,limit:i?parseFloat(i):null,payments:c})});const c={id:a,name:m,bank:f,lastDigits:p,expiryDate:v,statementDate:b,categories:h,createdAt:(new Date).toISOString()},d=n?e.findIndex(e=>e.id===a):-1;d!==-1?(c.archivedPayments=e[d].archivedPayments,e[d]=c):e.push(c),i(),o(),r(n?"Card updated successfully":"Card added successfully"),s(l)}function B(n){n.preventDefault();const b=p.value,j=c.value,l=parseFloat(h.value),f=d.value,g=document.getElementById("payment-note").value,u=document.getElementById("payment-id")?.value,m=e.findIndex(e=>e.id===b);if(m===-1)return;const v=e[m].categories.findIndex(e=>e.name===j);if(v===-1)return;const a=e[m].categories[v].payments;if(u){const e=a.findIndex(e=>e.id===u);e!==-1&&(a[e]={id:u,amount:l,date:f,note:g,createdAt:a[e].createdAt,updatedAt:(new Date).toISOString()},r("Payment updated 
successfully"))}else{const e={id:_(),amount:l,date:f,note:g,createdAt:(new Date).toISOString()};a.push(e),r(`Payment of $${l.toFixed(2)} recorded`)}i(),o(),document.getElementById("payment-id")&&document.getElementById("payment-id").remove(),t.querySelector(".modal-title").textContent="Record Payment",s(t)}function V(t){if(!confirm("Are you sure you want to delete this card?"))return;const n=e.find(e=>e.id===t)?.name||"Card";e=e.filter(e=>e.id!==t),i(),o(),r(`${n} has been deleted`)}function r(e){const n=document.querySelector(".toast");n&&n.remove();const t=document.createElement("div");t.className="toast",t.textContent=e,document.body.appendChild(t),setTimeout(()=>t.classList.add("show"),10),setTimeout(()=>{t.classList.remove("show"),setTimeout(()=>t.remove(),300)},3e3)}function W(){o()}function _(){return Date.now().toString(36)+Math.random().toString(36).substr(2,5)}function K(){const t=new Date,n=localStorage.getItem("lastCycleCheck");if(!n||new Date(n).toDateString()!==t.toDateString()){const n=t.getDate();e.forEach(e=>{const s=parseInt(e.statementDate);n===s&&e.categories.forEach(n=>{if(n.payments.length>0){e.archivedPayments||(e.archivedPayments=[]);const s={date:t.toISOString(),categories:[{name:n.name,rate:n.rate,payments:[...n.payments]}]};e.archivedPayments.push(s),n.payments=[]}})}),i(),localStorage.setItem("lastCycleCheck",t.toISOString())}}}) \ No newline at end of file diff --git a/public/js/docs.js b/public/js/docs.js new file mode 100644 index 0000000..829c6a1 --- /dev/null +++ b/public/js/docs.js @@ -0,0 +1 @@ +import{highlightElement}from"https://unpkg.com/@speed-highlight/core@1.2.7/dist/index.js";document.addEventListener("DOMContentLoaded",()=>{initSyntaxHighlighting()});async function initSyntaxHighlighting(){try{const e=document.querySelectorAll(".code-example pre code");e.forEach(e=>{let t="html";const s=e.closest(".code-example");if(s){const e=s.querySelector(".code-label");if(e){const n=e.textContent.trim().toLowerCase();n==="css"&&(t="css"),(n==="js"||n==="javascript")&&(t="js"),(n==="go"||n==="golang")&&(t="go"),n==="json"&&(t="json"),n==="http"&&(t="http")}}const n=document.createElement("div");n.className=`shj-lang-${t}`,n.textContent=e.textContent;const o=e.parentElement;o.parentNode.replaceChild(n,o),highlightElement(n,t)})}catch(e){console.warn("Syntax highlighting failed to initialize:",e)}} \ No newline at end of file diff --git a/public/js/lightbox.js b/public/js/lightbox.js new file mode 100644 index 0000000..58ce1ed --- /dev/null +++ b/public/js/lightbox.js @@ -0,0 +1,15 @@ +(function(){const e={selector:".lightbox-img, #flowDiagram",captionAttribute:"data-caption",zoomable:!0,maxZoom:300,minZoom:100,closeOnEsc:!0,closeOnOutsideClick:!0};window.EasyLightbox={options:{...e},init:function(t={}){return this.options={...e,...t},this._createLightbox(),this._initImageListeners(),this},_createLightbox:function(){if(document.getElementById("imageLightbox"))return;const e=document.createElement("div");if(e.id="imageLightbox",e.className="lightbox",e.innerHTML=` + + `,!document.getElementById("lightbox-styles")){const 
e=document.createElement("link");e.id="lightbox-styles",e.rel="stylesheet",e.href="/css/lightbox.css",document.head.appendChild(e)}document.body.appendChild(e),this.elements={lightbox:e,lightboxImg:document.getElementById("lightboxImg"),lightboxCaption:document.getElementById("lightboxCaption"),lightboxClose:document.getElementById("lightboxClose"),zoomSlider:document.getElementById("zoomSlider"),zoomValue:document.getElementById("zoomValue")},this._initLightboxHandlers()},_initImageListeners:function(){const n=document.querySelectorAll(this.options.selector),t=this;n.forEach(e=>{if(e.id==="lightboxImg")return;if(e.dataset.lightboxInitialized)return;e.dataset.lightboxInitialized="true",e.style.cursor="pointer",e.addEventListener("click",function(){let e=this.getAttribute(t.options.captionAttribute);(this.id==="flowDiagram"||!e)&&(e="Basic POW Flow Diagram"),t.open(this,e)})});const e=document.getElementById("flowDiagram");e&&!e.dataset.lightboxInitialized&&(e.dataset.lightboxInitialized="true",e.style.cursor="pointer",e.addEventListener("click",function(){t.open(this,"Basic POW Flow Diagram")}))},_initLightboxHandlers:function(){const n=this,e=this.elements;let o=!1,i,a,u,l,r=0,c=0;this.options.zoomable&&e.zoomSlider&&e.zoomSlider.addEventListener("input",function(){const t=this.value;e.zoomValue.textContent=t+"%",h()}),e.lightboxClose&&e.lightboxClose.addEventListener("click",function(e){e.preventDefault(),e.stopPropagation(),n.close()}),this.options.closeOnOutsideClick&&e.lightbox.addEventListener("click",function(t){t.target===e.lightbox&&n.close()}),this.options.closeOnEsc&&document.addEventListener("keydown",function(t){t.key==="Escape"&&e.lightbox.classList.contains("active")&&n.close()}),e.lightboxImg&&(e.lightboxImg.addEventListener("mousedown",d),e.lightboxImg.addEventListener("touchstart",d));function d(d){if(!n.options.zoomable||parseInt(e.zoomSlider.value)<=100)return;d.preventDefault(),d.type==="touchstart"?(i=d.touches[0].clientX,a=d.touches[0].clientY):(i=d.clientX,a=d.clientY),u=r,l=c,o=!0,e.lightboxImg.classList.add("grabbing"),document.addEventListener("mousemove",s),document.addEventListener("touchmove",s),document.addEventListener("mouseup",t),document.addEventListener("touchend",t),document.addEventListener("mouseleave",t)}function s(e){if(!o)return;e.preventDefault();let t,n;if(e.type==="touchmove"){if(e.touches.length===0)return;t=e.touches[0].clientX,n=e.touches[0].clientY}else t=e.clientX,n=e.clientY;const s=t-i,d=n-a;r=u+s,c=l+d,h()}function t(){if(!o)return;o=!1,e.lightboxImg.classList.remove("grabbing"),document.removeEventListener("mousemove",s),document.removeEventListener("touchmove",s),document.removeEventListener("mouseup",t),document.removeEventListener("touchend",t),document.removeEventListener("mouseleave",t)}function h(){if(!n.options.zoomable)return;const t=parseInt(e.zoomSlider.value)/100;e.lightboxImg.style.transform=`scale(${t}) translate(${r/t}px, ${c/t}px)`}const m=window.matchMedia("(max-width: 768px), (max-width: 1024px) and (orientation: landscape)").matches;m&&e.lightboxImg&&e.lightboxImg.addEventListener("touchmove",function(e){e.touches.length>1&&e.preventDefault()})},open:async function(e,t){if(!e||!this.elements)return;const n=this.elements;let u=0,h=0;if(n.lightboxImg&&n.lightboxImg.parentNode){n.lightboxImg.style.display="";const e=n.lightboxImg.parentNode.querySelector("svg.injected-svg");e&&e.remove()}const i=e.src||e.getAttribute("data-fullsize")||"",l=i.toLowerCase().endsWith(".svg");function a(){return 
n.zoomSlider?parseInt(n.zoomSlider.value)/100:1}function r(e,t,n,s){e.style.transform=`scale(${s}) translate(${t/s}px, ${n/s}px)`}if(l){n.lightboxImg.style.display="none";try{const l=await fetch(i);let d=await l.text();const t=document.createElement("div");t.innerHTML=d;const e=t.querySelector("svg");if(e){e.classList.add("injected-svg"),e.style.transformOrigin="center center",e.style.maxWidth="100%",e.style.maxHeight="100%",e.style.display="block",e.style.cursor="grab",e.style.userSelect="none",e.removeAttribute("width"),e.removeAttribute("height"),n.lightboxImg.parentNode.appendChild(e),n.zoomSlider&&(n.zoomSlider.value=100,n.zoomValue.textContent="100%");let t=0,i=0,l=!1,u,h,m=0,f=0,d=a();r(e,t,i,d),e.addEventListener("mousedown",c),e.addEventListener("touchstart",c);function c(n){n.preventDefault(),l=!0,e.classList.add("grabbing"),n.type==="touchstart"?(u=n.touches[0].clientX,h=n.touches[0].clientY):(u=n.clientX,h=n.clientY),m=t,f=i,document.addEventListener("mousemove",o),document.addEventListener("touchmove",o),document.addEventListener("mouseup",s),document.addEventListener("touchend",s),document.addEventListener("mouseleave",s)}function o(n){if(!l)return;n.preventDefault();let s,o;if(n.type==="touchmove"){if(n.touches.length===0)return;s=n.touches[0].clientX,o=n.touches[0].clientY}else s=n.clientX,o=n.clientY;const c=s-u,d=o-h;t=m+c,i=f+d,r(e,t,i,a())}function s(){if(!l)return;l=!1,e.classList.remove("grabbing"),document.removeEventListener("mousemove",o),document.removeEventListener("touchmove",o),document.removeEventListener("mouseup",s),document.removeEventListener("touchend",s),document.removeEventListener("mouseleave",s)}n.zoomSlider&&(n.zoomSlider.oninput=function(){d=a(),n.zoomValue.textContent=Math.round(d*100)+"%",r(e,t,i,d)})}}catch{n.lightboxImg.style.display=""}}else n.lightboxImg.src=i,n.lightboxImg.style.display="",n.lightboxImg.style.transform="scale(1) translate(0px, 0px)",this.options.zoomable&&n.zoomSlider&&(n.zoomSlider.value=100,n.zoomValue.textContent="100%");const d=t||e.getAttribute(this.options.captionAttribute)||e.alt||e.getAttribute("title")||"";n.lightboxCaption.textContent=d,n.lightbox.classList.add("active"),document.body.style.overflow="hidden"},close:function(){if(!this.elements)return;this.elements.lightbox.classList.remove("active"),document.body.style.overflow=""}},document.readyState==="loading"?document.addEventListener("DOMContentLoaded",function(){window.EasyLightbox.init()}):window.EasyLightbox.init()})() \ No newline at end of file diff --git a/public/js/lv.js b/public/js/lv.js new file mode 100644 index 0000000..e2bc5c2 --- /dev/null +++ b/public/js/lv.js @@ -0,0 +1,166 @@ +const THUMBNAIL_CACHE=new Map,THUMBNAIL_REGISTRY=new Map,VIDEO_SERVICES=new Map,DEFAULT_ALLOW="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; fullscreen; web-share",DEFAULT_SANDBOX="allow-scripts allow-same-origin allow-popups allow-forms allow-presentation";async function checkImage(e){if(THUMBNAIL_CACHE.has(e))return THUMBNAIL_CACHE.get(e);const t=new AbortController,n=setTimeout(()=>t.abort(),2e3);try{const o=await fetch(e,{method:"HEAD",signal:t.signal});clearTimeout(n);const s=o.ok;return THUMBNAIL_CACHE.set(e,s),s}catch{return clearTimeout(n),THUMBNAIL_CACHE.set(e,!1),!1}}function parseUrl(e){try{return new URL(e)}catch{return null}}class VideoServiceProvider{constructor(){this.name="generic"}canHandle(){return!1}getVideoId(){return null}getEmbedUrl(){return""}getThumbnailUrls(e,t,n){const 
s=n.getAttribute("thumbnail");return s?[s]:[]}parseParams(){return{}}getIframeAttributes(e){return{frameborder:e.getAttribute("frameborder")||"0",allow:e.getAttribute("allow")||DEFAULT_ALLOW,sandbox:e.getAttribute("sandbox")||DEFAULT_SANDBOX}}getDefaults(){return{autoload:!1}}}class YouTubeProvider extends VideoServiceProvider{constructor(){super(),this.name="youtube",this.THUMBNAIL_QUALITIES={maxres:"maxresdefault.jpg",sd:"sddefault.jpg",hq:"hqdefault.jpg",mq:"mqdefault.jpg",default:"default.jpg"},this.URL_PATTERNS=[/youtube\.com\/embed\/([a-zA-Z0-9_-]{11})/,/youtube\.com\/watch\?v=([a-zA-Z0-9_-]{11})/,/youtu\.be\/([a-zA-Z0-9_-]{11})/]}canHandle(e){return e&&/youtube\.com|youtu\.be/.test(e)}getVideoId(e){if(!e)return null;const t=parseUrl(e);if(t){if(t.pathname.startsWith("/embed/")||t.hostname==="youtu.be"){const e=t.pathname.split("/");return e[e.length>2?2:1]}const e=t.searchParams.get("v");if(e)return e}for(const n of this.URL_PATTERNS){const t=e.match(n);if(t?.[1])return t[1]}return null}getEmbedUrl(e,t={},n){const o=n.getAttribute("no-cookie")!=="false",i=o?"youtube-nocookie.com":"youtube.com";let s=`https://www.${i}/embed/${e}?autoplay=1`;for(const[e,n]of Object.entries(t))e!=="autoplay"&&e&&n&&(s+=`&${e}=${encodeURIComponent(n)}`);return s}getThumbnailUrls(e,t,n){const i=n.getAttribute("thumbnail");if(i)return[i];const o=`https://img.youtube.com/vi/${e}`,s=[];return t&&this.THUMBNAIL_QUALITIES[t]?s.push(`${o}/${this.THUMBNAIL_QUALITIES[t]}`):window.matchMedia("(max-width: 767px)").matches?s.push(`${o}/${this.THUMBNAIL_QUALITIES.hq}`):s.push(`${o}/${this.THUMBNAIL_QUALITIES.maxres}`),s.includes(`${o}/${this.THUMBNAIL_QUALITIES.hq}`)||s.push(`${o}/${this.THUMBNAIL_QUALITIES.hq}`),s.includes(`${o}/${this.THUMBNAIL_QUALITIES.default}`)||s.push(`${o}/${this.THUMBNAIL_QUALITIES.default}`),s}parseParams(e){const t={},n=parseUrl(e);if(!n)return t;for(const[e,s]of n.searchParams.entries())t[e]=s;return(t.t||t.start)&&(t.start=t.t||t.start),t.list&&(t.playlist=t.list),t}}class BitchuteProvider extends VideoServiceProvider{constructor(){super(),this.name="bitchute",this.URL_PATTERNS=[/bitchute\.com\/video\/([a-zA-Z0-9_-]+)/,/bitchute\.com\/embed\/([a-zA-Z0-9_-]+)/]}canHandle(e){return e&&/bitchute\.com/.test(e)}getVideoId(e){if(!e)return null;const t=parseUrl(e);if(t){const e=t.pathname.split("/").filter(Boolean);for(let t=0;t{const[n,s]=t.split("|");n&&n.removeEventListener&&n.removeEventListener(s,e)}),this._handlers.clear()}_setupObserver(){if(!window.IntersectionObserver)return;this._cleanupObserver(),this._observer=new IntersectionObserver(e=>{e[0].isIntersecting&&!this._loaded&&(this._loadVideo(),this._cleanupObserver())},{rootMargin:"300px",threshold:.1}),this._observer.observe(this)}_updateThumbnail(){const e=this._placeholder?.querySelector('[part="thumbnail"]');if(!e)return;const t=this.getAttribute("thumbnail");if(t){e.src=t;const n=this._placeholder.querySelector('[part="fallback-thumbnail"]');n&&n.remove();return}const n=this._placeholder.dataset.videoId;if(n&&this._videoService){const s=this.getAttribute("thumbnail-quality"),t=this._videoService.getThumbnailUrls(n,s,this);t.length>0?this._loadThumbnail(t,e):this._createFallbackThumbnail()}}_createFallbackThumbnail(){if(!this._placeholder||this._placeholder.querySelector('[part="fallback-thumbnail"]'))return;const e=document.createElement("div");if(e.setAttribute("part","fallback-thumbnail"),this._videoService){const t=this._videoService.name;e.innerHTML=` +
+
${t.charAt(0).toUpperCase()+t.slice(1)}
+
Click to play video
+
+ `}else e.textContent="No thumbnail available";this._placeholder.appendChild(e)}async _createPlaceholder(){const e=this.getAttribute("src");this._videoService=this._getServiceProvider(e);const t=this._videoService?.getVideoId(e);if(!t){this.shadowRoot.innerHTML=` + +

Error: Can't find video ID. Check the 'src' attribute.

+ `;return}this._videoParams=this._videoService.parseParams(e);const s=this.getAttribute("title")||"Video",n=document.createElement("style");n.textContent=LazyVideo.styles;const o=this._buildPlaceholder(t,s);this.shadowRoot.innerHTML="",this.shadowRoot.append(n,o),this._updateStyles()}_buildPlaceholder(e,t){const n=document.createElement("div");n.setAttribute("part","placeholder"),n.setAttribute("role","button"),n.setAttribute("aria-label",`Play: ${t}`),n.setAttribute("tabindex","0"),n.dataset.videoId=e,n.dataset.service=this._videoService.name,this._placeholder=n;const c=this.getAttribute("thumbnail-quality"),a=this._videoService.getThumbnailUrls(e,c,this),s=document.createElement("img");if(s.setAttribute("part","thumbnail"),s.alt=`Thumbnail for ${t}`,s.loading="lazy",s.decoding="async",s.fetchPriority="low",s.style.backgroundColor="#111",n.appendChild(s),a.length>0?this._setupThumbnailObserver(s,a):this._createFallbackThumbnail(),!this.hasAttribute("hide-title")){const e=document.createElement("div");e.setAttribute("part","title-bar"),e.textContent=t,n.appendChild(e)}const r=document.createElement("div");r.setAttribute("part","play-button"),n.appendChild(r);const i=parseInt(this._videoParams.start||this._videoParams.t,10);if(!(i!=i)&&i>0){const e=document.createElement("div");e.setAttribute("part","timestamp"),e.textContent=this._formatTime(i),n.appendChild(e)}const o=e=>{(e.type==="click"||e.key==="Enter"||e.key===" ")&&(e.type!=="click"&&e.preventDefault(),this._loadVideo())};return n.addEventListener("click",o),n.addEventListener("keydown",o),this._handlers.set(`${n}|click`,o),this._handlers.set(`${n}|keydown`,o),n}_setupThumbnailObserver(e,t){if(!window.IntersectionObserver){this._loadThumbnail(t,e);return}this._thumbnailLoadAttempted=!1;const n=new IntersectionObserver(async s=>{if(s[0].isIntersecting&&!this._thumbnailLoadAttempted){this._thumbnailLoadAttempted=!0;try{await this._loadThumbnail(t,e)}catch{this._thumbnailLoadAttempted=!1}finally{n.disconnect()}}},{rootMargin:"300px",threshold:.1});n.observe(e)}async _loadThumbnail(e,t){if(e.length===1&&e[0]===this.getAttribute("thumbnail"))return t.src=e[0],!0;const o=this._placeholder?.dataset?.videoId,i=this._placeholder?.dataset?.service,n=o&&i?`${i}:${o}`:null;if(n&&THUMBNAIL_REGISTRY.has(n))try{const e=await THUMBNAIL_REGISTRY.get(n);if(e)return t.src=e,!0}catch{THUMBNAIL_REGISTRY.delete(n)}let s=null;try{const n=await Promise.all(e.map(e=>checkImage(e).then(t=>({url:e,valid:t})).catch(()=>({valid:!1})))),t=n.find(e=>e.valid);t&&(s=t.url)}catch{for(const t of e)try{if(await checkImage(t)){s=t;break}}catch{}}return s?(t.src=s,n&&THUMBNAIL_REGISTRY.set(n,Promise.resolve(s)),!0):(this._createFallbackThumbnail(),n&&THUMBNAIL_REGISTRY.set(n,Promise.resolve(null)),!1)}_formatTime(e){const t=Math.floor(e/3600),n=Math.floor(e%3600/60),s=e%60;return t>0?`${t}:${n.toString().padStart(2,"0")}:${s.toString().padStart(2,"0")}`:`${n}:${s.toString().padStart(2,"0")}`}_updateStyles(){const e=this.getAttribute("width"),t=this.getAttribute("height"),n=e=>e&&/[a-z%$]/.test(e);if(e?this.style.setProperty("width",n(e)?e:`${e}px`):this.style.removeProperty("width"),t?this.style.setProperty("height",n(t)?t:`${t}px`):this.style.removeProperty("height"),e&&t){const n=parseFloat(e),s=parseFloat(t);!(n!=n)&&!(s!=s)&&this.style.setProperty("--lv-aspect-ratio",`${n} / ${s}`)}}_loadVideo(){if(this._loaded||!this._placeholder)return;const 
t=document.createElement("div");t.setAttribute("part","loading"),t.textContent="Loading...",this.shadowRoot.appendChild(t);const n=this._placeholder.dataset.videoId,o=this.getAttribute("title")||"Video";if(!this._videoService){const e=this._placeholder.dataset.service;this._videoService=VIDEO_SERVICES.get(e)||VIDEO_SERVICES.get("youtube")}const i=this._videoService.getEmbedUrl(n,this._videoParams,this),e=document.createElement("iframe");e.setAttribute("part","iframe"),e.loading="lazy",e.src=i,e.title=o,e.setAttribute("credentialless","");const a=this._videoService.getIframeAttributes(this);for(const[t,n]of Object.entries(a))e.setAttribute(t,n);const s=()=>t.parentNode?.removeChild(t);e.addEventListener("load",s,{once:!0}),this._handlers.set(`${e}|load`,s),this._placeholder.replaceWith(e),this._loaded=!0,this._placeholder=null,this.dispatchEvent(new CustomEvent("video-loaded",{bubbles:!0,detail:{videoId:n,service:this._videoService.name}}))}_setupStyleObserver(){if(this._styleObserver)return;if(this._styleObserver=new MutationObserver(()=>{this._updateAlignmentFromCSS()}),this._styleObserver.observe(this,{attributes:!0,attributeFilter:["style"]}),window.getComputedStyle){let e;const t=()=>{e=requestAnimationFrame(()=>{this._updateAlignmentFromCSS(),e=requestAnimationFrame(t)})};t(),this._styleFrameId=e}}_updateAlignmentFromCSS(){if(this.hasAttribute("container-fit"))return;const t=window.getComputedStyle(this),e=t.getPropertyValue("--lv-align").trim();this.classList.remove("lv-align-left","lv-align-right","lv-align-center"),e==="left"?this.classList.add("lv-align-left"):e==="right"?this.classList.add("lv-align-right"):e==="center"&&this.classList.add("lv-align-center")}}document.readyState==="loading"?document.addEventListener("DOMContentLoaded",()=>customElements.define("lazy-video",LazyVideo)):customElements.define("lazy-video",LazyVideo) \ No newline at end of file diff --git a/public/js/u.js b/public/js/u.js new file mode 100644 index 0000000..eb63eb2 --- /dev/null +++ b/public/js/u.js @@ -0,0 +1,30 @@ +window.addEventListener("load",function(){setTimeout(()=>{if(window.location.hash){let t=window.location.hash.substring(1),e=document.getElementById(t);e&&e.scrollIntoView({behavior:"smooth",block:"start"})}},135)}),("ontouchstart"in window||navigator.maxTouchPoints>0)&&window.addEventListener("touchstart",function e(){document.body.classList.add("no-hover"),window.removeEventListener("touchstart",e,!1)},!1),(()=>{let t=document.baseURI,e=document.querySelectorAll("a[href]:not(.eel)"),n=window.location.hostname;for(let o=0,i=e.length;o`;document.querySelectorAll("a[href]").forEach(e=>{const s=e.getAttribute("href");if(e.getAttribute("target")==="_blank"||s.startsWith("#")||s.startsWith("javascript:"))return;const n=document.createElement("div");n.className="link-arrow-container",n.innerHTML=t,e.appendChild(n)}),document.body.addEventListener("click",function(e){const t=e.target.closest("a[href]");if(!t)return;const n=t.getAttribute("href");if(t.getAttribute("target")==="_blank"||n.startsWith("#")||n.startsWith("javascript:"))return;if(e.ctrlKey||e.metaKey||e.shiftKey)return;const s=t.querySelector(".link-arrow-container");if(!s)return;e.preventDefault(),s.classList.add("animate"),setTimeout(()=>{window.location.href=n},100)}),window.addEventListener("pageshow",function(e){e.persisted&&document.querySelectorAll(".link-arrow-container.animate").forEach(e=>{e.classList.remove("animate")})})}),!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof 
define&&define.amd?define(["exports"],t):t(e.quicklink={})}(this,function(e){function a(e){return new Promise(function(t,n,s){(s=new XMLHttpRequest).open("GET",e,s.withCredentials=!0),s.onload=function(){200===s.status?t():n()},s.send()})}var o,l=(o=document.createElement("link")).relList&&o.relList.supports&&o.relList.supports("prefetch")?function(e){return new Promise(function(t,n,s){(s=document.createElement("link")).rel="prefetch",s.href=e,s.onload=t,s.onerror=n,document.head.appendChild(s)})}:a,d=window.requestIdleCallback||function(e){var t=Date.now();return setTimeout(function(){e({didTimeout:!1,timeRemaining:function(){return Math.max(0,50-(Date.now()-t))}})},1)},t=new Set,n=new Set,s=!1;function r(e){if(e){if(e.saveData)return new Error("Save-Data is enabled");if(/2g/.test(e.effectiveType))return new Error("network conditions are poor")}return!0}function i(e,o){var c=r(navigator.connection);return c instanceof Error?Promise.reject(new Error("Cannot prefetch, "+c.message)):(n.size>0&&!s&&console.warn("[Warning] You are using both prefetching and prerendering on the same document"),Promise.all([].concat(e).map(function(e){if(!t.has(e))return t.add(e),(o?function(e){return window.fetch?fetch(e,{credentials:"include"}):a(e)}:l)(new URL(e,location.href).toString())})))}function c(e){if(c=r(navigator.connection),c instanceof Error)return Promise.reject(new Error("Cannot prerender, "+c.message));if(!HTMLScriptElement.supports("speculationrules"))return i(e),Promise.reject(new Error("This browser does not support the speculation rules API. Falling back to prefetch."));if(document.querySelector('script[type="speculationrules"]'))return Promise.reject(new Error("Speculation Rules is already defined and cannot be altered."));for(var a,c,d,l=0,u=[].concat(e);l0&&!s&&console.warn("[Warning] You are using both prefetching and prerendering on the same document"),d=function(e){var t=document.createElement("script");t.type="speculationrules",t.text='{"prerender":[{"source": "list","urls": ["'+Array.from(e).join('","')+'"]}]}';try{document.head.appendChild(t)}catch(e){return e}return!0}(n),!0===d?Promise.resolve():Promise.reject(d)}e.listen=function(e){if(e||(e={}),window.IntersectionObserver){var r,l=function(e){e=e||1;var t=[],n=0;function s(){n0&&(t.shift()(),n++)}return[function(e){t.push(e)>1||s()},function(){n--,s()}]}(e.throttle||1/0),f=l[0],u=l[1],p=e.limit||1/0,h=e.origins||[location.hostname],g=e.ignores||[],v=e.delay||0,o=[],b=e.timeoutFn||d,a="function"==typeof e.hrefFn&&e.hrefFn,m=e.prerender||!1;return s=e.prerenderAndPrefetch||!1,r=new IntersectionObserver(function(l){l.forEach(function(l){if(l.isIntersecting)o.push((l=l.target).href),function(e,t){t?setTimeout(e,t):e()}(function(){-1!==o.indexOf(l.href)&&(r.unobserve(l),(s||m)&&n.size<1?c(a?a(l):l.href).catch(function(t){if(!e.onError)throw t;e.onError(t)}):t.size-1&&o.splice(d)}})},{threshold:e.threshold||0}),b(function(){(e.el||document).querySelectorAll("a").forEach(function(e){h.length&&!h.includes(e.hostname)||function e(t,n){return Array.isArray(n)?n.some(function(n){return 
e(t,n)}):(n.test||n).call(n,t.href,t)}(e,g)||r.observe(e)})},{timeout:e.timeout||2e3}),function(){t.clear(),r.disconnect()}}},e.prefetch=i,e.prerender=c}),quicklink.listen({origins:[],ignores:[e=>e.includes("caileb.com")&&e.includes("#"),e=>e.includes("gallery.caileb.com"),e=>e.includes("jellyfin.caileb.com"),e=>e.includes("archive.caileb.com"),e=>e.includes("music.caileb.com"),/\/api\/?/,/^api\./,e=>/\.(zip|tar|7z|rar|js|apk|xapk|woff2|tff|otf|pdf|mp3|mp4|wav|exe|msi|bat|deb|rpm|bin|dmg|iso|csv|log|sql|xml|key|odp|ods|pps|ppt|xls|doc|jpg|jpeg|jpe|jif|jfif|jfi|png|gif|webp|tif|psd|raw|arw|cr2|nrw|k25|bmp|dib|heif|heic|ind|indd|indt|jp2|j2k|jpf|jpx|jpm|mj2|svg|ai|eps)$/i.test(e),e=>/^(http|file|ftp|mailto|tel):/i.test(e)]}) \ No newline at end of file diff --git a/public/manifest.json b/public/manifest.json new file mode 100644 index 0000000..d934535 --- /dev/null +++ b/public/manifest.json @@ -0,0 +1 @@ +{"name":"Axis Dashboard","short_name":"Axis","start_url":"/","display":"standalone","background_color":"#121212","theme_color":"#9b59b6","icons":[{"src":"/images/favi.png","sizes":"256x256","type":"image/png"}]} diff --git a/public/static/datacenter-block.html b/public/static/datacenter-block.html new file mode 100644 index 0000000..f114363 --- /dev/null +++ b/public/static/datacenter-block.html @@ -0,0 +1,5 @@ + +Blocked +

Connections from within Datacenter IP ranges are blocked due to spam

+

Detected as: {{.ASNName}} + diff --git a/public/static/default-block.html b/public/static/default-block.html new file mode 100644 index 0000000..ffa4a2e --- /dev/null +++ b/public/static/default-block.html @@ -0,0 +1,6 @@ + + + + +Blocked +

Access Blocked

diff --git a/public/static/error.html b/public/static/error.html new file mode 100644 index 0000000..f9cad2a --- /dev/null +++ b/public/static/error.html @@ -0,0 +1,14 @@ + + + + +Error + + + + + + + + +

Something appears to have gone wrong.

diff --git a/public/static/india-block.html b/public/static/india-block.html new file mode 100644 index 0000000..4773b7a --- /dev/null +++ b/public/static/india-block.html @@ -0,0 +1,7 @@ + + + + +Access Restricted +Access Restricted + diff --git a/public/static/pow-interstitial.html b/public/static/pow-interstitial.html new file mode 100644 index 0000000..5c98d6c --- /dev/null +++ b/public/static/pow-interstitial.html @@ -0,0 +1,26 @@ + + + + +Security Checkpoint + + + + + + + + + + +
+
+

Security Checkpoint

+

Verifying your browser to protect from automated abuse. This may take a few seconds... +

+
+
Redirecting
+
+
+
+
diff --git a/public/webfonts/Poppins-Regular.woff2 b/public/webfonts/Poppins-Regular.woff2 new file mode 100644 index 0000000..4aae28c Binary files /dev/null and b/public/webfonts/Poppins-Regular.woff2 differ diff --git a/public/webfonts/Poppins-SemiBold.woff2 b/public/webfonts/Poppins-SemiBold.woff2 new file mode 100644 index 0000000..990f6f7 Binary files /dev/null and b/public/webfonts/Poppins-SemiBold.woff2 differ diff --git a/public/webfonts/fa-solid-900.woff2 b/public/webfonts/fa-solid-900.woff2 new file mode 100644 index 0000000..758dd4f Binary files /dev/null and b/public/webfonts/fa-solid-900.woff2 differ diff --git a/restart.sh b/restart.sh new file mode 100644 index 0000000..9a49085 --- /dev/null +++ b/restart.sh @@ -0,0 +1,125 @@ +#!/bin/bash +PID_FILE="server.pid" + +# --- Find Go Executable --- +GO_EXECUTABLE="" +echo "Attempting to locate Go executable..." +# Prioritize standard manual install location +if [[ -x "/usr/local/go/bin/go" ]]; then + GO_EXECUTABLE="/usr/local/go/bin/go" + echo "Found Go executable at: $GO_EXECUTABLE (standard location)" +else + # Fallback: Check if 'go' is somehow in the current (sudo) PATH + echo "Go not found in /usr/local/go/bin. Checking PATH..." + if command -v go &> /dev/null; then + # Verify the result of command -v is executable + potential_go=$(command -v go) + if [[ -x "$potential_go" ]]; then + GO_EXECUTABLE="$potential_go" + echo "Found Go executable in PATH at: $GO_EXECUTABLE" + else + echo "Found 'go' via command -v, but '$potential_go' is not executable." + fi + else + echo "'go' command not found in PATH either." + fi +fi + +# Check if we found a valid executable +if [[ -z "$GO_EXECUTABLE" ]]; then + echo "Error: Could not find a usable 'go' executable." >&2 # Error to stderr + echo "Checked /usr/local/go/bin/go and the PATH available to sudo." >&2 + echo "Ensure Go is installed correctly and accessible." >&2 + exit 1 +fi +# --- End Find Go Executable --- + + +# --- Kill existing process --- +if [ -f "$PID_FILE" ]; then + PID=$(cat "$PID_FILE") + if [[ "$PID" =~ ^[0-9]+$ ]]; then + echo "Attempting to kill process with PID: $PID" + # Check if process exists before trying to kill + if kill -0 "$PID" 2>/dev/null; then + kill "$PID" + sleep 0.5 # Give it a moment to shut down + # Verify kill (optional but good practice) + if kill -0 "$PID" 2>/dev/null; then + echo "Warning: Failed to kill process $PID. Trying kill -9..." + kill -9 "$PID" + sleep 0.5 + fi + + # Final check after kill attempts + if ! kill -0 "$PID" 2>/dev/null; then + echo "Process $PID killed." + else + echo "Error: Could not kill process $PID even with -9." >&2 + # Consider exiting if kill fails critically + # exit 1 + fi + else + echo "Process with PID $PID does not seem to be running." + fi + else + echo "Invalid PID '$PID' found in $PID_FILE. Removing stale file." + rm -f "$PID_FILE" # Use -f to avoid error if file gone + fi +else + echo "PID file '$PID_FILE' not found. Assuming server is not running." +fi + + +# --- Start the server (foreground, let app daemonize) --- +echo "Restarting server using: $GO_EXECUTABLE" # Show which executable is used + +# Run the command, capture its output (stdout & stderr) +OUTPUT_CAPTURE=$(mktemp) +# Ensure temp file is cleaned up on exit (normal or error) +trap 'echo "Cleaning up temp file: $OUTPUT_CAPTURE"; rm -f -- "$OUTPUT_CAPTURE"' EXIT + +# Execute the command, redirecting stdout and stderr to the temp file +# We also use 'tee' so the user sees the output in real-time +# Use 'if ! ...; then ... 
fi' structure for clarity on failure +if ! "$GO_EXECUTABLE" run main.go -p -b 2>&1 | tee "$OUTPUT_CAPTURE"; then + # This block executes if 'go run' exits with an error (non-zero status) + # This might happen if there's a compile error or immediate crash before daemonizing + echo "Error: '$GO_EXECUTABLE run' command failed with exit status $?. See output above and in $OUTPUT_CAPTURE" >&2 + # Optionally 'cat $OUTPUT_CAPTURE' here if tee might not have flushed + exit 1 +fi +# If the command succeeded (exit status 0), continue +echo "Go command finished (expected if daemonizing)." + + +# --- Extract PID from the application's captured output --- +# Look for the specific line the app prints using grep and extract the number +# -oP uses Perl-compatible regexes for lookbehind \K to get only the number +NEW_PID=$(grep -oP 'Server started in daemon mode with PID: \K[0-9]+' "$OUTPUT_CAPTURE") + + +# --- Verify and save PID --- +if [[ "$NEW_PID" =~ ^[0-9]+$ ]]; then + # Double-check if the extracted PID actually exists as a process + # Add a small delay in case the process needs a moment to stabilize after logging + sleep 0.2 + if kill -0 "$NEW_PID" 2>/dev/null; then + echo "Server successfully started and daemonized with PID: $NEW_PID" + echo "$NEW_PID" > "$PID_FILE" + echo "PID saved to $PID_FILE." + else + # This is a potential race condition or the app failed right after logging PID + echo "Error: Extracted PID $NEW_PID from logs, but process $NEW_PID is not running." >&2 + echo "The application might have failed immediately after starting." >&2 + echo "Check full output in $OUTPUT_CAPTURE" >&2 + # exit 1 # Decide if this is a fatal error + fi +else + echo "Error: Could not extract PID from server output in $OUTPUT_CAPTURE." >&2 + echo "Expected a line like 'Server started in daemon mode with PID: XXXX'" >&2 + # exit 1 # Decide if this is a fatal error +fi + +# Cleanup happens automatically via trap +echo "Restart script finished." \ No newline at end of file diff --git a/utils/minifier.go b/utils/minifier.go new file mode 100644 index 0000000..c421689 --- /dev/null +++ b/utils/minifier.go @@ -0,0 +1,547 @@ +package utils + +import ( + "crypto/sha512" + "encoding/base64" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "regexp" + "strings" + "sync" + "time" + + "github.com/tdewolff/minify/v2" + "github.com/tdewolff/minify/v2/css" + "github.com/tdewolff/minify/v2/html" + "github.com/tdewolff/minify/v2/js" +) + +// MinifierOptions configuration for the asset minification process +type MinifierOptions struct { + // Maximum number of goroutines to use for concurrent processing + MaxWorkers int + // Skip files that haven't changed since last run + SkipUnchanged bool + // Special file mappings (source path -> destination path) + SpecialMappings map[string]string + // Whether to remove comments from all file types + RemoveComments bool + // Whether to keep conditional comments (