Provide a look into the cache (#12)

That's the current state of my view into the cache. HTML, JSON and CSV formats are all available. It's far from perfect, but better than nothing at all.

Before I continue, I want to check if this is going in the right direction.

Co-authored-by: Lysander Trischler <twtxt@lyse.isobeef.org>
Reviewed-on: #12
Co-authored-by: lyse <lyse@noreply@mills.io>
Co-committed-by: lyse <lyse@noreply@mills.io>
master
lyse 1 month ago committed by James Mills
parent bc5e29d71b
commit adea938480
  1. 8
      go.mod
  2. 28
      go.sum
  3. 2
      internal/bitcask_store.go
  4. 2
      internal/config.go
  5. 7
      internal/context.go
  6. 222
      internal/handlers.go
  7. 16
      internal/models.go
  8. 5
      internal/options.go
  9. 22
      internal/server.go
  10. 186
      internal/stats.go
  11. 3
      internal/templates.go
  12. 47
      internal/templates/_partials.html
  13. 70
      internal/templates/feeds.html
  14. 10
      internal/templates/stats.html
  15. 13
      internal/types.go
  16. 53
      internal/utils.go
  17. 201
      internal/utils_test.go

@ -1,6 +1,6 @@
module git.mills.io/yarnsocial/yarns
go 1.17
go 1.18
require (
git.mills.io/prologic/bitcask v1.0.2
@ -12,7 +12,6 @@ require (
github.com/andreadipersio/securecookie v0.0.0-20131119095127-e3c3b33544ec
github.com/badgerodon/ioutil v0.0.0-20150716134133-06e58e34b867
github.com/blevesearch/bleve/v2 v2.3.4
github.com/creasty/defaults v1.6.0
github.com/dustin/go-humanize v1.0.0
github.com/go-mail/mail v2.3.1+incompatible
github.com/gocolly/colly/v2 v2.1.0
@ -34,7 +33,6 @@ require (
github.com/steambap/captcha v1.4.1
github.com/stretchr/testify v1.7.1
github.com/unrolled/logger v0.0.0-20201216141554-31a3694fe979
github.com/unrolled/render v1.5.0
github.com/vcraescu/go-paginator v1.0.0
github.com/wblakecaldwell/profiler v0.0.0-20150908040756-6111ef1313a1
github.com/writeas/slug v1.2.0
@ -74,7 +72,6 @@ require (
github.com/blevesearch/zapx/v15 v15.3.5 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/gofrs/flock v0.8.1 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
@ -90,6 +87,8 @@ require (
github.com/huandu/xstrings v1.3.2 // indirect
github.com/imdario/mergo v0.3.13 // indirect
github.com/jinzhu/gorm v1.9.16 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.1 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/kennygrant/sanitize v1.2.4 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.2 // indirect
@ -123,4 +122,5 @@ require (
gopkg.in/mail.v2 v2.3.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0 // indirect
gorm.io/gorm v1.20.6 // indirect
)

@ -170,12 +170,11 @@ github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwc
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creasty/defaults v1.6.0 h1:ltuE9cfphUtlrBeomuu8PEyISTXnxqkBIoQfXgv7BSc=
github.com/creasty/defaults v1.6.0/go.mod h1:iGzKe6pbEHnpMPtfDXZEr0NVxWnPTjb1bbDy08fPzYM=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/denisenkom/go-mssqldb v0.9.0 h1:RSohk2RsiZqLZ0zCjtfn3S4Gp4exhpBWHyQ7D0yGjAk=
github.com/denisenkom/go-mssqldb v0.9.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
@ -188,15 +187,13 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@ -211,6 +208,7 @@ github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-mail/mail v2.3.1+incompatible h1:UzNOn0k5lpfVtO31cK3hn6I4VEVGhe3lX8AJBAxXExM=
github.com/go-mail/mail v2.3.1+incompatible/go.mod h1:VPWjmmNyRsWXQZHVHT3g0YbIINUkSmuKOiLIDkWbL6M=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
@ -227,6 +225,7 @@ github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRx
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
@ -365,8 +364,10 @@ github.com/jawher/mow.cli v1.1.0/go.mod h1:aNaQlc7ozF3vw6IJ2dHjp2ZFiA4ozMIYY6Pyu
github.com/jinzhu/gorm v1.9.2/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo=
github.com/jinzhu/gorm v1.9.16 h1:+IyIjPEABKRpsu/F8OvDPy9fyQlgsg2luMV2ZIH5i5o=
github.com/jinzhu/gorm v1.9.16/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.1 h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=
github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
@ -403,6 +404,7 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.8.0 h1:9xohqzkUwzR4Ga4ivdTcawVS89YSDVxXMa3xJX3cGzg=
github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
@ -586,8 +588,6 @@ github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGr
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/unrolled/logger v0.0.0-20201216141554-31a3694fe979 h1:47+K4wN0S8L3fUwgZtPEBIfNqtAE3tUvBfvHVZJAXfg=
github.com/unrolled/logger v0.0.0-20201216141554-31a3694fe979/go.mod h1:X5DBNY1yIVkuLwJP3BXlCoQCa5mGg7hPJPIA0Blwc44=
github.com/unrolled/render v1.5.0 h1:uNTHMvVoI9pyyXfgoDHHycIqFONNY2p4eQR9ty+NsxM=
github.com/unrolled/render v1.5.0/go.mod h1:eLTosBkQqEPEk7pRfkCRApXd++lm++nCsVlFOHpeedw=
github.com/vcraescu/go-paginator v1.0.0 h1:ilNmRhlgG8N44LuxfGoPI2u8guXMA6gUqaPGA5BmRFs=
github.com/vcraescu/go-paginator v1.0.0/go.mod h1:caZCjjt2qcA1O2aDzW7lwAcK4Rxw3LNvdEVF/ONxZWw=
github.com/wblakecaldwell/profiler v0.0.0-20150908040756-6111ef1313a1 h1:Dz/PRieZRmOhDfOlkVpY1LYYIfNoTJjlDirAlagOr0s=
@ -624,16 +624,8 @@ go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
go.yarn.social/lextwt v0.0.0-20221014141744-e8a56f9c44bc h1:nkJ8EzXruW2ESXan4ERrnMXY9/G5bt5nXR8/c++2ESU=
go.yarn.social/lextwt v0.0.0-20221014141744-e8a56f9c44bc/go.mod h1:bItAqF9uEAH/1hpGfarJyRiRVnivnxDjaF0nvnJHApo=
go.yarn.social/lextwt v0.0.0-20221016033946-853ef218eaeb h1:P1JiCX+0+LLHbRGVwqMa2m+EFcz3h4w/x51RMCYn/W8=
go.yarn.social/lextwt v0.0.0-20221016033946-853ef218eaeb/go.mod h1:/nhT00bZa6tzZJtt8VFL4InCKZH5GU1p0Gfky6Jwo0I=
go.yarn.social/lextwt v0.0.0-20221016042252-9f5611a32b9c h1:790OZRtyut7Jd+fHEiu4po+mCBSKiCSNag+lpJZdx9U=
go.yarn.social/lextwt v0.0.0-20221016042252-9f5611a32b9c/go.mod h1:/nhT00bZa6tzZJtt8VFL4InCKZH5GU1p0Gfky6Jwo0I=
go.yarn.social/types v0.0.0-20220217050814-23717f1409c2/go.mod h1:XN+G4HprNn/Gp7OF2zveqsCRSWFCHtOaIRh2GlcK+U4=
go.yarn.social/types v0.0.0-20221014141433-b15403e2c5ed h1:R6ROTOaBQplLxZTVDXvYf1hytAOZ7a3QWNl+sRCuHyk=
go.yarn.social/types v0.0.0-20221014141433-b15403e2c5ed/go.mod h1:XN+G4HprNn/Gp7OF2zveqsCRSWFCHtOaIRh2GlcK+U4=
go.yarn.social/types v0.0.0-20221016033828-693a59a7d79e h1:yMuAGkjUCp/AqejuFn7G0hRZFHEgaFCTJviPX8D4n5o=
go.yarn.social/types v0.0.0-20221016033828-693a59a7d79e/go.mod h1:XN+G4HprNn/Gp7OF2zveqsCRSWFCHtOaIRh2GlcK+U4=
go.yarn.social/types v0.0.0-20221016041921-542ddbb4e9e4 h1:8pcSwDKV1YiTmls7atdKLJMHFmQ6xregNygiKsf64Qs=
go.yarn.social/types v0.0.0-20221016041921-542ddbb4e9e4/go.mod h1:XN+G4HprNn/Gp7OF2zveqsCRSWFCHtOaIRh2GlcK+U4=
@ -650,7 +642,6 @@ golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPh
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20221012134737-56aed061732a h1:NmSIgad6KjE6VvHciPZuNRTKxGhlPfD6OA87W/PLkqg=
golang.org/x/crypto v0.0.0-20221012134737-56aed061732a/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -836,13 +827,10 @@ golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20221013171732-95e765b1cc43 h1:OK7RB6t2WQX54srQQYSXMW8dF5C6/8+oA/s5QBmmto4=
golang.org/x/sys v0.0.0-20221013171732-95e765b1cc43/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
@ -1053,8 +1041,10 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0 h1:hjy8E9ON/egN1tAYqKb61G10WtihqetD4sz2H+8nIeA=
gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/sqlite v1.1.3 h1:BYfdVuZB5He/u9dt4qDpZqiqDJ6KhPqs5QUqsr/Eeuc=
gorm.io/driver/sqlite v1.1.3/go.mod h1:AKDgRWk8lcSQSw+9kxCJnX/yySj8G3rdwYlU57cB45c=
gorm.io/gorm v1.20.1/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
gorm.io/gorm v1.20.6 h1:qa7tC1WcU+DBI/ZKMxvXy1FcrlGsvxlaKufHrT2qQ08=
gorm.io/gorm v1.20.6/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

@ -122,7 +122,7 @@ func (bs *BitcaskStore) GetOrSetURL(uri string) (*URL, bool, error) {
return url, false, err
}
url := NewURL(uri)
url := &URL{URL: uri}
return url, true, bs.SetURL(hash, url)
}

@ -34,6 +34,8 @@ type Config struct {
SearchPrompts []string
ResultsPerPage int
FeedsPerPage int
SessionExpiry time.Duration
SessionCacheTTL time.Duration

@ -5,9 +5,8 @@ import (
"net/http"
"time"
"github.com/vcraescu/go-paginator"
"github.com/justinas/nosurf"
"github.com/vcraescu/go-paginator"
"git.mills.io/yarnsocial/yarns"
"go.yarn.social/types"
@ -56,6 +55,10 @@ type Context struct {
BrokenFeeds int64
IndexedFeeds int64
DiscoveredFeeds int64
FeedsTitle string
Feeds []*URL
FeedsPagerURL func(key string, values ...interface{}) template.HTMLAttr
CachedURL string
CachedTitle string

@ -2,10 +2,12 @@ package internal
import (
"embed"
"encoding/csv"
"fmt"
"html/template"
"image/png"
"net/http"
"net/url"
"os"
"path/filepath"
"strings"
@ -20,8 +22,8 @@ import (
"github.com/julienschmidt/httprouter"
"github.com/securisec/go-keywords"
log "github.com/sirupsen/logrus"
"github.com/unrolled/render"
"github.com/vcraescu/go-paginator"
"github.com/vcraescu/go-paginator/adapter"
"go.yarn.social/types"
"golang.org/x/text/cases"
"golang.org/x/text/language"
@ -93,14 +95,11 @@ func (s *Server) PageHandler(name string) httprouter.Handle {
html := markdown.ToHTML(content, p, renderer)
var title string
if frontmatter.Title != "" {
title = frontmatter.Title
ctx.Title = frontmatter.Title
} else {
title = caser.String(name)
ctx.Title = caser.String(name)
}
ctx.Title = title
ctx.Page = name
ctx.Content = template.HTML(html)
@ -170,8 +169,6 @@ func (s *Server) AddHandler() httprouter.Handle {
// SearchHandler ...
func (svr *Server) SearchHandler() httprouter.Handle {
r := render.New()
return func(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {
ctx := NewContext(svr.config, svr.db, req)
@ -224,7 +221,7 @@ func (svr *Server) SearchHandler() httprouter.Handle {
TotalTwts: pager.Nums(),
},
}
r.JSON(w, http.StatusOK, ctx)
svr.json(w, http.StatusOK, ctx)
return
}
@ -314,8 +311,6 @@ func (s *Server) PermalinkHandler() httprouter.Handle {
// StatsHandler ...
func (s *Server) StatsHandler() httprouter.Handle {
r := render.New()
return func(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {
ctx := NewContext(s.config, s.db, req)
@ -326,7 +321,7 @@ func (s *Server) StatsHandler() httprouter.Handle {
}
if req.Header.Get("Accept") == "application/json" {
r.JSON(w, http.StatusOK, s.stats)
s.json(w, http.StatusOK, s.stats)
return
}
@ -342,6 +337,209 @@ func (s *Server) StatsHandler() httprouter.Handle {
}
}
// FeedsHandler serves the list of feed URLs belonging to one category
// (the ":urlsType" route parameter, e.g. "discovered", "indexed", "broken").
// The response format is negotiated via the Accept request header:
// "application/json" and "text/csv" are handled explicitly, everything else
// falls through to the HTML "feeds" template. Supported query parameters:
// q (case-insensitive URL substring filter), s (sort criteria, repeatable),
// p (page number), l (page size, or the literal "unlimited").
//
// NOTE(review): any non-empty urlsType passes the guard below; an unknown
// category simply yields an empty result set from FindFeeds — confirm that
// this is intended rather than a 404.
func (srv *Server) FeedsHandler() httprouter.Handle {
	return func(w http.ResponseWriter, req *http.Request, params httprouter.Params) {
		ctx := NewContext(srv.config, srv.db, req)
		urlsType := URLsType(params.ByName("urlsType"))
		if urlsType == "" {
			srv.NotFoundHandler(w, req)
			return
		}
		// Remember filter and sort criteria in the context so the HTML view
		// can render them back into the filter form and sort links.
		ctx.SearchQuery = req.FormValue("q")
		ctx.SearchSort = req.URL.Query()["s"]
		urls, err := srv.stats.FindFeeds(srv.db, urlsType,
			QueryFeedsBy(ctx.SearchQuery), SortFeedsBy(ctx.SearchSort))
		if err != nil {
			log.WithError(err).Error("error performing feed search")
			ctx.Error = true
			ctx.Message = fmt.Sprintf("Error %s", err.Error())
			srv.render("error", w, ctx)
			return
		}
		// Page the URLs if not explicitly requested to not limit them.
		// Returning all URLs in one big chunk is most useful for CSV and JSON
		// output formats. However, it might be problematic if the indexed set
		// of URLs is very large.
		//
		// TODO Maybe this should only be allowed for admins. But currently,
		// there is no user concept in yarns. So instead it could be restricted
		// to requests from localhost or something like this.
		var pager paginator.Paginator
		if limit := req.FormValue("l"); limit == "unlimited" {
			// Single pseudo-page spanning the whole result set; urls is
			// served unpaged.
			urlsCount := len(urls)
			pager = paginator.New(IntAdapter{urlsCount}, urlsCount)
		} else {
			pager = paginator.New(
				adapter.NewSliceAdapter(urls),
				SafeParseInt(limit, srv.config.FeedsPerPage))
			pager.SetPage(SafeParseInt(req.FormValue("p"), 1))
			var pagedURLs []*URL
			if err := pager.Results(&pagedURLs); err != nil {
				log.WithError(err).Error("error paging URLs")
				http.Error(w, "Internal Server Error", http.StatusInternalServerError)
				return
			}
			// From here on only the current page is processed/rendered.
			urls = pagedURLs
			// Advertise neighboring pages via "Link" response headers so
			// API clients can follow pagination without parsing the body.
			addLinkResponseHeader := func(pageFunc func() (int, error), rel string) {
				page, err := pageFunc()
				if err != nil {
					return
				}
				url := srv.urlForFeedsPager(req, "p", page)
				w.Header().Add("Link", fmt.Sprintf(`<%s>; rel="%s"`, url, rel))
			}
			if pager.HasPrev() {
				addLinkResponseHeader(pager.PrevPage, "prev")
			}
			if pager.HasNext() {
				addLinkResponseHeader(pager.NextPage, "next")
			}
		}
		ctx.Pager = &pager
		switch req.Header.Get("Accept") {
		case "application/json":
			// JSON output: map keyed by feed URL with per-feed statistics.
			res := make(map[string]FeedResponse, len(urls))
			for _, url := range urls {
				res[url.URL] = FeedResponse{
					Successes:     url.Success,
					Failures:      url.Failure,
					DiscoveredAt:  NonZeroRFC3339MillisUTC(url.DiscoveredAt),
					LastScrapedAt: NonZeroRFC3339MillisUTC(url.LastScrapedAt),
					LastUpdated:   url.LastUpdated,
					LastError:     url.LastError,
					FetchAvg:      url.FetchAvg,
					NewAvg:        url.NewAvg,
					Dead:          url.Dead,
				}
			}
			srv.json(w, http.StatusOK, res)
			return
		case "text/csv":
			// CSV download: derive a filename, including page info when the
			// result set spans more than one page.
			var filename string
			if numPages := pager.PageNums(); numPages > 1 {
				filename = fmt.Sprintf("%s-%d-%d.csv", urlsType, pager.Page(), numPages)
			} else {
				filename = fmt.Sprintf("%s.csv", urlsType)
			}
			headers := w.Header()
			headers.Add("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, filename))
			headers.Add("Content-Type", "text/csv; charset=UTF-8")
			writer := csv.NewWriter(w)
			// Header row first; on write errors the response is already
			// partially sent, so all we can do is log and abort.
			if err := writer.Write([]string{
				"Feed URL",
				"Successes",
				"Failures",
				"Discovered At",
				"Last Scraped At",
				"Last Updated At",
				"Last Error",
				"Fetch Avg",
				"New Avg",
			}); err != nil {
				log.WithError(err).Errorf("error writing CSV header")
				return
			}
			for i, url := range urls {
				if err := writer.Write([]string{
					url.URL,
					fmt.Sprintf("%d", url.Success),
					fmt.Sprintf("%d", url.Failure),
					NonZeroRFC3339MillisUTC(url.DiscoveredAt),
					NonZeroRFC3339MillisUTC(url.LastScrapedAt),
					url.LastUpdated,
					url.LastError,
					fmt.Sprintf("%.2f", url.FetchAvg),
					fmt.Sprintf("%.2f", url.NewAvg),
				}); err != nil {
					log.WithError(err).Errorf("error writing CSV record #%d", i)
					return
				}
			}
			writer.Flush()
			if err := writer.Error(); err != nil {
				log.WithError(err).Errorf("error flushing CSV")
				return
			}
			return
		}
		// Default: render the HTML view. FeedsPagerURL lets templates build
		// pager/sort links that preserve the current query parameters.
		ctx.FeedsTitle = urlsType.Title()
		ctx.Feeds = urls
		ctx.FeedsPagerURL = func(key string, values ...interface{}) template.HTMLAttr {
			url := srv.urlForFeedsPager(req, key, values...)
			return template.HTMLAttr("?" + url.RawQuery)
		}
		srv.render("feeds", w, ctx)
	}
}
// urlForFeedsPager builds a pager/sort URL based on the current request,
// applying the given key with the given values to the existing query string.
// "q", "l" and "p" are single-valued; "s" toggles sort criteria (adding a new
// criterion, or removing it when already present). Default values are
// stripped so canonical URLs stay short. Unsupported keys or value types are
// programmer errors and panic.
func (srv *Server) urlForFeedsPager(req *http.Request, key string, values ...interface{}) *url.URL {
	args := make([]string, len(values))
	for i, raw := range values {
		switch v := raw.(type) {
		case string:
			args[i] = v
		case int:
			args[i] = fmt.Sprintf("%d", v)
		default:
			panic(fmt.Errorf("unsupported value type %T %v", raw, raw))
		}
	}
	query := req.URL.Query()
	switch key {
	case "q", "l", "p":
		// single-valued parameters: take only the first value, and only
		// when it is non-empty
		if len(args) > 0 && args[0] != "" {
			query.Set(key, args[0])
		}
	case "s":
		// when changing the sorting criteria, we most likely want to go to the
		// first page of that newly sorted table
		query.Del("p")
		// Try to support multiple sorting criteria: using the same sorting key
		// again removes it from the list.
		for _, criterion := range args {
			if criterion == "" {
				continue
			}
			idx := FindIndex(query["s"], criterion)
			if idx == -1 {
				query.Add(key, criterion)
			} else {
				query["s"] = DeleteIndex(query["s"], idx)
			}
		}
	default:
		panic(fmt.Errorf("unsupported key '%s'", key))
	}
	// Finally, normalize the final query at the end. If we would only do it
	// for new key value pairs in the switch above, that would not catch a
	// user-requested base URL that contains default values.
	if query.Has("q") && query.Get("q") == "" {
		query.Del("q")
	}
	if query.Has("p") && query.Get("p") == "1" {
		query.Del("p")
	}
	if query.Has("l") && query.Get("l") == fmt.Sprintf("%d", srv.config.FeedsPerPage) {
		query.Del("l")
	}
	return req.URL.ResolveReference(&url.URL{RawQuery: query.Encode(), ForceQuery: true})
}
// AvatarHandler ...
func (s *Server) AvatarHandler() httprouter.Handle {
return func(w http.ResponseWriter, r *http.Request, p httprouter.Params) {

@ -4,9 +4,6 @@ import (
"encoding/json"
"fmt"
"time"
"github.com/creasty/defaults"
log "github.com/sirupsen/logrus"
)
const (
@ -33,21 +30,8 @@ type URL struct {
Dead bool
}
// NewURL ...
func NewURL(url string) *URL {
u := &URL{URL: url}
if err := defaults.Set(u); err != nil {
log.WithError(err).Error("error creating new URL object")
}
return u
}
func LoadURL(data []byte) (u *URL, err error) {
u = &URL{}
if err := defaults.Set(u); err != nil {
return nil, err
}
if err = json.Unmarshal(data, &u); err != nil {
return nil, err
}

@ -45,6 +45,10 @@ const (
// DefaultResultsPerPage is the server's default results per page to display
DefaultResultsPerPage = 10
// DefaultFeedsPerPage is the server's default number of feeds to display
// per page in the statistics views.
DefaultFeedsPerPage = 20 // TODO increase to 100 or so
// DefaultSessionCacheTTL is the server's default session cache ttl
DefaultSessionCacheTTL = 1 * time.Hour
@ -96,6 +100,7 @@ func NewConfig() *Config {
SearchPrompts: DefaultSearchPrompts,
ResultsPerPage: DefaultResultsPerPage,
FeedsPerPage: DefaultFeedsPerPage,
SessionExpiry: DefaultSessionExpiry,

@ -3,6 +3,7 @@ package internal
import (
"context"
"embed"
"encoding/json"
"fmt"
"io/fs"
"net/http"
@ -91,6 +92,26 @@ func (s *Server) render(name string, w http.ResponseWriter, ctx *Context) {
}
}
// json marshals response and writes it to w as an application/json body with
// the given HTTP status code. On marshalling failure a plain 500 error
// response is sent instead. Write errors after the headers have gone out can
// only be logged.
func (s *Server) json(w http.ResponseWriter, statusCode int, response interface{}) {
	data, err := json.Marshal(response)
	if err != nil {
		// fixed: typo "parshalling" and Errorf without format verbs
		log.WithError(err).Error("error marshalling JSON")
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json; charset=utf-8")
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(data)))
	w.WriteHeader(statusCode)
	if _, err := w.Write(data); err != nil {
		// besides logging there's probably nothing else we can do about it,
		// chances are that trying to send another response won't work either,
		// since this is most likely going to be a communication error
		log.WithError(err).Error("error sending JSON response")
	}
}
// AddRouter ...
func (s *Server) AddRoute(method, path string, handler http.Handler) {
s.router.Handler(method, path, handler)
@ -391,6 +412,7 @@ func (s *Server) initRoutes() {
s.router.POST("/add", s.AddHandler())
s.router.GET("/stats", s.StatsHandler())
s.router.GET("/stats/feeds/:urlsType", s.FeedsHandler())
// Gopher / Gemini Proxy
s.router.GET("/proxy", s.ProxyHandler())

@ -1,9 +1,73 @@
package internal
import (
"encoding/json"
"fmt"
"sort"
"strings"
"sync"
)
// URLsType identifies one of the feed URL categories shown in the stats view.
type URLsType string

const (
	DiscoveredFeedURLs URLsType = "discovered"
	IndexedFeedURLs    URLsType = "indexed"
	BrokenFeedURLs     URLsType = "broken"
	EmptyFeedURLs      URLsType = "empty"
	DeadFeedURLs       URLsType = "dead"
)

// Title returns a human-readable heading for the category by capitalizing its
// first letter, e.g. "Discovered Feeds". An empty URLsType yields just
// "Feeds" instead of panicking on the out-of-range slice.
func (ut URLsType) Title() string {
	s := string(ut)
	if s == "" {
		return "Feeds"
	}
	return fmt.Sprintf("%s%s Feeds", strings.ToUpper(s[0:1]), s[1:])
}
// FeedSearchOptions bundles the filter query and sort criteria applied when
// searching feed URLs.
type FeedSearchOptions struct {
	Sort  []string
	Query string
}

// Matcher returns a predicate reporting whether a given URL satisfies the
// search query. With an empty query every URL matches; otherwise matching is
// a case-insensitive substring test against the feed URL.
func (o FeedSearchOptions) Matcher() func(*URL) bool {
	if o.Query == "" {
		// no search criteria matches everything
		return func(*URL) bool { return true }
	}
	needle := strings.ToLower(o.Query)
	return func(u *URL) bool {
		return strings.Contains(strings.ToLower(u.URL), needle)
	}
}
// NewDefaultFeedSearchOptions returns options that match every feed and sort
// by URL in ascending order.
func NewDefaultFeedSearchOptions() *FeedSearchOptions {
	return &FeedSearchOptions{Sort: []string{"url"}}
}

// FeedSearchOption mutates a FeedSearchOptions value (functional option).
type FeedSearchOption func(opts *FeedSearchOptions) error

// SortFeedsBy overrides the default sort criteria; a nil slice keeps the
// defaults untouched.
func SortFeedsBy(sort []string) FeedSearchOption {
	return func(opts *FeedSearchOptions) error {
		if sort == nil {
			return nil
		}
		opts.Sort = sort
		return nil
	}
}

// QueryFeedsBy sets the filter query; an empty string keeps the default
// (match everything).
func QueryFeedsBy(query string) FeedSearchOption {
	return func(opts *FeedSearchOptions) error {
		if query == "" {
			return nil
		}
		opts.Query = query
		return nil
	}
}
type Stats struct {
sync.RWMutex
@ -58,6 +122,128 @@ func (s *Stats) Update(db Store, indexer Indexer) error {
return nil
}
// less is a comparison function over indices of the urls slice being sorted;
// it reports whether element i orders before element j.
type less func(i, j int) bool

// FindFeeds collects all URLs of the given category from the store, filtered
// and sorted according to the supplied search options. Categories partition
// as follows: "discovered" contains every URL; scraped URLs are "indexed"
// (NewAvg > 0) or "empty"; never-scraped URLs are "dead" or "broken"
// depending on the Dead flag. An unknown urlsType yields a nil result.
// Returns an error for an invalid sort criterion or a failing option.
func (s *Stats) FindFeeds(db Store, urlsType URLsType, opts ...FeedSearchOption) ([]*URL, error) {
	o := NewDefaultFeedSearchOptions()
	for _, opt := range opts {
		if err := opt(o); err != nil {
			return nil, fmt.Errorf("error applying feed search option: %w", err)
		}
	}
	matches := o.Matcher()
	var urls []*URL
	// NOTE(review): the return value of ForEachURL is discarded here — if the
	// store can report iteration errors they are silently dropped; confirm
	// the Store contract and propagate the error if there is one.
	db.ForEachURL(func(url *URL) error {
		if !matches(url) {
			return nil
		}
		// Bucket the URL into its category; only append when it belongs to
		// the requested urlsType.
		if urlsType == DiscoveredFeedURLs {
			urls = append(urls, url)
		} else if !url.LastScrapedAt.IsZero() {
			if url.NewAvg > 0 {
				if urlsType == IndexedFeedURLs {
					urls = append(urls, url)
				}
			} else {
				if urlsType == EmptyFeedURLs {
					urls = append(urls, url)
				}
			}
		} else {
			if url.Dead {
				if urlsType == DeadFeedURLs {
					urls = append(urls, url)
				}
			} else {
				if urlsType == BrokenFeedURLs {
					urls = append(urls, url)
				}
			}
		}
		return nil
	})
	//
	// finally sort the result
	//
	// determine sorting functions by supplied criteria
	// (a leading "-" selects descending order for that field)
	var l []less
	for _, sort := range o.Sort {
		switch sort {
		case "url":
			l = append(l, func(i, j int) bool { return urls[i].URL < urls[j].URL })
		case "-url":
			l = append(l, func(i, j int) bool { return urls[i].URL > urls[j].URL })
		case "successes":
			l = append(l, func(i, j int) bool { return urls[i].Success < urls[j].Success })
		case "-successes":
			l = append(l, func(i, j int) bool { return urls[i].Success > urls[j].Success })
		case "failures":
			l = append(l, func(i, j int) bool { return urls[i].Failure < urls[j].Failure })
		case "-failures":
			l = append(l, func(i, j int) bool { return urls[i].Failure > urls[j].Failure })
		case "discovered":
			l = append(l, func(i, j int) bool { return urls[i].DiscoveredAt.Before(urls[j].DiscoveredAt) })
		case "-discovered":
			l = append(l, func(i, j int) bool { return urls[i].DiscoveredAt.After(urls[j].DiscoveredAt) })
		case "scraped":
			l = append(l, func(i, j int) bool { return urls[i].LastScrapedAt.Before(urls[j].LastScrapedAt) })
		case "-scraped":
			l = append(l, func(i, j int) bool { return urls[i].LastScrapedAt.After(urls[j].LastScrapedAt) })
		case "updated":
			l = append(l, func(i, j int) bool { return urls[i].LastUpdated < urls[j].LastUpdated })
		case "-updated":
			l = append(l, func(i, j int) bool { return urls[i].LastUpdated > urls[j].LastUpdated })
		case "error":
			l = append(l, func(i, j int) bool { return urls[i].LastError < urls[j].LastError })
		case "-error":
			l = append(l, func(i, j int) bool { return urls[i].LastError > urls[j].LastError })
		case "fetchavg":
			l = append(l, func(i, j int) bool { return urls[i].FetchAvg < urls[j].FetchAvg })
		case "-fetchavg":
			l = append(l, func(i, j int) bool { return urls[i].FetchAvg > urls[j].FetchAvg })
		case "newavg":
			l = append(l, func(i, j int) bool { return urls[i].NewAvg < urls[j].NewAvg })
		case "-newavg":
			l = append(l, func(i, j int) bool { return urls[i].NewAvg > urls[j].NewAvg })
		default:
			return nil, fmt.Errorf("invalid sorting '%s'", sort)
		}
	}
	// Multi-key sort: criteria are tried in order; later ones only break ties.
	// NOTE(review): if o.Sort is an empty non-nil slice (SortFeedsBy with
	// []string{}), l is empty and l[k] below would panic once len(urls) > 1 —
	// confirm callers can never pass an empty criteria slice.
	sort.Slice(urls, func(i, j int) bool {
		// try all but the last criterion
		var k int
		for k = 0; k < len(l)-1; k++ {
			lessThan := l[k]
			if lessThan(i, j) {
				// first < second, so we have a final decision
				return true
			}
			if lessThan(j, i) {
				// first > second, so we have a final decision
				return false
			}
			// first == second, so try the next criterion
		}
		// all criteria so far said first == second, so just return whatever
		// the final comparison reports
		return l[k](i, j)
	})
	return urls, nil
}
// Bytes returns the statistics serialized as JSON. The read lock is held for
// the duration of the encoding so a concurrent Update cannot produce a torn
// snapshot.
func (s *Stats) Bytes() ([]byte, error) {
	s.RLock()
	defer s.RUnlock()
	data, err := json.Marshal(s)
	return data, err
}
func (s *Stats) IndexedTwts() int64 {
s.RLock()
defer s.RUnlock()

@ -75,12 +75,13 @@ func NewTemplateManager(conf *Config) (*TemplateManager, error) {
funcMap["time"] = humanize.Time
funcMap["hostnameFromURL"] = HostnameFromURL
funcMap["isLocalURL"] = IsLocalURLFactory(conf)
funcMap["formatForDateTime"] = FormatForDateTime
funcMap["formatFeedURL"] = FeedURLFactory(conf)
funcMap["formatTwt"] = FormatTwtFactory(conf)
funcMap["urlForConv"] = URLForConvFactory(conf)
funcMap["findIndex"] = FindIndex[string]
m := &TemplateManager{debug: conf.Debug, templates: templates, funcMap: funcMap}
if err := m.LoadTemplates(); err != nil {

@ -25,6 +25,43 @@
</nav>
{{ end }}
{{ define "feeds-pager" }}
<nav class="pagination-nav">
<ul>
<li>
{{ if $.Pager.HasPrev }}
<a href="{{ call $.FeedsPagerURL "p" $.Pager.PrevPage }}" title="Previous page">Prev</a>
{{ else }}
<span title="No more results">Prev</span>
{{ end }}
</li>
</ul>
<ul>
<li><small>{{ $.Pager.Nums }} results | Page {{ $.Pager.Page }} of {{ $.Pager.PageNums }}</small></li>
</ul>
<ul>
<li>
{{ if $.Pager.HasNext }}
<a href="{{ call $.FeedsPagerURL "p" $.Pager.NextPage }}" title="Next page">Next</a>
{{ else }}
<span title="No more results">Next</span>
{{ end }}
</li>
</ul>
</nav>
{{ end }}
{{ define "feeds-sort-links" }}
{{- $reverseSortField := printf "%s%s" "-" $.SortField -}}
{{- $searchIndex := findIndex $.Ctx.SearchSort $.SortField -}}
{{- $reverseSearchIndex := findIndex $.Ctx.SearchSort $reverseSortField -}}
<!-- Please note the non-breaking space between the links! -->
<a href="{{ call $.Ctx.FeedsPagerURL "s" $.SortField }}"
title="{{ if eq $searchIndex -1 }}Sort{{ else }}Reset sorting{{ end }} by {{ $.SortName }} in ascending order">{{ if eq $searchIndex -1 }}โ†‘{{ else }}โ‡‘({{ add $searchIndex 1 }}){{ end }}</a>ย <a
href="{{ call $.Ctx.FeedsPagerURL "s" $reverseSortField }}"
title="{{ if eq $reverseSearchIndex -1 }}Sort{{ else }}Reset sorting{{ end}} by {{ $.SortName }} in descending order">{{ if eq $reverseSearchIndex -1 }}โ†“{{ else }}โ‡“({{ add $reverseSearchIndex 1 }}){{ end }}</a>
{{ end }}
{{ define "search" }}
<div class="container">
<form action="/search" method="GET">
@ -98,8 +135,8 @@
</a>
<div class="publish-time">
<a class="u-url" href="/twt/{{ $.ID }}">
<time class="dt-published" datetime="{{ $.Created | date " 2006-01-02T15:04:05Z07:00" }}">
{{ dateInZone ($.Created | formatForDateTime) $.Created "UTC" }}
<time class="dt-published" datetime="{{ $.Created | date " 2006-01-02T15:04:05Z07:00" }}" title="{{ $.Created | date "2006-01-02 15:04:05 Z07:00" }}">
{{ dateInZone "Mon, 2 Jan 2006 15:04 Z07:00" $.Created "UTC" }}
</time>
</a>
<span> &nbsp;({{ $.Created | time }})</span>
@ -177,8 +214,8 @@
</a>
<div class="publish-time">
<a class="u-url" href="/twt/{{ $.Twt.Hash }}">
<time class="dt-published" datetime="{{ $.Twt.Created | date " 2006-01-02T15:04:05Z07:00" }}">
{{ dateInZone ($.Twt.Created | formatForDateTime) $.Twt.Created "UTC" }}
<time class="dt-published" datetime="{{ $.Twt.Created | date " 2006-01-02T15:04:05Z07:00" }}" title="{{ $.Created | date "2006-01-02 15:04:05 Z07:00" }}">
{{ dateInZone "Mon, 2 Jan 2006 15:04 Z07:00" $.Twt.Created "UTC" }}
</time>
</a>
<span> &nbsp;({{ $.Twt.Created | time }})</span>
@ -196,4 +233,4 @@
</ul>
</nav>
</article>
{{ end }}
{{ end }}

@ -0,0 +1,70 @@
{{define "content"}}
{{/*
content renders the cache feeds listing: a URL substring filter form and a
paginated, per-column sortable table of all known feeds with their fetch
statistics. NOTE(review): expects the page context to provide FeedsTitle,
Pager, SearchQuery and Feeds -- confirm against the stats handlers.

FIXES: the table body was opened twice instead of being closed
(second "<tbody>" changed to "</tbody>"), and the filter form used the
nonexistent attribute type="get" -- the HTTP verb is selected with
method="get" (GET also keeps the filter URL bookmarkable).
*/}}
<article>
<div>
<hgroup>
<h2>{{ $.FeedsTitle }} ({{ $.Pager.Nums }})</h2>
</hgroup>
<form method="get">
<label>Filter by case-insensitive URL substring:
<input type="text" name="q" value="{{ $.SearchQuery }}" />
</label>
<input type="submit" value="Filter" />
</form>
{{ template "feeds-pager" . }}
<table>
<thead>
<tr>
<th>Feed URL
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "url" "SortName" "feed URL") }}
</th>
<th>Successes
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "successes" "SortName" "number of successful fetch and parse attempts") }}
</th>
<th>Failures
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "failures" "SortName" "number of failed fetch and parse attempts") }}
</th>
<th>Discovered At
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "discovered" "SortName" "feed discovery timestamp") }}
</th>
<th>Last Scraped At
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "scraped" "SortName" "last scraped timestamp") }}
</th>
<th>Last Updated At
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "updated" "SortName" "last updated timestamp") }}
</th>
<th>Last Error
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "error" "SortName" "last error") }}
</th>
<th>Fetch Avg
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "fetchavg" "SortName" "fetch average") }}
</th>
<th>New Avg
{{ template "feeds-sort-links" (dict "Ctx" . "SortField" "newavg" "SortName" "new average") }}
</th>
</tr>
</thead>
<tbody>
{{ range $.Feeds }}
<tr>
<td><a href="{{ .URL }}" title="Visit {{ .URL }}">{{ .URL }}</a></td>
<td>{{ .Success }}</td>
<td>{{ .Failure }}</td>
<td>{{ if not .DiscoveredAt.IsZero }}{{ dateInZone "2006-01-02 15:04Z" .DiscoveredAt "UTC" }}{{ else }}unknown{{ end }}</td>
<td>{{ if not .LastScrapedAt.IsZero }}{{ dateInZone "2006-01-02 15:04Z" .LastScrapedAt "UTC" }}{{ else }}unknown{{ end }}</td>
<td>{{ .LastUpdated }}</td>
<td>{{ .LastError }}</td>
<td>{{ printf "%.2f" .FetchAvg }}</td>
<td>{{ printf "%.2f" .NewAvg }}</td>
</tr>
{{ end }}
</tbody>
</table>
{{ template "feeds-pager" . }}
</div>
<div></div>
</article>
<style>
.container { max-width: 100% }
th, td { padding: .2rem }
</style>
{{end}}

@ -6,11 +6,11 @@
<h3>Number of indexed twts and feeds crawled and other useful stats</h3>
</hgroup>
<ul>
<li>Discovered Feeds: {{ $.DiscoveredFeeds }}</li>
<li>Dead Feeds: {{ $.DeadFeeds }}</li>
<li>Empty Feeds: {{ $.EmptyFeeds }}</li>
<li>Broken Feeds: {{ $.BrokenFeeds }}</li>
<li>Indexed Feeds: {{ $.IndexedFeeds }}</li>
<li><a href="/stats/feeds/discovered" title="List all discovered feed URLs">Discovered Feeds: {{ $.DiscoveredFeeds }}</a></li>
<li><a href="/stats/feeds/dead" title="List all dead feed URLs">Dead Feeds: {{ $.DeadFeeds }}</a></li>
<li><a href="/stats/feeds/empty" title="List all empty feed URLs">Empty Feeds: {{ $.EmptyFeeds }}</a></li>
<li><a href="/stats/feeds/broken" title="List all broken feed URLs">Broken Feeds: {{ $.BrokenFeeds }}</a></li>
<li><a href="/stats/feeds/indexed" title="List all indexed feed URLs">Indexed Feeds: {{ $.IndexedFeeds }}</a></li>
<li>Indexed Twts: {{ $.IndexedTwts }}</li>
</ul>
</div>

@ -20,3 +20,16 @@ type SearchResponse struct {
SearchResults []Result `json:"results"`
Pager PagerResponse
}
// FeedResponse is the JSON representation of a single cached feed as served
// by the feeds listing endpoints. Successes and Failures count fetch-and-
// parse attempts (cf. the column titles in the feeds HTML view); the *At
// timestamp fields are preformatted strings that stay empty when unknown
// and are then omitted from the encoded JSON.
// NOTE(review): the feed URL itself is not a field here -- presumably it is
// used as the key in the surrounding response; confirm against the handler.
type FeedResponse struct {
	Successes     int     `json:"successes"`
	Failures      int     `json:"failures"`
	DiscoveredAt  string  `json:"discovered_at,omitempty"`
	LastScrapedAt string  `json:"last_scraped_at,omitempty"`
	LastUpdated   string  `json:"last_updated,omitempty"`
	LastError     string  `json:"last_error,omitempty"`
	FetchAvg      float64 `json:"fetch_avg"`
	NewAvg        float64 `json:"new_avg"`
	Dead          bool    `json:"dead"` // whether the feed falls into the "dead" stats category
}

@ -47,6 +47,8 @@ const (
YearAgo = MonthAgo * 12
URLHashLength = 15
RFC3339Millis = "2006-01-02T15:04:05.000Z07:00"
)
func FastHash(s string) string {
@ -345,26 +347,6 @@ func PreprocessMedia(conf *Config, u *url.URL, alt string) string {
)
}
// FormatForDateTime returns the time layout used to render the given
// timestamp, with decreasing verbosity the more recent the timestamp is:
// older than a year includes the year, older than a day includes weekday
// and date, and anything within the last day shows the time of day only.
func FormatForDateTime(t time.Time) string {
	// The original > MonthAgo and > WeekAgo branches returned the identical
	// layout, so they are collapsed into the single > WeekAgo case (any
	// duration beyond a month is also beyond a week).
	switch dt := time.Since(t); {
	case dt > YearAgo:
		return "Mon, Jan 2 3:04PM 2006"
	case dt > WeekAgo:
		return "Mon, Jan 2 3:04PM"
	case dt > DayAgo:
		return "Mon 2, 3:04PM"
	default:
		return "3:04PM"
	}
}
// FeedURLFactory formats a Feed URL and returns a template.HTML value
// A proxied version of the Feed URL is returned for Gopher and Gemini feeds
func FeedURLFactory(conf *Config) func(uri string) template.HTML {
@ -561,3 +543,34 @@ func GenerateAvatar(conf *Config, domainNick string) (image.Image, error) {
return ii.Image(avatarResolution), nil
}
// NonZeroRFC3339MillisUTC formats the given timestamp in UTC according to
// RFC 3339 with millisecond precision. A zero timestamp yields the empty
// string instead.
func NonZeroRFC3339MillisUTC(t time.Time) string {
	if !t.IsZero() {
		return t.UTC().Format(RFC3339Millis)
	}
	return ""
}
// FindIndex returns the position of search within elements. When the slice
// does not contain the searched element, it returns -1.
func FindIndex[T comparable](elements []T, search T) int {
	for index := range elements {
		if elements[index] == search {
			return index
		}
	}
	return -1
}
// DeleteIndex removes the element at the given index from elements and
// returns the shortened slice. An out-of-range index (which covers every
// index into an empty slice) returns the elements unchanged.
//
// NOTE(review): this is the common append-based delete idiom -- it shifts
// the tail in place and therefore reuses (mutates) the backing array of
// the input slice.
func DeleteIndex[T comparable](elements []T, index int) []T {
	if index < 0 || index >= len(elements) {
		return elements
	}
	return append(elements[:index], elements[index+1:]...)
}

@ -0,0 +1,201 @@
package internal
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestNonZeroRFC3339MillisUTC verifies the millisecond RFC 3339 formatting,
// the conversion to UTC and the zero-timestamp special case.
func TestNonZeroRFC3339MillisUTC(t *testing.T) {
	parse := func(value string) time.Time {
		parsed, err := time.Parse(time.RFC3339Nano, value)
		require.NoError(t, err, "test setup error, cannot parse timestamp as RFC 3339 with nanoseconds")
		return parsed
	}

	testCases := []struct {
		name      string
		timestamp time.Time
		expected  string
	}{
		{
			// zero value of time.Time, no parsing needed
			name:     "when zero timestamp, then return empty string",
			expected: "",
		},
		{
			name:      "when UTC timestamp, then return formatted timestamp",
			timestamp: parse("2022-12-15T14:15:58.763999Z"),
			expected:  "2022-12-15T14:15:58.763Z",
		},
		{
			name:      "when UTC+0 timestamp, then return formatted timestamp",
			timestamp: parse("2022-12-15T14:15:58.763999+00:00"),
			expected:  "2022-12-15T14:15:58.763Z",
		},
		{
			name:      "when UTC+1 timestamp, then return formatted timestamp in UTC",
			timestamp: parse("2022-12-15T14:15:58.763999+01:00"),
			expected:  "2022-12-15T13:15:58.763Z",
		},
		{
			name:      "when UTC-1 timestamp in minutes granularity, then return formatted timestamp in UTC and milliseconds precision",
			timestamp: parse("2022-12-15T14:15:00-01:00"),
			expected:  "2022-12-15T15:15:00.000Z",
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			assert.Equal(t, testCase.expected, NonZeroRFC3339MillisUTC(testCase.timestamp))
		})
	}
}
// TestFindIndex exercises FindIndex with nil, empty, single-element and
// multi-element slices, including a slice with duplicate elements.
func TestFindIndex(t *testing.T) {
	for _, tt := range []struct {
		name     string
		elements []string
		search   string
		expected int
	}{
		{
			name:     "nil slice does not contain empty string",
			elements: nil,
			search:   "",
			expected: -1,
		},
		{
			// FIX: this case previously used []string{} and therefore
			// duplicated the empty-slice case below instead of testing nil
			// as its name claims.
			name:     "nil slice does not contain arbitrary string",
			elements: nil,
			search:   "foo",
			expected: -1,
		},
		{
			name:     "empty slice does not contain empty string",
			elements: []string{},
			search:   "",
			expected: -1,
		},
		{
			name:     "empty slice does not contain arbitrary string",
			elements: []string{},
			search:   "foo",
			expected: -1,
		},
		{
			name:     "single element slice does not contain empty string",
			elements: []string{"bar"},
			search:   "",
			expected: -1,
		},
		{
			name:     "single element slice does not contain different string",
			elements: []string{"bar"},
			search:   "foo",
			expected: -1,
		},
		{
			name:     "single element slice does contain same string",
			elements: []string{"bar"},
			search:   "bar",
			expected: 0,
		},
		{
			name:     "multiple element slice does not contain different string",
			elements: []string{"bar", "foo"},
			search:   "eggs",
			expected: -1,
		},
		{
			name:     "multiple element slice does contain search string at first index",
			elements: []string{"bar", "foo"},
			search:   "bar",
			expected: 0,
		},
		{
			name:     "multiple element slice does contain search string at second index",
			elements: []string{"bar", "foo"},
			search:   "foo",
			expected: 1,
		},
		{
			name:     "multiple element slice with duplicates does contain search string and find first index",
			elements: []string{"bar", "foo", "eggs", "and", "spam", "with", "eggs"},
			search:   "eggs",
			expected: 2,
		},
	} {
		t.Run(tt.name, func(t *testing.T) {
			assert.Equal(t, tt.expected, FindIndex(tt.elements, tt.search))
		})
	}
}
func TestDeleteIndex(t *testing.T) {
for _, tt := range []struct {
name string
elements []string
index int
expected []string
}{
{
name: "removing from nil slice returns nil",
elements: nil,
index: 0,
expected: nil,
},
{
name: "removing from empty slice returns nil",
elements: []string{},
index: 0,
expected: []string{},
},
{
name: "removing first element from single element slice returns empty slice",
elements: []string{"foo"},
index: 0,
expected: []string{},
},
{
name: "removing second element from single element slice returns same slice",
elements: []string{"foo"},
index: 1,
expected: []string{"foo"},
},
{
name: "removing before first element from single element slice returns same slice",
elements: []string{"foo"},
index: -1,
expected: []string{"foo"},
},
{
name: "removing first element from multiple element slice returns second element",
elements: []string{"foo", "bar"},
index: 0,
expected: []string{"bar"},
},
{
name: "removing second element from multiple element slice returns first element",
elements: []string{"foo", "bar"},
index: 1,
expected: []string{"foo"},
},
{
name: "removing third element from multiple element slice returns same slice",
elements: []string{"foo", "bar"},
index: 2,
expected: []string{"foo", "bar"},
},
{
name: "removing before first element from multiple element slice returns same slice",
elements: []string{"foo", "bar"},
index: -1,
expected: []string{"foo", "bar"},
},
} {