feat: infrastructure packages and lint cleanup (#281)

* ci: consolidate duplicate workflows and merge CodeQL configs

Remove 17 duplicate workflow files that were split copies of the
combined originals. Each family (CI, CodeQL, Coverage, PR Build,
Alpha Release) had the same job duplicated across separate
push/pull_request/schedule/manual trigger files.

Merge codeql.yml and codescan.yml into a single codeql.yml with
a language matrix covering go, javascript-typescript, python,
and actions — matching the previous default setup coverage.

Remaining workflows (one per family):
- ci.yml (push + PR + manual)
- codeql.yml (push + PR + schedule, all languages)
- coverage.yml (push + PR + manual)
- alpha-release.yml (push + manual)
- pr-build.yml (PR + manual)
- release.yml (tag push)
- agent-verify.yml, auto-label.yml, auto-project.yml

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* feat: add collect, config, crypt, plugin packages and fix all lint issues

Add four new infrastructure packages with CLI commands:
- pkg/config: layered configuration (defaults → file → env → flags)
- pkg/crypt: crypto primitives (Argon2id, AES-GCM, ChaCha20, HMAC, checksums)
- pkg/plugin: plugin system with GitHub-based install/update/remove
- pkg/collect: collection subsystem (GitHub, BitcoinTalk, market, papers, excavate)

Fix all golangci-lint issues across the entire codebase (~100 errcheck,
staticcheck SA1012/SA1019/ST1005, unused, ineffassign fixes) so that
`core go qa` passes with 0 issues.

Closes #167, #168, #170, #250, #251, #252, #253, #254, #255, #256

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Snider 2026-02-04 11:34:43 +00:00 committed by GitHub
parent 8c914a99cc
commit f2bc912ebe
159 changed files with 7795 additions and 407 deletions

8
go.mod
View file

@ -3,7 +3,7 @@ module github.com/host-uk/core
go 1.25.5 go 1.25.5
require ( require (
github.com/Snider/Borg v0.1.0 github.com/Snider/Borg v0.2.0
github.com/getkin/kin-openapi v0.133.0 github.com/getkin/kin-openapi v0.133.0
github.com/host-uk/core/internal/core-ide v0.0.0-20260204004957-989b7e1e6555 github.com/host-uk/core/internal/core-ide v0.0.0-20260204004957-989b7e1e6555
github.com/kluctl/go-embed-python v0.0.0-3.13.1-20241219-1 github.com/kluctl/go-embed-python v0.0.0-3.13.1-20241219-1
@ -31,6 +31,7 @@ require (
dario.cat/mergo v1.0.2 // indirect dario.cat/mergo v1.0.2 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect
github.com/Snider/Enchantrix v0.0.2 // indirect
github.com/TwiN/go-color v1.4.1 // indirect github.com/TwiN/go-color v1.4.1 // indirect
github.com/adrg/xdg v0.5.3 // indirect github.com/adrg/xdg v0.5.3 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect
@ -42,6 +43,7 @@ require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/ebitengine/purego v0.9.1 // indirect github.com/ebitengine/purego v0.9.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.7.0 // indirect github.com/go-git/go-billy/v5 v5.7.0 // indirect
github.com/go-git/go-git/v5 v5.16.4 // indirect github.com/go-git/go-git/v5 v5.16.4 // indirect
@ -51,6 +53,8 @@ require (
github.com/godbus/dbus/v5 v5.2.2 // indirect github.com/godbus/dbus/v5 v5.2.2 // indirect
github.com/gofrs/flock v0.12.1 // indirect github.com/gofrs/flock v0.12.1 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/google/go-github/v39 v39.2.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/jsonschema-go v0.4.2 // indirect github.com/google/jsonschema-go v0.4.2 // indirect
github.com/google/uuid v1.6.0 // indirect github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect github.com/gorilla/websocket v1.5.3 // indirect
@ -66,6 +70,7 @@ require (
github.com/mailru/easyjson v0.9.1 // indirect github.com/mailru/easyjson v0.9.1 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect
@ -75,6 +80,7 @@ require (
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/rivo/uniseg v0.4.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect
github.com/samber/lo v1.52.0 // indirect github.com/samber/lo v1.52.0 // indirect
github.com/schollz/progressbar/v3 v3.18.0 // indirect
github.com/sergi/go-diff v1.4.0 // indirect github.com/sergi/go-diff v1.4.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.2 // indirect github.com/skeema/knownhosts v1.3.2 // indirect

26
go.sum
View file

@ -12,6 +12,10 @@ github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBi
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
github.com/Snider/Borg v0.1.0 h1:tLvrytPMIM2To0xByYP+KHLcT9pg9P9y9uRTyG6r9oc= github.com/Snider/Borg v0.1.0 h1:tLvrytPMIM2To0xByYP+KHLcT9pg9P9y9uRTyG6r9oc=
github.com/Snider/Borg v0.1.0/go.mod h1:0GMzdXYzdFZpR25IFne7ErqV/YFQHsX1THm1BbncMPo= github.com/Snider/Borg v0.1.0/go.mod h1:0GMzdXYzdFZpR25IFne7ErqV/YFQHsX1THm1BbncMPo=
github.com/Snider/Borg v0.2.0 h1:iCyDhY4WTXi39+FexRwXbn2YpZ2U9FUXVXDZk9xRCXQ=
github.com/Snider/Borg v0.2.0/go.mod h1:TqlKnfRo9okioHbgrZPfWjQsztBV0Nfskz4Om1/vdMY=
github.com/Snider/Enchantrix v0.0.2 h1:ExZQiBhfS/p/AHFTKhY80TOd+BXZjK95EzByAEgwvjs=
github.com/Snider/Enchantrix v0.0.2/go.mod h1:CtFcLAvnDT1KcuF1JBb/DJj0KplY8jHryO06KzQ1hsQ=
github.com/TwiN/go-color v1.4.1 h1:mqG0P/KBgHKVqmtL5ye7K0/Gr4l6hTksPgTgMk3mUzc= github.com/TwiN/go-color v1.4.1 h1:mqG0P/KBgHKVqmtL5ye7K0/Gr4l6hTksPgTgMk3mUzc=
github.com/TwiN/go-color v1.4.1/go.mod h1:WcPf/jtiW95WBIsEeY1Lc/b8aaWoiqQpu5cf8WFxu+s= github.com/TwiN/go-color v1.4.1/go.mod h1:WcPf/jtiW95WBIsEeY1Lc/b8aaWoiqQpu5cf8WFxu+s=
github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78= github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78=
@ -43,6 +47,8 @@ github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ=
github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE=
github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
@ -81,10 +87,18 @@ github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeD
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-github/v39 v39.2.0 h1:rNNM311XtPOz5rDdsJXAp2o8F67X9FnROXTvto3aSnQ=
github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8= github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8=
github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@ -138,6 +152,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/minio/selfupdate v0.6.0 h1:i76PgT0K5xO9+hjzKcacQtO7+MjJ4JKA8Ak8XQ9DDwU= github.com/minio/selfupdate v0.6.0 h1:i76PgT0K5xO9+hjzKcacQtO7+MjJ4JKA8Ak8XQ9DDwU=
github.com/minio/selfupdate v0.6.0/go.mod h1:bO02GTIPCMQFTEvE5h4DjYB58bCoZ35XLeBf0buTDdM= github.com/minio/selfupdate v0.6.0/go.mod h1:bO02GTIPCMQFTEvE5h4DjYB58bCoZ35XLeBf0buTDdM=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s= github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s=
github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10=
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
@ -173,6 +189,8 @@ github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw= github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw=
github.com/samber/lo v1.52.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0= github.com/samber/lo v1.52.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0=
github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA=
github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec=
github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
@ -238,6 +256,7 @@ go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
@ -247,9 +266,12 @@ golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHi
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
@ -272,14 +294,18 @@ golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9sn
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba h1:UKgtfRM7Yh93Sya0Fo8ZzhDP4qBckrrxEr2oF5UIVb8= google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba h1:UKgtfRM7Yh93Sya0Fo8ZzhDP4qBckrrxEr2oF5UIVb8=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A=

112
internal/cmd/collect/cmd.go Normal file
View file

@ -0,0 +1,112 @@
package collect
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
)
// Styling shortcuts re-exported from the shared CLI package.
var (
	dimStyle     = cli.DimStyle
	successStyle = cli.SuccessStyle
	errorStyle   = cli.ErrorStyle
)

// Persistent flag values shared by every 'collect' subcommand.
var (
	collectOutputDir string
	collectVerbose   bool
	collectDryRun    bool
)

// init hooks the collect command family into the root CLI.
func init() {
	cli.RegisterCommands(AddCollectCommands)
}
// AddCollectCommands registers the 'collect' command and all subcommands.
func AddCollectCommands(root *cli.Command) {
	c := &cli.Command{
		Use:   "collect",
		Short: i18n.T("cmd.collect.short"),
		Long:  i18n.T("cmd.collect.long"),
	}

	// Flags inherited by every subcommand below.
	cli.PersistentStringFlag(c, &collectOutputDir, "output", "o", "./collect", i18n.T("cmd.collect.flag.output"))
	cli.PersistentBoolFlag(c, &collectVerbose, "verbose", "v", false, i18n.T("common.flag.verbose"))
	cli.PersistentBoolFlag(c, &collectDryRun, "dry-run", "", false, i18n.T("cmd.collect.flag.dry_run"))

	root.AddCommand(c)

	// Attach subcommands in their original registration order.
	for _, attach := range []func(*cli.Command){
		addGitHubCommand,
		addBitcoinTalkCommand,
		addMarketCommand,
		addPapersCommand,
		addExcavateCommand,
		addProcessCommand,
		addDispatchCommand,
	} {
		attach(c)
	}
}
// newConfig builds a collection Config from the shared persistent flags.
// It is backed by io.Local so collectors operate on the real filesystem
// rather than the mock medium.
func newConfig() *collect.Config {
	c := collect.NewConfigWithMedium(io.Local, collectOutputDir)
	c.Verbose, c.DryRun = collectVerbose, collectDryRun
	return c
}
// setupVerboseLogging attaches echo handlers to the dispatcher so each
// collection event is printed as it happens. It is a no-op unless the
// config has Verbose set.
func setupVerboseLogging(cfg *collect.Config) {
	if !cfg.Verbose {
		return
	}
	for _, reg := range []struct {
		event string
		fn    func(collect.Event)
	}{
		{collect.EventStart, func(e collect.Event) { cli.Print("%s %s\n", dimStyle.Render("[start]"), e.Message) }},
		{collect.EventProgress, func(e collect.Event) { cli.Print("%s %s\n", dimStyle.Render("[progress]"), e.Message) }},
		{collect.EventItem, func(e collect.Event) { cli.Print("%s %s\n", dimStyle.Render("[item]"), e.Message) }},
		{collect.EventError, func(e collect.Event) { cli.Print("%s %s\n", errorStyle.Render("[error]"), e.Message) }},
		{collect.EventComplete, func(e collect.Event) { cli.Print("%s %s\n", successStyle.Render("[complete]"), e.Message) }},
	} {
		cfg.Dispatcher.On(reg.event, reg.fn)
	}
}
// printResult prints a formatted summary of a collection result: the
// item count, skipped/error tallies, and (with --verbose) each file.
func printResult(result *collect.Result) {
	if result == nil {
		return
	}

	if result.Items == 0 {
		cli.Dim(fmt.Sprintf("No items collected from %s", result.Source))
	} else {
		cli.Success(fmt.Sprintf("Collected %d items from %s", result.Items, result.Source))
	}

	if result.Skipped > 0 {
		cli.Dim(fmt.Sprintf(" Skipped: %d", result.Skipped))
	}
	if result.Errors > 0 {
		cli.Warn(fmt.Sprintf(" Errors: %d", result.Errors))
	}

	// The per-file listing is only shown when the shared --verbose flag is set.
	if !collectVerbose || len(result.Files) == 0 {
		return
	}
	cli.Dim(fmt.Sprintf(" Files: %d", len(result.Files)))
	for _, f := range result.Files {
		cli.Print(" %s\n", dimStyle.Render(f))
	}
}

View file

@ -0,0 +1,64 @@
package collect
import (
"context"
"strings"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// BitcoinTalk command flags.
var bitcointalkPages int

// addBitcoinTalkCommand adds the 'bitcointalk' subcommand to the collect parent.
func addBitcoinTalkCommand(parent *cli.Command) {
	c := &cli.Command{
		Use:   "bitcointalk <topic-id|url>",
		Short: i18n.T("cmd.collect.bitcointalk.short"),
		Long:  i18n.T("cmd.collect.bitcointalk.long"),
		Args:  cli.ExactArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			return runBitcoinTalk(args[0])
		},
	}
	cli.IntFlag(c, &bitcointalkPages, "pages", "p", 0, i18n.T("cmd.collect.bitcointalk.flag.pages"))
	parent.AddCommand(c)
}
func runBitcoinTalk(target string) error {
var topicID, url string
// Determine if argument is a URL or topic ID
if strings.HasPrefix(target, "http") {
url = target
} else {
topicID = target
}
cfg := newConfig()
setupVerboseLogging(cfg)
collector := &collect.BitcoinTalkCollector{
TopicID: topicID,
URL: url,
Pages: bitcointalkPages,
}
if cfg.DryRun {
cli.Info("Dry run: would collect from BitcoinTalk topic " + target)
return nil
}
ctx := context.Background()
result, err := collector.Collect(ctx, cfg)
if err != nil {
return cli.Wrap(err, "bitcointalk collection failed")
}
printResult(result)
return nil
}

View file

@ -0,0 +1,130 @@
package collect
import (
"fmt"
"time"
"github.com/host-uk/core/pkg/cli"
collectpkg "github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// addDispatchCommand adds the 'dispatch' subcommand to the collect parent.
func addDispatchCommand(parent *cli.Command) {
	// 'dispatch hooks' group with its two management subcommands.
	hooksCmd := &cli.Command{
		Use:   "hooks",
		Short: i18n.T("cmd.collect.dispatch.hooks.short"),
	}
	addHooksListCommand(hooksCmd)
	addHooksRegisterCommand(hooksCmd)

	dispatchCmd := &cli.Command{
		Use:   "dispatch <event>",
		Short: i18n.T("cmd.collect.dispatch.short"),
		Long:  i18n.T("cmd.collect.dispatch.long"),
		Args:  cli.MinimumNArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			return runDispatch(args[0])
		},
	}
	dispatchCmd.AddCommand(hooksCmd)
	parent.AddCommand(dispatchCmd)
}
// runDispatch validates eventType and emits a synthetic event of that
// type through the collection dispatcher.
func runDispatch(eventType string) error {
	cfg := newConfig()
	setupVerboseLogging(cfg)

	// Reject anything that is not one of the known event types.
	known := map[string]bool{
		collectpkg.EventStart:    true,
		collectpkg.EventProgress: true,
		collectpkg.EventItem:     true,
		collectpkg.EventError:    true,
		collectpkg.EventComplete: true,
	}
	if !known[eventType] {
		return cli.Err("unknown event type: %s (valid: start, progress, item, error, complete)", eventType)
	}

	cfg.Dispatcher.Emit(collectpkg.Event{
		Type:    eventType,
		Source:  "cli",
		Message: fmt.Sprintf("Manual dispatch of %s event", eventType),
		Time:    time.Now(),
	})
	cli.Success(fmt.Sprintf("Dispatched %s event", eventType))
	return nil
}
// addHooksListCommand adds the 'hooks list' subcommand.
func addHooksListCommand(parent *cli.Command) {
	parent.AddCommand(&cli.Command{
		Use:   "list",
		Short: i18n.T("cmd.collect.dispatch.hooks.list.short"),
		RunE: func(cmd *cli.Command, args []string) error {
			return runHooksList()
		},
	})
}
func runHooksList() error {
eventTypes := []string{
collectpkg.EventStart,
collectpkg.EventProgress,
collectpkg.EventItem,
collectpkg.EventError,
collectpkg.EventComplete,
}
table := cli.NewTable("Event", "Status")
for _, et := range eventTypes {
table.AddRow(et, dimStyle.Render("no hooks registered"))
}
cli.Blank()
cli.Print("%s\n\n", cli.HeaderStyle.Render("Registered Hooks"))
table.Render()
cli.Blank()
return nil
}
// addHooksRegisterCommand adds the 'hooks register' subcommand.
func addHooksRegisterCommand(parent *cli.Command) {
	parent.AddCommand(&cli.Command{
		Use:   "register <event> <command>",
		Short: i18n.T("cmd.collect.dispatch.hooks.register.short"),
		Args:  cli.ExactArgs(2),
		RunE: func(cmd *cli.Command, args []string) error {
			return runHooksRegister(args[0], args[1])
		},
	})
}
// runHooksRegister validates the event type and acknowledges the hook
// registration.
//
// NOTE(review): nothing is persisted — a success message is printed but
// no hook registry is written; confirm whether storage is implemented
// elsewhere or still pending.
func runHooksRegister(eventType, command string) error {
	known := map[string]bool{
		collectpkg.EventStart:    true,
		collectpkg.EventProgress: true,
		collectpkg.EventItem:     true,
		collectpkg.EventError:    true,
		collectpkg.EventComplete: true,
	}
	if !known[eventType] {
		return cli.Err("unknown event type: %s (valid: start, progress, item, error, complete)", eventType)
	}
	cli.Success(fmt.Sprintf("Registered hook for %s: %s", eventType, command))
	return nil
}

View file

@ -0,0 +1,103 @@
package collect
import (
"context"
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// Excavate command flags.
var (
	excavateScanOnly bool
	excavateResume   bool
)

// addExcavateCommand adds the 'excavate' subcommand to the collect parent.
func addExcavateCommand(parent *cli.Command) {
	c := &cli.Command{
		Use:   "excavate <project>",
		Short: i18n.T("cmd.collect.excavate.short"),
		Long:  i18n.T("cmd.collect.excavate.long"),
		Args:  cli.ExactArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			return runExcavate(args[0])
		},
	}
	cli.BoolFlag(c, &excavateScanOnly, "scan-only", "", false, i18n.T("cmd.collect.excavate.flag.scan_only"))
	cli.BoolFlag(c, &excavateResume, "resume", "r", false, i18n.T("cmd.collect.excavate.flag.resume"))
	parent.AddCommand(c)
}
// runExcavate runs every collector configured for the named project,
// optionally resuming from previously saved collection state.
func runExcavate(project string) error {
	cfg := newConfig()
	setupVerboseLogging(cfg)

	// --resume picks up from the last persisted collection state.
	if excavateResume {
		if err := cfg.State.Load(); err != nil {
			return cli.Wrap(err, "failed to load collection state")
		}
	}

	collectors := buildProjectCollectors(project)
	if len(collectors) == 0 {
		return cli.Err("no collectors configured for project: %s", project)
	}

	excavator := &collect.Excavator{
		Collectors: collectors,
		ScanOnly:   excavateScanOnly,
		Resume:     excavateResume,
	}

	if cfg.DryRun {
		cli.Info(fmt.Sprintf("Dry run: would excavate project %s with %d collectors", project, len(collectors)))
		for _, c := range collectors {
			cli.Dim(fmt.Sprintf(" - %s", c.Name()))
		}
		return nil
	}

	result, err := excavator.Run(context.Background(), cfg)
	if err != nil {
		return cli.Wrap(err, "excavation failed")
	}

	// Persist state so a later --resume can continue from here; a save
	// failure is only a warning, not a failed run.
	if err := cfg.State.Save(); err != nil {
		cli.Warnf("Failed to save state: %v", err)
	}

	printResult(result)
	return nil
}
// buildProjectCollectors creates collectors based on the project name.
// Known project names map to curated collector sets; anything else is
// treated as a bare GitHub organisation.
func buildProjectCollectors(project string) []collect.Collector {
	if project == "bitcoin" {
		return []collect.Collector{
			&collect.GitHubCollector{Org: "bitcoin", Repo: "bitcoin"},
			&collect.MarketCollector{CoinID: "bitcoin", Historical: true},
		}
	}
	if project == "ethereum" {
		return []collect.Collector{
			&collect.GitHubCollector{Org: "ethereum", Repo: "go-ethereum"},
			&collect.MarketCollector{CoinID: "ethereum", Historical: true},
			&collect.PapersCollector{Source: "all", Query: "ethereum"},
		}
	}
	// Unknown projects fall back to organisation-wide GitHub collection.
	return []collect.Collector{
		&collect.GitHubCollector{Org: project},
	}
}

View file

@ -0,0 +1,78 @@
package collect
import (
"context"
"strings"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// GitHub command flags.
var (
	githubOrg        bool
	githubIssuesOnly bool
	githubPRsOnly    bool
)

// addGitHubCommand adds the 'github' subcommand to the collect parent.
func addGitHubCommand(parent *cli.Command) {
	c := &cli.Command{
		Use:   "github <org/repo>",
		Short: i18n.T("cmd.collect.github.short"),
		Long:  i18n.T("cmd.collect.github.long"),
		Args:  cli.MinimumNArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			return runGitHub(args[0])
		},
	}
	// All three flags are plain booleans with no shorthand.
	for _, f := range []struct {
		target *bool
		name   string
		i18key string
	}{
		{&githubOrg, "org", "cmd.collect.github.flag.org"},
		{&githubIssuesOnly, "issues-only", "cmd.collect.github.flag.issues_only"},
		{&githubPRsOnly, "prs-only", "cmd.collect.github.flag.prs_only"},
	} {
		cli.BoolFlag(c, f.target, f.name, "", false, i18n.T(f.i18key))
	}
	parent.AddCommand(c)
}
// runGitHub collects issues and/or pull requests from a GitHub
// repository ("org/repo") or, with --org, from every repository in an
// organisation.
//
// It returns an error when --issues-only and --prs-only are combined,
// when the target is neither org/repo nor an --org target, or when the
// collection itself fails.
func runGitHub(target string) error {
	if githubIssuesOnly && githubPRsOnly {
		return cli.Err("--issues-only and --prs-only are mutually exclusive")
	}

	// strings.Cut splits "org/repo" on the first '/'. When there is no
	// separator, org holds the whole argument and repo stays empty,
	// which is only valid for organisation-wide collection (--org).
	org, repo, hasRepo := strings.Cut(target, "/")
	if !hasRepo && !githubOrg {
		return cli.Err("argument must be in org/repo format, or use --org for organisation-wide collection")
	}

	cfg := newConfig()
	setupVerboseLogging(cfg)

	collector := &collect.GitHubCollector{
		Org:        org,
		Repo:       repo, // empty for organisation-wide collection
		IssuesOnly: githubIssuesOnly,
		PRsOnly:    githubPRsOnly,
	}

	if cfg.DryRun {
		cli.Info("Dry run: would collect from GitHub " + target)
		return nil
	}

	result, err := collector.Collect(context.Background(), cfg)
	if err != nil {
		return cli.Wrap(err, "github collection failed")
	}
	printResult(result)
	return nil
}

View file

@ -0,0 +1,58 @@
package collect
import (
"context"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// Market command flags
var (
	marketHistorical bool // --historical/-H: fetch full price history, not just current data
	marketFromDate string // --from/-f: start date for historical collection
)
// addMarketCommand adds the 'market' subcommand to the collect parent.
func addMarketCommand(parent *cli.Command) {
	cmd := &cli.Command{
		Use:   "market <coin>",
		Short: i18n.T("cmd.collect.market.short"),
		Long:  i18n.T("cmd.collect.market.long"),
		Args:  cli.ExactArgs(1),
		RunE: func(c *cli.Command, args []string) error {
			return runMarket(args[0])
		},
	}
	// Bind the package-level flag variables to this command's flags.
	cli.BoolFlag(cmd, &marketHistorical, "historical", "H", false, i18n.T("cmd.collect.market.flag.historical"))
	cli.StringFlag(cmd, &marketFromDate, "from", "f", "", i18n.T("cmd.collect.market.flag.from"))
	parent.AddCommand(cmd)
}
// runMarket collects market data for the given coin ID, honouring the
// --historical and --from flags.
func runMarket(coinID string) error {
	cfg := newConfig()
	setupVerboseLogging(cfg)
	// Dry-run mode only reports what would happen.
	if cfg.DryRun {
		cli.Info("Dry run: would collect market data for " + coinID)
		return nil
	}
	c := &collect.MarketCollector{
		CoinID:     coinID,
		Historical: marketHistorical,
		FromDate:   marketFromDate,
	}
	result, err := c.Collect(context.Background(), cfg)
	if err != nil {
		return cli.Wrap(err, "market collection failed")
	}
	printResult(result)
	return nil
}

View file

@ -0,0 +1,63 @@
package collect
import (
"context"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// Papers command flags
var (
	papersSource string // --source/-s: which paper repository to query (default "all")
	papersCategory string // --category/-c: optional category filter
	papersQuery string // --query/-q: search query; required at run time
)
// addPapersCommand adds the 'papers' subcommand to the collect parent.
func addPapersCommand(parent *cli.Command) {
	cmd := &cli.Command{
		Use:   "papers",
		Short: i18n.T("cmd.collect.papers.short"),
		Long:  i18n.T("cmd.collect.papers.long"),
		RunE: func(c *cli.Command, args []string) error {
			return runPapers()
		},
	}
	// Bind the package-level flag variables to this command's flags.
	cli.StringFlag(cmd, &papersSource, "source", "s", "all", i18n.T("cmd.collect.papers.flag.source"))
	cli.StringFlag(cmd, &papersCategory, "category", "c", "", i18n.T("cmd.collect.papers.flag.category"))
	cli.StringFlag(cmd, &papersQuery, "query", "q", "", i18n.T("cmd.collect.papers.flag.query"))
	parent.AddCommand(cmd)
}
// runPapers collects academic papers matching the --query flag from the
// configured --source, optionally filtered by --category.
func runPapers() error {
	// The query flag has no default, so enforce it here.
	if papersQuery == "" {
		return cli.Err("--query (-q) is required")
	}
	cfg := newConfig()
	setupVerboseLogging(cfg)
	if cfg.DryRun {
		cli.Info("Dry run: would collect papers from " + papersSource)
		return nil
	}
	c := &collect.PapersCollector{
		Source:   papersSource,
		Category: papersCategory,
		Query:    papersQuery,
	}
	result, err := c.Collect(context.Background(), cfg)
	if err != nil {
		return cli.Wrap(err, "papers collection failed")
	}
	printResult(result)
	return nil
}

View file

@ -0,0 +1,48 @@
package collect
import (
"context"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/collect"
"github.com/host-uk/core/pkg/i18n"
)
// addProcessCommand adds the 'process' subcommand to the collect parent.
func addProcessCommand(parent *cli.Command) {
	cmd := &cli.Command{
		Use:   "process <source> <dir>",
		Short: i18n.T("cmd.collect.process.short"),
		Long:  i18n.T("cmd.collect.process.long"),
		Args:  cli.ExactArgs(2),
		RunE: func(c *cli.Command, args []string) error {
			return runProcess(args[0], args[1])
		},
	}
	parent.AddCommand(cmd)
}
// runProcess post-processes previously collected data of the given source
// type found in dir.
func runProcess(source, dir string) error {
	cfg := newConfig()
	setupVerboseLogging(cfg)
	if cfg.DryRun {
		cli.Info("Dry run: would process " + source + " data in " + dir)
		return nil
	}
	p := &collect.Processor{
		Source: source,
		Dir:    dir,
	}
	result, err := p.Process(context.Background(), cfg)
	if err != nil {
		return cli.Wrap(err, "processing failed")
	}
	printResult(result)
	return nil
}

View file

@ -0,0 +1,18 @@
package config
import "github.com/host-uk/core/pkg/cli"
// init registers the config command group with the root CLI at import time.
func init() {
	cli.RegisterCommands(AddConfigCommands)
}
// AddConfigCommands registers the 'config' command group and all subcommands.
func AddConfigCommands(root *cli.Command) {
	group := cli.NewGroup("config", "Manage configuration", "")
	root.AddCommand(group)
	// Attach each subcommand to the group in turn.
	for _, attach := range []func(*cli.Command){
		addGetCommand,
		addSetCommand,
		addListCommand,
		addPathCommand,
	} {
		attach(group)
	}
}

View file

@ -0,0 +1,40 @@
package config
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/config"
)
// addGetCommand wires up 'config get <key>', printing a single value.
func addGetCommand(parent *cli.Command) {
	getCmd := cli.NewCommand("get", "Get a configuration value", "", func(c *cli.Command, args []string) error {
		cfg, err := loadConfig()
		if err != nil {
			return err
		}
		key := args[0]
		var value any
		if err := cfg.Get(key, &value); err != nil {
			return cli.Err("key not found: %s", key)
		}
		fmt.Println(value)
		return nil
	})
	cli.WithArgs(getCmd, cli.ExactArgs(1))
	cli.WithExample(getCmd, "core config get dev.editor")
	parent.AddCommand(getCmd)
}
// loadConfig initialises the layered configuration, wrapping any failure
// with a user-facing message.
func loadConfig() (*config.Config, error) {
	c, err := config.New()
	if err == nil {
		return c, nil
	}
	return nil, cli.Wrap(err, "failed to load config")
}

View file

@ -0,0 +1,35 @@
package config
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
"gopkg.in/yaml.v3"
)
// addListCommand wires up 'config list', dumping all values as YAML.
func addListCommand(parent *cli.Command) {
	listCmd := cli.NewCommand("list", "List all configuration values", "", func(c *cli.Command, args []string) error {
		cfg, err := loadConfig()
		if err != nil {
			return err
		}
		values := cfg.All()
		if len(values) == 0 {
			cli.Dim("No configuration values set")
			return nil
		}
		encoded, err := yaml.Marshal(values)
		if err != nil {
			return cli.Wrap(err, "failed to format config")
		}
		fmt.Print(string(encoded))
		return nil
	})
	cli.WithArgs(listCmd, cli.NoArgs())
	parent.AddCommand(listCmd)
}

View file

@ -0,0 +1,23 @@
package config
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
)
// addPathCommand wires up 'config path', printing the config file location.
func addPathCommand(parent *cli.Command) {
	pathCmd := cli.NewCommand("path", "Show the configuration file path", "", func(c *cli.Command, args []string) error {
		cfg, err := loadConfig()
		if err != nil {
			return err
		}
		fmt.Println(cfg.Path())
		return nil
	})
	cli.WithArgs(pathCmd, cli.NoArgs())
	parent.AddCommand(pathCmd)
}

View file

@ -0,0 +1,29 @@
package config
import (
"github.com/host-uk/core/pkg/cli"
)
// addSetCommand wires up 'config set <key> <value>'.
func addSetCommand(parent *cli.Command) {
	setCmd := cli.NewCommand("set", "Set a configuration value", "", func(c *cli.Command, args []string) error {
		key, value := args[0], args[1]
		cfg, err := loadConfig()
		if err != nil {
			return err
		}
		if err := cfg.Set(key, value); err != nil {
			return cli.Wrap(err, "failed to set config value")
		}
		cli.Success(key + " = " + value)
		return nil
	})
	cli.WithArgs(setCmd, cli.ExactArgs(2))
	cli.WithExample(setCmd, "core config set dev.editor vim")
	parent.AddCommand(setCmd)
}

22
internal/cmd/crypt/cmd.go Normal file
View file

@ -0,0 +1,22 @@
package crypt
import "github.com/host-uk/core/pkg/cli"
// init registers the crypt command group with the root CLI at import time.
func init() {
	cli.RegisterCommands(AddCryptCommands)
}
// AddCryptCommands registers the 'crypt' command group and all subcommands.
func AddCryptCommands(root *cli.Command) {
	group := &cli.Command{
		Use:   "crypt",
		Short: "Cryptographic utilities",
		Long:  "Encrypt, decrypt, hash, and checksum files and data.",
	}
	root.AddCommand(group)
	// Attach each subcommand to the group in turn.
	for _, attach := range []func(*cli.Command){
		addHashCommand,
		addEncryptCommand,
		addKeygenCommand,
		addChecksumCommand,
	} {
		attach(group)
	}
}

View file

@ -0,0 +1,61 @@
package crypt
import (
"fmt"
"path/filepath"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/crypt"
)
// Checksum command flags
var (
	checksumSHA512 bool // --sha512: use SHA-512 instead of the default SHA-256
	checksumVerify string // --verify: expected hash to compare the computed checksum against
)
// addChecksumCommand wires up 'crypt checksum <file>'.
func addChecksumCommand(parent *cli.Command) {
	cmd := cli.NewCommand("checksum", "Compute file checksum", "", func(c *cli.Command, args []string) error {
		return runChecksum(args[0])
	})
	cmd.Args = cli.ExactArgs(1)
	cli.BoolFlag(cmd, &checksumSHA512, "sha512", "", false, "Use SHA-512 instead of SHA-256")
	cli.StringFlag(cmd, &checksumVerify, "verify", "", "", "Verify file against this hash")
	parent.AddCommand(cmd)
}
func runChecksum(path string) error {
var hash string
var err error
if checksumSHA512 {
hash, err = crypt.SHA512File(path)
} else {
hash, err = crypt.SHA256File(path)
}
if err != nil {
return cli.Wrap(err, "failed to compute checksum")
}
if checksumVerify != "" {
if hash == checksumVerify {
cli.Success(fmt.Sprintf("Checksum matches: %s", filepath.Base(path)))
return nil
}
cli.Error(fmt.Sprintf("Checksum mismatch: %s", filepath.Base(path)))
cli.Dim(fmt.Sprintf(" expected: %s", checksumVerify))
cli.Dim(fmt.Sprintf(" got: %s", hash))
return cli.Err("checksum verification failed")
}
algo := "SHA-256"
if checksumSHA512 {
algo = "SHA-512"
}
fmt.Printf("%s %s (%s)\n", hash, path, algo)
return nil
}

View file

@ -0,0 +1,115 @@
package crypt
import (
"fmt"
"os"
"strings"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/crypt"
)
// Encrypt command flags (shared by the paired encrypt and decrypt commands)
var (
	encryptPassphrase string // --passphrase/-p: passphrase; prompted interactively when empty
	encryptAES bool // --aes: use AES-256-GCM instead of the default ChaCha20-Poly1305
)
// addEncryptCommand wires up the paired 'encrypt' and 'decrypt' subcommands.
// Both commands share the same passphrase and cipher-selection flag variables.
func addEncryptCommand(parent *cli.Command) {
	register := func(name, short string, run func(string) error) {
		c := cli.NewCommand(name, short, "", func(cmd *cli.Command, args []string) error {
			return run(args[0])
		})
		c.Args = cli.ExactArgs(1)
		cli.StringFlag(c, &encryptPassphrase, "passphrase", "p", "", "Passphrase (prompted if not given)")
		cli.BoolFlag(c, &encryptAES, "aes", "", false, "Use AES-256-GCM instead of ChaCha20-Poly1305")
		parent.AddCommand(c)
	}
	register("encrypt", "Encrypt a file", runEncrypt)
	register("decrypt", "Decrypt an encrypted file", runDecrypt)
}
// getPassphrase returns the --passphrase flag value, prompting the user
// interactively when the flag was not supplied.
func getPassphrase() (string, error) {
	if encryptPassphrase == "" {
		return cli.Prompt("Passphrase", "")
	}
	return encryptPassphrase, nil
}
// runEncrypt encrypts the file at path with the selected cipher and writes
// the ciphertext to path + ".enc" with owner-only permissions.
func runEncrypt(path string) error {
	pass, err := getPassphrase()
	if err != nil {
		return cli.Wrap(err, "failed to read passphrase")
	}
	if pass == "" {
		return cli.Err("passphrase cannot be empty")
	}
	plaintext, err := os.ReadFile(path)
	if err != nil {
		return cli.Wrap(err, "failed to read file")
	}
	// ChaCha20-Poly1305 is the default; --aes switches to AES-256-GCM.
	encryptFn := crypt.Encrypt
	if encryptAES {
		encryptFn = crypt.EncryptAES
	}
	ciphertext, err := encryptFn(plaintext, []byte(pass))
	if err != nil {
		return cli.Wrap(err, "failed to encrypt")
	}
	dest := path + ".enc"
	if err := os.WriteFile(dest, ciphertext, 0o600); err != nil {
		return cli.Wrap(err, "failed to write encrypted file")
	}
	cli.Success(fmt.Sprintf("Encrypted %s -> %s", path, dest))
	return nil
}
// runDecrypt reverses runEncrypt: it decrypts the file at path with the
// selected cipher and writes the plaintext next to the input.
func runDecrypt(path string) error {
	pass, err := getPassphrase()
	if err != nil {
		return cli.Wrap(err, "failed to read passphrase")
	}
	if pass == "" {
		return cli.Err("passphrase cannot be empty")
	}
	ciphertext, err := os.ReadFile(path)
	if err != nil {
		return cli.Wrap(err, "failed to read file")
	}
	// ChaCha20-Poly1305 is the default; --aes switches to AES-256-GCM.
	decryptFn := crypt.Decrypt
	if encryptAES {
		decryptFn = crypt.DecryptAES
	}
	plaintext, err := decryptFn(ciphertext, []byte(pass))
	if err != nil {
		return cli.Wrap(err, "failed to decrypt")
	}
	// Strip the .enc suffix; if the input had none, append .dec so the
	// source file is never overwritten.
	dest := strings.TrimSuffix(path, ".enc")
	if dest == path {
		dest = path + ".dec"
	}
	if err := os.WriteFile(dest, plaintext, 0o600); err != nil {
		return cli.Wrap(err, "failed to write decrypted file")
	}
	cli.Success(fmt.Sprintf("Decrypted %s -> %s", path, dest))
	return nil
}

View file

@ -0,0 +1,74 @@
package crypt
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/crypt"
"golang.org/x/crypto/bcrypt"
)
// Hash command flags
var (
	hashBcrypt bool // --bcrypt/-b: use bcrypt instead of the default Argon2id
	hashVerify string // --verify: verify the input against this existing hash
)
// addHashCommand wires up 'crypt hash <input>'.
func addHashCommand(parent *cli.Command) {
	cmd := cli.NewCommand("hash", "Hash a password with Argon2id or bcrypt", "", func(c *cli.Command, args []string) error {
		return runHash(args[0])
	})
	cmd.Args = cli.ExactArgs(1)
	cli.BoolFlag(cmd, &hashBcrypt, "bcrypt", "b", false, "Use bcrypt instead of Argon2id")
	cli.StringFlag(cmd, &hashVerify, "verify", "", "", "Verify input against this hash")
	parent.AddCommand(cmd)
}
// runHash hashes input with Argon2id (default) or bcrypt (--bcrypt) and
// prints the result; with --verify it checks input against an existing hash.
func runHash(input string) error {
	// Verification mode takes precedence over hashing.
	if hashVerify != "" {
		return runHashVerify(input, hashVerify)
	}
	var (
		hash string
		err  error
	)
	if hashBcrypt {
		hash, err = crypt.HashBcrypt(input, bcrypt.DefaultCost)
	} else {
		hash, err = crypt.HashPassword(input)
	}
	if err != nil {
		return cli.Wrap(err, "failed to hash password")
	}
	fmt.Println(hash)
	return nil
}
func runHashVerify(input, hash string) error {
var match bool
var err error
if hashBcrypt {
match, err = crypt.VerifyBcrypt(input, hash)
} else {
match, err = crypt.VerifyPassword(input, hash)
}
if err != nil {
return cli.Wrap(err, "failed to verify hash")
}
if match {
cli.Success("Password matches hash")
return nil
}
cli.Error("Password does not match hash")
return cli.Err("hash verification failed")
}

View file

@ -0,0 +1,55 @@
package crypt
import (
"crypto/rand"
"encoding/base64"
"encoding/hex"
"fmt"
"github.com/host-uk/core/pkg/cli"
)
// Keygen command flags
var (
	keygenLength int // --length/-l: key length in bytes (1-1024, default 32)
	keygenHex bool // --hex: print the key hex-encoded (also the default encoding)
	keygenBase64 bool // --base64: print the key base64-encoded
)
// addKeygenCommand wires up 'crypt keygen'.
func addKeygenCommand(parent *cli.Command) {
	cmd := cli.NewCommand("keygen", "Generate a random cryptographic key", "", func(c *cli.Command, args []string) error {
		return runKeygen()
	})
	cli.IntFlag(cmd, &keygenLength, "length", "l", 32, "Key length in bytes")
	cli.BoolFlag(cmd, &keygenHex, "hex", "", false, "Output as hex string")
	cli.BoolFlag(cmd, &keygenBase64, "base64", "", false, "Output as base64 string")
	parent.AddCommand(cmd)
}
// runKeygen generates keygenLength cryptographically random bytes and
// prints them hex-encoded (the default and the --hex behaviour) or
// base64-encoded (--base64). The two encoding flags are mutually exclusive.
func runKeygen() error {
	if keygenHex && keygenBase64 {
		return cli.Err("--hex and --base64 are mutually exclusive")
	}
	if keygenLength <= 0 || keygenLength > 1024 {
		return cli.Err("key length must be between 1 and 1024 bytes")
	}
	key := make([]byte, keygenLength)
	if _, err := rand.Read(key); err != nil {
		return cli.Wrap(err, "failed to generate random key")
	}
	// Hex is both the explicit --hex behaviour and the default, so the
	// previous switch had two identical branches; a single base64 check
	// expresses the same logic without the duplication.
	if keygenBase64 {
		fmt.Println(base64.StdEncoding.EncodeToString(key))
	} else {
		fmt.Println(hex.EncodeToString(key))
	}
	return nil
}

View file

@ -15,7 +15,7 @@ import (
"strings" "strings"
"github.com/host-uk/core/pkg/cli" "github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/errors" core "github.com/host-uk/core/pkg/framework/core"
"github.com/host-uk/core/pkg/git" "github.com/host-uk/core/pkg/git"
"github.com/host-uk/core/pkg/i18n" "github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io" "github.com/host-uk/core/pkg/io"
@ -66,19 +66,19 @@ func runApply() error {
// Validate inputs // Validate inputs
if applyCommand == "" && applyScript == "" { if applyCommand == "" && applyScript == "" {
return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_command"), nil) return core.E("dev.apply", i18n.T("cmd.dev.apply.error.no_command"), nil)
} }
if applyCommand != "" && applyScript != "" { if applyCommand != "" && applyScript != "" {
return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.both_command_script"), nil) return core.E("dev.apply", i18n.T("cmd.dev.apply.error.both_command_script"), nil)
} }
if applyCommit && applyMessage == "" { if applyCommit && applyMessage == "" {
return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.commit_needs_message"), nil) return core.E("dev.apply", i18n.T("cmd.dev.apply.error.commit_needs_message"), nil)
} }
// Validate script exists // Validate script exists
if applyScript != "" { if applyScript != "" {
if !io.Local.IsFile(applyScript) { if !io.Local.IsFile(applyScript) {
return errors.E("dev.apply", "script not found: "+applyScript, nil) // Error mismatch? IsFile returns bool return core.E("dev.apply", "script not found: "+applyScript, nil) // Error mismatch? IsFile returns bool
} }
} }
@ -89,7 +89,7 @@ func runApply() error {
} }
if len(targetRepos) == 0 { if len(targetRepos) == 0 {
return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_repos"), nil) return core.E("dev.apply", i18n.T("cmd.dev.apply.error.no_repos"), nil)
} }
// Show plan // Show plan
@ -227,12 +227,12 @@ func getApplyTargetRepos() ([]*repos.Repo, error) {
// Load registry // Load registry
registryPath, err := repos.FindRegistry() registryPath, err := repos.FindRegistry()
if err != nil { if err != nil {
return nil, errors.E("dev.apply", "failed to find registry", err) return nil, core.E("dev.apply", "failed to find registry", err)
} }
registry, err := repos.LoadRegistry(registryPath) registry, err := repos.LoadRegistry(registryPath)
if err != nil { if err != nil {
return nil, errors.E("dev.apply", "failed to load registry", err) return nil, core.E("dev.apply", "failed to load registry", err)
} }
// If --repos specified, filter to those // If --repos specified, filter to those

View file

@ -2,7 +2,6 @@ package dev
import ( import (
"bytes" "bytes"
"context"
"go/ast" "go/ast"
"go/parser" "go/parser"
"go/token" "go/token"
@ -17,25 +16,6 @@ import (
"golang.org/x/text/language" "golang.org/x/text/language"
) )
// syncInternalToPublic handles the synchronization of internal packages to public-facing directories.
// This function is a placeholder for future implementation.
func syncInternalToPublic(ctx context.Context, publicDir string) error {
// 1. Clean public/internal
// 2. Copy relevant files from internal/ to public/internal/
// Usually just shared logic, not private stuff.
// For now, let's assume we copy specific safe packages
// Logic to be refined.
// Example migration of os calls:
// internalDirs, err := os.ReadDir(pkgDir) -> coreio.Local.List(pkgDir)
// os.Stat -> coreio.Local.IsFile (returns bool) or List for existence check
// os.MkdirAll -> coreio.Local.EnsureDir
// os.WriteFile -> coreio.Local.Write
return nil
}
// addSyncCommand adds the 'sync' command to the given parent command. // addSyncCommand adds the 'sync' command to the given parent command.
func addSyncCommand(parent *cli.Command) { func addSyncCommand(parent *cli.Command) {
syncCmd := &cli.Command{ syncCmd := &cli.Command{

View file

@ -53,7 +53,7 @@ func runWork(registryPath string, statusOnly, autoCommit bool) error {
if err := bundle.Start(ctx); err != nil { if err := bundle.Start(ctx); err != nil {
return err return err
} }
defer bundle.Stop(ctx) defer func() { _ = bundle.Stop(ctx) }()
// Load registry and get paths // Load registry and get paths
paths, names, err := func() ([]string, map[string]string, error) { paths, names, err := func() ([]string, map[string]string, error) {

View file

@ -176,7 +176,7 @@ func (s *Service) runWork(task TaskWork) error {
cli.Blank() cli.Blank()
cli.Print("Push all? [y/N] ") cli.Print("Push all? [y/N] ")
var answer string var answer string
cli.Scanln(&answer) _, _ = cli.Scanln(&answer)
if strings.ToLower(answer) != "y" { if strings.ToLower(answer) != "y" {
cli.Println("Aborted") cli.Println("Aborted")
return nil return nil

View file

@ -117,7 +117,7 @@ func scanRepoDocs(repo *repos.Repo) RepoDocInfo {
docsDir := filepath.Join(repo.Path, "docs") docsDir := filepath.Join(repo.Path, "docs")
// Check if directory exists by listing it // Check if directory exists by listing it
if _, err := io.Local.List(docsDir); err == nil { if _, err := io.Local.List(docsDir); err == nil {
filepath.WalkDir(docsDir, func(path string, d fs.DirEntry, err error) error { _ = filepath.WalkDir(docsDir, func(path string, d fs.DirEntry, err error) error {
if err != nil { if err != nil {
return nil return nil
} }

View file

@ -67,11 +67,11 @@ func addGoFmtCommand(parent *cli.Command) {
if fmtCheck { if fmtCheck {
output, err := execCmd.CombinedOutput() output, err := execCmd.CombinedOutput()
if err != nil { if err != nil {
os.Stderr.Write(output) _, _ = os.Stderr.Write(output)
return err return err
} }
if len(output) > 0 { if len(output) > 0 {
os.Stdout.Write(output) _, _ = os.Stdout.Write(output)
return cli.Err("files need formatting (use --fix)") return cli.Err("files need formatting (use --fix)")
} }
return nil return nil

View file

@ -154,7 +154,7 @@ func parseOverallCoverage(output string) float64 {
var total float64 var total float64
for _, m := range matches { for _, m := range matches {
var cov float64 var cov float64
fmt.Sscanf(m[1], "%f", &cov) _, _ = fmt.Sscanf(m[1], "%f", &cov)
total += cov total += cov
} }
return total / float64(len(matches)) return total / float64(len(matches))
@ -192,8 +192,8 @@ func addGoCovCommand(parent *cli.Command) {
return cli.Wrap(err, i18n.T("i18n.fail.create", "coverage file")) return cli.Wrap(err, i18n.T("i18n.fail.create", "coverage file"))
} }
covPath := covFile.Name() covPath := covFile.Name()
covFile.Close() _ = covFile.Close()
defer os.Remove(covPath) defer func() { _ = os.Remove(covPath) }()
cli.Print("%s %s\n", dimStyle.Render(i18n.Label("coverage")), i18n.ProgressSubject("run", "tests")) cli.Print("%s %s\n", dimStyle.Render(i18n.Label("coverage")), i18n.ProgressSubject("run", "tests"))
// Truncate package list if too long for display // Truncate package list if too long for display
@ -236,7 +236,7 @@ func addGoCovCommand(parent *cli.Command) {
parts := strings.Fields(lastLine) parts := strings.Fields(lastLine)
if len(parts) >= 3 { if len(parts) >= 3 {
covStr := strings.TrimSuffix(parts[len(parts)-1], "%") covStr := strings.TrimSuffix(parts[len(parts)-1], "%")
fmt.Sscanf(covStr, "%f", &totalCov) _, _ = fmt.Sscanf(covStr, "%f", &totalCov)
} }
} }
} }
@ -266,7 +266,7 @@ func addGoCovCommand(parent *cli.Command) {
cli.Print(" %s\n", dimStyle.Render("Open coverage.html in your browser")) cli.Print(" %s\n", dimStyle.Render("Open coverage.html in your browser"))
} }
if openCmd != nil { if openCmd != nil {
openCmd.Run() _ = openCmd.Run()
} }
} }
} }

View file

@ -223,7 +223,7 @@ func addGoWorkCommand(parent *cli.Command) {
func findGoModules(root string) []string { func findGoModules(root string) []string {
var modules []string var modules []string
filepath.Walk(root, func(path string, info os.FileInfo, err error) error { _ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err != nil { if err != nil {
return nil return nil
} }

View file

@ -218,7 +218,7 @@ func runPHPLogs(service string, follow bool) error {
if err != nil { if err != nil {
return cli.Err("%s: %w", i18n.T("i18n.fail.get", "logs"), err) return cli.Err("%s: %w", i18n.T("i18n.fail.get", "logs"), err)
} }
defer logsReader.Close() defer func() { _ = logsReader.Close() }()
// Handle interrupt // Handle interrupt
ctx, cancel := context.WithCancel(context.Background()) ctx, cancel := context.WithCancel(context.Background())

View file

@ -146,7 +146,7 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec {
pestBin := filepath.Join(r.dir, "vendor", "bin", "pest") pestBin := filepath.Join(r.dir, "vendor", "bin", "pest")
phpunitBin := filepath.Join(r.dir, "vendor", "bin", "phpunit") phpunitBin := filepath.Join(r.dir, "vendor", "bin", "phpunit")
cmd := "pest" var cmd string
if _, err := os.Stat(pestBin); err == nil { if _, err := os.Stat(pestBin); err == nil {
cmd = pestBin cmd = pestBin
} else if _, err := os.Stat(phpunitBin); err == nil { } else if _, err := os.Stat(phpunitBin); err == nil {

View file

@ -230,7 +230,7 @@ func TestServeProduction_Bad(t *testing.T) {
func TestShell_Bad(t *testing.T) { func TestShell_Bad(t *testing.T) {
t.Run("fails without container ID", func(t *testing.T) { t.Run("fails without container ID", func(t *testing.T) {
err := Shell(nil, "") err := Shell(context.TODO(), "")
assert.Error(t, err) assert.Error(t, err)
assert.Contains(t, err.Error(), "container ID is required") assert.Contains(t, err.Error(), "container ID is required")
}) })

View file

@ -225,7 +225,7 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) {
Status: "queued", Status: "queued",
CreatedAt: time.Now(), CreatedAt: time.Now(),
} }
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -240,11 +240,11 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) {
t.Run("triggers deployment with force", func(t *testing.T) { t.Run("triggers deployment with force", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
var body map[string]interface{} var body map[string]interface{}
json.NewDecoder(r.Body).Decode(&body) _ = json.NewDecoder(r.Body).Decode(&body)
assert.Equal(t, true, body["force"]) assert.Equal(t, true, body["force"])
resp := CoolifyDeployment{ID: "dep-456", Status: "queued"} resp := CoolifyDeployment{ID: "dep-456", Status: "queued"}
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -256,7 +256,7 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) {
t.Run("handles minimal response", func(t *testing.T) { t.Run("handles minimal response", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// Return an invalid JSON response to trigger the fallback // Return an invalid JSON response to trigger the fallback
w.Write([]byte("not json")) _, _ = w.Write([]byte("not json"))
})) }))
defer server.Close() defer server.Close()
@ -273,7 +273,7 @@ func TestCoolifyClient_TriggerDeploy_Bad(t *testing.T) {
t.Run("fails on HTTP error", func(t *testing.T) { t.Run("fails on HTTP error", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusInternalServerError) w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(map[string]string{"message": "Internal error"}) _ = json.NewEncoder(w).Encode(map[string]string{"message": "Internal error"})
})) }))
defer server.Close() defer server.Close()
@ -297,7 +297,7 @@ func TestCoolifyClient_GetDeployment_Good(t *testing.T) {
CommitSHA: "abc123", CommitSHA: "abc123",
Branch: "main", Branch: "main",
} }
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -315,7 +315,7 @@ func TestCoolifyClient_GetDeployment_Bad(t *testing.T) {
t.Run("fails on 404", func(t *testing.T) { t.Run("fails on 404", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNotFound) w.WriteHeader(http.StatusNotFound)
json.NewEncoder(w).Encode(map[string]string{"error": "Not found"}) _ = json.NewEncoder(w).Encode(map[string]string{"error": "Not found"})
})) }))
defer server.Close() defer server.Close()
@ -337,7 +337,7 @@ func TestCoolifyClient_ListDeployments_Good(t *testing.T) {
{ID: "dep-1", Status: "finished"}, {ID: "dep-1", Status: "finished"},
{ID: "dep-2", Status: "failed"}, {ID: "dep-2", Status: "failed"},
} }
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -353,7 +353,7 @@ func TestCoolifyClient_ListDeployments_Good(t *testing.T) {
t.Run("lists without limit", func(t *testing.T) { t.Run("lists without limit", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "", r.URL.Query().Get("limit")) assert.Equal(t, "", r.URL.Query().Get("limit"))
json.NewEncoder(w).Encode([]CoolifyDeployment{}) _ = json.NewEncoder(w).Encode([]CoolifyDeployment{})
})) }))
defer server.Close() defer server.Close()
@ -370,14 +370,14 @@ func TestCoolifyClient_Rollback_Good(t *testing.T) {
assert.Equal(t, "POST", r.Method) assert.Equal(t, "POST", r.Method)
var body map[string]string var body map[string]string
json.NewDecoder(r.Body).Decode(&body) _ = json.NewDecoder(r.Body).Decode(&body)
assert.Equal(t, "dep-old", body["deployment_id"]) assert.Equal(t, "dep-old", body["deployment_id"])
resp := CoolifyDeployment{ resp := CoolifyDeployment{
ID: "dep-new", ID: "dep-new",
Status: "rolling_back", Status: "rolling_back",
} }
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -402,7 +402,7 @@ func TestCoolifyClient_GetApp_Good(t *testing.T) {
FQDN: "https://myapp.example.com", FQDN: "https://myapp.example.com",
Status: "running", Status: "running",
} }
json.NewEncoder(w).Encode(resp) _ = json.NewEncoder(w).Encode(resp)
})) }))
defer server.Close() defer server.Close()
@ -433,7 +433,7 @@ func TestCoolifyClient_ParseError(t *testing.T) {
t.Run("parses message field", func(t *testing.T) { t.Run("parses message field", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusBadRequest) w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"message": "Bad request message"}) _ = json.NewEncoder(w).Encode(map[string]string{"message": "Bad request message"})
})) }))
defer server.Close() defer server.Close()
@ -447,7 +447,7 @@ func TestCoolifyClient_ParseError(t *testing.T) {
t.Run("parses error field", func(t *testing.T) { t.Run("parses error field", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusBadRequest) w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(map[string]string{"error": "Error message"}) _ = json.NewEncoder(w).Encode(map[string]string{"error": "Error message"})
})) }))
defer server.Close() defer server.Close()
@ -461,7 +461,7 @@ func TestCoolifyClient_ParseError(t *testing.T) {
t.Run("returns raw body when no JSON fields", func(t *testing.T) { t.Run("returns raw body when no JSON fields", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusInternalServerError) w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("Raw error message")) _, _ = w.Write([]byte("Raw error message"))
})) }))
defer server.Close() defer server.Close()
@ -486,12 +486,12 @@ COOLIFY_TOKEN=file-token`
origURL := os.Getenv("COOLIFY_URL") origURL := os.Getenv("COOLIFY_URL")
origToken := os.Getenv("COOLIFY_TOKEN") origToken := os.Getenv("COOLIFY_TOKEN")
defer func() { defer func() {
os.Setenv("COOLIFY_URL", origURL) _ = os.Setenv("COOLIFY_URL", origURL)
os.Setenv("COOLIFY_TOKEN", origToken) _ = os.Setenv("COOLIFY_TOKEN", origToken)
}() }()
os.Setenv("COOLIFY_URL", "https://from-env.com") _ = os.Setenv("COOLIFY_URL", "https://from-env.com")
os.Setenv("COOLIFY_TOKEN", "env-token") _ = os.Setenv("COOLIFY_TOKEN", "env-token")
config, err := LoadCoolifyConfig(dir) config, err := LoadCoolifyConfig(dir)
assert.NoError(t, err) assert.NoError(t, err)

View file

@ -172,7 +172,7 @@ func needsRedis(dir string) bool {
if err != nil { if err != nil {
return false return false
} }
defer file.Close() defer func() { _ = file.Close() }()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {
@ -235,7 +235,7 @@ func GetLaravelAppName(dir string) string {
if err != nil { if err != nil {
return "" return ""
} }
defer file.Close() defer func() { _ = file.Close() }()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {
@ -258,7 +258,7 @@ func GetLaravelAppURL(dir string) string {
if err != nil { if err != nil {
return "" return ""
} }
defer file.Close() defer func() { _ = file.Close() }()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {

View file

@ -197,7 +197,7 @@ return [
octanePath := filepath.Join(configDir, "octane.php") octanePath := filepath.Join(configDir, "octane.php")
err = os.WriteFile(octanePath, []byte("<?php return [];"), 0000) err = os.WriteFile(octanePath, []byte("<?php return [];"), 0000)
require.NoError(t, err) require.NoError(t, err)
defer os.Chmod(octanePath, 0644) // Clean up defer func() { _ = os.Chmod(octanePath, 0644) }() // Clean up
// Should return true (assume frankenphp if unreadable) // Should return true (assume frankenphp if unreadable)
assert.True(t, IsFrankenPHPProject(dir)) assert.True(t, IsFrankenPHPProject(dir))

View file

@ -506,7 +506,7 @@ func TestUpdatePackages_Good(t *testing.T) {
err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644) err := os.WriteFile(filepath.Join(projectDir, "composer.json"), []byte(`{"name":"test/project"}`), 0644)
require.NoError(t, err) require.NoError(t, err)
err = UpdatePackages(projectDir, []string{"vendor/package"}) _ = UpdatePackages(projectDir, []string{"vendor/package"})
// This will fail because composer update needs real dependencies // This will fail because composer update needs real dependencies
// but it validates the command runs // but it validates the command runs
}) })

View file

@ -195,7 +195,7 @@ func (d *DevServer) Start(ctx context.Context, opts Options) error {
if len(startErrors) > 0 { if len(startErrors) > 0 {
// Stop any services that did start // Stop any services that did start
for _, svc := range d.services { for _, svc := range d.services {
svc.Stop() _ = svc.Stop()
} }
return cli.Err("failed to start services: %v", startErrors) return cli.Err("failed to start services: %v", startErrors)
} }
@ -296,7 +296,7 @@ func (d *DevServer) unifiedLogs(follow bool) (io.ReadCloser, error) {
if err != nil { if err != nil {
// Close any readers we already opened // Close any readers we already opened
for _, r := range readers { for _, r := range readers {
r.Close() _ = r.Close()
} }
return nil, cli.Err("failed to get logs for %s: %v", svc.Name(), err) return nil, cli.Err("failed to get logs for %s: %v", svc.Name(), err)
} }

View file

@ -165,13 +165,13 @@ func TestMultiServiceReader_Good(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
file1, err := os.CreateTemp(dir, "log1-*.log") file1, err := os.CreateTemp(dir, "log1-*.log")
require.NoError(t, err) require.NoError(t, err)
file1.WriteString("test1") _, _ = file1.WriteString("test1")
file1.Seek(0, 0) _, _ = file1.Seek(0, 0)
file2, err := os.CreateTemp(dir, "log2-*.log") file2, err := os.CreateTemp(dir, "log2-*.log")
require.NoError(t, err) require.NoError(t, err)
file2.WriteString("test2") _, _ = file2.WriteString("test2")
file2.Seek(0, 0) _, _ = file2.Seek(0, 0)
// Create mock services // Create mock services
services := []Service{ services := []Service{
@ -202,8 +202,8 @@ func TestMultiServiceReader_Read_Good(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
file1, err := os.CreateTemp(dir, "log-*.log") file1, err := os.CreateTemp(dir, "log-*.log")
require.NoError(t, err) require.NoError(t, err)
file1.WriteString("log content") _, _ = file1.WriteString("log content")
file1.Seek(0, 0) _, _ = file1.Seek(0, 0)
services := []Service{ services := []Service{
&FrankenPHPService{baseService: baseService{name: "TestService"}}, &FrankenPHPService{baseService: baseService{name: "TestService"}},
@ -224,7 +224,7 @@ func TestMultiServiceReader_Read_Good(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
file1, err := os.CreateTemp(dir, "log-*.log") file1, err := os.CreateTemp(dir, "log-*.log")
require.NoError(t, err) require.NoError(t, err)
file1.Close() // Empty file _ = file1.Close() // Empty file
file1, err = os.Open(file1.Name()) file1, err = os.Open(file1.Name())
require.NoError(t, err) require.NoError(t, err)
@ -355,7 +355,7 @@ func TestDevServer_Logs_Good(t *testing.T) {
reader, err := server.Logs("TestService", false) reader, err := server.Logs("TestService", false)
assert.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, reader) assert.NotNil(t, reader)
reader.Close() _ = reader.Close()
}) })
} }
@ -462,7 +462,7 @@ func TestMultiServiceReader_CloseError(t *testing.T) {
file1, err := os.CreateTemp(dir, "log-*.log") file1, err := os.CreateTemp(dir, "log-*.log")
require.NoError(t, err) require.NoError(t, err)
file1Name := file1.Name() file1Name := file1.Name()
file1.Close() _ = file1.Close()
// Reopen for reading // Reopen for reading
file1, err = os.Open(file1Name) file1, err = os.Open(file1Name)
@ -489,7 +489,7 @@ func TestMultiServiceReader_FollowMode(t *testing.T) {
file1, err := os.CreateTemp(dir, "log-*.log") file1, err := os.CreateTemp(dir, "log-*.log")
require.NoError(t, err) require.NoError(t, err)
file1Name := file1.Name() file1Name := file1.Name()
file1.Close() _ = file1.Close()
// Reopen for reading (empty file) // Reopen for reading (empty file)
file1, err = os.Open(file1Name) file1, err = os.Open(file1Name)
@ -520,7 +520,7 @@ func TestMultiServiceReader_FollowMode(t *testing.T) {
// Also acceptable - follow mode is waiting // Also acceptable - follow mode is waiting
} }
reader.Close() _ = reader.Close()
}) })
} }

View file

@ -238,7 +238,7 @@ func TestFormat_Bad(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
opts := FormatOptions{Dir: dir} opts := FormatOptions{Dir: dir}
err := Format(nil, opts) err := Format(context.TODO(), opts)
assert.Error(t, err) assert.Error(t, err)
assert.Contains(t, err.Error(), "no formatter found") assert.Contains(t, err.Error(), "no formatter found")
}) })
@ -247,7 +247,7 @@ func TestFormat_Bad(t *testing.T) {
// When no formatter found in cwd, should still fail with "no formatter found" // When no formatter found in cwd, should still fail with "no formatter found"
opts := FormatOptions{Dir: ""} opts := FormatOptions{Dir: ""}
err := Format(nil, opts) err := Format(context.TODO(), opts)
// May or may not find a formatter depending on cwd, but function should not panic // May or may not find a formatter depending on cwd, but function should not panic
if err != nil { if err != nil {
// Expected - no formatter in cwd // Expected - no formatter in cwd
@ -274,7 +274,7 @@ func TestAnalyse_Bad(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
opts := AnalyseOptions{Dir: dir} opts := AnalyseOptions{Dir: dir}
err := Analyse(nil, opts) err := Analyse(context.TODO(), opts)
assert.Error(t, err) assert.Error(t, err)
assert.Contains(t, err.Error(), "no static analyser found") assert.Contains(t, err.Error(), "no static analyser found")
}) })
@ -282,7 +282,7 @@ func TestAnalyse_Bad(t *testing.T) {
t.Run("uses cwd when dir not specified", func(t *testing.T) { t.Run("uses cwd when dir not specified", func(t *testing.T) {
opts := AnalyseOptions{Dir: ""} opts := AnalyseOptions{Dir: ""}
err := Analyse(nil, opts) err := Analyse(context.TODO(), opts)
// May or may not find an analyser depending on cwd // May or may not find an analyser depending on cwd
if err != nil { if err != nil {
assert.Contains(t, err.Error(), "no static analyser") assert.Contains(t, err.Error(), "no static analyser")

View file

@ -123,7 +123,7 @@ func (s *baseService) startProcess(ctx context.Context, cmdName string, args []s
setSysProcAttr(s.cmd) setSysProcAttr(s.cmd)
if err := s.cmd.Start(); err != nil { if err := s.cmd.Start(); err != nil {
logFile.Close() _ = logFile.Close()
s.lastError = err s.lastError = err
return cli.WrapVerb(err, "start", s.name) return cli.WrapVerb(err, "start", s.name)
} }
@ -140,7 +140,7 @@ func (s *baseService) startProcess(ctx context.Context, cmdName string, args []s
s.lastError = err s.lastError = err
} }
if s.logFile != nil { if s.logFile != nil {
s.logFile.Close() _ = s.logFile.Close()
} }
s.mu.Unlock() s.mu.Unlock()
}() }()
@ -157,12 +157,12 @@ func (s *baseService) stopProcess() error {
} }
// Send termination signal to process (group on Unix) // Send termination signal to process (group on Unix)
signalProcessGroup(s.cmd, termSignal()) _ = signalProcessGroup(s.cmd, termSignal())
// Wait for graceful shutdown with timeout // Wait for graceful shutdown with timeout
done := make(chan struct{}) done := make(chan struct{})
go func() { go func() {
s.cmd.Wait() _ = s.cmd.Wait()
close(done) close(done)
}() }()
@ -171,7 +171,7 @@ func (s *baseService) stopProcess() error {
// Process exited gracefully // Process exited gracefully
case <-time.After(5 * time.Second): case <-time.After(5 * time.Second):
// Force kill // Force kill
signalProcessGroup(s.cmd, killSignal()) _ = signalProcessGroup(s.cmd, killSignal())
} }
s.running = false s.running = false
@ -333,7 +333,7 @@ func (s *HorizonService) Stop() error {
// Horizon has its own terminate command // Horizon has its own terminate command
cmd := exec.Command("php", "artisan", "horizon:terminate") cmd := exec.Command("php", "artisan", "horizon:terminate")
cmd.Dir = s.dir cmd.Dir = s.dir
cmd.Run() // Ignore errors, will also kill via signal _ = cmd.Run() // Ignore errors, will also kill via signal
return s.stopProcess() return s.stopProcess()
} }
@ -427,7 +427,7 @@ func (s *RedisService) Start(ctx context.Context) error {
func (s *RedisService) Stop() error { func (s *RedisService) Stop() error {
// Try graceful shutdown via redis-cli // Try graceful shutdown via redis-cli
cmd := exec.Command("redis-cli", "-p", cli.Sprintf("%d", s.port), "shutdown", "nosave") cmd := exec.Command("redis-cli", "-p", cli.Sprintf("%d", s.port), "shutdown", "nosave")
cmd.Run() // Ignore errors _ = cmd.Run() // Ignore errors
return s.stopProcess() return s.stopProcess()
} }

View file

@ -66,7 +66,7 @@ func TestBaseService_Logs_Good(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, reader) assert.NotNil(t, reader)
reader.Close() _ = reader.Close()
}) })
t.Run("returns tail reader in follow mode", func(t *testing.T) { t.Run("returns tail reader in follow mode", func(t *testing.T) {
@ -83,7 +83,7 @@ func TestBaseService_Logs_Good(t *testing.T) {
// Verify it's a tailReader by checking it implements ReadCloser // Verify it's a tailReader by checking it implements ReadCloser
_, ok := reader.(*tailReader) _, ok := reader.(*tailReader)
assert.True(t, ok) assert.True(t, ok)
reader.Close() _ = reader.Close()
}) })
} }
@ -113,7 +113,7 @@ func TestTailReader_Good(t *testing.T) {
file, err := os.Open(logPath) file, err := os.Open(logPath)
require.NoError(t, err) require.NoError(t, err)
defer file.Close() defer func() { _ = file.Close() }()
reader := newTailReader(file) reader := newTailReader(file)
assert.NotNil(t, reader) assert.NotNil(t, reader)
@ -147,7 +147,7 @@ func TestTailReader_Good(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
reader := newTailReader(file) reader := newTailReader(file)
reader.Close() _ = reader.Close()
buf := make([]byte, 100) buf := make([]byte, 100)
n, _ := reader.Read(buf) n, _ := reader.Read(buf)

View file

@ -224,7 +224,7 @@ func runPkgOutdated() error {
} }
// Fetch updates // Fetch updates
exec.Command("git", "-C", repoPath, "fetch", "--quiet").Run() _ = exec.Command("git", "-C", repoPath, "fetch", "--quiet").Run()
// Check if behind // Check if behind
cmd := exec.Command("git", "-C", repoPath, "rev-list", "--count", "HEAD..@{u}") cmd := exec.Command("git", "-C", repoPath, "rev-list", "--count", "HEAD..@{u}")

View file

@ -0,0 +1,33 @@
// Package plugin provides CLI commands for managing core plugins.
//
// Commands:
// - install: Install a plugin from GitHub
// - list: List installed plugins
// - info: Show detailed plugin information
// - update: Update a plugin or all plugins
// - remove: Remove an installed plugin
package plugin
import (
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
)
// init registers the plugin command family with the root CLI at
// program startup via the shared command-registration hook.
func init() {
	cli.RegisterCommands(AddPluginCommands)
}
// AddPluginCommands registers the 'plugin' command and all subcommands.
func AddPluginCommands(root *cli.Command) {
	cmd := &cli.Command{
		Use:   "plugin",
		Short: i18n.T("Manage plugins"),
	}
	root.AddCommand(cmd)

	// Attach every subcommand; order here is the order shown in help output.
	for _, attach := range []func(*cli.Command){
		addInstallCommand,
		addListCommand,
		addInfoCommand,
		addUpdateCommand,
		addRemoveCommand,
	} {
		attach(cmd)
	}
}

View file

@ -0,0 +1,86 @@
package plugin
import (
"fmt"
"path/filepath"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
"github.com/host-uk/core/pkg/plugin"
)
// addInfoCommand wires the `info <name>` subcommand onto parent.
func addInfoCommand(parent *cli.Command) {
	cmd := cli.NewCommand(
		"info <name>",
		i18n.T("Show detailed plugin information"),
		"",
		func(_ *cli.Command, args []string) error {
			return runInfo(args[0])
		},
	)
	cmd.Args = cli.ExactArgs(1)
	parent.AddCommand(cmd)
}
// runInfo prints the registry record for the named plugin, followed by
// any extended fields available from its manifest (which may fail to
// load without aborting the command).
func runInfo(name string) error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	cfg, ok := registry.Get(name)
	if !ok {
		return fmt.Errorf("plugin not found: %s", name)
	}

	// Try to load the manifest for extended information.
	manifest, manifestErr := plugin.NewLoader(io.Local, basePath).LoadPlugin(name)

	fmt.Println()
	cli.Label("Name", cfg.Name)
	cli.Label("Version", cfg.Version)
	cli.Label("Source", cfg.Source)
	state := "disabled"
	if cfg.Enabled {
		state = "enabled"
	}
	cli.Label("Status", state)
	cli.Label("Installed", cfg.InstalledAt)
	cli.Label("Path", filepath.Join(basePath, name))

	if manifestErr == nil && manifest != nil {
		// Optional manifest fields: print only the ones that are set.
		for _, field := range []struct{ label, value string }{
			{"Description", manifest.Description},
			{"Author", manifest.Author},
			{"Entrypoint", manifest.Entrypoint},
			{"Min Version", manifest.MinVersion},
		} {
			if field.value != "" {
				cli.Label(field.label, field.value)
			}
		}
		// First dependency carries the label; the rest align under it.
		for i, dep := range manifest.Dependencies {
			if i == 0 {
				cli.Label("Dependencies", dep)
			} else {
				fmt.Printf(" %s\n", dep)
			}
		}
	}
	fmt.Println()
	return nil
}

View file

@ -0,0 +1,61 @@
package plugin
import (
"context"
"os"
"path/filepath"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
"github.com/host-uk/core/pkg/plugin"
)
// addInstallCommand wires the `install <source>` subcommand onto parent.
func addInstallCommand(parent *cli.Command) {
	cmd := cli.NewCommand(
		"install <source>",
		i18n.T("Install a plugin from GitHub"),
		i18n.T("Install a plugin from a GitHub repository.\n\nSource format: org/repo or org/repo@version"),
		func(_ *cli.Command, args []string) error {
			return runInstall(args[0])
		},
	)
	cmd.Args = cli.ExactArgs(1)
	cmd.Example = " core plugin install host-uk/core-plugin-example\n core plugin install host-uk/core-plugin-example@v1.0.0"
	parent.AddCommand(cmd)
}
// runInstall downloads and registers a plugin from a GitHub source
// spec (org/repo or org/repo@version).
func runInstall(source string) error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	installer := plugin.NewInstaller(io.Local, registry)
	cli.Dim("Installing plugin from " + source + "...")
	if installErr := installer.Install(context.Background(), source); installErr != nil {
		return installErr
	}

	// Report success using the bare repo name from the parsed source.
	_, repo, _, _ := plugin.ParseSource(source)
	cli.Success("Plugin " + repo + " installed successfully")
	return nil
}
// pluginBasePath returns the default plugin directory (~/.core/plugins/).
func pluginBasePath() (string, error) {
	dir, err := os.UserHomeDir()
	if err != nil {
		return "", cli.Wrap(err, "failed to determine home directory")
	}
	return filepath.Join(dir, ".core", "plugins"), nil
}

View file

@ -0,0 +1,57 @@
package plugin
import (
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
"github.com/host-uk/core/pkg/plugin"
)
// addListCommand wires the `list` subcommand onto parent.
func addListCommand(parent *cli.Command) {
	cmd := cli.NewCommand(
		"list",
		i18n.T("List installed plugins"),
		"",
		func(_ *cli.Command, _ []string) error {
			return runList()
		},
	)
	parent.AddCommand(cmd)
}
// runList renders a table of every installed plugin, or a dim notice
// when none are installed.
func runList() error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	installed := registry.List()
	if len(installed) == 0 {
		cli.Dim("No plugins installed")
		return nil
	}

	table := cli.NewTable("Name", "Version", "Source", "Status")
	for _, p := range installed {
		state := "disabled"
		if p.Enabled {
			state = "enabled"
		}
		table.AddRow(p.Name, p.Version, p.Source, state)
	}

	fmt.Println()
	table.Render()
	fmt.Println()
	cli.Dim(fmt.Sprintf("%d plugin(s) installed", len(installed)))
	return nil
}

View file

@ -0,0 +1,48 @@
package plugin
import (
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
"github.com/host-uk/core/pkg/plugin"
)
// addRemoveCommand wires the `remove <name>` subcommand onto parent.
func addRemoveCommand(parent *cli.Command) {
	cmd := cli.NewCommand(
		"remove <name>",
		i18n.T("Remove an installed plugin"),
		"",
		func(_ *cli.Command, args []string) error {
			return runRemove(args[0])
		},
	)
	cmd.Args = cli.ExactArgs(1)
	parent.AddCommand(cmd)
}
// runRemove deletes the named plugin after interactive confirmation.
//
// The registry is checked first so the user is never prompted to
// confirm removal of a plugin that is not installed — previously the
// "not found" error from the installer only surfaced after the prompt.
func runRemove(name string) error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	// Fail fast on unknown plugins rather than prompting for a no-op.
	if _, ok := registry.Get(name); !ok {
		return cli.Err("plugin not found: %s", name)
	}

	if !cli.Confirm("Remove plugin " + name + "?") {
		cli.Dim("Cancelled")
		return nil
	}

	installer := plugin.NewInstaller(io.Local, registry)
	if err := installer.Remove(name); err != nil {
		return err
	}

	cli.Success("Plugin " + name + " removed")
	return nil
}

View file

@ -0,0 +1,94 @@
package plugin
import (
"context"
"fmt"
"github.com/host-uk/core/pkg/cli"
"github.com/host-uk/core/pkg/i18n"
"github.com/host-uk/core/pkg/io"
"github.com/host-uk/core/pkg/plugin"
)
var updateAll bool
// addUpdateCommand wires the `update [name]` subcommand onto parent,
// including the --all/-a flag bound to the package-level updateAll.
func addUpdateCommand(parent *cli.Command) {
	cmd := cli.NewCommand(
		"update [name]",
		i18n.T("Update a plugin or all plugins"),
		i18n.T("Update a specific plugin to the latest version, or use --all to update all installed plugins."),
		func(_ *cli.Command, args []string) error {
			switch {
			case updateAll:
				return runUpdateAll()
			case len(args) == 0:
				return fmt.Errorf("plugin name required (or use --all)")
			default:
				return runUpdate(args[0])
			}
		},
	)
	cli.BoolFlag(cmd, &updateAll, "all", "a", false, i18n.T("Update all installed plugins"))
	parent.AddCommand(cmd)
}
// runUpdate updates a single named plugin to its latest version.
func runUpdate(name string) error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	installer := plugin.NewInstaller(io.Local, registry)
	cli.Dim("Updating " + name + "...")
	if updateErr := installer.Update(context.Background(), name); updateErr != nil {
		return updateErr
	}

	cli.Success("Plugin " + name + " updated successfully")
	return nil
}
// runUpdateAll attempts to update every installed plugin, continuing
// past individual failures and printing an updated/failed summary.
func runUpdateAll() error {
	basePath, err := pluginBasePath()
	if err != nil {
		return err
	}

	registry := plugin.NewRegistry(io.Local, basePath)
	if err := registry.Load(); err != nil {
		return err
	}

	installed := registry.List()
	if len(installed) == 0 {
		cli.Dim("No plugins installed")
		return nil
	}

	installer := plugin.NewInstaller(io.Local, registry)
	ctx := context.Background()
	updated, failed := 0, 0
	for _, p := range installed {
		cli.Dim("Updating " + p.Name + "...")
		// A failure on one plugin must not stop the rest.
		if updateErr := installer.Update(ctx, p.Name); updateErr != nil {
			cli.Errorf("Failed to update %s: %v", p.Name, updateErr)
			failed++
			continue
		}
		cli.Success(p.Name + " updated")
		updated++
	}

	fmt.Println()
	cli.Dim(fmt.Sprintf("%d updated, %d failed", updated, failed))
	return nil
}

View file

@ -35,7 +35,7 @@ func runCollections(cmd *cobra.Command, args []string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to connect to Qdrant: %w", err) return fmt.Errorf("failed to connect to Qdrant: %w", err)
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
// Handle delete // Handle delete
if deleteCollection != "" { if deleteCollection != "" {

View file

@ -43,10 +43,10 @@ func runIngest(cmd *cobra.Command, args []string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to connect to Qdrant: %w", err) return fmt.Errorf("failed to connect to Qdrant: %w", err)
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
if err := qdrantClient.HealthCheck(ctx); err != nil { if err := qdrantClient.HealthCheck(ctx); err != nil {
return fmt.Errorf("Qdrant health check failed: %w", err) return fmt.Errorf("qdrant health check failed: %w", err)
} }
// Connect to Ollama // Connect to Ollama
@ -122,10 +122,10 @@ func IngestDirectory(ctx context.Context, directory, collectionName string, recr
if err != nil { if err != nil {
return err return err
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
if err := qdrantClient.HealthCheck(ctx); err != nil { if err := qdrantClient.HealthCheck(ctx); err != nil {
return fmt.Errorf("Qdrant health check failed: %w", err) return fmt.Errorf("qdrant health check failed: %w", err)
} }
ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig())
@ -152,10 +152,10 @@ func IngestFile(ctx context.Context, filePath, collectionName string) (int, erro
if err != nil { if err != nil {
return 0, err return 0, err
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
if err := qdrantClient.HealthCheck(ctx); err != nil { if err := qdrantClient.HealthCheck(ctx); err != nil {
return 0, fmt.Errorf("Qdrant health check failed: %w", err) return 0, fmt.Errorf("qdrant health check failed: %w", err)
} }
ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig())

View file

@ -38,7 +38,7 @@ func runQuery(cmd *cobra.Command, args []string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to connect to Qdrant: %w", err) return fmt.Errorf("failed to connect to Qdrant: %w", err)
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
// Connect to Ollama // Connect to Ollama
ollamaClient, err := rag.NewOllamaClient(rag.OllamaConfig{ ollamaClient, err := rag.NewOllamaClient(rag.OllamaConfig{
@ -86,7 +86,7 @@ func QueryDocs(ctx context.Context, question, collectionName string, topK int) (
if err != nil { if err != nil {
return nil, err return nil, err
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig())
if err != nil { if err != nil {

View file

@ -41,8 +41,8 @@ paths:
` `
basePath := filepath.Join(tmpDir, "base.yaml") basePath := filepath.Join(tmpDir, "base.yaml")
revPath := filepath.Join(tmpDir, "rev.yaml") revPath := filepath.Join(tmpDir, "rev.yaml")
os.WriteFile(basePath, []byte(baseSpec), 0644) _ = os.WriteFile(basePath, []byte(baseSpec), 0644)
os.WriteFile(revPath, []byte(revSpec), 0644) _ = os.WriteFile(revPath, []byte(revSpec), 0644)
result, err := Diff(basePath, revPath) result, err := Diff(basePath, revPath)
if err != nil { if err != nil {
@ -88,8 +88,8 @@ paths:
` `
basePath := filepath.Join(tmpDir, "base.yaml") basePath := filepath.Join(tmpDir, "base.yaml")
revPath := filepath.Join(tmpDir, "rev.yaml") revPath := filepath.Join(tmpDir, "rev.yaml")
os.WriteFile(basePath, []byte(baseSpec), 0644) _ = os.WriteFile(basePath, []byte(baseSpec), 0644)
os.WriteFile(revPath, []byte(revSpec), 0644) _ = os.WriteFile(revPath, []byte(revSpec), 0644)
result, err := Diff(basePath, revPath) result, err := Diff(basePath, revPath)
if err != nil { if err != nil {

View file

@ -196,7 +196,7 @@ func isValidHexColor(color string) bool {
return false return false
} }
for _, c := range strings.ToLower(color) { for _, c := range strings.ToLower(color) {
if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f')) { if (c < '0' || c > '9') && (c < 'a' || c > 'f') {
return false return false
} }
} }

View file

@ -1,6 +1,7 @@
package updater package updater
import ( import (
"context"
"fmt" "fmt"
"os" "os"
"runtime" "runtime"
@ -142,7 +143,7 @@ func handleDevUpdate(currentVersion string) error {
client := NewGithubClient() client := NewGithubClient()
// Fetch the dev release directly by tag // Fetch the dev release directly by tag
release, err := client.GetLatestRelease(nil, repoOwner, repoName, "beta") release, err := client.GetLatestRelease(context.TODO(), repoOwner, repoName, "beta")
if err != nil { if err != nil {
// Try fetching the "dev" tag directly // Try fetching the "dev" tag directly
return handleDevTagUpdate(currentVersion) return handleDevTagUpdate(currentVersion)

View file

@ -36,10 +36,8 @@ func spawnWatcher() error {
// watchAndRestart waits for the given PID to exit, then restarts the binary. // watchAndRestart waits for the given PID to exit, then restarts the binary.
func watchAndRestart(pid int) error { func watchAndRestart(pid int) error {
// Wait for the parent process to die // Wait for the parent process to die
for { for isProcessRunning(pid) {
if !isProcessRunning(pid) {
break
}
time.Sleep(100 * time.Millisecond) time.Sleep(100 * time.Millisecond)
} }

View file

@ -36,7 +36,7 @@ func GetLatestUpdateFromURL(baseURL string) (*GenericUpdateInfo, error) {
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to fetch latest.json: %w", err) return nil, fmt.Errorf("failed to fetch latest.json: %w", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("failed to fetch latest.json: status code %d", resp.StatusCode) return nil, fmt.Errorf("failed to fetch latest.json: status code %d", resp.StatusCode)

View file

@ -18,7 +18,7 @@ func TestGetLatestUpdateFromURL(t *testing.T) {
{ {
name: "Valid latest.json", name: "Valid latest.json",
handler: func(w http.ResponseWriter, r *http.Request) { handler: func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"}`) _, _ = fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"}`)
}, },
expectedVersion: "v1.1.0", expectedVersion: "v1.1.0",
expectedURL: "http://example.com/release.zip", expectedURL: "http://example.com/release.zip",
@ -26,21 +26,21 @@ func TestGetLatestUpdateFromURL(t *testing.T) {
{ {
name: "Invalid JSON", name: "Invalid JSON",
handler: func(w http.ResponseWriter, r *http.Request) { handler: func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"`) // Missing closing brace _, _ = fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"`) // Missing closing brace
}, },
expectError: true, expectError: true,
}, },
{ {
name: "Missing version", name: "Missing version",
handler: func(w http.ResponseWriter, r *http.Request) { handler: func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, `{"url": "http://example.com/release.zip"}`) _, _ = fmt.Fprintln(w, `{"url": "http://example.com/release.zip"}`)
}, },
expectError: true, expectError: true,
}, },
{ {
name: "Missing URL", name: "Missing URL",
handler: func(w http.ResponseWriter, r *http.Request) { handler: func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, `{"version": "v1.1.0"}`) _, _ = fmt.Fprintln(w, `{"version": "v1.1.0"}`)
}, },
expectError: true, expectError: true,
}, },

View file

@ -81,7 +81,7 @@ func (g *githubClient) getPublicReposWithAPIURL(ctx context.Context, apiURL, use
} }
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
resp.Body.Close() _ = resp.Body.Close()
// Try organization endpoint // Try organization endpoint
url = fmt.Sprintf("%s/orgs/%s/repos", apiURL, userOrOrg) url = fmt.Sprintf("%s/orgs/%s/repos", apiURL, userOrOrg)
req, err = http.NewRequestWithContext(ctx, "GET", url, nil) req, err = http.NewRequestWithContext(ctx, "GET", url, nil)
@ -96,16 +96,16 @@ func (g *githubClient) getPublicReposWithAPIURL(ctx context.Context, apiURL, use
} }
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
resp.Body.Close() _ = resp.Body.Close()
return nil, fmt.Errorf("failed to fetch repos: %s", resp.Status) return nil, fmt.Errorf("failed to fetch repos: %s", resp.Status)
} }
var repos []Repo var repos []Repo
if err := json.NewDecoder(resp.Body).Decode(&repos); err != nil { if err := json.NewDecoder(resp.Body).Decode(&repos); err != nil {
resp.Body.Close() _ = resp.Body.Close()
return nil, err return nil, err
} }
resp.Body.Close() _ = resp.Body.Close()
for _, repo := range repos { for _, repo := range repos {
allCloneURLs = append(allCloneURLs, repo.CloneURL) allCloneURLs = append(allCloneURLs, repo.CloneURL)
@ -152,7 +152,7 @@ func (g *githubClient) GetLatestRelease(ctx context.Context, owner, repo, channe
if err != nil { if err != nil {
return nil, err return nil, err
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status)
@ -207,7 +207,7 @@ func (g *githubClient) GetReleaseByPullRequest(ctx context.Context, owner, repo
if err != nil { if err != nil {
return nil, err return nil, err
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status)

View file

@ -52,7 +52,7 @@ func TestNewUpdateService(t *testing.T) {
func TestUpdateService_Start(t *testing.T) { func TestUpdateService_Start(t *testing.T) {
// Setup a mock server for HTTP tests // Setup a mock server for HTTP tests
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`{"version": "v1.1.0", "url": "http://example.com/release.zip"}`)) _, _ = w.Write([]byte(`{"version": "v1.1.0", "url": "http://example.com/release.zip"}`))
})) }))
defer server.Close() defer server.Close()

View file

@ -220,7 +220,7 @@ func ExampleCheckForUpdatesHTTP() {
// Create a mock HTTP server // Create a mock HTTP server
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/latest.json" { if r.URL.Path == "/latest.json" {
fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) _, _ = fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`)
} }
})) }))
defer server.Close() defer server.Close()
@ -247,7 +247,7 @@ func ExampleCheckOnlyHTTP() {
// Create a mock HTTP server // Create a mock HTTP server
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/latest.json" { if r.URL.Path == "/latest.json" {
fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) _, _ = fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`)
} }
})) }))
defer server.Close() defer server.Close()

View file

@ -158,8 +158,8 @@ func listContainers(all bool) error {
} }
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
fmt.Fprintln(w, i18n.T("cmd.vm.ps.header")) _, _ = fmt.Fprintln(w, i18n.T("cmd.vm.ps.header"))
fmt.Fprintln(w, "--\t----\t-----\t------\t-------\t---") _, _ = fmt.Fprintln(w, "--\t----\t-----\t------\t-------\t---")
for _, c := range containers { for _, c := range containers {
// Shorten image path // Shorten image path
@ -182,11 +182,11 @@ func listContainers(all bool) error {
status = errorStyle.Render(status) status = errorStyle.Render(status)
} }
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%d\n", _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%d\n",
c.ID[:8], c.Name, imageName, status, duration, c.PID) c.ID[:8], c.Name, imageName, status, duration, c.PID)
} }
w.Flush() _ = w.Flush()
return nil return nil
} }
@ -305,7 +305,7 @@ func viewLogs(id string, follow bool) error {
if err != nil { if err != nil {
return fmt.Errorf(i18n.T("i18n.fail.get", "logs")+": %w", err) return fmt.Errorf(i18n.T("i18n.fail.get", "logs")+": %w", err)
} }
defer reader.Close() defer func() { _ = reader.Close() }()
_, err = io.Copy(os.Stdout, reader) _, err = io.Copy(os.Stdout, reader)
return err return err

View file

@ -78,17 +78,17 @@ func listTemplates() error {
fmt.Printf("%s\n\n", repoNameStyle.Render(i18n.T("cmd.vm.templates.title"))) fmt.Printf("%s\n\n", repoNameStyle.Render(i18n.T("cmd.vm.templates.title")))
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
fmt.Fprintln(w, i18n.T("cmd.vm.templates.header")) _, _ = fmt.Fprintln(w, i18n.T("cmd.vm.templates.header"))
fmt.Fprintln(w, "----\t-----------") _, _ = fmt.Fprintln(w, "----\t-----------")
for _, tmpl := range templates { for _, tmpl := range templates {
desc := tmpl.Description desc := tmpl.Description
if len(desc) > 60 { if len(desc) > 60 {
desc = desc[:57] + "..." desc = desc[:57] + "..."
} }
fmt.Fprintf(w, "%s\t%s\n", repoNameStyle.Render(tmpl.Name), desc) _, _ = fmt.Fprintf(w, "%s\t%s\n", repoNameStyle.Render(tmpl.Name), desc)
} }
w.Flush() _ = w.Flush()
fmt.Println() fmt.Println()
fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.show"), dimStyle.Render("core vm templates show <name>")) fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.show"), dimStyle.Render("core vm templates show <name>"))
@ -158,7 +158,7 @@ func RunFromTemplate(templateName string, vars map[string]string, runOpts contai
if err != nil { if err != nil {
return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "create temp directory"})+": %w", err) return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "create temp directory"})+": %w", err)
} }
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
// Write the YAML file // Write the YAML file
yamlPath := filepath.Join(tmpDir, templateName+".yml") yamlPath := filepath.Join(tmpDir, templateName+".yml")

View file

@ -27,6 +27,9 @@ import (
// Commands via self-registration // Commands via self-registration
_ "github.com/host-uk/core/internal/cmd/ai" _ "github.com/host-uk/core/internal/cmd/ai"
_ "github.com/host-uk/core/internal/cmd/ci" _ "github.com/host-uk/core/internal/cmd/ci"
_ "github.com/host-uk/core/internal/cmd/collect"
_ "github.com/host-uk/core/internal/cmd/config"
_ "github.com/host-uk/core/internal/cmd/crypt"
_ "github.com/host-uk/core/internal/cmd/deploy" _ "github.com/host-uk/core/internal/cmd/deploy"
_ "github.com/host-uk/core/internal/cmd/dev" _ "github.com/host-uk/core/internal/cmd/dev"
_ "github.com/host-uk/core/internal/cmd/docs" _ "github.com/host-uk/core/internal/cmd/docs"
@ -37,6 +40,7 @@ import (
_ "github.com/host-uk/core/internal/cmd/monitor" _ "github.com/host-uk/core/internal/cmd/monitor"
_ "github.com/host-uk/core/internal/cmd/php" _ "github.com/host-uk/core/internal/cmd/php"
_ "github.com/host-uk/core/internal/cmd/pkgcmd" _ "github.com/host-uk/core/internal/cmd/pkgcmd"
_ "github.com/host-uk/core/internal/cmd/plugin"
_ "github.com/host-uk/core/internal/cmd/qa" _ "github.com/host-uk/core/internal/cmd/qa"
_ "github.com/host-uk/core/internal/cmd/sdk" _ "github.com/host-uk/core/internal/cmd/sdk"
_ "github.com/host-uk/core/internal/cmd/security" _ "github.com/host-uk/core/internal/cmd/security"

View file

@ -86,7 +86,7 @@ func (c *Client) ListTasks(ctx context.Context, opts ListOptions) ([]Task, error
if err != nil { if err != nil {
return nil, log.E(op, "request failed", err) return nil, log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if err := c.checkResponse(resp); err != nil { if err := c.checkResponse(resp); err != nil {
return nil, log.E(op, "API error", err) return nil, log.E(op, "API error", err)
@ -121,7 +121,7 @@ func (c *Client) GetTask(ctx context.Context, id string) (*Task, error) {
if err != nil { if err != nil {
return nil, log.E(op, "request failed", err) return nil, log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if err := c.checkResponse(resp); err != nil { if err := c.checkResponse(resp); err != nil {
return nil, log.E(op, "API error", err) return nil, log.E(op, "API error", err)
@ -166,7 +166,7 @@ func (c *Client) ClaimTask(ctx context.Context, id string) (*Task, error) {
if err != nil { if err != nil {
return nil, log.E(op, "request failed", err) return nil, log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if err := c.checkResponse(resp); err != nil { if err := c.checkResponse(resp); err != nil {
return nil, log.E(op, "API error", err) return nil, log.E(op, "API error", err)
@ -220,7 +220,7 @@ func (c *Client) UpdateTask(ctx context.Context, id string, update TaskUpdate) e
if err != nil { if err != nil {
return log.E(op, "request failed", err) return log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if err := c.checkResponse(resp); err != nil { if err := c.checkResponse(resp); err != nil {
return log.E(op, "API error", err) return log.E(op, "API error", err)
@ -256,7 +256,7 @@ func (c *Client) CompleteTask(ctx context.Context, id string, result TaskResult)
if err != nil { if err != nil {
return log.E(op, "request failed", err) return log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if err := c.checkResponse(resp); err != nil { if err := c.checkResponse(resp); err != nil {
return log.E(op, "API error", err) return log.E(op, "API error", err)
@ -312,7 +312,7 @@ func (c *Client) Ping(ctx context.Context) error {
if err != nil { if err != nil {
return log.E(op, "request failed", err) return log.E(op, "request failed", err)
} }
defer resp.Body.Close() defer func() { _ = resp.Body.Close() }()
if resp.StatusCode >= 400 { if resp.StatusCode >= 400 {
return log.E(op, fmt.Sprintf("server returned status %d", resp.StatusCode), nil) return log.E(op, fmt.Sprintf("server returned status %d", resp.StatusCode), nil)

View file

@ -74,7 +74,7 @@ func TestClient_ListTasks_Good(t *testing.T) {
assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization")) assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization"))
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(testTasks) _ = json.NewEncoder(w).Encode(testTasks)
})) }))
defer server.Close() defer server.Close()
@ -97,7 +97,7 @@ func TestClient_ListTasks_Good_WithFilters(t *testing.T) {
assert.Equal(t, "bug,urgent", query.Get("labels")) assert.Equal(t, "bug,urgent", query.Get("labels"))
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode([]Task{testTask}) _ = json.NewEncoder(w).Encode([]Task{testTask})
})) }))
defer server.Close() defer server.Close()
@ -119,7 +119,7 @@ func TestClient_ListTasks_Good_WithFilters(t *testing.T) {
func TestClient_ListTasks_Bad_ServerError(t *testing.T) { func TestClient_ListTasks_Bad_ServerError(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusInternalServerError) w.WriteHeader(http.StatusInternalServerError)
json.NewEncoder(w).Encode(APIError{Message: "internal error"}) _ = json.NewEncoder(w).Encode(APIError{Message: "internal error"})
})) }))
defer server.Close() defer server.Close()
@ -137,7 +137,7 @@ func TestClient_GetTask_Good(t *testing.T) {
assert.Equal(t, "/api/tasks/task-123", r.URL.Path) assert.Equal(t, "/api/tasks/task-123", r.URL.Path)
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(testTask) _ = json.NewEncoder(w).Encode(testTask)
})) }))
defer server.Close() defer server.Close()
@ -162,7 +162,7 @@ func TestClient_GetTask_Bad_EmptyID(t *testing.T) {
func TestClient_GetTask_Bad_NotFound(t *testing.T) { func TestClient_GetTask_Bad_NotFound(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNotFound) w.WriteHeader(http.StatusNotFound)
json.NewEncoder(w).Encode(APIError{Message: "task not found"}) _ = json.NewEncoder(w).Encode(APIError{Message: "task not found"})
})) }))
defer server.Close() defer server.Close()
@ -184,7 +184,7 @@ func TestClient_ClaimTask_Good(t *testing.T) {
assert.Equal(t, "/api/tasks/task-123/claim", r.URL.Path) assert.Equal(t, "/api/tasks/task-123/claim", r.URL.Path)
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask}) _ = json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask})
})) }))
defer server.Close() defer server.Close()
@ -204,7 +204,7 @@ func TestClient_ClaimTask_Good_SimpleResponse(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(claimedTask) _ = json.NewEncoder(w).Encode(claimedTask)
})) }))
defer server.Close() defer server.Close()
@ -227,7 +227,7 @@ func TestClient_ClaimTask_Bad_EmptyID(t *testing.T) {
func TestClient_ClaimTask_Bad_AlreadyClaimed(t *testing.T) { func TestClient_ClaimTask_Bad_AlreadyClaimed(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusConflict) w.WriteHeader(http.StatusConflict)
json.NewEncoder(w).Encode(APIError{Message: "task already claimed"}) _ = json.NewEncoder(w).Encode(APIError{Message: "task already claimed"})
})) }))
defer server.Close() defer server.Close()

View file

@ -99,7 +99,7 @@ func loadEnvFile(path string, cfg *Config) error {
if err != nil { if err != nil {
return err return err
} }
defer file.Close() defer func() { _ = file.Close() }()
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {

View file

@ -13,7 +13,7 @@ func TestLoadConfig_Good_FromEnvFile(t *testing.T) {
// Create temp directory with .env file // Create temp directory with .env file
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
envContent := ` envContent := `
AGENTIC_BASE_URL=https://test.api.com AGENTIC_BASE_URL=https://test.api.com
@ -37,7 +37,7 @@ func TestLoadConfig_Good_FromEnvVars(t *testing.T) {
// Create temp directory with .env file (partial config) // Create temp directory with .env file (partial config)
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
envContent := ` envContent := `
AGENTIC_TOKEN=env-file-token AGENTIC_TOKEN=env-file-token
@ -46,11 +46,11 @@ AGENTIC_TOKEN=env-file-token
require.NoError(t, err) require.NoError(t, err)
// Set environment variables that should override // Set environment variables that should override
os.Setenv("AGENTIC_BASE_URL", "https://env-override.com") _ = os.Setenv("AGENTIC_BASE_URL", "https://env-override.com")
os.Setenv("AGENTIC_TOKEN", "env-override-token") _ = os.Setenv("AGENTIC_TOKEN", "env-override-token")
defer func() { defer func() {
os.Unsetenv("AGENTIC_BASE_URL") _ = os.Unsetenv("AGENTIC_BASE_URL")
os.Unsetenv("AGENTIC_TOKEN") _ = os.Unsetenv("AGENTIC_TOKEN")
}() }()
cfg, err := LoadConfig(tmpDir) cfg, err := LoadConfig(tmpDir)
@ -64,15 +64,15 @@ func TestLoadConfig_Bad_NoToken(t *testing.T) {
// Create temp directory without config // Create temp directory without config
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
// Create empty .env // Create empty .env
err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(""), 0644) err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(""), 0644)
require.NoError(t, err) require.NoError(t, err)
// Ensure no env vars are set // Ensure no env vars are set
os.Unsetenv("AGENTIC_TOKEN") _ = os.Unsetenv("AGENTIC_TOKEN")
os.Unsetenv("AGENTIC_BASE_URL") _ = os.Unsetenv("AGENTIC_BASE_URL")
_, err = LoadConfig(tmpDir) _, err = LoadConfig(tmpDir)
@ -83,7 +83,7 @@ func TestLoadConfig_Bad_NoToken(t *testing.T) {
func TestLoadConfig_Good_EnvFileWithQuotes(t *testing.T) { func TestLoadConfig_Good_EnvFileWithQuotes(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
// Test with quoted values // Test with quoted values
envContent := ` envContent := `
@ -103,7 +103,7 @@ AGENTIC_BASE_URL='single-quoted-url'
func TestLoadConfig_Good_EnvFileWithComments(t *testing.T) { func TestLoadConfig_Good_EnvFileWithComments(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
envContent := ` envContent := `
# This is a comment # This is a comment
@ -126,12 +126,12 @@ func TestSaveConfig_Good(t *testing.T) {
// Create temp home directory // Create temp home directory
tmpHome, err := os.MkdirTemp("", "agentic-home") tmpHome, err := os.MkdirTemp("", "agentic-home")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpHome) defer func() { _ = os.RemoveAll(tmpHome) }()
// Override HOME for the test // Override HOME for the test
originalHome := os.Getenv("HOME") originalHome := os.Getenv("HOME")
os.Setenv("HOME", tmpHome) _ = os.Setenv("HOME", tmpHome)
defer os.Setenv("HOME", originalHome) defer func() { _ = os.Setenv("HOME", originalHome) }()
cfg := &Config{ cfg := &Config{
BaseURL: "https://saved.api.com", BaseURL: "https://saved.api.com",
@ -166,7 +166,7 @@ func TestConfigPath_Good(t *testing.T) {
func TestLoadConfig_Good_DefaultBaseURL(t *testing.T) { func TestLoadConfig_Good_DefaultBaseURL(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "agentic-test") tmpDir, err := os.MkdirTemp("", "agentic-test")
require.NoError(t, err) require.NoError(t, err)
defer os.RemoveAll(tmpDir) defer func() { _ = os.RemoveAll(tmpDir) }()
// Only provide token, should use default base URL // Only provide token, should use default base URL
envContent := ` envContent := `
@ -176,7 +176,7 @@ AGENTIC_TOKEN=test-token
require.NoError(t, err) require.NoError(t, err)
// Clear any env overrides // Clear any env overrides
os.Unsetenv("AGENTIC_BASE_URL") _ = os.Unsetenv("AGENTIC_BASE_URL")
cfg, err := LoadConfig(tmpDir) cfg, err := LoadConfig(tmpDir)

View file

@ -107,7 +107,7 @@ func readMetricsFile(path string, since time.Time) ([]Event, error) {
} }
return nil, fmt.Errorf("open metrics file %s: %w", path, err) return nil, fmt.Errorf("open metrics file %s: %w", path, err)
} }
defer f.Close() defer func() { _ = f.Close() }()
var events []Event var events []Event
scanner := bufio.NewScanner(f) scanner := bufio.NewScanner(f)

View file

@ -32,7 +32,7 @@ func QueryRAGForTask(task TaskInfo) string {
if err != nil { if err != nil {
return "" return ""
} }
defer qdrantClient.Close() defer func() { _ = qdrantClient.Close() }()
ollamaCfg := rag.DefaultOllamaConfig() ollamaCfg := rag.DefaultOllamaConfig()
ollamaClient, err := rag.NewOllamaClient(ollamaCfg) ollamaClient, err := rag.NewOllamaClient(ollamaCfg)

View file

@ -1428,10 +1428,7 @@ func (e *Executor) moduleDockerCompose(ctx context.Context, client *SSHClient, a
} }
// Heuristic for changed // Heuristic for changed
changed := true changed := !strings.Contains(stdout, "Up to date") && !strings.Contains(stderr, "Up to date")
if strings.Contains(stdout, "Up to date") || strings.Contains(stderr, "Up to date") {
changed = false
}
return &TaskResult{Changed: changed, Stdout: stdout}, nil return &TaskResult{Changed: changed, Stdout: stdout}, nil
} }

View file

@ -225,7 +225,7 @@ func (c *SSHClient) Run(ctx context.Context, cmd string) (stdout, stderr string,
return "", "", -1, log.E("ssh.Run", "stdin pipe", err) return "", "", -1, log.E("ssh.Run", "stdin pipe", err)
} }
go func() { go func() {
defer stdin.Close() defer func() { _ = stdin.Close() }()
_, _ = io.WriteString(stdin, c.becomePass+"\n") _, _ = io.WriteString(stdin, c.becomePass+"\n")
}() }()
} else if c.password != "" { } else if c.password != "" {
@ -236,7 +236,7 @@ func (c *SSHClient) Run(ctx context.Context, cmd string) (stdout, stderr string,
return "", "", -1, log.E("ssh.Run", "stdin pipe", err) return "", "", -1, log.E("ssh.Run", "stdin pipe", err)
} }
go func() { go func() {
defer stdin.Close() defer func() { _ = stdin.Close() }()
_, _ = io.WriteString(stdin, c.password+"\n") _, _ = io.WriteString(stdin, c.password+"\n")
}() }()
} else { } else {
@ -344,7 +344,7 @@ func (c *SSHClient) Upload(ctx context.Context, local io.Reader, remote string,
} }
go func() { go func() {
defer stdin.Close() defer func() { _ = stdin.Close() }()
if pass != "" { if pass != "" {
_, _ = io.WriteString(stdin, pass+"\n") _, _ = io.WriteString(stdin, pass+"\n")
} }
@ -357,7 +357,7 @@ func (c *SSHClient) Upload(ctx context.Context, local io.Reader, remote string,
} }
go func() { go func() {
defer stdin.Close() defer func() { _ = stdin.Close() }()
_, _ = stdin.Write(content) _, _ = stdin.Write(content)
}() }()
} }

View file

@ -148,7 +148,7 @@ func createTarXzArchive(src, dst string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to open source file: %w", err) return fmt.Errorf("failed to open source file: %w", err)
} }
defer srcFile.Close() defer func() { _ = srcFile.Close() }()
srcInfo, err := srcFile.Stat() srcInfo, err := srcFile.Stat()
if err != nil { if err != nil {
@ -199,7 +199,7 @@ func createTarGzArchive(src, dst string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to open source file: %w", err) return fmt.Errorf("failed to open source file: %w", err)
} }
defer srcFile.Close() defer func() { _ = srcFile.Close() }()
srcInfo, err := srcFile.Stat() srcInfo, err := srcFile.Stat()
if err != nil { if err != nil {
@ -211,15 +211,15 @@ func createTarGzArchive(src, dst string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to create archive file: %w", err) return fmt.Errorf("failed to create archive file: %w", err)
} }
defer dstFile.Close() defer func() { _ = dstFile.Close() }()
// Create gzip writer // Create gzip writer
gzWriter := gzip.NewWriter(dstFile) gzWriter := gzip.NewWriter(dstFile)
defer gzWriter.Close() defer func() { _ = gzWriter.Close() }()
// Create tar writer // Create tar writer
tarWriter := tar.NewWriter(gzWriter) tarWriter := tar.NewWriter(gzWriter)
defer tarWriter.Close() defer func() { _ = tarWriter.Close() }()
// Create tar header // Create tar header
header, err := tar.FileInfoHeader(srcInfo, "") header, err := tar.FileInfoHeader(srcInfo, "")
@ -249,7 +249,7 @@ func createZipArchive(src, dst string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to open source file: %w", err) return fmt.Errorf("failed to open source file: %w", err)
} }
defer srcFile.Close() defer func() { _ = srcFile.Close() }()
srcInfo, err := srcFile.Stat() srcInfo, err := srcFile.Stat()
if err != nil { if err != nil {
@ -261,11 +261,11 @@ func createZipArchive(src, dst string) error {
if err != nil { if err != nil {
return fmt.Errorf("failed to create archive file: %w", err) return fmt.Errorf("failed to create archive file: %w", err)
} }
defer dstFile.Close() defer func() { _ = dstFile.Close() }()
// Create zip writer // Create zip writer
zipWriter := zip.NewWriter(dstFile) zipWriter := zip.NewWriter(dstFile)
defer zipWriter.Close() defer func() { _ = zipWriter.Close() }()
// Create zip header // Create zip header
header, err := zip.FileInfoHeader(srcInfo) header, err := zip.FileInfoHeader(srcInfo)

View file

@ -338,11 +338,11 @@ func verifyTarGzContent(t *testing.T, archivePath, expectedName string) {
file, err := os.Open(archivePath) file, err := os.Open(archivePath)
require.NoError(t, err) require.NoError(t, err)
defer file.Close() defer func() { _ = file.Close() }()
gzReader, err := gzip.NewReader(file) gzReader, err := gzip.NewReader(file)
require.NoError(t, err) require.NoError(t, err)
defer gzReader.Close() defer func() { _ = gzReader.Close() }()
tarReader := tar.NewReader(gzReader) tarReader := tar.NewReader(gzReader)
@ -361,7 +361,7 @@ func verifyZipContent(t *testing.T, archivePath, expectedName string) {
reader, err := zip.OpenReader(archivePath) reader, err := zip.OpenReader(archivePath)
require.NoError(t, err) require.NoError(t, err)
defer reader.Close() defer func() { _ = reader.Close() }()
require.Len(t, reader.File, 1) require.Len(t, reader.File, 1)
assert.Equal(t, expectedName, reader.File[0].Name) assert.Equal(t, expectedName, reader.File[0].Name)

View file

@ -23,7 +23,7 @@ func Checksum(artifact Artifact) (Artifact, error) {
if err != nil { if err != nil {
return Artifact{}, fmt.Errorf("build.Checksum: failed to open file: %w", err) return Artifact{}, fmt.Errorf("build.Checksum: failed to open file: %w", err)
} }
defer file.Close() defer func() { _ = file.Close() }()
// Compute SHA256 hash // Compute SHA256 hash
hasher := sha256.New() hasher := sha256.New()

View file

@ -236,7 +236,7 @@ func TestConfigExists_Good(t *testing.T) {
func TestLoadConfig_Good_SignConfig(t *testing.T) { func TestLoadConfig_Good_SignConfig(t *testing.T) {
tmpDir := t.TempDir() tmpDir := t.TempDir()
coreDir := filepath.Join(tmpDir, ".core") coreDir := filepath.Join(tmpDir, ".core")
os.MkdirAll(coreDir, 0755) _ = os.MkdirAll(coreDir, 0755)
configContent := `version: 1 configContent := `version: 1
sign: sign:
@ -247,7 +247,7 @@ sign:
identity: "Developer ID Application: Test" identity: "Developer ID Application: Test"
notarize: true notarize: true
` `
os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644) _ = os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644)
cfg, err := LoadConfig(tmpDir) cfg, err := LoadConfig(tmpDir)
if err != nil { if err != nil {

View file

@ -73,7 +73,7 @@ func (s *MacOSSigner) Notarize(ctx context.Context, binary string) error {
if output, err := zipCmd.CombinedOutput(); err != nil { if output, err := zipCmd.CombinedOutput(); err != nil {
return fmt.Errorf("codesign.Notarize: failed to create zip: %w\nOutput: %s", err, string(output)) return fmt.Errorf("codesign.Notarize: failed to create zip: %w\nOutput: %s", err, string(output))
} }
defer os.Remove(zipPath) defer func() { _ = os.Remove(zipPath) }()
// Submit to Apple and wait // Submit to Apple and wait
submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit", submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit",

View file

@ -105,13 +105,13 @@ func TestHealthServer(t *testing.T) {
resp, err := http.Get("http://" + addr + "/health") resp, err := http.Get("http://" + addr + "/health")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Equal(t, http.StatusOK, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
// Ready should be OK by default // Ready should be OK by default
resp, err = http.Get("http://" + addr + "/ready") resp, err = http.Get("http://" + addr + "/ready")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Equal(t, http.StatusOK, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
// Set not ready // Set not ready
hs.SetReady(false) hs.SetReady(false)
@ -119,7 +119,7 @@ func TestHealthServer(t *testing.T) {
resp, err = http.Get("http://" + addr + "/ready") resp, err = http.Get("http://" + addr + "/ready")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
}) })
t.Run("with health checks", func(t *testing.T) { t.Run("with health checks", func(t *testing.T) {
@ -143,7 +143,7 @@ func TestHealthServer(t *testing.T) {
resp, err := http.Get("http://" + addr + "/health") resp, err := http.Get("http://" + addr + "/health")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Equal(t, http.StatusOK, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
// Make unhealthy // Make unhealthy
healthy = false healthy = false
@ -151,7 +151,7 @@ func TestHealthServer(t *testing.T) {
resp, err = http.Get("http://" + addr + "/health") resp, err = http.Get("http://" + addr + "/health")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
}) })
} }
@ -175,7 +175,7 @@ func TestDaemon(t *testing.T) {
resp, err := http.Get("http://" + addr + "/health") resp, err := http.Get("http://" + addr + "/health")
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Equal(t, http.StatusOK, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
// Stop should succeed // Stop should succeed
err = d.Stop() err = d.Stop()
@ -225,14 +225,14 @@ func TestDaemon(t *testing.T) {
// Initially ready // Initially ready
resp, _ := http.Get("http://" + addr + "/ready") resp, _ := http.Get("http://" + addr + "/ready")
assert.Equal(t, http.StatusOK, resp.StatusCode) assert.Equal(t, http.StatusOK, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
// Set not ready // Set not ready
d.SetReady(false) d.SetReady(false)
resp, _ = http.Get("http://" + addr + "/ready") resp, _ = http.Get("http://" + addr + "/ready")
assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode)
resp.Body.Close() _ = resp.Body.Close()
}) })
t.Run("no health addr returns empty", func(t *testing.T) { t.Run("no health addr returns empty", func(t *testing.T) {

297
pkg/collect/bitcointalk.go Normal file
View file

@ -0,0 +1,297 @@
package collect
import (
"context"
"fmt"
"net/http"
"path/filepath"
"strings"
"time"
core "github.com/host-uk/core/pkg/framework/core"
"golang.org/x/net/html"
)
// httpClient is the HTTP client used for all collection requests.
// The 30-second timeout bounds each individual page fetch end-to-end
// (connect, redirects, and body read).
// Use SetHTTPClient to override for testing.
var httpClient = &http.Client{
	Timeout: 30 * time.Second,
}
// BitcoinTalkCollector collects forum posts from BitcoinTalk.
type BitcoinTalkCollector struct {
	// TopicID is the numeric topic identifier. Collect requires this
	// to be non-empty.
	TopicID string
	// URL is a full URL to a BitcoinTalk topic page. If set, TopicID is
	// extracted from it.
	// NOTE(review): the extraction is not performed inside Collect itself
	// (it reads only TopicID) — confirm it happens before Collect runs.
	URL string
	// Pages limits collection to this many pages. 0 means all pages.
	Pages int
}
// Name returns the collector identifier in the form "bitcointalk:<id>".
// When TopicID is empty but URL is set, the literal "url" stands in as the
// suffix; with neither field set, the suffix is empty.
func (b *BitcoinTalkCollector) Name() string {
	label := b.TopicID
	if label == "" && b.URL != "" {
		label = "url"
	}
	return "bitcointalk:" + label
}
// Collect gathers posts from a BitcoinTalk topic.
//
// It pages through https://bitcointalk.org/index.php?topic=<id>.<offset>,
// writing each post as <OutputDir>/bitcointalk/<topicID>/posts/<n>.md via
// cfg.Output. Paging stops when the configured page limit is reached, a
// page yields no posts, or a page yields fewer than 20 posts.
//
// Error semantics are best-effort: a page-fetch failure increments
// result.Errors, emits an error event, and ends the walk early WITHOUT
// returning a non-nil error, so callers receive a partial Result. Per-post
// write failures are likewise counted and skipped. Only a missing topic ID,
// a failed directory creation, context cancellation, or a limiter error
// produce a non-nil error return.
//
// NOTE(review): b.URL is never consulted here even though the struct doc
// says TopicID can be extracted from it — confirm that extraction happens
// before Collect is invoked.
func (b *BitcoinTalkCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: b.Name()}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(b.Name(), "Starting BitcoinTalk collection")
	}
	topicID := b.TopicID
	if topicID == "" {
		return result, core.E("collect.BitcoinTalk.Collect", "topic ID is required", nil)
	}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(b.Name(), fmt.Sprintf("[dry-run] Would collect topic %s", topicID), nil)
		}
		return result, nil
	}
	baseDir := filepath.Join(cfg.OutputDir, "bitcointalk", topicID, "posts")
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.BitcoinTalk.Collect", "failed to create output directory", err)
	}
	postNum := 0
	offset := 0
	pageCount := 0
	// postsPerPage is both the offset step between page URLs and the
	// end-of-topic heuristic (assumed BitcoinTalk page size — a short page
	// is treated as the last one).
	postsPerPage := 20
	for {
		if ctx.Err() != nil {
			return result, core.E("collect.BitcoinTalk.Collect", "context cancelled", ctx.Err())
		}
		if b.Pages > 0 && pageCount >= b.Pages {
			break
		}
		// Respect the shared rate limiter before each page fetch.
		if cfg.Limiter != nil {
			if err := cfg.Limiter.Wait(ctx, "bitcointalk"); err != nil {
				return result, err
			}
		}
		pageURL := fmt.Sprintf("https://bitcointalk.org/index.php?topic=%s.%d", topicID, offset)
		posts, err := b.fetchPage(ctx, pageURL)
		if err != nil {
			// Best-effort: record the failure and stop paging, but still
			// return everything collected so far with a nil error.
			result.Errors++
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitError(b.Name(), fmt.Sprintf("Failed to fetch page at offset %d: %v", offset, err), nil)
			}
			break
		}
		if len(posts) == 0 {
			break
		}
		for _, post := range posts {
			postNum++
			filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", postNum))
			content := formatPostMarkdown(postNum, post)
			if err := cfg.Output.Write(filePath, content); err != nil {
				// Count the write failure but keep numbering/collecting the rest.
				result.Errors++
				continue
			}
			result.Items++
			result.Files = append(result.Files, filePath)
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitItem(b.Name(), fmt.Sprintf("Post %d by %s", postNum, post.Author), nil)
			}
		}
		pageCount++
		offset += postsPerPage
		// If we got fewer posts than expected, we've reached the end
		if len(posts) < postsPerPage {
			break
		}
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitComplete(b.Name(), fmt.Sprintf("Collected %d posts", result.Items), result)
	}
	return result, nil
}
// btPost represents a parsed BitcoinTalk forum post.
type btPost struct {
	// Author is the poster's display name, taken from the poster_info div.
	Author string
	// Date is the raw timestamp string from the post header (smalltext
	// block); it is not parsed into a time.Time here.
	Date string
	// Content is the post body text; extractPosts drops posts whose
	// Content is empty.
	Content string
}
// fetchPage downloads one BitcoinTalk topic page and returns the posts
// parsed out of its HTML. The request is bound to ctx, carries a
// browser-like User-Agent, and any non-200 response is surfaced as an
// error without attempting to parse the body.
func (b *BitcoinTalkCollector) fetchPage(ctx context.Context, pageURL string) ([]btPost, error) {
	const op = "collect.BitcoinTalk.fetchPage"
	request, err := http.NewRequestWithContext(ctx, http.MethodGet, pageURL, nil)
	if err != nil {
		return nil, core.E(op, "failed to create request", err)
	}
	request.Header.Set("User-Agent", "Mozilla/5.0 (compatible; CoreCollector/1.0)")
	response, err := httpClient.Do(request)
	if err != nil {
		return nil, core.E(op, "request failed", err)
	}
	defer func() { _ = response.Body.Close() }()
	if response.StatusCode != http.StatusOK {
		return nil, core.E(op, fmt.Sprintf("unexpected status code: %d", response.StatusCode), nil)
	}
	document, err := html.Parse(response.Body)
	if err != nil {
		return nil, core.E(op, "failed to parse HTML", err)
	}
	return extractPosts(document), nil
}
// extractPosts extracts post data from a parsed HTML document.
// It walks the tree looking for div elements whose class attribute contains
// "post" (the common BitcoinTalk post structure) and keeps every parsed
// post that yielded non-empty content.
func extractPosts(doc *html.Node) []btPost {
	var posts []btPost

	var visit func(*html.Node)
	visit = func(n *html.Node) {
		if n.Type == html.ElementNode && n.Data == "div" {
			for _, a := range n.Attr {
				if a.Key != "class" || !strings.Contains(a.Val, "post") {
					continue
				}
				if p := parsePost(n); p.Content != "" {
					posts = append(posts, p)
				}
			}
		}
		for child := n.FirstChild; child != nil; child = child.NextSibling {
			visit(child)
		}
	}

	visit(doc)
	return posts
}
// parsePost extracts author, date, and content from a post div.
// The subtree is scanned by class: "poster_info" supplies the author,
// a "smalltext" div that is a direct child of "headerandpost" supplies
// the date, and "inner" supplies the body text. If several elements of
// the same class exist in the subtree, the last one encountered wins.
func parsePost(node *html.Node) btPost {
	post := btPost{}
	var walk func(*html.Node)
	walk = func(n *html.Node) {
		if n.Type == html.ElementNode {
			for _, attr := range n.Attr {
				if attr.Key == "class" {
					switch {
					case strings.Contains(attr.Val, "poster_info"):
						post.Author = extractText(n)
					case strings.Contains(attr.Val, "headerandpost"):
						// Look for date in smalltext; only direct children
						// of headerandpost are considered.
						for c := n.FirstChild; c != nil; c = c.NextSibling {
							if c.Type == html.ElementNode && c.Data == "div" {
								for _, a := range c.Attr {
									if a.Key == "class" && strings.Contains(a.Val, "smalltext") {
										post.Date = strings.TrimSpace(extractText(c))
									}
								}
							}
						}
					case strings.Contains(attr.Val, "inner"):
						post.Content = strings.TrimSpace(extractText(n))
					}
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
	}
	walk(node)
	return post
}
// extractText recursively extracts text content from an HTML node.
// Text from block-level children (p, div) is separated by a newline, and
// <br> elements are rendered as explicit line breaks.
func extractText(n *html.Node) string {
	if n.Type == html.TextNode {
		return n.Data
	}
	var b strings.Builder
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		// A <br> has no children, so the recursive call below would return
		// "" and the line break would be silently dropped; emit it here.
		if c.Type == html.ElementNode && c.Data == "br" {
			if b.Len() > 0 {
				b.WriteString("\n")
			}
			continue
		}
		text := extractText(c)
		if text == "" {
			continue
		}
		// Separate block-level siblings with a newline.
		if b.Len() > 0 && c.Type == html.ElementNode && (c.Data == "p" || c.Data == "div") {
			b.WriteString("\n")
		}
		b.WriteString(text)
	}
	return b.String()
}
// formatPostMarkdown renders a single BitcoinTalk post as a markdown
// document with a numbered heading, an optional date line, and the body.
func formatPostMarkdown(num int, post btPost) string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "# Post %d by %s\n\n", num, post.Author)
	if post.Date != "" {
		fmt.Fprintf(&sb, "**Date:** %s\n\n", post.Date)
	}
	sb.WriteString(post.Content + "\n")
	return sb.String()
}
// ParsePostsFromHTML parses BitcoinTalk posts from raw HTML content.
// This is exported for testing purposes.
func ParsePostsFromHTML(htmlContent string) ([]btPost, error) {
	reader := strings.NewReader(htmlContent)
	doc, err := html.Parse(reader)
	if err != nil {
		return nil, core.E("collect.ParsePostsFromHTML", "failed to parse HTML", err)
	}
	return extractPosts(doc), nil
}
// FormatPostMarkdown is exported for testing purposes.
func FormatPostMarkdown(num int, author, date, content string) string {
return formatPostMarkdown(num, btPost{Author: author, Date: date, Content: content})
}
// FetchPageFunc is an injectable function type for fetching pages, used in testing.
type FetchPageFunc func(ctx context.Context, url string) ([]btPost, error)

// BitcoinTalkCollectorWithFetcher wraps BitcoinTalkCollector with a custom fetcher for testing.
type BitcoinTalkCollectorWithFetcher struct {
	BitcoinTalkCollector
	// Fetcher, when set, is intended to replace the network fetch.
	// NOTE(review): nothing in this file consults Fetcher — confirm callers use it.
	Fetcher FetchPageFunc
}
// SetHTTPClient replaces the package-level HTTP client.
// Use this in tests to inject a custom transport or timeout.
//
// The swap is a plain assignment with no synchronization, so call it
// before collection starts rather than concurrently with in-flight requests.
func SetHTTPClient(c *http.Client) {
	httpClient = c
}

View file

@ -0,0 +1,93 @@
package collect
import (
"context"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// Name should include the topic ID when one is set.
func TestBitcoinTalkCollector_Name_Good(t *testing.T) {
	b := &BitcoinTalkCollector{TopicID: "12345"}
	assert.Equal(t, "bitcointalk:12345", b.Name())
}

// Name falls back to "url" when only a URL is configured.
func TestBitcoinTalkCollector_Name_Good_URL(t *testing.T) {
	b := &BitcoinTalkCollector{URL: "https://bitcointalk.org/index.php?topic=12345.0"}
	assert.Equal(t, "bitcointalk:url", b.Name())
}

// Collect must fail when neither a topic ID nor a URL is provided.
func TestBitcoinTalkCollector_Collect_Bad_NoTopicID(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	b := &BitcoinTalkCollector{}
	_, err := b.Collect(context.Background(), cfg)
	assert.Error(t, err)
}

// Dry-run mode must not fetch or write anything.
func TestBitcoinTalkCollector_Collect_Good_DryRun(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.DryRun = true
	b := &BitcoinTalkCollector{TopicID: "12345"}
	result, err := b.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}

// Two well-formed posts should be extracted with author, content, and date.
func TestParsePostsFromHTML_Good(t *testing.T) {
	sampleHTML := `
<html><body>
<div class="post">
<div class="poster_info">satoshi</div>
<div class="headerandpost">
<div class="smalltext">January 03, 2009</div>
</div>
<div class="inner">This is the first post content.</div>
</div>
<div class="post">
<div class="poster_info">hal</div>
<div class="headerandpost">
<div class="smalltext">January 10, 2009</div>
</div>
<div class="inner">Running bitcoin!</div>
</div>
</body></html>`
	posts, err := ParsePostsFromHTML(sampleHTML)
	assert.NoError(t, err)
	assert.Len(t, posts, 2)
	assert.Contains(t, posts[0].Author, "satoshi")
	assert.Contains(t, posts[0].Content, "This is the first post content.")
	assert.Contains(t, posts[0].Date, "January 03, 2009")
	assert.Contains(t, posts[1].Author, "hal")
	assert.Contains(t, posts[1].Content, "Running bitcoin!")
}

// A document with no post divs yields an empty slice, not an error.
func TestParsePostsFromHTML_Good_Empty(t *testing.T) {
	posts, err := ParsePostsFromHTML("<html><body></body></html>")
	assert.NoError(t, err)
	assert.Empty(t, posts)
}

// The markdown output contains the heading, date line, and body.
func TestFormatPostMarkdown_Good(t *testing.T) {
	md := FormatPostMarkdown(1, "satoshi", "January 03, 2009", "Hello, world!")
	assert.Contains(t, md, "# Post 1 by satoshi")
	assert.Contains(t, md, "**Date:** January 03, 2009")
	assert.Contains(t, md, "Hello, world!")
}

// The date line is omitted entirely when the date is empty.
func TestFormatPostMarkdown_Good_NoDate(t *testing.T) {
	md := FormatPostMarkdown(5, "user", "", "Content here")
	assert.Contains(t, md, "# Post 5 by user")
	assert.NotContains(t, md, "**Date:**")
	assert.Contains(t, md, "Content here")
}

103
pkg/collect/collect.go Normal file
View file

@ -0,0 +1,103 @@
// Package collect provides a data collection subsystem for gathering information
// from multiple sources including GitHub, BitcoinTalk, CoinGecko, and academic
// paper repositories. It supports rate limiting, incremental state tracking,
// and event-driven progress reporting.
package collect
import (
"context"
"path/filepath"
"github.com/host-uk/core/pkg/io"
)
// Collector is the interface all collection sources implement.
// Collectors are run either directly or sequentially by an Excavator,
// and report progress through cfg.Dispatcher when it is set.
type Collector interface {
	// Name returns a human-readable name for this collector.
	Name() string
	// Collect gathers data from the source and writes it to the configured output.
	Collect(ctx context.Context, cfg *Config) (*Result, error)
}
// Config holds shared configuration for all collectors.
// Limiter, State, and Dispatcher may each be nil; collectors check for
// nil before use, so a nil field simply disables that feature.
type Config struct {
	// Output is the storage medium for writing collected data.
	Output io.Medium
	// OutputDir is the base directory for all collected data.
	OutputDir string
	// Limiter provides per-source rate limiting. Nil disables rate limiting.
	Limiter *RateLimiter
	// State tracks collection progress for incremental runs. Nil disables tracking.
	State *State
	// Dispatcher manages event dispatch for progress reporting. Nil disables events.
	Dispatcher *Dispatcher
	// Verbose enables detailed logging output.
	Verbose bool
	// DryRun simulates collection without writing files.
	DryRun bool
}
// Result holds the output of a collection run.
// Counters are additive, so results from multiple runs can be combined
// with MergeResults.
type Result struct {
	// Source identifies which collector produced this result.
	Source string
	// Items is the number of items successfully collected.
	Items int
	// Errors is the number of errors encountered during collection.
	Errors int
	// Skipped is the number of items skipped (e.g. already collected).
	Skipped int
	// Files lists the paths of all files written.
	Files []string
}
// NewConfig creates a Config with sensible defaults.
// It initialises a MockMedium for output, a rate limiter, a state tracker,
// and an event dispatcher.
func NewConfig(outputDir string) *Config {
	// Delegate to NewConfigWithMedium so the default wiring is defined in
	// exactly one place and the two constructors cannot drift apart.
	return NewConfigWithMedium(io.NewMockMedium(), outputDir)
}
// NewConfigWithMedium creates a Config using the specified storage medium.
// The collection state file is kept under outputDir as .collect-state.json.
func NewConfigWithMedium(m io.Medium, outputDir string) *Config {
	statePath := filepath.Join(outputDir, ".collect-state.json")
	cfg := &Config{
		Output:     m,
		OutputDir:  outputDir,
		Limiter:    NewRateLimiter(),
		State:      NewState(m, statePath),
		Dispatcher: NewDispatcher(),
	}
	return cfg
}
// MergeResults combines multiple results into a single aggregated result.
// Nil entries are ignored; the merged Source is set to the given source.
func MergeResults(source string, results ...*Result) *Result {
	out := &Result{Source: source}
	for _, res := range results {
		if res == nil {
			continue
		}
		out.Items += res.Items
		out.Errors += res.Errors
		out.Skipped += res.Skipped
		out.Files = append(out.Files, res.Files...)
	}
	return out
}

View file

@ -0,0 +1,68 @@
package collect
import (
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// NewConfig should populate every collaborator and default the flags to false.
func TestNewConfig_Good(t *testing.T) {
	cfg := NewConfig("/tmp/output")
	assert.NotNil(t, cfg)
	assert.Equal(t, "/tmp/output", cfg.OutputDir)
	assert.NotNil(t, cfg.Output)
	assert.NotNil(t, cfg.Limiter)
	assert.NotNil(t, cfg.State)
	assert.NotNil(t, cfg.Dispatcher)
	assert.False(t, cfg.Verbose)
	assert.False(t, cfg.DryRun)
}

// NewConfigWithMedium should keep the supplied medium and wire the rest.
func TestNewConfigWithMedium_Good(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/data")
	assert.NotNil(t, cfg)
	assert.Equal(t, m, cfg.Output)
	assert.Equal(t, "/data", cfg.OutputDir)
	assert.NotNil(t, cfg.Limiter)
	assert.NotNil(t, cfg.State)
	assert.NotNil(t, cfg.Dispatcher)
}

// Counters sum and file lists concatenate across merged results.
func TestMergeResults_Good(t *testing.T) {
	r1 := &Result{
		Source: "a",
		Items:  5,
		Errors: 1,
		Files:  []string{"a.md", "b.md"},
	}
	r2 := &Result{
		Source:  "b",
		Items:   3,
		Skipped: 2,
		Files:   []string{"c.md"},
	}
	merged := MergeResults("combined", r1, r2)
	assert.Equal(t, "combined", merged.Source)
	assert.Equal(t, 8, merged.Items)
	assert.Equal(t, 1, merged.Errors)
	assert.Equal(t, 2, merged.Skipped)
	assert.Len(t, merged.Files, 3)
}

// Nil results are skipped without panicking.
func TestMergeResults_Good_NilResults(t *testing.T) {
	r1 := &Result{Items: 3}
	merged := MergeResults("test", r1, nil, nil)
	assert.Equal(t, 3, merged.Items)
}

// Merging nothing yields zero counters and a nil file list.
func TestMergeResults_Good_Empty(t *testing.T) {
	merged := MergeResults("empty")
	assert.Equal(t, 0, merged.Items)
	assert.Equal(t, 0, merged.Errors)
	assert.Nil(t, merged.Files)
}

133
pkg/collect/events.go Normal file
View file

@ -0,0 +1,133 @@
package collect
import (
"sync"
"time"
)
// Event types used by the collection subsystem.
// These are the values carried in Event.Type and used as keys when
// registering handlers with Dispatcher.On.
const (
	// EventStart is emitted when a collector begins its run.
	EventStart = "start"
	// EventProgress is emitted to report incremental progress.
	EventProgress = "progress"
	// EventItem is emitted when a single item is collected.
	EventItem = "item"
	// EventError is emitted when an error occurs during collection.
	EventError = "error"
	// EventComplete is emitted when a collector finishes its run.
	EventComplete = "complete"
)
// Event represents a collection event.
type Event struct {
	// Type is one of the Event* constants.
	Type string `json:"type"`
	// Source identifies the collector that emitted the event.
	Source string `json:"source"`
	// Message is a human-readable description of the event.
	Message string `json:"message"`
	// Data carries optional event-specific payload.
	Data any `json:"data,omitempty"`
	// Time is when the event occurred. Dispatcher.Emit fills it in with
	// the current time if it is left zero.
	Time time.Time `json:"time"`
}
// EventHandler handles collection events.
type EventHandler func(Event)

// Dispatcher manages event dispatch. Handlers are registered per event type
// and are called synchronously when an event is emitted.
// The zero value is not usable (the handler map is nil); create instances
// with NewDispatcher.
type Dispatcher struct {
	mu       sync.RWMutex              // guards handlers
	handlers map[string][]EventHandler // event type -> handlers in registration order
}
// NewDispatcher creates a new event dispatcher with no handlers registered.
func NewDispatcher() *Dispatcher {
	d := &Dispatcher{}
	d.handlers = make(map[string][]EventHandler)
	return d
}
// On registers a handler for an event type. Multiple handlers may be
// registered for the same event type; they run in registration order.
func (d *Dispatcher) On(eventType string, handler EventHandler) {
	d.mu.Lock()
	existing := d.handlers[eventType]
	d.handlers[eventType] = append(existing, handler)
	d.mu.Unlock()
}
// Emit dispatches an event to all registered handlers for that event type.
// Events with no registered handlers are silently dropped. A zero Time
// field is replaced with the current time before dispatch.
func (d *Dispatcher) Emit(event Event) {
	if event.Time.IsZero() {
		event.Time = time.Now()
	}

	// Snapshot the handler slice under the read lock, then invoke the
	// handlers outside it.
	d.mu.RLock()
	registered := d.handlers[event.Type]
	d.mu.RUnlock()

	for _, handle := range registered {
		handle(event)
	}
}
// EmitStart emits a start event for the given source.
func (d *Dispatcher) EmitStart(source, message string) {
	d.Emit(Event{Type: EventStart, Source: source, Message: message})
}

// EmitProgress emits a progress event.
func (d *Dispatcher) EmitProgress(source, message string, data any) {
	d.Emit(Event{Type: EventProgress, Source: source, Message: message, Data: data})
}

// EmitItem emits an item event.
func (d *Dispatcher) EmitItem(source, message string, data any) {
	d.Emit(Event{Type: EventItem, Source: source, Message: message, Data: data})
}

// EmitError emits an error event.
func (d *Dispatcher) EmitError(source, message string, data any) {
	d.Emit(Event{Type: EventError, Source: source, Message: message, Data: data})
}

// EmitComplete emits a complete event.
func (d *Dispatcher) EmitComplete(source, message string, data any) {
	d.Emit(Event{Type: EventComplete, Source: source, Message: message, Data: data})
}

133
pkg/collect/events_test.go Normal file
View file

@ -0,0 +1,133 @@
package collect
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// A registered handler receives the emitted event with Time auto-filled.
func TestDispatcher_Emit_Good(t *testing.T) {
	d := NewDispatcher()
	var received Event
	d.On(EventStart, func(e Event) {
		received = e
	})
	d.Emit(Event{
		Type:    EventStart,
		Source:  "test",
		Message: "hello",
	})
	assert.Equal(t, EventStart, received.Type)
	assert.Equal(t, "test", received.Source)
	assert.Equal(t, "hello", received.Message)
	assert.False(t, received.Time.IsZero(), "Time should be set automatically")
}

// Registering the same handler three times means three invocations per emit.
func TestDispatcher_On_Good(t *testing.T) {
	d := NewDispatcher()
	var count int
	handler := func(e Event) { count++ }
	d.On(EventProgress, handler)
	d.On(EventProgress, handler)
	d.On(EventProgress, handler)
	d.Emit(Event{Type: EventProgress, Source: "test"})
	assert.Equal(t, 3, count, "All three handlers should be called")
}

// Emitting an event type nobody listens for must be a safe no-op.
func TestDispatcher_Emit_Good_NoHandlers(t *testing.T) {
	d := NewDispatcher()
	// Should not panic when emitting an event with no handlers
	assert.NotPanics(t, func() {
		d.Emit(Event{
			Type:    "unknown-event",
			Source:  "test",
			Message: "this should be silently dropped",
		})
	})
}

// Handlers only fire for their own event type.
func TestDispatcher_Emit_Good_MultipleEventTypes(t *testing.T) {
	d := NewDispatcher()
	var starts, errors int
	d.On(EventStart, func(e Event) { starts++ })
	d.On(EventError, func(e Event) { errors++ })
	d.Emit(Event{Type: EventStart, Source: "test"})
	d.Emit(Event{Type: EventStart, Source: "test"})
	d.Emit(Event{Type: EventError, Source: "test"})
	assert.Equal(t, 2, starts)
	assert.Equal(t, 1, errors)
}

// The auto-filled Time must fall between the instants around the Emit call.
func TestDispatcher_Emit_Good_SetsTime(t *testing.T) {
	d := NewDispatcher()
	var received Event
	d.On(EventItem, func(e Event) {
		received = e
	})
	before := time.Now()
	d.Emit(Event{Type: EventItem, Source: "test"})
	after := time.Now()
	assert.True(t, received.Time.After(before) || received.Time.Equal(before))
	assert.True(t, received.Time.Before(after) || received.Time.Equal(after))
}

// A caller-supplied Time must not be overwritten.
func TestDispatcher_Emit_Good_PreservesExistingTime(t *testing.T) {
	d := NewDispatcher()
	customTime := time.Date(2025, 6, 15, 12, 0, 0, 0, time.UTC)
	var received Event
	d.On(EventItem, func(e Event) {
		received = e
	})
	d.Emit(Event{Type: EventItem, Source: "test", Time: customTime})
	assert.True(t, customTime.Equal(received.Time))
}

// Each Emit* convenience method must route to the matching event type.
func TestDispatcher_EmitHelpers_Good(t *testing.T) {
	d := NewDispatcher()
	events := make(map[string]Event)
	for _, eventType := range []string{EventStart, EventProgress, EventItem, EventError, EventComplete} {
		et := eventType
		d.On(et, func(e Event) {
			events[et] = e
		})
	}
	d.EmitStart("s1", "started")
	d.EmitProgress("s2", "progressing", map[string]int{"count": 5})
	d.EmitItem("s3", "got item", nil)
	d.EmitError("s4", "something failed", nil)
	d.EmitComplete("s5", "done", nil)
	assert.Equal(t, "s1", events[EventStart].Source)
	assert.Equal(t, "started", events[EventStart].Message)
	assert.Equal(t, "s2", events[EventProgress].Source)
	assert.NotNil(t, events[EventProgress].Data)
	assert.Equal(t, "s3", events[EventItem].Source)
	assert.Equal(t, "s4", events[EventError].Source)
	assert.Equal(t, "s5", events[EventComplete].Source)
}

// The constructor must produce a ready-to-use dispatcher.
func TestNewDispatcher_Good(t *testing.T) {
	d := NewDispatcher()
	assert.NotNil(t, d)
	assert.NotNil(t, d.handlers)
}

128
pkg/collect/excavate.go Normal file
View file

@ -0,0 +1,128 @@
package collect
import (
"context"
"fmt"
"time"
core "github.com/host-uk/core/pkg/framework/core"
)
// Excavator runs multiple collectors as a coordinated operation.
// It provides sequential execution with rate limit respect, state tracking
// for resume support, and aggregated results. Collectors run in the order
// they appear in Collectors.
type Excavator struct {
	// Collectors is the list of collectors to run.
	Collectors []Collector
	// ScanOnly reports what would be collected without performing collection.
	ScanOnly bool
	// Resume enables incremental collection using saved state.
	Resume bool
}
// Name returns the orchestrator name. It is used as the Source on the
// aggregated Result and on every event emitted by Run.
func (e *Excavator) Name() string {
	return "excavator"
}
// Run executes all collectors sequentially, respecting rate limits and
// using state for resume support. Results are aggregated from all collectors.
//
// Individual collector failures are counted in result.Errors and reported
// via EmitError but do not abort the run; only context cancellation or a
// state-load failure produces a non-nil error.
func (e *Excavator) Run(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: e.Name()}
	if len(e.Collectors) == 0 {
		return result, nil
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(e.Name(), fmt.Sprintf("Starting excavation with %d collectors", len(e.Collectors)))
	}
	// Load state if resuming
	if e.Resume && cfg.State != nil {
		if err := cfg.State.Load(); err != nil {
			return result, core.E("collect.Excavator.Run", "failed to load state", err)
		}
	}
	// If scan-only, just report what would be collected
	if e.ScanOnly {
		for _, c := range e.Collectors {
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitProgress(e.Name(), fmt.Sprintf("[scan] Would run collector: %s", c.Name()), nil)
			}
		}
		return result, nil
	}
	for i, c := range e.Collectors {
		if ctx.Err() != nil {
			return result, core.E("collect.Excavator.Run", "context cancelled", ctx.Err())
		}
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(e.Name(),
				fmt.Sprintf("Running collector %d/%d: %s", i+1, len(e.Collectors), c.Name()), nil)
		}
		// Check if we should skip (already completed in a previous run).
		// A collector counts as done once it has recorded items and a run time.
		if e.Resume && cfg.State != nil {
			if entry, ok := cfg.State.Get(c.Name()); ok {
				if entry.Items > 0 && !entry.LastRun.IsZero() {
					if cfg.Dispatcher != nil {
						cfg.Dispatcher.EmitProgress(e.Name(),
							fmt.Sprintf("Skipping %s (already collected %d items on %s)",
								c.Name(), entry.Items, entry.LastRun.Format(time.RFC3339)), nil)
					}
					result.Skipped++
					continue
				}
			}
		}
		collectorResult, err := c.Collect(ctx, cfg)
		if err != nil {
			// Record the failure and move on to the next collector.
			result.Errors++
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitError(e.Name(),
					fmt.Sprintf("Collector %s failed: %v", c.Name(), err), nil)
			}
			continue
		}
		if collectorResult != nil {
			result.Items += collectorResult.Items
			result.Errors += collectorResult.Errors
			result.Skipped += collectorResult.Skipped
			result.Files = append(result.Files, collectorResult.Files...)
			// Update state so a future Resume run can skip this collector.
			if cfg.State != nil {
				cfg.State.Set(c.Name(), &StateEntry{
					Source:  c.Name(),
					LastRun: time.Now(),
					Items:   collectorResult.Items,
				})
			}
		}
	}
	// Save state. A save failure is reported as an event but does not fail the run.
	if cfg.State != nil {
		if err := cfg.State.Save(); err != nil {
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitError(e.Name(), fmt.Sprintf("Failed to save state: %v", err), nil)
			}
		}
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitComplete(e.Name(),
			fmt.Sprintf("Excavation complete: %d items, %d errors, %d skipped",
				result.Items, result.Errors, result.Skipped), result)
	}
	return result, nil
}

View file

@ -0,0 +1,202 @@
package collect
import (
"context"
"fmt"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// mockCollector is a simple collector for testing the Excavator.
type mockCollector struct {
	name   string // value returned by Name
	items  int    // number of items (and files) to report on success
	err    error  // if non-nil, Collect fails with this error
	called bool   // set to true when Collect is invoked
}

func (m *mockCollector) Name() string { return m.name }

// Collect records the call, then fails, returns an empty dry-run result,
// or fabricates a result with m.items files. Note the file list is built
// even when DryRun later discards it.
func (m *mockCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) {
	m.called = true
	if m.err != nil {
		return &Result{Source: m.name, Errors: 1}, m.err
	}
	result := &Result{Source: m.name, Items: m.items}
	for i := 0; i < m.items; i++ {
		result.Files = append(result.Files, fmt.Sprintf("/output/%s/%d.md", m.name, i))
	}
	if cfg.DryRun {
		return &Result{Source: m.name}, nil
	}
	return result, nil
}
// The orchestrator name is fixed.
func TestExcavator_Name_Good(t *testing.T) {
	e := &Excavator{}
	assert.Equal(t, "excavator", e.Name())
}

// Two collectors run in order and their items/files aggregate.
func TestExcavator_Run_Good(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	c1 := &mockCollector{name: "source-a", items: 3}
	c2 := &mockCollector{name: "source-b", items: 5}
	e := &Excavator{
		Collectors: []Collector{c1, c2},
	}
	result, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.True(t, c1.called)
	assert.True(t, c2.called)
	assert.Equal(t, 8, result.Items)
	assert.Len(t, result.Files, 8)
}

// An Excavator with no collectors succeeds with an empty result.
func TestExcavator_Run_Good_Empty(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	e := &Excavator{}
	result, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}

// Collectors still run under DryRun; the mock reports zero items.
func TestExcavator_Run_Good_DryRun(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.DryRun = true
	c1 := &mockCollector{name: "source-a", items: 10}
	c2 := &mockCollector{name: "source-b", items: 20}
	e := &Excavator{
		Collectors: []Collector{c1, c2},
	}
	result, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.True(t, c1.called)
	assert.True(t, c2.called)
	// In dry run, mockCollector returns 0 items
	assert.Equal(t, 0, result.Items)
}

// ScanOnly reports collectors via progress events without running them.
func TestExcavator_Run_Good_ScanOnly(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	c1 := &mockCollector{name: "source-a", items: 10}
	var progressMessages []string
	cfg.Dispatcher.On(EventProgress, func(e Event) {
		progressMessages = append(progressMessages, e.Message)
	})
	e := &Excavator{
		Collectors: []Collector{c1},
		ScanOnly:   true,
	}
	result, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.False(t, c1.called, "Collector should not be called in scan-only mode")
	assert.Equal(t, 0, result.Items)
	assert.NotEmpty(t, progressMessages)
	assert.Contains(t, progressMessages[0], "source-a")
}

// One failing collector does not prevent the others from running.
func TestExcavator_Run_Good_WithErrors(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	c1 := &mockCollector{name: "good", items: 5}
	c2 := &mockCollector{name: "bad", err: fmt.Errorf("network error")}
	c3 := &mockCollector{name: "also-good", items: 3}
	e := &Excavator{
		Collectors: []Collector{c1, c2, c3},
	}
	result, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 8, result.Items)
	assert.Equal(t, 1, result.Errors) // c2 failed
	assert.True(t, c1.called)
	assert.True(t, c2.called)
	assert.True(t, c3.called)
}

// A pre-cancelled context aborts the run with an error.
func TestExcavator_Run_Good_CancelledContext(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // Cancel immediately
	c1 := &mockCollector{name: "source-a", items: 5}
	e := &Excavator{
		Collectors: []Collector{c1},
	}
	_, err := e.Run(ctx, cfg)
	assert.Error(t, err)
}

// A successful run records per-collector state for resume support.
func TestExcavator_Run_Good_SavesState(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	c1 := &mockCollector{name: "source-a", items: 5}
	e := &Excavator{
		Collectors: []Collector{c1},
	}
	_, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	// Verify state was saved
	entry, ok := cfg.State.Get("source-a")
	assert.True(t, ok)
	assert.Equal(t, 5, entry.Items)
	assert.Equal(t, "source-a", entry.Source)
}

// A run emits exactly one start and one complete event.
func TestExcavator_Run_Good_Events(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	var startCount, completeCount int
	cfg.Dispatcher.On(EventStart, func(e Event) { startCount++ })
	cfg.Dispatcher.On(EventComplete, func(e Event) { completeCount++ })
	c1 := &mockCollector{name: "source-a", items: 1}
	e := &Excavator{
		Collectors: []Collector{c1},
	}
	_, err := e.Run(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 1, startCount)
	assert.Equal(t, 1, completeCount)
}

289
pkg/collect/github.go Normal file
View file

@ -0,0 +1,289 @@
package collect
import (
"context"
"encoding/json"
"fmt"
"os/exec"
"path/filepath"
"strings"
"time"
core "github.com/host-uk/core/pkg/framework/core"
)
// ghIssue represents a GitHub issue or pull request as returned by the gh CLI.
// The JSON tags match the field names requested via --json in the list commands.
type ghIssue struct {
	Number    int       `json:"number"`
	Title     string    `json:"title"`
	State     string    `json:"state"`
	Author    ghAuthor  `json:"author"`
	Body      string    `json:"body"`
	CreatedAt time.Time `json:"createdAt"`
	Labels    []ghLabel `json:"labels"`
	URL       string    `json:"url"`
}

// ghAuthor is the author object nested inside a gh CLI issue/PR record.
type ghAuthor struct {
	Login string `json:"login"`
}

// ghLabel is a single label entry on an issue or PR.
type ghLabel struct {
	Name string `json:"name"`
}

// ghRepo represents a GitHub repository as returned by the gh CLI.
type ghRepo struct {
	Name string `json:"name"`
}
// GitHubCollector collects issues and PRs from GitHub repositories.
// Collection shells out to the gh CLI, so gh must be installed and
// authenticated. Note that setting both IssuesOnly and PRsOnly disables
// both branches and collects nothing.
type GitHubCollector struct {
	// Org is the GitHub organisation.
	Org string
	// Repo is the repository name. If empty and Org is set, all repos are collected.
	Repo string
	// IssuesOnly limits collection to issues (excludes PRs).
	IssuesOnly bool
	// PRsOnly limits collection to PRs (excludes issues).
	PRsOnly bool
}
// Name returns the collector name: "github:org" when no repository is
// configured, otherwise "github:org/repo".
func (g *GitHubCollector) Name() string {
	if g.Repo == "" {
		return fmt.Sprintf("github:%s", g.Org)
	}
	return fmt.Sprintf("github:%s/%s", g.Org, g.Repo)
}
// Collect gathers issues and/or PRs from GitHub repositories.
// Per-repo failures are counted in result.Errors and reported via EmitError
// but do not abort the run; only a repo-listing failure or context
// cancellation returns a non-nil error.
// NOTE(review): listOrgRepos is invoked even when cfg.DryRun is set — confirm
// that a dry run is allowed to hit the gh CLI.
func (g *GitHubCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: g.Name()}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(g.Name(), "Starting GitHub collection")
	}
	// If no specific repo, list all repos in the org
	if g.Repo == "" {
		var err error
		repos, err = g.listOrgRepos(ctx)
		if err != nil {
			return result, err
		}
	}
	for _, repo := range repos {
		if ctx.Err() != nil {
			return result, core.E("collect.GitHub.Collect", "context cancelled", ctx.Err())
		}
		// Issues are collected unless PRsOnly is set.
		if !g.PRsOnly {
			issueResult, err := g.collectIssues(ctx, cfg, repo)
			if err != nil {
				result.Errors++
				if cfg.Dispatcher != nil {
					cfg.Dispatcher.EmitError(g.Name(), fmt.Sprintf("Error collecting issues for %s: %v", repo, err), nil)
				}
			} else {
				result.Items += issueResult.Items
				result.Skipped += issueResult.Skipped
				result.Files = append(result.Files, issueResult.Files...)
			}
		}
		// PRs are collected unless IssuesOnly is set.
		if !g.IssuesOnly {
			prResult, err := g.collectPRs(ctx, cfg, repo)
			if err != nil {
				result.Errors++
				if cfg.Dispatcher != nil {
					cfg.Dispatcher.EmitError(g.Name(), fmt.Sprintf("Error collecting PRs for %s: %v", repo, err), nil)
				}
			} else {
				result.Items += prResult.Items
				result.Skipped += prResult.Skipped
				result.Files = append(result.Files, prResult.Files...)
			}
		}
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitComplete(g.Name(), fmt.Sprintf("Collected %d items", result.Items), result)
	}
	return result, nil
}
// listOrgRepos returns all repository names for the configured org,
// using the gh CLI (up to 1000 repositories).
func (g *GitHubCollector) listOrgRepos(ctx context.Context) ([]string, error) {
	args := []string{"repo", "list", g.Org, "--json", "name", "--limit", "1000"}
	out, err := exec.CommandContext(ctx, "gh", args...).Output()
	if err != nil {
		return nil, core.E("collect.GitHub.listOrgRepos", "failed to list repos", err)
	}

	var repos []ghRepo
	if err := json.Unmarshal(out, &repos); err != nil {
		return nil, core.E("collect.GitHub.listOrgRepos", "failed to parse repo list", err)
	}

	names := make([]string, 0, len(repos))
	for _, r := range repos {
		names = append(names, r.Name)
	}
	return names, nil
}
// collectIssues collects issues for a single repository via `gh issue list`
// (all states, up to 100) and writes one markdown file per issue under
// <OutputDir>/github/<org>/<repo>/issues/<number>.md.
// NOTE(review): this mirrors collectPRs almost line-for-line; consider
// extracting a shared helper.
func (g *GitHubCollector) collectIssues(ctx context.Context, cfg *Config, repo string) (*Result, error) {
	result := &Result{Source: fmt.Sprintf("github:%s/%s/issues", g.Org, repo)}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(g.Name(), fmt.Sprintf("[dry-run] Would collect issues for %s/%s", g.Org, repo), nil)
		}
		return result, nil
	}
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "github"); err != nil {
			return result, err
		}
	}
	repoRef := fmt.Sprintf("%s/%s", g.Org, repo)
	cmd := exec.CommandContext(ctx, "gh", "issue", "list",
		"--repo", repoRef,
		"--json", "number,title,state,author,body,createdAt,labels,url",
		"--limit", "100",
		"--state", "all",
	)
	out, err := cmd.Output()
	if err != nil {
		return result, core.E("collect.GitHub.collectIssues", "gh issue list failed for "+repoRef, err)
	}
	var issues []ghIssue
	if err := json.Unmarshal(out, &issues); err != nil {
		return result, core.E("collect.GitHub.collectIssues", "failed to parse issues", err)
	}
	baseDir := filepath.Join(cfg.OutputDir, "github", g.Org, repo, "issues")
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.GitHub.collectIssues", "failed to create output directory", err)
	}
	for _, issue := range issues {
		filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", issue.Number))
		content := formatIssueMarkdown(issue)
		// A failed write is counted but does not stop the remaining issues.
		if err := cfg.Output.Write(filePath, content); err != nil {
			result.Errors++
			continue
		}
		result.Items++
		result.Files = append(result.Files, filePath)
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitItem(g.Name(), fmt.Sprintf("Issue #%d: %s", issue.Number, issue.Title), nil)
		}
	}
	return result, nil
}
// collectPRs collects pull requests for a single repository via
// `gh pr list` (all states, up to 100) and writes one markdown file per PR
// under <OutputDir>/github/<org>/<repo>/pulls/<number>.md.
// NOTE(review): this mirrors collectIssues almost line-for-line; consider
// extracting a shared helper.
func (g *GitHubCollector) collectPRs(ctx context.Context, cfg *Config, repo string) (*Result, error) {
	result := &Result{Source: fmt.Sprintf("github:%s/%s/pulls", g.Org, repo)}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(g.Name(), fmt.Sprintf("[dry-run] Would collect PRs for %s/%s", g.Org, repo), nil)
		}
		return result, nil
	}
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "github"); err != nil {
			return result, err
		}
	}
	repoRef := fmt.Sprintf("%s/%s", g.Org, repo)
	cmd := exec.CommandContext(ctx, "gh", "pr", "list",
		"--repo", repoRef,
		"--json", "number,title,state,author,body,createdAt,labels,url",
		"--limit", "100",
		"--state", "all",
	)
	out, err := cmd.Output()
	if err != nil {
		return result, core.E("collect.GitHub.collectPRs", "gh pr list failed for "+repoRef, err)
	}
	var prs []ghIssue
	if err := json.Unmarshal(out, &prs); err != nil {
		return result, core.E("collect.GitHub.collectPRs", "failed to parse pull requests", err)
	}
	baseDir := filepath.Join(cfg.OutputDir, "github", g.Org, repo, "pulls")
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.GitHub.collectPRs", "failed to create output directory", err)
	}
	for _, pr := range prs {
		filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", pr.Number))
		content := formatIssueMarkdown(pr)
		// A failed write is counted but does not stop the remaining PRs.
		if err := cfg.Output.Write(filePath, content); err != nil {
			result.Errors++
			continue
		}
		result.Items++
		result.Files = append(result.Files, filePath)
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitItem(g.Name(), fmt.Sprintf("PR #%d: %s", pr.Number, pr.Title), nil)
		}
	}
	return result, nil
}
// formatIssueMarkdown renders a GitHub issue or pull request as a
// markdown document: a title heading, a metadata bullet list, and the
// body text when present.
func formatIssueMarkdown(issue ghIssue) string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "# %s\n\n", issue.Title)
	fmt.Fprintf(&sb, "- **Number:** #%d\n", issue.Number)
	fmt.Fprintf(&sb, "- **State:** %s\n", issue.State)
	fmt.Fprintf(&sb, "- **Author:** %s\n", issue.Author.Login)
	fmt.Fprintf(&sb, "- **Created:** %s\n", issue.CreatedAt.Format(time.RFC3339))
	if n := len(issue.Labels); n > 0 {
		names := make([]string, 0, n)
		for _, label := range issue.Labels {
			names = append(names, label.Name)
		}
		fmt.Fprintf(&sb, "- **Labels:** %s\n", strings.Join(names, ", "))
	}
	if issue.URL != "" {
		fmt.Fprintf(&sb, "- **URL:** %s\n", issue.URL)
	}
	if issue.Body != "" {
		fmt.Fprintf(&sb, "\n%s\n", issue.Body)
	}
	return sb.String()
}

103
pkg/collect/github_test.go Normal file
View file

@ -0,0 +1,103 @@
package collect
import (
"context"
"testing"
"time"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// TestGitHubCollector_Name_Good verifies Name() for an org/repo pair.
func TestGitHubCollector_Name_Good(t *testing.T) {
	collector := &GitHubCollector{Org: "host-uk", Repo: "core"}
	assert.Equal(t, "github:host-uk/core", collector.Name())
}

// TestGitHubCollector_Name_Good_OrgOnly verifies Name() when only an
// org is configured.
func TestGitHubCollector_Name_Good_OrgOnly(t *testing.T) {
	collector := &GitHubCollector{Org: "host-uk"}
	assert.Equal(t, "github:host-uk", collector.Name())
}

// TestGitHubCollector_Collect_Good_DryRun verifies that dry-run mode
// emits progress but collects nothing.
func TestGitHubCollector_Collect_Good_DryRun(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.DryRun = true
	var sawProgress bool
	cfg.Dispatcher.On(EventProgress, func(e Event) {
		sawProgress = true
	})
	collector := &GitHubCollector{Org: "host-uk", Repo: "core"}
	result, err := collector.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.NotNil(t, result)
	assert.Equal(t, 0, result.Items)
	assert.True(t, sawProgress, "Should emit progress event in dry-run mode")
}

// TestGitHubCollector_Collect_Good_DryRun_IssuesOnly covers dry-run
// with the IssuesOnly flag set.
func TestGitHubCollector_Collect_Good_DryRun_IssuesOnly(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.DryRun = true
	collector := &GitHubCollector{Org: "test-org", Repo: "test-repo", IssuesOnly: true}
	result, err := collector.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}

// TestGitHubCollector_Collect_Good_DryRun_PRsOnly covers dry-run with
// the PRsOnly flag set.
func TestGitHubCollector_Collect_Good_DryRun_PRsOnly(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.DryRun = true
	collector := &GitHubCollector{Org: "test-org", Repo: "test-repo", PRsOnly: true}
	result, err := collector.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}
// TestFormatIssueMarkdown_Good checks that every metadata field is
// rendered into the markdown output.
func TestFormatIssueMarkdown_Good(t *testing.T) {
	issue := ghIssue{
		Number:    42,
		Title:     "Test Issue",
		State:     "open",
		Author:    ghAuthor{Login: "testuser"},
		Body:      "This is the body.",
		CreatedAt: time.Date(2025, 1, 15, 10, 0, 0, 0, time.UTC),
		Labels:    []ghLabel{{Name: "bug"}, {Name: "priority"}},
		URL:       "https://github.com/test/repo/issues/42",
	}
	out := formatIssueMarkdown(issue)
	for _, want := range []string{
		"# Test Issue",
		"**Number:** #42",
		"**State:** open",
		"**Author:** testuser",
		"**Labels:** bug, priority",
		"This is the body.",
		"**URL:** https://github.com/test/repo/issues/42",
	} {
		assert.Contains(t, out, want)
	}
}

// TestFormatIssueMarkdown_Good_NoLabels checks that the Labels row is
// omitted when an issue has none.
func TestFormatIssueMarkdown_Good_NoLabels(t *testing.T) {
	issue := ghIssue{
		Number: 1,
		Title:  "Simple",
		State:  "closed",
		Author: ghAuthor{Login: "user"},
	}
	out := formatIssueMarkdown(issue)
	assert.Contains(t, out, "# Simple")
	assert.NotContains(t, out, "**Labels:**")
}

277
pkg/collect/market.go Normal file
View file

@ -0,0 +1,277 @@
package collect
import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"path/filepath"
	"strings"
	"time"

	core "github.com/host-uk/core/pkg/framework/core"
)
// coinGeckoBaseURL is the base URL for the CoinGecko API.
// It is a variable so it can be overridden in tests.
var coinGeckoBaseURL = "https://api.coingecko.com/api/v3"
// MarketCollector collects market data from CoinGecko and writes it
// under <OutputDir>/market/<CoinID> as JSON plus a markdown summary.
type MarketCollector struct {
	// CoinID is the CoinGecko coin identifier (e.g. "bitcoin", "ethereum").
	// It is required; Collect fails when it is empty.
	CoinID string
	// Historical enables collection of historical market chart data in
	// addition to the current snapshot.
	Historical bool
	// FromDate is the start date for historical data in YYYY-MM-DD format.
	// When empty (or unparsable) the last 365 days are requested.
	FromDate string
}
// Name returns the collector identifier, e.g. "market:bitcoin".
func (m *MarketCollector) Name() string {
	return "market:" + m.CoinID
}
// coinData represents the current coin data from CoinGecko
// (the /coins/{id} endpoint response, reduced to the fields we use).
type coinData struct {
	ID         string     `json:"id"`     // CoinGecko coin identifier
	Symbol     string     `json:"symbol"` // ticker symbol, lowercase (e.g. "btc")
	Name       string     `json:"name"`   // display name (e.g. "Bitcoin")
	MarketData marketData `json:"market_data"`
}

// marketData holds the market figures for a coin. The map fields are
// keyed by currency code (e.g. "usd"), as used by formatMarketSummary.
type marketData struct {
	CurrentPrice      map[string]float64 `json:"current_price"`
	MarketCap         map[string]float64 `json:"market_cap"`
	TotalVolume       map[string]float64 `json:"total_volume"`
	High24h           map[string]float64 `json:"high_24h"`
	Low24h            map[string]float64 `json:"low_24h"`
	PriceChange24h    float64            `json:"price_change_24h"`
	PriceChangePct24h float64            `json:"price_change_percentage_24h"`
	MarketCapRank     int                `json:"market_cap_rank"`
	TotalSupply       float64            `json:"total_supply"`
	CirculatingSupply float64            `json:"circulating_supply"`
	LastUpdated       string             `json:"last_updated"` // RFC 3339-style timestamp string
}

// historicalData represents historical market chart data from CoinGecko.
// Each inner slice is a [timestamp, value] pair; timestamps appear to be
// unix milliseconds (see test fixtures) — confirm against the API docs.
type historicalData struct {
	Prices       [][]float64 `json:"prices"`
	MarketCaps   [][]float64 `json:"market_caps"`
	TotalVolumes [][]float64 `json:"total_volumes"`
}
// Collect gathers market data from CoinGecko.
//
// It always fetches the current coin snapshot and, when Historical is
// set, also the historical market chart. A failure in either phase is
// counted in result.Errors and reported via the dispatcher but does not
// abort the other phase; only a missing CoinID or an unwritable output
// directory returns an error.
func (m *MarketCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: m.Name()}
	if m.CoinID == "" {
		return result, core.E("collect.Market.Collect", "coin ID is required", nil)
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(m.Name(), fmt.Sprintf("Starting market data collection for %s", m.CoinID))
	}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(m.Name(), fmt.Sprintf("[dry-run] Would collect market data for %s", m.CoinID), nil)
		}
		return result, nil
	}
	baseDir := filepath.Join(cfg.OutputDir, "market", m.CoinID)
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.Market.Collect", "failed to create output directory", err)
	}
	// Collect current data; a failure here still lets the historical
	// phase run.
	currentResult, err := m.collectCurrent(ctx, cfg, baseDir)
	if err != nil {
		result.Errors++
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitError(m.Name(), fmt.Sprintf("Failed to collect current data: %v", err), nil)
		}
	} else {
		result.Items += currentResult.Items
		result.Files = append(result.Files, currentResult.Files...)
	}
	// Collect historical data if requested.
	if m.Historical {
		histResult, err := m.collectHistorical(ctx, cfg, baseDir)
		if err != nil {
			result.Errors++
			if cfg.Dispatcher != nil {
				cfg.Dispatcher.EmitError(m.Name(), fmt.Sprintf("Failed to collect historical data: %v", err), nil)
			}
		} else {
			result.Items += histResult.Items
			result.Files = append(result.Files, histResult.Files...)
		}
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitComplete(m.Name(), fmt.Sprintf("Collected market data for %s", m.CoinID), result)
	}
	return result, nil
}
// collectCurrent fetches current coin data from CoinGecko and writes
// two files into baseDir: the raw indented JSON ("current.json") and a
// human-readable markdown summary ("summary.md"). Each successful write
// counts as one item in the returned Result.
func (m *MarketCollector) collectCurrent(ctx context.Context, cfg *Config, baseDir string) (*Result, error) {
	result := &Result{Source: m.Name()}
	// Honour the shared rate limiter before hitting the API.
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "coingecko"); err != nil {
			return result, err
		}
	}
	url := fmt.Sprintf("%s/coins/%s", coinGeckoBaseURL, m.CoinID)
	data, err := fetchJSON[coinData](ctx, url)
	if err != nil {
		return result, core.E("collect.Market.collectCurrent", "failed to fetch coin data", err)
	}
	// Write raw JSON.
	jsonBytes, err := json.MarshalIndent(data, "", "  ")
	if err != nil {
		return result, core.E("collect.Market.collectCurrent", "failed to marshal data", err)
	}
	jsonPath := filepath.Join(baseDir, "current.json")
	if err := cfg.Output.Write(jsonPath, string(jsonBytes)); err != nil {
		return result, core.E("collect.Market.collectCurrent", "failed to write JSON", err)
	}
	result.Items++
	result.Files = append(result.Files, jsonPath)
	// Write summary markdown.
	summary := formatMarketSummary(data)
	summaryPath := filepath.Join(baseDir, "summary.md")
	if err := cfg.Output.Write(summaryPath, summary); err != nil {
		return result, core.E("collect.Market.collectCurrent", "failed to write summary", err)
	}
	result.Items++
	result.Files = append(result.Files, summaryPath)
	return result, nil
}
// collectHistorical fetches historical market chart data from CoinGecko
// and writes it as indented JSON to baseDir/historical.json.
//
// The window defaults to 365 days. When FromDate parses as YYYY-MM-DD
// and lies in the past, the window is the number of days since then.
// NOTE(review): an unparsable FromDate is silently ignored and falls
// back to 365 days — confirm this best-effort behaviour is intended.
func (m *MarketCollector) collectHistorical(ctx context.Context, cfg *Config, baseDir string) (*Result, error) {
	result := &Result{Source: m.Name()}
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "coingecko"); err != nil {
			return result, err
		}
	}
	days := "365"
	if m.FromDate != "" {
		fromTime, err := time.Parse("2006-01-02", m.FromDate)
		if err == nil {
			dayCount := int(time.Since(fromTime).Hours() / 24)
			if dayCount > 0 {
				days = fmt.Sprintf("%d", dayCount)
			}
		}
	}
	url := fmt.Sprintf("%s/coins/%s/market_chart?vs_currency=usd&days=%s", coinGeckoBaseURL, m.CoinID, days)
	data, err := fetchJSON[historicalData](ctx, url)
	if err != nil {
		return result, core.E("collect.Market.collectHistorical", "failed to fetch historical data", err)
	}
	jsonBytes, err := json.MarshalIndent(data, "", "  ")
	if err != nil {
		return result, core.E("collect.Market.collectHistorical", "failed to marshal data", err)
	}
	jsonPath := filepath.Join(baseDir, "historical.json")
	if err := cfg.Output.Write(jsonPath, string(jsonBytes)); err != nil {
		return result, core.E("collect.Market.collectHistorical", "failed to write JSON", err)
	}
	result.Items++
	result.Files = append(result.Files, jsonPath)
	return result, nil
}
// fetchJSON performs a GET request against url and decodes the JSON
// response body into a freshly allocated value of type T.
//
// The request carries the collector User-Agent, accepts JSON, and
// inherits cancellation from ctx. A non-200 status is returned as an
// error; the body is drained first so the HTTP transport can reuse the
// underlying connection (a body that is closed unread prevents reuse).
func fetchJSON[T any](ctx context.Context, url string) (*T, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, core.E("collect.fetchJSON", "failed to create request", err)
	}
	req.Header.Set("User-Agent", "CoreCollector/1.0")
	req.Header.Set("Accept", "application/json")
	resp, err := httpClient.Do(req)
	if err != nil {
		return nil, core.E("collect.fetchJSON", "request failed", err)
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		// Drain the unread body so the connection can go back to the pool.
		_, _ = io.Copy(io.Discard, resp.Body)
		return nil, core.E("collect.fetchJSON",
			fmt.Sprintf("unexpected status code: %d for %s", resp.StatusCode, url), nil)
	}
	var data T
	if err := json.NewDecoder(resp.Body).Decode(&data); err != nil {
		return nil, core.E("collect.fetchJSON", "failed to decode response", err)
	}
	return &data, nil
}
// formatMarketSummary renders current coin data as a human-readable
// markdown summary. Only fields that are present are emitted: USD
// quotes when the "usd" key exists, rank/supply when positive, and the
// last-updated footer when non-empty.
func formatMarketSummary(data *coinData) string {
	var b strings.Builder
	fmt.Fprintf(&b, "# %s (%s)\n\n", data.Name, strings.ToUpper(data.Symbol))
	md := data.MarketData
	if price, ok := md.CurrentPrice["usd"]; ok {
		fmt.Fprintf(&b, "- **Current Price (USD):** $%.2f\n", price)
	}
	// Named marketCap rather than cap: the latter shadows the
	// predeclared builtin cap().
	if marketCap, ok := md.MarketCap["usd"]; ok {
		fmt.Fprintf(&b, "- **Market Cap (USD):** $%.0f\n", marketCap)
	}
	if vol, ok := md.TotalVolume["usd"]; ok {
		fmt.Fprintf(&b, "- **24h Volume (USD):** $%.0f\n", vol)
	}
	if high, ok := md.High24h["usd"]; ok {
		fmt.Fprintf(&b, "- **24h High (USD):** $%.2f\n", high)
	}
	if low, ok := md.Low24h["usd"]; ok {
		fmt.Fprintf(&b, "- **24h Low (USD):** $%.2f\n", low)
	}
	fmt.Fprintf(&b, "- **24h Price Change:** $%.2f (%.2f%%)\n", md.PriceChange24h, md.PriceChangePct24h)
	if md.MarketCapRank > 0 {
		fmt.Fprintf(&b, "- **Market Cap Rank:** #%d\n", md.MarketCapRank)
	}
	if md.CirculatingSupply > 0 {
		fmt.Fprintf(&b, "- **Circulating Supply:** %.0f\n", md.CirculatingSupply)
	}
	if md.TotalSupply > 0 {
		fmt.Fprintf(&b, "- **Total Supply:** %.0f\n", md.TotalSupply)
	}
	if md.LastUpdated != "" {
		fmt.Fprintf(&b, "\n*Last updated: %s*\n", md.LastUpdated)
	}
	return b.String()
}
// FormatMarketSummary exposes formatMarketSummary for tests.
func FormatMarketSummary(data *coinData) string {
	summary := formatMarketSummary(data)
	return summary
}

187
pkg/collect/market_test.go Normal file
View file

@ -0,0 +1,187 @@
package collect
import (
"context"
"encoding/json"
"net/http"
"net/http/httptest"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// TestMarketCollector_Name_Good verifies the collector identifier.
func TestMarketCollector_Name_Good(t *testing.T) {
	collector := &MarketCollector{CoinID: "bitcoin"}
	assert.Equal(t, "market:bitcoin", collector.Name())
}

// TestMarketCollector_Collect_Bad_NoCoinID verifies that an empty coin
// ID is rejected.
func TestMarketCollector_Collect_Bad_NoCoinID(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	_, err := (&MarketCollector{}).Collect(context.Background(), cfg)
	assert.Error(t, err)
}

// TestMarketCollector_Collect_Good_DryRun verifies that dry-run mode
// collects nothing.
func TestMarketCollector_Collect_Good_DryRun(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.DryRun = true
	result, err := (&MarketCollector{CoinID: "bitcoin"}).Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}
// TestMarketCollector_Collect_Good_CurrentData runs a full current-data
// collection against a stub CoinGecko server and verifies both output
// files (current.json and summary.md) land in the mock medium.
func TestMarketCollector_Collect_Good_CurrentData(t *testing.T) {
	// Set up a mock CoinGecko server that always returns one coin payload.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		data := coinData{
			ID:     "bitcoin",
			Symbol: "btc",
			Name:   "Bitcoin",
			MarketData: marketData{
				CurrentPrice:      map[string]float64{"usd": 42000.50},
				MarketCap:         map[string]float64{"usd": 800000000000},
				TotalVolume:       map[string]float64{"usd": 25000000000},
				High24h:           map[string]float64{"usd": 43000},
				Low24h:            map[string]float64{"usd": 41000},
				PriceChange24h:    500.25,
				PriceChangePct24h: 1.2,
				MarketCapRank:     1,
				CirculatingSupply: 19500000,
				TotalSupply:       21000000,
				LastUpdated:       "2025-01-15T10:00:00Z",
			},
		}
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(data)
	}))
	defer server.Close()
	// Point the package-level base URL at the stub and restore it after.
	oldURL := coinGeckoBaseURL
	coinGeckoBaseURL = server.URL
	defer func() { coinGeckoBaseURL = oldURL }()
	mock := io.NewMockMedium()
	cfg := NewConfigWithMedium(mock, "/output")
	// Disable rate limiter to avoid delays in tests.
	cfg.Limiter = nil
	m := &MarketCollector{CoinID: "bitcoin"}
	result, err := m.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 2, result.Items) // current.json + summary.md
	assert.Len(t, result.Files, 2)
	// Verify current.json was written.
	content, err := mock.Read("/output/market/bitcoin/current.json")
	assert.NoError(t, err)
	assert.Contains(t, content, "bitcoin")
	// Verify summary.md was written.
	summary, err := mock.Read("/output/market/bitcoin/summary.md")
	assert.NoError(t, err)
	assert.Contains(t, summary, "Bitcoin")
	assert.Contains(t, summary, "42000.50")
}
// TestMarketCollector_Collect_Good_Historical exercises the Historical
// path: the stub server answers the first request with current coin
// data and every subsequent request with chart data, matching the order
// in which MarketCollector.Collect issues them.
func TestMarketCollector_Collect_Good_Historical(t *testing.T) {
	callCount := 0
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		callCount++
		w.Header().Set("Content-Type", "application/json")
		if callCount == 1 {
			// Current data response (first call).
			data := coinData{
				ID:     "ethereum",
				Symbol: "eth",
				Name:   "Ethereum",
				MarketData: marketData{
					CurrentPrice: map[string]float64{"usd": 3000},
				},
			}
			_ = json.NewEncoder(w).Encode(data)
		} else {
			// Historical data response (second call).
			data := historicalData{
				Prices:       [][]float64{{1705305600000, 3000.0}, {1705392000000, 3100.0}},
				MarketCaps:   [][]float64{{1705305600000, 360000000000}},
				TotalVolumes: [][]float64{{1705305600000, 15000000000}},
			}
			_ = json.NewEncoder(w).Encode(data)
		}
	}))
	defer server.Close()
	oldURL := coinGeckoBaseURL
	coinGeckoBaseURL = server.URL
	defer func() { coinGeckoBaseURL = oldURL }()
	mock := io.NewMockMedium()
	cfg := NewConfigWithMedium(mock, "/output")
	cfg.Limiter = nil
	m := &MarketCollector{CoinID: "ethereum", Historical: true}
	result, err := m.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 3, result.Items) // current.json + summary.md + historical.json
	assert.Len(t, result.Files, 3)
	// Verify historical.json was written.
	content, err := mock.Read("/output/market/ethereum/historical.json")
	assert.NoError(t, err)
	assert.Contains(t, content, "3000")
}
// TestFormatMarketSummary_Good checks the rendered markdown summary.
func TestFormatMarketSummary_Good(t *testing.T) {
	data := &coinData{
		Name:   "Bitcoin",
		Symbol: "btc",
		MarketData: marketData{
			CurrentPrice:      map[string]float64{"usd": 50000},
			MarketCap:         map[string]float64{"usd": 1000000000000},
			MarketCapRank:     1,
			CirculatingSupply: 19500000,
			TotalSupply:       21000000,
		},
	}
	out := FormatMarketSummary(data)
	for _, want := range []string{
		"# Bitcoin (BTC)",
		"$50000.00",
		"Market Cap Rank:** #1",
		"Circulating Supply",
		"Total Supply",
	} {
		assert.Contains(t, out, want)
	}
}

// TestMarketCollector_Collect_Bad_ServerError verifies that an upstream
// 500 is recorded in result.Errors rather than failing the collection.
func TestMarketCollector_Collect_Bad_ServerError(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusInternalServerError)
	}))
	defer server.Close()
	savedURL := coinGeckoBaseURL
	coinGeckoBaseURL = server.URL
	defer func() { coinGeckoBaseURL = savedURL }()
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.Limiter = nil
	collector := &MarketCollector{CoinID: "bitcoin"}
	result, err := collector.Collect(context.Background(), cfg)
	// Should have errors but not fail entirely.
	assert.NoError(t, err)
	assert.Equal(t, 1, result.Errors)
}

402
pkg/collect/papers.go Normal file
View file

@ -0,0 +1,402 @@
package collect
import (
"context"
"encoding/xml"
"fmt"
"net/http"
"net/url"
"path/filepath"
"strings"
core "github.com/host-uk/core/pkg/framework/core"
"golang.org/x/net/html"
)
// Paper source identifiers accepted by PapersCollector.Source.
const (
	// PaperSourceIACR selects the IACR ePrint archive.
	PaperSourceIACR = "iacr"
	// PaperSourceArXiv selects the arXiv API.
	PaperSourceArXiv = "arxiv"
	// PaperSourceAll selects both sources.
	PaperSourceAll = "all"
)
// PapersCollector collects papers from IACR and arXiv and writes one
// markdown file per paper under <OutputDir>/papers/<source>.
type PapersCollector struct {
	// Source is one of PaperSourceIACR, PaperSourceArXiv, or PaperSourceAll.
	// Any other value causes Collect to return an error.
	Source string
	// Category is the arXiv category (e.g. "cs.CR" for cryptography).
	// It is only used for the arXiv source and is optional.
	Category string
	// Query is the search query string. It is required.
	Query string
}
// Name returns the collector identifier, e.g. "papers:iacr".
func (p *PapersCollector) Name() string {
	return "papers:" + p.Source
}
// paper represents a parsed academic paper.
type paper struct {
	ID       string   // source-specific identifier, safe for use as a file name
	Title    string   // paper title
	Authors  []string // author display names
	Abstract string   // abstract text, if available
	Date     string   // publication date as reported by the source
	URL      string   // canonical URL of the paper
	Source   string   // originating source: "iacr" or "arxiv"
}
// Collect gathers papers from the configured sources.
//
// A query is required. For a single source the source's result and
// error are returned directly. For PaperSourceAll both sources run; the
// call only errors when both fail (returning the IACR error), otherwise
// the results are merged and each failed source adds one to Errors.
// Note that dry-run returns before source dispatch, so an unknown
// Source value is not reported in dry-run mode.
func (p *PapersCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: p.Name()}
	if p.Query == "" {
		return result, core.E("collect.Papers.Collect", "query is required", nil)
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(p.Name(), fmt.Sprintf("Starting paper collection for %q", p.Query))
	}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(p.Name(), fmt.Sprintf("[dry-run] Would search papers for %q", p.Query), nil)
		}
		return result, nil
	}
	switch p.Source {
	case PaperSourceIACR:
		return p.collectIACR(ctx, cfg)
	case PaperSourceArXiv:
		return p.collectArXiv(ctx, cfg)
	case PaperSourceAll:
		iacrResult, iacrErr := p.collectIACR(ctx, cfg)
		arxivResult, arxivErr := p.collectArXiv(ctx, cfg)
		if iacrErr != nil && arxivErr != nil {
			return result, core.E("collect.Papers.Collect", "all sources failed", iacrErr)
		}
		merged := MergeResults(p.Name(), iacrResult, arxivResult)
		if iacrErr != nil {
			merged.Errors++
		}
		if arxivErr != nil {
			merged.Errors++
		}
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitComplete(p.Name(), fmt.Sprintf("Collected %d papers", merged.Items), merged)
		}
		return merged, nil
	default:
		return result, core.E("collect.Papers.Collect",
			fmt.Sprintf("unknown source: %s (use iacr, arxiv, or all)", p.Source), nil)
	}
}
// collectIACR fetches papers from the IACR ePrint archive.
//
// It scrapes the HTML search-results page (no JSON API is used here),
// extracts each paper entry, and writes one markdown file per paper
// under <OutputDir>/papers/iacr. Individual write failures increment
// result.Errors; request, parse, and setup failures abort.
func (p *PapersCollector) collectIACR(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: "papers:iacr"}
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "iacr"); err != nil {
			return result, err
		}
	}
	searchURL := fmt.Sprintf("https://eprint.iacr.org/search?q=%s", url.QueryEscape(p.Query))
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
	if err != nil {
		return result, core.E("collect.Papers.collectIACR", "failed to create request", err)
	}
	req.Header.Set("User-Agent", "CoreCollector/1.0")
	resp, err := httpClient.Do(req)
	if err != nil {
		return result, core.E("collect.Papers.collectIACR", "request failed", err)
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return result, core.E("collect.Papers.collectIACR",
			fmt.Sprintf("unexpected status code: %d", resp.StatusCode), nil)
	}
	doc, err := html.Parse(resp.Body)
	if err != nil {
		return result, core.E("collect.Papers.collectIACR", "failed to parse HTML", err)
	}
	papers := extractIACRPapers(doc)
	baseDir := filepath.Join(cfg.OutputDir, "papers", "iacr")
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.Papers.collectIACR", "failed to create output directory", err)
	}
	for _, ppr := range papers {
		filePath := filepath.Join(baseDir, ppr.ID+".md")
		content := formatPaperMarkdown(ppr)
		if err := cfg.Output.Write(filePath, content); err != nil {
			result.Errors++
			continue
		}
		result.Items++
		result.Files = append(result.Files, filePath)
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Paper: %s", ppr.Title), nil)
		}
	}
	return result, nil
}
// arxivFeed represents the Atom feed returned by the arXiv API.
type arxivFeed struct {
	XMLName xml.Name     `xml:"feed"`
	Entries []arxivEntry `xml:"entry"`
}

// arxivEntry is a single Atom entry describing one paper.
type arxivEntry struct {
	ID        string        `xml:"id"`        // entry URL, e.g. "http://arxiv.org/abs/2501.12345v1"
	Title     string        `xml:"title"`     // may carry surrounding whitespace from the feed
	Summary   string        `xml:"summary"`   // abstract text
	Published string        `xml:"published"` // publication timestamp string
	Authors   []arxivAuthor `xml:"author"`
	Links     []arxivLink   `xml:"link"`
}

// arxivAuthor is an Atom author element.
type arxivAuthor struct {
	Name string `xml:"name"`
}

// arxivLink is an Atom link element; rel="alternate" points at the
// canonical abstract page.
type arxivLink struct {
	Href string `xml:"href,attr"`
	Rel  string `xml:"rel,attr"`
	Type string `xml:"type,attr"`
}
// collectArXiv fetches papers from the arXiv API.
//
// It queries the Atom search endpoint (up to 50 results) and writes one
// markdown file per entry under <OutputDir>/papers/arxiv. When Category
// is set the query is prefixed with "cat:<category>+AND+" — "+" acts as
// a space in the arXiv query syntax (per the arXiv API docs; confirm if
// changing). Individual write failures increment result.Errors;
// request, parse, and setup failures abort.
func (p *PapersCollector) collectArXiv(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: "papers:arxiv"}
	if cfg.Limiter != nil {
		if err := cfg.Limiter.Wait(ctx, "arxiv"); err != nil {
			return result, err
		}
	}
	query := url.QueryEscape(p.Query)
	if p.Category != "" {
		query = fmt.Sprintf("cat:%s+AND+%s", url.QueryEscape(p.Category), query)
	}
	searchURL := fmt.Sprintf("https://export.arxiv.org/api/query?search_query=%s&max_results=50", query)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil)
	if err != nil {
		return result, core.E("collect.Papers.collectArXiv", "failed to create request", err)
	}
	req.Header.Set("User-Agent", "CoreCollector/1.0")
	resp, err := httpClient.Do(req)
	if err != nil {
		return result, core.E("collect.Papers.collectArXiv", "request failed", err)
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return result, core.E("collect.Papers.collectArXiv",
			fmt.Sprintf("unexpected status code: %d", resp.StatusCode), nil)
	}
	var feed arxivFeed
	if err := xml.NewDecoder(resp.Body).Decode(&feed); err != nil {
		return result, core.E("collect.Papers.collectArXiv", "failed to parse XML", err)
	}
	baseDir := filepath.Join(cfg.OutputDir, "papers", "arxiv")
	if err := cfg.Output.EnsureDir(baseDir); err != nil {
		return result, core.E("collect.Papers.collectArXiv", "failed to create output directory", err)
	}
	for _, entry := range feed.Entries {
		ppr := arxivEntryToPaper(entry)
		filePath := filepath.Join(baseDir, ppr.ID+".md")
		content := formatPaperMarkdown(ppr)
		if err := cfg.Output.Write(filePath, content); err != nil {
			result.Errors++
			continue
		}
		result.Items++
		result.Files = append(result.Files, filePath)
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Paper: %s", ppr.Title), nil)
		}
	}
	return result, nil
}
// arxivEntryToPaper converts an arXiv Atom entry to a paper. The arXiv
// identifier is derived from the entry ID URL (the part after "/abs/"),
// sanitised for use as a file name, and the paper URL is taken from the
// rel="alternate" link when one exists, falling back to the entry ID.
func arxivEntryToPaper(entry arxivEntry) paper {
	names := make([]string, len(entry.Authors))
	for i, author := range entry.Authors {
		names[i] = author.Name
	}
	// Derive a file-name-safe identifier from the entry URL.
	arxivID := entry.ID
	if pos := strings.LastIndex(arxivID, "/abs/"); pos != -1 {
		arxivID = arxivID[pos+5:]
	}
	arxivID = strings.NewReplacer("/", "-", ":", "-").Replace(arxivID)
	// Prefer the canonical alternate link for the paper URL.
	link := entry.ID
	for _, l := range entry.Links {
		if l.Rel == "alternate" {
			link = l.Href
			break
		}
	}
	return paper{
		ID:       arxivID,
		Title:    strings.TrimSpace(entry.Title),
		Authors:  names,
		Abstract: strings.TrimSpace(entry.Summary),
		Date:     entry.Published,
		URL:      link,
		Source:   "arxiv",
	}
}
// extractIACRPapers extracts paper metadata from an IACR search results page.
func extractIACRPapers(doc *html.Node) []paper {
var papers []paper
var walk func(*html.Node)
walk = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "div" {
for _, attr := range n.Attr {
if attr.Key == "class" && strings.Contains(attr.Val, "paperentry") {
ppr := parseIACREntry(n)
if ppr.Title != "" {
papers = append(papers, ppr)
}
}
}
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
walk(c)
}
}
walk(doc)
return papers
}
// parseIACREntry extracts paper data from an IACR paper entry div.
//
// The walk maps markup to fields by element and class: the first anchor
// provides the title, anchors whose href contains "/eprint/" provide
// the URL and ID (last two path segments joined with "-"), spans with
// "author"/"date" classes fill those fields, and a p with an "abstract"
// class fills the abstract. NOTE(review): class names and structure are
// assumptions about eprint.iacr.org markup — verify against live pages
// when the scraper misbehaves.
func parseIACREntry(node *html.Node) paper {
	ppr := paper{Source: "iacr"}
	var walk func(*html.Node)
	walk = func(n *html.Node) {
		if n.Type == html.ElementNode {
			switch n.Data {
			case "a":
				for _, attr := range n.Attr {
					if attr.Key == "href" && strings.Contains(attr.Val, "/eprint/") {
						ppr.URL = "https://eprint.iacr.org" + attr.Val
						// Extract ID from URL.
						parts := strings.Split(attr.Val, "/")
						if len(parts) >= 2 {
							ppr.ID = parts[len(parts)-2] + "-" + parts[len(parts)-1]
						}
					}
				}
				// First anchor text encountered becomes the title.
				if ppr.Title == "" {
					ppr.Title = strings.TrimSpace(extractText(n))
				}
			case "span":
				for _, attr := range n.Attr {
					if attr.Key == "class" {
						switch {
						case strings.Contains(attr.Val, "author"):
							author := strings.TrimSpace(extractText(n))
							if author != "" {
								ppr.Authors = append(ppr.Authors, author)
							}
						case strings.Contains(attr.Val, "date"):
							ppr.Date = strings.TrimSpace(extractText(n))
						}
					}
				}
			case "p":
				for _, attr := range n.Attr {
					if attr.Key == "class" && strings.Contains(attr.Val, "abstract") {
						ppr.Abstract = strings.TrimSpace(extractText(n))
					}
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
	}
	walk(node)
	return ppr
}
// formatPaperMarkdown renders a paper as a markdown document: a title
// heading, a metadata bullet list, and an Abstract section when present.
func formatPaperMarkdown(ppr paper) string {
	var out strings.Builder
	fmt.Fprintf(&out, "# %s\n\n", ppr.Title)
	if len(ppr.Authors) > 0 {
		fmt.Fprintf(&out, "- **Authors:** %s\n", strings.Join(ppr.Authors, ", "))
	}
	// Emit optional single-valued metadata rows in a fixed order.
	for _, row := range []struct{ label, value string }{
		{"Published", ppr.Date},
		{"URL", ppr.URL},
		{"Source", ppr.Source},
	} {
		if row.value != "" {
			fmt.Fprintf(&out, "- **%s:** %s\n", row.label, row.value)
		}
	}
	if ppr.Abstract != "" {
		fmt.Fprintf(&out, "\n## Abstract\n\n%s\n", ppr.Abstract)
	}
	return out.String()
}
// FormatPaperMarkdown is exported for testing.
func FormatPaperMarkdown(title string, authors []string, date, paperURL, source, abstract string) string {
return formatPaperMarkdown(paper{
Title: title,
Authors: authors,
Date: date,
URL: paperURL,
Source: source,
Abstract: abstract,
})
}

108
pkg/collect/papers_test.go Normal file
View file

@ -0,0 +1,108 @@
package collect
import (
"context"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// TestPapersCollector_Name_Good verifies Name() for the IACR source.
func TestPapersCollector_Name_Good(t *testing.T) {
	pc := &PapersCollector{Source: PaperSourceIACR}
	assert.Equal(t, "papers:iacr", pc.Name())
}

// TestPapersCollector_Name_Good_ArXiv verifies Name() for arXiv.
func TestPapersCollector_Name_Good_ArXiv(t *testing.T) {
	pc := &PapersCollector{Source: PaperSourceArXiv}
	assert.Equal(t, "papers:arxiv", pc.Name())
}

// TestPapersCollector_Name_Good_All verifies Name() for both sources.
func TestPapersCollector_Name_Good_All(t *testing.T) {
	pc := &PapersCollector{Source: PaperSourceAll}
	assert.Equal(t, "papers:all", pc.Name())
}

// TestPapersCollector_Collect_Bad_NoQuery verifies an empty query is
// rejected.
func TestPapersCollector_Collect_Bad_NoQuery(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	pc := &PapersCollector{Source: PaperSourceIACR}
	_, err := pc.Collect(context.Background(), cfg)
	assert.Error(t, err)
}

// TestPapersCollector_Collect_Bad_UnknownSource verifies an unknown
// source value is rejected.
func TestPapersCollector_Collect_Bad_UnknownSource(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	pc := &PapersCollector{Source: "unknown", Query: "test"}
	_, err := pc.Collect(context.Background(), cfg)
	assert.Error(t, err)
}

// TestPapersCollector_Collect_Good_DryRun verifies dry-run collects
// nothing.
func TestPapersCollector_Collect_Good_DryRun(t *testing.T) {
	medium := io.NewMockMedium()
	cfg := NewConfigWithMedium(medium, "/output")
	cfg.DryRun = true
	pc := &PapersCollector{Source: PaperSourceAll, Query: "cryptography"}
	result, err := pc.Collect(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}
// TestFormatPaperMarkdown_Good checks that every supplied field is
// rendered into the markdown output.
func TestFormatPaperMarkdown_Good(t *testing.T) {
	out := FormatPaperMarkdown(
		"Zero-Knowledge Proofs Revisited",
		[]string{"Alice", "Bob"},
		"2025-01-15",
		"https://eprint.iacr.org/2025/001",
		"iacr",
		"We present a new construction for zero-knowledge proofs.",
	)
	for _, want := range []string{
		"# Zero-Knowledge Proofs Revisited",
		"**Authors:** Alice, Bob",
		"**Published:** 2025-01-15",
		"**URL:** https://eprint.iacr.org/2025/001",
		"**Source:** iacr",
		"## Abstract",
		"zero-knowledge proofs",
	} {
		assert.Contains(t, out, want)
	}
}

// TestFormatPaperMarkdown_Good_Minimal checks that optional sections
// are omitted when their inputs are empty.
func TestFormatPaperMarkdown_Good_Minimal(t *testing.T) {
	out := FormatPaperMarkdown("Title Only", nil, "", "", "", "")
	assert.Contains(t, out, "# Title Only")
	assert.NotContains(t, out, "**Authors:**")
	assert.NotContains(t, out, "## Abstract")
}
// TestArxivEntryToPaper_Good verifies Atom-entry conversion: ID
// extraction from the /abs/ URL, whitespace trimming, and selection of
// the rel="alternate" link.
func TestArxivEntryToPaper_Good(t *testing.T) {
	entry := arxivEntry{
		ID:        "http://arxiv.org/abs/2501.12345v1",
		Title:     "  A Great Paper  ",
		Summary:   "  This paper presents...  ",
		Published: "2025-01-15T00:00:00Z",
		Authors:   []arxivAuthor{{Name: "Alice"}, {Name: "Bob"}},
		Links: []arxivLink{
			{Href: "http://arxiv.org/abs/2501.12345v1", Rel: "alternate"},
			{Href: "http://arxiv.org/pdf/2501.12345v1", Rel: "related", Type: "application/pdf"},
		},
	}
	got := arxivEntryToPaper(entry)
	assert.Equal(t, "2501.12345v1", got.ID)
	assert.Equal(t, "A Great Paper", got.Title)
	assert.Equal(t, "This paper presents...", got.Abstract)
	assert.Equal(t, "2025-01-15T00:00:00Z", got.Date)
	assert.Equal(t, []string{"Alice", "Bob"}, got.Authors)
	assert.Equal(t, "http://arxiv.org/abs/2501.12345v1", got.URL)
	assert.Equal(t, "arxiv", got.Source)
}

345
pkg/collect/process.go Normal file
View file

@ -0,0 +1,345 @@
package collect
import (
"context"
"encoding/json"
"fmt"
"path/filepath"
"sort"
"strings"
core "github.com/host-uk/core/pkg/framework/core"
"golang.org/x/net/html"
)
// Processor converts collected data files (HTML, JSON, or markdown)
// into clean markdown under <OutputDir>/processed/<Source>.
type Processor struct {
	// Source identifies the data source directory to process; it names
	// the output subdirectory and the processor.
	Source string
	// Dir is the directory containing files to process. Required.
	Dir string
}
// Name returns the processor identifier, e.g. "process:github".
func (p *Processor) Name() string {
	return "process:" + p.Source
}
// Process reads files from the source directory, converts HTML or JSON
// to clean markdown, and writes the results to the output directory.
//
// Dispatch is by file extension: .html/.htm and .json are converted,
// .md is passed through trimmed, and anything else is counted in
// result.Skipped. Subdirectories are not recursed into. Per-file read,
// convert, and write failures increment result.Errors and move on;
// only listing, setup, and context cancellation abort.
func (p *Processor) Process(ctx context.Context, cfg *Config) (*Result, error) {
	result := &Result{Source: p.Name()}
	if p.Dir == "" {
		return result, core.E("collect.Processor.Process", "directory is required", nil)
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitStart(p.Name(), fmt.Sprintf("Processing files in %s", p.Dir))
	}
	if cfg.DryRun {
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitProgress(p.Name(), fmt.Sprintf("[dry-run] Would process files in %s", p.Dir), nil)
		}
		return result, nil
	}
	entries, err := cfg.Output.List(p.Dir)
	if err != nil {
		return result, core.E("collect.Processor.Process", "failed to list directory", err)
	}
	outputDir := filepath.Join(cfg.OutputDir, "processed", p.Source)
	if err := cfg.Output.EnsureDir(outputDir); err != nil {
		return result, core.E("collect.Processor.Process", "failed to create output directory", err)
	}
	for _, entry := range entries {
		// Bail out promptly if the caller cancelled.
		if ctx.Err() != nil {
			return result, core.E("collect.Processor.Process", "context cancelled", ctx.Err())
		}
		if entry.IsDir() {
			continue
		}
		name := entry.Name()
		srcPath := filepath.Join(p.Dir, name)
		content, err := cfg.Output.Read(srcPath)
		if err != nil {
			result.Errors++
			continue
		}
		var processed string
		ext := strings.ToLower(filepath.Ext(name))
		switch ext {
		case ".html", ".htm":
			processed, err = htmlToMarkdown(content)
			if err != nil {
				result.Errors++
				if cfg.Dispatcher != nil {
					cfg.Dispatcher.EmitError(p.Name(), fmt.Sprintf("Failed to convert %s: %v", name, err), nil)
				}
				continue
			}
		case ".json":
			processed, err = jsonToMarkdown(content)
			if err != nil {
				result.Errors++
				if cfg.Dispatcher != nil {
					cfg.Dispatcher.EmitError(p.Name(), fmt.Sprintf("Failed to convert %s: %v", name, err), nil)
				}
				continue
			}
		case ".md":
			// Already markdown, just clean up.
			processed = strings.TrimSpace(content)
		default:
			result.Skipped++
			continue
		}
		// Write with .md extension regardless of the input extension.
		outName := strings.TrimSuffix(name, ext) + ".md"
		outPath := filepath.Join(outputDir, outName)
		if err := cfg.Output.Write(outPath, processed); err != nil {
			result.Errors++
			continue
		}
		result.Items++
		result.Files = append(result.Files, outPath)
		if cfg.Dispatcher != nil {
			cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Processed: %s", name), nil)
		}
	}
	if cfg.Dispatcher != nil {
		cfg.Dispatcher.EmitComplete(p.Name(), fmt.Sprintf("Processed %d files", result.Items), result)
	}
	return result, nil
}
// htmlToMarkdown converts HTML content to clean markdown by parsing the
// document and walking its node tree.
func htmlToMarkdown(content string) (string, error) {
	root, parseErr := html.Parse(strings.NewReader(content))
	if parseErr != nil {
		return "", core.E("collect.htmlToMarkdown", "failed to parse HTML", parseErr)
	}
	var out strings.Builder
	nodeToMarkdown(&out, root, 0)
	return strings.TrimSpace(out.String()), nil
}
// nodeToMarkdown recursively converts an HTML node tree to markdown and
// appends the output to b.
//
// Element cases that fully consume their own children end with `return`;
// cases without a `return` (text nodes, "ul", unknown elements) fall
// through to the generic child-recursion loop at the bottom. depth is
// threaded through for list nesting but does not currently change output.
func nodeToMarkdown(b *strings.Builder, n *html.Node, depth int) {
	switch n.Type {
	case html.TextNode:
		text := n.Data
		// Skip whitespace-only text nodes; keep internal spacing otherwise.
		if strings.TrimSpace(text) != "" {
			b.WriteString(text)
		}
	case html.ElementNode:
		switch n.Data {
		case "h1":
			b.WriteString("\n# ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "h2":
			b.WriteString("\n## ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "h3":
			b.WriteString("\n### ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "h4":
			b.WriteString("\n#### ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "h5":
			b.WriteString("\n##### ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "h6":
			b.WriteString("\n###### ")
			writeChildrenText(b, n)
			b.WriteString("\n\n")
			return
		case "p":
			// Paragraph children are recursed (not flattened) so inline
			// markup like <strong> inside <p> is still converted.
			b.WriteString("\n")
			for c := n.FirstChild; c != nil; c = c.NextSibling {
				nodeToMarkdown(b, c, depth)
			}
			b.WriteString("\n")
			return
		case "br":
			b.WriteString("\n")
			return
		case "strong", "b":
			b.WriteString("**")
			writeChildrenText(b, n)
			b.WriteString("**")
			return
		case "em", "i":
			b.WriteString("*")
			writeChildrenText(b, n)
			b.WriteString("*")
			return
		case "code":
			b.WriteString("`")
			writeChildrenText(b, n)
			b.WriteString("`")
			return
		case "pre":
			b.WriteString("\n```\n")
			writeChildrenText(b, n)
			b.WriteString("\n```\n")
			return
		case "a":
			// Last href attribute wins if duplicated.
			var href string
			for _, attr := range n.Attr {
				if attr.Key == "href" {
					href = attr.Val
				}
			}
			text := getChildrenText(n)
			if href != "" {
				fmt.Fprintf(b, "[%s](%s)", text, href)
			} else {
				b.WriteString(text)
			}
			return
		case "ul":
			// No return: the generic loop below recurses into the <li>
			// children, which render themselves as "- " items.
			b.WriteString("\n")
		case "ol":
			// Ordered lists number their <li> children here rather than
			// delegating, so the item counter can be tracked.
			b.WriteString("\n")
			counter := 1
			for c := n.FirstChild; c != nil; c = c.NextSibling {
				if c.Type == html.ElementNode && c.Data == "li" {
					fmt.Fprintf(b, "%d. ", counter)
					for gc := c.FirstChild; gc != nil; gc = gc.NextSibling {
						nodeToMarkdown(b, gc, depth+1)
					}
					b.WriteString("\n")
					counter++
				}
			}
			return
		case "li":
			b.WriteString("- ")
			for c := n.FirstChild; c != nil; c = c.NextSibling {
				nodeToMarkdown(b, c, depth+1)
			}
			b.WriteString("\n")
			return
		case "blockquote":
			b.WriteString("\n> ")
			text := getChildrenText(n)
			// Prefix every embedded newline so multi-line quotes stay quoted.
			b.WriteString(strings.ReplaceAll(text, "\n", "\n> "))
			b.WriteString("\n")
			return
		case "hr":
			b.WriteString("\n---\n")
			return
		case "script", "style", "head":
			// Non-content elements are dropped entirely.
			return
		}
	}
	// Generic recursion for document/unknown nodes and fall-through cases.
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		nodeToMarkdown(b, c, depth)
	}
}
// writeChildrenText appends the plain text content of n's children to b.
func writeChildrenText(b *strings.Builder, n *html.Node) {
	text := getChildrenText(n)
	b.WriteString(text)
}
// getChildrenText returns the concatenated text content of all children of
// n, descending recursively through non-text nodes.
func getChildrenText(n *html.Node) string {
	var sb strings.Builder
	for child := n.FirstChild; child != nil; child = child.NextSibling {
		if child.Type == html.TextNode {
			sb.WriteString(child.Data)
			continue
		}
		sb.WriteString(getChildrenText(child))
	}
	return sb.String()
}
// jsonToMarkdown converts JSON content to a formatted markdown document
// headed "# Data", with the value rendered as nested bullet lists.
func jsonToMarkdown(content string) (string, error) {
	var parsed any
	if err := json.Unmarshal([]byte(content), &parsed); err != nil {
		return "", core.E("collect.jsonToMarkdown", "failed to parse JSON", err)
	}
	var sb strings.Builder
	sb.WriteString("# Data\n\n")
	jsonValueToMarkdown(&sb, parsed, 0)
	return strings.TrimSpace(sb.String()), nil
}
// jsonValueToMarkdown recursively formats a JSON value as markdown.
func jsonValueToMarkdown(b *strings.Builder, data any, depth int) {
switch v := data.(type) {
case map[string]any:
keys := make([]string, 0, len(v))
for key := range v {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
val := v[key]
indent := strings.Repeat(" ", depth)
switch child := val.(type) {
case map[string]any, []any:
fmt.Fprintf(b, "%s- **%s:**\n", indent, key)
jsonValueToMarkdown(b, child, depth+1)
default:
fmt.Fprintf(b, "%s- **%s:** %v\n", indent, key, val)
}
}
case []any:
for i, item := range v {
indent := strings.Repeat(" ", depth)
switch child := item.(type) {
case map[string]any, []any:
fmt.Fprintf(b, "%s- Item %d:\n", indent, i+1)
jsonValueToMarkdown(b, child, depth+1)
default:
fmt.Fprintf(b, "%s- %v\n", indent, item)
}
}
default:
indent := strings.Repeat(" ", depth)
fmt.Fprintf(b, "%s%v\n", indent, data)
}
}
// HTMLToMarkdown is exported for testing; it delegates to htmlToMarkdown.
func HTMLToMarkdown(src string) (string, error) {
	return htmlToMarkdown(src)
}
// JSONToMarkdown is exported for testing; it delegates to jsonToMarkdown.
func JSONToMarkdown(src string) (string, error) {
	return jsonToMarkdown(src)
}

201
pkg/collect/process_test.go Normal file
View file

@ -0,0 +1,201 @@
package collect
import (
"context"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// Name must format the identifier as "process:<source>".
func TestProcessor_Name_Good(t *testing.T) {
	p := &Processor{Source: "github"}
	assert.Equal(t, "process:github", p.Name())
}

// A Processor without Dir must fail fast.
func TestProcessor_Process_Bad_NoDir(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	p := &Processor{Source: "test"}
	_, err := p.Process(context.Background(), cfg)
	assert.Error(t, err)
}

// Dry-run must succeed without processing any items.
func TestProcessor_Process_Good_DryRun(t *testing.T) {
	m := io.NewMockMedium()
	cfg := NewConfigWithMedium(m, "/output")
	cfg.DryRun = true
	p := &Processor{Source: "test", Dir: "/input"}
	result, err := p.Process(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 0, result.Items)
}

// HTML inputs are converted and written as .md under processed/<source>.
func TestProcessor_Process_Good_HTMLFiles(t *testing.T) {
	m := io.NewMockMedium()
	m.Dirs["/input"] = true
	m.Files["/input/page.html"] = `<html><body><h1>Hello</h1><p>World</p></body></html>`
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	p := &Processor{Source: "test", Dir: "/input"}
	result, err := p.Process(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 1, result.Items)
	assert.Len(t, result.Files, 1)
	content, err := m.Read("/output/processed/test/page.md")
	assert.NoError(t, err)
	assert.Contains(t, content, "# Hello")
	assert.Contains(t, content, "World")
}

// JSON inputs are rendered under a "# Data" heading.
func TestProcessor_Process_Good_JSONFiles(t *testing.T) {
	m := io.NewMockMedium()
	m.Dirs["/input"] = true
	m.Files["/input/data.json"] = `{"name": "Bitcoin", "price": 42000}`
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	p := &Processor{Source: "market", Dir: "/input"}
	result, err := p.Process(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 1, result.Items)
	content, err := m.Read("/output/processed/market/data.md")
	assert.NoError(t, err)
	assert.Contains(t, content, "# Data")
	assert.Contains(t, content, "Bitcoin")
}

// Markdown inputs pass through (trimmed) without conversion.
func TestProcessor_Process_Good_MarkdownPassthrough(t *testing.T) {
	m := io.NewMockMedium()
	m.Dirs["/input"] = true
	m.Files["/input/readme.md"] = "# Already Markdown\n\nThis is already formatted."
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	p := &Processor{Source: "docs", Dir: "/input"}
	result, err := p.Process(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 1, result.Items)
	content, err := m.Read("/output/processed/docs/readme.md")
	assert.NoError(t, err)
	assert.Contains(t, content, "# Already Markdown")
}

// Unknown extensions are counted as skipped, not as errors.
func TestProcessor_Process_Good_SkipUnknownTypes(t *testing.T) {
	m := io.NewMockMedium()
	m.Dirs["/input"] = true
	m.Files["/input/image.png"] = "binary data"
	m.Files["/input/doc.html"] = "<h1>Heading</h1>"
	cfg := NewConfigWithMedium(m, "/output")
	cfg.Limiter = nil
	p := &Processor{Source: "mixed", Dir: "/input"}
	result, err := p.Process(context.Background(), cfg)
	assert.NoError(t, err)
	assert.Equal(t, 1, result.Items)   // Only the HTML file
	assert.Equal(t, 1, result.Skipped) // The PNG file
}

// Table-driven coverage of the per-element HTML-to-markdown conversions.
func TestHTMLToMarkdown_Good(t *testing.T) {
	tests := []struct {
		name     string
		input    string
		contains []string
	}{
		{
			name:     "heading",
			input:    "<h1>Title</h1>",
			contains: []string{"# Title"},
		},
		{
			name:     "paragraph",
			input:    "<p>Hello world</p>",
			contains: []string{"Hello world"},
		},
		{
			name:     "bold",
			input:    "<p><strong>bold text</strong></p>",
			contains: []string{"**bold text**"},
		},
		{
			name:     "italic",
			input:    "<p><em>italic text</em></p>",
			contains: []string{"*italic text*"},
		},
		{
			name:     "code",
			input:    "<p><code>code</code></p>",
			contains: []string{"`code`"},
		},
		{
			name:     "link",
			input:    `<p><a href="https://example.com">Example</a></p>`,
			contains: []string{"[Example](https://example.com)"},
		},
		{
			name:     "nested headings",
			input:    "<h2>Section</h2><h3>Subsection</h3>",
			contains: []string{"## Section", "### Subsection"},
		},
		{
			name:     "pre block",
			input:    "<pre>func main() {}</pre>",
			contains: []string{"```", "func main() {}"},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := HTMLToMarkdown(tt.input)
			assert.NoError(t, err)
			for _, s := range tt.contains {
				assert.Contains(t, result, s)
			}
		})
	}
}

// Script/style/head content must be dropped from the output entirely.
func TestHTMLToMarkdown_Good_StripsScripts(t *testing.T) {
	input := `<html><head><script>alert('xss')</script></head><body><p>Clean</p></body></html>`
	result, err := HTMLToMarkdown(input)
	assert.NoError(t, err)
	assert.Contains(t, result, "Clean")
	assert.NotContains(t, result, "alert")
	assert.NotContains(t, result, "script")
}

func TestJSONToMarkdown_Good(t *testing.T) {
	input := `{"name": "test", "count": 42}`
	result, err := JSONToMarkdown(input)
	assert.NoError(t, err)
	assert.Contains(t, result, "# Data")
	assert.Contains(t, result, "test")
	assert.Contains(t, result, "42")
}

func TestJSONToMarkdown_Good_Array(t *testing.T) {
	input := `[{"id": 1}, {"id": 2}]`
	result, err := JSONToMarkdown(input)
	assert.NoError(t, err)
	assert.Contains(t, result, "# Data")
}

// Malformed JSON must surface as an error.
func TestJSONToMarkdown_Bad_InvalidJSON(t *testing.T) {
	_, err := JSONToMarkdown("not json")
	assert.Error(t, err)
}

130
pkg/collect/ratelimit.go Normal file
View file

@ -0,0 +1,130 @@
package collect
import (
"context"
"fmt"
"os/exec"
"strconv"
"strings"
"sync"
"time"
core "github.com/host-uk/core/pkg/framework/core"
)
// RateLimiter tracks per-source rate limiting to avoid overwhelming APIs.
// All fields are guarded by mu. The zero value is not usable; construct
// with NewRateLimiter.
type RateLimiter struct {
	mu     sync.Mutex
	delays map[string]time.Duration // minimum interval between requests, keyed by source
	last   map[string]time.Time     // timestamp of the most recent request per source
}
// Default rate limit delays per source. Sources not listed here fall back
// to a 500ms delay (see Wait and GetDelay).
var defaultDelays = map[string]time.Duration{
	"github":      500 * time.Millisecond,
	"bitcointalk": 2 * time.Second,
	"coingecko":   1500 * time.Millisecond,
	"iacr":        1 * time.Second,
	"arxiv":       1 * time.Second,
}
// NewRateLimiter creates a limiter seeded with the default per-source
// delays. The defaults are copied so SetDelay never mutates the shared
// defaultDelays map.
func NewRateLimiter() *RateLimiter {
	rl := &RateLimiter{
		delays: make(map[string]time.Duration, len(defaultDelays)),
		last:   make(map[string]time.Time),
	}
	for source, d := range defaultDelays {
		rl.delays[source] = d
	}
	return rl
}
// Wait blocks until the rate limit allows the next request for the given
// source, then claims the slot by recording the current time. Unknown
// sources use a default delay of 500ms. It respects context cancellation.
//
// The elapsed time is re-checked after sleeping: in the original
// single-sleep form, several goroutines waiting on the same source would
// all claim the slot simultaneously once the timer fired, defeating the
// limit. The loop makes exactly one waiter win each interval.
func (r *RateLimiter) Wait(ctx context.Context, source string) error {
	for {
		r.mu.Lock()
		delay, ok := r.delays[source]
		if !ok {
			delay = 500 * time.Millisecond
		}
		elapsed := time.Since(r.last[source])
		if elapsed >= delay {
			// Enough time has passed — claim the slot immediately.
			r.last[source] = time.Now()
			r.mu.Unlock()
			return nil
		}
		remaining := delay - elapsed
		r.mu.Unlock()
		// Sleep outside the lock, then loop to re-evaluate: another
		// goroutine may have claimed the slot while we slept.
		select {
		case <-ctx.Done():
			return core.E("collect.RateLimiter.Wait", "context cancelled", ctx.Err())
		case <-time.After(remaining):
		}
	}
}
// SetDelay sets the minimum interval between requests for a source.
func (r *RateLimiter) SetDelay(source string, d time.Duration) {
	r.mu.Lock()
	r.delays[source] = d
	r.mu.Unlock()
}
// GetDelay returns the delay configured for a source, or the 500ms
// default when the source has no explicit entry.
func (r *RateLimiter) GetDelay(source string) time.Duration {
	r.mu.Lock()
	defer r.mu.Unlock()
	d, ok := r.delays[source]
	if !ok {
		return 500 * time.Millisecond
	}
	return d
}
// CheckGitHubRateLimit checks GitHub API rate limit status by shelling
// out to `gh api rate_limit`. Returns used and limit counts. As a side
// effect, when usage reaches 75% of the limit the "github" source delay
// is raised to 5 seconds to slow further collection.
//
// NOTE(review): the subprocess runs without a context or timeout — a hung
// `gh` invocation blocks the caller indefinitely; consider
// exec.CommandContext with a deadline.
func (r *RateLimiter) CheckGitHubRateLimit() (used, limit int, err error) {
	// The jq expression prints "<used> <limit>" on a single line.
	cmd := exec.Command("gh", "api", "rate_limit", "--jq", ".rate | \"\\(.used) \\(.limit)\"")
	out, err := cmd.Output()
	if err != nil {
		return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to check rate limit", err)
	}
	parts := strings.Fields(strings.TrimSpace(string(out)))
	if len(parts) != 2 {
		return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit",
			fmt.Sprintf("unexpected output format: %q", string(out)), nil)
	}
	used, err = strconv.Atoi(parts[0])
	if err != nil {
		return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to parse used count", err)
	}
	limit, err = strconv.Atoi(parts[1])
	if err != nil {
		return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to parse limit count", err)
	}
	// Auto-pause at 75% usage
	if limit > 0 {
		usage := float64(used) / float64(limit)
		if usage >= 0.75 {
			r.SetDelay("github", 5*time.Second)
		}
	}
	return used, limit, nil
}

View file

@ -0,0 +1,84 @@
package collect
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
// The first Wait on a fresh source returns immediately; the second must
// block for roughly the configured delay.
func TestRateLimiter_Wait_Good(t *testing.T) {
	rl := NewRateLimiter()
	rl.SetDelay("test", 50*time.Millisecond)
	ctx := context.Background()
	// First call should return immediately
	start := time.Now()
	err := rl.Wait(ctx, "test")
	assert.NoError(t, err)
	assert.Less(t, time.Since(start), 50*time.Millisecond)
	// Second call should wait at least the delay
	start = time.Now()
	err = rl.Wait(ctx, "test")
	assert.NoError(t, err)
	assert.GreaterOrEqual(t, time.Since(start), 40*time.Millisecond) // allow small timing variance
}

// A cancelled context must abort a pending Wait with an error.
func TestRateLimiter_Wait_Bad_ContextCancelled(t *testing.T) {
	rl := NewRateLimiter()
	rl.SetDelay("test", 5*time.Second)
	ctx := context.Background()
	// First call to set the last time
	err := rl.Wait(ctx, "test")
	assert.NoError(t, err)
	// Cancel context before second call
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	err = rl.Wait(ctx, "test")
	assert.Error(t, err)
}

// SetDelay overrides must be visible through GetDelay.
func TestRateLimiter_SetDelay_Good(t *testing.T) {
	rl := NewRateLimiter()
	rl.SetDelay("custom", 3*time.Second)
	assert.Equal(t, 3*time.Second, rl.GetDelay("custom"))
}

// Built-in sources carry their documented default delays.
func TestRateLimiter_GetDelay_Good_Defaults(t *testing.T) {
	rl := NewRateLimiter()
	assert.Equal(t, 500*time.Millisecond, rl.GetDelay("github"))
	assert.Equal(t, 2*time.Second, rl.GetDelay("bitcointalk"))
	assert.Equal(t, 1500*time.Millisecond, rl.GetDelay("coingecko"))
	assert.Equal(t, 1*time.Second, rl.GetDelay("iacr"))
}

func TestRateLimiter_GetDelay_Good_UnknownSource(t *testing.T) {
	rl := NewRateLimiter()
	// Unknown sources should get the default 500ms delay
	assert.Equal(t, 500*time.Millisecond, rl.GetDelay("unknown"))
}

func TestRateLimiter_Wait_Good_UnknownSource(t *testing.T) {
	rl := NewRateLimiter()
	ctx := context.Background()
	// Unknown source should use default delay of 500ms
	err := rl.Wait(ctx, "unknown-source")
	assert.NoError(t, err)
}

// The constructor must wire both maps and seed every default delay.
func TestNewRateLimiter_Good(t *testing.T) {
	rl := NewRateLimiter()
	assert.NotNil(t, rl)
	assert.NotNil(t, rl.delays)
	assert.NotNil(t, rl.last)
	assert.Len(t, rl.delays, len(defaultDelays))
}

113
pkg/collect/state.go Normal file
View file

@ -0,0 +1,113 @@
package collect
import (
"encoding/json"
"sync"
"time"
core "github.com/host-uk/core/pkg/framework/core"
"github.com/host-uk/core/pkg/io"
)
// State tracks collection progress for incremental runs.
// It persists entries to disk so that subsequent runs can resume
// where they left off. All access to entries is guarded by mu.
type State struct {
	mu      sync.Mutex
	medium  io.Medium              // storage backend used by Load/Save
	path    string                 // location of the persisted JSON state file
	entries map[string]*StateEntry // per-source progress, keyed by source name
}
// StateEntry tracks state for one source. Entries are serialised to JSON
// by State.Save and restored by State.Load.
type StateEntry struct {
	// Source identifies the collector.
	Source string `json:"source"`
	// LastRun is the timestamp of the last successful run.
	LastRun time.Time `json:"last_run"`
	// LastID is an opaque identifier for the last item processed.
	LastID string `json:"last_id,omitempty"`
	// Items is the total number of items collected so far.
	Items int `json:"items"`
	// Cursor is an opaque pagination cursor for resumption.
	Cursor string `json:"cursor,omitempty"`
}
// NewState creates a state tracker that persists to the given path using
// the provided storage medium.
func NewState(m io.Medium, path string) *State {
	return &State{
		medium:  m,
		path:    path,
		entries: map[string]*StateEntry{},
	}
}
// Load reads state from disk. A missing file is not an error: the tracker
// simply starts empty.
func (s *State) Load() error {
	s.mu.Lock()
	defer s.mu.Unlock()
	if !s.medium.IsFile(s.path) {
		return nil
	}
	raw, err := s.medium.Read(s.path)
	if err != nil {
		return core.E("collect.State.Load", "failed to read state file", err)
	}
	// Start from a non-nil map so a JSON "null" payload still leaves the
	// tracker with a usable (empty) entry table.
	parsed := make(map[string]*StateEntry)
	if err := json.Unmarshal([]byte(raw), &parsed); err != nil {
		return core.E("collect.State.Load", "failed to parse state file", err)
	}
	s.entries = parsed
	return nil
}
// Save writes the current state to disk as indented JSON.
func (s *State) Save() error {
	s.mu.Lock()
	defer s.mu.Unlock()
	encoded, err := json.MarshalIndent(s.entries, "", " ")
	if err != nil {
		return core.E("collect.State.Save", "failed to marshal state", err)
	}
	if writeErr := s.medium.Write(s.path, string(encoded)); writeErr != nil {
		return core.E("collect.State.Save", "failed to write state file", writeErr)
	}
	return nil
}
// Get returns a copy of the state for a source; the boolean reports
// whether an entry exists. A copy is returned so callers cannot mutate
// internal state through the result.
func (s *State) Get(source string) (*StateEntry, bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	stored, ok := s.entries[source]
	if !ok {
		return nil, false
	}
	clone := *stored
	return &clone, true
}
// Set updates state for a source. The entry is stored as a copy so later
// mutation of the caller's struct cannot change tracked state — matching
// the defensive copy Get already makes on the way out. A nil entry is
// stored as-is (preserving prior behaviour for that edge case).
func (s *State) Set(source string, entry *StateEntry) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if entry == nil {
		s.entries[source] = nil
		return
	}
	cp := *entry
	s.entries[source] = &cp
}

144
pkg/collect/state_test.go Normal file
View file

@ -0,0 +1,144 @@
package collect
import (
"testing"
"time"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
// Set followed by Get must round-trip all entry fields.
func TestState_SetGet_Good(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/state.json")
	entry := &StateEntry{
		Source:  "github:test",
		LastRun: time.Now(),
		Items:   42,
		LastID:  "abc123",
		Cursor:  "cursor-xyz",
	}
	s.Set("github:test", entry)
	got, ok := s.Get("github:test")
	assert.True(t, ok)
	assert.Equal(t, entry.Source, got.Source)
	assert.Equal(t, entry.Items, got.Items)
	assert.Equal(t, entry.LastID, got.LastID)
	assert.Equal(t, entry.Cursor, got.Cursor)
}

// Unknown sources return (nil, false).
func TestState_Get_Bad(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/state.json")
	got, ok := s.Get("nonexistent")
	assert.False(t, ok)
	assert.Nil(t, got)
}

// Save then Load in a fresh instance must reproduce the entry.
func TestState_SaveLoad_Good(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/state.json")
	now := time.Date(2025, 1, 15, 10, 30, 0, 0, time.UTC)
	entry := &StateEntry{
		Source:  "market:bitcoin",
		LastRun: now,
		Items:   100,
		LastID:  "btc-100",
	}
	s.Set("market:bitcoin", entry)
	// Save state
	err := s.Save()
	assert.NoError(t, err)
	// Verify file was written
	assert.True(t, m.IsFile("/state.json"))
	// Load into a new state instance
	s2 := NewState(m, "/state.json")
	err = s2.Load()
	assert.NoError(t, err)
	got, ok := s2.Get("market:bitcoin")
	assert.True(t, ok)
	assert.Equal(t, "market:bitcoin", got.Source)
	assert.Equal(t, 100, got.Items)
	assert.Equal(t, "btc-100", got.LastID)
	assert.True(t, now.Equal(got.LastRun))
}

func TestState_Load_Good_NoFile(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/nonexistent.json")
	// Loading when no file exists should not error
	err := s.Load()
	assert.NoError(t, err)
	// State should be empty
	_, ok := s.Get("anything")
	assert.False(t, ok)
}

// Corrupt state files must surface a parse error.
func TestState_Load_Bad_InvalidJSON(t *testing.T) {
	m := io.NewMockMedium()
	m.Files["/state.json"] = "not valid json"
	s := NewState(m, "/state.json")
	err := s.Load()
	assert.Error(t, err)
}

// Multiple entries survive a save/load round-trip independently.
func TestState_SaveLoad_Good_MultipleEntries(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/state.json")
	s.Set("source-a", &StateEntry{Source: "source-a", Items: 10})
	s.Set("source-b", &StateEntry{Source: "source-b", Items: 20})
	s.Set("source-c", &StateEntry{Source: "source-c", Items: 30})
	err := s.Save()
	assert.NoError(t, err)
	s2 := NewState(m, "/state.json")
	err = s2.Load()
	assert.NoError(t, err)
	a, ok := s2.Get("source-a")
	assert.True(t, ok)
	assert.Equal(t, 10, a.Items)
	b, ok := s2.Get("source-b")
	assert.True(t, ok)
	assert.Equal(t, 20, b.Items)
	c, ok := s2.Get("source-c")
	assert.True(t, ok)
	assert.Equal(t, 30, c.Items)
}

// A second Set for the same source replaces the first entry.
func TestState_Set_Good_Overwrite(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/state.json")
	s.Set("source", &StateEntry{Source: "source", Items: 5})
	s.Set("source", &StateEntry{Source: "source", Items: 15})
	got, ok := s.Get("source")
	assert.True(t, ok)
	assert.Equal(t, 15, got.Items)
}

func TestNewState_Good(t *testing.T) {
	m := io.NewMockMedium()
	s := NewState(m, "/test/state.json")
	assert.NotNil(t, s)
	assert.NotNil(t, s.entries)
}

254
pkg/config/config.go Normal file
View file

@ -0,0 +1,254 @@
// Package config provides layered configuration management for the Core framework.
//
// Configuration values are resolved in priority order: defaults -> file -> env -> flags.
// Values are stored in a YAML file at ~/.core/config.yaml by default.
//
// Keys use dot notation for nested access:
//
// cfg.Set("dev.editor", "vim")
// var editor string
// cfg.Get("dev.editor", &editor)
package config
import (
"fmt"
"os"
"strings"
"sync"
core "github.com/host-uk/core/pkg/framework/core"
"github.com/host-uk/core/pkg/io"
)
// Config implements the core.Config interface with layered resolution.
// Values are resolved in order: defaults -> file -> env -> flags.
// All access to data is guarded by mu.
type Config struct {
	mu     sync.RWMutex
	medium io.Medium      // storage backend for the config file
	path   string         // config file location (default ~/.core/config.yaml)
	data   map[string]any // merged configuration tree, nested by dot segments
}
// Option is a functional option for configuring a Config instance at
// construction time (see New, WithMedium, WithPath).
type Option func(*Config)
// WithMedium sets the storage medium used for reading and writing the
// configuration file.
func WithMedium(m io.Medium) Option {
	return func(cfg *Config) { cfg.medium = m }
}
// WithPath sets the path of the configuration file.
func WithPath(path string) Option {
	return func(cfg *Config) { cfg.path = path }
}
// New creates a new Config instance with the given options.
// If no medium is provided, it defaults to io.Local.
// If no path is provided, it defaults to ~/.core/config.yaml.
// An existing config file is loaded first, then CORE_CONFIG_* environment
// variables are overlaid on top of the file values.
func New(opts ...Option) (*Config, error) {
	cfg := &Config{data: map[string]any{}}
	for _, apply := range opts {
		apply(cfg)
	}
	if cfg.medium == nil {
		cfg.medium = io.Local
	}
	if cfg.path == "" {
		home, err := os.UserHomeDir()
		if err != nil {
			return nil, core.E("config.New", "failed to determine home directory", err)
		}
		cfg.path = home + "/.core/config.yaml"
	}
	// Load existing config file if it exists
	if cfg.medium.IsFile(cfg.path) {
		fileData, err := Load(cfg.medium, cfg.path)
		if err != nil {
			return nil, core.E("config.New", "failed to load config file", err)
		}
		cfg.data = fileData
	}
	// Overlay environment variables
	for key, val := range LoadEnv("CORE_CONFIG_") {
		setNested(cfg.data, key, val)
	}
	return cfg, nil
}
// Get retrieves a configuration value by dot-notation key into out, which
// must be a pointer to the target type. Returns an error when the key is
// absent or the stored value cannot be assigned to out.
func (c *Config) Get(key string, out any) error {
	c.mu.RLock()
	defer c.mu.RUnlock()
	if val, ok := getNested(c.data, key); ok {
		return assign(val, out)
	}
	return core.E("config.Get", fmt.Sprintf("key not found: %s", key), nil)
}
// Set stores a configuration value by dot-notation key and persists the
// full configuration tree to disk.
func (c *Config) Set(key string, v any) error {
	c.mu.Lock()
	defer c.mu.Unlock()
	setNested(c.data, key, v)
	saveErr := Save(c.medium, c.path, c.data)
	if saveErr != nil {
		return core.E("config.Set", "failed to save config", saveErr)
	}
	return nil
}
// All returns a copy of all configuration values. Nested maps are copied
// recursively (via deepCopyMap) so callers cannot mutate internal state
// through the returned map.
func (c *Config) All() map[string]any {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return deepCopyMap(c.data)
}
// deepCopyMap recursively copies a map[string]any, including maps and
// slices nested inside slices.
//
// The original implementation copied []any values with a shallow copy()
// only, so a map stored inside a slice remained aliased between the copy
// and the source — mutating one mutated the other, violating the "deep
// copy" contract relied on by All(). Slice elements are now copied
// recursively as well.
func deepCopyMap(src map[string]any) map[string]any {
	result := make(map[string]any, len(src))
	for k, v := range src {
		result[k] = deepCopyValue(v)
	}
	return result
}

// deepCopyValue returns a deep copy of a single configuration value:
// maps and slices are duplicated recursively, scalars are returned as-is.
func deepCopyValue(v any) any {
	switch val := v.(type) {
	case map[string]any:
		return deepCopyMap(val)
	case []any:
		cp := make([]any, len(val))
		for i, item := range val {
			cp[i] = deepCopyValue(item)
		}
		return cp
	default:
		return v
	}
}
// Path returns the path to the configuration file. The path is fixed at
// construction time, so no locking is required.
func (c *Config) Path() string {
	return c.path
}
// getNested retrieves a value from a nested map using dot-notation keys.
// The boolean reports whether every path segment resolved; descending
// into a non-map value fails the lookup.
func getNested(data map[string]any, key string) (any, bool) {
	segments := strings.Split(key, ".")
	node := any(data)
	last := len(segments) - 1
	for i, seg := range segments {
		table, isMap := node.(map[string]any)
		if !isMap {
			return nil, false
		}
		child, exists := table[seg]
		if !exists {
			return nil, false
		}
		if i == last {
			return child, true
		}
		node = child
	}
	return nil, false
}
// setNested sets a value in a nested map using dot-notation keys, creating
// intermediate maps as needed. A non-map value found at an intermediate
// segment is replaced by a fresh map.
func setNested(data map[string]any, key string, value any) {
	segments := strings.Split(key, ".")
	node := data
	for _, seg := range segments[:len(segments)-1] {
		child, exists := node[seg]
		if !exists {
			child = map[string]any{}
			node[seg] = child
		}
		table, isMap := child.(map[string]any)
		if !isMap {
			table = map[string]any{}
			node[seg] = table
		}
		node = table
	}
	node[segments[len(segments)-1]] = value
}
// assign copies val into out, converting between the numeric forms that
// YAML/JSON decoding produces (int, int64, float64). out must be a
// pointer to one of: string, int, bool, float64, any, or map[string]any.
// String targets accept any value via fmt formatting; the other typed
// targets reject incompatible values with an error.
func assign(val any, out any) error {
	switch ptr := out.(type) {
	case *string:
		switch v := val.(type) {
		case string:
			*ptr = v
		default:
			// Numbers/bools can still be read as strings.
			*ptr = fmt.Sprintf("%v", v)
		}
	case *int:
		switch v := val.(type) {
		case int:
			*ptr = v
		case float64:
			// JSON decodes all numbers as float64; truncate toward zero.
			*ptr = int(v)
		case int64:
			*ptr = int(v)
		default:
			return core.E("config.assign", fmt.Sprintf("cannot assign %T to *int", val), nil)
		}
	case *bool:
		switch v := val.(type) {
		case bool:
			*ptr = v
		default:
			return core.E("config.assign", fmt.Sprintf("cannot assign %T to *bool", val), nil)
		}
	case *float64:
		switch v := val.(type) {
		case float64:
			*ptr = v
		case int:
			*ptr = float64(v)
		case int64:
			*ptr = float64(v)
		default:
			return core.E("config.assign", fmt.Sprintf("cannot assign %T to *float64", val), nil)
		}
	case *any:
		*ptr = val
	case *map[string]any:
		switch v := val.(type) {
		case map[string]any:
			*ptr = v
		default:
			return core.E("config.assign", fmt.Sprintf("cannot assign %T to *map[string]any", val), nil)
		}
	default:
		return core.E("config.assign", fmt.Sprintf("unsupported target type: %T", out), nil)
	}
	return nil
}
// Compile-time assertion that *Config satisfies the core.Config interface.
var _ core.Config = (*Config)(nil)

227
pkg/config/config_test.go Normal file
View file

@ -0,0 +1,227 @@
package config
import (
"os"
"testing"
"github.com/host-uk/core/pkg/io"
"github.com/stretchr/testify/assert"
)
func TestConfig_Get_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
err = cfg.Set("app.name", "core")
assert.NoError(t, err)
var name string
err = cfg.Get("app.name", &name)
assert.NoError(t, err)
assert.Equal(t, "core", name)
}
func TestConfig_Get_Bad(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
var value string
err = cfg.Get("nonexistent.key", &value)
assert.Error(t, err)
assert.Contains(t, err.Error(), "key not found")
}
func TestConfig_Set_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
err = cfg.Set("dev.editor", "vim")
assert.NoError(t, err)
// Verify the value was saved to the medium
content, readErr := m.Read("/tmp/test/config.yaml")
assert.NoError(t, readErr)
assert.Contains(t, content, "editor: vim")
// Verify we can read it back
var editor string
err = cfg.Get("dev.editor", &editor)
assert.NoError(t, err)
assert.Equal(t, "vim", editor)
}
func TestConfig_Set_Nested_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
err = cfg.Set("a.b.c", "deep")
assert.NoError(t, err)
var val string
err = cfg.Get("a.b.c", &val)
assert.NoError(t, err)
assert.Equal(t, "deep", val)
}
func TestConfig_All_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
_ = cfg.Set("key1", "val1")
_ = cfg.Set("key2", "val2")
all := cfg.All()
assert.Equal(t, "val1", all["key1"])
assert.Equal(t, "val2", all["key2"])
}
func TestConfig_Path_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/custom/path/config.yaml"))
assert.NoError(t, err)
assert.Equal(t, "/custom/path/config.yaml", cfg.Path())
}
func TestConfig_Load_Existing_Good(t *testing.T) {
m := io.NewMockMedium()
m.Files["/tmp/test/config.yaml"] = "app:\n name: existing\n"
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
var name string
err = cfg.Get("app.name", &name)
assert.NoError(t, err)
assert.Equal(t, "existing", name)
}
func TestConfig_Env_Good(t *testing.T) {
// Set environment variable
t.Setenv("CORE_CONFIG_DEV_EDITOR", "nano")
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
var editor string
err = cfg.Get("dev.editor", &editor)
assert.NoError(t, err)
assert.Equal(t, "nano", editor)
}
func TestConfig_Env_Overrides_File_Good(t *testing.T) {
// Set file config
m := io.NewMockMedium()
m.Files["/tmp/test/config.yaml"] = "dev:\n editor: vim\n"
// Set environment override
t.Setenv("CORE_CONFIG_DEV_EDITOR", "nano")
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
var editor string
err = cfg.Get("dev.editor", &editor)
assert.NoError(t, err)
assert.Equal(t, "nano", editor)
}
func TestConfig_Assign_Types_Good(t *testing.T) {
m := io.NewMockMedium()
m.Files["/tmp/test/config.yaml"] = "count: 42\nenabled: true\nratio: 3.14\n"
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
var count int
err = cfg.Get("count", &count)
assert.NoError(t, err)
assert.Equal(t, 42, count)
var enabled bool
err = cfg.Get("enabled", &enabled)
assert.NoError(t, err)
assert.True(t, enabled)
var ratio float64
err = cfg.Get("ratio", &ratio)
assert.NoError(t, err)
assert.InDelta(t, 3.14, ratio, 0.001)
}
func TestConfig_Assign_Any_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml"))
assert.NoError(t, err)
_ = cfg.Set("key", "value")
var val any
err = cfg.Get("key", &val)
assert.NoError(t, err)
assert.Equal(t, "value", val)
}
func TestConfig_DefaultPath_Good(t *testing.T) {
m := io.NewMockMedium()
cfg, err := New(WithMedium(m))
assert.NoError(t, err)
home, _ := os.UserHomeDir()
assert.Equal(t, home+"/.core/config.yaml", cfg.Path())
}
func TestLoadEnv_Good(t *testing.T) {
t.Setenv("CORE_CONFIG_FOO_BAR", "baz")
t.Setenv("CORE_CONFIG_SIMPLE", "value")
result := LoadEnv("CORE_CONFIG_")
assert.Equal(t, "baz", result["foo.bar"])
assert.Equal(t, "value", result["simple"])
}
func TestLoad_Bad(t *testing.T) {
m := io.NewMockMedium()
_, err := Load(m, "/nonexistent/file.yaml")
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed to read config file")
}
func TestLoad_InvalidYAML_Bad(t *testing.T) {
m := io.NewMockMedium()
m.Files["/tmp/test/config.yaml"] = "invalid: yaml: content: [[[["
_, err := Load(m, "/tmp/test/config.yaml")
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed to parse config file")
}
// TestSave_Good verifies that Save marshals the data to YAML and writes it
// to the given path, readable back through the medium.
func TestSave_Good(t *testing.T) {
	medium := io.NewMockMedium()

	err := Save(medium, "/tmp/test/config.yaml", map[string]any{"key": "value"})
	assert.NoError(t, err)

	written, readErr := medium.Read("/tmp/test/config.yaml")
	assert.NoError(t, readErr)
	assert.Contains(t, written, "key: value")
}

40
pkg/config/env.go Normal file
View file

@ -0,0 +1,40 @@
package config
import (
"os"
"strings"
)
// LoadEnv parses environment variables with the given prefix and returns
// them as a flat map with dot-notation keys.
//
// For example, with prefix "CORE_CONFIG_":
//
//	CORE_CONFIG_FOO_BAR=baz -> {"foo.bar": "baz"}
//	CORE_CONFIG_EDITOR=vim  -> {"editor": "vim"}
func LoadEnv(prefix string) map[string]any {
	result := make(map[string]any)
	for _, env := range os.Environ() {
		if !strings.HasPrefix(env, prefix) {
			continue
		}
		// Cut splits on the first "=" only, so values containing "="
		// are preserved intact; ok is false only for malformed entries
		// with no "=" at all, which are skipped.
		name, value, ok := strings.Cut(env, "=")
		if !ok {
			continue
		}
		// Strip the prefix and normalise the name: FOO_BAR -> foo.bar.
		key := strings.TrimPrefix(name, prefix)
		key = strings.ReplaceAll(strings.ToLower(key), "_", ".")
		result[key] = value
	}
	return result
}

45
pkg/config/loader.go Normal file
View file

@ -0,0 +1,45 @@
package config
import (
"path/filepath"
core "github.com/host-uk/core/pkg/framework/core"
"github.com/host-uk/core/pkg/io"
"gopkg.in/yaml.v3"
)
// Load reads a YAML configuration file from the given medium and path.
// Returns the parsed data as a map, or an error if the file cannot be read
// or parsed. Error messages are asserted on by tests; keep them stable.
func Load(m io.Medium, path string) (map[string]any, error) {
	raw, readErr := m.Read(path)
	if readErr != nil {
		return nil, core.E("config.Load", "failed to read config file: "+path, readErr)
	}
	parsed := map[string]any{}
	if parseErr := yaml.Unmarshal([]byte(raw), &parsed); parseErr != nil {
		return nil, core.E("config.Load", "failed to parse config file: "+path, parseErr)
	}
	return parsed, nil
}
// Save writes configuration data to a YAML file at the given path.
// It ensures the parent directory exists before writing, wrapping each
// failure (marshal, mkdir, write) with its own context.
func Save(m io.Medium, path string, data map[string]any) error {
	encoded, marshalErr := yaml.Marshal(data)
	if marshalErr != nil {
		return core.E("config.Save", "failed to marshal config", marshalErr)
	}
	parent := filepath.Dir(path)
	if dirErr := m.EnsureDir(parent); dirErr != nil {
		return core.E("config.Save", "failed to create config directory: "+parent, dirErr)
	}
	if writeErr := m.Write(path, string(encoded)); writeErr != nil {
		return core.E("config.Save", "failed to write config file: "+path, writeErr)
	}
	return nil
}

Some files were not shown because too many files have changed in this diff Show more