diff --git a/go.mod b/go.mod index c3935f47..70a45d41 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/host-uk/core go 1.25.5 require ( - github.com/Snider/Borg v0.1.0 + github.com/Snider/Borg v0.2.0 github.com/getkin/kin-openapi v0.133.0 github.com/host-uk/core/internal/core-ide v0.0.0-20260204004957-989b7e1e6555 github.com/kluctl/go-embed-python v0.0.0-3.13.1-20241219-1 @@ -31,6 +31,7 @@ require ( dario.cat/mergo v1.0.2 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.3.0 // indirect + github.com/Snider/Enchantrix v0.0.2 // indirect github.com/TwiN/go-color v1.4.1 // indirect github.com/adrg/xdg v0.5.3 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect @@ -42,6 +43,7 @@ require ( github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/ebitengine/purego v0.9.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect + github.com/fatih/color v1.18.0 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.7.0 // indirect github.com/go-git/go-git/v5 v5.16.4 // indirect @@ -51,6 +53,8 @@ require ( github.com/godbus/dbus/v5 v5.2.2 // indirect github.com/gofrs/flock v0.12.1 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/go-github/v39 v39.2.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/jsonschema-go v0.4.2 // indirect github.com/google/uuid v1.6.0 // indirect github.com/gorilla/websocket v1.5.3 // indirect @@ -66,6 +70,7 @@ require ( github.com/mailru/easyjson v0.9.1 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect 
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect @@ -75,6 +80,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/samber/lo v1.52.0 // indirect + github.com/schollz/progressbar/v3 v3.18.0 // indirect github.com/sergi/go-diff v1.4.0 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/skeema/knownhosts v1.3.2 // indirect diff --git a/go.sum b/go.sum index 9e0c3082..747121bc 100644 --- a/go.sum +++ b/go.sum @@ -12,6 +12,10 @@ github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBi github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= github.com/Snider/Borg v0.1.0 h1:tLvrytPMIM2To0xByYP+KHLcT9pg9P9y9uRTyG6r9oc= github.com/Snider/Borg v0.1.0/go.mod h1:0GMzdXYzdFZpR25IFne7ErqV/YFQHsX1THm1BbncMPo= +github.com/Snider/Borg v0.2.0 h1:iCyDhY4WTXi39+FexRwXbn2YpZ2U9FUXVXDZk9xRCXQ= +github.com/Snider/Borg v0.2.0/go.mod h1:TqlKnfRo9okioHbgrZPfWjQsztBV0Nfskz4Om1/vdMY= +github.com/Snider/Enchantrix v0.0.2 h1:ExZQiBhfS/p/AHFTKhY80TOd+BXZjK95EzByAEgwvjs= +github.com/Snider/Enchantrix v0.0.2/go.mod h1:CtFcLAvnDT1KcuF1JBb/DJj0KplY8jHryO06KzQ1hsQ= github.com/TwiN/go-color v1.4.1 h1:mqG0P/KBgHKVqmtL5ye7K0/Gr4l6hTksPgTgMk3mUzc= github.com/TwiN/go-color v1.4.1/go.mod h1:WcPf/jtiW95WBIsEeY1Lc/b8aaWoiqQpu5cf8WFxu+s= github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78= @@ -43,6 +47,8 @@ github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod 
h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= @@ -81,10 +87,18 @@ github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeD github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-github/v39 v39.2.0 h1:rNNM311XtPOz5rDdsJXAp2o8F67X9FnROXTvto3aSnQ= +github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8= github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= 
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -138,6 +152,8 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/minio/selfupdate v0.6.0 h1:i76PgT0K5xO9+hjzKcacQtO7+MjJ4JKA8Ak8XQ9DDwU= github.com/minio/selfupdate v0.6.0/go.mod h1:bO02GTIPCMQFTEvE5h4DjYB58bCoZ35XLeBf0buTDdM= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s= github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= @@ -173,6 +189,8 @@ github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw= github.com/samber/lo v1.52.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0= +github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA= +github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec= github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= @@ -238,6 +256,7 @@ go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211209193657-4570a0811e8b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= @@ -247,9 +266,12 @@ golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHi golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c= golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= @@ -272,14 +294,18 @@ golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9sn golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= golang.org/x/term v0.39.0/go.mod 
h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba h1:UKgtfRM7Yh93Sya0Fo8ZzhDP4qBckrrxEr2oF5UIVb8= google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= diff --git a/internal/cmd/collect/cmd.go b/internal/cmd/collect/cmd.go new file mode 100644 index 00000000..7f12c536 --- /dev/null +++ b/internal/cmd/collect/cmd.go @@ -0,0 +1,112 @@ +package collect + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" +) + +func init() { + cli.RegisterCommands(AddCollectCommands) +} + +// Style aliases from shared package +var ( + dimStyle = cli.DimStyle + 
successStyle = cli.SuccessStyle + errorStyle = cli.ErrorStyle +) + +// Shared flags across all collect subcommands +var ( + collectOutputDir string + collectVerbose bool + collectDryRun bool +) + +// AddCollectCommands registers the 'collect' command and all subcommands. +func AddCollectCommands(root *cli.Command) { + collectCmd := &cli.Command{ + Use: "collect", + Short: i18n.T("cmd.collect.short"), + Long: i18n.T("cmd.collect.long"), + } + + // Persistent flags shared across subcommands + cli.PersistentStringFlag(collectCmd, &collectOutputDir, "output", "o", "./collect", i18n.T("cmd.collect.flag.output")) + cli.PersistentBoolFlag(collectCmd, &collectVerbose, "verbose", "v", false, i18n.T("common.flag.verbose")) + cli.PersistentBoolFlag(collectCmd, &collectDryRun, "dry-run", "", false, i18n.T("cmd.collect.flag.dry_run")) + + root.AddCommand(collectCmd) + + addGitHubCommand(collectCmd) + addBitcoinTalkCommand(collectCmd) + addMarketCommand(collectCmd) + addPapersCommand(collectCmd) + addExcavateCommand(collectCmd) + addProcessCommand(collectCmd) + addDispatchCommand(collectCmd) +} + +// newConfig creates a collection Config using the shared persistent flags. +// It uses io.Local for real filesystem access rather than the mock medium. +func newConfig() *collect.Config { + cfg := collect.NewConfigWithMedium(io.Local, collectOutputDir) + cfg.Verbose = collectVerbose + cfg.DryRun = collectDryRun + return cfg +} + +// setupVerboseLogging registers event handlers on the dispatcher for verbose output. 
+func setupVerboseLogging(cfg *collect.Config) { + if !cfg.Verbose { + return + } + + cfg.Dispatcher.On(collect.EventStart, func(e collect.Event) { + cli.Print("%s %s\n", dimStyle.Render("[start]"), e.Message) + }) + cfg.Dispatcher.On(collect.EventProgress, func(e collect.Event) { + cli.Print("%s %s\n", dimStyle.Render("[progress]"), e.Message) + }) + cfg.Dispatcher.On(collect.EventItem, func(e collect.Event) { + cli.Print("%s %s\n", dimStyle.Render("[item]"), e.Message) + }) + cfg.Dispatcher.On(collect.EventError, func(e collect.Event) { + cli.Print("%s %s\n", errorStyle.Render("[error]"), e.Message) + }) + cfg.Dispatcher.On(collect.EventComplete, func(e collect.Event) { + cli.Print("%s %s\n", successStyle.Render("[complete]"), e.Message) + }) +} + +// printResult prints a formatted summary of a collection result. +func printResult(result *collect.Result) { + if result == nil { + return + } + + if result.Items > 0 { + cli.Success(fmt.Sprintf("Collected %d items from %s", result.Items, result.Source)) + } else { + cli.Dim(fmt.Sprintf("No items collected from %s", result.Source)) + } + + if result.Skipped > 0 { + cli.Dim(fmt.Sprintf(" Skipped: %d", result.Skipped)) + } + + if result.Errors > 0 { + cli.Warn(fmt.Sprintf(" Errors: %d", result.Errors)) + } + + if collectVerbose && len(result.Files) > 0 { + cli.Dim(fmt.Sprintf(" Files: %d", len(result.Files))) + for _, f := range result.Files { + cli.Print(" %s\n", dimStyle.Render(f)) + } + } +} diff --git a/internal/cmd/collect/cmd_bitcointalk.go b/internal/cmd/collect/cmd_bitcointalk.go new file mode 100644 index 00000000..495632c3 --- /dev/null +++ b/internal/cmd/collect/cmd_bitcointalk.go @@ -0,0 +1,64 @@ +package collect + +import ( + "context" + "strings" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// BitcoinTalk command flags +var bitcointalkPages int + +// addBitcoinTalkCommand adds the 'bitcointalk' subcommand to the collect parent. 
+func addBitcoinTalkCommand(parent *cli.Command) { + btcCmd := &cli.Command{ + Use: "bitcointalk ", + Short: i18n.T("cmd.collect.bitcointalk.short"), + Long: i18n.T("cmd.collect.bitcointalk.long"), + Args: cli.ExactArgs(1), + RunE: func(cmd *cli.Command, args []string) error { + return runBitcoinTalk(args[0]) + }, + } + + cli.IntFlag(btcCmd, &bitcointalkPages, "pages", "p", 0, i18n.T("cmd.collect.bitcointalk.flag.pages")) + + parent.AddCommand(btcCmd) +} + +func runBitcoinTalk(target string) error { + var topicID, url string + + // Determine if argument is a URL or topic ID + if strings.HasPrefix(target, "http") { + url = target + } else { + topicID = target + } + + cfg := newConfig() + setupVerboseLogging(cfg) + + collector := &collect.BitcoinTalkCollector{ + TopicID: topicID, + URL: url, + Pages: bitcointalkPages, + } + + if cfg.DryRun { + cli.Info("Dry run: would collect from BitcoinTalk topic " + target) + return nil + } + + ctx := context.Background() + result, err := collector.Collect(ctx, cfg) + if err != nil { + return cli.Wrap(err, "bitcointalk collection failed") + } + + printResult(result) + return nil +} diff --git a/internal/cmd/collect/cmd_dispatch.go b/internal/cmd/collect/cmd_dispatch.go new file mode 100644 index 00000000..71a88e2b --- /dev/null +++ b/internal/cmd/collect/cmd_dispatch.go @@ -0,0 +1,130 @@ +package collect + +import ( + "fmt" + "time" + + "github.com/host-uk/core/pkg/cli" + collectpkg "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// addDispatchCommand adds the 'dispatch' subcommand to the collect parent. 
+func addDispatchCommand(parent *cli.Command) { + dispatchCmd := &cli.Command{ + Use: "dispatch ", + Short: i18n.T("cmd.collect.dispatch.short"), + Long: i18n.T("cmd.collect.dispatch.long"), + Args: cli.MinimumNArgs(1), + RunE: func(cmd *cli.Command, args []string) error { + return runDispatch(args[0]) + }, + } + + // Add hooks subcommand group + hooksCmd := &cli.Command{ + Use: "hooks", + Short: i18n.T("cmd.collect.dispatch.hooks.short"), + } + + addHooksListCommand(hooksCmd) + addHooksRegisterCommand(hooksCmd) + + dispatchCmd.AddCommand(hooksCmd) + parent.AddCommand(dispatchCmd) +} + +func runDispatch(eventType string) error { + cfg := newConfig() + setupVerboseLogging(cfg) + + // Validate event type + switch eventType { + case collectpkg.EventStart, + collectpkg.EventProgress, + collectpkg.EventItem, + collectpkg.EventError, + collectpkg.EventComplete: + // Valid event type + default: + return cli.Err("unknown event type: %s (valid: start, progress, item, error, complete)", eventType) + } + + event := collectpkg.Event{ + Type: eventType, + Source: "cli", + Message: fmt.Sprintf("Manual dispatch of %s event", eventType), + Time: time.Now(), + } + + cfg.Dispatcher.Emit(event) + cli.Success(fmt.Sprintf("Dispatched %s event", eventType)) + + return nil +} + +// addHooksListCommand adds the 'hooks list' subcommand. 
+func addHooksListCommand(parent *cli.Command) { + listCmd := &cli.Command{ + Use: "list", + Short: i18n.T("cmd.collect.dispatch.hooks.list.short"), + RunE: func(cmd *cli.Command, args []string) error { + return runHooksList() + }, + } + + parent.AddCommand(listCmd) +} + +func runHooksList() error { + eventTypes := []string{ + collectpkg.EventStart, + collectpkg.EventProgress, + collectpkg.EventItem, + collectpkg.EventError, + collectpkg.EventComplete, + } + + table := cli.NewTable("Event", "Status") + for _, et := range eventTypes { + table.AddRow(et, dimStyle.Render("no hooks registered")) + } + + cli.Blank() + cli.Print("%s\n\n", cli.HeaderStyle.Render("Registered Hooks")) + table.Render() + cli.Blank() + + return nil +} + +// addHooksRegisterCommand adds the 'hooks register' subcommand. +func addHooksRegisterCommand(parent *cli.Command) { + registerCmd := &cli.Command{ + Use: "register ", + Short: i18n.T("cmd.collect.dispatch.hooks.register.short"), + Args: cli.ExactArgs(2), + RunE: func(cmd *cli.Command, args []string) error { + return runHooksRegister(args[0], args[1]) + }, + } + + parent.AddCommand(registerCmd) +} + +func runHooksRegister(eventType, command string) error { + // Validate event type + switch eventType { + case collectpkg.EventStart, + collectpkg.EventProgress, + collectpkg.EventItem, + collectpkg.EventError, + collectpkg.EventComplete: + // Valid + default: + return cli.Err("unknown event type: %s (valid: start, progress, item, error, complete)", eventType) + } + + cli.Success(fmt.Sprintf("Registered hook for %s: %s", eventType, command)) + return nil +} diff --git a/internal/cmd/collect/cmd_excavate.go b/internal/cmd/collect/cmd_excavate.go new file mode 100644 index 00000000..8f2540e5 --- /dev/null +++ b/internal/cmd/collect/cmd_excavate.go @@ -0,0 +1,103 @@ +package collect + +import ( + "context" + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// Excavate 
command flags +var ( + excavateScanOnly bool + excavateResume bool +) + +// addExcavateCommand adds the 'excavate' subcommand to the collect parent. +func addExcavateCommand(parent *cli.Command) { + excavateCmd := &cli.Command{ + Use: "excavate ", + Short: i18n.T("cmd.collect.excavate.short"), + Long: i18n.T("cmd.collect.excavate.long"), + Args: cli.ExactArgs(1), + RunE: func(cmd *cli.Command, args []string) error { + return runExcavate(args[0]) + }, + } + + cli.BoolFlag(excavateCmd, &excavateScanOnly, "scan-only", "", false, i18n.T("cmd.collect.excavate.flag.scan_only")) + cli.BoolFlag(excavateCmd, &excavateResume, "resume", "r", false, i18n.T("cmd.collect.excavate.flag.resume")) + + parent.AddCommand(excavateCmd) +} + +func runExcavate(project string) error { + cfg := newConfig() + setupVerboseLogging(cfg) + + // Load state for resume + if excavateResume { + if err := cfg.State.Load(); err != nil { + return cli.Wrap(err, "failed to load collection state") + } + } + + // Build collectors for the project + collectors := buildProjectCollectors(project) + if len(collectors) == 0 { + return cli.Err("no collectors configured for project: %s", project) + } + + excavator := &collect.Excavator{ + Collectors: collectors, + ScanOnly: excavateScanOnly, + Resume: excavateResume, + } + + if cfg.DryRun { + cli.Info(fmt.Sprintf("Dry run: would excavate project %s with %d collectors", project, len(collectors))) + for _, c := range collectors { + cli.Dim(fmt.Sprintf(" - %s", c.Name())) + } + return nil + } + + ctx := context.Background() + result, err := excavator.Run(ctx, cfg) + if err != nil { + return cli.Wrap(err, "excavation failed") + } + + // Save state for future resume + if err := cfg.State.Save(); err != nil { + cli.Warnf("Failed to save state: %v", err) + } + + printResult(result) + return nil +} + +// buildProjectCollectors creates collectors based on the project name. +// This maps known project names to their collector configurations. 
+func buildProjectCollectors(project string) []collect.Collector { + switch project { + case "bitcoin": + return []collect.Collector{ + &collect.GitHubCollector{Org: "bitcoin", Repo: "bitcoin"}, + &collect.MarketCollector{CoinID: "bitcoin", Historical: true}, + } + case "ethereum": + return []collect.Collector{ + &collect.GitHubCollector{Org: "ethereum", Repo: "go-ethereum"}, + &collect.MarketCollector{CoinID: "ethereum", Historical: true}, + &collect.PapersCollector{Source: "all", Query: "ethereum"}, + } + default: + // Treat unknown projects as GitHub org/repo + return []collect.Collector{ + &collect.GitHubCollector{Org: project}, + } + } +} diff --git a/internal/cmd/collect/cmd_github.go b/internal/cmd/collect/cmd_github.go new file mode 100644 index 00000000..5016feb8 --- /dev/null +++ b/internal/cmd/collect/cmd_github.go @@ -0,0 +1,78 @@ +package collect + +import ( + "context" + "strings" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// GitHub command flags +var ( + githubOrg bool + githubIssuesOnly bool + githubPRsOnly bool +) + +// addGitHubCommand adds the 'github' subcommand to the collect parent. 
+func addGitHubCommand(parent *cli.Command) { + githubCmd := &cli.Command{ + Use: "github ", + Short: i18n.T("cmd.collect.github.short"), + Long: i18n.T("cmd.collect.github.long"), + Args: cli.MinimumNArgs(1), + RunE: func(cmd *cli.Command, args []string) error { + return runGitHub(args[0]) + }, + } + + cli.BoolFlag(githubCmd, &githubOrg, "org", "", false, i18n.T("cmd.collect.github.flag.org")) + cli.BoolFlag(githubCmd, &githubIssuesOnly, "issues-only", "", false, i18n.T("cmd.collect.github.flag.issues_only")) + cli.BoolFlag(githubCmd, &githubPRsOnly, "prs-only", "", false, i18n.T("cmd.collect.github.flag.prs_only")) + + parent.AddCommand(githubCmd) +} + +func runGitHub(target string) error { + if githubIssuesOnly && githubPRsOnly { + return cli.Err("--issues-only and --prs-only are mutually exclusive") + } + + // Parse org/repo argument + var org, repo string + if strings.Contains(target, "/") { + parts := strings.SplitN(target, "/", 2) + org = parts[0] + repo = parts[1] + } else if githubOrg { + org = target + } else { + return cli.Err("argument must be in org/repo format, or use --org for organisation-wide collection") + } + + cfg := newConfig() + setupVerboseLogging(cfg) + + collector := &collect.GitHubCollector{ + Org: org, + Repo: repo, + IssuesOnly: githubIssuesOnly, + PRsOnly: githubPRsOnly, + } + + if cfg.DryRun { + cli.Info("Dry run: would collect from GitHub " + target) + return nil + } + + ctx := context.Background() + result, err := collector.Collect(ctx, cfg) + if err != nil { + return cli.Wrap(err, "github collection failed") + } + + printResult(result) + return nil +} diff --git a/internal/cmd/collect/cmd_market.go b/internal/cmd/collect/cmd_market.go new file mode 100644 index 00000000..5907ada4 --- /dev/null +++ b/internal/cmd/collect/cmd_market.go @@ -0,0 +1,58 @@ +package collect + +import ( + "context" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// Market command flags 
+var ( + marketHistorical bool + marketFromDate string +) + +// addMarketCommand adds the 'market' subcommand to the collect parent. +func addMarketCommand(parent *cli.Command) { + marketCmd := &cli.Command{ + Use: "market ", + Short: i18n.T("cmd.collect.market.short"), + Long: i18n.T("cmd.collect.market.long"), + Args: cli.ExactArgs(1), + RunE: func(cmd *cli.Command, args []string) error { + return runMarket(args[0]) + }, + } + + cli.BoolFlag(marketCmd, &marketHistorical, "historical", "H", false, i18n.T("cmd.collect.market.flag.historical")) + cli.StringFlag(marketCmd, &marketFromDate, "from", "f", "", i18n.T("cmd.collect.market.flag.from")) + + parent.AddCommand(marketCmd) +} + +func runMarket(coinID string) error { + cfg := newConfig() + setupVerboseLogging(cfg) + + collector := &collect.MarketCollector{ + CoinID: coinID, + Historical: marketHistorical, + FromDate: marketFromDate, + } + + if cfg.DryRun { + cli.Info("Dry run: would collect market data for " + coinID) + return nil + } + + ctx := context.Background() + result, err := collector.Collect(ctx, cfg) + if err != nil { + return cli.Wrap(err, "market collection failed") + } + + printResult(result) + return nil +} diff --git a/internal/cmd/collect/cmd_papers.go b/internal/cmd/collect/cmd_papers.go new file mode 100644 index 00000000..de37c0f1 --- /dev/null +++ b/internal/cmd/collect/cmd_papers.go @@ -0,0 +1,63 @@ +package collect + +import ( + "context" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// Papers command flags +var ( + papersSource string + papersCategory string + papersQuery string +) + +// addPapersCommand adds the 'papers' subcommand to the collect parent. 
+func addPapersCommand(parent *cli.Command) { + papersCmd := &cli.Command{ + Use: "papers", + Short: i18n.T("cmd.collect.papers.short"), + Long: i18n.T("cmd.collect.papers.long"), + RunE: func(cmd *cli.Command, args []string) error { + return runPapers() + }, + } + + cli.StringFlag(papersCmd, &papersSource, "source", "s", "all", i18n.T("cmd.collect.papers.flag.source")) + cli.StringFlag(papersCmd, &papersCategory, "category", "c", "", i18n.T("cmd.collect.papers.flag.category")) + cli.StringFlag(papersCmd, &papersQuery, "query", "q", "", i18n.T("cmd.collect.papers.flag.query")) + + parent.AddCommand(papersCmd) +} + +func runPapers() error { + if papersQuery == "" { + return cli.Err("--query (-q) is required") + } + + cfg := newConfig() + setupVerboseLogging(cfg) + + collector := &collect.PapersCollector{ + Source: papersSource, + Category: papersCategory, + Query: papersQuery, + } + + if cfg.DryRun { + cli.Info("Dry run: would collect papers from " + papersSource) + return nil + } + + ctx := context.Background() + result, err := collector.Collect(ctx, cfg) + if err != nil { + return cli.Wrap(err, "papers collection failed") + } + + printResult(result) + return nil +} diff --git a/internal/cmd/collect/cmd_process.go b/internal/cmd/collect/cmd_process.go new file mode 100644 index 00000000..44560b28 --- /dev/null +++ b/internal/cmd/collect/cmd_process.go @@ -0,0 +1,48 @@ +package collect + +import ( + "context" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/collect" + "github.com/host-uk/core/pkg/i18n" +) + +// addProcessCommand adds the 'process' subcommand to the collect parent. 
+func addProcessCommand(parent *cli.Command) { + processCmd := &cli.Command{ + Use: "process ", + Short: i18n.T("cmd.collect.process.short"), + Long: i18n.T("cmd.collect.process.long"), + Args: cli.ExactArgs(2), + RunE: func(cmd *cli.Command, args []string) error { + return runProcess(args[0], args[1]) + }, + } + + parent.AddCommand(processCmd) +} + +func runProcess(source, dir string) error { + cfg := newConfig() + setupVerboseLogging(cfg) + + processor := &collect.Processor{ + Source: source, + Dir: dir, + } + + if cfg.DryRun { + cli.Info("Dry run: would process " + source + " data in " + dir) + return nil + } + + ctx := context.Background() + result, err := processor.Process(ctx, cfg) + if err != nil { + return cli.Wrap(err, "processing failed") + } + + printResult(result) + return nil +} diff --git a/internal/cmd/config/cmd.go b/internal/cmd/config/cmd.go new file mode 100644 index 00000000..7e8c5b69 --- /dev/null +++ b/internal/cmd/config/cmd.go @@ -0,0 +1,18 @@ +package config + +import "github.com/host-uk/core/pkg/cli" + +func init() { + cli.RegisterCommands(AddConfigCommands) +} + +// AddConfigCommands registers the 'config' command group and all subcommands. 
+func AddConfigCommands(root *cli.Command) { + configCmd := cli.NewGroup("config", "Manage configuration", "") + root.AddCommand(configCmd) + + addGetCommand(configCmd) + addSetCommand(configCmd) + addListCommand(configCmd) + addPathCommand(configCmd) +} diff --git a/internal/cmd/config/cmd_get.go b/internal/cmd/config/cmd_get.go new file mode 100644 index 00000000..9ae7f157 --- /dev/null +++ b/internal/cmd/config/cmd_get.go @@ -0,0 +1,40 @@ +package config + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/config" +) + +func addGetCommand(parent *cli.Command) { + cmd := cli.NewCommand("get", "Get a configuration value", "", func(cmd *cli.Command, args []string) error { + key := args[0] + + cfg, err := loadConfig() + if err != nil { + return err + } + + var value any + if err := cfg.Get(key, &value); err != nil { + return cli.Err("key not found: %s", key) + } + + fmt.Println(value) + return nil + }) + + cli.WithArgs(cmd, cli.ExactArgs(1)) + cli.WithExample(cmd, "core config get dev.editor") + + parent.AddCommand(cmd) +} + +func loadConfig() (*config.Config, error) { + cfg, err := config.New() + if err != nil { + return nil, cli.Wrap(err, "failed to load config") + } + return cfg, nil +} diff --git a/internal/cmd/config/cmd_list.go b/internal/cmd/config/cmd_list.go new file mode 100644 index 00000000..dbb038fb --- /dev/null +++ b/internal/cmd/config/cmd_list.go @@ -0,0 +1,35 @@ +package config + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" + "gopkg.in/yaml.v3" +) + +func addListCommand(parent *cli.Command) { + cmd := cli.NewCommand("list", "List all configuration values", "", func(cmd *cli.Command, args []string) error { + cfg, err := loadConfig() + if err != nil { + return err + } + + all := cfg.All() + if len(all) == 0 { + cli.Dim("No configuration values set") + return nil + } + + out, err := yaml.Marshal(all) + if err != nil { + return cli.Wrap(err, "failed to format config") + } + + fmt.Print(string(out)) + 
return nil + }) + + cli.WithArgs(cmd, cli.NoArgs()) + + parent.AddCommand(cmd) +} diff --git a/internal/cmd/config/cmd_path.go b/internal/cmd/config/cmd_path.go new file mode 100644 index 00000000..33264395 --- /dev/null +++ b/internal/cmd/config/cmd_path.go @@ -0,0 +1,23 @@ +package config + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" +) + +func addPathCommand(parent *cli.Command) { + cmd := cli.NewCommand("path", "Show the configuration file path", "", func(cmd *cli.Command, args []string) error { + cfg, err := loadConfig() + if err != nil { + return err + } + + fmt.Println(cfg.Path()) + return nil + }) + + cli.WithArgs(cmd, cli.NoArgs()) + + parent.AddCommand(cmd) +} diff --git a/internal/cmd/config/cmd_set.go b/internal/cmd/config/cmd_set.go new file mode 100644 index 00000000..6cb6addb --- /dev/null +++ b/internal/cmd/config/cmd_set.go @@ -0,0 +1,29 @@ +package config + +import ( + "github.com/host-uk/core/pkg/cli" +) + +func addSetCommand(parent *cli.Command) { + cmd := cli.NewCommand("set", "Set a configuration value", "", func(cmd *cli.Command, args []string) error { + key := args[0] + value := args[1] + + cfg, err := loadConfig() + if err != nil { + return err + } + + if err := cfg.Set(key, value); err != nil { + return cli.Wrap(err, "failed to set config value") + } + + cli.Success(key + " = " + value) + return nil + }) + + cli.WithArgs(cmd, cli.ExactArgs(2)) + cli.WithExample(cmd, "core config set dev.editor vim") + + parent.AddCommand(cmd) +} diff --git a/internal/cmd/crypt/cmd.go b/internal/cmd/crypt/cmd.go new file mode 100644 index 00000000..909c0498 --- /dev/null +++ b/internal/cmd/crypt/cmd.go @@ -0,0 +1,22 @@ +package crypt + +import "github.com/host-uk/core/pkg/cli" + +func init() { + cli.RegisterCommands(AddCryptCommands) +} + +// AddCryptCommands registers the 'crypt' command group and all subcommands. 
+func AddCryptCommands(root *cli.Command) { + cryptCmd := &cli.Command{ + Use: "crypt", + Short: "Cryptographic utilities", + Long: "Encrypt, decrypt, hash, and checksum files and data.", + } + root.AddCommand(cryptCmd) + + addHashCommand(cryptCmd) + addEncryptCommand(cryptCmd) + addKeygenCommand(cryptCmd) + addChecksumCommand(cryptCmd) +} diff --git a/internal/cmd/crypt/cmd_checksum.go b/internal/cmd/crypt/cmd_checksum.go new file mode 100644 index 00000000..4634d7ea --- /dev/null +++ b/internal/cmd/crypt/cmd_checksum.go @@ -0,0 +1,61 @@ +package crypt + +import ( + "fmt" + "path/filepath" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/crypt" +) + +// Checksum command flags +var ( + checksumSHA512 bool + checksumVerify string +) + +func addChecksumCommand(parent *cli.Command) { + checksumCmd := cli.NewCommand("checksum", "Compute file checksum", "", func(cmd *cli.Command, args []string) error { + return runChecksum(args[0]) + }) + checksumCmd.Args = cli.ExactArgs(1) + + cli.BoolFlag(checksumCmd, &checksumSHA512, "sha512", "", false, "Use SHA-512 instead of SHA-256") + cli.StringFlag(checksumCmd, &checksumVerify, "verify", "", "", "Verify file against this hash") + + parent.AddCommand(checksumCmd) +} + +func runChecksum(path string) error { + var hash string + var err error + + if checksumSHA512 { + hash, err = crypt.SHA512File(path) + } else { + hash, err = crypt.SHA256File(path) + } + + if err != nil { + return cli.Wrap(err, "failed to compute checksum") + } + + if checksumVerify != "" { + if hash == checksumVerify { + cli.Success(fmt.Sprintf("Checksum matches: %s", filepath.Base(path))) + return nil + } + cli.Error(fmt.Sprintf("Checksum mismatch: %s", filepath.Base(path))) + cli.Dim(fmt.Sprintf(" expected: %s", checksumVerify)) + cli.Dim(fmt.Sprintf(" got: %s", hash)) + return cli.Err("checksum verification failed") + } + + algo := "SHA-256" + if checksumSHA512 { + algo = "SHA-512" + } + + fmt.Printf("%s %s (%s)\n", hash, path, algo) + 
return nil +} diff --git a/internal/cmd/crypt/cmd_encrypt.go b/internal/cmd/crypt/cmd_encrypt.go new file mode 100644 index 00000000..718d5046 --- /dev/null +++ b/internal/cmd/crypt/cmd_encrypt.go @@ -0,0 +1,115 @@ +package crypt + +import ( + "fmt" + "os" + "strings" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/crypt" +) + +// Encrypt command flags +var ( + encryptPassphrase string + encryptAES bool +) + +func addEncryptCommand(parent *cli.Command) { + encryptCmd := cli.NewCommand("encrypt", "Encrypt a file", "", func(cmd *cli.Command, args []string) error { + return runEncrypt(args[0]) + }) + encryptCmd.Args = cli.ExactArgs(1) + + cli.StringFlag(encryptCmd, &encryptPassphrase, "passphrase", "p", "", "Passphrase (prompted if not given)") + cli.BoolFlag(encryptCmd, &encryptAES, "aes", "", false, "Use AES-256-GCM instead of ChaCha20-Poly1305") + + parent.AddCommand(encryptCmd) + + decryptCmd := cli.NewCommand("decrypt", "Decrypt an encrypted file", "", func(cmd *cli.Command, args []string) error { + return runDecrypt(args[0]) + }) + decryptCmd.Args = cli.ExactArgs(1) + + cli.StringFlag(decryptCmd, &encryptPassphrase, "passphrase", "p", "", "Passphrase (prompted if not given)") + cli.BoolFlag(decryptCmd, &encryptAES, "aes", "", false, "Use AES-256-GCM instead of ChaCha20-Poly1305") + + parent.AddCommand(decryptCmd) +} + +func getPassphrase() (string, error) { + if encryptPassphrase != "" { + return encryptPassphrase, nil + } + return cli.Prompt("Passphrase", "") +} + +func runEncrypt(path string) error { + passphrase, err := getPassphrase() + if err != nil { + return cli.Wrap(err, "failed to read passphrase") + } + if passphrase == "" { + return cli.Err("passphrase cannot be empty") + } + + data, err := os.ReadFile(path) + if err != nil { + return cli.Wrap(err, "failed to read file") + } + + var encrypted []byte + if encryptAES { + encrypted, err = crypt.EncryptAES(data, []byte(passphrase)) + } else { + encrypted, err = crypt.Encrypt(data, 
[]byte(passphrase)) + } + if err != nil { + return cli.Wrap(err, "failed to encrypt") + } + + outPath := path + ".enc" + if err := os.WriteFile(outPath, encrypted, 0o600); err != nil { + return cli.Wrap(err, "failed to write encrypted file") + } + + cli.Success(fmt.Sprintf("Encrypted %s -> %s", path, outPath)) + return nil +} + +func runDecrypt(path string) error { + passphrase, err := getPassphrase() + if err != nil { + return cli.Wrap(err, "failed to read passphrase") + } + if passphrase == "" { + return cli.Err("passphrase cannot be empty") + } + + data, err := os.ReadFile(path) + if err != nil { + return cli.Wrap(err, "failed to read file") + } + + var decrypted []byte + if encryptAES { + decrypted, err = crypt.DecryptAES(data, []byte(passphrase)) + } else { + decrypted, err = crypt.Decrypt(data, []byte(passphrase)) + } + if err != nil { + return cli.Wrap(err, "failed to decrypt") + } + + outPath := strings.TrimSuffix(path, ".enc") + if outPath == path { + outPath = path + ".dec" + } + + if err := os.WriteFile(outPath, decrypted, 0o600); err != nil { + return cli.Wrap(err, "failed to write decrypted file") + } + + cli.Success(fmt.Sprintf("Decrypted %s -> %s", path, outPath)) + return nil +} diff --git a/internal/cmd/crypt/cmd_hash.go b/internal/cmd/crypt/cmd_hash.go new file mode 100644 index 00000000..fcf02e6d --- /dev/null +++ b/internal/cmd/crypt/cmd_hash.go @@ -0,0 +1,74 @@ +package crypt + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/crypt" + "golang.org/x/crypto/bcrypt" +) + +// Hash command flags +var ( + hashBcrypt bool + hashVerify string +) + +func addHashCommand(parent *cli.Command) { + hashCmd := cli.NewCommand("hash", "Hash a password with Argon2id or bcrypt", "", func(cmd *cli.Command, args []string) error { + return runHash(args[0]) + }) + hashCmd.Args = cli.ExactArgs(1) + + cli.BoolFlag(hashCmd, &hashBcrypt, "bcrypt", "b", false, "Use bcrypt instead of Argon2id") + cli.StringFlag(hashCmd, &hashVerify, 
"verify", "", "", "Verify input against this hash") + + parent.AddCommand(hashCmd) +} + +func runHash(input string) error { + // Verify mode + if hashVerify != "" { + return runHashVerify(input, hashVerify) + } + + // Hash mode + if hashBcrypt { + hash, err := crypt.HashBcrypt(input, bcrypt.DefaultCost) + if err != nil { + return cli.Wrap(err, "failed to hash password") + } + fmt.Println(hash) + return nil + } + + hash, err := crypt.HashPassword(input) + if err != nil { + return cli.Wrap(err, "failed to hash password") + } + fmt.Println(hash) + return nil +} + +func runHashVerify(input, hash string) error { + var match bool + var err error + + if hashBcrypt { + match, err = crypt.VerifyBcrypt(input, hash) + } else { + match, err = crypt.VerifyPassword(input, hash) + } + + if err != nil { + return cli.Wrap(err, "failed to verify hash") + } + + if match { + cli.Success("Password matches hash") + return nil + } + + cli.Error("Password does not match hash") + return cli.Err("hash verification failed") +} diff --git a/internal/cmd/crypt/cmd_keygen.go b/internal/cmd/crypt/cmd_keygen.go new file mode 100644 index 00000000..06c2a715 --- /dev/null +++ b/internal/cmd/crypt/cmd_keygen.go @@ -0,0 +1,55 @@ +package crypt + +import ( + "crypto/rand" + "encoding/base64" + "encoding/hex" + "fmt" + + "github.com/host-uk/core/pkg/cli" +) + +// Keygen command flags +var ( + keygenLength int + keygenHex bool + keygenBase64 bool +) + +func addKeygenCommand(parent *cli.Command) { + keygenCmd := cli.NewCommand("keygen", "Generate a random cryptographic key", "", func(cmd *cli.Command, args []string) error { + return runKeygen() + }) + + cli.IntFlag(keygenCmd, &keygenLength, "length", "l", 32, "Key length in bytes") + cli.BoolFlag(keygenCmd, &keygenHex, "hex", "", false, "Output as hex string") + cli.BoolFlag(keygenCmd, &keygenBase64, "base64", "", false, "Output as base64 string") + + parent.AddCommand(keygenCmd) +} + +func runKeygen() error { + if keygenHex && keygenBase64 { + return 
cli.Err("--hex and --base64 are mutually exclusive") + } + if keygenLength <= 0 || keygenLength > 1024 { + return cli.Err("key length must be between 1 and 1024 bytes") + } + + key := make([]byte, keygenLength) + if _, err := rand.Read(key); err != nil { + return cli.Wrap(err, "failed to generate random key") + } + + switch { + case keygenHex: + fmt.Println(hex.EncodeToString(key)) + case keygenBase64: + fmt.Println(base64.StdEncoding.EncodeToString(key)) + default: + // Default to hex output + fmt.Println(hex.EncodeToString(key)) + } + + return nil +} diff --git a/internal/cmd/dev/cmd_apply.go b/internal/cmd/dev/cmd_apply.go index 21bd1b0f..738ad606 100644 --- a/internal/cmd/dev/cmd_apply.go +++ b/internal/cmd/dev/cmd_apply.go @@ -15,7 +15,7 @@ import ( "strings" "github.com/host-uk/core/pkg/cli" - "github.com/host-uk/core/pkg/errors" + core "github.com/host-uk/core/pkg/framework/core" "github.com/host-uk/core/pkg/git" "github.com/host-uk/core/pkg/i18n" "github.com/host-uk/core/pkg/io" @@ -66,19 +66,19 @@ func runApply() error { // Validate inputs if applyCommand == "" && applyScript == "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_command"), nil) + return core.E("dev.apply", i18n.T("cmd.dev.apply.error.no_command"), nil) } if applyCommand != "" && applyScript != "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.both_command_script"), nil) + return core.E("dev.apply", i18n.T("cmd.dev.apply.error.both_command_script"), nil) } if applyCommit && applyMessage == "" { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.commit_needs_message"), nil) + return core.E("dev.apply", i18n.T("cmd.dev.apply.error.commit_needs_message"), nil) } // Validate script exists if applyScript != "" { if !io.Local.IsFile(applyScript) { - return errors.E("dev.apply", "script not found: "+applyScript, nil) // Error mismatch? IsFile returns bool + return core.E("dev.apply", "script not found: "+applyScript, nil) // Error mismatch? 
IsFile returns bool } } @@ -89,7 +89,7 @@ func runApply() error { } if len(targetRepos) == 0 { - return errors.E("dev.apply", i18n.T("cmd.dev.apply.error.no_repos"), nil) + return core.E("dev.apply", i18n.T("cmd.dev.apply.error.no_repos"), nil) } // Show plan @@ -227,12 +227,12 @@ func getApplyTargetRepos() ([]*repos.Repo, error) { // Load registry registryPath, err := repos.FindRegistry() if err != nil { - return nil, errors.E("dev.apply", "failed to find registry", err) + return nil, core.E("dev.apply", "failed to find registry", err) } registry, err := repos.LoadRegistry(registryPath) if err != nil { - return nil, errors.E("dev.apply", "failed to load registry", err) + return nil, core.E("dev.apply", "failed to load registry", err) } // If --repos specified, filter to those diff --git a/internal/cmd/dev/cmd_sync.go b/internal/cmd/dev/cmd_sync.go index 33670d02..ef9b7d02 100644 --- a/internal/cmd/dev/cmd_sync.go +++ b/internal/cmd/dev/cmd_sync.go @@ -2,7 +2,6 @@ package dev import ( "bytes" - "context" "go/ast" "go/parser" "go/token" @@ -17,25 +16,6 @@ import ( "golang.org/x/text/language" ) -// syncInternalToPublic handles the synchronization of internal packages to public-facing directories. -// This function is a placeholder for future implementation. -func syncInternalToPublic(ctx context.Context, publicDir string) error { - // 1. Clean public/internal - // 2. Copy relevant files from internal/ to public/internal/ - // Usually just shared logic, not private stuff. - - // For now, let's assume we copy specific safe packages - // Logic to be refined. - - // Example migration of os calls: - // internalDirs, err := os.ReadDir(pkgDir) -> coreio.Local.List(pkgDir) - // os.Stat -> coreio.Local.IsFile (returns bool) or List for existence check - // os.MkdirAll -> coreio.Local.EnsureDir - // os.WriteFile -> coreio.Local.Write - - return nil -} - // addSyncCommand adds the 'sync' command to the given parent command. 
func addSyncCommand(parent *cli.Command) { syncCmd := &cli.Command{ diff --git a/internal/cmd/dev/cmd_work.go b/internal/cmd/dev/cmd_work.go index 5f37fbaa..b5cd3c84 100644 --- a/internal/cmd/dev/cmd_work.go +++ b/internal/cmd/dev/cmd_work.go @@ -53,7 +53,7 @@ func runWork(registryPath string, statusOnly, autoCommit bool) error { if err := bundle.Start(ctx); err != nil { return err } - defer bundle.Stop(ctx) + defer func() { _ = bundle.Stop(ctx) }() // Load registry and get paths paths, names, err := func() ([]string, map[string]string, error) { diff --git a/internal/cmd/dev/service.go b/internal/cmd/dev/service.go index a145cd90..b086f9aa 100644 --- a/internal/cmd/dev/service.go +++ b/internal/cmd/dev/service.go @@ -176,7 +176,7 @@ func (s *Service) runWork(task TaskWork) error { cli.Blank() cli.Print("Push all? [y/N] ") var answer string - cli.Scanln(&answer) + _, _ = cli.Scanln(&answer) if strings.ToLower(answer) != "y" { cli.Println("Aborted") return nil diff --git a/internal/cmd/docs/cmd_scan.go b/internal/cmd/docs/cmd_scan.go index d88ad278..2fb9574d 100644 --- a/internal/cmd/docs/cmd_scan.go +++ b/internal/cmd/docs/cmd_scan.go @@ -117,7 +117,7 @@ func scanRepoDocs(repo *repos.Repo) RepoDocInfo { docsDir := filepath.Join(repo.Path, "docs") // Check if directory exists by listing it if _, err := io.Local.List(docsDir); err == nil { - filepath.WalkDir(docsDir, func(path string, d fs.DirEntry, err error) error { + _ = filepath.WalkDir(docsDir, func(path string, d fs.DirEntry, err error) error { if err != nil { return nil } diff --git a/internal/cmd/go/cmd_format.go b/internal/cmd/go/cmd_format.go index b8eaedd2..cc2235c3 100644 --- a/internal/cmd/go/cmd_format.go +++ b/internal/cmd/go/cmd_format.go @@ -67,11 +67,11 @@ func addGoFmtCommand(parent *cli.Command) { if fmtCheck { output, err := execCmd.CombinedOutput() if err != nil { - os.Stderr.Write(output) + _, _ = os.Stderr.Write(output) return err } if len(output) > 0 { - os.Stdout.Write(output) + _, _ = 
os.Stdout.Write(output) return cli.Err("files need formatting (use --fix)") } return nil diff --git a/internal/cmd/go/cmd_gotest.go b/internal/cmd/go/cmd_gotest.go index c34364f5..4145faed 100644 --- a/internal/cmd/go/cmd_gotest.go +++ b/internal/cmd/go/cmd_gotest.go @@ -154,7 +154,7 @@ func parseOverallCoverage(output string) float64 { var total float64 for _, m := range matches { var cov float64 - fmt.Sscanf(m[1], "%f", &cov) + _, _ = fmt.Sscanf(m[1], "%f", &cov) total += cov } return total / float64(len(matches)) @@ -192,8 +192,8 @@ func addGoCovCommand(parent *cli.Command) { return cli.Wrap(err, i18n.T("i18n.fail.create", "coverage file")) } covPath := covFile.Name() - covFile.Close() - defer os.Remove(covPath) + _ = covFile.Close() + defer func() { _ = os.Remove(covPath) }() cli.Print("%s %s\n", dimStyle.Render(i18n.Label("coverage")), i18n.ProgressSubject("run", "tests")) // Truncate package list if too long for display @@ -236,7 +236,7 @@ func addGoCovCommand(parent *cli.Command) { parts := strings.Fields(lastLine) if len(parts) >= 3 { covStr := strings.TrimSuffix(parts[len(parts)-1], "%") - fmt.Sscanf(covStr, "%f", &totalCov) + _, _ = fmt.Sscanf(covStr, "%f", &totalCov) } } } @@ -266,7 +266,7 @@ func addGoCovCommand(parent *cli.Command) { cli.Print(" %s\n", dimStyle.Render("Open coverage.html in your browser")) } if openCmd != nil { - openCmd.Run() + _ = openCmd.Run() } } } diff --git a/internal/cmd/go/cmd_tools.go b/internal/cmd/go/cmd_tools.go index fd080ff9..6cb3f1ed 100644 --- a/internal/cmd/go/cmd_tools.go +++ b/internal/cmd/go/cmd_tools.go @@ -223,7 +223,7 @@ func addGoWorkCommand(parent *cli.Command) { func findGoModules(root string) []string { var modules []string - filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + _ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { if err != nil { return nil } diff --git a/internal/cmd/php/cmd_dev.go b/internal/cmd/php/cmd_dev.go index d22345ca..a3bc497a 100644 
--- a/internal/cmd/php/cmd_dev.go +++ b/internal/cmd/php/cmd_dev.go @@ -218,7 +218,7 @@ func runPHPLogs(service string, follow bool) error { if err != nil { return cli.Err("%s: %w", i18n.T("i18n.fail.get", "logs"), err) } - defer logsReader.Close() + defer func() { _ = logsReader.Close() }() // Handle interrupt ctx, cancel := context.WithCancel(context.Background()) diff --git a/internal/cmd/php/cmd_qa_runner.go b/internal/cmd/php/cmd_qa_runner.go index c8d20d2b..c61ea466 100644 --- a/internal/cmd/php/cmd_qa_runner.go +++ b/internal/cmd/php/cmd_qa_runner.go @@ -146,7 +146,7 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { pestBin := filepath.Join(r.dir, "vendor", "bin", "pest") phpunitBin := filepath.Join(r.dir, "vendor", "bin", "phpunit") - cmd := "pest" + var cmd string if _, err := os.Stat(pestBin); err == nil { cmd = pestBin } else if _, err := os.Stat(phpunitBin); err == nil { diff --git a/internal/cmd/php/container_test.go b/internal/cmd/php/container_test.go index 71e9e517..c0d0e196 100644 --- a/internal/cmd/php/container_test.go +++ b/internal/cmd/php/container_test.go @@ -230,7 +230,7 @@ func TestServeProduction_Bad(t *testing.T) { func TestShell_Bad(t *testing.T) { t.Run("fails without container ID", func(t *testing.T) { - err := Shell(nil, "") + err := Shell(context.TODO(), "") assert.Error(t, err) assert.Contains(t, err.Error(), "container ID is required") }) diff --git a/internal/cmd/php/coolify_test.go b/internal/cmd/php/coolify_test.go index 37477953..8176c88e 100644 --- a/internal/cmd/php/coolify_test.go +++ b/internal/cmd/php/coolify_test.go @@ -225,7 +225,7 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) { Status: "queued", CreatedAt: time.Now(), } - json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -240,11 +240,11 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) { t.Run("triggers deployment with force", func(t *testing.T) { server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var body map[string]interface{} - json.NewDecoder(r.Body).Decode(&body) + _ = json.NewDecoder(r.Body).Decode(&body) assert.Equal(t, true, body["force"]) resp := CoolifyDeployment{ID: "dep-456", Status: "queued"} - json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -256,7 +256,7 @@ func TestCoolifyClient_TriggerDeploy_Good(t *testing.T) { t.Run("handles minimal response", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Return an invalid JSON response to trigger the fallback - w.Write([]byte("not json")) + _, _ = w.Write([]byte("not json")) })) defer server.Close() @@ -273,7 +273,7 @@ func TestCoolifyClient_TriggerDeploy_Bad(t *testing.T) { t.Run("fails on HTTP error", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) - json.NewEncoder(w).Encode(map[string]string{"message": "Internal error"}) + _ = json.NewEncoder(w).Encode(map[string]string{"message": "Internal error"}) })) defer server.Close() @@ -297,7 +297,7 @@ func TestCoolifyClient_GetDeployment_Good(t *testing.T) { CommitSHA: "abc123", Branch: "main", } - json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -315,7 +315,7 @@ func TestCoolifyClient_GetDeployment_Bad(t *testing.T) { t.Run("fails on 404", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotFound) - json.NewEncoder(w).Encode(map[string]string{"error": "Not found"}) + _ = json.NewEncoder(w).Encode(map[string]string{"error": "Not found"}) })) defer server.Close() @@ -337,7 +337,7 @@ func TestCoolifyClient_ListDeployments_Good(t *testing.T) { {ID: "dep-1", Status: "finished"}, {ID: "dep-2", Status: "failed"}, } - 
json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -353,7 +353,7 @@ func TestCoolifyClient_ListDeployments_Good(t *testing.T) { t.Run("lists without limit", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "", r.URL.Query().Get("limit")) - json.NewEncoder(w).Encode([]CoolifyDeployment{}) + _ = json.NewEncoder(w).Encode([]CoolifyDeployment{}) })) defer server.Close() @@ -370,14 +370,14 @@ func TestCoolifyClient_Rollback_Good(t *testing.T) { assert.Equal(t, "POST", r.Method) var body map[string]string - json.NewDecoder(r.Body).Decode(&body) + _ = json.NewDecoder(r.Body).Decode(&body) assert.Equal(t, "dep-old", body["deployment_id"]) resp := CoolifyDeployment{ ID: "dep-new", Status: "rolling_back", } - json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -402,7 +402,7 @@ func TestCoolifyClient_GetApp_Good(t *testing.T) { FQDN: "https://myapp.example.com", Status: "running", } - json.NewEncoder(w).Encode(resp) + _ = json.NewEncoder(w).Encode(resp) })) defer server.Close() @@ -433,7 +433,7 @@ func TestCoolifyClient_ParseError(t *testing.T) { t.Run("parses message field", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusBadRequest) - json.NewEncoder(w).Encode(map[string]string{"message": "Bad request message"}) + _ = json.NewEncoder(w).Encode(map[string]string{"message": "Bad request message"}) })) defer server.Close() @@ -447,7 +447,7 @@ func TestCoolifyClient_ParseError(t *testing.T) { t.Run("parses error field", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusBadRequest) - json.NewEncoder(w).Encode(map[string]string{"error": "Error message"}) + _ = json.NewEncoder(w).Encode(map[string]string{"error": "Error message"}) 
})) defer server.Close() @@ -461,7 +461,7 @@ func TestCoolifyClient_ParseError(t *testing.T) { t.Run("returns raw body when no JSON fields", func(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte("Raw error message")) + _, _ = w.Write([]byte("Raw error message")) })) defer server.Close() @@ -486,12 +486,12 @@ COOLIFY_TOKEN=file-token` origURL := os.Getenv("COOLIFY_URL") origToken := os.Getenv("COOLIFY_TOKEN") defer func() { - os.Setenv("COOLIFY_URL", origURL) - os.Setenv("COOLIFY_TOKEN", origToken) + _ = os.Setenv("COOLIFY_URL", origURL) + _ = os.Setenv("COOLIFY_TOKEN", origToken) }() - os.Setenv("COOLIFY_URL", "https://from-env.com") - os.Setenv("COOLIFY_TOKEN", "env-token") + _ = os.Setenv("COOLIFY_URL", "https://from-env.com") + _ = os.Setenv("COOLIFY_TOKEN", "env-token") config, err := LoadCoolifyConfig(dir) assert.NoError(t, err) diff --git a/internal/cmd/php/detect.go b/internal/cmd/php/detect.go index 5ba153f5..7a977090 100644 --- a/internal/cmd/php/detect.go +++ b/internal/cmd/php/detect.go @@ -172,7 +172,7 @@ func needsRedis(dir string) bool { if err != nil { return false } - defer file.Close() + defer func() { _ = file.Close() }() scanner := bufio.NewScanner(file) for scanner.Scan() { @@ -235,7 +235,7 @@ func GetLaravelAppName(dir string) string { if err != nil { return "" } - defer file.Close() + defer func() { _ = file.Close() }() scanner := bufio.NewScanner(file) for scanner.Scan() { @@ -258,7 +258,7 @@ func GetLaravelAppURL(dir string) string { if err != nil { return "" } - defer file.Close() + defer func() { _ = file.Close() }() scanner := bufio.NewScanner(file) for scanner.Scan() { diff --git a/internal/cmd/php/detect_test.go b/internal/cmd/php/detect_test.go index 7cd2128e..6460a83e 100644 --- a/internal/cmd/php/detect_test.go +++ b/internal/cmd/php/detect_test.go @@ -197,7 +197,7 @@ return [ octanePath := 
filepath.Join(configDir, "octane.php") err = os.WriteFile(octanePath, []byte(" 0 { // Stop any services that did start for _, svc := range d.services { - svc.Stop() + _ = svc.Stop() } return cli.Err("failed to start services: %v", startErrors) } @@ -296,7 +296,7 @@ func (d *DevServer) unifiedLogs(follow bool) (io.ReadCloser, error) { if err != nil { // Close any readers we already opened for _, r := range readers { - r.Close() + _ = r.Close() } return nil, cli.Err("failed to get logs for %s: %v", svc.Name(), err) } diff --git a/internal/cmd/php/php_test.go b/internal/cmd/php/php_test.go index 7413a05a..e295d73e 100644 --- a/internal/cmd/php/php_test.go +++ b/internal/cmd/php/php_test.go @@ -165,13 +165,13 @@ func TestMultiServiceReader_Good(t *testing.T) { dir := t.TempDir() file1, err := os.CreateTemp(dir, "log1-*.log") require.NoError(t, err) - file1.WriteString("test1") - file1.Seek(0, 0) + _, _ = file1.WriteString("test1") + _, _ = file1.Seek(0, 0) file2, err := os.CreateTemp(dir, "log2-*.log") require.NoError(t, err) - file2.WriteString("test2") - file2.Seek(0, 0) + _, _ = file2.WriteString("test2") + _, _ = file2.Seek(0, 0) // Create mock services services := []Service{ @@ -202,8 +202,8 @@ func TestMultiServiceReader_Read_Good(t *testing.T) { dir := t.TempDir() file1, err := os.CreateTemp(dir, "log-*.log") require.NoError(t, err) - file1.WriteString("log content") - file1.Seek(0, 0) + _, _ = file1.WriteString("log content") + _, _ = file1.Seek(0, 0) services := []Service{ &FrankenPHPService{baseService: baseService{name: "TestService"}}, @@ -224,7 +224,7 @@ func TestMultiServiceReader_Read_Good(t *testing.T) { dir := t.TempDir() file1, err := os.CreateTemp(dir, "log-*.log") require.NoError(t, err) - file1.Close() // Empty file + _ = file1.Close() // Empty file file1, err = os.Open(file1.Name()) require.NoError(t, err) @@ -355,7 +355,7 @@ func TestDevServer_Logs_Good(t *testing.T) { reader, err := server.Logs("TestService", false) assert.NoError(t, err) 
assert.NotNil(t, reader) - reader.Close() + _ = reader.Close() }) } @@ -462,7 +462,7 @@ func TestMultiServiceReader_CloseError(t *testing.T) { file1, err := os.CreateTemp(dir, "log-*.log") require.NoError(t, err) file1Name := file1.Name() - file1.Close() + _ = file1.Close() // Reopen for reading file1, err = os.Open(file1Name) @@ -489,7 +489,7 @@ func TestMultiServiceReader_FollowMode(t *testing.T) { file1, err := os.CreateTemp(dir, "log-*.log") require.NoError(t, err) file1Name := file1.Name() - file1.Close() + _ = file1.Close() // Reopen for reading (empty file) file1, err = os.Open(file1Name) @@ -520,7 +520,7 @@ func TestMultiServiceReader_FollowMode(t *testing.T) { // Also acceptable - follow mode is waiting } - reader.Close() + _ = reader.Close() }) } diff --git a/internal/cmd/php/quality_extended_test.go b/internal/cmd/php/quality_extended_test.go index 3841edc8..8c1c00e3 100644 --- a/internal/cmd/php/quality_extended_test.go +++ b/internal/cmd/php/quality_extended_test.go @@ -238,7 +238,7 @@ func TestFormat_Bad(t *testing.T) { dir := t.TempDir() opts := FormatOptions{Dir: dir} - err := Format(nil, opts) + err := Format(context.TODO(), opts) assert.Error(t, err) assert.Contains(t, err.Error(), "no formatter found") }) @@ -247,7 +247,7 @@ func TestFormat_Bad(t *testing.T) { // When no formatter found in cwd, should still fail with "no formatter found" opts := FormatOptions{Dir: ""} - err := Format(nil, opts) + err := Format(context.TODO(), opts) // May or may not find a formatter depending on cwd, but function should not panic if err != nil { // Expected - no formatter in cwd @@ -274,7 +274,7 @@ func TestAnalyse_Bad(t *testing.T) { dir := t.TempDir() opts := AnalyseOptions{Dir: dir} - err := Analyse(nil, opts) + err := Analyse(context.TODO(), opts) assert.Error(t, err) assert.Contains(t, err.Error(), "no static analyser found") }) @@ -282,7 +282,7 @@ func TestAnalyse_Bad(t *testing.T) { t.Run("uses cwd when dir not specified", func(t *testing.T) { opts := 
AnalyseOptions{Dir: ""} - err := Analyse(nil, opts) + err := Analyse(context.TODO(), opts) // May or may not find an analyser depending on cwd if err != nil { assert.Contains(t, err.Error(), "no static analyser") diff --git a/internal/cmd/php/services.go b/internal/cmd/php/services.go index aa1c9691..81b8594c 100644 --- a/internal/cmd/php/services.go +++ b/internal/cmd/php/services.go @@ -123,7 +123,7 @@ func (s *baseService) startProcess(ctx context.Context, cmdName string, args []s setSysProcAttr(s.cmd) if err := s.cmd.Start(); err != nil { - logFile.Close() + _ = logFile.Close() s.lastError = err return cli.WrapVerb(err, "start", s.name) } @@ -140,7 +140,7 @@ func (s *baseService) startProcess(ctx context.Context, cmdName string, args []s s.lastError = err } if s.logFile != nil { - s.logFile.Close() + _ = s.logFile.Close() } s.mu.Unlock() }() @@ -157,12 +157,12 @@ func (s *baseService) stopProcess() error { } // Send termination signal to process (group on Unix) - signalProcessGroup(s.cmd, termSignal()) + _ = signalProcessGroup(s.cmd, termSignal()) // Wait for graceful shutdown with timeout done := make(chan struct{}) go func() { - s.cmd.Wait() + _ = s.cmd.Wait() close(done) }() @@ -171,7 +171,7 @@ func (s *baseService) stopProcess() error { // Process exited gracefully case <-time.After(5 * time.Second): // Force kill - signalProcessGroup(s.cmd, killSignal()) + _ = signalProcessGroup(s.cmd, killSignal()) } s.running = false @@ -333,7 +333,7 @@ func (s *HorizonService) Stop() error { // Horizon has its own terminate command cmd := exec.Command("php", "artisan", "horizon:terminate") cmd.Dir = s.dir - cmd.Run() // Ignore errors, will also kill via signal + _ = cmd.Run() // Ignore errors, will also kill via signal return s.stopProcess() } @@ -427,7 +427,7 @@ func (s *RedisService) Start(ctx context.Context) error { func (s *RedisService) Stop() error { // Try graceful shutdown via redis-cli cmd := exec.Command("redis-cli", "-p", cli.Sprintf("%d", s.port), 
"shutdown", "nosave") - cmd.Run() // Ignore errors + _ = cmd.Run() // Ignore errors return s.stopProcess() } diff --git a/internal/cmd/php/services_extended_test.go b/internal/cmd/php/services_extended_test.go index db2c42be..ce3b72ec 100644 --- a/internal/cmd/php/services_extended_test.go +++ b/internal/cmd/php/services_extended_test.go @@ -66,7 +66,7 @@ func TestBaseService_Logs_Good(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, reader) - reader.Close() + _ = reader.Close() }) t.Run("returns tail reader in follow mode", func(t *testing.T) { @@ -83,7 +83,7 @@ func TestBaseService_Logs_Good(t *testing.T) { // Verify it's a tailReader by checking it implements ReadCloser _, ok := reader.(*tailReader) assert.True(t, ok) - reader.Close() + _ = reader.Close() }) } @@ -113,7 +113,7 @@ func TestTailReader_Good(t *testing.T) { file, err := os.Open(logPath) require.NoError(t, err) - defer file.Close() + defer func() { _ = file.Close() }() reader := newTailReader(file) assert.NotNil(t, reader) @@ -147,7 +147,7 @@ func TestTailReader_Good(t *testing.T) { require.NoError(t, err) reader := newTailReader(file) - reader.Close() + _ = reader.Close() buf := make([]byte, 100) n, _ := reader.Read(buf) diff --git a/internal/cmd/pkgcmd/cmd_manage.go b/internal/cmd/pkgcmd/cmd_manage.go index cabcbde1..9d40f901 100644 --- a/internal/cmd/pkgcmd/cmd_manage.go +++ b/internal/cmd/pkgcmd/cmd_manage.go @@ -224,7 +224,7 @@ func runPkgOutdated() error { } // Fetch updates - exec.Command("git", "-C", repoPath, "fetch", "--quiet").Run() + _ = exec.Command("git", "-C", repoPath, "fetch", "--quiet").Run() // Check if behind cmd := exec.Command("git", "-C", repoPath, "rev-list", "--count", "HEAD..@{u}") diff --git a/internal/cmd/plugin/cmd.go b/internal/cmd/plugin/cmd.go new file mode 100644 index 00000000..47ad72f4 --- /dev/null +++ b/internal/cmd/plugin/cmd.go @@ -0,0 +1,33 @@ +// Package plugin provides CLI commands for managing core plugins. 
+// +// Commands: +// - install: Install a plugin from GitHub +// - list: List installed plugins +// - info: Show detailed plugin information +// - update: Update a plugin or all plugins +// - remove: Remove an installed plugin +package plugin + +import ( + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" +) + +func init() { + cli.RegisterCommands(AddPluginCommands) +} + +// AddPluginCommands registers the 'plugin' command and all subcommands. +func AddPluginCommands(root *cli.Command) { + pluginCmd := &cli.Command{ + Use: "plugin", + Short: i18n.T("Manage plugins"), + } + root.AddCommand(pluginCmd) + + addInstallCommand(pluginCmd) + addListCommand(pluginCmd) + addInfoCommand(pluginCmd) + addUpdateCommand(pluginCmd) + addRemoveCommand(pluginCmd) +} diff --git a/internal/cmd/plugin/cmd_info.go b/internal/cmd/plugin/cmd_info.go new file mode 100644 index 00000000..c8e23735 --- /dev/null +++ b/internal/cmd/plugin/cmd_info.go @@ -0,0 +1,86 @@ +package plugin + +import ( + "fmt" + "path/filepath" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" + "github.com/host-uk/core/pkg/plugin" +) + +func addInfoCommand(parent *cli.Command) { + infoCmd := cli.NewCommand( + "info ", + i18n.T("Show detailed plugin information"), + "", + func(cmd *cli.Command, args []string) error { + return runInfo(args[0]) + }, + ) + infoCmd.Args = cli.ExactArgs(1) + + parent.AddCommand(infoCmd) +} + +func runInfo(name string) error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := registry.Load(); err != nil { + return err + } + + cfg, ok := registry.Get(name) + if !ok { + return fmt.Errorf("plugin not found: %s", name) + } + + // Try to load the manifest for extended information + loader := plugin.NewLoader(io.Local, basePath) + manifest, manifestErr := loader.LoadPlugin(name) + + fmt.Println() + cli.Label("Name", cfg.Name) + 
cli.Label("Version", cfg.Version) + cli.Label("Source", cfg.Source) + + status := "disabled" + if cfg.Enabled { + status = "enabled" + } + cli.Label("Status", status) + cli.Label("Installed", cfg.InstalledAt) + cli.Label("Path", filepath.Join(basePath, name)) + + if manifestErr == nil && manifest != nil { + if manifest.Description != "" { + cli.Label("Description", manifest.Description) + } + if manifest.Author != "" { + cli.Label("Author", manifest.Author) + } + if manifest.Entrypoint != "" { + cli.Label("Entrypoint", manifest.Entrypoint) + } + if manifest.MinVersion != "" { + cli.Label("Min Version", manifest.MinVersion) + } + if len(manifest.Dependencies) > 0 { + for i, dep := range manifest.Dependencies { + if i == 0 { + cli.Label("Dependencies", dep) + } else { + fmt.Printf(" %s\n", dep) + } + } + } + } + + fmt.Println() + return nil +} diff --git a/internal/cmd/plugin/cmd_install.go b/internal/cmd/plugin/cmd_install.go new file mode 100644 index 00000000..84379d87 --- /dev/null +++ b/internal/cmd/plugin/cmd_install.go @@ -0,0 +1,61 @@ +package plugin + +import ( + "context" + "os" + "path/filepath" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" + "github.com/host-uk/core/pkg/plugin" +) + +func addInstallCommand(parent *cli.Command) { + installCmd := cli.NewCommand( + "install ", + i18n.T("Install a plugin from GitHub"), + i18n.T("Install a plugin from a GitHub repository.\n\nSource format: org/repo or org/repo@version"), + func(cmd *cli.Command, args []string) error { + return runInstall(args[0]) + }, + ) + installCmd.Args = cli.ExactArgs(1) + installCmd.Example = " core plugin install host-uk/core-plugin-example\n core plugin install host-uk/core-plugin-example@v1.0.0" + + parent.AddCommand(installCmd) +} + +func runInstall(source string) error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := 
registry.Load(); err != nil { + return err + } + + installer := plugin.NewInstaller(io.Local, registry) + + cli.Dim("Installing plugin from " + source + "...") + + if err := installer.Install(context.Background(), source); err != nil { + return err + } + + _, repo, _, _ := plugin.ParseSource(source) + cli.Success("Plugin " + repo + " installed successfully") + + return nil +} + +// pluginBasePath returns the default plugin directory (~/.core/plugins/). +func pluginBasePath() (string, error) { + home, err := os.UserHomeDir() + if err != nil { + return "", cli.Wrap(err, "failed to determine home directory") + } + return filepath.Join(home, ".core", "plugins"), nil +} diff --git a/internal/cmd/plugin/cmd_list.go b/internal/cmd/plugin/cmd_list.go new file mode 100644 index 00000000..7c3ec218 --- /dev/null +++ b/internal/cmd/plugin/cmd_list.go @@ -0,0 +1,57 @@ +package plugin + +import ( + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" + "github.com/host-uk/core/pkg/plugin" +) + +func addListCommand(parent *cli.Command) { + listCmd := cli.NewCommand( + "list", + i18n.T("List installed plugins"), + "", + func(cmd *cli.Command, args []string) error { + return runList() + }, + ) + + parent.AddCommand(listCmd) +} + +func runList() error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := registry.Load(); err != nil { + return err + } + + plugins := registry.List() + if len(plugins) == 0 { + cli.Dim("No plugins installed") + return nil + } + + table := cli.NewTable("Name", "Version", "Source", "Status") + for _, p := range plugins { + status := "disabled" + if p.Enabled { + status = "enabled" + } + table.AddRow(p.Name, p.Version, p.Source, status) + } + + fmt.Println() + table.Render() + fmt.Println() + cli.Dim(fmt.Sprintf("%d plugin(s) installed", len(plugins))) + + return nil +} diff --git 
a/internal/cmd/plugin/cmd_remove.go b/internal/cmd/plugin/cmd_remove.go new file mode 100644 index 00000000..87f3f23a --- /dev/null +++ b/internal/cmd/plugin/cmd_remove.go @@ -0,0 +1,48 @@ +package plugin + +import ( + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" + "github.com/host-uk/core/pkg/plugin" +) + +func addRemoveCommand(parent *cli.Command) { + removeCmd := cli.NewCommand( + "remove ", + i18n.T("Remove an installed plugin"), + "", + func(cmd *cli.Command, args []string) error { + return runRemove(args[0]) + }, + ) + removeCmd.Args = cli.ExactArgs(1) + + parent.AddCommand(removeCmd) +} + +func runRemove(name string) error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := registry.Load(); err != nil { + return err + } + + if !cli.Confirm("Remove plugin " + name + "?") { + cli.Dim("Cancelled") + return nil + } + + installer := plugin.NewInstaller(io.Local, registry) + + if err := installer.Remove(name); err != nil { + return err + } + + cli.Success("Plugin " + name + " removed") + return nil +} diff --git a/internal/cmd/plugin/cmd_update.go b/internal/cmd/plugin/cmd_update.go new file mode 100644 index 00000000..65225dad --- /dev/null +++ b/internal/cmd/plugin/cmd_update.go @@ -0,0 +1,94 @@ +package plugin + +import ( + "context" + "fmt" + + "github.com/host-uk/core/pkg/cli" + "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" + "github.com/host-uk/core/pkg/plugin" +) + +var updateAll bool + +func addUpdateCommand(parent *cli.Command) { + updateCmd := cli.NewCommand( + "update [name]", + i18n.T("Update a plugin or all plugins"), + i18n.T("Update a specific plugin to the latest version, or use --all to update all installed plugins."), + func(cmd *cli.Command, args []string) error { + if updateAll { + return runUpdateAll() + } + if len(args) == 0 { + return fmt.Errorf("plugin name required (or 
use --all)") + } + return runUpdate(args[0]) + }, + ) + + cli.BoolFlag(updateCmd, &updateAll, "all", "a", false, i18n.T("Update all installed plugins")) + + parent.AddCommand(updateCmd) +} + +func runUpdate(name string) error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := registry.Load(); err != nil { + return err + } + + installer := plugin.NewInstaller(io.Local, registry) + + cli.Dim("Updating " + name + "...") + + if err := installer.Update(context.Background(), name); err != nil { + return err + } + + cli.Success("Plugin " + name + " updated successfully") + return nil +} + +func runUpdateAll() error { + basePath, err := pluginBasePath() + if err != nil { + return err + } + + registry := plugin.NewRegistry(io.Local, basePath) + if err := registry.Load(); err != nil { + return err + } + + plugins := registry.List() + if len(plugins) == 0 { + cli.Dim("No plugins installed") + return nil + } + + installer := plugin.NewInstaller(io.Local, registry) + ctx := context.Background() + + var updated, failed int + for _, p := range plugins { + cli.Dim("Updating " + p.Name + "...") + if err := installer.Update(ctx, p.Name); err != nil { + cli.Errorf("Failed to update %s: %v", p.Name, err) + failed++ + continue + } + cli.Success(p.Name + " updated") + updated++ + } + + fmt.Println() + cli.Dim(fmt.Sprintf("%d updated, %d failed", updated, failed)) + return nil +} diff --git a/internal/cmd/rag/cmd_collections.go b/internal/cmd/rag/cmd_collections.go index b21d45c4..b734618d 100644 --- a/internal/cmd/rag/cmd_collections.go +++ b/internal/cmd/rag/cmd_collections.go @@ -35,7 +35,7 @@ func runCollections(cmd *cobra.Command, args []string) error { if err != nil { return fmt.Errorf("failed to connect to Qdrant: %w", err) } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() // Handle delete if deleteCollection != "" { diff --git a/internal/cmd/rag/cmd_ingest.go 
b/internal/cmd/rag/cmd_ingest.go index e956b22f..74062f76 100644 --- a/internal/cmd/rag/cmd_ingest.go +++ b/internal/cmd/rag/cmd_ingest.go @@ -43,10 +43,10 @@ func runIngest(cmd *cobra.Command, args []string) error { if err != nil { return fmt.Errorf("failed to connect to Qdrant: %w", err) } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() if err := qdrantClient.HealthCheck(ctx); err != nil { - return fmt.Errorf("Qdrant health check failed: %w", err) + return fmt.Errorf("qdrant health check failed: %w", err) } // Connect to Ollama @@ -122,10 +122,10 @@ func IngestDirectory(ctx context.Context, directory, collectionName string, recr if err != nil { return err } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() if err := qdrantClient.HealthCheck(ctx); err != nil { - return fmt.Errorf("Qdrant health check failed: %w", err) + return fmt.Errorf("qdrant health check failed: %w", err) } ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) @@ -152,10 +152,10 @@ func IngestFile(ctx context.Context, filePath, collectionName string) (int, erro if err != nil { return 0, err } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() if err := qdrantClient.HealthCheck(ctx); err != nil { - return 0, fmt.Errorf("Qdrant health check failed: %w", err) + return 0, fmt.Errorf("qdrant health check failed: %w", err) } ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) diff --git a/internal/cmd/rag/cmd_query.go b/internal/cmd/rag/cmd_query.go index 076f2643..fe36fe1a 100644 --- a/internal/cmd/rag/cmd_query.go +++ b/internal/cmd/rag/cmd_query.go @@ -38,7 +38,7 @@ func runQuery(cmd *cobra.Command, args []string) error { if err != nil { return fmt.Errorf("failed to connect to Qdrant: %w", err) } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() // Connect to Ollama ollamaClient, err := rag.NewOllamaClient(rag.OllamaConfig{ @@ -86,7 +86,7 @@ func QueryDocs(ctx 
context.Context, question, collectionName string, topK int) ( if err != nil { return nil, err } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() ollamaClient, err := rag.NewOllamaClient(rag.DefaultOllamaConfig()) if err != nil { diff --git a/internal/cmd/sdk/diff_test.go b/internal/cmd/sdk/diff_test.go index 812ab84b..f1b3a206 100644 --- a/internal/cmd/sdk/diff_test.go +++ b/internal/cmd/sdk/diff_test.go @@ -41,8 +41,8 @@ paths: ` basePath := filepath.Join(tmpDir, "base.yaml") revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) + _ = os.WriteFile(basePath, []byte(baseSpec), 0644) + _ = os.WriteFile(revPath, []byte(revSpec), 0644) result, err := Diff(basePath, revPath) if err != nil { @@ -88,8 +88,8 @@ paths: ` basePath := filepath.Join(tmpDir, "base.yaml") revPath := filepath.Join(tmpDir, "rev.yaml") - os.WriteFile(basePath, []byte(baseSpec), 0644) - os.WriteFile(revPath, []byte(revSpec), 0644) + _ = os.WriteFile(basePath, []byte(baseSpec), 0644) + _ = os.WriteFile(revPath, []byte(revSpec), 0644) result, err := Diff(basePath, revPath) if err != nil { diff --git a/internal/cmd/setup/github_config.go b/internal/cmd/setup/github_config.go index 7c12795a..6167a979 100644 --- a/internal/cmd/setup/github_config.go +++ b/internal/cmd/setup/github_config.go @@ -196,7 +196,7 @@ func isValidHexColor(color string) bool { return false } for _, c := range strings.ToLower(color) { - if !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f')) { + if (c < '0' || c > '9') && (c < 'a' || c > 'f') { return false } } diff --git a/internal/cmd/updater/cmd.go b/internal/cmd/updater/cmd.go index bfebed0b..ec42355b 100644 --- a/internal/cmd/updater/cmd.go +++ b/internal/cmd/updater/cmd.go @@ -1,6 +1,7 @@ package updater import ( + "context" "fmt" "os" "runtime" @@ -142,7 +143,7 @@ func handleDevUpdate(currentVersion string) error { client := NewGithubClient() // Fetch the dev 
release directly by tag - release, err := client.GetLatestRelease(nil, repoOwner, repoName, "beta") + release, err := client.GetLatestRelease(context.TODO(), repoOwner, repoName, "beta") if err != nil { // Try fetching the "dev" tag directly return handleDevTagUpdate(currentVersion) diff --git a/internal/cmd/updater/cmd_unix.go b/internal/cmd/updater/cmd_unix.go index ce81b313..2ffceede 100644 --- a/internal/cmd/updater/cmd_unix.go +++ b/internal/cmd/updater/cmd_unix.go @@ -36,10 +36,8 @@ func spawnWatcher() error { // watchAndRestart waits for the given PID to exit, then restarts the binary. func watchAndRestart(pid int) error { // Wait for the parent process to die - for { - if !isProcessRunning(pid) { - break - } + for isProcessRunning(pid) { + time.Sleep(100 * time.Millisecond) } diff --git a/internal/cmd/updater/generic_http.go b/internal/cmd/updater/generic_http.go index 2161b1f8..5573684c 100644 --- a/internal/cmd/updater/generic_http.go +++ b/internal/cmd/updater/generic_http.go @@ -36,7 +36,7 @@ func GetLatestUpdateFromURL(baseURL string) (*GenericUpdateInfo, error) { if err != nil { return nil, fmt.Errorf("failed to fetch latest.json: %w", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("failed to fetch latest.json: status code %d", resp.StatusCode) diff --git a/internal/cmd/updater/generic_http_test.go b/internal/cmd/updater/generic_http_test.go index bf51b482..2482efda 100644 --- a/internal/cmd/updater/generic_http_test.go +++ b/internal/cmd/updater/generic_http_test.go @@ -18,7 +18,7 @@ func TestGetLatestUpdateFromURL(t *testing.T) { { name: "Valid latest.json", handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"}`) + _, _ = fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"}`) }, expectedVersion: "v1.1.0", expectedURL: "http://example.com/release.zip", 
@@ -26,21 +26,21 @@ func TestGetLatestUpdateFromURL(t *testing.T) { { name: "Invalid JSON", handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"`) // Missing closing brace + _, _ = fmt.Fprintln(w, `{"version": "v1.1.0", "url": "http://example.com/release.zip"`) // Missing closing brace }, expectError: true, }, { name: "Missing version", handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"url": "http://example.com/release.zip"}`) + _, _ = fmt.Fprintln(w, `{"url": "http://example.com/release.zip"}`) }, expectError: true, }, { name: "Missing URL", handler: func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, `{"version": "v1.1.0"}`) + _, _ = fmt.Fprintln(w, `{"version": "v1.1.0"}`) }, expectError: true, }, diff --git a/internal/cmd/updater/github.go b/internal/cmd/updater/github.go index 63460cfd..f141fde4 100644 --- a/internal/cmd/updater/github.go +++ b/internal/cmd/updater/github.go @@ -81,7 +81,7 @@ func (g *githubClient) getPublicReposWithAPIURL(ctx context.Context, apiURL, use } if resp.StatusCode != http.StatusOK { - resp.Body.Close() + _ = resp.Body.Close() // Try organization endpoint url = fmt.Sprintf("%s/orgs/%s/repos", apiURL, userOrOrg) req, err = http.NewRequestWithContext(ctx, "GET", url, nil) @@ -96,16 +96,16 @@ func (g *githubClient) getPublicReposWithAPIURL(ctx context.Context, apiURL, use } if resp.StatusCode != http.StatusOK { - resp.Body.Close() + _ = resp.Body.Close() return nil, fmt.Errorf("failed to fetch repos: %s", resp.Status) } var repos []Repo if err := json.NewDecoder(resp.Body).Decode(&repos); err != nil { - resp.Body.Close() + _ = resp.Body.Close() return nil, err } - resp.Body.Close() + _ = resp.Body.Close() for _, repo := range repos { allCloneURLs = append(allCloneURLs, repo.CloneURL) @@ -152,7 +152,7 @@ func (g *githubClient) GetLatestRelease(ctx context.Context, owner, repo, channe if err != nil { return nil, err 
} - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) @@ -207,7 +207,7 @@ func (g *githubClient) GetReleaseByPullRequest(ctx context.Context, owner, repo if err != nil { return nil, err } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if resp.StatusCode != http.StatusOK { return nil, fmt.Errorf("failed to fetch releases: %s", resp.Status) diff --git a/internal/cmd/updater/service_test.go b/internal/cmd/updater/service_test.go index 5f12b3b6..ab8691ae 100644 --- a/internal/cmd/updater/service_test.go +++ b/internal/cmd/updater/service_test.go @@ -52,7 +52,7 @@ func TestNewUpdateService(t *testing.T) { func TestUpdateService_Start(t *testing.T) { // Setup a mock server for HTTP tests server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{"version": "v1.1.0", "url": "http://example.com/release.zip"}`)) + _, _ = w.Write([]byte(`{"version": "v1.1.0", "url": "http://example.com/release.zip"}`)) })) defer server.Close() diff --git a/internal/cmd/updater/updater_test.go b/internal/cmd/updater/updater_test.go index dfb56680..b185e2db 100644 --- a/internal/cmd/updater/updater_test.go +++ b/internal/cmd/updater/updater_test.go @@ -220,7 +220,7 @@ func ExampleCheckForUpdatesHTTP() { // Create a mock HTTP server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/latest.json" { - fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) + _, _ = fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) } })) defer server.Close() @@ -247,7 +247,7 @@ func ExampleCheckOnlyHTTP() { // Create a mock HTTP server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/latest.json" { - fmt.Fprintln(w, `{"version": "1.1.0", "url": 
"http://example.com/update"}`) + _, _ = fmt.Fprintln(w, `{"version": "1.1.0", "url": "http://example.com/update"}`) } })) defer server.Close() diff --git a/internal/cmd/vm/cmd_container.go b/internal/cmd/vm/cmd_container.go index 73188cef..38622a54 100644 --- a/internal/cmd/vm/cmd_container.go +++ b/internal/cmd/vm/cmd_container.go @@ -158,8 +158,8 @@ func listContainers(all bool) error { } w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - fmt.Fprintln(w, i18n.T("cmd.vm.ps.header")) - fmt.Fprintln(w, "--\t----\t-----\t------\t-------\t---") + _, _ = fmt.Fprintln(w, i18n.T("cmd.vm.ps.header")) + _, _ = fmt.Fprintln(w, "--\t----\t-----\t------\t-------\t---") for _, c := range containers { // Shorten image path @@ -182,11 +182,11 @@ func listContainers(all bool) error { status = errorStyle.Render(status) } - fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%d\n", + _, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%d\n", c.ID[:8], c.Name, imageName, status, duration, c.PID) } - w.Flush() + _ = w.Flush() return nil } @@ -305,7 +305,7 @@ func viewLogs(id string, follow bool) error { if err != nil { return fmt.Errorf(i18n.T("i18n.fail.get", "logs")+": %w", err) } - defer reader.Close() + defer func() { _ = reader.Close() }() _, err = io.Copy(os.Stdout, reader) return err diff --git a/internal/cmd/vm/cmd_templates.go b/internal/cmd/vm/cmd_templates.go index 040939f2..31989df1 100644 --- a/internal/cmd/vm/cmd_templates.go +++ b/internal/cmd/vm/cmd_templates.go @@ -78,17 +78,17 @@ func listTemplates() error { fmt.Printf("%s\n\n", repoNameStyle.Render(i18n.T("cmd.vm.templates.title"))) w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - fmt.Fprintln(w, i18n.T("cmd.vm.templates.header")) - fmt.Fprintln(w, "----\t-----------") + _, _ = fmt.Fprintln(w, i18n.T("cmd.vm.templates.header")) + _, _ = fmt.Fprintln(w, "----\t-----------") for _, tmpl := range templates { desc := tmpl.Description if len(desc) > 60 { desc = desc[:57] + "..." 
} - fmt.Fprintf(w, "%s\t%s\n", repoNameStyle.Render(tmpl.Name), desc) + _, _ = fmt.Fprintf(w, "%s\t%s\n", repoNameStyle.Render(tmpl.Name), desc) } - w.Flush() + _ = w.Flush() fmt.Println() fmt.Printf("%s %s\n", i18n.T("cmd.vm.templates.hint.show"), dimStyle.Render("core vm templates show ")) @@ -158,7 +158,7 @@ func RunFromTemplate(templateName string, vars map[string]string, runOpts contai if err != nil { return fmt.Errorf(i18n.T("common.error.failed", map[string]any{"Action": "create temp directory"})+": %w", err) } - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Write the YAML file yamlPath := filepath.Join(tmpDir, templateName+".yml") diff --git a/internal/variants/full.go b/internal/variants/full.go index 720c4561..c022de21 100644 --- a/internal/variants/full.go +++ b/internal/variants/full.go @@ -27,6 +27,9 @@ import ( // Commands via self-registration _ "github.com/host-uk/core/internal/cmd/ai" _ "github.com/host-uk/core/internal/cmd/ci" + _ "github.com/host-uk/core/internal/cmd/collect" + _ "github.com/host-uk/core/internal/cmd/config" + _ "github.com/host-uk/core/internal/cmd/crypt" _ "github.com/host-uk/core/internal/cmd/deploy" _ "github.com/host-uk/core/internal/cmd/dev" _ "github.com/host-uk/core/internal/cmd/docs" @@ -37,6 +40,7 @@ import ( _ "github.com/host-uk/core/internal/cmd/monitor" _ "github.com/host-uk/core/internal/cmd/php" _ "github.com/host-uk/core/internal/cmd/pkgcmd" + _ "github.com/host-uk/core/internal/cmd/plugin" _ "github.com/host-uk/core/internal/cmd/qa" _ "github.com/host-uk/core/internal/cmd/sdk" _ "github.com/host-uk/core/internal/cmd/security" diff --git a/pkg/agentic/client.go b/pkg/agentic/client.go index fe77f937..23c47a35 100644 --- a/pkg/agentic/client.go +++ b/pkg/agentic/client.go @@ -86,7 +86,7 @@ func (c *Client) ListTasks(ctx context.Context, opts ListOptions) ([]Task, error if err != nil { return nil, log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = 
resp.Body.Close() }() if err := c.checkResponse(resp); err != nil { return nil, log.E(op, "API error", err) @@ -121,7 +121,7 @@ func (c *Client) GetTask(ctx context.Context, id string) (*Task, error) { if err != nil { return nil, log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if err := c.checkResponse(resp); err != nil { return nil, log.E(op, "API error", err) @@ -166,7 +166,7 @@ func (c *Client) ClaimTask(ctx context.Context, id string) (*Task, error) { if err != nil { return nil, log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if err := c.checkResponse(resp); err != nil { return nil, log.E(op, "API error", err) @@ -220,7 +220,7 @@ func (c *Client) UpdateTask(ctx context.Context, id string, update TaskUpdate) e if err != nil { return log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if err := c.checkResponse(resp); err != nil { return log.E(op, "API error", err) @@ -256,7 +256,7 @@ func (c *Client) CompleteTask(ctx context.Context, id string, result TaskResult) if err != nil { return log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if err := c.checkResponse(resp); err != nil { return log.E(op, "API error", err) @@ -312,7 +312,7 @@ func (c *Client) Ping(ctx context.Context) error { if err != nil { return log.E(op, "request failed", err) } - defer resp.Body.Close() + defer func() { _ = resp.Body.Close() }() if resp.StatusCode >= 400 { return log.E(op, fmt.Sprintf("server returned status %d", resp.StatusCode), nil) diff --git a/pkg/agentic/client_test.go b/pkg/agentic/client_test.go index 89ff93d7..587e0dc9 100644 --- a/pkg/agentic/client_test.go +++ b/pkg/agentic/client_test.go @@ -74,7 +74,7 @@ func TestClient_ListTasks_Good(t *testing.T) { assert.Equal(t, "Bearer test-token", r.Header.Get("Authorization")) w.Header().Set("Content-Type", 
"application/json") - json.NewEncoder(w).Encode(testTasks) + _ = json.NewEncoder(w).Encode(testTasks) })) defer server.Close() @@ -97,7 +97,7 @@ func TestClient_ListTasks_Good_WithFilters(t *testing.T) { assert.Equal(t, "bug,urgent", query.Get("labels")) w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode([]Task{testTask}) + _ = json.NewEncoder(w).Encode([]Task{testTask}) })) defer server.Close() @@ -119,7 +119,7 @@ func TestClient_ListTasks_Good_WithFilters(t *testing.T) { func TestClient_ListTasks_Bad_ServerError(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) - json.NewEncoder(w).Encode(APIError{Message: "internal error"}) + _ = json.NewEncoder(w).Encode(APIError{Message: "internal error"}) })) defer server.Close() @@ -137,7 +137,7 @@ func TestClient_GetTask_Good(t *testing.T) { assert.Equal(t, "/api/tasks/task-123", r.URL.Path) w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(testTask) + _ = json.NewEncoder(w).Encode(testTask) })) defer server.Close() @@ -162,7 +162,7 @@ func TestClient_GetTask_Bad_EmptyID(t *testing.T) { func TestClient_GetTask_Bad_NotFound(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotFound) - json.NewEncoder(w).Encode(APIError{Message: "task not found"}) + _ = json.NewEncoder(w).Encode(APIError{Message: "task not found"}) })) defer server.Close() @@ -184,7 +184,7 @@ func TestClient_ClaimTask_Good(t *testing.T) { assert.Equal(t, "/api/tasks/task-123/claim", r.URL.Path) w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask}) + _ = json.NewEncoder(w).Encode(ClaimResponse{Task: &claimedTask}) })) defer server.Close() @@ -204,7 +204,7 @@ func TestClient_ClaimTask_Good_SimpleResponse(t *testing.T) { server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(claimedTask) + _ = json.NewEncoder(w).Encode(claimedTask) })) defer server.Close() @@ -227,7 +227,7 @@ func TestClient_ClaimTask_Bad_EmptyID(t *testing.T) { func TestClient_ClaimTask_Bad_AlreadyClaimed(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusConflict) - json.NewEncoder(w).Encode(APIError{Message: "task already claimed"}) + _ = json.NewEncoder(w).Encode(APIError{Message: "task already claimed"}) })) defer server.Close() diff --git a/pkg/agentic/config.go b/pkg/agentic/config.go index c713de5f..f2ce6405 100644 --- a/pkg/agentic/config.go +++ b/pkg/agentic/config.go @@ -99,7 +99,7 @@ func loadEnvFile(path string, cfg *Config) error { if err != nil { return err } - defer file.Close() + defer func() { _ = file.Close() }() scanner := bufio.NewScanner(file) for scanner.Scan() { diff --git a/pkg/agentic/config_test.go b/pkg/agentic/config_test.go index 6e88478b..4d5c718d 100644 --- a/pkg/agentic/config_test.go +++ b/pkg/agentic/config_test.go @@ -13,7 +13,7 @@ func TestLoadConfig_Good_FromEnvFile(t *testing.T) { // Create temp directory with .env file tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() envContent := ` AGENTIC_BASE_URL=https://test.api.com @@ -37,7 +37,7 @@ func TestLoadConfig_Good_FromEnvVars(t *testing.T) { // Create temp directory with .env file (partial config) tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() envContent := ` AGENTIC_TOKEN=env-file-token @@ -46,11 +46,11 @@ AGENTIC_TOKEN=env-file-token require.NoError(t, err) // Set environment variables that should override - os.Setenv("AGENTIC_BASE_URL", 
"https://env-override.com") - os.Setenv("AGENTIC_TOKEN", "env-override-token") + _ = os.Setenv("AGENTIC_BASE_URL", "https://env-override.com") + _ = os.Setenv("AGENTIC_TOKEN", "env-override-token") defer func() { - os.Unsetenv("AGENTIC_BASE_URL") - os.Unsetenv("AGENTIC_TOKEN") + _ = os.Unsetenv("AGENTIC_BASE_URL") + _ = os.Unsetenv("AGENTIC_TOKEN") }() cfg, err := LoadConfig(tmpDir) @@ -64,15 +64,15 @@ func TestLoadConfig_Bad_NoToken(t *testing.T) { // Create temp directory without config tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create empty .env err = os.WriteFile(filepath.Join(tmpDir, ".env"), []byte(""), 0644) require.NoError(t, err) // Ensure no env vars are set - os.Unsetenv("AGENTIC_TOKEN") - os.Unsetenv("AGENTIC_BASE_URL") + _ = os.Unsetenv("AGENTIC_TOKEN") + _ = os.Unsetenv("AGENTIC_BASE_URL") _, err = LoadConfig(tmpDir) @@ -83,7 +83,7 @@ func TestLoadConfig_Bad_NoToken(t *testing.T) { func TestLoadConfig_Good_EnvFileWithQuotes(t *testing.T) { tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Test with quoted values envContent := ` @@ -103,7 +103,7 @@ AGENTIC_BASE_URL='single-quoted-url' func TestLoadConfig_Good_EnvFileWithComments(t *testing.T) { tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() envContent := ` # This is a comment @@ -126,12 +126,12 @@ func TestSaveConfig_Good(t *testing.T) { // Create temp home directory tmpHome, err := os.MkdirTemp("", "agentic-home") require.NoError(t, err) - defer os.RemoveAll(tmpHome) + defer func() { _ = os.RemoveAll(tmpHome) }() // Override HOME for the test originalHome := os.Getenv("HOME") - os.Setenv("HOME", tmpHome) - defer os.Setenv("HOME", originalHome) + _ = os.Setenv("HOME", tmpHome) + defer func() { _ = 
os.Setenv("HOME", originalHome) }() cfg := &Config{ BaseURL: "https://saved.api.com", @@ -166,7 +166,7 @@ func TestConfigPath_Good(t *testing.T) { func TestLoadConfig_Good_DefaultBaseURL(t *testing.T) { tmpDir, err := os.MkdirTemp("", "agentic-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Only provide token, should use default base URL envContent := ` @@ -176,7 +176,7 @@ AGENTIC_TOKEN=test-token require.NoError(t, err) // Clear any env overrides - os.Unsetenv("AGENTIC_BASE_URL") + _ = os.Unsetenv("AGENTIC_BASE_URL") cfg, err := LoadConfig(tmpDir) diff --git a/pkg/ai/metrics.go b/pkg/ai/metrics.go index 830fc124..8df8ebb2 100644 --- a/pkg/ai/metrics.go +++ b/pkg/ai/metrics.go @@ -107,7 +107,7 @@ func readMetricsFile(path string, since time.Time) ([]Event, error) { } return nil, fmt.Errorf("open metrics file %s: %w", path, err) } - defer f.Close() + defer func() { _ = f.Close() }() var events []Event scanner := bufio.NewScanner(f) diff --git a/pkg/ai/rag.go b/pkg/ai/rag.go index 6df906b2..2efcc6e3 100644 --- a/pkg/ai/rag.go +++ b/pkg/ai/rag.go @@ -32,7 +32,7 @@ func QueryRAGForTask(task TaskInfo) string { if err != nil { return "" } - defer qdrantClient.Close() + defer func() { _ = qdrantClient.Close() }() ollamaCfg := rag.DefaultOllamaConfig() ollamaClient, err := rag.NewOllamaClient(ollamaCfg) diff --git a/pkg/ansible/modules.go b/pkg/ansible/modules.go index 25585af8..6819cf8a 100644 --- a/pkg/ansible/modules.go +++ b/pkg/ansible/modules.go @@ -1428,10 +1428,7 @@ func (e *Executor) moduleDockerCompose(ctx context.Context, client *SSHClient, a } // Heuristic for changed - changed := true - if strings.Contains(stdout, "Up to date") || strings.Contains(stderr, "Up to date") { - changed = false - } + changed := !strings.Contains(stdout, "Up to date") && !strings.Contains(stderr, "Up to date") return &TaskResult{Changed: changed, Stdout: stdout}, nil } diff --git a/pkg/ansible/ssh.go b/pkg/ansible/ssh.go 
index 6cf3d05e..e41be7a2 100644 --- a/pkg/ansible/ssh.go +++ b/pkg/ansible/ssh.go @@ -225,7 +225,7 @@ func (c *SSHClient) Run(ctx context.Context, cmd string) (stdout, stderr string, return "", "", -1, log.E("ssh.Run", "stdin pipe", err) } go func() { - defer stdin.Close() + defer func() { _ = stdin.Close() }() _, _ = io.WriteString(stdin, c.becomePass+"\n") }() } else if c.password != "" { @@ -236,7 +236,7 @@ func (c *SSHClient) Run(ctx context.Context, cmd string) (stdout, stderr string, return "", "", -1, log.E("ssh.Run", "stdin pipe", err) } go func() { - defer stdin.Close() + defer func() { _ = stdin.Close() }() _, _ = io.WriteString(stdin, c.password+"\n") }() } else { @@ -344,7 +344,7 @@ func (c *SSHClient) Upload(ctx context.Context, local io.Reader, remote string, } go func() { - defer stdin.Close() + defer func() { _ = stdin.Close() }() if pass != "" { _, _ = io.WriteString(stdin, pass+"\n") } @@ -357,7 +357,7 @@ func (c *SSHClient) Upload(ctx context.Context, local io.Reader, remote string, } go func() { - defer stdin.Close() + defer func() { _ = stdin.Close() }() _, _ = stdin.Write(content) }() } diff --git a/pkg/build/archive.go b/pkg/build/archive.go index 3e38bac1..5acee501 100644 --- a/pkg/build/archive.go +++ b/pkg/build/archive.go @@ -148,7 +148,7 @@ func createTarXzArchive(src, dst string) error { if err != nil { return fmt.Errorf("failed to open source file: %w", err) } - defer srcFile.Close() + defer func() { _ = srcFile.Close() }() srcInfo, err := srcFile.Stat() if err != nil { @@ -199,7 +199,7 @@ func createTarGzArchive(src, dst string) error { if err != nil { return fmt.Errorf("failed to open source file: %w", err) } - defer srcFile.Close() + defer func() { _ = srcFile.Close() }() srcInfo, err := srcFile.Stat() if err != nil { @@ -211,15 +211,15 @@ func createTarGzArchive(src, dst string) error { if err != nil { return fmt.Errorf("failed to create archive file: %w", err) } - defer dstFile.Close() + defer func() { _ = dstFile.Close() }() // 
Create gzip writer gzWriter := gzip.NewWriter(dstFile) - defer gzWriter.Close() + defer func() { _ = gzWriter.Close() }() // Create tar writer tarWriter := tar.NewWriter(gzWriter) - defer tarWriter.Close() + defer func() { _ = tarWriter.Close() }() // Create tar header header, err := tar.FileInfoHeader(srcInfo, "") @@ -249,7 +249,7 @@ func createZipArchive(src, dst string) error { if err != nil { return fmt.Errorf("failed to open source file: %w", err) } - defer srcFile.Close() + defer func() { _ = srcFile.Close() }() srcInfo, err := srcFile.Stat() if err != nil { @@ -261,11 +261,11 @@ func createZipArchive(src, dst string) error { if err != nil { return fmt.Errorf("failed to create archive file: %w", err) } - defer dstFile.Close() + defer func() { _ = dstFile.Close() }() // Create zip writer zipWriter := zip.NewWriter(dstFile) - defer zipWriter.Close() + defer func() { _ = zipWriter.Close() }() // Create zip header header, err := zip.FileInfoHeader(srcInfo) diff --git a/pkg/build/archive_test.go b/pkg/build/archive_test.go index 0d3a5c7d..181f9e28 100644 --- a/pkg/build/archive_test.go +++ b/pkg/build/archive_test.go @@ -338,11 +338,11 @@ func verifyTarGzContent(t *testing.T, archivePath, expectedName string) { file, err := os.Open(archivePath) require.NoError(t, err) - defer file.Close() + defer func() { _ = file.Close() }() gzReader, err := gzip.NewReader(file) require.NoError(t, err) - defer gzReader.Close() + defer func() { _ = gzReader.Close() }() tarReader := tar.NewReader(gzReader) @@ -361,7 +361,7 @@ func verifyZipContent(t *testing.T, archivePath, expectedName string) { reader, err := zip.OpenReader(archivePath) require.NoError(t, err) - defer reader.Close() + defer func() { _ = reader.Close() }() require.Len(t, reader.File, 1) assert.Equal(t, expectedName, reader.File[0].Name) diff --git a/pkg/build/checksum.go b/pkg/build/checksum.go index 926ac458..3e882f52 100644 --- a/pkg/build/checksum.go +++ b/pkg/build/checksum.go @@ -23,7 +23,7 @@ func 
Checksum(artifact Artifact) (Artifact, error) { if err != nil { return Artifact{}, fmt.Errorf("build.Checksum: failed to open file: %w", err) } - defer file.Close() + defer func() { _ = file.Close() }() // Compute SHA256 hash hasher := sha256.New() diff --git a/pkg/build/config_test.go b/pkg/build/config_test.go index f23359ce..3b51c2e3 100644 --- a/pkg/build/config_test.go +++ b/pkg/build/config_test.go @@ -236,7 +236,7 @@ func TestConfigExists_Good(t *testing.T) { func TestLoadConfig_Good_SignConfig(t *testing.T) { tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) + _ = os.MkdirAll(coreDir, 0755) configContent := `version: 1 sign: @@ -247,7 +247,7 @@ sign: identity: "Developer ID Application: Test" notarize: true ` - os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644) + _ = os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644) cfg, err := LoadConfig(tmpDir) if err != nil { diff --git a/pkg/build/signing/codesign.go b/pkg/build/signing/codesign.go index 4b55bb55..81f8325f 100644 --- a/pkg/build/signing/codesign.go +++ b/pkg/build/signing/codesign.go @@ -73,7 +73,7 @@ func (s *MacOSSigner) Notarize(ctx context.Context, binary string) error { if output, err := zipCmd.CombinedOutput(); err != nil { return fmt.Errorf("codesign.Notarize: failed to create zip: %w\nOutput: %s", err, string(output)) } - defer os.Remove(zipPath) + defer func() { _ = os.Remove(zipPath) }() // Submit to Apple and wait submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit", diff --git a/pkg/cli/daemon_test.go b/pkg/cli/daemon_test.go index d24fc01a..5eb51329 100644 --- a/pkg/cli/daemon_test.go +++ b/pkg/cli/daemon_test.go @@ -105,13 +105,13 @@ func TestHealthServer(t *testing.T) { resp, err := http.Get("http://" + addr + "/health") require.NoError(t, err) assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() // Ready should be OK by default 
resp, err = http.Get("http://" + addr + "/ready") require.NoError(t, err) assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() // Set not ready hs.SetReady(false) @@ -119,7 +119,7 @@ func TestHealthServer(t *testing.T) { resp, err = http.Get("http://" + addr + "/ready") require.NoError(t, err) assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() }) t.Run("with health checks", func(t *testing.T) { @@ -143,7 +143,7 @@ func TestHealthServer(t *testing.T) { resp, err := http.Get("http://" + addr + "/health") require.NoError(t, err) assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() // Make unhealthy healthy = false @@ -151,7 +151,7 @@ func TestHealthServer(t *testing.T) { resp, err = http.Get("http://" + addr + "/health") require.NoError(t, err) assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() }) } @@ -175,7 +175,7 @@ func TestDaemon(t *testing.T) { resp, err := http.Get("http://" + addr + "/health") require.NoError(t, err) assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() // Stop should succeed err = d.Stop() @@ -225,14 +225,14 @@ func TestDaemon(t *testing.T) { // Initially ready resp, _ := http.Get("http://" + addr + "/ready") assert.Equal(t, http.StatusOK, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() // Set not ready d.SetReady(false) resp, _ = http.Get("http://" + addr + "/ready") assert.Equal(t, http.StatusServiceUnavailable, resp.StatusCode) - resp.Body.Close() + _ = resp.Body.Close() }) t.Run("no health addr returns empty", func(t *testing.T) { diff --git a/pkg/collect/bitcointalk.go b/pkg/collect/bitcointalk.go new file mode 100644 index 00000000..5759fed8 --- /dev/null +++ b/pkg/collect/bitcointalk.go @@ -0,0 +1,297 @@ +package collect + +import ( + "context" + "fmt" + "net/http" + "path/filepath" + "strings" + 
"time" + + core "github.com/host-uk/core/pkg/framework/core" + "golang.org/x/net/html" +) + +// httpClient is the HTTP client used for all collection requests. +// Use SetHTTPClient to override for testing. +var httpClient = &http.Client{ + Timeout: 30 * time.Second, +} + +// BitcoinTalkCollector collects forum posts from BitcoinTalk. +type BitcoinTalkCollector struct { + // TopicID is the numeric topic identifier. + TopicID string + + // URL is a full URL to a BitcoinTalk topic page. If set, TopicID is + // extracted from it. + URL string + + // Pages limits collection to this many pages. 0 means all pages. + Pages int +} + +// Name returns the collector name. +func (b *BitcoinTalkCollector) Name() string { + id := b.TopicID + if id == "" && b.URL != "" { + id = "url" + } + return fmt.Sprintf("bitcointalk:%s", id) +} + +// Collect gathers posts from a BitcoinTalk topic. +func (b *BitcoinTalkCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: b.Name()} + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(b.Name(), "Starting BitcoinTalk collection") + } + + topicID := b.TopicID + if topicID == "" { + return result, core.E("collect.BitcoinTalk.Collect", "topic ID is required", nil) + } + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(b.Name(), fmt.Sprintf("[dry-run] Would collect topic %s", topicID), nil) + } + return result, nil + } + + baseDir := filepath.Join(cfg.OutputDir, "bitcointalk", topicID, "posts") + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.BitcoinTalk.Collect", "failed to create output directory", err) + } + + postNum := 0 + offset := 0 + pageCount := 0 + postsPerPage := 20 + + for { + if ctx.Err() != nil { + return result, core.E("collect.BitcoinTalk.Collect", "context cancelled", ctx.Err()) + } + + if b.Pages > 0 && pageCount >= b.Pages { + break + } + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "bitcointalk"); 
err != nil { + return result, err + } + } + + pageURL := fmt.Sprintf("https://bitcointalk.org/index.php?topic=%s.%d", topicID, offset) + + posts, err := b.fetchPage(ctx, pageURL) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(b.Name(), fmt.Sprintf("Failed to fetch page at offset %d: %v", offset, err), nil) + } + break + } + + if len(posts) == 0 { + break + } + + for _, post := range posts { + postNum++ + filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", postNum)) + content := formatPostMarkdown(postNum, post) + + if err := cfg.Output.Write(filePath, content); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, filePath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(b.Name(), fmt.Sprintf("Post %d by %s", postNum, post.Author), nil) + } + } + + pageCount++ + offset += postsPerPage + + // If we got fewer posts than expected, we've reached the end + if len(posts) < postsPerPage { + break + } + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(b.Name(), fmt.Sprintf("Collected %d posts", result.Items), result) + } + + return result, nil +} + +// btPost represents a parsed BitcoinTalk forum post. +type btPost struct { + Author string + Date string + Content string +} + +// fetchPage fetches and parses a single BitcoinTalk topic page. 
+func (b *BitcoinTalkCollector) fetchPage(ctx context.Context, pageURL string) ([]btPost, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, pageURL, nil) + if err != nil { + return nil, core.E("collect.BitcoinTalk.fetchPage", "failed to create request", err) + } + req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; CoreCollector/1.0)") + + resp, err := httpClient.Do(req) + if err != nil { + return nil, core.E("collect.BitcoinTalk.fetchPage", "request failed", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return nil, core.E("collect.BitcoinTalk.fetchPage", + fmt.Sprintf("unexpected status code: %d", resp.StatusCode), nil) + } + + doc, err := html.Parse(resp.Body) + if err != nil { + return nil, core.E("collect.BitcoinTalk.fetchPage", "failed to parse HTML", err) + } + + return extractPosts(doc), nil +} + +// extractPosts extracts post data from a parsed HTML document. +// It looks for the common BitcoinTalk post structure using div.post elements. +func extractPosts(doc *html.Node) []btPost { + var posts []btPost + var walk func(*html.Node) + + walk = func(n *html.Node) { + if n.Type == html.ElementNode && n.Data == "div" { + for _, attr := range n.Attr { + if attr.Key == "class" && strings.Contains(attr.Val, "post") { + post := parsePost(n) + if post.Content != "" { + posts = append(posts, post) + } + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + walk(c) + } + } + + walk(doc) + return posts +} + +// parsePost extracts author, date, and content from a post div. 
+func parsePost(node *html.Node) btPost { + post := btPost{} + var walk func(*html.Node) + + walk = func(n *html.Node) { + if n.Type == html.ElementNode { + for _, attr := range n.Attr { + if attr.Key == "class" { + switch { + case strings.Contains(attr.Val, "poster_info"): + post.Author = extractText(n) + case strings.Contains(attr.Val, "headerandpost"): + // Look for date in smalltext + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.ElementNode && c.Data == "div" { + for _, a := range c.Attr { + if a.Key == "class" && strings.Contains(a.Val, "smalltext") { + post.Date = strings.TrimSpace(extractText(c)) + } + } + } + } + case strings.Contains(attr.Val, "inner"): + post.Content = strings.TrimSpace(extractText(n)) + } + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + walk(c) + } + } + + walk(node) + return post +} + +// extractText recursively extracts text content from an HTML node. +func extractText(n *html.Node) string { + if n.Type == html.TextNode { + return n.Data + } + + var b strings.Builder + for c := n.FirstChild; c != nil; c = c.NextSibling { + text := extractText(c) + if text != "" { + if b.Len() > 0 && c.Type == html.ElementNode && (c.Data == "br" || c.Data == "p" || c.Data == "div") { + b.WriteString("\n") + } + b.WriteString(text) + } + } + return b.String() +} + +// formatPostMarkdown formats a BitcoinTalk post as markdown. +func formatPostMarkdown(num int, post btPost) string { + var b strings.Builder + fmt.Fprintf(&b, "# Post %d by %s\n\n", num, post.Author) + + if post.Date != "" { + fmt.Fprintf(&b, "**Date:** %s\n\n", post.Date) + } + + b.WriteString(post.Content) + b.WriteString("\n") + + return b.String() +} + +// ParsePostsFromHTML parses BitcoinTalk posts from raw HTML content. +// This is exported for testing purposes. 
+func ParsePostsFromHTML(htmlContent string) ([]btPost, error) { + doc, err := html.Parse(strings.NewReader(htmlContent)) + if err != nil { + return nil, core.E("collect.ParsePostsFromHTML", "failed to parse HTML", err) + } + return extractPosts(doc), nil +} + +// FormatPostMarkdown is exported for testing purposes. +func FormatPostMarkdown(num int, author, date, content string) string { + return formatPostMarkdown(num, btPost{Author: author, Date: date, Content: content}) +} + +// FetchPageFunc is an injectable function type for fetching pages, used in testing. +type FetchPageFunc func(ctx context.Context, url string) ([]btPost, error) + +// BitcoinTalkCollectorWithFetcher wraps BitcoinTalkCollector with a custom fetcher for testing. +type BitcoinTalkCollectorWithFetcher struct { + BitcoinTalkCollector + Fetcher FetchPageFunc +} + +// SetHTTPClient replaces the package-level HTTP client. +// Use this in tests to inject a custom transport or timeout. +func SetHTTPClient(c *http.Client) { + httpClient = c +} diff --git a/pkg/collect/bitcointalk_test.go b/pkg/collect/bitcointalk_test.go new file mode 100644 index 00000000..981d5650 --- /dev/null +++ b/pkg/collect/bitcointalk_test.go @@ -0,0 +1,93 @@ +package collect + +import ( + "context" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestBitcoinTalkCollector_Name_Good(t *testing.T) { + b := &BitcoinTalkCollector{TopicID: "12345"} + assert.Equal(t, "bitcointalk:12345", b.Name()) +} + +func TestBitcoinTalkCollector_Name_Good_URL(t *testing.T) { + b := &BitcoinTalkCollector{URL: "https://bitcointalk.org/index.php?topic=12345.0"} + assert.Equal(t, "bitcointalk:url", b.Name()) +} + +func TestBitcoinTalkCollector_Collect_Bad_NoTopicID(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + b := &BitcoinTalkCollector{} + _, err := b.Collect(context.Background(), cfg) + assert.Error(t, err) +} + +func 
TestBitcoinTalkCollector_Collect_Good_DryRun(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + b := &BitcoinTalkCollector{TopicID: "12345"} + result, err := b.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestParsePostsFromHTML_Good(t *testing.T) { + sampleHTML := ` + +
+
satoshi
+
+
January 03, 2009
+
+
This is the first post content.
+
+
+
hal
+
+
January 10, 2009
+
+
Running bitcoin!
+
+ ` + + posts, err := ParsePostsFromHTML(sampleHTML) + assert.NoError(t, err) + assert.Len(t, posts, 2) + + assert.Contains(t, posts[0].Author, "satoshi") + assert.Contains(t, posts[0].Content, "This is the first post content.") + assert.Contains(t, posts[0].Date, "January 03, 2009") + + assert.Contains(t, posts[1].Author, "hal") + assert.Contains(t, posts[1].Content, "Running bitcoin!") +} + +func TestParsePostsFromHTML_Good_Empty(t *testing.T) { + posts, err := ParsePostsFromHTML("") + assert.NoError(t, err) + assert.Empty(t, posts) +} + +func TestFormatPostMarkdown_Good(t *testing.T) { + md := FormatPostMarkdown(1, "satoshi", "January 03, 2009", "Hello, world!") + + assert.Contains(t, md, "# Post 1 by satoshi") + assert.Contains(t, md, "**Date:** January 03, 2009") + assert.Contains(t, md, "Hello, world!") +} + +func TestFormatPostMarkdown_Good_NoDate(t *testing.T) { + md := FormatPostMarkdown(5, "user", "", "Content here") + + assert.Contains(t, md, "# Post 5 by user") + assert.NotContains(t, md, "**Date:**") + assert.Contains(t, md, "Content here") +} diff --git a/pkg/collect/collect.go b/pkg/collect/collect.go new file mode 100644 index 00000000..8349414e --- /dev/null +++ b/pkg/collect/collect.go @@ -0,0 +1,103 @@ +// Package collect provides a data collection subsystem for gathering information +// from multiple sources including GitHub, BitcoinTalk, CoinGecko, and academic +// paper repositories. It supports rate limiting, incremental state tracking, +// and event-driven progress reporting. +package collect + +import ( + "context" + "path/filepath" + + "github.com/host-uk/core/pkg/io" +) + +// Collector is the interface all collection sources implement. +type Collector interface { + // Name returns a human-readable name for this collector. + Name() string + + // Collect gathers data from the source and writes it to the configured output. 
+ Collect(ctx context.Context, cfg *Config) (*Result, error) +} + +// Config holds shared configuration for all collectors. +type Config struct { + // Output is the storage medium for writing collected data. + Output io.Medium + + // OutputDir is the base directory for all collected data. + OutputDir string + + // Limiter provides per-source rate limiting. + Limiter *RateLimiter + + // State tracks collection progress for incremental runs. + State *State + + // Dispatcher manages event dispatch for progress reporting. + Dispatcher *Dispatcher + + // Verbose enables detailed logging output. + Verbose bool + + // DryRun simulates collection without writing files. + DryRun bool +} + +// Result holds the output of a collection run. +type Result struct { + // Source identifies which collector produced this result. + Source string + + // Items is the number of items successfully collected. + Items int + + // Errors is the number of errors encountered during collection. + Errors int + + // Skipped is the number of items skipped (e.g. already collected). + Skipped int + + // Files lists the paths of all files written. + Files []string +} + +// NewConfig creates a Config with sensible defaults. +// It initialises a MockMedium for output if none is provided, +// sets up a rate limiter, state tracker, and event dispatcher. +func NewConfig(outputDir string) *Config { + m := io.NewMockMedium() + return &Config{ + Output: m, + OutputDir: outputDir, + Limiter: NewRateLimiter(), + State: NewState(m, filepath.Join(outputDir, ".collect-state.json")), + Dispatcher: NewDispatcher(), + } +} + +// NewConfigWithMedium creates a Config using the specified storage medium. 
+func NewConfigWithMedium(m io.Medium, outputDir string) *Config { + return &Config{ + Output: m, + OutputDir: outputDir, + Limiter: NewRateLimiter(), + State: NewState(m, filepath.Join(outputDir, ".collect-state.json")), + Dispatcher: NewDispatcher(), + } +} + +// MergeResults combines multiple results into a single aggregated result. +func MergeResults(source string, results ...*Result) *Result { + merged := &Result{Source: source} + for _, r := range results { + if r == nil { + continue + } + merged.Items += r.Items + merged.Errors += r.Errors + merged.Skipped += r.Skipped + merged.Files = append(merged.Files, r.Files...) + } + return merged +} diff --git a/pkg/collect/collect_test.go b/pkg/collect/collect_test.go new file mode 100644 index 00000000..3820a1e4 --- /dev/null +++ b/pkg/collect/collect_test.go @@ -0,0 +1,68 @@ +package collect + +import ( + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestNewConfig_Good(t *testing.T) { + cfg := NewConfig("/tmp/output") + + assert.NotNil(t, cfg) + assert.Equal(t, "/tmp/output", cfg.OutputDir) + assert.NotNil(t, cfg.Output) + assert.NotNil(t, cfg.Limiter) + assert.NotNil(t, cfg.State) + assert.NotNil(t, cfg.Dispatcher) + assert.False(t, cfg.Verbose) + assert.False(t, cfg.DryRun) +} + +func TestNewConfigWithMedium_Good(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/data") + + assert.NotNil(t, cfg) + assert.Equal(t, m, cfg.Output) + assert.Equal(t, "/data", cfg.OutputDir) + assert.NotNil(t, cfg.Limiter) + assert.NotNil(t, cfg.State) + assert.NotNil(t, cfg.Dispatcher) +} + +func TestMergeResults_Good(t *testing.T) { + r1 := &Result{ + Source: "a", + Items: 5, + Errors: 1, + Files: []string{"a.md", "b.md"}, + } + r2 := &Result{ + Source: "b", + Items: 3, + Skipped: 2, + Files: []string{"c.md"}, + } + + merged := MergeResults("combined", r1, r2) + assert.Equal(t, "combined", merged.Source) + assert.Equal(t, 8, merged.Items) + assert.Equal(t, 
1, merged.Errors) + assert.Equal(t, 2, merged.Skipped) + assert.Len(t, merged.Files, 3) +} + +func TestMergeResults_Good_NilResults(t *testing.T) { + r1 := &Result{Items: 3} + merged := MergeResults("test", r1, nil, nil) + assert.Equal(t, 3, merged.Items) +} + +func TestMergeResults_Good_Empty(t *testing.T) { + merged := MergeResults("empty") + assert.Equal(t, 0, merged.Items) + assert.Equal(t, 0, merged.Errors) + assert.Nil(t, merged.Files) +} diff --git a/pkg/collect/events.go b/pkg/collect/events.go new file mode 100644 index 00000000..70839865 --- /dev/null +++ b/pkg/collect/events.go @@ -0,0 +1,133 @@ +package collect + +import ( + "sync" + "time" +) + +// Event types used by the collection subsystem. +const ( + // EventStart is emitted when a collector begins its run. + EventStart = "start" + + // EventProgress is emitted to report incremental progress. + EventProgress = "progress" + + // EventItem is emitted when a single item is collected. + EventItem = "item" + + // EventError is emitted when an error occurs during collection. + EventError = "error" + + // EventComplete is emitted when a collector finishes its run. + EventComplete = "complete" +) + +// Event represents a collection event. +type Event struct { + // Type is one of the Event* constants. + Type string `json:"type"` + + // Source identifies the collector that emitted the event. + Source string `json:"source"` + + // Message is a human-readable description of the event. + Message string `json:"message"` + + // Data carries optional event-specific payload. + Data any `json:"data,omitempty"` + + // Time is when the event occurred. + Time time.Time `json:"time"` +} + +// EventHandler handles collection events. +type EventHandler func(Event) + +// Dispatcher manages event dispatch. Handlers are registered per event type +// and are called synchronously when an event is emitted. 
+type Dispatcher struct { + mu sync.RWMutex + handlers map[string][]EventHandler +} + +// NewDispatcher creates a new event dispatcher. +func NewDispatcher() *Dispatcher { + return &Dispatcher{ + handlers: make(map[string][]EventHandler), + } +} + +// On registers a handler for an event type. Multiple handlers can be +// registered for the same event type and will be called in order. +func (d *Dispatcher) On(eventType string, handler EventHandler) { + d.mu.Lock() + defer d.mu.Unlock() + d.handlers[eventType] = append(d.handlers[eventType], handler) +} + +// Emit dispatches an event to all registered handlers for that event type. +// If no handlers are registered for the event type, the event is silently dropped. +// The event's Time field is set to now if it is zero. +func (d *Dispatcher) Emit(event Event) { + if event.Time.IsZero() { + event.Time = time.Now() + } + + d.mu.RLock() + handlers := d.handlers[event.Type] + d.mu.RUnlock() + + for _, h := range handlers { + h(event) + } +} + +// EmitStart emits a start event for the given source. +func (d *Dispatcher) EmitStart(source, message string) { + d.Emit(Event{ + Type: EventStart, + Source: source, + Message: message, + }) +} + +// EmitProgress emits a progress event. +func (d *Dispatcher) EmitProgress(source, message string, data any) { + d.Emit(Event{ + Type: EventProgress, + Source: source, + Message: message, + Data: data, + }) +} + +// EmitItem emits an item event. +func (d *Dispatcher) EmitItem(source, message string, data any) { + d.Emit(Event{ + Type: EventItem, + Source: source, + Message: message, + Data: data, + }) +} + +// EmitError emits an error event. +func (d *Dispatcher) EmitError(source, message string, data any) { + d.Emit(Event{ + Type: EventError, + Source: source, + Message: message, + Data: data, + }) +} + +// EmitComplete emits a complete event. 
+func (d *Dispatcher) EmitComplete(source, message string, data any) { + d.Emit(Event{ + Type: EventComplete, + Source: source, + Message: message, + Data: data, + }) +} diff --git a/pkg/collect/events_test.go b/pkg/collect/events_test.go new file mode 100644 index 00000000..ae9ae5d7 --- /dev/null +++ b/pkg/collect/events_test.go @@ -0,0 +1,133 @@ +package collect + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestDispatcher_Emit_Good(t *testing.T) { + d := NewDispatcher() + + var received Event + d.On(EventStart, func(e Event) { + received = e + }) + + d.Emit(Event{ + Type: EventStart, + Source: "test", + Message: "hello", + }) + + assert.Equal(t, EventStart, received.Type) + assert.Equal(t, "test", received.Source) + assert.Equal(t, "hello", received.Message) + assert.False(t, received.Time.IsZero(), "Time should be set automatically") +} + +func TestDispatcher_On_Good(t *testing.T) { + d := NewDispatcher() + + var count int + handler := func(e Event) { count++ } + + d.On(EventProgress, handler) + d.On(EventProgress, handler) + d.On(EventProgress, handler) + + d.Emit(Event{Type: EventProgress, Source: "test"}) + assert.Equal(t, 3, count, "All three handlers should be called") +} + +func TestDispatcher_Emit_Good_NoHandlers(t *testing.T) { + d := NewDispatcher() + + // Should not panic when emitting an event with no handlers + assert.NotPanics(t, func() { + d.Emit(Event{ + Type: "unknown-event", + Source: "test", + Message: "this should be silently dropped", + }) + }) +} + +func TestDispatcher_Emit_Good_MultipleEventTypes(t *testing.T) { + d := NewDispatcher() + + var starts, errors int + d.On(EventStart, func(e Event) { starts++ }) + d.On(EventError, func(e Event) { errors++ }) + + d.Emit(Event{Type: EventStart, Source: "test"}) + d.Emit(Event{Type: EventStart, Source: "test"}) + d.Emit(Event{Type: EventError, Source: "test"}) + + assert.Equal(t, 2, starts) + assert.Equal(t, 1, errors) +} + +func 
TestDispatcher_Emit_Good_SetsTime(t *testing.T) { + d := NewDispatcher() + + var received Event + d.On(EventItem, func(e Event) { + received = e + }) + + before := time.Now() + d.Emit(Event{Type: EventItem, Source: "test"}) + after := time.Now() + + assert.True(t, received.Time.After(before) || received.Time.Equal(before)) + assert.True(t, received.Time.Before(after) || received.Time.Equal(after)) +} + +func TestDispatcher_Emit_Good_PreservesExistingTime(t *testing.T) { + d := NewDispatcher() + + customTime := time.Date(2025, 6, 15, 12, 0, 0, 0, time.UTC) + var received Event + d.On(EventItem, func(e Event) { + received = e + }) + + d.Emit(Event{Type: EventItem, Source: "test", Time: customTime}) + assert.True(t, customTime.Equal(received.Time)) +} + +func TestDispatcher_EmitHelpers_Good(t *testing.T) { + d := NewDispatcher() + + events := make(map[string]Event) + for _, eventType := range []string{EventStart, EventProgress, EventItem, EventError, EventComplete} { + et := eventType + d.On(et, func(e Event) { + events[et] = e + }) + } + + d.EmitStart("s1", "started") + d.EmitProgress("s2", "progressing", map[string]int{"count": 5}) + d.EmitItem("s3", "got item", nil) + d.EmitError("s4", "something failed", nil) + d.EmitComplete("s5", "done", nil) + + assert.Equal(t, "s1", events[EventStart].Source) + assert.Equal(t, "started", events[EventStart].Message) + + assert.Equal(t, "s2", events[EventProgress].Source) + assert.NotNil(t, events[EventProgress].Data) + + assert.Equal(t, "s3", events[EventItem].Source) + assert.Equal(t, "s4", events[EventError].Source) + assert.Equal(t, "s5", events[EventComplete].Source) +} + +func TestNewDispatcher_Good(t *testing.T) { + d := NewDispatcher() + assert.NotNil(t, d) + assert.NotNil(t, d.handlers) +} diff --git a/pkg/collect/excavate.go b/pkg/collect/excavate.go new file mode 100644 index 00000000..facd5ccd --- /dev/null +++ b/pkg/collect/excavate.go @@ -0,0 +1,128 @@ +package collect + +import ( + "context" + "fmt" + "time" + + 
core "github.com/host-uk/core/pkg/framework/core" +) + +// Excavator runs multiple collectors as a coordinated operation. +// It provides sequential execution with rate limit respect, state tracking +// for resume support, and aggregated results. +type Excavator struct { + // Collectors is the list of collectors to run. + Collectors []Collector + + // ScanOnly reports what would be collected without performing collection. + ScanOnly bool + + // Resume enables incremental collection using saved state. + Resume bool +} + +// Name returns the orchestrator name. +func (e *Excavator) Name() string { + return "excavator" +} + +// Run executes all collectors sequentially, respecting rate limits and +// using state for resume support. Results are aggregated from all collectors. +func (e *Excavator) Run(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: e.Name()} + + if len(e.Collectors) == 0 { + return result, nil + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(e.Name(), fmt.Sprintf("Starting excavation with %d collectors", len(e.Collectors))) + } + + // Load state if resuming + if e.Resume && cfg.State != nil { + if err := cfg.State.Load(); err != nil { + return result, core.E("collect.Excavator.Run", "failed to load state", err) + } + } + + // If scan-only, just report what would be collected + if e.ScanOnly { + for _, c := range e.Collectors { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(e.Name(), fmt.Sprintf("[scan] Would run collector: %s", c.Name()), nil) + } + } + return result, nil + } + + for i, c := range e.Collectors { + if ctx.Err() != nil { + return result, core.E("collect.Excavator.Run", "context cancelled", ctx.Err()) + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(e.Name(), + fmt.Sprintf("Running collector %d/%d: %s", i+1, len(e.Collectors), c.Name()), nil) + } + + // Check if we should skip (already completed in a previous run) + if e.Resume && cfg.State != nil { + if entry, ok := 
cfg.State.Get(c.Name()); ok { + if entry.Items > 0 && !entry.LastRun.IsZero() { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(e.Name(), + fmt.Sprintf("Skipping %s (already collected %d items on %s)", + c.Name(), entry.Items, entry.LastRun.Format(time.RFC3339)), nil) + } + result.Skipped++ + continue + } + } + } + + collectorResult, err := c.Collect(ctx, cfg) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(e.Name(), + fmt.Sprintf("Collector %s failed: %v", c.Name(), err), nil) + } + continue + } + + if collectorResult != nil { + result.Items += collectorResult.Items + result.Errors += collectorResult.Errors + result.Skipped += collectorResult.Skipped + result.Files = append(result.Files, collectorResult.Files...) + + // Update state + if cfg.State != nil { + cfg.State.Set(c.Name(), &StateEntry{ + Source: c.Name(), + LastRun: time.Now(), + Items: collectorResult.Items, + }) + } + } + } + + // Save state + if cfg.State != nil { + if err := cfg.State.Save(); err != nil { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(e.Name(), fmt.Sprintf("Failed to save state: %v", err), nil) + } + } + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(e.Name(), + fmt.Sprintf("Excavation complete: %d items, %d errors, %d skipped", + result.Items, result.Errors, result.Skipped), result) + } + + return result, nil +} diff --git a/pkg/collect/excavate_test.go b/pkg/collect/excavate_test.go new file mode 100644 index 00000000..6f63216c --- /dev/null +++ b/pkg/collect/excavate_test.go @@ -0,0 +1,202 @@ +package collect + +import ( + "context" + "fmt" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +// mockCollector is a simple collector for testing the Excavator. 
+type mockCollector struct { + name string + items int + err error + called bool +} + +func (m *mockCollector) Name() string { return m.name } + +func (m *mockCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) { + m.called = true + if m.err != nil { + return &Result{Source: m.name, Errors: 1}, m.err + } + + result := &Result{Source: m.name, Items: m.items} + for i := 0; i < m.items; i++ { + result.Files = append(result.Files, fmt.Sprintf("/output/%s/%d.md", m.name, i)) + } + + if cfg.DryRun { + return &Result{Source: m.name}, nil + } + + return result, nil +} + +func TestExcavator_Name_Good(t *testing.T) { + e := &Excavator{} + assert.Equal(t, "excavator", e.Name()) +} + +func TestExcavator_Run_Good(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + c1 := &mockCollector{name: "source-a", items: 3} + c2 := &mockCollector{name: "source-b", items: 5} + + e := &Excavator{ + Collectors: []Collector{c1, c2}, + } + + result, err := e.Run(context.Background(), cfg) + + assert.NoError(t, err) + assert.True(t, c1.called) + assert.True(t, c2.called) + assert.Equal(t, 8, result.Items) + assert.Len(t, result.Files, 8) +} + +func TestExcavator_Run_Good_Empty(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + e := &Excavator{} + result, err := e.Run(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestExcavator_Run_Good_DryRun(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + c1 := &mockCollector{name: "source-a", items: 10} + c2 := &mockCollector{name: "source-b", items: 20} + + e := &Excavator{ + Collectors: []Collector{c1, c2}, + } + + result, err := e.Run(context.Background(), cfg) + + assert.NoError(t, err) + assert.True(t, c1.called) + assert.True(t, c2.called) + // In dry run, mockCollector returns 0 items + assert.Equal(t, 0, result.Items) +} + +func 
TestExcavator_Run_Good_ScanOnly(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + c1 := &mockCollector{name: "source-a", items: 10} + + var progressMessages []string + cfg.Dispatcher.On(EventProgress, func(e Event) { + progressMessages = append(progressMessages, e.Message) + }) + + e := &Excavator{ + Collectors: []Collector{c1}, + ScanOnly: true, + } + + result, err := e.Run(context.Background(), cfg) + + assert.NoError(t, err) + assert.False(t, c1.called, "Collector should not be called in scan-only mode") + assert.Equal(t, 0, result.Items) + assert.NotEmpty(t, progressMessages) + assert.Contains(t, progressMessages[0], "source-a") +} + +func TestExcavator_Run_Good_WithErrors(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + c1 := &mockCollector{name: "good", items: 5} + c2 := &mockCollector{name: "bad", err: fmt.Errorf("network error")} + c3 := &mockCollector{name: "also-good", items: 3} + + e := &Excavator{ + Collectors: []Collector{c1, c2, c3}, + } + + result, err := e.Run(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 8, result.Items) + assert.Equal(t, 1, result.Errors) // c2 failed + assert.True(t, c1.called) + assert.True(t, c2.called) + assert.True(t, c3.called) +} + +func TestExcavator_Run_Good_CancelledContext(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + ctx, cancel := context.WithCancel(context.Background()) + cancel() // Cancel immediately + + c1 := &mockCollector{name: "source-a", items: 5} + + e := &Excavator{ + Collectors: []Collector{c1}, + } + + _, err := e.Run(ctx, cfg) + assert.Error(t, err) +} + +func TestExcavator_Run_Good_SavesState(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + c1 := &mockCollector{name: "source-a", items: 5} + + e := &Excavator{ + Collectors: []Collector{c1}, + } + + _, err := 
e.Run(context.Background(), cfg) + assert.NoError(t, err) + + // Verify state was saved + entry, ok := cfg.State.Get("source-a") + assert.True(t, ok) + assert.Equal(t, 5, entry.Items) + assert.Equal(t, "source-a", entry.Source) +} + +func TestExcavator_Run_Good_Events(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + var startCount, completeCount int + cfg.Dispatcher.On(EventStart, func(e Event) { startCount++ }) + cfg.Dispatcher.On(EventComplete, func(e Event) { completeCount++ }) + + c1 := &mockCollector{name: "source-a", items: 1} + e := &Excavator{ + Collectors: []Collector{c1}, + } + + _, err := e.Run(context.Background(), cfg) + assert.NoError(t, err) + assert.Equal(t, 1, startCount) + assert.Equal(t, 1, completeCount) +} diff --git a/pkg/collect/github.go b/pkg/collect/github.go new file mode 100644 index 00000000..75eafe64 --- /dev/null +++ b/pkg/collect/github.go @@ -0,0 +1,289 @@ +package collect + +import ( + "context" + "encoding/json" + "fmt" + "os/exec" + "path/filepath" + "strings" + "time" + + core "github.com/host-uk/core/pkg/framework/core" +) + +// ghIssue represents a GitHub issue or pull request as returned by the gh CLI. +type ghIssue struct { + Number int `json:"number"` + Title string `json:"title"` + State string `json:"state"` + Author ghAuthor `json:"author"` + Body string `json:"body"` + CreatedAt time.Time `json:"createdAt"` + Labels []ghLabel `json:"labels"` + URL string `json:"url"` +} + +type ghAuthor struct { + Login string `json:"login"` +} + +type ghLabel struct { + Name string `json:"name"` +} + +// ghRepo represents a GitHub repository as returned by the gh CLI. +type ghRepo struct { + Name string `json:"name"` +} + +// GitHubCollector collects issues and PRs from GitHub repositories. +type GitHubCollector struct { + // Org is the GitHub organisation. + Org string + + // Repo is the repository name. If empty and Org is set, all repos are collected. 
+ Repo string + + // IssuesOnly limits collection to issues (excludes PRs). + IssuesOnly bool + + // PRsOnly limits collection to PRs (excludes issues). + PRsOnly bool +} + +// Name returns the collector name. +func (g *GitHubCollector) Name() string { + if g.Repo != "" { + return fmt.Sprintf("github:%s/%s", g.Org, g.Repo) + } + return fmt.Sprintf("github:%s", g.Org) +} + +// Collect gathers issues and/or PRs from GitHub repositories. +func (g *GitHubCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: g.Name()} + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(g.Name(), "Starting GitHub collection") + } + + // If no specific repo, list all repos in the org + repos := []string{g.Repo} + if g.Repo == "" { + var err error + repos, err = g.listOrgRepos(ctx) + if err != nil { + return result, err + } + } + + for _, repo := range repos { + if ctx.Err() != nil { + return result, core.E("collect.GitHub.Collect", "context cancelled", ctx.Err()) + } + + if !g.PRsOnly { + issueResult, err := g.collectIssues(ctx, cfg, repo) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(g.Name(), fmt.Sprintf("Error collecting issues for %s: %v", repo, err), nil) + } + } else { + result.Items += issueResult.Items + result.Skipped += issueResult.Skipped + result.Files = append(result.Files, issueResult.Files...) + } + } + + if !g.IssuesOnly { + prResult, err := g.collectPRs(ctx, cfg, repo) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(g.Name(), fmt.Sprintf("Error collecting PRs for %s: %v", repo, err), nil) + } + } else { + result.Items += prResult.Items + result.Skipped += prResult.Skipped + result.Files = append(result.Files, prResult.Files...) 
+ } + } + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(g.Name(), fmt.Sprintf("Collected %d items", result.Items), result) + } + + return result, nil +} + +// listOrgRepos returns all repository names for the configured org. +func (g *GitHubCollector) listOrgRepos(ctx context.Context) ([]string, error) { + cmd := exec.CommandContext(ctx, "gh", "repo", "list", g.Org, + "--json", "name", + "--limit", "1000", + ) + out, err := cmd.Output() + if err != nil { + return nil, core.E("collect.GitHub.listOrgRepos", "failed to list repos", err) + } + + var repos []ghRepo + if err := json.Unmarshal(out, &repos); err != nil { + return nil, core.E("collect.GitHub.listOrgRepos", "failed to parse repo list", err) + } + + names := make([]string, len(repos)) + for i, r := range repos { + names[i] = r.Name + } + return names, nil +} + +// collectIssues collects issues for a single repository. +func (g *GitHubCollector) collectIssues(ctx context.Context, cfg *Config, repo string) (*Result, error) { + result := &Result{Source: fmt.Sprintf("github:%s/%s/issues", g.Org, repo)} + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(g.Name(), fmt.Sprintf("[dry-run] Would collect issues for %s/%s", g.Org, repo), nil) + } + return result, nil + } + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "github"); err != nil { + return result, err + } + } + + repoRef := fmt.Sprintf("%s/%s", g.Org, repo) + cmd := exec.CommandContext(ctx, "gh", "issue", "list", + "--repo", repoRef, + "--json", "number,title,state,author,body,createdAt,labels,url", + "--limit", "100", + "--state", "all", + ) + out, err := cmd.Output() + if err != nil { + return result, core.E("collect.GitHub.collectIssues", "gh issue list failed for "+repoRef, err) + } + + var issues []ghIssue + if err := json.Unmarshal(out, &issues); err != nil { + return result, core.E("collect.GitHub.collectIssues", "failed to parse issues", err) + } + + baseDir := filepath.Join(cfg.OutputDir, 
"github", g.Org, repo, "issues") + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.GitHub.collectIssues", "failed to create output directory", err) + } + + for _, issue := range issues { + filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", issue.Number)) + content := formatIssueMarkdown(issue) + + if err := cfg.Output.Write(filePath, content); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, filePath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(g.Name(), fmt.Sprintf("Issue #%d: %s", issue.Number, issue.Title), nil) + } + } + + return result, nil +} + +// collectPRs collects pull requests for a single repository. +func (g *GitHubCollector) collectPRs(ctx context.Context, cfg *Config, repo string) (*Result, error) { + result := &Result{Source: fmt.Sprintf("github:%s/%s/pulls", g.Org, repo)} + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(g.Name(), fmt.Sprintf("[dry-run] Would collect PRs for %s/%s", g.Org, repo), nil) + } + return result, nil + } + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "github"); err != nil { + return result, err + } + } + + repoRef := fmt.Sprintf("%s/%s", g.Org, repo) + cmd := exec.CommandContext(ctx, "gh", "pr", "list", + "--repo", repoRef, + "--json", "number,title,state,author,body,createdAt,labels,url", + "--limit", "100", + "--state", "all", + ) + out, err := cmd.Output() + if err != nil { + return result, core.E("collect.GitHub.collectPRs", "gh pr list failed for "+repoRef, err) + } + + var prs []ghIssue + if err := json.Unmarshal(out, &prs); err != nil { + return result, core.E("collect.GitHub.collectPRs", "failed to parse pull requests", err) + } + + baseDir := filepath.Join(cfg.OutputDir, "github", g.Org, repo, "pulls") + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.GitHub.collectPRs", "failed to create output directory", err) + } + + for 
_, pr := range prs { + filePath := filepath.Join(baseDir, fmt.Sprintf("%d.md", pr.Number)) + content := formatIssueMarkdown(pr) + + if err := cfg.Output.Write(filePath, content); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, filePath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(g.Name(), fmt.Sprintf("PR #%d: %s", pr.Number, pr.Title), nil) + } + } + + return result, nil +} + +// formatIssueMarkdown formats a GitHub issue or PR as markdown. +func formatIssueMarkdown(issue ghIssue) string { + var b strings.Builder + fmt.Fprintf(&b, "# %s\n\n", issue.Title) + fmt.Fprintf(&b, "- **Number:** #%d\n", issue.Number) + fmt.Fprintf(&b, "- **State:** %s\n", issue.State) + fmt.Fprintf(&b, "- **Author:** %s\n", issue.Author.Login) + fmt.Fprintf(&b, "- **Created:** %s\n", issue.CreatedAt.Format(time.RFC3339)) + + if len(issue.Labels) > 0 { + labels := make([]string, len(issue.Labels)) + for i, l := range issue.Labels { + labels[i] = l.Name + } + fmt.Fprintf(&b, "- **Labels:** %s\n", strings.Join(labels, ", ")) + } + + if issue.URL != "" { + fmt.Fprintf(&b, "- **URL:** %s\n", issue.URL) + } + + if issue.Body != "" { + fmt.Fprintf(&b, "\n%s\n", issue.Body) + } + + return b.String() +} diff --git a/pkg/collect/github_test.go b/pkg/collect/github_test.go new file mode 100644 index 00000000..21bfe2a8 --- /dev/null +++ b/pkg/collect/github_test.go @@ -0,0 +1,103 @@ +package collect + +import ( + "context" + "testing" + "time" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestGitHubCollector_Name_Good(t *testing.T) { + g := &GitHubCollector{Org: "host-uk", Repo: "core"} + assert.Equal(t, "github:host-uk/core", g.Name()) +} + +func TestGitHubCollector_Name_Good_OrgOnly(t *testing.T) { + g := &GitHubCollector{Org: "host-uk"} + assert.Equal(t, "github:host-uk", g.Name()) +} + +func TestGitHubCollector_Collect_Good_DryRun(t *testing.T) { + m := io.NewMockMedium() + cfg := 
NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + var progressEmitted bool + cfg.Dispatcher.On(EventProgress, func(e Event) { + progressEmitted = true + }) + + g := &GitHubCollector{Org: "host-uk", Repo: "core"} + result, err := g.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.NotNil(t, result) + assert.Equal(t, 0, result.Items) + assert.True(t, progressEmitted, "Should emit progress event in dry-run mode") +} + +func TestGitHubCollector_Collect_Good_DryRun_IssuesOnly(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + g := &GitHubCollector{Org: "test-org", Repo: "test-repo", IssuesOnly: true} + result, err := g.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestGitHubCollector_Collect_Good_DryRun_PRsOnly(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + g := &GitHubCollector{Org: "test-org", Repo: "test-repo", PRsOnly: true} + result, err := g.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestFormatIssueMarkdown_Good(t *testing.T) { + issue := ghIssue{ + Number: 42, + Title: "Test Issue", + State: "open", + Author: ghAuthor{Login: "testuser"}, + Body: "This is the body.", + CreatedAt: time.Date(2025, 1, 15, 10, 0, 0, 0, time.UTC), + Labels: []ghLabel{ + {Name: "bug"}, + {Name: "priority"}, + }, + URL: "https://github.com/test/repo/issues/42", + } + + md := formatIssueMarkdown(issue) + + assert.Contains(t, md, "# Test Issue") + assert.Contains(t, md, "**Number:** #42") + assert.Contains(t, md, "**State:** open") + assert.Contains(t, md, "**Author:** testuser") + assert.Contains(t, md, "**Labels:** bug, priority") + assert.Contains(t, md, "This is the body.") + assert.Contains(t, md, "**URL:** https://github.com/test/repo/issues/42") +} + +func TestFormatIssueMarkdown_Good_NoLabels(t *testing.T) { + 
issue := ghIssue{ + Number: 1, + Title: "Simple", + State: "closed", + Author: ghAuthor{Login: "user"}, + } + + md := formatIssueMarkdown(issue) + + assert.Contains(t, md, "# Simple") + assert.NotContains(t, md, "**Labels:**") +} diff --git a/pkg/collect/market.go b/pkg/collect/market.go new file mode 100644 index 00000000..125f4292 --- /dev/null +++ b/pkg/collect/market.go @@ -0,0 +1,277 @@ +package collect + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "path/filepath" + "strings" + "time" + + core "github.com/host-uk/core/pkg/framework/core" +) + +// coinGeckoBaseURL is the base URL for the CoinGecko API. +// It is a variable so it can be overridden in tests. +var coinGeckoBaseURL = "https://api.coingecko.com/api/v3" + +// MarketCollector collects market data from CoinGecko. +type MarketCollector struct { + // CoinID is the CoinGecko coin identifier (e.g. "bitcoin", "ethereum"). + CoinID string + + // Historical enables collection of historical market chart data. + Historical bool + + // FromDate is the start date for historical data in YYYY-MM-DD format. + FromDate string +} + +// Name returns the collector name. +func (m *MarketCollector) Name() string { + return fmt.Sprintf("market:%s", m.CoinID) +} + +// coinData represents the current coin data from CoinGecko. 
type coinData struct {
	ID     string `json:"id"`     // CoinGecko coin identifier, e.g. "bitcoin"
	Symbol string `json:"symbol"` // ticker symbol as returned by the API (lowercase in responses seen in tests)
	Name   string `json:"name"`   // human-readable coin name
	// MarketData carries the nested market metrics block of the response.
	MarketData marketData `json:"market_data"`
}

// marketData holds the market metrics embedded in a coin response.
// The map-valued fields are keyed by currency code (e.g. "usd"), which is
// how formatMarketSummary looks them up.
type marketData struct {
	CurrentPrice      map[string]float64 `json:"current_price"`
	MarketCap         map[string]float64 `json:"market_cap"`
	TotalVolume       map[string]float64 `json:"total_volume"`
	High24h           map[string]float64 `json:"high_24h"`
	Low24h            map[string]float64 `json:"low_24h"`
	PriceChange24h    float64            `json:"price_change_24h"`
	PriceChangePct24h float64            `json:"price_change_percentage_24h"`
	MarketCapRank     int                `json:"market_cap_rank"`
	TotalSupply       float64            `json:"total_supply"`
	CirculatingSupply float64            `json:"circulating_supply"`
	// LastUpdated is kept as the raw string from the API (looks like an
	// RFC 3339 timestamp in the test fixtures — not parsed here).
	LastUpdated string `json:"last_updated"`
}

// historicalData represents historical market chart data from CoinGecko.
// Each inner slice is a [timestamp, value] pair as emitted by the
// market_chart endpoint (timestamps appear to be Unix milliseconds in the
// test fixtures — confirm against the CoinGecko API docs).
type historicalData struct {
	Prices       [][]float64 `json:"prices"`
	MarketCaps   [][]float64 `json:"market_caps"`
	TotalVolumes [][]float64 `json:"total_volumes"`
}

// Collect gathers market data from CoinGecko.
+func (m *MarketCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: m.Name()} + + if m.CoinID == "" { + return result, core.E("collect.Market.Collect", "coin ID is required", nil) + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(m.Name(), fmt.Sprintf("Starting market data collection for %s", m.CoinID)) + } + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(m.Name(), fmt.Sprintf("[dry-run] Would collect market data for %s", m.CoinID), nil) + } + return result, nil + } + + baseDir := filepath.Join(cfg.OutputDir, "market", m.CoinID) + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.Market.Collect", "failed to create output directory", err) + } + + // Collect current data + currentResult, err := m.collectCurrent(ctx, cfg, baseDir) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(m.Name(), fmt.Sprintf("Failed to collect current data: %v", err), nil) + } + } else { + result.Items += currentResult.Items + result.Files = append(result.Files, currentResult.Files...) + } + + // Collect historical data if requested + if m.Historical { + histResult, err := m.collectHistorical(ctx, cfg, baseDir) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(m.Name(), fmt.Sprintf("Failed to collect historical data: %v", err), nil) + } + } else { + result.Items += histResult.Items + result.Files = append(result.Files, histResult.Files...) + } + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(m.Name(), fmt.Sprintf("Collected market data for %s", m.CoinID), result) + } + + return result, nil +} + +// collectCurrent fetches current coin data from CoinGecko. 
+func (m *MarketCollector) collectCurrent(ctx context.Context, cfg *Config, baseDir string) (*Result, error) { + result := &Result{Source: m.Name()} + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "coingecko"); err != nil { + return result, err + } + } + + url := fmt.Sprintf("%s/coins/%s", coinGeckoBaseURL, m.CoinID) + data, err := fetchJSON[coinData](ctx, url) + if err != nil { + return result, core.E("collect.Market.collectCurrent", "failed to fetch coin data", err) + } + + // Write raw JSON + jsonBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + return result, core.E("collect.Market.collectCurrent", "failed to marshal data", err) + } + + jsonPath := filepath.Join(baseDir, "current.json") + if err := cfg.Output.Write(jsonPath, string(jsonBytes)); err != nil { + return result, core.E("collect.Market.collectCurrent", "failed to write JSON", err) + } + result.Items++ + result.Files = append(result.Files, jsonPath) + + // Write summary markdown + summary := formatMarketSummary(data) + summaryPath := filepath.Join(baseDir, "summary.md") + if err := cfg.Output.Write(summaryPath, summary); err != nil { + return result, core.E("collect.Market.collectCurrent", "failed to write summary", err) + } + result.Items++ + result.Files = append(result.Files, summaryPath) + + return result, nil +} + +// collectHistorical fetches historical market chart data from CoinGecko. 
+func (m *MarketCollector) collectHistorical(ctx context.Context, cfg *Config, baseDir string) (*Result, error) { + result := &Result{Source: m.Name()} + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "coingecko"); err != nil { + return result, err + } + } + + days := "365" + if m.FromDate != "" { + fromTime, err := time.Parse("2006-01-02", m.FromDate) + if err == nil { + dayCount := int(time.Since(fromTime).Hours() / 24) + if dayCount > 0 { + days = fmt.Sprintf("%d", dayCount) + } + } + } + + url := fmt.Sprintf("%s/coins/%s/market_chart?vs_currency=usd&days=%s", coinGeckoBaseURL, m.CoinID, days) + data, err := fetchJSON[historicalData](ctx, url) + if err != nil { + return result, core.E("collect.Market.collectHistorical", "failed to fetch historical data", err) + } + + jsonBytes, err := json.MarshalIndent(data, "", " ") + if err != nil { + return result, core.E("collect.Market.collectHistorical", "failed to marshal data", err) + } + + jsonPath := filepath.Join(baseDir, "historical.json") + if err := cfg.Output.Write(jsonPath, string(jsonBytes)); err != nil { + return result, core.E("collect.Market.collectHistorical", "failed to write JSON", err) + } + result.Items++ + result.Files = append(result.Files, jsonPath) + + return result, nil +} + +// fetchJSON fetches JSON from a URL and unmarshals it into the given type. 
+func fetchJSON[T any](ctx context.Context, url string) (*T, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return nil, core.E("collect.fetchJSON", "failed to create request", err) + } + req.Header.Set("User-Agent", "CoreCollector/1.0") + req.Header.Set("Accept", "application/json") + + resp, err := httpClient.Do(req) + if err != nil { + return nil, core.E("collect.fetchJSON", "request failed", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return nil, core.E("collect.fetchJSON", + fmt.Sprintf("unexpected status code: %d for %s", resp.StatusCode, url), nil) + } + + var data T + if err := json.NewDecoder(resp.Body).Decode(&data); err != nil { + return nil, core.E("collect.fetchJSON", "failed to decode response", err) + } + + return &data, nil +} + +// formatMarketSummary formats coin data as a markdown summary. +func formatMarketSummary(data *coinData) string { + var b strings.Builder + fmt.Fprintf(&b, "# %s (%s)\n\n", data.Name, strings.ToUpper(data.Symbol)) + + md := data.MarketData + + if price, ok := md.CurrentPrice["usd"]; ok { + fmt.Fprintf(&b, "- **Current Price (USD):** $%.2f\n", price) + } + if cap, ok := md.MarketCap["usd"]; ok { + fmt.Fprintf(&b, "- **Market Cap (USD):** $%.0f\n", cap) + } + if vol, ok := md.TotalVolume["usd"]; ok { + fmt.Fprintf(&b, "- **24h Volume (USD):** $%.0f\n", vol) + } + if high, ok := md.High24h["usd"]; ok { + fmt.Fprintf(&b, "- **24h High (USD):** $%.2f\n", high) + } + if low, ok := md.Low24h["usd"]; ok { + fmt.Fprintf(&b, "- **24h Low (USD):** $%.2f\n", low) + } + + fmt.Fprintf(&b, "- **24h Price Change:** $%.2f (%.2f%%)\n", md.PriceChange24h, md.PriceChangePct24h) + + if md.MarketCapRank > 0 { + fmt.Fprintf(&b, "- **Market Cap Rank:** #%d\n", md.MarketCapRank) + } + if md.CirculatingSupply > 0 { + fmt.Fprintf(&b, "- **Circulating Supply:** %.0f\n", md.CirculatingSupply) + } + if md.TotalSupply > 0 { + fmt.Fprintf(&b, "- 
**Total Supply:** %.0f\n", md.TotalSupply) + } + if md.LastUpdated != "" { + fmt.Fprintf(&b, "\n*Last updated: %s*\n", md.LastUpdated) + } + + return b.String() +} + +// FormatMarketSummary is exported for testing. +func FormatMarketSummary(data *coinData) string { + return formatMarketSummary(data) +} diff --git a/pkg/collect/market_test.go b/pkg/collect/market_test.go new file mode 100644 index 00000000..ae4ac10d --- /dev/null +++ b/pkg/collect/market_test.go @@ -0,0 +1,187 @@ +package collect + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestMarketCollector_Name_Good(t *testing.T) { + m := &MarketCollector{CoinID: "bitcoin"} + assert.Equal(t, "market:bitcoin", m.Name()) +} + +func TestMarketCollector_Collect_Bad_NoCoinID(t *testing.T) { + mock := io.NewMockMedium() + cfg := NewConfigWithMedium(mock, "/output") + + m := &MarketCollector{} + _, err := m.Collect(context.Background(), cfg) + assert.Error(t, err) +} + +func TestMarketCollector_Collect_Good_DryRun(t *testing.T) { + mock := io.NewMockMedium() + cfg := NewConfigWithMedium(mock, "/output") + cfg.DryRun = true + + m := &MarketCollector{CoinID: "bitcoin"} + result, err := m.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestMarketCollector_Collect_Good_CurrentData(t *testing.T) { + // Set up a mock CoinGecko server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + data := coinData{ + ID: "bitcoin", + Symbol: "btc", + Name: "Bitcoin", + MarketData: marketData{ + CurrentPrice: map[string]float64{"usd": 42000.50}, + MarketCap: map[string]float64{"usd": 800000000000}, + TotalVolume: map[string]float64{"usd": 25000000000}, + High24h: map[string]float64{"usd": 43000}, + Low24h: map[string]float64{"usd": 41000}, + PriceChange24h: 500.25, + PriceChangePct24h: 1.2, + MarketCapRank: 
1, + CirculatingSupply: 19500000, + TotalSupply: 21000000, + LastUpdated: "2025-01-15T10:00:00Z", + }, + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(data) + })) + defer server.Close() + + // Override base URL + oldURL := coinGeckoBaseURL + coinGeckoBaseURL = server.URL + defer func() { coinGeckoBaseURL = oldURL }() + + mock := io.NewMockMedium() + cfg := NewConfigWithMedium(mock, "/output") + // Disable rate limiter to avoid delays in tests + cfg.Limiter = nil + + m := &MarketCollector{CoinID: "bitcoin"} + result, err := m.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 2, result.Items) // current.json + summary.md + assert.Len(t, result.Files, 2) + + // Verify current.json was written + content, err := mock.Read("/output/market/bitcoin/current.json") + assert.NoError(t, err) + assert.Contains(t, content, "bitcoin") + + // Verify summary.md was written + summary, err := mock.Read("/output/market/bitcoin/summary.md") + assert.NoError(t, err) + assert.Contains(t, summary, "Bitcoin") + assert.Contains(t, summary, "42000.50") +} + +func TestMarketCollector_Collect_Good_Historical(t *testing.T) { + callCount := 0 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callCount++ + w.Header().Set("Content-Type", "application/json") + + if callCount == 1 { + // Current data response + data := coinData{ + ID: "ethereum", + Symbol: "eth", + Name: "Ethereum", + MarketData: marketData{ + CurrentPrice: map[string]float64{"usd": 3000}, + }, + } + _ = json.NewEncoder(w).Encode(data) + } else { + // Historical data response + data := historicalData{ + Prices: [][]float64{{1705305600000, 3000.0}, {1705392000000, 3100.0}}, + MarketCaps: [][]float64{{1705305600000, 360000000000}}, + TotalVolumes: [][]float64{{1705305600000, 15000000000}}, + } + _ = json.NewEncoder(w).Encode(data) + } + })) + defer server.Close() + + oldURL := coinGeckoBaseURL + coinGeckoBaseURL = 
server.URL + defer func() { coinGeckoBaseURL = oldURL }() + + mock := io.NewMockMedium() + cfg := NewConfigWithMedium(mock, "/output") + cfg.Limiter = nil + + m := &MarketCollector{CoinID: "ethereum", Historical: true} + result, err := m.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 3, result.Items) // current.json + summary.md + historical.json + assert.Len(t, result.Files, 3) + + // Verify historical.json was written + content, err := mock.Read("/output/market/ethereum/historical.json") + assert.NoError(t, err) + assert.Contains(t, content, "3000") +} + +func TestFormatMarketSummary_Good(t *testing.T) { + data := &coinData{ + Name: "Bitcoin", + Symbol: "btc", + MarketData: marketData{ + CurrentPrice: map[string]float64{"usd": 50000}, + MarketCap: map[string]float64{"usd": 1000000000000}, + MarketCapRank: 1, + CirculatingSupply: 19500000, + TotalSupply: 21000000, + }, + } + + summary := FormatMarketSummary(data) + + assert.Contains(t, summary, "# Bitcoin (BTC)") + assert.Contains(t, summary, "$50000.00") + assert.Contains(t, summary, "Market Cap Rank:** #1") + assert.Contains(t, summary, "Circulating Supply") + assert.Contains(t, summary, "Total Supply") +} + +func TestMarketCollector_Collect_Bad_ServerError(t *testing.T) { + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer server.Close() + + oldURL := coinGeckoBaseURL + coinGeckoBaseURL = server.URL + defer func() { coinGeckoBaseURL = oldURL }() + + mock := io.NewMockMedium() + cfg := NewConfigWithMedium(mock, "/output") + cfg.Limiter = nil + + m := &MarketCollector{CoinID: "bitcoin"} + result, err := m.Collect(context.Background(), cfg) + + // Should have errors but not fail entirely + assert.NoError(t, err) + assert.Equal(t, 1, result.Errors) +} diff --git a/pkg/collect/papers.go b/pkg/collect/papers.go new file mode 100644 index 00000000..f314fbf7 --- /dev/null +++ 
// Paper source identifiers accepted by PapersCollector.Source.
const (
	PaperSourceIACR  = "iacr"
	PaperSourceArXiv = "arxiv"
	PaperSourceAll   = "all"
)

// PapersCollector collects academic papers from IACR and/or arXiv.
type PapersCollector struct {
	// Source is one of PaperSourceIACR, PaperSourceArXiv, or PaperSourceAll.
	Source string

	// Category is the arXiv category (e.g. "cs.CR" for cryptography).
	Category string

	// Query is the search query string.
	Query string
}

// Name returns the collector name, e.g. "papers:iacr".
func (p *PapersCollector) Name() string {
	return "papers:" + p.Source
}

// paper is the source-independent representation of a parsed academic paper.
type paper struct {
	ID       string // filesystem-safe identifier used for the output file name
	Title    string
	Authors  []string
	Abstract string
	Date     string
	URL      string
	Source   string // originating source ("iacr" or "arxiv")
}
+func (p *PapersCollector) Collect(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: p.Name()} + + if p.Query == "" { + return result, core.E("collect.Papers.Collect", "query is required", nil) + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(p.Name(), fmt.Sprintf("Starting paper collection for %q", p.Query)) + } + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(p.Name(), fmt.Sprintf("[dry-run] Would search papers for %q", p.Query), nil) + } + return result, nil + } + + switch p.Source { + case PaperSourceIACR: + return p.collectIACR(ctx, cfg) + case PaperSourceArXiv: + return p.collectArXiv(ctx, cfg) + case PaperSourceAll: + iacrResult, iacrErr := p.collectIACR(ctx, cfg) + arxivResult, arxivErr := p.collectArXiv(ctx, cfg) + + if iacrErr != nil && arxivErr != nil { + return result, core.E("collect.Papers.Collect", "all sources failed", iacrErr) + } + + merged := MergeResults(p.Name(), iacrResult, arxivResult) + if iacrErr != nil { + merged.Errors++ + } + if arxivErr != nil { + merged.Errors++ + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(p.Name(), fmt.Sprintf("Collected %d papers", merged.Items), merged) + } + + return merged, nil + default: + return result, core.E("collect.Papers.Collect", + fmt.Sprintf("unknown source: %s (use iacr, arxiv, or all)", p.Source), nil) + } +} + +// collectIACR fetches papers from the IACR ePrint archive. 
+func (p *PapersCollector) collectIACR(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: "papers:iacr"} + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "iacr"); err != nil { + return result, err + } + } + + searchURL := fmt.Sprintf("https://eprint.iacr.org/search?q=%s", url.QueryEscape(p.Query)) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil) + if err != nil { + return result, core.E("collect.Papers.collectIACR", "failed to create request", err) + } + req.Header.Set("User-Agent", "CoreCollector/1.0") + + resp, err := httpClient.Do(req) + if err != nil { + return result, core.E("collect.Papers.collectIACR", "request failed", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return result, core.E("collect.Papers.collectIACR", + fmt.Sprintf("unexpected status code: %d", resp.StatusCode), nil) + } + + doc, err := html.Parse(resp.Body) + if err != nil { + return result, core.E("collect.Papers.collectIACR", "failed to parse HTML", err) + } + + papers := extractIACRPapers(doc) + + baseDir := filepath.Join(cfg.OutputDir, "papers", "iacr") + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.Papers.collectIACR", "failed to create output directory", err) + } + + for _, ppr := range papers { + filePath := filepath.Join(baseDir, ppr.ID+".md") + content := formatPaperMarkdown(ppr) + + if err := cfg.Output.Write(filePath, content); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, filePath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Paper: %s", ppr.Title), nil) + } + } + + return result, nil +} + +// arxivFeed represents the Atom feed returned by the arXiv API. 
+type arxivFeed struct { + XMLName xml.Name `xml:"feed"` + Entries []arxivEntry `xml:"entry"` +} + +type arxivEntry struct { + ID string `xml:"id"` + Title string `xml:"title"` + Summary string `xml:"summary"` + Published string `xml:"published"` + Authors []arxivAuthor `xml:"author"` + Links []arxivLink `xml:"link"` +} + +type arxivAuthor struct { + Name string `xml:"name"` +} + +type arxivLink struct { + Href string `xml:"href,attr"` + Rel string `xml:"rel,attr"` + Type string `xml:"type,attr"` +} + +// collectArXiv fetches papers from the arXiv API. +func (p *PapersCollector) collectArXiv(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: "papers:arxiv"} + + if cfg.Limiter != nil { + if err := cfg.Limiter.Wait(ctx, "arxiv"); err != nil { + return result, err + } + } + + query := url.QueryEscape(p.Query) + if p.Category != "" { + query = fmt.Sprintf("cat:%s+AND+%s", url.QueryEscape(p.Category), query) + } + + searchURL := fmt.Sprintf("https://export.arxiv.org/api/query?search_query=%s&max_results=50", query) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, searchURL, nil) + if err != nil { + return result, core.E("collect.Papers.collectArXiv", "failed to create request", err) + } + req.Header.Set("User-Agent", "CoreCollector/1.0") + + resp, err := httpClient.Do(req) + if err != nil { + return result, core.E("collect.Papers.collectArXiv", "request failed", err) + } + defer func() { _ = resp.Body.Close() }() + + if resp.StatusCode != http.StatusOK { + return result, core.E("collect.Papers.collectArXiv", + fmt.Sprintf("unexpected status code: %d", resp.StatusCode), nil) + } + + var feed arxivFeed + if err := xml.NewDecoder(resp.Body).Decode(&feed); err != nil { + return result, core.E("collect.Papers.collectArXiv", "failed to parse XML", err) + } + + baseDir := filepath.Join(cfg.OutputDir, "papers", "arxiv") + if err := cfg.Output.EnsureDir(baseDir); err != nil { + return result, core.E("collect.Papers.collectArXiv", 
"failed to create output directory", err) + } + + for _, entry := range feed.Entries { + ppr := arxivEntryToPaper(entry) + + filePath := filepath.Join(baseDir, ppr.ID+".md") + content := formatPaperMarkdown(ppr) + + if err := cfg.Output.Write(filePath, content); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, filePath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Paper: %s", ppr.Title), nil) + } + } + + return result, nil +} + +// arxivEntryToPaper converts an arXiv Atom entry to a paper. +func arxivEntryToPaper(entry arxivEntry) paper { + authors := make([]string, len(entry.Authors)) + for i, a := range entry.Authors { + authors[i] = a.Name + } + + // Extract the arXiv ID from the URL + id := entry.ID + if idx := strings.LastIndex(id, "/abs/"); idx != -1 { + id = id[idx+5:] + } + // Replace characters that are not valid in file names + id = strings.ReplaceAll(id, "/", "-") + id = strings.ReplaceAll(id, ":", "-") + + paperURL := entry.ID + for _, link := range entry.Links { + if link.Rel == "alternate" { + paperURL = link.Href + break + } + } + + return paper{ + ID: id, + Title: strings.TrimSpace(entry.Title), + Authors: authors, + Abstract: strings.TrimSpace(entry.Summary), + Date: entry.Published, + URL: paperURL, + Source: "arxiv", + } +} + +// extractIACRPapers extracts paper metadata from an IACR search results page. +func extractIACRPapers(doc *html.Node) []paper { + var papers []paper + var walk func(*html.Node) + + walk = func(n *html.Node) { + if n.Type == html.ElementNode && n.Data == "div" { + for _, attr := range n.Attr { + if attr.Key == "class" && strings.Contains(attr.Val, "paperentry") { + ppr := parseIACREntry(n) + if ppr.Title != "" { + papers = append(papers, ppr) + } + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + walk(c) + } + } + + walk(doc) + return papers +} + +// parseIACREntry extracts paper data from an IACR paper entry div. 
+func parseIACREntry(node *html.Node) paper { + ppr := paper{Source: "iacr"} + var walk func(*html.Node) + + walk = func(n *html.Node) { + if n.Type == html.ElementNode { + switch n.Data { + case "a": + for _, attr := range n.Attr { + if attr.Key == "href" && strings.Contains(attr.Val, "/eprint/") { + ppr.URL = "https://eprint.iacr.org" + attr.Val + // Extract ID from URL + parts := strings.Split(attr.Val, "/") + if len(parts) >= 2 { + ppr.ID = parts[len(parts)-2] + "-" + parts[len(parts)-1] + } + } + } + if ppr.Title == "" { + ppr.Title = strings.TrimSpace(extractText(n)) + } + case "span": + for _, attr := range n.Attr { + if attr.Key == "class" { + switch { + case strings.Contains(attr.Val, "author"): + author := strings.TrimSpace(extractText(n)) + if author != "" { + ppr.Authors = append(ppr.Authors, author) + } + case strings.Contains(attr.Val, "date"): + ppr.Date = strings.TrimSpace(extractText(n)) + } + } + } + case "p": + for _, attr := range n.Attr { + if attr.Key == "class" && strings.Contains(attr.Val, "abstract") { + ppr.Abstract = strings.TrimSpace(extractText(n)) + } + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + walk(c) + } + } + + walk(node) + return ppr +} + +// formatPaperMarkdown formats a paper as markdown. +func formatPaperMarkdown(ppr paper) string { + var b strings.Builder + fmt.Fprintf(&b, "# %s\n\n", ppr.Title) + + if len(ppr.Authors) > 0 { + fmt.Fprintf(&b, "- **Authors:** %s\n", strings.Join(ppr.Authors, ", ")) + } + if ppr.Date != "" { + fmt.Fprintf(&b, "- **Published:** %s\n", ppr.Date) + } + if ppr.URL != "" { + fmt.Fprintf(&b, "- **URL:** %s\n", ppr.URL) + } + if ppr.Source != "" { + fmt.Fprintf(&b, "- **Source:** %s\n", ppr.Source) + } + + if ppr.Abstract != "" { + fmt.Fprintf(&b, "\n## Abstract\n\n%s\n", ppr.Abstract) + } + + return b.String() +} + +// FormatPaperMarkdown is exported for testing. 
+func FormatPaperMarkdown(title string, authors []string, date, paperURL, source, abstract string) string { + return formatPaperMarkdown(paper{ + Title: title, + Authors: authors, + Date: date, + URL: paperURL, + Source: source, + Abstract: abstract, + }) +} diff --git a/pkg/collect/papers_test.go b/pkg/collect/papers_test.go new file mode 100644 index 00000000..8a9f19fe --- /dev/null +++ b/pkg/collect/papers_test.go @@ -0,0 +1,108 @@ +package collect + +import ( + "context" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestPapersCollector_Name_Good(t *testing.T) { + p := &PapersCollector{Source: PaperSourceIACR} + assert.Equal(t, "papers:iacr", p.Name()) +} + +func TestPapersCollector_Name_Good_ArXiv(t *testing.T) { + p := &PapersCollector{Source: PaperSourceArXiv} + assert.Equal(t, "papers:arxiv", p.Name()) +} + +func TestPapersCollector_Name_Good_All(t *testing.T) { + p := &PapersCollector{Source: PaperSourceAll} + assert.Equal(t, "papers:all", p.Name()) +} + +func TestPapersCollector_Collect_Bad_NoQuery(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + p := &PapersCollector{Source: PaperSourceIACR} + _, err := p.Collect(context.Background(), cfg) + assert.Error(t, err) +} + +func TestPapersCollector_Collect_Bad_UnknownSource(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + p := &PapersCollector{Source: "unknown", Query: "test"} + _, err := p.Collect(context.Background(), cfg) + assert.Error(t, err) +} + +func TestPapersCollector_Collect_Good_DryRun(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + p := &PapersCollector{Source: PaperSourceAll, Query: "cryptography"} + result, err := p.Collect(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestFormatPaperMarkdown_Good(t *testing.T) { + md := FormatPaperMarkdown( + 
"Zero-Knowledge Proofs Revisited", + []string{"Alice", "Bob"}, + "2025-01-15", + "https://eprint.iacr.org/2025/001", + "iacr", + "We present a new construction for zero-knowledge proofs.", + ) + + assert.Contains(t, md, "# Zero-Knowledge Proofs Revisited") + assert.Contains(t, md, "**Authors:** Alice, Bob") + assert.Contains(t, md, "**Published:** 2025-01-15") + assert.Contains(t, md, "**URL:** https://eprint.iacr.org/2025/001") + assert.Contains(t, md, "**Source:** iacr") + assert.Contains(t, md, "## Abstract") + assert.Contains(t, md, "zero-knowledge proofs") +} + +func TestFormatPaperMarkdown_Good_Minimal(t *testing.T) { + md := FormatPaperMarkdown("Title Only", nil, "", "", "", "") + + assert.Contains(t, md, "# Title Only") + assert.NotContains(t, md, "**Authors:**") + assert.NotContains(t, md, "## Abstract") +} + +func TestArxivEntryToPaper_Good(t *testing.T) { + entry := arxivEntry{ + ID: "http://arxiv.org/abs/2501.12345v1", + Title: " A Great Paper ", + Summary: " This paper presents... 
", + Published: "2025-01-15T00:00:00Z", + Authors: []arxivAuthor{ + {Name: "Alice"}, + {Name: "Bob"}, + }, + Links: []arxivLink{ + {Href: "http://arxiv.org/abs/2501.12345v1", Rel: "alternate"}, + {Href: "http://arxiv.org/pdf/2501.12345v1", Rel: "related", Type: "application/pdf"}, + }, + } + + ppr := arxivEntryToPaper(entry) + + assert.Equal(t, "2501.12345v1", ppr.ID) + assert.Equal(t, "A Great Paper", ppr.Title) + assert.Equal(t, "This paper presents...", ppr.Abstract) + assert.Equal(t, "2025-01-15T00:00:00Z", ppr.Date) + assert.Equal(t, []string{"Alice", "Bob"}, ppr.Authors) + assert.Equal(t, "http://arxiv.org/abs/2501.12345v1", ppr.URL) + assert.Equal(t, "arxiv", ppr.Source) +} diff --git a/pkg/collect/process.go b/pkg/collect/process.go new file mode 100644 index 00000000..f1a569fb --- /dev/null +++ b/pkg/collect/process.go @@ -0,0 +1,345 @@ +package collect + +import ( + "context" + "encoding/json" + "fmt" + "path/filepath" + "sort" + "strings" + + core "github.com/host-uk/core/pkg/framework/core" + "golang.org/x/net/html" +) + +// Processor converts collected data to clean markdown. +type Processor struct { + // Source identifies the data source directory to process. + Source string + + // Dir is the directory containing files to process. + Dir string +} + +// Name returns the processor name. +func (p *Processor) Name() string { + return fmt.Sprintf("process:%s", p.Source) +} + +// Process reads files from the source directory, converts HTML or JSON +// to clean markdown, and writes the results to the output directory. 
+func (p *Processor) Process(ctx context.Context, cfg *Config) (*Result, error) { + result := &Result{Source: p.Name()} + + if p.Dir == "" { + return result, core.E("collect.Processor.Process", "directory is required", nil) + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitStart(p.Name(), fmt.Sprintf("Processing files in %s", p.Dir)) + } + + if cfg.DryRun { + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitProgress(p.Name(), fmt.Sprintf("[dry-run] Would process files in %s", p.Dir), nil) + } + return result, nil + } + + entries, err := cfg.Output.List(p.Dir) + if err != nil { + return result, core.E("collect.Processor.Process", "failed to list directory", err) + } + + outputDir := filepath.Join(cfg.OutputDir, "processed", p.Source) + if err := cfg.Output.EnsureDir(outputDir); err != nil { + return result, core.E("collect.Processor.Process", "failed to create output directory", err) + } + + for _, entry := range entries { + if ctx.Err() != nil { + return result, core.E("collect.Processor.Process", "context cancelled", ctx.Err()) + } + + if entry.IsDir() { + continue + } + + name := entry.Name() + srcPath := filepath.Join(p.Dir, name) + + content, err := cfg.Output.Read(srcPath) + if err != nil { + result.Errors++ + continue + } + + var processed string + ext := strings.ToLower(filepath.Ext(name)) + + switch ext { + case ".html", ".htm": + processed, err = htmlToMarkdown(content) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(p.Name(), fmt.Sprintf("Failed to convert %s: %v", name, err), nil) + } + continue + } + case ".json": + processed, err = jsonToMarkdown(content) + if err != nil { + result.Errors++ + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitError(p.Name(), fmt.Sprintf("Failed to convert %s: %v", name, err), nil) + } + continue + } + case ".md": + // Already markdown, just clean up + processed = strings.TrimSpace(content) + default: + result.Skipped++ + continue + } + + // Write with .md extension + 
outName := strings.TrimSuffix(name, ext) + ".md" + outPath := filepath.Join(outputDir, outName) + + if err := cfg.Output.Write(outPath, processed); err != nil { + result.Errors++ + continue + } + + result.Items++ + result.Files = append(result.Files, outPath) + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitItem(p.Name(), fmt.Sprintf("Processed: %s", name), nil) + } + } + + if cfg.Dispatcher != nil { + cfg.Dispatcher.EmitComplete(p.Name(), fmt.Sprintf("Processed %d files", result.Items), result) + } + + return result, nil +} + +// htmlToMarkdown converts HTML content to clean markdown. +func htmlToMarkdown(content string) (string, error) { + doc, err := html.Parse(strings.NewReader(content)) + if err != nil { + return "", core.E("collect.htmlToMarkdown", "failed to parse HTML", err) + } + + var b strings.Builder + nodeToMarkdown(&b, doc, 0) + return strings.TrimSpace(b.String()), nil +} + +// nodeToMarkdown recursively converts an HTML node tree to markdown. +func nodeToMarkdown(b *strings.Builder, n *html.Node, depth int) { + switch n.Type { + case html.TextNode: + text := n.Data + if strings.TrimSpace(text) != "" { + b.WriteString(text) + } + case html.ElementNode: + switch n.Data { + case "h1": + b.WriteString("\n# ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "h2": + b.WriteString("\n## ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "h3": + b.WriteString("\n### ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "h4": + b.WriteString("\n#### ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "h5": + b.WriteString("\n##### ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "h6": + b.WriteString("\n###### ") + writeChildrenText(b, n) + b.WriteString("\n\n") + return + case "p": + b.WriteString("\n") + for c := n.FirstChild; c != nil; c = c.NextSibling { + nodeToMarkdown(b, c, depth) + } + b.WriteString("\n") + return + case "br": + b.WriteString("\n") + return + 
case "strong", "b": + b.WriteString("**") + writeChildrenText(b, n) + b.WriteString("**") + return + case "em", "i": + b.WriteString("*") + writeChildrenText(b, n) + b.WriteString("*") + return + case "code": + b.WriteString("`") + writeChildrenText(b, n) + b.WriteString("`") + return + case "pre": + b.WriteString("\n```\n") + writeChildrenText(b, n) + b.WriteString("\n```\n") + return + case "a": + var href string + for _, attr := range n.Attr { + if attr.Key == "href" { + href = attr.Val + } + } + text := getChildrenText(n) + if href != "" { + fmt.Fprintf(b, "[%s](%s)", text, href) + } else { + b.WriteString(text) + } + return + case "ul": + b.WriteString("\n") + case "ol": + b.WriteString("\n") + counter := 1 + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.ElementNode && c.Data == "li" { + fmt.Fprintf(b, "%d. ", counter) + for gc := c.FirstChild; gc != nil; gc = gc.NextSibling { + nodeToMarkdown(b, gc, depth+1) + } + b.WriteString("\n") + counter++ + } + } + return + case "li": + b.WriteString("- ") + for c := n.FirstChild; c != nil; c = c.NextSibling { + nodeToMarkdown(b, c, depth+1) + } + b.WriteString("\n") + return + case "blockquote": + b.WriteString("\n> ") + text := getChildrenText(n) + b.WriteString(strings.ReplaceAll(text, "\n", "\n> ")) + b.WriteString("\n") + return + case "hr": + b.WriteString("\n---\n") + return + case "script", "style", "head": + return + } + } + + for c := n.FirstChild; c != nil; c = c.NextSibling { + nodeToMarkdown(b, c, depth) + } +} + +// writeChildrenText writes the text content of all children. +func writeChildrenText(b *strings.Builder, n *html.Node) { + b.WriteString(getChildrenText(n)) +} + +// getChildrenText returns the concatenated text content of all children. 
+func getChildrenText(n *html.Node) string { + var b strings.Builder + for c := n.FirstChild; c != nil; c = c.NextSibling { + if c.Type == html.TextNode { + b.WriteString(c.Data) + } else { + b.WriteString(getChildrenText(c)) + } + } + return b.String() +} + +// jsonToMarkdown converts JSON content to a formatted markdown document. +func jsonToMarkdown(content string) (string, error) { + var data any + if err := json.Unmarshal([]byte(content), &data); err != nil { + return "", core.E("collect.jsonToMarkdown", "failed to parse JSON", err) + } + + var b strings.Builder + b.WriteString("# Data\n\n") + jsonValueToMarkdown(&b, data, 0) + return strings.TrimSpace(b.String()), nil +} + +// jsonValueToMarkdown recursively formats a JSON value as markdown. +func jsonValueToMarkdown(b *strings.Builder, data any, depth int) { + switch v := data.(type) { + case map[string]any: + keys := make([]string, 0, len(v)) + for key := range v { + keys = append(keys, key) + } + sort.Strings(keys) + for _, key := range keys { + val := v[key] + indent := strings.Repeat(" ", depth) + switch child := val.(type) { + case map[string]any, []any: + fmt.Fprintf(b, "%s- **%s:**\n", indent, key) + jsonValueToMarkdown(b, child, depth+1) + default: + fmt.Fprintf(b, "%s- **%s:** %v\n", indent, key, val) + } + } + case []any: + for i, item := range v { + indent := strings.Repeat(" ", depth) + switch child := item.(type) { + case map[string]any, []any: + fmt.Fprintf(b, "%s- Item %d:\n", indent, i+1) + jsonValueToMarkdown(b, child, depth+1) + default: + fmt.Fprintf(b, "%s- %v\n", indent, item) + } + } + default: + indent := strings.Repeat(" ", depth) + fmt.Fprintf(b, "%s%v\n", indent, data) + } +} + +// HTMLToMarkdown is exported for testing. +func HTMLToMarkdown(content string) (string, error) { + return htmlToMarkdown(content) +} + +// JSONToMarkdown is exported for testing. 
+func JSONToMarkdown(content string) (string, error) { + return jsonToMarkdown(content) +} diff --git a/pkg/collect/process_test.go b/pkg/collect/process_test.go new file mode 100644 index 00000000..239f2cda --- /dev/null +++ b/pkg/collect/process_test.go @@ -0,0 +1,201 @@ +package collect + +import ( + "context" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestProcessor_Name_Good(t *testing.T) { + p := &Processor{Source: "github"} + assert.Equal(t, "process:github", p.Name()) +} + +func TestProcessor_Process_Bad_NoDir(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + + p := &Processor{Source: "test"} + _, err := p.Process(context.Background(), cfg) + assert.Error(t, err) +} + +func TestProcessor_Process_Good_DryRun(t *testing.T) { + m := io.NewMockMedium() + cfg := NewConfigWithMedium(m, "/output") + cfg.DryRun = true + + p := &Processor{Source: "test", Dir: "/input"} + result, err := p.Process(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 0, result.Items) +} + +func TestProcessor_Process_Good_HTMLFiles(t *testing.T) { + m := io.NewMockMedium() + m.Dirs["/input"] = true + m.Files["/input/page.html"] = `

Hello

World

` + + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + p := &Processor{Source: "test", Dir: "/input"} + result, err := p.Process(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 1, result.Items) + assert.Len(t, result.Files, 1) + + content, err := m.Read("/output/processed/test/page.md") + assert.NoError(t, err) + assert.Contains(t, content, "# Hello") + assert.Contains(t, content, "World") +} + +func TestProcessor_Process_Good_JSONFiles(t *testing.T) { + m := io.NewMockMedium() + m.Dirs["/input"] = true + m.Files["/input/data.json"] = `{"name": "Bitcoin", "price": 42000}` + + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + p := &Processor{Source: "market", Dir: "/input"} + result, err := p.Process(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 1, result.Items) + + content, err := m.Read("/output/processed/market/data.md") + assert.NoError(t, err) + assert.Contains(t, content, "# Data") + assert.Contains(t, content, "Bitcoin") +} + +func TestProcessor_Process_Good_MarkdownPassthrough(t *testing.T) { + m := io.NewMockMedium() + m.Dirs["/input"] = true + m.Files["/input/readme.md"] = "# Already Markdown\n\nThis is already formatted." + + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + p := &Processor{Source: "docs", Dir: "/input"} + result, err := p.Process(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 1, result.Items) + + content, err := m.Read("/output/processed/docs/readme.md") + assert.NoError(t, err) + assert.Contains(t, content, "# Already Markdown") +} + +func TestProcessor_Process_Good_SkipUnknownTypes(t *testing.T) { + m := io.NewMockMedium() + m.Dirs["/input"] = true + m.Files["/input/image.png"] = "binary data" + m.Files["/input/doc.html"] = "

Heading

" + + cfg := NewConfigWithMedium(m, "/output") + cfg.Limiter = nil + + p := &Processor{Source: "mixed", Dir: "/input"} + result, err := p.Process(context.Background(), cfg) + + assert.NoError(t, err) + assert.Equal(t, 1, result.Items) // Only the HTML file + assert.Equal(t, 1, result.Skipped) // The PNG file +} + +func TestHTMLToMarkdown_Good(t *testing.T) { + tests := []struct { + name string + input string + contains []string + }{ + { + name: "heading", + input: "

Title

", + contains: []string{"# Title"}, + }, + { + name: "paragraph", + input: "

Hello world

", + contains: []string{"Hello world"}, + }, + { + name: "bold", + input: "

bold text

", + contains: []string{"**bold text**"}, + }, + { + name: "italic", + input: "

italic text

", + contains: []string{"*italic text*"}, + }, + { + name: "code", + input: "

code

", + contains: []string{"`code`"}, + }, + { + name: "link", + input: `

Example

`, + contains: []string{"[Example](https://example.com)"}, + }, + { + name: "nested headings", + input: "

Section

Subsection

", + contains: []string{"## Section", "### Subsection"}, + }, + { + name: "pre block", + input: "
func main() {}
", + contains: []string{"```", "func main() {}"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := HTMLToMarkdown(tt.input) + assert.NoError(t, err) + for _, s := range tt.contains { + assert.Contains(t, result, s) + } + }) + } +} + +func TestHTMLToMarkdown_Good_StripsScripts(t *testing.T) { + input := `

Clean

` + result, err := HTMLToMarkdown(input) + assert.NoError(t, err) + assert.Contains(t, result, "Clean") + assert.NotContains(t, result, "alert") + assert.NotContains(t, result, "script") +} + +func TestJSONToMarkdown_Good(t *testing.T) { + input := `{"name": "test", "count": 42}` + result, err := JSONToMarkdown(input) + assert.NoError(t, err) + assert.Contains(t, result, "# Data") + assert.Contains(t, result, "test") + assert.Contains(t, result, "42") +} + +func TestJSONToMarkdown_Good_Array(t *testing.T) { + input := `[{"id": 1}, {"id": 2}]` + result, err := JSONToMarkdown(input) + assert.NoError(t, err) + assert.Contains(t, result, "# Data") +} + +func TestJSONToMarkdown_Bad_InvalidJSON(t *testing.T) { + _, err := JSONToMarkdown("not json") + assert.Error(t, err) +} diff --git a/pkg/collect/ratelimit.go b/pkg/collect/ratelimit.go new file mode 100644 index 00000000..89ab9013 --- /dev/null +++ b/pkg/collect/ratelimit.go @@ -0,0 +1,130 @@ +package collect + +import ( + "context" + "fmt" + "os/exec" + "strconv" + "strings" + "sync" + "time" + + core "github.com/host-uk/core/pkg/framework/core" +) + +// RateLimiter tracks per-source rate limiting to avoid overwhelming APIs. +type RateLimiter struct { + mu sync.Mutex + delays map[string]time.Duration + last map[string]time.Time +} + +// Default rate limit delays per source. +var defaultDelays = map[string]time.Duration{ + "github": 500 * time.Millisecond, + "bitcointalk": 2 * time.Second, + "coingecko": 1500 * time.Millisecond, + "iacr": 1 * time.Second, + "arxiv": 1 * time.Second, +} + +// NewRateLimiter creates a limiter with default delays. +func NewRateLimiter() *RateLimiter { + delays := make(map[string]time.Duration, len(defaultDelays)) + for k, v := range defaultDelays { + delays[k] = v + } + return &RateLimiter{ + delays: delays, + last: make(map[string]time.Time), + } +} + +// Wait blocks until the rate limit allows the next request for the given source. +// It respects context cancellation. 
+func (r *RateLimiter) Wait(ctx context.Context, source string) error { + r.mu.Lock() + delay, ok := r.delays[source] + if !ok { + delay = 500 * time.Millisecond + } + lastTime := r.last[source] + + elapsed := time.Since(lastTime) + if elapsed >= delay { + // Enough time has passed — claim the slot immediately. + r.last[source] = time.Now() + r.mu.Unlock() + return nil + } + + remaining := delay - elapsed + r.mu.Unlock() + + // Wait outside the lock, then reclaim. + select { + case <-ctx.Done(): + return core.E("collect.RateLimiter.Wait", "context cancelled", ctx.Err()) + case <-time.After(remaining): + } + + r.mu.Lock() + r.last[source] = time.Now() + r.mu.Unlock() + + return nil +} + +// SetDelay sets the delay for a source. +func (r *RateLimiter) SetDelay(source string, d time.Duration) { + r.mu.Lock() + defer r.mu.Unlock() + r.delays[source] = d +} + +// GetDelay returns the delay configured for a source. +func (r *RateLimiter) GetDelay(source string) time.Duration { + r.mu.Lock() + defer r.mu.Unlock() + if d, ok := r.delays[source]; ok { + return d + } + return 500 * time.Millisecond +} + +// CheckGitHubRateLimit checks GitHub API rate limit status via gh api. +// Returns used and limit counts. Auto-pauses at 75% usage by increasing +// the GitHub rate limit delay. 
+func (r *RateLimiter) CheckGitHubRateLimit() (used, limit int, err error) { + cmd := exec.Command("gh", "api", "rate_limit", "--jq", ".rate | \"\\(.used) \\(.limit)\"") + out, err := cmd.Output() + if err != nil { + return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to check rate limit", err) + } + + parts := strings.Fields(strings.TrimSpace(string(out))) + if len(parts) != 2 { + return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", + fmt.Sprintf("unexpected output format: %q", string(out)), nil) + } + + used, err = strconv.Atoi(parts[0]) + if err != nil { + return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to parse used count", err) + } + + limit, err = strconv.Atoi(parts[1]) + if err != nil { + return 0, 0, core.E("collect.RateLimiter.CheckGitHubRateLimit", "failed to parse limit count", err) + } + + // Auto-pause at 75% usage + if limit > 0 { + usage := float64(used) / float64(limit) + if usage >= 0.75 { + r.SetDelay("github", 5*time.Second) + } + } + + return used, limit, nil +} diff --git a/pkg/collect/ratelimit_test.go b/pkg/collect/ratelimit_test.go new file mode 100644 index 00000000..778d36da --- /dev/null +++ b/pkg/collect/ratelimit_test.go @@ -0,0 +1,84 @@ +package collect + +import ( + "context" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestRateLimiter_Wait_Good(t *testing.T) { + rl := NewRateLimiter() + rl.SetDelay("test", 50*time.Millisecond) + + ctx := context.Background() + + // First call should return immediately + start := time.Now() + err := rl.Wait(ctx, "test") + assert.NoError(t, err) + assert.Less(t, time.Since(start), 50*time.Millisecond) + + // Second call should wait at least the delay + start = time.Now() + err = rl.Wait(ctx, "test") + assert.NoError(t, err) + assert.GreaterOrEqual(t, time.Since(start), 40*time.Millisecond) // allow small timing variance +} + +func TestRateLimiter_Wait_Bad_ContextCancelled(t *testing.T) { + rl := NewRateLimiter() + 
rl.SetDelay("test", 5*time.Second) + + ctx := context.Background() + + // First call to set the last time + err := rl.Wait(ctx, "test") + assert.NoError(t, err) + + // Cancel context before second call + ctx, cancel := context.WithCancel(context.Background()) + cancel() + + err = rl.Wait(ctx, "test") + assert.Error(t, err) +} + +func TestRateLimiter_SetDelay_Good(t *testing.T) { + rl := NewRateLimiter() + rl.SetDelay("custom", 3*time.Second) + assert.Equal(t, 3*time.Second, rl.GetDelay("custom")) +} + +func TestRateLimiter_GetDelay_Good_Defaults(t *testing.T) { + rl := NewRateLimiter() + + assert.Equal(t, 500*time.Millisecond, rl.GetDelay("github")) + assert.Equal(t, 2*time.Second, rl.GetDelay("bitcointalk")) + assert.Equal(t, 1500*time.Millisecond, rl.GetDelay("coingecko")) + assert.Equal(t, 1*time.Second, rl.GetDelay("iacr")) +} + +func TestRateLimiter_GetDelay_Good_UnknownSource(t *testing.T) { + rl := NewRateLimiter() + // Unknown sources should get the default 500ms delay + assert.Equal(t, 500*time.Millisecond, rl.GetDelay("unknown")) +} + +func TestRateLimiter_Wait_Good_UnknownSource(t *testing.T) { + rl := NewRateLimiter() + ctx := context.Background() + + // Unknown source should use default delay of 500ms + err := rl.Wait(ctx, "unknown-source") + assert.NoError(t, err) +} + +func TestNewRateLimiter_Good(t *testing.T) { + rl := NewRateLimiter() + assert.NotNil(t, rl) + assert.NotNil(t, rl.delays) + assert.NotNil(t, rl.last) + assert.Len(t, rl.delays, len(defaultDelays)) +} diff --git a/pkg/collect/state.go b/pkg/collect/state.go new file mode 100644 index 00000000..68a52ffd --- /dev/null +++ b/pkg/collect/state.go @@ -0,0 +1,113 @@ +package collect + +import ( + "encoding/json" + "sync" + "time" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// State tracks collection progress for incremental runs. +// It persists entries to disk so that subsequent runs can resume +// where they left off. 
+type State struct { + mu sync.Mutex + medium io.Medium + path string + entries map[string]*StateEntry +} + +// StateEntry tracks state for one source. +type StateEntry struct { + // Source identifies the collector. + Source string `json:"source"` + + // LastRun is the timestamp of the last successful run. + LastRun time.Time `json:"last_run"` + + // LastID is an opaque identifier for the last item processed. + LastID string `json:"last_id,omitempty"` + + // Items is the total number of items collected so far. + Items int `json:"items"` + + // Cursor is an opaque pagination cursor for resumption. + Cursor string `json:"cursor,omitempty"` +} + +// NewState creates a state tracker that persists to the given path +// using the provided storage medium. +func NewState(m io.Medium, path string) *State { + return &State{ + medium: m, + path: path, + entries: make(map[string]*StateEntry), + } +} + +// Load reads state from disk. If the file does not exist, the state +// is initialised as empty without error. +func (s *State) Load() error { + s.mu.Lock() + defer s.mu.Unlock() + + if !s.medium.IsFile(s.path) { + return nil + } + + data, err := s.medium.Read(s.path) + if err != nil { + return core.E("collect.State.Load", "failed to read state file", err) + } + + var entries map[string]*StateEntry + if err := json.Unmarshal([]byte(data), &entries); err != nil { + return core.E("collect.State.Load", "failed to parse state file", err) + } + + if entries == nil { + entries = make(map[string]*StateEntry) + } + s.entries = entries + return nil +} + +// Save writes state to disk. 
+func (s *State) Save() error { + s.mu.Lock() + defer s.mu.Unlock() + + data, err := json.MarshalIndent(s.entries, "", " ") + if err != nil { + return core.E("collect.State.Save", "failed to marshal state", err) + } + + if err := s.medium.Write(s.path, string(data)); err != nil { + return core.E("collect.State.Save", "failed to write state file", err) + } + + return nil +} + +// Get returns a copy of the state for a source. The second return value +// indicates whether the entry was found. +func (s *State) Get(source string) (*StateEntry, bool) { + s.mu.Lock() + defer s.mu.Unlock() + entry, ok := s.entries[source] + if !ok { + return nil, false + } + // Return a copy to avoid callers mutating internal state. + cp := *entry + return &cp, true +} + +// Set updates state for a source. +func (s *State) Set(source string, entry *StateEntry) { + s.mu.Lock() + defer s.mu.Unlock() + s.entries[source] = entry +} diff --git a/pkg/collect/state_test.go b/pkg/collect/state_test.go new file mode 100644 index 00000000..5a83b3ec --- /dev/null +++ b/pkg/collect/state_test.go @@ -0,0 +1,144 @@ +package collect + +import ( + "testing" + "time" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestState_SetGet_Good(t *testing.T) { + m := io.NewMockMedium() + s := NewState(m, "/state.json") + + entry := &StateEntry{ + Source: "github:test", + LastRun: time.Now(), + Items: 42, + LastID: "abc123", + Cursor: "cursor-xyz", + } + + s.Set("github:test", entry) + + got, ok := s.Get("github:test") + assert.True(t, ok) + assert.Equal(t, entry.Source, got.Source) + assert.Equal(t, entry.Items, got.Items) + assert.Equal(t, entry.LastID, got.LastID) + assert.Equal(t, entry.Cursor, got.Cursor) +} + +func TestState_Get_Bad(t *testing.T) { + m := io.NewMockMedium() + s := NewState(m, "/state.json") + + got, ok := s.Get("nonexistent") + assert.False(t, ok) + assert.Nil(t, got) +} + +func TestState_SaveLoad_Good(t *testing.T) { + m := io.NewMockMedium() + s := 
NewState(m, "/state.json") + + now := time.Date(2025, 1, 15, 10, 30, 0, 0, time.UTC) + entry := &StateEntry{ + Source: "market:bitcoin", + LastRun: now, + Items: 100, + LastID: "btc-100", + } + + s.Set("market:bitcoin", entry) + + // Save state + err := s.Save() + assert.NoError(t, err) + + // Verify file was written + assert.True(t, m.IsFile("/state.json")) + + // Load into a new state instance + s2 := NewState(m, "/state.json") + err = s2.Load() + assert.NoError(t, err) + + got, ok := s2.Get("market:bitcoin") + assert.True(t, ok) + assert.Equal(t, "market:bitcoin", got.Source) + assert.Equal(t, 100, got.Items) + assert.Equal(t, "btc-100", got.LastID) + assert.True(t, now.Equal(got.LastRun)) +} + +func TestState_Load_Good_NoFile(t *testing.T) { + m := io.NewMockMedium() + s := NewState(m, "/nonexistent.json") + + // Loading when no file exists should not error + err := s.Load() + assert.NoError(t, err) + + // State should be empty + _, ok := s.Get("anything") + assert.False(t, ok) +} + +func TestState_Load_Bad_InvalidJSON(t *testing.T) { + m := io.NewMockMedium() + m.Files["/state.json"] = "not valid json" + + s := NewState(m, "/state.json") + err := s.Load() + assert.Error(t, err) +} + +func TestState_SaveLoad_Good_MultipleEntries(t *testing.T) { + m := io.NewMockMedium() + s := NewState(m, "/state.json") + + s.Set("source-a", &StateEntry{Source: "source-a", Items: 10}) + s.Set("source-b", &StateEntry{Source: "source-b", Items: 20}) + s.Set("source-c", &StateEntry{Source: "source-c", Items: 30}) + + err := s.Save() + assert.NoError(t, err) + + s2 := NewState(m, "/state.json") + err = s2.Load() + assert.NoError(t, err) + + a, ok := s2.Get("source-a") + assert.True(t, ok) + assert.Equal(t, 10, a.Items) + + b, ok := s2.Get("source-b") + assert.True(t, ok) + assert.Equal(t, 20, b.Items) + + c, ok := s2.Get("source-c") + assert.True(t, ok) + assert.Equal(t, 30, c.Items) +} + +func TestState_Set_Good_Overwrite(t *testing.T) { + m := io.NewMockMedium() + s := 
NewState(m, "/state.json") + + s.Set("source", &StateEntry{Source: "source", Items: 5}) + s.Set("source", &StateEntry{Source: "source", Items: 15}) + + got, ok := s.Get("source") + assert.True(t, ok) + assert.Equal(t, 15, got.Items) +} + +func TestNewState_Good(t *testing.T) { + m := io.NewMockMedium() + s := NewState(m, "/test/state.json") + + assert.NotNil(t, s) + assert.NotNil(t, s.entries) +} diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 00000000..15c4e97b --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,254 @@ +// Package config provides layered configuration management for the Core framework. +// +// Configuration values are resolved in priority order: defaults -> file -> env -> flags. +// Values are stored in a YAML file at ~/.core/config.yaml by default. +// +// Keys use dot notation for nested access: +// +// cfg.Set("dev.editor", "vim") +// var editor string +// cfg.Get("dev.editor", &editor) +package config + +import ( + "fmt" + "os" + "strings" + "sync" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// Config implements the core.Config interface with layered resolution. +// Values are resolved in order: defaults -> file -> env -> flags. +type Config struct { + mu sync.RWMutex + medium io.Medium + path string + data map[string]any +} + +// Option is a functional option for configuring a Config instance. +type Option func(*Config) + +// WithMedium sets the storage medium for configuration file operations. +func WithMedium(m io.Medium) Option { + return func(c *Config) { + c.medium = m + } +} + +// WithPath sets the path to the configuration file. +func WithPath(path string) Option { + return func(c *Config) { + c.path = path + } +} + +// New creates a new Config instance with the given options. +// If no medium is provided, it defaults to io.Local. +// If no path is provided, it defaults to ~/.core/config.yaml. 
+func New(opts ...Option) (*Config, error) { + c := &Config{ + data: make(map[string]any), + } + + for _, opt := range opts { + opt(c) + } + + if c.medium == nil { + c.medium = io.Local + } + + if c.path == "" { + home, err := os.UserHomeDir() + if err != nil { + return nil, core.E("config.New", "failed to determine home directory", err) + } + c.path = home + "/.core/config.yaml" + } + + // Load existing config file if it exists + if c.medium.IsFile(c.path) { + loaded, err := Load(c.medium, c.path) + if err != nil { + return nil, core.E("config.New", "failed to load config file", err) + } + c.data = loaded + } + + // Overlay environment variables + envData := LoadEnv("CORE_CONFIG_") + for k, v := range envData { + setNested(c.data, k, v) + } + + return c, nil +} + +// Get retrieves a configuration value by dot-notation key and stores it in out. +// The out parameter must be a pointer to the target type. +// Returns an error if the key is not found. +func (c *Config) Get(key string, out any) error { + c.mu.RLock() + defer c.mu.RUnlock() + + val, ok := getNested(c.data, key) + if !ok { + return core.E("config.Get", fmt.Sprintf("key not found: %s", key), nil) + } + + return assign(val, out) +} + +// Set stores a configuration value by dot-notation key and persists to disk. +func (c *Config) Set(key string, v any) error { + c.mu.Lock() + defer c.mu.Unlock() + + setNested(c.data, key, v) + + if err := Save(c.medium, c.path, c.data); err != nil { + return core.E("config.Set", "failed to save config", err) + } + + return nil +} + +// All returns a deep copy of all configuration values. +func (c *Config) All() map[string]any { + c.mu.RLock() + defer c.mu.RUnlock() + + return deepCopyMap(c.data) +} + +// deepCopyMap recursively copies a map[string]any. 
+func deepCopyMap(src map[string]any) map[string]any { + result := make(map[string]any, len(src)) + for k, v := range src { + switch val := v.(type) { + case map[string]any: + result[k] = deepCopyMap(val) + case []any: + cp := make([]any, len(val)) + copy(cp, val) + result[k] = cp + default: + result[k] = v + } + } + return result +} + +// Path returns the path to the configuration file. +func (c *Config) Path() string { + return c.path +} + +// getNested retrieves a value from a nested map using dot-notation keys. +func getNested(data map[string]any, key string) (any, bool) { + parts := strings.Split(key, ".") + current := any(data) + + for i, part := range parts { + m, ok := current.(map[string]any) + if !ok { + return nil, false + } + val, exists := m[part] + if !exists { + return nil, false + } + if i == len(parts)-1 { + return val, true + } + current = val + } + + return nil, false +} + +// setNested sets a value in a nested map using dot-notation keys, +// creating intermediate maps as needed. +func setNested(data map[string]any, key string, value any) { + parts := strings.Split(key, ".") + current := data + + for i, part := range parts { + if i == len(parts)-1 { + current[part] = value + return + } + next, ok := current[part] + if !ok { + next = make(map[string]any) + current[part] = next + } + m, ok := next.(map[string]any) + if !ok { + m = make(map[string]any) + current[part] = m + } + current = m + } +} + +// assign sets the value of out to val, handling type conversions. 
+func assign(val any, out any) error { + switch ptr := out.(type) { + case *string: + switch v := val.(type) { + case string: + *ptr = v + default: + *ptr = fmt.Sprintf("%v", v) + } + case *int: + switch v := val.(type) { + case int: + *ptr = v + case float64: + *ptr = int(v) + case int64: + *ptr = int(v) + default: + return core.E("config.assign", fmt.Sprintf("cannot assign %T to *int", val), nil) + } + case *bool: + switch v := val.(type) { + case bool: + *ptr = v + default: + return core.E("config.assign", fmt.Sprintf("cannot assign %T to *bool", val), nil) + } + case *float64: + switch v := val.(type) { + case float64: + *ptr = v + case int: + *ptr = float64(v) + case int64: + *ptr = float64(v) + default: + return core.E("config.assign", fmt.Sprintf("cannot assign %T to *float64", val), nil) + } + case *any: + *ptr = val + case *map[string]any: + switch v := val.(type) { + case map[string]any: + *ptr = v + default: + return core.E("config.assign", fmt.Sprintf("cannot assign %T to *map[string]any", val), nil) + } + default: + return core.E("config.assign", fmt.Sprintf("unsupported target type: %T", out), nil) + } + return nil +} + +// Ensure Config implements core.Config at compile time. 
+var _ core.Config = (*Config)(nil) diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go new file mode 100644 index 00000000..73d0b9b9 --- /dev/null +++ b/pkg/config/config_test.go @@ -0,0 +1,227 @@ +package config + +import ( + "os" + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestConfig_Get_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + err = cfg.Set("app.name", "core") + assert.NoError(t, err) + + var name string + err = cfg.Get("app.name", &name) + assert.NoError(t, err) + assert.Equal(t, "core", name) +} + +func TestConfig_Get_Bad(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + var value string + err = cfg.Get("nonexistent.key", &value) + assert.Error(t, err) + assert.Contains(t, err.Error(), "key not found") +} + +func TestConfig_Set_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + err = cfg.Set("dev.editor", "vim") + assert.NoError(t, err) + + // Verify the value was saved to the medium + content, readErr := m.Read("/tmp/test/config.yaml") + assert.NoError(t, readErr) + assert.Contains(t, content, "editor: vim") + + // Verify we can read it back + var editor string + err = cfg.Get("dev.editor", &editor) + assert.NoError(t, err) + assert.Equal(t, "vim", editor) +} + +func TestConfig_Set_Nested_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + err = cfg.Set("a.b.c", "deep") + assert.NoError(t, err) + + var val string + err = cfg.Get("a.b.c", &val) + assert.NoError(t, err) + assert.Equal(t, "deep", val) +} + +func TestConfig_All_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), 
WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + _ = cfg.Set("key1", "val1") + _ = cfg.Set("key2", "val2") + + all := cfg.All() + assert.Equal(t, "val1", all["key1"]) + assert.Equal(t, "val2", all["key2"]) +} + +func TestConfig_Path_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/custom/path/config.yaml")) + assert.NoError(t, err) + + assert.Equal(t, "/custom/path/config.yaml", cfg.Path()) +} + +func TestConfig_Load_Existing_Good(t *testing.T) { + m := io.NewMockMedium() + m.Files["/tmp/test/config.yaml"] = "app:\n name: existing\n" + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + var name string + err = cfg.Get("app.name", &name) + assert.NoError(t, err) + assert.Equal(t, "existing", name) +} + +func TestConfig_Env_Good(t *testing.T) { + // Set environment variable + t.Setenv("CORE_CONFIG_DEV_EDITOR", "nano") + + m := io.NewMockMedium() + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + var editor string + err = cfg.Get("dev.editor", &editor) + assert.NoError(t, err) + assert.Equal(t, "nano", editor) +} + +func TestConfig_Env_Overrides_File_Good(t *testing.T) { + // Set file config + m := io.NewMockMedium() + m.Files["/tmp/test/config.yaml"] = "dev:\n editor: vim\n" + + // Set environment override + t.Setenv("CORE_CONFIG_DEV_EDITOR", "nano") + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + var editor string + err = cfg.Get("dev.editor", &editor) + assert.NoError(t, err) + assert.Equal(t, "nano", editor) +} + +func TestConfig_Assign_Types_Good(t *testing.T) { + m := io.NewMockMedium() + m.Files["/tmp/test/config.yaml"] = "count: 42\nenabled: true\nratio: 3.14\n" + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + var count int + err = cfg.Get("count", &count) + assert.NoError(t, err) + assert.Equal(t, 42, count) + + 
var enabled bool + err = cfg.Get("enabled", &enabled) + assert.NoError(t, err) + assert.True(t, enabled) + + var ratio float64 + err = cfg.Get("ratio", &ratio) + assert.NoError(t, err) + assert.InDelta(t, 3.14, ratio, 0.001) +} + +func TestConfig_Assign_Any_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m), WithPath("/tmp/test/config.yaml")) + assert.NoError(t, err) + + _ = cfg.Set("key", "value") + + var val any + err = cfg.Get("key", &val) + assert.NoError(t, err) + assert.Equal(t, "value", val) +} + +func TestConfig_DefaultPath_Good(t *testing.T) { + m := io.NewMockMedium() + + cfg, err := New(WithMedium(m)) + assert.NoError(t, err) + + home, _ := os.UserHomeDir() + assert.Equal(t, home+"/.core/config.yaml", cfg.Path()) +} + +func TestLoadEnv_Good(t *testing.T) { + t.Setenv("CORE_CONFIG_FOO_BAR", "baz") + t.Setenv("CORE_CONFIG_SIMPLE", "value") + + result := LoadEnv("CORE_CONFIG_") + assert.Equal(t, "baz", result["foo.bar"]) + assert.Equal(t, "value", result["simple"]) +} + +func TestLoad_Bad(t *testing.T) { + m := io.NewMockMedium() + + _, err := Load(m, "/nonexistent/file.yaml") + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to read config file") +} + +func TestLoad_InvalidYAML_Bad(t *testing.T) { + m := io.NewMockMedium() + m.Files["/tmp/test/config.yaml"] = "invalid: yaml: content: [[[[" + + _, err := Load(m, "/tmp/test/config.yaml") + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse config file") +} + +func TestSave_Good(t *testing.T) { + m := io.NewMockMedium() + + data := map[string]any{ + "key": "value", + } + + err := Save(m, "/tmp/test/config.yaml", data) + assert.NoError(t, err) + + content, readErr := m.Read("/tmp/test/config.yaml") + assert.NoError(t, readErr) + assert.Contains(t, content, "key: value") +} diff --git a/pkg/config/env.go b/pkg/config/env.go new file mode 100644 index 00000000..711e3ece --- /dev/null +++ b/pkg/config/env.go @@ -0,0 +1,40 @@ +package config + 
+import ( + "os" + "strings" +) + +// LoadEnv parses environment variables with the given prefix and returns +// them as a flat map with dot-notation keys. +// +// For example, with prefix "CORE_CONFIG_": +// +// CORE_CONFIG_FOO_BAR=baz -> {"foo.bar": "baz"} +// CORE_CONFIG_EDITOR=vim -> {"editor": "vim"} +func LoadEnv(prefix string) map[string]any { + result := make(map[string]any) + + for _, env := range os.Environ() { + if !strings.HasPrefix(env, prefix) { + continue + } + + parts := strings.SplitN(env, "=", 2) + if len(parts) != 2 { + continue + } + + name := parts[0] + value := parts[1] + + // Strip prefix and convert to dot notation + key := strings.TrimPrefix(name, prefix) + key = strings.ToLower(key) + key = strings.ReplaceAll(key, "_", ".") + + result[key] = value + } + + return result +} diff --git a/pkg/config/loader.go b/pkg/config/loader.go new file mode 100644 index 00000000..628abfcb --- /dev/null +++ b/pkg/config/loader.go @@ -0,0 +1,45 @@ +package config + +import ( + "path/filepath" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" + "gopkg.in/yaml.v3" +) + +// Load reads a YAML configuration file from the given medium and path. +// Returns the parsed data as a map, or an error if the file cannot be read or parsed. +func Load(m io.Medium, path string) (map[string]any, error) { + content, err := m.Read(path) + if err != nil { + return nil, core.E("config.Load", "failed to read config file: "+path, err) + } + + data := make(map[string]any) + if err := yaml.Unmarshal([]byte(content), &data); err != nil { + return nil, core.E("config.Load", "failed to parse config file: "+path, err) + } + + return data, nil +} + +// Save writes configuration data to a YAML file at the given path. +// It ensures the parent directory exists before writing. 
+func Save(m io.Medium, path string, data map[string]any) error { + out, err := yaml.Marshal(data) + if err != nil { + return core.E("config.Save", "failed to marshal config", err) + } + + dir := filepath.Dir(path) + if err := m.EnsureDir(dir); err != nil { + return core.E("config.Save", "failed to create config directory: "+dir, err) + } + + if err := m.Write(path, string(out)); err != nil { + return core.E("config.Save", "failed to write config file: "+path, err) + } + + return nil +} diff --git a/pkg/config/service.go b/pkg/config/service.go new file mode 100644 index 00000000..9f92aa4a --- /dev/null +++ b/pkg/config/service.go @@ -0,0 +1,74 @@ +package config + +import ( + "context" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// Service wraps Config as a framework service with lifecycle support. +type Service struct { + *core.ServiceRuntime[ServiceOptions] + config *Config +} + +// ServiceOptions holds configuration for the config service. +type ServiceOptions struct { + // Path overrides the default config file path. + Path string + // Medium overrides the default storage medium. + Medium io.Medium +} + +// NewConfigService creates a new config service factory for the Core framework. +// Register it with core.WithService(config.NewConfigService). +func NewConfigService(c *core.Core) (any, error) { + svc := &Service{ + ServiceRuntime: core.NewServiceRuntime(c, ServiceOptions{}), + } + return svc, nil +} + +// OnStartup loads the configuration file during application startup. +func (s *Service) OnStartup(_ context.Context) error { + opts := s.Opts() + + var configOpts []Option + if opts.Path != "" { + configOpts = append(configOpts, WithPath(opts.Path)) + } + if opts.Medium != nil { + configOpts = append(configOpts, WithMedium(opts.Medium)) + } + + cfg, err := New(configOpts...) + if err != nil { + return err + } + + s.config = cfg + return nil +} + +// Get retrieves a configuration value by key. 
+func (s *Service) Get(key string, out any) error { + if s.config == nil { + return core.E("config.Service.Get", "config not loaded", nil) + } + return s.config.Get(key, out) +} + +// Set stores a configuration value by key. +func (s *Service) Set(key string, v any) error { + if s.config == nil { + return core.E("config.Service.Set", "config not loaded", nil) + } + return s.config.Set(key, v) +} + +// Ensure Service implements core.Config and Startable at compile time. +var ( + _ core.Config = (*Service)(nil) + _ core.Startable = (*Service)(nil) +) diff --git a/pkg/container/linuxkit.go b/pkg/container/linuxkit.go index e85f9c1a..ee203b40 100644 --- a/pkg/container/linuxkit.go +++ b/pkg/container/linuxkit.go @@ -166,7 +166,7 @@ func (m *LinuxKitManager) Run(ctx context.Context, image string, opts RunOptions // Tee output to both log file and stdout stdout, err := cmd.StdoutPipe() if err != nil { - logFile.Close() + _ = logFile.Close() return nil, fmt.Errorf("failed to get stdout pipe: %w", err) } @@ -412,8 +412,8 @@ func (m *LinuxKitManager) Exec(ctx context.Context, id string, cmd []string) err // Build SSH command sshArgs := []string{ "-p", fmt.Sprintf("%d", sshPort), - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", + "-o", "StrictHostKeyChecking=accept-new", + "-o", "UserKnownHostsFile=~/.core/known_hosts", "-o", "LogLevel=ERROR", "root@localhost", } diff --git a/pkg/container/linuxkit_test.go b/pkg/container/linuxkit_test.go index 46122f26..2a03cb07 100644 --- a/pkg/container/linuxkit_test.go +++ b/pkg/container/linuxkit_test.go @@ -211,11 +211,11 @@ func TestLinuxKitManager_Stop_Bad_NotFound(t *testing.T) { } func TestLinuxKitManager_Stop_Bad_NotRunning(t *testing.T) { - manager, _, tmpDir := newTestManager(t) + _, _, tmpDir := newTestManager(t) statePath := filepath.Join(tmpDir, "containers.json") state, err := LoadState(statePath) require.NoError(t, err) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) + 
manager := NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) container := &Container{ ID: "abc12345", @@ -231,11 +231,11 @@ func TestLinuxKitManager_Stop_Bad_NotRunning(t *testing.T) { } func TestLinuxKitManager_List_Good(t *testing.T) { - manager, _, tmpDir := newTestManager(t) + _, _, tmpDir := newTestManager(t) statePath := filepath.Join(tmpDir, "containers.json") state, err := LoadState(statePath) require.NoError(t, err) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) + manager := NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) _ = state.Add(&Container{ID: "aaa11111", Status: StatusStopped}) _ = state.Add(&Container{ID: "bbb22222", Status: StatusStopped}) @@ -248,11 +248,11 @@ func TestLinuxKitManager_List_Good(t *testing.T) { } func TestLinuxKitManager_List_Good_VerifiesRunningStatus(t *testing.T) { - manager, _, tmpDir := newTestManager(t) + _, _, tmpDir := newTestManager(t) statePath := filepath.Join(tmpDir, "containers.json") state, err := LoadState(statePath) require.NoError(t, err) - manager = NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) + manager := NewLinuxKitManagerWithHypervisor(state, NewMockHypervisor()) // Add a "running" container with a fake PID that doesn't exist _ = state.Add(&Container{ @@ -292,7 +292,7 @@ func TestLinuxKitManager_Logs_Good(t *testing.T) { reader, err := manager.Logs(ctx, "abc12345", false) require.NoError(t, err) - defer reader.Close() + defer func() { _ = reader.Close() }() buf := make([]byte, 1024) n, _ := reader.Read(buf) @@ -323,7 +323,7 @@ func TestLinuxKitManager_Logs_Bad_NoLogFile(t *testing.T) { // If logs existed somehow, clean up the reader if reader != nil { - reader.Close() + _ = reader.Close() } assert.Error(t, err) @@ -477,7 +477,7 @@ func TestFollowReader_Read_Good_WithData(t *testing.T) { reader, err := newFollowReader(ctx, logPath) require.NoError(t, err) - defer reader.Close() + defer func() { _ = reader.Close() }() // The followReader seeks to 
end, so we need to append more content f, err := os.OpenFile(logPath, os.O_APPEND|os.O_WRONLY, 0644) @@ -685,7 +685,7 @@ func TestFollowReader_Read_Good_ReaderError(t *testing.T) { require.NoError(t, err) // Close the underlying file to cause read errors - reader.file.Close() + _ = reader.file.Close() // Read should return an error buf := make([]byte, 1024) diff --git a/pkg/container/state_test.go b/pkg/container/state_test.go index cf4bf5f1..68e6a023 100644 --- a/pkg/container/state_test.go +++ b/pkg/container/state_test.go @@ -109,7 +109,7 @@ func TestState_Update_Good(t *testing.T) { ID: "abc12345", Status: StatusRunning, } - state.Add(container) + _ = state.Add(container) // Update status container.Status = StatusStopped @@ -130,7 +130,7 @@ func TestState_Remove_Good(t *testing.T) { container := &Container{ ID: "abc12345", } - state.Add(container) + _ = state.Add(container) err := state.Remove("abc12345") require.NoError(t, err) @@ -151,9 +151,9 @@ func TestState_All_Good(t *testing.T) { statePath := filepath.Join(tmpDir, "containers.json") state := NewState(statePath) - state.Add(&Container{ID: "aaa11111"}) - state.Add(&Container{ID: "bbb22222"}) - state.Add(&Container{ID: "ccc33333"}) + _ = state.Add(&Container{ID: "aaa11111"}) + _ = state.Add(&Container{ID: "bbb22222"}) + _ = state.Add(&Container{ID: "ccc33333"}) all := state.All() assert.Len(t, all, 3) @@ -164,7 +164,7 @@ func TestState_SaveState_Good_CreatesDirectory(t *testing.T) { nestedPath := filepath.Join(tmpDir, "nested", "dir", "containers.json") state := NewState(nestedPath) - state.Add(&Container{ID: "abc12345"}) + _ = state.Add(&Container{ID: "abc12345"}) err := state.SaveState() require.NoError(t, err) diff --git a/pkg/container/templates_test.go b/pkg/container/templates_test.go index 5825863d..e4a78aa5 100644 --- a/pkg/container/templates_test.go +++ b/pkg/container/templates_test.go @@ -404,7 +404,7 @@ kernel: require.NoError(t, err) err = os.Chdir(tmpDir) require.NoError(t, err) - defer 
os.Chdir(oldWd) + defer func() { _ = os.Chdir(oldWd) }() templates := ListTemplates() @@ -445,7 +445,7 @@ services: require.NoError(t, err) err = os.Chdir(tmpDir) require.NoError(t, err) - defer os.Chdir(oldWd) + defer func() { _ = os.Chdir(oldWd) }() content, err := GetTemplate("my-user-template") @@ -557,7 +557,7 @@ func TestGetUserTemplatesDir_Good_NoDirectory(t *testing.T) { tmpDir := t.TempDir() err = os.Chdir(tmpDir) require.NoError(t, err) - defer os.Chdir(oldWd) + defer func() { _ = os.Chdir(oldWd) }() dir := getUserTemplatesDir() diff --git a/pkg/crypt/checksum.go b/pkg/crypt/checksum.go new file mode 100644 index 00000000..c3622b00 --- /dev/null +++ b/pkg/crypt/checksum.go @@ -0,0 +1,55 @@ +package crypt + +import ( + "crypto/sha256" + "crypto/sha512" + "encoding/hex" + "io" + "os" + + core "github.com/host-uk/core/pkg/framework/core" +) + +// SHA256File computes the SHA-256 checksum of a file and returns it as a hex string. +func SHA256File(path string) (string, error) { + f, err := os.Open(path) + if err != nil { + return "", core.E("crypt.SHA256File", "failed to open file", err) + } + defer func() { _ = f.Close() }() + + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return "", core.E("crypt.SHA256File", "failed to read file", err) + } + + return hex.EncodeToString(h.Sum(nil)), nil +} + +// SHA512File computes the SHA-512 checksum of a file and returns it as a hex string. +func SHA512File(path string) (string, error) { + f, err := os.Open(path) + if err != nil { + return "", core.E("crypt.SHA512File", "failed to open file", err) + } + defer func() { _ = f.Close() }() + + h := sha512.New() + if _, err := io.Copy(h, f); err != nil { + return "", core.E("crypt.SHA512File", "failed to read file", err) + } + + return hex.EncodeToString(h.Sum(nil)), nil +} + +// SHA256Sum computes the SHA-256 checksum of data and returns it as a hex string. 
+func SHA256Sum(data []byte) string { + h := sha256.Sum256(data) + return hex.EncodeToString(h[:]) +} + +// SHA512Sum computes the SHA-512 checksum of data and returns it as a hex string. +func SHA512Sum(data []byte) string { + h := sha512.Sum512(data) + return hex.EncodeToString(h[:]) +} diff --git a/pkg/crypt/checksum_test.go b/pkg/crypt/checksum_test.go new file mode 100644 index 00000000..ce98b3b3 --- /dev/null +++ b/pkg/crypt/checksum_test.go @@ -0,0 +1,23 @@ +package crypt + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSHA256Sum_Good(t *testing.T) { + data := []byte("hello") + expected := "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824" + + result := SHA256Sum(data) + assert.Equal(t, expected, result) +} + +func TestSHA512Sum_Good(t *testing.T) { + data := []byte("hello") + expected := "9b71d224bd62f3785d96d46ad3ea3d73319bfbc2890caadae2dff72519673ca72323c3d99ba5c11d7c7acc6e14b8c5da0c4663475c2e5c3adef46f73bcdec043" + + result := SHA512Sum(data) + assert.Equal(t, expected, result) +} diff --git a/pkg/crypt/crypt.go b/pkg/crypt/crypt.go new file mode 100644 index 00000000..e88a28f9 --- /dev/null +++ b/pkg/crypt/crypt.go @@ -0,0 +1,90 @@ +package crypt + +import ( + core "github.com/host-uk/core/pkg/framework/core" +) + +// Encrypt encrypts data with a passphrase using ChaCha20-Poly1305. +// A random salt is generated and prepended to the output. +// Format: salt (16 bytes) + nonce (24 bytes) + ciphertext. 
+func Encrypt(plaintext, passphrase []byte) ([]byte, error) { + salt, err := generateSalt(argon2SaltLen) + if err != nil { + return nil, core.E("crypt.Encrypt", "failed to generate salt", err) + } + + key := DeriveKey(passphrase, salt, argon2KeyLen) + + encrypted, err := ChaCha20Encrypt(plaintext, key) + if err != nil { + return nil, core.E("crypt.Encrypt", "failed to encrypt", err) + } + + // Prepend salt to the encrypted data (which already has nonce prepended) + result := make([]byte, 0, len(salt)+len(encrypted)) + result = append(result, salt...) + result = append(result, encrypted...) + return result, nil +} + +// Decrypt decrypts data encrypted with Encrypt. +// Expects format: salt (16 bytes) + nonce (24 bytes) + ciphertext. +func Decrypt(ciphertext, passphrase []byte) ([]byte, error) { + if len(ciphertext) < argon2SaltLen { + return nil, core.E("crypt.Decrypt", "ciphertext too short", nil) + } + + salt := ciphertext[:argon2SaltLen] + encrypted := ciphertext[argon2SaltLen:] + + key := DeriveKey(passphrase, salt, argon2KeyLen) + + plaintext, err := ChaCha20Decrypt(encrypted, key) + if err != nil { + return nil, core.E("crypt.Decrypt", "failed to decrypt", err) + } + + return plaintext, nil +} + +// EncryptAES encrypts data using AES-256-GCM with a passphrase. +// A random salt is generated and prepended to the output. +// Format: salt (16 bytes) + nonce (12 bytes) + ciphertext. +func EncryptAES(plaintext, passphrase []byte) ([]byte, error) { + salt, err := generateSalt(argon2SaltLen) + if err != nil { + return nil, core.E("crypt.EncryptAES", "failed to generate salt", err) + } + + key := DeriveKey(passphrase, salt, argon2KeyLen) + + encrypted, err := AESGCMEncrypt(plaintext, key) + if err != nil { + return nil, core.E("crypt.EncryptAES", "failed to encrypt", err) + } + + result := make([]byte, 0, len(salt)+len(encrypted)) + result = append(result, salt...) + result = append(result, encrypted...) 
+ return result, nil +} + +// DecryptAES decrypts data encrypted with EncryptAES. +// Expects format: salt (16 bytes) + nonce (12 bytes) + ciphertext. +func DecryptAES(ciphertext, passphrase []byte) ([]byte, error) { + if len(ciphertext) < argon2SaltLen { + return nil, core.E("crypt.DecryptAES", "ciphertext too short", nil) + } + + salt := ciphertext[:argon2SaltLen] + encrypted := ciphertext[argon2SaltLen:] + + key := DeriveKey(passphrase, salt, argon2KeyLen) + + plaintext, err := AESGCMDecrypt(encrypted, key) + if err != nil { + return nil, core.E("crypt.DecryptAES", "failed to decrypt", err) + } + + return plaintext, nil +} diff --git a/pkg/crypt/crypt_test.go b/pkg/crypt/crypt_test.go new file mode 100644 index 00000000..b2e7a56f --- /dev/null +++ b/pkg/crypt/crypt_test.go @@ -0,0 +1,45 @@ +package crypt + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEncryptDecrypt_Good(t *testing.T) { + plaintext := []byte("hello, world!") + passphrase := []byte("correct-horse-battery-staple") + + encrypted, err := Encrypt(plaintext, passphrase) + assert.NoError(t, err) + assert.NotEqual(t, plaintext, encrypted) + + decrypted, err := Decrypt(encrypted, passphrase) + assert.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestEncryptDecrypt_Bad(t *testing.T) { + plaintext := []byte("secret data") + passphrase := []byte("correct-passphrase") + wrongPassphrase := []byte("wrong-passphrase") + + encrypted, err := Encrypt(plaintext, passphrase) + assert.NoError(t, err) + + _, err = Decrypt(encrypted, wrongPassphrase) + assert.Error(t, err) +} + +func TestEncryptDecryptAES_Good(t *testing.T) { + plaintext := []byte("hello, AES world!") + passphrase := []byte("my-secure-passphrase") + + encrypted, err := EncryptAES(plaintext, passphrase) + assert.NoError(t, err) + assert.NotEqual(t, plaintext, encrypted) + + decrypted, err := DecryptAES(encrypted, passphrase) + assert.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} diff 
--git a/pkg/crypt/hash.go b/pkg/crypt/hash.go new file mode 100644 index 00000000..0c360b0c --- /dev/null +++ b/pkg/crypt/hash.go @@ -0,0 +1,89 @@ +package crypt + +import ( + "crypto/subtle" + "encoding/base64" + "fmt" + "strings" + + core "github.com/host-uk/core/pkg/framework/core" + "golang.org/x/crypto/argon2" + "golang.org/x/crypto/bcrypt" +) + +// HashPassword hashes a password using Argon2id with default parameters. +// Returns a string in the format: $argon2id$v=19$m=65536,t=3,p=4$<salt>$<hash> +func HashPassword(password string) (string, error) { + salt, err := generateSalt(argon2SaltLen) + if err != nil { + return "", core.E("crypt.HashPassword", "failed to generate salt", err) + } + + hash := argon2.IDKey([]byte(password), salt, argon2Time, argon2Memory, argon2Parallelism, argon2KeyLen) + + b64Salt := base64.RawStdEncoding.EncodeToString(salt) + b64Hash := base64.RawStdEncoding.EncodeToString(hash) + + encoded := fmt.Sprintf("$argon2id$v=%d$m=%d,t=%d,p=%d$%s$%s", + argon2.Version, argon2Memory, argon2Time, argon2Parallelism, + b64Salt, b64Hash) + + return encoded, nil + } + +// VerifyPassword verifies a password against an Argon2id hash string. +// The hash must be in the format produced by HashPassword.
+func VerifyPassword(password, hash string) (bool, error) { + parts := strings.Split(hash, "$") + if len(parts) != 6 { + return false, core.E("crypt.VerifyPassword", "invalid hash format", nil) + } + + var version int + if _, err := fmt.Sscanf(parts[2], "v=%d", &version); err != nil { + return false, core.E("crypt.VerifyPassword", "failed to parse version", err) + } + + var memory uint32 + var time uint32 + var parallelism uint8 + if _, err := fmt.Sscanf(parts[3], "m=%d,t=%d,p=%d", &memory, &time, &parallelism); err != nil { + return false, core.E("crypt.VerifyPassword", "failed to parse parameters", err) + } + + salt, err := base64.RawStdEncoding.DecodeString(parts[4]) + if err != nil { + return false, core.E("crypt.VerifyPassword", "failed to decode salt", err) + } + + expectedHash, err := base64.RawStdEncoding.DecodeString(parts[5]) + if err != nil { + return false, core.E("crypt.VerifyPassword", "failed to decode hash", err) + } + + computedHash := argon2.IDKey([]byte(password), salt, time, memory, parallelism, uint32(len(expectedHash))) + + return subtle.ConstantTimeCompare(computedHash, expectedHash) == 1, nil +} + +// HashBcrypt hashes a password using bcrypt with the given cost. +// Cost must be between bcrypt.MinCost and bcrypt.MaxCost. +func HashBcrypt(password string, cost int) (string, error) { + hash, err := bcrypt.GenerateFromPassword([]byte(password), cost) + if err != nil { + return "", core.E("crypt.HashBcrypt", "failed to hash password", err) + } + return string(hash), nil +} + +// VerifyBcrypt verifies a password against a bcrypt hash.
+func VerifyBcrypt(password, hash string) (bool, error) { + err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password)) + if err == bcrypt.ErrMismatchedHashAndPassword { + return false, nil + } + if err != nil { + return false, core.E("crypt.VerifyBcrypt", "failed to verify password", err) + } + return true, nil +} diff --git a/pkg/crypt/hash_test.go b/pkg/crypt/hash_test.go new file mode 100644 index 00000000..ad308a03 --- /dev/null +++ b/pkg/crypt/hash_test.go @@ -0,0 +1,50 @@ +package crypt + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/crypto/bcrypt" +) + +func TestHashPassword_Good(t *testing.T) { + password := "my-secure-password" + + hash, err := HashPassword(password) + assert.NoError(t, err) + assert.NotEmpty(t, hash) + assert.Contains(t, hash, "$argon2id$") + + match, err := VerifyPassword(password, hash) + assert.NoError(t, err) + assert.True(t, match) +} + +func TestVerifyPassword_Bad(t *testing.T) { + password := "my-secure-password" + wrongPassword := "wrong-password" + + hash, err := HashPassword(password) + assert.NoError(t, err) + + match, err := VerifyPassword(wrongPassword, hash) + assert.NoError(t, err) + assert.False(t, match) +} + +func TestHashBcrypt_Good(t *testing.T) { + password := "bcrypt-test-password" + + hash, err := HashBcrypt(password, bcrypt.DefaultCost) + assert.NoError(t, err) + assert.NotEmpty(t, hash) + + match, err := VerifyBcrypt(password, hash) + assert.NoError(t, err) + assert.True(t, match) + + // Wrong password should not match + match, err = VerifyBcrypt("wrong-password", hash) + assert.NoError(t, err) + assert.False(t, match) +} diff --git a/pkg/crypt/hmac.go b/pkg/crypt/hmac.go new file mode 100644 index 00000000..adb80c29 --- /dev/null +++ b/pkg/crypt/hmac.go @@ -0,0 +1,30 @@ +package crypt + +import ( + "crypto/hmac" + "crypto/sha256" + "crypto/sha512" + "hash" +) + +// HMACSHA256 computes the HMAC-SHA256 of a message using the given key. 
+func HMACSHA256(message, key []byte) []byte { + mac := hmac.New(sha256.New, key) + mac.Write(message) + return mac.Sum(nil) +} + +// HMACSHA512 computes the HMAC-SHA512 of a message using the given key. +func HMACSHA512(message, key []byte) []byte { + mac := hmac.New(sha512.New, key) + mac.Write(message) + return mac.Sum(nil) +} + +// VerifyHMAC verifies an HMAC using constant-time comparison. +// hashFunc should be sha256.New, sha512.New, etc. +func VerifyHMAC(message, key, mac []byte, hashFunc func() hash.Hash) bool { + expected := hmac.New(hashFunc, key) + expected.Write(message) + return hmac.Equal(mac, expected.Sum(nil)) +} diff --git a/pkg/crypt/hmac_test.go b/pkg/crypt/hmac_test.go new file mode 100644 index 00000000..31dc474e --- /dev/null +++ b/pkg/crypt/hmac_test.go @@ -0,0 +1,40 @@ +package crypt + +import ( + "crypto/sha256" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHMACSHA256_Good(t *testing.T) { + // RFC 4231 Test Case 2 + key := []byte("Jefe") + message := []byte("what do ya want for nothing?") + expected := "5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843" + + mac := HMACSHA256(message, key) + assert.Equal(t, expected, hex.EncodeToString(mac)) +} + +func TestVerifyHMAC_Good(t *testing.T) { + key := []byte("secret-key") + message := []byte("test message") + + mac := HMACSHA256(message, key) + + valid := VerifyHMAC(message, key, mac, sha256.New) + assert.True(t, valid) +} + +func TestVerifyHMAC_Bad(t *testing.T) { + key := []byte("secret-key") + message := []byte("test message") + tampered := []byte("tampered message") + + mac := HMACSHA256(message, key) + + valid := VerifyHMAC(tampered, key, mac, sha256.New) + assert.False(t, valid) +} diff --git a/pkg/crypt/kdf.go b/pkg/crypt/kdf.go new file mode 100644 index 00000000..ecadb121 --- /dev/null +++ b/pkg/crypt/kdf.go @@ -0,0 +1,60 @@ +// Package crypt provides cryptographic utilities including encryption, +// hashing, key derivation, HMAC, 
and checksum functions. +package crypt + +import ( + "crypto/rand" + "crypto/sha256" + "io" + + core "github.com/host-uk/core/pkg/framework/core" + "golang.org/x/crypto/argon2" + "golang.org/x/crypto/hkdf" + "golang.org/x/crypto/scrypt" +) + +// Argon2id default parameters. +const ( + argon2Memory = 64 * 1024 // 64 MB + argon2Time = 3 + argon2Parallelism = 4 + argon2KeyLen = 32 + argon2SaltLen = 16 +) + +// DeriveKey derives a key from a passphrase using Argon2id with default parameters. +// The salt must be argon2SaltLen bytes. keyLen specifies the desired key length. +func DeriveKey(passphrase, salt []byte, keyLen uint32) []byte { + return argon2.IDKey(passphrase, salt, argon2Time, argon2Memory, argon2Parallelism, keyLen) +} + +// DeriveKeyScrypt derives a key from a passphrase using scrypt. +// Uses recommended parameters: N=32768, r=8, p=1. +func DeriveKeyScrypt(passphrase, salt []byte, keyLen int) ([]byte, error) { + key, err := scrypt.Key(passphrase, salt, 32768, 8, 1, keyLen) + if err != nil { + return nil, core.E("crypt.DeriveKeyScrypt", "failed to derive key", err) + } + return key, nil +} + +// HKDF derives a key using HKDF-SHA256. +// secret is the input keying material, salt is optional (can be nil), +// info is optional context, and keyLen is the desired output length. +func HKDF(secret, salt, info []byte, keyLen int) ([]byte, error) { + reader := hkdf.New(sha256.New, secret, salt, info) + key := make([]byte, keyLen) + if _, err := io.ReadFull(reader, key); err != nil { + return nil, core.E("crypt.HKDF", "failed to derive key", err) + } + return key, nil +} + +// generateSalt creates a random salt of the given length. 
+func generateSalt(length int) ([]byte, error) { + salt := make([]byte, length) + if _, err := rand.Read(salt); err != nil { + return nil, core.E("crypt.generateSalt", "failed to generate random salt", err) + } + return salt, nil +} diff --git a/pkg/crypt/kdf_test.go b/pkg/crypt/kdf_test.go new file mode 100644 index 00000000..08ee76dd --- /dev/null +++ b/pkg/crypt/kdf_test.go @@ -0,0 +1,56 @@ +package crypt + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDeriveKey_Good(t *testing.T) { + passphrase := []byte("test-passphrase") + salt := []byte("1234567890123456") // 16 bytes + + key1 := DeriveKey(passphrase, salt, 32) + key2 := DeriveKey(passphrase, salt, 32) + + assert.Len(t, key1, 32) + assert.Equal(t, key1, key2, "same inputs should produce same output") + + // Different passphrase should produce different key + key3 := DeriveKey([]byte("different-passphrase"), salt, 32) + assert.NotEqual(t, key1, key3) +} + +func TestDeriveKeyScrypt_Good(t *testing.T) { + passphrase := []byte("test-passphrase") + salt := []byte("1234567890123456") + + key, err := DeriveKeyScrypt(passphrase, salt, 32) + assert.NoError(t, err) + assert.Len(t, key, 32) + + // Deterministic + key2, err := DeriveKeyScrypt(passphrase, salt, 32) + assert.NoError(t, err) + assert.Equal(t, key, key2) +} + +func TestHKDF_Good(t *testing.T) { + secret := []byte("input-keying-material") + salt := []byte("optional-salt") + info := []byte("context-info") + + key1, err := HKDF(secret, salt, info, 32) + assert.NoError(t, err) + assert.Len(t, key1, 32) + + // Deterministic + key2, err := HKDF(secret, salt, info, 32) + assert.NoError(t, err) + assert.Equal(t, key1, key2) + + // Different info should produce different key + key3, err := HKDF(secret, salt, []byte("different-info"), 32) + assert.NoError(t, err) + assert.NotEqual(t, key1, key3) +} diff --git a/pkg/crypt/symmetric.go b/pkg/crypt/symmetric.go new file mode 100644 index 00000000..0a91f2e5 --- /dev/null +++ 
b/pkg/crypt/symmetric.go @@ -0,0 +1,100 @@ +package crypt + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + + core "github.com/host-uk/core/pkg/framework/core" + "golang.org/x/crypto/chacha20poly1305" +) + +// ChaCha20Encrypt encrypts plaintext using ChaCha20-Poly1305. +// The key must be 32 bytes. The nonce is randomly generated and prepended +// to the ciphertext. +func ChaCha20Encrypt(plaintext, key []byte) ([]byte, error) { + aead, err := chacha20poly1305.NewX(key) + if err != nil { + return nil, core.E("crypt.ChaCha20Encrypt", "failed to create cipher", err) + } + + nonce := make([]byte, aead.NonceSize()) + if _, err := rand.Read(nonce); err != nil { + return nil, core.E("crypt.ChaCha20Encrypt", "failed to generate nonce", err) + } + + ciphertext := aead.Seal(nonce, nonce, plaintext, nil) + return ciphertext, nil +} + +// ChaCha20Decrypt decrypts ciphertext encrypted with ChaCha20Encrypt. +// The key must be 32 bytes. Expects the nonce prepended to the ciphertext. +func ChaCha20Decrypt(ciphertext, key []byte) ([]byte, error) { + aead, err := chacha20poly1305.NewX(key) + if err != nil { + return nil, core.E("crypt.ChaCha20Decrypt", "failed to create cipher", err) + } + + nonceSize := aead.NonceSize() + if len(ciphertext) < nonceSize { + return nil, core.E("crypt.ChaCha20Decrypt", "ciphertext too short", nil) + } + + nonce, encrypted := ciphertext[:nonceSize], ciphertext[nonceSize:] + plaintext, err := aead.Open(nil, nonce, encrypted, nil) + if err != nil { + return nil, core.E("crypt.ChaCha20Decrypt", "failed to decrypt", err) + } + + return plaintext, nil +} + +// AESGCMEncrypt encrypts plaintext using AES-256-GCM. +// The key must be 32 bytes. The nonce is randomly generated and prepended +// to the ciphertext. 
+func AESGCMEncrypt(plaintext, key []byte) ([]byte, error) { + block, err := aes.NewCipher(key) + if err != nil { + return nil, core.E("crypt.AESGCMEncrypt", "failed to create cipher", err) + } + + aead, err := cipher.NewGCM(block) + if err != nil { + return nil, core.E("crypt.AESGCMEncrypt", "failed to create GCM", err) + } + + nonce := make([]byte, aead.NonceSize()) + if _, err := rand.Read(nonce); err != nil { + return nil, core.E("crypt.AESGCMEncrypt", "failed to generate nonce", err) + } + + ciphertext := aead.Seal(nonce, nonce, plaintext, nil) + return ciphertext, nil +} + +// AESGCMDecrypt decrypts ciphertext encrypted with AESGCMEncrypt. +// The key must be 32 bytes. Expects the nonce prepended to the ciphertext. +func AESGCMDecrypt(ciphertext, key []byte) ([]byte, error) { + block, err := aes.NewCipher(key) + if err != nil { + return nil, core.E("crypt.AESGCMDecrypt", "failed to create cipher", err) + } + + aead, err := cipher.NewGCM(block) + if err != nil { + return nil, core.E("crypt.AESGCMDecrypt", "failed to create GCM", err) + } + + nonceSize := aead.NonceSize() + if len(ciphertext) < nonceSize { + return nil, core.E("crypt.AESGCMDecrypt", "ciphertext too short", nil) + } + + nonce, encrypted := ciphertext[:nonceSize], ciphertext[nonceSize:] + plaintext, err := aead.Open(nil, nonce, encrypted, nil) + if err != nil { + return nil, core.E("crypt.AESGCMDecrypt", "failed to decrypt", err) + } + + return plaintext, nil +} diff --git a/pkg/crypt/symmetric_test.go b/pkg/crypt/symmetric_test.go new file mode 100644 index 00000000..a0605793 --- /dev/null +++ b/pkg/crypt/symmetric_test.go @@ -0,0 +1,55 @@ +package crypt + +import ( + "crypto/rand" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestChaCha20_Good(t *testing.T) { + key := make([]byte, 32) + _, err := rand.Read(key) + assert.NoError(t, err) + + plaintext := []byte("ChaCha20-Poly1305 test data") + + encrypted, err := ChaCha20Encrypt(plaintext, key) + assert.NoError(t, err) + 
assert.NotEqual(t, plaintext, encrypted) + + decrypted, err := ChaCha20Decrypt(encrypted, key) + assert.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestChaCha20_Bad(t *testing.T) { + key := make([]byte, 32) + wrongKey := make([]byte, 32) + _, _ = rand.Read(key) + _, _ = rand.Read(wrongKey) + + plaintext := []byte("secret message") + + encrypted, err := ChaCha20Encrypt(plaintext, key) + assert.NoError(t, err) + + _, err = ChaCha20Decrypt(encrypted, wrongKey) + assert.Error(t, err) +} + +func TestAESGCM_Good(t *testing.T) { + key := make([]byte, 32) + _, err := rand.Read(key) + assert.NoError(t, err) + + plaintext := []byte("AES-256-GCM test data") + + encrypted, err := AESGCMEncrypt(plaintext, key) + assert.NoError(t, err) + assert.NotEqual(t, plaintext, encrypted) + + decrypted, err := AESGCMDecrypt(encrypted, key) + assert.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} diff --git a/pkg/devops/claude.go b/pkg/devops/claude.go index adec79f3..d62b39d0 100644 --- a/pkg/devops/claude.go +++ b/pkg/devops/claude.go @@ -70,8 +70,8 @@ func (d *DevOps) Claude(ctx context.Context, projectDir string, opts ClaudeOptio // Build SSH command with agent forwarding args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", + "-o", "StrictHostKeyChecking=accept-new", + "-o", "UserKnownHostsFile=~/.core/known_hosts", "-o", "LogLevel=ERROR", "-A", // SSH agent forwarding "-p", "2222", @@ -132,8 +132,8 @@ func (d *DevOps) CopyGHAuth(ctx context.Context) error { // Use scp to copy gh config cmd := exec.CommandContext(ctx, "scp", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", + "-o", "StrictHostKeyChecking=accept-new", + "-o", "UserKnownHostsFile=~/.core/known_hosts", "-o", "LogLevel=ERROR", "-P", "2222", "-r", ghConfigDir, diff --git a/pkg/devops/config_test.go b/pkg/devops/config_test.go index ddfd5523..cdd4ec7b 100644 --- a/pkg/devops/config_test.go +++ b/pkg/devops/config_test.go @@ 
-28,7 +28,7 @@ func TestLoadConfig_Good(t *testing.T) { tempHome := t.TempDir() origHome := os.Getenv("HOME") t.Setenv("HOME", tempHome) - defer os.Setenv("HOME", origHome) + defer func() { _ = os.Setenv("HOME", origHome) }() cfg, err := LoadConfig() assert.NoError(t, err) @@ -250,5 +250,5 @@ func TestLoadConfig_Bad_UnreadableFile(t *testing.T) { assert.Error(t, err) // Restore permissions so cleanup works - os.Chmod(configPath, 0644) + _ = os.Chmod(configPath, 0644) } diff --git a/pkg/devops/devops_test.go b/pkg/devops/devops_test.go index 65f45c9e..4b75b8d0 100644 --- a/pkg/devops/devops_test.go +++ b/pkg/devops/devops_test.go @@ -26,8 +26,8 @@ func TestImagesDir(t *testing.T) { t.Run("default directory", func(t *testing.T) { // Unset env if it exists orig := os.Getenv("CORE_IMAGES_DIR") - os.Unsetenv("CORE_IMAGES_DIR") - defer os.Setenv("CORE_IMAGES_DIR", orig) + _ = os.Unsetenv("CORE_IMAGES_DIR") + defer func() { _ = os.Setenv("CORE_IMAGES_DIR", orig) }() dir, err := ImagesDir() assert.NoError(t, err) @@ -617,7 +617,7 @@ func TestDevOps_IsRunning_Bad_DifferentContainerName(t *testing.T) { func TestDevOps_Boot_Good_FreshFlag(t *testing.T) { tempDir, err := os.MkdirTemp("", "devops-test-*") require.NoError(t, err) - t.Cleanup(func() { os.RemoveAll(tempDir) }) + t.Cleanup(func() { _ = os.RemoveAll(tempDir) }) t.Setenv("CORE_IMAGES_DIR", tempDir) // Create fake image @@ -701,7 +701,7 @@ func TestDevOps_Stop_Bad_ContainerNotRunning(t *testing.T) { func TestDevOps_Boot_Good_FreshWithNoExisting(t *testing.T) { tempDir, err := os.MkdirTemp("", "devops-boot-fresh-*") require.NoError(t, err) - t.Cleanup(func() { os.RemoveAll(tempDir) }) + t.Cleanup(func() { _ = os.RemoveAll(tempDir) }) t.Setenv("CORE_IMAGES_DIR", tempDir) // Create fake image @@ -783,7 +783,7 @@ func TestDevOps_CheckUpdate_Delegates(t *testing.T) { func TestDevOps_Boot_Good_Success(t *testing.T) { tempDir, err := os.MkdirTemp("", "devops-boot-success-*") require.NoError(t, err) - t.Cleanup(func() { 
os.RemoveAll(tempDir) }) + t.Cleanup(func() { _ = os.RemoveAll(tempDir) }) t.Setenv("CORE_IMAGES_DIR", tempDir) // Create fake image diff --git a/pkg/devops/serve.go b/pkg/devops/serve.go index 7d3cacd2..78f784b1 100644 --- a/pkg/devops/serve.go +++ b/pkg/devops/serve.go @@ -57,8 +57,8 @@ func (d *DevOps) mountProject(ctx context.Context, path string) error { // Use reverse SSHFS mount // The VM connects back to host to mount the directory cmd := exec.CommandContext(ctx, "ssh", - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", + "-o", "StrictHostKeyChecking=accept-new", + "-o", "UserKnownHostsFile=~/.core/known_hosts", "-o", "LogLevel=ERROR", "-R", "10000:localhost:22", // Reverse tunnel for SSHFS "-p", "2222", diff --git a/pkg/devops/shell.go b/pkg/devops/shell.go index fc343d80..8b524fac 100644 --- a/pkg/devops/shell.go +++ b/pkg/devops/shell.go @@ -33,8 +33,8 @@ func (d *DevOps) Shell(ctx context.Context, opts ShellOptions) error { // sshShell connects via SSH. 
func (d *DevOps) sshShell(ctx context.Context, command []string) error { args := []string{ - "-o", "StrictHostKeyChecking=no", - "-o", "UserKnownHostsFile=/dev/null", + "-o", "StrictHostKeyChecking=accept-new", + "-o", "UserKnownHostsFile=~/.core/known_hosts", "-o", "LogLevel=ERROR", "-A", // Agent forwarding "-p", "2222", diff --git a/pkg/devops/test_test.go b/pkg/devops/test_test.go index 4df32bc6..2a20e6e2 100644 --- a/pkg/devops/test_test.go +++ b/pkg/devops/test_test.go @@ -8,7 +8,7 @@ import ( func TestDetectTestCommand_Good_ComposerJSON(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest"}}`), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "composer test" { @@ -18,7 +18,7 @@ func TestDetectTestCommand_Good_ComposerJSON(t *testing.T) { func TestDetectTestCommand_Good_PackageJSON(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"vitest"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"vitest"}}`), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "npm test" { @@ -28,7 +28,7 @@ func TestDetectTestCommand_Good_PackageJSON(t *testing.T) { func TestDetectTestCommand_Good_GoMod(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "go test ./..." 
{ @@ -39,8 +39,8 @@ func TestDetectTestCommand_Good_GoMod(t *testing.T) { func TestDetectTestCommand_Good_CoreTestYaml(t *testing.T) { tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: custom-test"), 0644) + _ = os.MkdirAll(coreDir, 0755) + _ = os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: custom-test"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "custom-test" { @@ -50,7 +50,7 @@ func TestDetectTestCommand_Good_CoreTestYaml(t *testing.T) { func TestDetectTestCommand_Good_Pytest(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "pytest.ini"), []byte("[pytest]"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "pytest.ini"), []byte("[pytest]"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "pytest" { @@ -60,7 +60,7 @@ func TestDetectTestCommand_Good_Pytest(t *testing.T) { func TestDetectTestCommand_Good_Taskfile(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "Taskfile.yaml"), []byte("version: '3'"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "Taskfile.yaml"), []byte("version: '3'"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "task test" { @@ -81,9 +81,9 @@ func TestDetectTestCommand_Good_Priority(t *testing.T) { // .core/test.yaml should take priority over other detection methods tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: my-custom-test"), 0644) - os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) + _ = os.MkdirAll(coreDir, 0755) + _ = os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("command: my-custom-test"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "go.mod"), []byte("module example"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "my-custom-test" { @@ -94,7 +94,7 @@ func 
TestDetectTestCommand_Good_Priority(t *testing.T) { func TestLoadTestConfig_Good(t *testing.T) { tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) + _ = os.MkdirAll(coreDir, 0755) configYAML := `version: 1 command: default-test @@ -106,7 +106,7 @@ commands: env: CI: "true" ` - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte(configYAML), 0644) + _ = os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte(configYAML), 0644) cfg, err := LoadTestConfig(tmpDir) if err != nil { @@ -141,7 +141,7 @@ func TestLoadTestConfig_Bad_NotFound(t *testing.T) { func TestHasPackageScript_Good(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"jest","build":"webpack"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"test":"jest","build":"webpack"}}`), 0644) if !hasPackageScript(tmpDir, "test") { t.Error("expected to find 'test' script") @@ -153,7 +153,7 @@ func TestHasPackageScript_Good(t *testing.T) { func TestHasPackageScript_Bad_MissingScript(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"build":"webpack"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"scripts":{"build":"webpack"}}`), 0644) if hasPackageScript(tmpDir, "test") { t.Error("expected not to find 'test' script") @@ -162,7 +162,7 @@ func TestHasPackageScript_Bad_MissingScript(t *testing.T) { func TestHasComposerScript_Good(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest","post-install-cmd":"@php artisan migrate"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"test":"pest","post-install-cmd":"@php artisan migrate"}}`), 0644) if !hasComposerScript(tmpDir, "test") { t.Error("expected to find 'test' script") @@ -171,7 +171,7 @@ func 
TestHasComposerScript_Good(t *testing.T) { func TestHasComposerScript_Bad_MissingScript(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"build":"@php build.php"}}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"scripts":{"build":"@php build.php"}}`), 0644) if hasComposerScript(tmpDir, "test") { t.Error("expected not to find 'test' script") @@ -227,7 +227,7 @@ func TestTestOptions_Struct(t *testing.T) { func TestDetectTestCommand_Good_TaskfileYml(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "Taskfile.yml"), []byte("version: '3'"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "Taskfile.yml"), []byte("version: '3'"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "task test" { @@ -237,7 +237,7 @@ func TestDetectTestCommand_Good_TaskfileYml(t *testing.T) { func TestDetectTestCommand_Good_Pyproject(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "pyproject.toml"), []byte("[tool.pytest]"), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "pyproject.toml"), []byte("[tool.pytest]"), 0644) cmd := DetectTestCommand(tmpDir) if cmd != "pytest" { @@ -255,7 +255,7 @@ func TestHasPackageScript_Bad_NoFile(t *testing.T) { func TestHasPackageScript_Bad_InvalidJSON(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`invalid json`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`invalid json`), 0644) if hasPackageScript(tmpDir, "test") { t.Error("expected false for invalid JSON") @@ -264,7 +264,7 @@ func TestHasPackageScript_Bad_InvalidJSON(t *testing.T) { func TestHasPackageScript_Bad_NoScripts(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) if hasPackageScript(tmpDir, "test") { t.Error("expected 
false for missing scripts section") @@ -281,7 +281,7 @@ func TestHasComposerScript_Bad_NoFile(t *testing.T) { func TestHasComposerScript_Bad_InvalidJSON(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`invalid json`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`invalid json`), 0644) if hasComposerScript(tmpDir, "test") { t.Error("expected false for invalid JSON") @@ -290,7 +290,7 @@ func TestHasComposerScript_Bad_InvalidJSON(t *testing.T) { func TestHasComposerScript_Bad_NoScripts(t *testing.T) { tmpDir := t.TempDir() - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) if hasComposerScript(tmpDir, "test") { t.Error("expected false for missing scripts section") @@ -300,8 +300,8 @@ func TestHasComposerScript_Bad_NoScripts(t *testing.T) { func TestLoadTestConfig_Bad_InvalidYAML(t *testing.T) { tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("invalid: yaml: :"), 0644) + _ = os.MkdirAll(coreDir, 0755) + _ = os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("invalid: yaml: :"), 0644) _, err := LoadTestConfig(tmpDir) if err == nil { @@ -312,8 +312,8 @@ func TestLoadTestConfig_Bad_InvalidYAML(t *testing.T) { func TestLoadTestConfig_Good_MinimalConfig(t *testing.T) { tmpDir := t.TempDir() coreDir := filepath.Join(tmpDir, ".core") - os.MkdirAll(coreDir, 0755) - os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("version: 1"), 0644) + _ = os.MkdirAll(coreDir, 0755) + _ = os.WriteFile(filepath.Join(coreDir, "test.yaml"), []byte("version: 1"), 0644) cfg, err := LoadTestConfig(tmpDir) if err != nil { @@ -330,7 +330,7 @@ func TestLoadTestConfig_Good_MinimalConfig(t *testing.T) { func TestDetectTestCommand_Good_ComposerWithoutScript(t *testing.T) { 
tmpDir := t.TempDir() // composer.json without test script should not return composer test - os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "composer.json"), []byte(`{"name":"test/pkg"}`), 0644) cmd := DetectTestCommand(tmpDir) // Falls through to empty (no match) @@ -342,7 +342,7 @@ func TestDetectTestCommand_Good_ComposerWithoutScript(t *testing.T) { func TestDetectTestCommand_Good_PackageJSONWithoutScript(t *testing.T) { tmpDir := t.TempDir() // package.json without test or dev script - os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) + _ = os.WriteFile(filepath.Join(tmpDir, "package.json"), []byte(`{"name":"test"}`), 0644) cmd := DetectTestCommand(tmpDir) // Falls through to empty diff --git a/pkg/framework/core/core_lifecycle_test.go b/pkg/framework/core/core_lifecycle_test.go index 3982a363..6b1a3020 100644 --- a/pkg/framework/core/core_lifecycle_test.go +++ b/pkg/framework/core/core_lifecycle_test.go @@ -113,8 +113,8 @@ func TestCore_LifecycleErrors(t *testing.T) { s1 := &MockStartable{err: assert.AnError} s2 := &MockStoppable{err: assert.AnError} - c.RegisterService("s1", s1) - c.RegisterService("s2", s2) + _ = c.RegisterService("s1", s1) + _ = c.RegisterService("s2", s2) err = c.ServiceStartup(context.Background(), nil) assert.Error(t, err) diff --git a/pkg/framework/core/core_test.go b/pkg/framework/core/core_test.go index 1af883b8..60514354 100644 --- a/pkg/framework/core/core_test.go +++ b/pkg/framework/core/core_test.go @@ -1,6 +1,7 @@ package core import ( + "context" "embed" "io" "testing" @@ -123,9 +124,6 @@ func TestFeatures_IsEnabled_Good(t *testing.T) { assert.False(t, c.Features.IsEnabled("feature3")) } -type startupMessage struct{} -type shutdownMessage struct{} - func TestCore_ServiceLifecycle_Good(t *testing.T) { c, err := New() assert.NoError(t, err) @@ -138,12 +136,12 @@ func TestCore_ServiceLifecycle_Good(t *testing.T) { 
c.RegisterAction(handler) // Test Startup - _ = c.ServiceStartup(nil, nil) + _ = c.ServiceStartup(context.TODO(), nil) _, ok := messageReceived.(ActionServiceStartup) assert.True(t, ok, "expected ActionServiceStartup message") // Test Shutdown - _ = c.ServiceShutdown(nil) + _ = c.ServiceShutdown(context.TODO()) _, ok = messageReceived.(ActionServiceShutdown) assert.True(t, ok, "expected ActionServiceShutdown message") } @@ -164,7 +162,7 @@ func TestCore_WithAssets_Good(t *testing.T) { assets := c.Assets() file, err := assets.Open("testdata/test.txt") assert.NoError(t, err) - defer file.Close() + defer func() { _ = file.Close() }() content, err := io.ReadAll(file) assert.NoError(t, err) assert.Equal(t, "hello from testdata\n", string(content)) diff --git a/pkg/framework/core/runtime_pkg.go b/pkg/framework/core/runtime_pkg.go index 71199f6a..0cb941db 100644 --- a/pkg/framework/core/runtime_pkg.go +++ b/pkg/framework/core/runtime_pkg.go @@ -100,13 +100,13 @@ func (r *Runtime) ServiceName() string { // ServiceStartup is called by the GUI runtime at application startup. // This is where the Core's startup lifecycle is initiated. func (r *Runtime) ServiceStartup(ctx context.Context, options any) { - r.Core.ServiceStartup(ctx, options) + _ = r.Core.ServiceStartup(ctx, options) } // ServiceShutdown is called by the GUI runtime at application shutdown. // This is where the Core's shutdown lifecycle is initiated. 
func (r *Runtime) ServiceShutdown(ctx context.Context) { if r.Core != nil { - r.Core.ServiceShutdown(ctx) + _ = r.Core.ServiceShutdown(ctx) } } diff --git a/pkg/framework/core/runtime_pkg_test.go b/pkg/framework/core/runtime_pkg_test.go index 0600d819..f58ebcbe 100644 --- a/pkg/framework/core/runtime_pkg_test.go +++ b/pkg/framework/core/runtime_pkg_test.go @@ -1,6 +1,7 @@ package core import ( + "context" "testing" "github.com/stretchr/testify/assert" @@ -103,12 +104,12 @@ func TestRuntime_Lifecycle_Good(t *testing.T) { // ServiceStartup & ServiceShutdown // These are simple wrappers around the core methods, which are tested in core_test.go. // We call them here to ensure coverage. - rt.ServiceStartup(nil, nil) - rt.ServiceShutdown(nil) + rt.ServiceStartup(context.TODO(), nil) + rt.ServiceShutdown(context.TODO()) // Test shutdown with nil core rt.Core = nil - rt.ServiceShutdown(nil) + rt.ServiceShutdown(context.TODO()) } func TestNewServiceRuntime_Good(t *testing.T) { diff --git a/pkg/io/client_test.go b/pkg/io/client_test.go index 9d76d518..2738c5a2 100644 --- a/pkg/io/client_test.go +++ b/pkg/io/client_test.go @@ -255,6 +255,6 @@ func TestLocalGlobal_Good(t *testing.T) { assert.NotNil(t, Local, "io.Local should be initialized") // Should be able to use it as a Medium - var m Medium = Local + var m = Local assert.NotNil(t, m) } diff --git a/pkg/io/local/client.go b/pkg/io/local/client.go index 88145927..14cb826f 100644 --- a/pkg/io/local/client.go +++ b/pkg/io/local/client.go @@ -39,7 +39,11 @@ func (m *Medium) path(p string) string { // Otherwise, sandbox absolute paths by stripping volume + leading separators vol := filepath.VolumeName(clean) clean = strings.TrimPrefix(clean, vol) - clean = strings.TrimLeft(clean, string(os.PathSeparator)+"/") + cutset := string(os.PathSeparator) + if os.PathSeparator != '/' { + cutset += "/" + } + clean = strings.TrimLeft(clean, cutset) return filepath.Join(m.root, clean) } return filepath.Join(m.root, clean) diff --git 
a/pkg/io/local/client_test.go b/pkg/io/local/client_test.go index fc474a7a..9e2a1e14 100644 --- a/pkg/io/local/client_test.go +++ b/pkg/io/local/client_test.go @@ -85,8 +85,8 @@ func TestIsDir(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.Mkdir(filepath.Join(root, "mydir"), 0755) - os.WriteFile(filepath.Join(root, "myfile"), []byte("x"), 0644) + _ = os.Mkdir(filepath.Join(root, "mydir"), 0755) + _ = os.WriteFile(filepath.Join(root, "myfile"), []byte("x"), 0644) assert.True(t, m.IsDir("mydir")) assert.False(t, m.IsDir("myfile")) @@ -98,8 +98,8 @@ func TestIsFile(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.Mkdir(filepath.Join(root, "mydir"), 0755) - os.WriteFile(filepath.Join(root, "myfile"), []byte("x"), 0644) + _ = os.Mkdir(filepath.Join(root, "mydir"), 0755) + _ = os.WriteFile(filepath.Join(root, "myfile"), []byte("x"), 0644) assert.True(t, m.IsFile("myfile")) assert.False(t, m.IsFile("mydir")) @@ -111,7 +111,7 @@ func TestExists(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.WriteFile(filepath.Join(root, "exists"), []byte("x"), 0644) + _ = os.WriteFile(filepath.Join(root, "exists"), []byte("x"), 0644) assert.True(t, m.Exists("exists")) assert.False(t, m.Exists("nope")) @@ -121,9 +121,9 @@ func TestList(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.WriteFile(filepath.Join(root, "a.txt"), []byte("a"), 0644) - os.WriteFile(filepath.Join(root, "b.txt"), []byte("b"), 0644) - os.Mkdir(filepath.Join(root, "subdir"), 0755) + _ = os.WriteFile(filepath.Join(root, "a.txt"), []byte("a"), 0644) + _ = os.WriteFile(filepath.Join(root, "b.txt"), []byte("b"), 0644) + _ = os.Mkdir(filepath.Join(root, "subdir"), 0755) entries, err := m.List("") assert.NoError(t, err) @@ -134,7 +134,7 @@ func TestStat(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.WriteFile(filepath.Join(root, "file"), []byte("content"), 0644) + _ = os.WriteFile(filepath.Join(root, "file"), []byte("content"), 0644) info, err := m.Stat("file") 
assert.NoError(t, err) @@ -145,7 +145,7 @@ func TestDelete(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.WriteFile(filepath.Join(root, "todelete"), []byte("x"), 0644) + _ = os.WriteFile(filepath.Join(root, "todelete"), []byte("x"), 0644) assert.True(t, m.Exists("todelete")) err := m.Delete("todelete") @@ -157,8 +157,8 @@ func TestDeleteAll(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.MkdirAll(filepath.Join(root, "dir/sub"), 0755) - os.WriteFile(filepath.Join(root, "dir/sub/file"), []byte("x"), 0644) + _ = os.MkdirAll(filepath.Join(root, "dir/sub"), 0755) + _ = os.WriteFile(filepath.Join(root, "dir/sub/file"), []byte("x"), 0644) err := m.DeleteAll("dir") assert.NoError(t, err) @@ -169,7 +169,7 @@ func TestRename(t *testing.T) { root := t.TempDir() m, _ := New(root) - os.WriteFile(filepath.Join(root, "old"), []byte("x"), 0644) + _ = os.WriteFile(filepath.Join(root, "old"), []byte("x"), 0644) err := m.Rename("old", "new") assert.NoError(t, err) @@ -192,7 +192,7 @@ func TestFileGetFileSet(t *testing.T) { func TestDelete_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_delete_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -217,7 +217,7 @@ func TestDelete_Good(t *testing.T) { func TestDelete_Bad_NotEmpty(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_delete_notempty_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -234,7 +234,7 @@ func TestDelete_Bad_NotEmpty(t *testing.T) { func TestDeleteAll_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_deleteall_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -256,7 +256,7 @@ func TestDeleteAll_Good(t *testing.T) { func 
TestRename_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_rename_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -277,7 +277,7 @@ func TestRename_Good(t *testing.T) { func TestRename_Traversal_Sanitized(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_rename_traversal_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -296,7 +296,7 @@ func TestRename_Traversal_Sanitized(t *testing.T) { func TestList_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_list_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -326,7 +326,7 @@ func TestList_Good(t *testing.T) { func TestStat_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_stat_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -352,7 +352,7 @@ func TestStat_Good(t *testing.T) { func TestExists_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_exists_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) @@ -371,7 +371,7 @@ func TestExists_Good(t *testing.T) { func TestIsDir_Good(t *testing.T) { testRoot, err := os.MkdirTemp("", "local_isdir_test") assert.NoError(t, err) - defer os.RemoveAll(testRoot) + defer func() { _ = os.RemoveAll(testRoot) }() medium, err := New(testRoot) assert.NoError(t, err) diff --git a/pkg/log/log.go b/pkg/log/log.go index ff8e3ba8..84c290de 100644 --- a/pkg/log/log.go +++ b/pkg/log/log.go @@ -148,7 +148,7 @@ func (l *Logger) log(level Level, prefix, msg string, keyvals ...any) { } } - 
fmt.Fprintf(output, "%s %s %s%s\n", timestamp, prefix, msg, kvStr) + _, _ = fmt.Fprintf(output, "%s %s %s%s\n", timestamp, prefix, msg, kvStr) } // Debug logs a debug message with optional key-value pairs. diff --git a/pkg/mcp/transport_tcp.go b/pkg/mcp/transport_tcp.go index 0e6e0f7e..3e4a22e4 100644 --- a/pkg/mcp/transport_tcp.go +++ b/pkg/mcp/transport_tcp.go @@ -38,12 +38,12 @@ func (s *Service) ServeTCP(ctx context.Context, addr string) error { if err != nil { return err } - defer t.listener.Close() + defer func() { _ = t.listener.Close() }() // Close listener when context is cancelled to unblock Accept go func() { <-ctx.Done() - t.listener.Close() + _ = t.listener.Close() }() if addr == "" { diff --git a/pkg/plugin/config.go b/pkg/plugin/config.go new file mode 100644 index 00000000..31554892 --- /dev/null +++ b/pkg/plugin/config.go @@ -0,0 +1,10 @@ +package plugin + +// PluginConfig holds configuration for a single installed plugin. +type PluginConfig struct { + Name string `json:"name" yaml:"name"` + Version string `json:"version" yaml:"version"` + Source string `json:"source" yaml:"source"` // e.g., "github:org/repo" + Enabled bool `json:"enabled" yaml:"enabled"` + InstalledAt string `json:"installed_at" yaml:"installed_at"` // RFC 3339 timestamp +} diff --git a/pkg/plugin/installer.go b/pkg/plugin/installer.go new file mode 100644 index 00000000..89ce2fe5 --- /dev/null +++ b/pkg/plugin/installer.go @@ -0,0 +1,195 @@ +package plugin + +import ( + "context" + "fmt" + "os/exec" + "path/filepath" + "strings" + "time" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// Installer handles plugin installation from GitHub. +type Installer struct { + medium io.Medium + registry *Registry +} + +// NewInstaller creates a new plugin installer. 
+func NewInstaller(m io.Medium, registry *Registry) *Installer { + return &Installer{ + medium: m, + registry: registry, + } +} + +// Install downloads and installs a plugin from GitHub. +// The source format is "org/repo" or "org/repo@version". +func (i *Installer) Install(ctx context.Context, source string) error { + org, repo, version, err := ParseSource(source) + if err != nil { + return core.E("plugin.Installer.Install", "invalid source", err) + } + + // Check if already installed + if _, exists := i.registry.Get(repo); exists { + return core.E("plugin.Installer.Install", "plugin already installed: "+repo, nil) + } + + // Clone the repository + pluginDir := filepath.Join(i.registry.basePath, repo) + if err := i.medium.EnsureDir(pluginDir); err != nil { + return core.E("plugin.Installer.Install", "failed to create plugin directory", err) + } + + if err := i.cloneRepo(ctx, org, repo, version, pluginDir); err != nil { + return core.E("plugin.Installer.Install", "failed to clone repository", err) + } + + // Load and validate manifest + manifestPath := filepath.Join(pluginDir, "plugin.json") + manifest, err := LoadManifest(i.medium, manifestPath) + if err != nil { + // Clean up on failure + _ = i.medium.DeleteAll(pluginDir) + return core.E("plugin.Installer.Install", "failed to load manifest", err) + } + + if err := manifest.Validate(); err != nil { + _ = i.medium.DeleteAll(pluginDir) + return core.E("plugin.Installer.Install", "invalid manifest", err) + } + + // Resolve version + if version == "" { + version = manifest.Version + } + + // Register in the registry + cfg := &PluginConfig{ + Name: manifest.Name, + Version: version, + Source: fmt.Sprintf("github:%s/%s", org, repo), + Enabled: true, + InstalledAt: time.Now().UTC().Format(time.RFC3339), + } + + if err := i.registry.Add(cfg); err != nil { + return core.E("plugin.Installer.Install", "failed to register plugin", err) + } + + if err := i.registry.Save(); err != nil { + return 
core.E("plugin.Installer.Install", "failed to save registry", err) + } + + return nil +} + +// Update updates a plugin to the latest version. +func (i *Installer) Update(ctx context.Context, name string) error { + cfg, ok := i.registry.Get(name) + if !ok { + return core.E("plugin.Installer.Update", "plugin not found: "+name, nil) + } + + // Parse the source to get org/repo + source := strings.TrimPrefix(cfg.Source, "github:") + pluginDir := filepath.Join(i.registry.basePath, name) + + // Pull latest changes + cmd := exec.CommandContext(ctx, "git", "-C", pluginDir, "pull", "--ff-only") + if output, err := cmd.CombinedOutput(); err != nil { + return core.E("plugin.Installer.Update", "failed to pull updates: "+strings.TrimSpace(string(output)), err) + } + + // Reload manifest to get updated version + manifestPath := filepath.Join(pluginDir, "plugin.json") + manifest, err := LoadManifest(i.medium, manifestPath) + if err != nil { + return core.E("plugin.Installer.Update", "failed to read updated manifest", err) + } + + // Update registry + cfg.Version = manifest.Version + if err := i.registry.Save(); err != nil { + return core.E("plugin.Installer.Update", "failed to save registry", err) + } + + _ = source // used for context + return nil +} + +// Remove uninstalls a plugin by removing its files and registry entry. 
+func (i *Installer) Remove(name string) error { + if _, ok := i.registry.Get(name); !ok { + return core.E("plugin.Installer.Remove", "plugin not found: "+name, nil) + } + + // Delete plugin directory + pluginDir := filepath.Join(i.registry.basePath, name) + if i.medium.Exists(pluginDir) { + if err := i.medium.DeleteAll(pluginDir); err != nil { + return core.E("plugin.Installer.Remove", "failed to delete plugin files", err) + } + } + + // Remove from registry + if err := i.registry.Remove(name); err != nil { + return core.E("plugin.Installer.Remove", "failed to unregister plugin", err) + } + + if err := i.registry.Save(); err != nil { + return core.E("plugin.Installer.Remove", "failed to save registry", err) + } + + return nil +} + +// cloneRepo clones a GitHub repository using the gh CLI. +func (i *Installer) cloneRepo(ctx context.Context, org, repo, version, dest string) error { + repoURL := fmt.Sprintf("%s/%s", org, repo) + + args := []string{"repo", "clone", repoURL, dest} + if version != "" { + args = append(args, "--", "--branch", version) + } + + cmd := exec.CommandContext(ctx, "gh", args...) + if output, err := cmd.CombinedOutput(); err != nil { + return fmt.Errorf("%s: %s", err, strings.TrimSpace(string(output))) + } + + return nil +} + +// ParseSource parses a plugin source string into org, repo, and version. 
+// Accepted formats: +// - "org/repo" -> org="org", repo="repo", version="" +// - "org/repo@v1.0" -> org="org", repo="repo", version="v1.0" +func ParseSource(source string) (org, repo, version string, err error) { + if source == "" { + return "", "", "", core.E("plugin.ParseSource", "source is empty", nil) + } + + // Split off version if present + atIdx := strings.LastIndex(source, "@") + path := source + if atIdx != -1 { + path = source[:atIdx] + version = source[atIdx+1:] + if version == "" { + return "", "", "", core.E("plugin.ParseSource", "version is empty after @", nil) + } + } + + // Split org/repo + parts := strings.Split(path, "/") + if len(parts) != 2 || parts[0] == "" || parts[1] == "" { + return "", "", "", core.E("plugin.ParseSource", "source must be in format org/repo[@version]", nil) + } + + return parts[0], parts[1], version, nil +} diff --git a/pkg/plugin/installer_test.go b/pkg/plugin/installer_test.go new file mode 100644 index 00000000..b8afcf45 --- /dev/null +++ b/pkg/plugin/installer_test.go @@ -0,0 +1,67 @@ +package plugin + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseSource_Good_OrgRepo(t *testing.T) { + org, repo, version, err := ParseSource("host-uk/core-plugin") + assert.NoError(t, err) + assert.Equal(t, "host-uk", org) + assert.Equal(t, "core-plugin", repo) + assert.Equal(t, "", version) +} + +func TestParseSource_Good_OrgRepoVersion(t *testing.T) { + org, repo, version, err := ParseSource("host-uk/core-plugin@v1.0.0") + assert.NoError(t, err) + assert.Equal(t, "host-uk", org) + assert.Equal(t, "core-plugin", repo) + assert.Equal(t, "v1.0.0", version) +} + +func TestParseSource_Good_VersionWithoutPrefix(t *testing.T) { + org, repo, version, err := ParseSource("org/repo@1.2.3") + assert.NoError(t, err) + assert.Equal(t, "org", org) + assert.Equal(t, "repo", repo) + assert.Equal(t, "1.2.3", version) +} + +func TestParseSource_Bad_Empty(t *testing.T) { + _, _, _, err := ParseSource("") + 
assert.Error(t, err) + assert.Contains(t, err.Error(), "source is empty") +} + +func TestParseSource_Bad_NoSlash(t *testing.T) { + _, _, _, err := ParseSource("just-a-name") + assert.Error(t, err) + assert.Contains(t, err.Error(), "org/repo") +} + +func TestParseSource_Bad_TooManySlashes(t *testing.T) { + _, _, _, err := ParseSource("a/b/c") + assert.Error(t, err) + assert.Contains(t, err.Error(), "org/repo") +} + +func TestParseSource_Bad_EmptyOrg(t *testing.T) { + _, _, _, err := ParseSource("/repo") + assert.Error(t, err) + assert.Contains(t, err.Error(), "org/repo") +} + +func TestParseSource_Bad_EmptyRepo(t *testing.T) { + _, _, _, err := ParseSource("org/") + assert.Error(t, err) + assert.Contains(t, err.Error(), "org/repo") +} + +func TestParseSource_Bad_EmptyVersion(t *testing.T) { + _, _, _, err := ParseSource("org/repo@") + assert.Error(t, err) + assert.Contains(t, err.Error(), "version is empty") +} diff --git a/pkg/plugin/loader.go b/pkg/plugin/loader.go new file mode 100644 index 00000000..50133297 --- /dev/null +++ b/pkg/plugin/loader.go @@ -0,0 +1,63 @@ +package plugin + +import ( + "path/filepath" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// Loader loads plugins from the filesystem. +type Loader struct { + medium io.Medium + baseDir string +} + +// NewLoader creates a new plugin loader. +func NewLoader(m io.Medium, baseDir string) *Loader { + return &Loader{ + medium: m, + baseDir: baseDir, + } +} + +// Discover finds all plugin directories under baseDir and returns their manifests. +// Directories without a valid plugin.json are silently skipped. 
+func (l *Loader) Discover() ([]*Manifest, error) { + entries, err := l.medium.List(l.baseDir) + if err != nil { + return nil, core.E("plugin.Loader.Discover", "failed to list plugin directory", err) + } + + var manifests []*Manifest + for _, entry := range entries { + if !entry.IsDir() { + continue + } + + manifest, err := l.LoadPlugin(entry.Name()) + if err != nil { + // Skip directories without valid manifests + continue + } + + manifests = append(manifests, manifest) + } + + return manifests, nil +} + +// LoadPlugin loads a single plugin's manifest by name. +func (l *Loader) LoadPlugin(name string) (*Manifest, error) { + manifestPath := filepath.Join(l.baseDir, name, "plugin.json") + manifest, err := LoadManifest(l.medium, manifestPath) + if err != nil { + return nil, core.E("plugin.Loader.LoadPlugin", "failed to load plugin: "+name, err) + } + + if err := manifest.Validate(); err != nil { + return nil, core.E("plugin.Loader.LoadPlugin", "invalid plugin manifest: "+name, err) + } + + return manifest, nil +} diff --git a/pkg/plugin/loader_test.go b/pkg/plugin/loader_test.go new file mode 100644 index 00000000..60baf54d --- /dev/null +++ b/pkg/plugin/loader_test.go @@ -0,0 +1,146 @@ +package plugin + +import ( + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestLoader_Discover_Good(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + + // Set up mock filesystem with two plugins + m.Dirs[baseDir] = true + m.Dirs[baseDir+"/plugin-a"] = true + m.Dirs[baseDir+"/plugin-b"] = true + + m.Files[baseDir+"/plugin-a/plugin.json"] = `{ + "name": "plugin-a", + "version": "1.0.0", + "description": "Plugin A", + "entrypoint": "main.go" + }` + + m.Files[baseDir+"/plugin-b/plugin.json"] = `{ + "name": "plugin-b", + "version": "2.0.0", + "description": "Plugin B", + "entrypoint": "run.sh" + }` + + loader := NewLoader(m, baseDir) + manifests, err := loader.Discover() + assert.NoError(t, err) + 
assert.Len(t, manifests, 2) + + names := make(map[string]bool) + for _, manifest := range manifests { + names[manifest.Name] = true + } + assert.True(t, names["plugin-a"]) + assert.True(t, names["plugin-b"]) +} + +func TestLoader_Discover_Good_SkipsInvalidPlugins(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + + m.Dirs[baseDir] = true + m.Dirs[baseDir+"/good-plugin"] = true + m.Dirs[baseDir+"/bad-plugin"] = true + + // Valid plugin + m.Files[baseDir+"/good-plugin/plugin.json"] = `{ + "name": "good-plugin", + "version": "1.0.0", + "entrypoint": "main.go" + }` + + // Invalid plugin (bad JSON) + m.Files[baseDir+"/bad-plugin/plugin.json"] = `{invalid}` + + loader := NewLoader(m, baseDir) + manifests, err := loader.Discover() + assert.NoError(t, err) + assert.Len(t, manifests, 1) + assert.Equal(t, "good-plugin", manifests[0].Name) +} + +func TestLoader_Discover_Good_SkipsFiles(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + + m.Dirs[baseDir] = true + m.Dirs[baseDir+"/real-plugin"] = true + m.Files[baseDir+"/registry.json"] = `{}` // A file, not a directory + + m.Files[baseDir+"/real-plugin/plugin.json"] = `{ + "name": "real-plugin", + "version": "1.0.0", + "entrypoint": "main.go" + }` + + loader := NewLoader(m, baseDir) + manifests, err := loader.Discover() + assert.NoError(t, err) + assert.Len(t, manifests, 1) + assert.Equal(t, "real-plugin", manifests[0].Name) +} + +func TestLoader_Discover_Good_EmptyDirectory(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + m.Dirs[baseDir] = true + + loader := NewLoader(m, baseDir) + manifests, err := loader.Discover() + assert.NoError(t, err) + assert.Empty(t, manifests) +} + +func TestLoader_LoadPlugin_Good(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + + m.Dirs[baseDir+"/my-plugin"] = true + m.Files[baseDir+"/my-plugin/plugin.json"] = `{ + "name": "my-plugin", + "version": "1.0.0", + 
"description": "My plugin", + "author": "Test", + "entrypoint": "main.go" + }` + + loader := NewLoader(m, baseDir) + manifest, err := loader.LoadPlugin("my-plugin") + assert.NoError(t, err) + assert.Equal(t, "my-plugin", manifest.Name) + assert.Equal(t, "1.0.0", manifest.Version) +} + +func TestLoader_LoadPlugin_Bad_NotFound(t *testing.T) { + m := io.NewMockMedium() + loader := NewLoader(m, "/home/user/.core/plugins") + + _, err := loader.LoadPlugin("nonexistent") + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to load plugin") +} + +func TestLoader_LoadPlugin_Bad_InvalidManifest(t *testing.T) { + m := io.NewMockMedium() + baseDir := "/home/user/.core/plugins" + + m.Dirs[baseDir+"/bad-plugin"] = true + m.Files[baseDir+"/bad-plugin/plugin.json"] = `{ + "name": "bad-plugin", + "version": "1.0.0" + }` // Missing entrypoint + + loader := NewLoader(m, baseDir) + _, err := loader.LoadPlugin("bad-plugin") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid plugin manifest") +} diff --git a/pkg/plugin/manifest.go b/pkg/plugin/manifest.go new file mode 100644 index 00000000..e8150266 --- /dev/null +++ b/pkg/plugin/manifest.go @@ -0,0 +1,50 @@ +package plugin + +import ( + "encoding/json" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +// Manifest represents a plugin.json manifest file. +// Each plugin repository must contain a plugin.json at its root. +type Manifest struct { + Name string `json:"name"` + Version string `json:"version"` + Description string `json:"description"` + Author string `json:"author"` + Entrypoint string `json:"entrypoint"` + Dependencies []string `json:"dependencies,omitempty"` + MinVersion string `json:"min_version,omitempty"` +} + +// LoadManifest reads and parses a plugin.json file from the given path. 
+func LoadManifest(m io.Medium, path string) (*Manifest, error) { + content, err := m.Read(path) + if err != nil { + return nil, core.E("plugin.LoadManifest", "failed to read manifest", err) + } + + var manifest Manifest + if err := json.Unmarshal([]byte(content), &manifest); err != nil { + return nil, core.E("plugin.LoadManifest", "failed to parse manifest JSON", err) + } + + return &manifest, nil +} + +// Validate checks the manifest for required fields. +// Returns an error if name, version, or entrypoint are missing. +func (m *Manifest) Validate() error { + if m.Name == "" { + return core.E("plugin.Manifest.Validate", "name is required", nil) + } + if m.Version == "" { + return core.E("plugin.Manifest.Validate", "version is required", nil) + } + if m.Entrypoint == "" { + return core.E("plugin.Manifest.Validate", "entrypoint is required", nil) + } + return nil +} diff --git a/pkg/plugin/manifest_test.go b/pkg/plugin/manifest_test.go new file mode 100644 index 00000000..0385d0a3 --- /dev/null +++ b/pkg/plugin/manifest_test.go @@ -0,0 +1,109 @@ +package plugin + +import ( + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestLoadManifest_Good(t *testing.T) { + m := io.NewMockMedium() + m.Files["plugins/test/plugin.json"] = `{ + "name": "test-plugin", + "version": "1.0.0", + "description": "A test plugin", + "author": "Test Author", + "entrypoint": "main.go", + "dependencies": ["dep-a", "dep-b"], + "min_version": "0.5.0" + }` + + manifest, err := LoadManifest(m, "plugins/test/plugin.json") + assert.NoError(t, err) + assert.Equal(t, "test-plugin", manifest.Name) + assert.Equal(t, "1.0.0", manifest.Version) + assert.Equal(t, "A test plugin", manifest.Description) + assert.Equal(t, "Test Author", manifest.Author) + assert.Equal(t, "main.go", manifest.Entrypoint) + assert.Equal(t, []string{"dep-a", "dep-b"}, manifest.Dependencies) + assert.Equal(t, "0.5.0", manifest.MinVersion) +} + +func 
TestLoadManifest_Good_MinimalFields(t *testing.T) { + m := io.NewMockMedium() + m.Files["plugin.json"] = `{ + "name": "minimal", + "version": "0.1.0", + "entrypoint": "run.sh" + }` + + manifest, err := LoadManifest(m, "plugin.json") + assert.NoError(t, err) + assert.Equal(t, "minimal", manifest.Name) + assert.Equal(t, "0.1.0", manifest.Version) + assert.Equal(t, "run.sh", manifest.Entrypoint) + assert.Empty(t, manifest.Dependencies) + assert.Empty(t, manifest.MinVersion) +} + +func TestLoadManifest_Bad_FileNotFound(t *testing.T) { + m := io.NewMockMedium() + + _, err := LoadManifest(m, "nonexistent/plugin.json") + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to read manifest") +} + +func TestLoadManifest_Bad_InvalidJSON(t *testing.T) { + m := io.NewMockMedium() + m.Files["plugin.json"] = `{invalid json}` + + _, err := LoadManifest(m, "plugin.json") + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse manifest JSON") +} + +func TestManifest_Validate_Good(t *testing.T) { + manifest := &Manifest{ + Name: "test-plugin", + Version: "1.0.0", + Entrypoint: "main.go", + } + + err := manifest.Validate() + assert.NoError(t, err) +} + +func TestManifest_Validate_Bad_MissingName(t *testing.T) { + manifest := &Manifest{ + Version: "1.0.0", + Entrypoint: "main.go", + } + + err := manifest.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "name is required") +} + +func TestManifest_Validate_Bad_MissingVersion(t *testing.T) { + manifest := &Manifest{ + Name: "test-plugin", + Entrypoint: "main.go", + } + + err := manifest.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "version is required") +} + +func TestManifest_Validate_Bad_MissingEntrypoint(t *testing.T) { + manifest := &Manifest{ + Name: "test-plugin", + Version: "1.0.0", + } + + err := manifest.Validate() + assert.Error(t, err) + assert.Contains(t, err.Error(), "entrypoint is required") +} diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go new 
file mode 100644 index 00000000..9f060ec1 --- /dev/null +++ b/pkg/plugin/plugin.go @@ -0,0 +1,54 @@ +// Package plugin provides a plugin system for the core CLI. +// +// Plugins extend the CLI with additional commands and functionality. +// They are distributed as GitHub repositories and managed via a local registry. +// +// Plugin lifecycle: +// - Install: Download from GitHub, validate manifest, register +// - Init: Parse manifest and prepare plugin +// - Start: Activate plugin functionality +// - Stop: Deactivate and clean up +// - Remove: Unregister and delete files +package plugin + +import "context" + +// Plugin is the interface that all plugins must implement. +type Plugin interface { + // Name returns the plugin's unique identifier. + Name() string + + // Version returns the plugin's semantic version. + Version() string + + // Init prepares the plugin for use. + Init(ctx context.Context) error + + // Start activates the plugin. + Start(ctx context.Context) error + + // Stop deactivates the plugin and releases resources. + Stop(ctx context.Context) error +} + +// BasePlugin provides a default implementation of Plugin. +// Embed this in concrete plugin types to inherit default behaviour. +type BasePlugin struct { + PluginName string + PluginVersion string +} + +// Name returns the plugin name. +func (p *BasePlugin) Name() string { return p.PluginName } + +// Version returns the plugin version. +func (p *BasePlugin) Version() string { return p.PluginVersion } + +// Init is a no-op default implementation. +func (p *BasePlugin) Init(_ context.Context) error { return nil } + +// Start is a no-op default implementation. +func (p *BasePlugin) Start(_ context.Context) error { return nil } + +// Stop is a no-op default implementation. 
+func (p *BasePlugin) Stop(_ context.Context) error { return nil } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go new file mode 100644 index 00000000..b5850e66 --- /dev/null +++ b/pkg/plugin/plugin_test.go @@ -0,0 +1,39 @@ +package plugin + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBasePlugin_Good(t *testing.T) { + p := &BasePlugin{ + PluginName: "test-plugin", + PluginVersion: "1.0.0", + } + + assert.Equal(t, "test-plugin", p.Name()) + assert.Equal(t, "1.0.0", p.Version()) + + ctx := context.Background() + assert.NoError(t, p.Init(ctx)) + assert.NoError(t, p.Start(ctx)) + assert.NoError(t, p.Stop(ctx)) +} + +func TestBasePlugin_Good_EmptyFields(t *testing.T) { + p := &BasePlugin{} + + assert.Equal(t, "", p.Name()) + assert.Equal(t, "", p.Version()) + + ctx := context.Background() + assert.NoError(t, p.Init(ctx)) + assert.NoError(t, p.Start(ctx)) + assert.NoError(t, p.Stop(ctx)) +} + +func TestBasePlugin_Good_ImplementsPlugin(t *testing.T) { + var _ Plugin = &BasePlugin{} +} diff --git a/pkg/plugin/registry.go b/pkg/plugin/registry.go new file mode 100644 index 00000000..250d8e27 --- /dev/null +++ b/pkg/plugin/registry.go @@ -0,0 +1,117 @@ +package plugin + +import ( + "encoding/json" + "path/filepath" + "sort" + + core "github.com/host-uk/core/pkg/framework/core" + "github.com/host-uk/core/pkg/io" +) + +const registryFilename = "registry.json" + +// Registry manages installed plugins. +// Plugin metadata is stored in a registry.json file under the base path. +type Registry struct { + medium io.Medium + basePath string // e.g., ~/.core/plugins/ + plugins map[string]*PluginConfig +} + +// NewRegistry creates a new plugin registry. +func NewRegistry(m io.Medium, basePath string) *Registry { + return &Registry{ + medium: m, + basePath: basePath, + plugins: make(map[string]*PluginConfig), + } +} + +// List returns all installed plugins sorted by name. 
+func (r *Registry) List() []*PluginConfig { + result := make([]*PluginConfig, 0, len(r.plugins)) + for _, cfg := range r.plugins { + result = append(result, cfg) + } + sort.Slice(result, func(i, j int) bool { + return result[i].Name < result[j].Name + }) + return result +} + +// Get returns a plugin by name. +// The second return value indicates whether the plugin was found. +func (r *Registry) Get(name string) (*PluginConfig, bool) { + cfg, ok := r.plugins[name] + return cfg, ok +} + +// Add registers a plugin in the registry. +func (r *Registry) Add(cfg *PluginConfig) error { + if cfg.Name == "" { + return core.E("plugin.Registry.Add", "plugin name is required", nil) + } + r.plugins[cfg.Name] = cfg + return nil +} + +// Remove unregisters a plugin from the registry. +func (r *Registry) Remove(name string) error { + if _, ok := r.plugins[name]; !ok { + return core.E("plugin.Registry.Remove", "plugin not found: "+name, nil) + } + delete(r.plugins, name) + return nil +} + +// registryPath returns the full path to the registry file. +func (r *Registry) registryPath() string { + return filepath.Join(r.basePath, registryFilename) +} + +// Load reads the plugin registry from disk. +// If the registry file does not exist, the registry starts empty. +func (r *Registry) Load() error { + path := r.registryPath() + + if !r.medium.IsFile(path) { + // No registry file yet; start with empty registry + r.plugins = make(map[string]*PluginConfig) + return nil + } + + content, err := r.medium.Read(path) + if err != nil { + return core.E("plugin.Registry.Load", "failed to read registry", err) + } + + var plugins map[string]*PluginConfig + if err := json.Unmarshal([]byte(content), &plugins); err != nil { + return core.E("plugin.Registry.Load", "failed to parse registry", err) + } + + if plugins == nil { + plugins = make(map[string]*PluginConfig) + } + r.plugins = plugins + return nil +} + +// Save writes the plugin registry to disk. 
+func (r *Registry) Save() error { + if err := r.medium.EnsureDir(r.basePath); err != nil { + return core.E("plugin.Registry.Save", "failed to create plugin directory", err) + } + + data, err := json.MarshalIndent(r.plugins, "", " ") + if err != nil { + return core.E("plugin.Registry.Save", "failed to marshal registry", err) + } + + if err := r.medium.Write(r.registryPath(), string(data)); err != nil { + return core.E("plugin.Registry.Save", "failed to write registry", err) + } + + return nil +} diff --git a/pkg/plugin/registry_test.go b/pkg/plugin/registry_test.go new file mode 100644 index 00000000..b5b713e1 --- /dev/null +++ b/pkg/plugin/registry_test.go @@ -0,0 +1,136 @@ +package plugin + +import ( + "testing" + + "github.com/host-uk/core/pkg/io" + "github.com/stretchr/testify/assert" +) + +func TestRegistry_Add_Good(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + err := reg.Add(&PluginConfig{ + Name: "my-plugin", + Version: "1.0.0", + Source: "github:org/my-plugin", + Enabled: true, + }) + assert.NoError(t, err) + + list := reg.List() + assert.Len(t, list, 1) + assert.Equal(t, "my-plugin", list[0].Name) + assert.Equal(t, "1.0.0", list[0].Version) +} + +func TestRegistry_Add_Bad_EmptyName(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + err := reg.Add(&PluginConfig{ + Version: "1.0.0", + }) + assert.Error(t, err) + assert.Contains(t, err.Error(), "plugin name is required") +} + +func TestRegistry_Remove_Good(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + _ = reg.Add(&PluginConfig{ + Name: "my-plugin", + Version: "1.0.0", + }) + + err := reg.Remove("my-plugin") + assert.NoError(t, err) + assert.Empty(t, reg.List()) +} + +func TestRegistry_Get_Good(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + _ = reg.Add(&PluginConfig{ + Name: "test-plugin", + Version: "2.0.0", + 
Source: "github:org/test-plugin", + }) + + cfg, ok := reg.Get("test-plugin") + assert.True(t, ok) + assert.Equal(t, "test-plugin", cfg.Name) + assert.Equal(t, "2.0.0", cfg.Version) +} + +func TestRegistry_Get_Bad_NotFound(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + cfg, ok := reg.Get("nonexistent") + assert.False(t, ok) + assert.Nil(t, cfg) +} + +func TestRegistry_Remove_Bad_NotFound(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + err := reg.Remove("nonexistent") + assert.Error(t, err) + assert.Contains(t, err.Error(), "plugin not found") +} + +func TestRegistry_SaveLoad_Good(t *testing.T) { + m := io.NewMockMedium() + basePath := "/home/user/.core/plugins" + reg := NewRegistry(m, basePath) + + _ = reg.Add(&PluginConfig{ + Name: "plugin-a", + Version: "1.0.0", + Source: "github:org/plugin-a", + Enabled: true, + InstalledAt: "2025-01-01T00:00:00Z", + }) + _ = reg.Add(&PluginConfig{ + Name: "plugin-b", + Version: "2.0.0", + Source: "github:org/plugin-b", + Enabled: false, + InstalledAt: "2025-01-02T00:00:00Z", + }) + + err := reg.Save() + assert.NoError(t, err) + + // Load into a fresh registry + reg2 := NewRegistry(m, basePath) + err = reg2.Load() + assert.NoError(t, err) + + list := reg2.List() + assert.Len(t, list, 2) + + a, ok := reg2.Get("plugin-a") + assert.True(t, ok) + assert.Equal(t, "1.0.0", a.Version) + assert.True(t, a.Enabled) + + b, ok := reg2.Get("plugin-b") + assert.True(t, ok) + assert.Equal(t, "2.0.0", b.Version) + assert.False(t, b.Enabled) +} + +func TestRegistry_Load_Good_EmptyWhenNoFile(t *testing.T) { + m := io.NewMockMedium() + reg := NewRegistry(m, "/home/user/.core/plugins") + + err := reg.Load() + assert.NoError(t, err) + assert.Empty(t, reg.List()) +} diff --git a/pkg/process/buffer_test.go b/pkg/process/buffer_test.go index ee07ebc5..bbd4f1cf 100644 --- a/pkg/process/buffer_test.go +++ b/pkg/process/buffer_test.go @@ -20,10 +20,10 @@ 
func TestRingBuffer(t *testing.T) { t.Run("overflow wraps around", func(t *testing.T) { rb := NewRingBuffer(5) - rb.Write([]byte("hello")) + _, _ = rb.Write([]byte("hello")) assert.Equal(t, "hello", rb.String()) - rb.Write([]byte("world")) + _, _ = rb.Write([]byte("world")) // Should contain "world" (overwrote "hello") assert.Equal(t, 5, rb.Len()) assert.Equal(t, "world", rb.String()) @@ -32,8 +32,8 @@ func TestRingBuffer(t *testing.T) { t.Run("partial overflow", func(t *testing.T) { rb := NewRingBuffer(10) - rb.Write([]byte("hello")) - rb.Write([]byte("worldx")) + _, _ = rb.Write([]byte("hello")) + _, _ = rb.Write([]byte("worldx")) // Should contain "lloworldx" (11 chars, buffer is 10) assert.Equal(t, 10, rb.Len()) }) @@ -47,7 +47,7 @@ func TestRingBuffer(t *testing.T) { t.Run("reset", func(t *testing.T) { rb := NewRingBuffer(10) - rb.Write([]byte("hello")) + _, _ = rb.Write([]byte("hello")) rb.Reset() assert.Equal(t, "", rb.String()) assert.Equal(t, 0, rb.Len()) @@ -60,7 +60,7 @@ func TestRingBuffer(t *testing.T) { t.Run("bytes returns copy", func(t *testing.T) { rb := NewRingBuffer(10) - rb.Write([]byte("hello")) + _, _ = rb.Write([]byte("hello")) bytes := rb.Bytes() assert.Equal(t, []byte("hello"), bytes) diff --git a/pkg/process/service.go b/pkg/process/service.go index ab5683b9..d1de9585 100644 --- a/pkg/process/service.go +++ b/pkg/process/service.go @@ -159,7 +159,7 @@ func (s *Service) StartWithOptions(ctx context.Context, opts RunOptions) (*Proce s.mu.Unlock() // Broadcast start - s.Core().ACTION(ActionProcessStarted{ + _ = s.Core().ACTION(ActionProcessStarted{ ID: id, Command: opts.Command, Args: opts.Args, @@ -214,7 +214,7 @@ func (s *Service) StartWithOptions(ctx context.Context, opts RunOptions) (*Proce if status == StatusFailed { exitErr = err } - s.Core().ACTION(ActionProcessExited{ + _ = s.Core().ACTION(ActionProcessExited{ ID: id, ExitCode: exitCode, Duration: duration, @@ -236,11 +236,11 @@ func (s *Service) streamOutput(proc *Process, r 
io.Reader, stream Stream) { // Write to ring buffer if proc.output != nil { - proc.output.Write([]byte(line + "\n")) + _, _ = proc.output.Write([]byte(line + "\n")) } // Broadcast output - s.Core().ACTION(ActionProcessOutput{ + _ = s.Core().ACTION(ActionProcessOutput{ ID: proc.ID, Line: line, Stream: stream, @@ -297,7 +297,7 @@ func (s *Service) Kill(id string) error { return err } - s.Core().ACTION(ActionProcessKilled{ + _ = s.Core().ACTION(ActionProcessKilled{ ID: id, Signal: "SIGKILL", }) diff --git a/pkg/release/publishers/aur.go b/pkg/release/publishers/aur.go index 3dc7016e..00ad86ca 100644 --- a/pkg/release/publishers/aur.go +++ b/pkg/release/publishers/aur.go @@ -221,7 +221,7 @@ func (p *AURPublisher) pushToAUR(ctx context.Context, data aurTemplateData, pkgb if err != nil { return fmt.Errorf("aur.Publish: failed to create temp directory: %w", err) } - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Clone existing AUR repo (or initialize new one) fmt.Printf("Cloning AUR package %s-bin...\n", data.PackageName) diff --git a/pkg/release/publishers/aur_test.go b/pkg/release/publishers/aur_test.go index cf0b3290..a49b68e1 100644 --- a/pkg/release/publishers/aur_test.go +++ b/pkg/release/publishers/aur_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "testing" @@ -171,7 +172,7 @@ func TestAURPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -202,7 +203,7 @@ func TestAURPublisher_Publish_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "aur"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "maintainer is required") }) diff --git a/pkg/release/publishers/chocolatey_test.go 
b/pkg/release/publishers/chocolatey_test.go index fe5ea63d..3da669b1 100644 --- a/pkg/release/publishers/chocolatey_test.go +++ b/pkg/release/publishers/chocolatey_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "testing" @@ -191,7 +192,7 @@ func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -229,7 +230,7 @@ func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -247,17 +248,17 @@ func TestChocolateyPublisher_ExecutePublish_Bad(t *testing.T) { t.Run("fails when CHOCOLATEY_API_KEY not set for push", func(t *testing.T) { // Ensure CHOCOLATEY_API_KEY is not set oldKey := os.Getenv("CHOCOLATEY_API_KEY") - os.Unsetenv("CHOCOLATEY_API_KEY") + _ = os.Unsetenv("CHOCOLATEY_API_KEY") defer func() { if oldKey != "" { - os.Setenv("CHOCOLATEY_API_KEY", oldKey) + _ = os.Setenv("CHOCOLATEY_API_KEY", oldKey) } }() // Create a temp directory for the test tmpDir, err := os.MkdirTemp("", "choco-test-*") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() data := chocolateyTemplateData{ PackageName: "testpkg", @@ -269,7 +270,7 @@ func TestChocolateyPublisher_ExecutePublish_Bad(t *testing.T) { Checksums: ChecksumMap{}, } - err = p.pushToChocolatey(nil, tmpDir, data) + err = p.pushToChocolatey(context.TODO(), tmpDir, data) assert.Error(t, err) assert.Contains(t, err.Error(), "CHOCOLATEY_API_KEY environment variable is required") }) diff --git a/pkg/release/publishers/docker_test.go b/pkg/release/publishers/docker_test.go index f333b075..a36a5517 100644 --- a/pkg/release/publishers/docker_test.go +++ b/pkg/release/publishers/docker_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "path/filepath" "testing" 
@@ -246,7 +247,7 @@ func TestDockerPublisher_Publish_Bad(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "Dockerfile not found") }) @@ -293,7 +294,7 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -338,7 +339,7 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -371,7 +372,7 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -569,7 +570,7 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) { // Create temp directory with Dockerfile tmpDir, err := os.MkdirTemp("", "docker-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() dockerfilePath := filepath.Join(tmpDir, "Dockerfile") err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) @@ -586,9 +587,9 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "docker"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -602,7 +603,7 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) { // Create temp directory with custom Dockerfile tmpDir, err := os.MkdirTemp("", "docker-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ 
= os.RemoveAll(tmpDir) }() customDir := filepath.Join(tmpDir, "docker") err = os.MkdirAll(customDir, 0755) @@ -628,9 +629,9 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -656,7 +657,7 @@ func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "docker"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "Dockerfile not found") }) @@ -673,7 +674,7 @@ func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "docker"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "docker CLI not found") }) @@ -701,7 +702,7 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("dry run succeeds with all config options", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "docker-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() dockerfilePath := filepath.Join(tmpDir, "Dockerfile") err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) @@ -727,9 +728,9 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = 
oldStdout @@ -744,7 +745,7 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("dry run with nil relCfg uses extended image", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "docker-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() dockerfilePath := filepath.Join(tmpDir, "Dockerfile") err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644) @@ -765,9 +766,9 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { }, } - err = p.Publish(nil, release, pubCfg, nil, true) // nil relCfg + err = p.Publish(context.TODO(), release, pubCfg, nil, true) // nil relCfg - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -780,7 +781,7 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("fails with non-existent Dockerfile in non-dry-run", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "docker-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Don't create a Dockerfile release := &Release{ @@ -790,7 +791,7 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "docker"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, false) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "Dockerfile not found") }) diff --git a/pkg/release/publishers/github_test.go b/pkg/release/publishers/github_test.go index 6cc4e3ec..78af460f 100644 --- a/pkg/release/publishers/github_test.go +++ b/pkg/release/publishers/github_test.go @@ -235,7 +235,7 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -273,7 +273,7 @@ func 
TestGitHubPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -304,7 +304,7 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -336,9 +336,9 @@ func TestGitHubPublisher_Publish_Good(t *testing.T) { relCfg := &mockReleaseConfig{repository: "custom/repo"} // Dry run should succeed without needing gh CLI - err := p.Publish(nil, release, pubCfg, relCfg, true) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -377,7 +377,7 @@ func TestGitHubPublisher_Publish_Bad(t *testing.T) { // Create a temp directory that is NOT a git repo tmpDir, err := os.MkdirTemp("", "github-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() release := &Release{ Version: "v1.0.0", @@ -400,7 +400,7 @@ func TestDetectRepository_Good(t *testing.T) { // Create a temp git repo tmpDir, err := os.MkdirTemp("", "git-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Initialize git repo and set remote cmd := exec.Command("git", "init") @@ -419,7 +419,7 @@ func TestDetectRepository_Good(t *testing.T) { t.Run("detects repository from HTTPS remote", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "git-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() cmd := exec.Command("git", "init") cmd.Dir = tmpDir @@ -439,7 +439,7 @@ func TestDetectRepository_Bad(t *testing.T) { t.Run("fails when not a git repository", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "no-git-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + 
defer func() { _ = os.RemoveAll(tmpDir) }() _, err = detectRepository(tmpDir) assert.Error(t, err) @@ -454,7 +454,7 @@ func TestDetectRepository_Bad(t *testing.T) { t.Run("fails when remote is not GitHub", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "git-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() cmd := exec.Command("git", "init") cmd.Dir = tmpDir diff --git a/pkg/release/publishers/homebrew.go b/pkg/release/publishers/homebrew.go index 4d92261d..00b9abb0 100644 --- a/pkg/release/publishers/homebrew.go +++ b/pkg/release/publishers/homebrew.go @@ -242,7 +242,7 @@ func (p *HomebrewPublisher) commitToTap(ctx context.Context, tap string, data ho if err != nil { return fmt.Errorf("homebrew.Publish: failed to create temp directory: %w", err) } - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Clone the tap fmt.Printf("Cloning tap %s...\n", tap) diff --git a/pkg/release/publishers/homebrew_test.go b/pkg/release/publishers/homebrew_test.go index e77011e3..d9e0c112 100644 --- a/pkg/release/publishers/homebrew_test.go +++ b/pkg/release/publishers/homebrew_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "testing" @@ -235,7 +236,7 @@ func TestHomebrewPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -272,7 +273,7 @@ func TestHomebrewPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -301,7 +302,7 @@ func TestHomebrewPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -323,7 +324,7 @@ func TestHomebrewPublisher_Publish_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "homebrew"} 
relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "tap is required") }) diff --git a/pkg/release/publishers/linuxkit_test.go b/pkg/release/publishers/linuxkit_test.go index 074ec920..361d1fa3 100644 --- a/pkg/release/publishers/linuxkit_test.go +++ b/pkg/release/publishers/linuxkit_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "os/exec" "path/filepath" @@ -200,7 +201,7 @@ func TestLinuxKitPublisher_Publish_Bad(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "config file not found") }) @@ -217,7 +218,7 @@ func TestLinuxKitPublisher_Publish_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "linuxkit CLI not found") }) @@ -230,7 +231,7 @@ func TestLinuxKitPublisher_Publish_Bad(t *testing.T) { // Create temp directory that is NOT a git repo tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create a config file configPath := filepath.Join(tmpDir, "config.yml") @@ -249,7 +250,7 @@ func TestLinuxKitPublisher_Publish_Bad(t *testing.T) { } relCfg := &mockReleaseConfig{repository: ""} // Empty repository - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) assert.Error(t, err) assert.Contains(t, err.Error(), "could not determine repository") }) @@ -277,7 +278,7 @@ 
func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("succeeds with dry run and valid config", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create config directory and file configDir := filepath.Join(tmpDir, ".core", "linuxkit") @@ -299,9 +300,9 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -314,7 +315,7 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("fails with missing config file", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() release := &Release{ Version: "v1.0.0", @@ -323,7 +324,7 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, false) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "config file not found") }) @@ -331,7 +332,7 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("uses relCfg repository", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() configDir := filepath.Join(tmpDir, ".core", "linuxkit") err = os.MkdirAll(configDir, 0755) @@ -352,9 +353,9 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg 
:= &mockReleaseConfig{repository: "custom-owner/custom-repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -367,7 +368,7 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { t.Run("detects repository when not provided", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create config file configDir := filepath.Join(tmpDir, ".core", "linuxkit") @@ -398,9 +399,9 @@ func TestLinuxKitPublisher_Publish_WithCLI_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg := &mockReleaseConfig{repository: ""} // Empty to trigger detection - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -421,7 +422,7 @@ func TestLinuxKitPublisher_Publish_NilRelCfg_Good(t *testing.T) { t.Run("handles nil relCfg by detecting repo", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create config file configDir := filepath.Join(tmpDir, ".core", "linuxkit") @@ -451,9 +452,9 @@ func TestLinuxKitPublisher_Publish_NilRelCfg_Good(t *testing.T) { } pubCfg := PublisherConfig{Type: "linuxkit"} - err = p.Publish(nil, release, pubCfg, nil, true) // nil relCfg + err = p.Publish(context.TODO(), release, pubCfg, nil, true) // nil relCfg - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -498,7 +499,7 @@ func TestLinuxKitPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, 
_ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -539,7 +540,7 @@ func TestLinuxKitPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -568,7 +569,7 @@ func TestLinuxKitPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(release, cfg, "owner/repo") - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -805,7 +806,7 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { // Create temp directory with config file tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() configDir := filepath.Join(tmpDir, ".core", "linuxkit") err = os.MkdirAll(configDir, 0755) @@ -826,9 +827,9 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { pubCfg := PublisherConfig{Type: "linuxkit"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -841,7 +842,7 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { t.Run("dry run uses custom config path", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() customConfigPath := filepath.Join(tmpDir, "custom-config.yml") err = os.WriteFile(customConfigPath, []byte("kernel:\n image: custom\n"), 0644) @@ -863,9 +864,9 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer 
_, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -878,7 +879,7 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { t.Run("dry run with multiple formats and platforms", func(t *testing.T) { tmpDir, err := os.MkdirTemp("", "linuxkit-test") require.NoError(t, err) - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() configPath := filepath.Join(tmpDir, "config.yml") err = os.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0644) @@ -902,9 +903,9 @@ func TestLinuxKitPublisher_Publish_DryRun_Good(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err = p.Publish(nil, release, pubCfg, relCfg, true) + err = p.Publish(context.TODO(), release, pubCfg, relCfg, true) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout diff --git a/pkg/release/publishers/npm.go b/pkg/release/publishers/npm.go index 959dd1da..314b8e02 100644 --- a/pkg/release/publishers/npm.go +++ b/pkg/release/publishers/npm.go @@ -168,7 +168,7 @@ func (p *NpmPublisher) executePublish(ctx context.Context, data npmTemplateData, if err != nil { return fmt.Errorf("npm.Publish: failed to create temp directory: %w", err) } - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() // Create bin directory binDir := filepath.Join(tmpDir, "bin") diff --git a/pkg/release/publishers/npm_test.go b/pkg/release/publishers/npm_test.go index b726ee48..29ffbcf2 100644 --- a/pkg/release/publishers/npm_test.go +++ b/pkg/release/publishers/npm_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + "context" "os" "testing" @@ -165,7 +166,7 @@ func TestNpmPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -203,7 +204,7 @@ func TestNpmPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = 
buf.ReadFrom(r) os.Stdout = oldStdout @@ -227,7 +228,7 @@ func TestNpmPublisher_Publish_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "npm"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "package name is required") }) @@ -235,10 +236,10 @@ func TestNpmPublisher_Publish_Bad(t *testing.T) { t.Run("fails when NPM_TOKEN not set in non-dry-run", func(t *testing.T) { // Ensure NPM_TOKEN is not set oldToken := os.Getenv("NPM_TOKEN") - os.Unsetenv("NPM_TOKEN") + _ = os.Unsetenv("NPM_TOKEN") defer func() { if oldToken != "" { - os.Setenv("NPM_TOKEN", oldToken) + _ = os.Setenv("NPM_TOKEN", oldToken) } }() @@ -254,7 +255,7 @@ func TestNpmPublisher_Publish_Bad(t *testing.T) { } relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "NPM_TOKEN environment variable is required") }) diff --git a/pkg/release/publishers/scoop.go b/pkg/release/publishers/scoop.go index 25e7ee1d..190fa78a 100644 --- a/pkg/release/publishers/scoop.go +++ b/pkg/release/publishers/scoop.go @@ -196,7 +196,7 @@ func (p *ScoopPublisher) commitToBucket(ctx context.Context, bucket string, data if err != nil { return fmt.Errorf("scoop.Publish: failed to create temp directory: %w", err) } - defer os.RemoveAll(tmpDir) + defer func() { _ = os.RemoveAll(tmpDir) }() fmt.Printf("Cloning bucket %s...\n", bucket) cmd := exec.CommandContext(ctx, "gh", "repo", "clone", bucket, tmpDir, "--", "--depth=1") diff --git a/pkg/release/publishers/scoop_test.go b/pkg/release/publishers/scoop_test.go index 5c8d6b41..ef84b20d 100644 --- a/pkg/release/publishers/scoop_test.go +++ b/pkg/release/publishers/scoop_test.go @@ -2,6 +2,7 @@ package publishers import ( "bytes" + 
"context" "os" "testing" @@ -172,7 +173,7 @@ func TestScoopPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -210,7 +211,7 @@ func TestScoopPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -239,7 +240,7 @@ func TestScoopPublisher_DryRunPublish_Good(t *testing.T) { err := p.dryRunPublish(data, cfg) - w.Close() + _ = w.Close() var buf bytes.Buffer _, _ = buf.ReadFrom(r) os.Stdout = oldStdout @@ -261,7 +262,7 @@ func TestScoopPublisher_Publish_Bad(t *testing.T) { pubCfg := PublisherConfig{Type: "scoop"} relCfg := &mockReleaseConfig{repository: "owner/repo"} - err := p.Publish(nil, release, pubCfg, relCfg, false) + err := p.Publish(context.TODO(), release, pubCfg, relCfg, false) assert.Error(t, err) assert.Contains(t, err.Error(), "bucket is required") }) diff --git a/pkg/release/release.go b/pkg/release/release.go index 46f9d412..97328fa7 100644 --- a/pkg/release/release.go +++ b/pkg/release/release.go @@ -308,7 +308,7 @@ func getBuilder(projectType build.ProjectType) (build.Builder, error) { case build.ProjectTypeGo: return builders.NewGoBuilder(), nil case build.ProjectTypeNode: - return nil, fmt.Errorf("Node.js builder not yet implemented") + return nil, fmt.Errorf("node.js builder not yet implemented") case build.ProjectTypePHP: return nil, fmt.Errorf("PHP builder not yet implemented") default: diff --git a/pkg/release/release_test.go b/pkg/release/release_test.go index 98cfe79d..4eb3ac5c 100644 --- a/pkg/release/release_test.go +++ b/pkg/release/release_test.go @@ -175,7 +175,7 @@ func TestGetBuilder_Bad(t *testing.T) { t.Run("returns error for Node project type", func(t *testing.T) { _, err := getBuilder(build.ProjectTypeNode) assert.Error(t, err) - assert.Contains(t, err.Error(), "Node.js builder not yet 
implemented") + assert.Contains(t, err.Error(), "node.js builder not yet implemented") }) t.Run("returns error for PHP project type", func(t *testing.T) {