Compare commits
209 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
364cfb7c90 | ||
|
|
489608aaa4 | ||
|
|
2b9369f6ab | ||
|
|
8c83da1f4f | ||
|
|
af96a316c5 | ||
|
|
c4ffa4d40d | ||
|
|
4921058bf3 | ||
|
|
bb46675131 | ||
|
|
5e592bf0e0 | ||
|
|
2af4cfbf32 | ||
|
|
311eb96410 | ||
|
|
4b707a11f2 | ||
|
|
0fca52d975 | ||
|
|
5bc2b6dc96 | ||
|
|
3d7a92a3f3 | ||
|
|
8f3f7f93f7 | ||
|
|
680aa259de | ||
|
|
7705adcb74 | ||
|
|
73b669cbaa | ||
|
|
597e7a678a | ||
|
|
ce1de925be | ||
|
|
4ab4cc77c5 | ||
|
|
953d0a3c7f | ||
|
|
5a16139e7a | ||
|
|
555ac20eb9 | ||
|
|
79b32ad71d | ||
|
|
07e4f26392 | ||
|
|
57c47a7a62 | ||
|
|
50f1bb1894 | ||
|
|
3bb0b3cbda | ||
|
|
181a19a2a3 | ||
|
|
0faab64ce8 | ||
|
|
a8227776d7 | ||
|
|
85a10830d4 | ||
|
|
20cdc36597 | ||
|
|
e3e7159efa | ||
|
|
00039368f4 | ||
|
|
1527e21aac | ||
|
|
e8983ef648 | ||
|
|
bfc88dc00c | ||
|
|
355be6ee31 | ||
|
|
dbe7c69378 | ||
|
|
83b5f12ce1 | ||
|
|
be4324bbb1 | ||
|
|
fdb684ccf0 | ||
|
|
ef72e09c1e | ||
|
|
29a73bae5a | ||
|
|
0e49d459b4 | ||
|
|
e44d9ccf04 | ||
|
|
7a8caa6027 | ||
|
|
e0a7741765 | ||
|
|
5e6974a567 | ||
|
|
17666e61c6 | ||
|
|
4b357d5dd7 | ||
|
|
4ba442d6c0 | ||
|
|
3045415f7a | ||
|
|
a5670721c9 | ||
|
|
e1b34de166 | ||
|
|
7b937eebf9 | ||
|
|
5690b9f15b | ||
|
|
e7c991ed36 | ||
|
|
7cf8b504f0 | ||
|
|
59e9d9168b | ||
|
|
33fb9bb5a5 | ||
|
|
e01c88e39b | ||
|
|
a2e3c3d6a2 | ||
|
|
fb518dca4c | ||
|
|
844f729a01 | ||
|
|
9a7ecedb81 | ||
|
|
fe0f528581 | ||
|
|
8bf1fe774c | ||
|
|
c7e4687dfb | ||
|
|
a470c4054f | ||
|
|
ed7b1ef083 | ||
|
|
077bdf3342 | ||
|
|
92067b2e08 | ||
|
|
c40ca4666a | ||
|
|
6551e6a91d | ||
|
|
892901dee2 | ||
|
|
06e00c7d13 | ||
|
|
112017d5f3 | ||
|
|
495500b18d | ||
|
|
ad8ac01837 | ||
|
|
cfa6efe69c | ||
|
|
5b2422e2ca | ||
|
|
9284a3f271 | ||
|
|
25540b87e0 | ||
|
|
14069f56fd | ||
|
|
c8c2abaf88 | ||
|
|
ba8bcc89a3 | ||
|
|
d21867ef61 | ||
|
|
48e5a6e859 | ||
|
|
ca49b10f17 | ||
|
|
a33a060b81 | ||
|
|
c648e981b4 | ||
|
|
9b6c468d94 | ||
|
|
d6e073b3f8 | ||
|
|
f809d07079 | ||
|
|
891b06eac8 | ||
|
|
e882b3a6b1 | ||
|
|
cb3ae60231 | ||
|
|
3d7f7d6da1 | ||
|
|
2ea501c5bc | ||
|
|
e8fa26b5d2 | ||
|
|
fef151891c | ||
|
|
479612fdf7 | ||
|
|
489e118779 | ||
|
|
39adee8bbb | ||
|
|
0625288a73 | ||
|
|
650ab9fd3a | ||
|
|
1b572ed5e5 | ||
|
|
6745d87741 | ||
|
|
eeb87e0444 | ||
|
|
20f6f8e538 | ||
|
|
3bbb45caac | ||
|
|
231d43fda1 | ||
|
|
7da8d7e843 | ||
|
|
303ec418e9 | ||
|
|
fed5df7e7d | ||
|
|
9e912fe8b9 | ||
|
|
195f4bfef1 | ||
|
|
f5a8062ffd | ||
|
|
5c9db01527 | ||
|
|
b8053c3225 | ||
|
|
fd04157c2d | ||
|
|
95b50bdefa | ||
|
|
7ccab3d364 | ||
|
|
da26bfc695 | ||
|
|
a39ace6c43 | ||
|
|
f7e994382c | ||
|
|
4e664e62b5 | ||
|
|
ba6ac633ff | ||
|
|
599a6ecff8 | ||
|
|
8091a1186f | ||
|
|
d7a5d141dd | ||
|
|
a79201c004 | ||
|
|
5a7973d086 | ||
|
|
b59e6e35ff | ||
|
|
845565c502 | ||
|
|
47dcc3dc93 | ||
|
|
2f41d65254 | ||
|
|
3f32f3e783 | ||
|
|
98b742d4b5 | ||
|
|
28e603aff5 | ||
|
|
f3111433e5 | ||
|
|
25f6faea12 | ||
|
|
4f3bd86086 | ||
|
|
15394260fc | ||
|
|
578fb9f968 | ||
|
|
cb5ebefc3d | ||
|
|
ea051a3c25 | ||
|
|
94d8ea7e55 | ||
|
|
ca2b00323b | ||
|
|
a64e15f50d | ||
|
|
05ac67c596 | ||
|
|
6f18da95a9 | ||
|
|
d77021caa7 | ||
|
|
520312b708 | ||
|
|
ac64a6ec7a | ||
|
|
6792c37a9d | ||
|
|
9e09f022a9 | ||
|
|
7236a81419 | ||
|
|
15f675f27d | ||
|
|
ebebfd2155 | ||
|
|
e66220f493 | ||
|
|
32a64aa00b | ||
|
|
9cf66f0656 | ||
|
|
ade1e3d485 | ||
|
|
2fba3cdf2c | ||
|
|
14d3500e1a | ||
|
|
9e9a1a7da7 | ||
|
|
0e8dc42f0f | ||
|
|
f1ad2082c0 | ||
|
|
d203a7ff78 | ||
|
|
0beac61805 | ||
|
|
046d7f88a8 | ||
|
|
b565752f9f | ||
|
|
91037a9443 | ||
|
|
83a50b684d | ||
|
|
72bb94e355 | ||
|
|
efdc252462 | ||
|
|
f539643f8c | ||
|
|
fd566a40fb | ||
|
|
0c39bc99f3 | ||
|
|
854422d369 | ||
|
|
1265dc158b | ||
|
|
7f1da7766a | ||
|
|
71f0852a06 | ||
|
|
26a7a3bfdd | ||
|
|
479e6cb8e9 | ||
|
|
77aaac226c | ||
|
|
febe858942 | ||
|
|
1573807fc4 | ||
|
|
ae792f3dbe | ||
|
|
a331b1958c | ||
|
|
85f3a28d9d | ||
|
|
4b57a2bd7d | ||
|
|
750a4d0ce0 | ||
|
|
8ea79392a7 | ||
|
|
fd5353bb88 | ||
|
|
6950a75fa3 | ||
|
|
1c5e7b77fd | ||
|
|
2c3db40a0b | ||
|
|
7b4f657d25 | ||
|
|
007e30e869 | ||
|
|
919be0f3cb | ||
|
|
1a874bc4d9 | ||
|
|
7aa4e5486d | ||
|
|
d7427a324d |
141 changed files with 17799 additions and 3623 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -2,3 +2,4 @@
|
||||||
.vscode/
|
.vscode/
|
||||||
*.log
|
*.log
|
||||||
.core/
|
.core/
|
||||||
|
.gocache-build/
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
// Package buildcmd provides project build commands with auto-detection.
|
// Package buildcmd registers auto-detected project build commands.
|
||||||
package buildcmd
|
package buildcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
|
@ -13,7 +13,7 @@ func init() {
|
||||||
cli.RegisterCommands(AddBuildCommands)
|
cli.RegisterCommands(AddBuildCommands)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Style aliases from shared package
|
// Style aliases used by build command output.
|
||||||
var (
|
var (
|
||||||
buildHeaderStyle = cli.TitleStyle
|
buildHeaderStyle = cli.TitleStyle
|
||||||
buildTargetStyle = cli.ValueStyle
|
buildTargetStyle = cli.ValueStyle
|
||||||
|
|
@ -27,13 +27,14 @@ var guiTemplate embed.FS
|
||||||
|
|
||||||
// Flags for the main build command
|
// Flags for the main build command
|
||||||
var (
|
var (
|
||||||
buildType string
|
buildType string
|
||||||
ciMode bool
|
ciMode bool
|
||||||
targets string
|
targets string
|
||||||
outputDir string
|
outputDir string
|
||||||
doArchive bool
|
archiveOutput bool
|
||||||
doChecksum bool
|
checksumOutput bool
|
||||||
verbose bool
|
archiveFormat string
|
||||||
|
verbose bool
|
||||||
|
|
||||||
// Docker/LinuxKit specific flags
|
// Docker/LinuxKit specific flags
|
||||||
configPath string
|
configPath string
|
||||||
|
|
@ -61,7 +62,23 @@ var (
|
||||||
var buildCmd = &cli.Command{
|
var buildCmd = &cli.Command{
|
||||||
Use: "build",
|
Use: "build",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
return runProjectBuild(cmd.Context(), buildType, ciMode, targets, outputDir, doArchive, doChecksum, configPath, format, push, imageName, noSign, notarize, verbose)
|
return runProjectBuild(ProjectBuildRequest{
|
||||||
|
Context: cmd.Context(),
|
||||||
|
BuildType: buildType,
|
||||||
|
CIMode: ciMode,
|
||||||
|
TargetsFlag: targets,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
ArchiveOutput: archiveOutput,
|
||||||
|
ChecksumOutput: checksumOutput,
|
||||||
|
ArchiveFormat: archiveFormat,
|
||||||
|
ConfigPath: configPath,
|
||||||
|
Format: format,
|
||||||
|
Push: push,
|
||||||
|
ImageName: imageName,
|
||||||
|
NoSign: noSign,
|
||||||
|
Notarize: notarize,
|
||||||
|
Verbose: verbose,
|
||||||
|
})
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -71,7 +88,7 @@ var fromPathCmd = &cli.Command{
|
||||||
if fromPath == "" {
|
if fromPath == "" {
|
||||||
return errPathRequired
|
return errPathRequired
|
||||||
}
|
}
|
||||||
return runBuild(fromPath)
|
return runBuild(cmd.Context(), fromPath)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -81,14 +98,14 @@ var pwaCmd = &cli.Command{
|
||||||
if pwaURL == "" {
|
if pwaURL == "" {
|
||||||
return errURLRequired
|
return errURLRequired
|
||||||
}
|
}
|
||||||
return runPwaBuild(pwaURL)
|
return runPwaBuild(cmd.Context(), pwaURL)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
var sdkBuildCmd = &cli.Command{
|
var sdkBuildCmd = &cli.Command{
|
||||||
Use: "sdk",
|
Use: "sdk",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
return runBuildSDK(sdkSpec, sdkLang, sdkVersion, sdkDryRun)
|
return runBuildSDK(cmd.Context(), sdkSpec, sdkLang, sdkVersion, sdkDryRun)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -108,11 +125,14 @@ func initBuildFlags() {
|
||||||
buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, i18n.T("common.flag.verbose"))
|
buildCmd.Flags().BoolVarP(&verbose, "verbose", "v", false, i18n.T("common.flag.verbose"))
|
||||||
buildCmd.Flags().StringVar(&targets, "targets", "", i18n.T("cmd.build.flag.targets"))
|
buildCmd.Flags().StringVar(&targets, "targets", "", i18n.T("cmd.build.flag.targets"))
|
||||||
buildCmd.Flags().StringVar(&outputDir, "output", "", i18n.T("cmd.build.flag.output"))
|
buildCmd.Flags().StringVar(&outputDir, "output", "", i18n.T("cmd.build.flag.output"))
|
||||||
buildCmd.Flags().BoolVar(&doArchive, "archive", true, i18n.T("cmd.build.flag.archive"))
|
buildCmd.Flags().BoolVar(&archiveOutput, "archive", true, i18n.T("cmd.build.flag.archive"))
|
||||||
buildCmd.Flags().BoolVar(&doChecksum, "checksum", true, i18n.T("cmd.build.flag.checksum"))
|
buildCmd.Flags().BoolVar(&checksumOutput, "checksum", true, i18n.T("cmd.build.flag.checksum"))
|
||||||
|
buildCmd.Flags().StringVar(&archiveFormat, "archive-format", "", i18n.T("cmd.build.flag.archive_format"))
|
||||||
|
|
||||||
|
// Build config override.
|
||||||
|
buildCmd.Flags().StringVar(&configPath, "config", "", i18n.T("cmd.build.flag.config"))
|
||||||
|
|
||||||
// Docker/LinuxKit specific
|
// Docker/LinuxKit specific
|
||||||
buildCmd.Flags().StringVar(&configPath, "config", "", i18n.T("cmd.build.flag.config"))
|
|
||||||
buildCmd.Flags().StringVar(&format, "format", "", i18n.T("cmd.build.flag.format"))
|
buildCmd.Flags().StringVar(&format, "format", "", i18n.T("cmd.build.flag.format"))
|
||||||
buildCmd.Flags().BoolVar(&push, "push", false, i18n.T("cmd.build.flag.push"))
|
buildCmd.Flags().BoolVar(&push, "push", false, i18n.T("cmd.build.flag.push"))
|
||||||
buildCmd.Flags().StringVar(&imageName, "image", "", i18n.T("cmd.build.flag.image"))
|
buildCmd.Flags().StringVar(&imageName, "image", "", i18n.T("cmd.build.flag.image"))
|
||||||
|
|
@ -140,9 +160,12 @@ func initBuildFlags() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddBuildCommands registers the 'build' command and all subcommands.
|
// AddBuildCommands registers the 'build' command and all subcommands.
|
||||||
|
//
|
||||||
|
// buildcmd.AddBuildCommands(root)
|
||||||
func AddBuildCommands(root *cli.Command) {
|
func AddBuildCommands(root *cli.Command) {
|
||||||
setBuildI18n()
|
setBuildI18n()
|
||||||
initBuildFlags()
|
initBuildFlags()
|
||||||
AddReleaseCommand(buildCmd)
|
AddReleaseCommand(buildCmd)
|
||||||
|
AddWorkflowCommand(buildCmd)
|
||||||
root.AddCommand(buildCmd)
|
root.AddCommand(buildCmd)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,5 @@
|
||||||
// Package buildcmd provides project build commands with auto-detection.
|
// Package buildcmd registers build-oriented Core commands.
|
||||||
//
|
//
|
||||||
// Supports building:
|
// buildcmd.AddBuildCommands(root)
|
||||||
// - Go projects (standard and cross-compilation)
|
// buildcmd.AddReleaseCommand(buildCmd)
|
||||||
// - Wails desktop applications
|
|
||||||
// - Docker images
|
|
||||||
// - LinuxKit VM images
|
|
||||||
// - Taskfile-based projects
|
|
||||||
//
|
|
||||||
// Configuration via .core/build.yaml or command-line flags.
|
|
||||||
//
|
|
||||||
// Subcommands:
|
|
||||||
// - build: Auto-detect and build the current project
|
|
||||||
// - build from-path: Build from a local static web app directory
|
|
||||||
// - build pwa: Build from a live PWA URL
|
|
||||||
// - build sdk: Generate API SDKs from OpenAPI spec
|
|
||||||
package buildcmd
|
package buildcmd
|
||||||
|
|
||||||
// Note: The AddBuildCommands function is defined in cmd_build.go
|
|
||||||
// This file exists for documentation purposes and maintains the original
|
|
||||||
// package documentation from commands.go.
|
|
||||||
|
|
|
||||||
|
|
@ -1,53 +1,108 @@
|
||||||
// cmd_project.go implements the main project build logic.
|
// cmd_project.go implements the main project build logic.
|
||||||
//
|
//
|
||||||
// This handles auto-detection of project types (Go, Wails, Docker, LinuxKit, Taskfile)
|
// This handles auto-detection of project types (Go, Wails, Node, PHP, Docs, Docker, LinuxKit, Taskfile)
|
||||||
// and orchestrates the build process including signing, archiving, and checksums.
|
// and orchestrates the build process including signing, archiving, and checksums.
|
||||||
|
|
||||||
package buildcmd
|
package buildcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/internal/projectdetect"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/build/pkg/build/builders"
|
"dappco.re/go/core/build/pkg/build/builders"
|
||||||
"dappco.re/go/core/build/pkg/build/signing"
|
"dappco.re/go/core/build/pkg/build/signing"
|
||||||
|
"dappco.re/go/core/build/pkg/release"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// ProjectBuildRequest groups the inputs for the main `core build` command.
|
||||||
|
//
|
||||||
|
// req := ProjectBuildRequest{
|
||||||
|
// Context: cmd.Context(),
|
||||||
|
// BuildType: "go",
|
||||||
|
// TargetsFlag: "linux/amd64,linux/arm64",
|
||||||
|
// }
|
||||||
|
type ProjectBuildRequest struct {
|
||||||
|
Context context.Context
|
||||||
|
BuildType string
|
||||||
|
CIMode bool
|
||||||
|
TargetsFlag string
|
||||||
|
OutputDir string
|
||||||
|
ArchiveOutput bool
|
||||||
|
ChecksumOutput bool
|
||||||
|
ArchiveFormat string
|
||||||
|
ConfigPath string
|
||||||
|
Format string
|
||||||
|
Push bool
|
||||||
|
ImageName string
|
||||||
|
NoSign bool
|
||||||
|
Notarize bool
|
||||||
|
Verbose bool
|
||||||
|
}
|
||||||
|
|
||||||
// runProjectBuild handles the main `core build` command with auto-detection.
|
// runProjectBuild handles the main `core build` command with auto-detection.
|
||||||
func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targetsFlag string, outputDir string, doArchive bool, doChecksum bool, configPath string, format string, push bool, imageName string, noSign bool, notarize bool, verbose bool) error {
|
func runProjectBuild(req ProjectBuildRequest) error {
|
||||||
// Use local filesystem as the default medium
|
ctx := req.Context
|
||||||
fs := io.Local
|
if ctx == nil {
|
||||||
|
ctx = context.Background()
|
||||||
|
}
|
||||||
|
// Use local filesystem as the default medium.
|
||||||
|
filesystem := io.Local
|
||||||
|
|
||||||
// Get current working directory as project root
|
// Get current working directory as project root
|
||||||
projectDir, err := os.Getwd()
|
projectDir, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.Run", "failed to get working directory", err)
|
return coreerr.E("build.Run", "failed to get working directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// PWA builds use the dedicated local web-app pipeline rather than the
|
||||||
|
// project-type builder registry.
|
||||||
|
if req.BuildType == "pwa" {
|
||||||
|
return runLocalPwaBuild(ctx, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
// Load configuration from .core/build.yaml (or defaults)
|
// Load configuration from .core/build.yaml (or defaults)
|
||||||
buildCfg, err := build.LoadConfig(fs, projectDir)
|
var buildConfig *build.BuildConfig
|
||||||
|
configPath := req.ConfigPath
|
||||||
|
if configPath != "" {
|
||||||
|
if !ax.IsAbs(configPath) {
|
||||||
|
configPath = ax.Join(projectDir, configPath)
|
||||||
|
}
|
||||||
|
if !filesystem.Exists(configPath) {
|
||||||
|
return coreerr.E("build.Run", "build config not found: "+configPath, nil)
|
||||||
|
}
|
||||||
|
buildConfig, err = build.LoadConfigAtPath(filesystem, configPath)
|
||||||
|
} else {
|
||||||
|
buildConfig, err = build.LoadConfig(filesystem, projectDir)
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.Run", "failed to load config", err)
|
return coreerr.E("build.Run", "failed to load config", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if buildConfig.Build.Type == "pwa" {
|
||||||
|
return runLocalPwaBuild(ctx, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := build.SetupBuildCache(filesystem, projectDir, buildConfig); err != nil {
|
||||||
|
return coreerr.E("build.Run", "failed to set up build cache", err)
|
||||||
|
}
|
||||||
|
|
||||||
// Detect project type if not specified
|
// Detect project type if not specified
|
||||||
var projectType build.ProjectType
|
var projectType build.ProjectType
|
||||||
if buildType != "" {
|
if req.BuildType != "" {
|
||||||
projectType = build.ProjectType(buildType)
|
projectType = build.ProjectType(req.BuildType)
|
||||||
} else if buildCfg.Build.Type != "" {
|
} else if buildConfig.Build.Type != "" {
|
||||||
// Use type from .core/build.yaml
|
// Use type from .core/build.yaml
|
||||||
projectType = build.ProjectType(buildCfg.Build.Type)
|
projectType = build.ProjectType(buildConfig.Build.Type)
|
||||||
} else {
|
} else {
|
||||||
projectType, err = build.PrimaryType(fs, projectDir)
|
projectType, err = projectdetect.DetectProjectType(filesystem, projectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.Run", "failed to detect project type", err)
|
return coreerr.E("build.Run", "failed to detect project type", err)
|
||||||
}
|
}
|
||||||
|
|
@ -58,15 +113,15 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
|
|
||||||
// Determine targets
|
// Determine targets
|
||||||
var buildTargets []build.Target
|
var buildTargets []build.Target
|
||||||
if targetsFlag != "" {
|
if req.TargetsFlag != "" {
|
||||||
// Parse from command line
|
// Parse from command line
|
||||||
buildTargets, err = parseTargets(targetsFlag)
|
buildTargets, err = parseTargets(req.TargetsFlag)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else if len(buildCfg.Targets) > 0 {
|
} else if len(buildConfig.Targets) > 0 {
|
||||||
// Use config targets
|
// Use config targets
|
||||||
buildTargets = buildCfg.ToTargets()
|
buildTargets = buildConfig.ToTargets()
|
||||||
} else {
|
} else {
|
||||||
// Fall back to current OS/arch
|
// Fall back to current OS/arch
|
||||||
buildTargets = []build.Target{
|
buildTargets = []build.Target{
|
||||||
|
|
@ -75,36 +130,32 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine output directory
|
// Determine output directory
|
||||||
|
outputDir := req.OutputDir
|
||||||
if outputDir == "" {
|
if outputDir == "" {
|
||||||
outputDir = "dist"
|
outputDir = "dist"
|
||||||
}
|
}
|
||||||
if !filepath.IsAbs(outputDir) {
|
if !ax.IsAbs(outputDir) {
|
||||||
outputDir = filepath.Join(projectDir, outputDir)
|
outputDir = ax.Join(projectDir, outputDir)
|
||||||
}
|
|
||||||
outputDir = filepath.Clean(outputDir)
|
|
||||||
|
|
||||||
// Ensure config path is absolute if provided
|
|
||||||
if configPath != "" && !filepath.IsAbs(configPath) {
|
|
||||||
configPath = filepath.Join(projectDir, configPath)
|
|
||||||
}
|
}
|
||||||
|
outputDir = ax.Clean(outputDir)
|
||||||
|
|
||||||
// Determine binary name
|
// Determine binary name
|
||||||
binaryName := buildCfg.Project.Binary
|
binaryName := buildConfig.Project.Binary
|
||||||
if binaryName == "" {
|
if binaryName == "" {
|
||||||
binaryName = buildCfg.Project.Name
|
binaryName = buildConfig.Project.Name
|
||||||
}
|
}
|
||||||
if binaryName == "" {
|
if binaryName == "" {
|
||||||
binaryName = filepath.Base(projectDir)
|
binaryName = ax.Base(projectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Print build info (verbose mode only)
|
// Print build info (verbose mode only)
|
||||||
if verbose && !ciMode {
|
if req.Verbose && !req.CIMode {
|
||||||
fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.build")), i18n.T("cmd.build.building_project"))
|
cli.Print("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.build")), i18n.T("cmd.build.building_project"))
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.type"), buildTargetStyle.Render(string(projectType)))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.label.type"), buildTargetStyle.Render(string(projectType)))
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.output"), buildTargetStyle.Render(outputDir))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.label.output"), buildTargetStyle.Render(outputDir))
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.binary"), buildTargetStyle.Render(binaryName))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.label.binary"), buildTargetStyle.Render(binaryName))
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.label.targets"), buildTargetStyle.Render(formatTargets(buildTargets)))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.label.targets"), buildTargetStyle.Render(formatTargets(buildTargets)))
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the appropriate builder
|
// Get the appropriate builder
|
||||||
|
|
@ -114,64 +165,61 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create build config for the builder
|
// Create build config for the builder
|
||||||
cfg := &build.Config{
|
version, err := resolveBuildVersion(ctx, projectDir)
|
||||||
FS: fs,
|
if err != nil {
|
||||||
ProjectDir: projectDir,
|
return coreerr.E("build.Run", "failed to determine build version", err)
|
||||||
OutputDir: outputDir,
|
|
||||||
Name: binaryName,
|
|
||||||
Version: buildCfg.Project.Name, // Could be enhanced with git describe
|
|
||||||
LDFlags: buildCfg.Build.LDFlags,
|
|
||||||
CGO: buildCfg.Build.CGO,
|
|
||||||
// Docker/LinuxKit specific
|
|
||||||
Dockerfile: configPath, // Reuse for Dockerfile path
|
|
||||||
LinuxKitConfig: configPath,
|
|
||||||
Push: push,
|
|
||||||
Image: imageName,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cfg := buildRuntimeConfig(filesystem, projectDir, outputDir, binaryName, buildConfig, req.Push, req.ImageName, version)
|
||||||
|
discovery, err := build.DiscoverFull(filesystem, projectDir)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("build.Run", "failed to inspect project for build options", err)
|
||||||
|
}
|
||||||
|
build.ApplyOptions(cfg, build.ComputeOptions(buildConfig, discovery))
|
||||||
|
|
||||||
// Parse formats for LinuxKit
|
// Parse formats for LinuxKit
|
||||||
if format != "" {
|
if req.Format != "" {
|
||||||
cfg.Formats = strings.Split(format, ",")
|
cfg.Formats = core.Split(req.Format, ",")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute build
|
// Execute build
|
||||||
artifacts, err := builder.Build(ctx, cfg, buildTargets)
|
artifacts, err := builder.Build(ctx, cfg, buildTargets)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if !ciMode {
|
if !req.CIMode {
|
||||||
fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
cli.Print("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if verbose && !ciMode {
|
if req.Verbose && !req.CIMode {
|
||||||
fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.built_artifacts", map[string]any{"Count": len(artifacts)}))
|
cli.Print("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.built_artifacts", map[string]any{"Count": len(artifacts)}))
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
for _, artifact := range artifacts {
|
for _, artifact := range artifacts {
|
||||||
relPath, err := filepath.Rel(projectDir, artifact.Path)
|
relPath, err := ax.Rel(projectDir, artifact.Path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
relPath = artifact.Path
|
relPath = artifact.Path
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s %s\n",
|
cli.Print(" %s %s %s\n",
|
||||||
buildSuccessStyle.Render("*"),
|
buildSuccessStyle.Render("*"),
|
||||||
buildTargetStyle.Render(relPath),
|
buildTargetStyle.Render(relPath),
|
||||||
buildDimStyle.Render(fmt.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)),
|
buildDimStyle.Render(core.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sign macOS binaries if enabled
|
// Sign binaries if enabled.
|
||||||
signCfg := buildCfg.Sign
|
signCfg := buildConfig.Sign
|
||||||
if notarize {
|
if req.Notarize {
|
||||||
signCfg.MacOS.Notarize = true
|
signCfg.MacOS.Notarize = true
|
||||||
}
|
}
|
||||||
if noSign {
|
if req.NoSign {
|
||||||
signCfg.Enabled = false
|
signCfg.Enabled = false
|
||||||
}
|
}
|
||||||
|
|
||||||
if signCfg.Enabled && runtime.GOOS == "darwin" {
|
if signCfg.Enabled && (runtime.GOOS == "darwin" || runtime.GOOS == "windows") {
|
||||||
if verbose && !ciMode {
|
if req.Verbose && !req.CIMode {
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.sign")), i18n.T("cmd.build.signing_binaries"))
|
cli.Print("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.sign")), i18n.T("cmd.build.signing_binaries"))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert build.Artifact to signing.Artifact
|
// Convert build.Artifact to signing.Artifact
|
||||||
|
|
@ -180,17 +228,17 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
signingArtifacts[i] = signing.Artifact{Path: a.Path, OS: a.OS, Arch: a.Arch}
|
signingArtifacts[i] = signing.Artifact{Path: a.Path, OS: a.OS, Arch: a.Arch}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := signing.SignBinaries(ctx, fs, signCfg, signingArtifacts); err != nil {
|
if err := signing.SignBinaries(ctx, filesystem, signCfg, signingArtifacts); err != nil {
|
||||||
if !ciMode {
|
if !req.CIMode {
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.signing_failed"), err)
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.signing_failed"), err)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if signCfg.MacOS.Notarize {
|
if runtime.GOOS == "darwin" && signCfg.MacOS.Notarize {
|
||||||
if err := signing.NotarizeBinaries(ctx, fs, signCfg, signingArtifacts); err != nil {
|
if err := signing.NotarizeBinaries(ctx, filesystem, signCfg, signingArtifacts); err != nil {
|
||||||
if !ciMode {
|
if !req.CIMode {
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.notarization_failed"), err)
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.notarization_failed"), err)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -199,30 +247,35 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
|
|
||||||
// Archive artifacts if enabled
|
// Archive artifacts if enabled
|
||||||
var archivedArtifacts []build.Artifact
|
var archivedArtifacts []build.Artifact
|
||||||
if doArchive && len(artifacts) > 0 {
|
if req.ArchiveOutput && len(artifacts) > 0 {
|
||||||
if verbose && !ciMode {
|
if req.Verbose && !req.CIMode {
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.archive")), i18n.T("cmd.build.creating_archives"))
|
cli.Print("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.archive")), i18n.T("cmd.build.creating_archives"))
|
||||||
}
|
}
|
||||||
|
|
||||||
archivedArtifacts, err = build.ArchiveAll(fs, artifacts)
|
archiveFormatValue, err := resolveArchiveFormat(buildConfig.Build.ArchiveFormat, req.ArchiveFormat)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if !ciMode {
|
return err
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.archive_failed"), err)
|
}
|
||||||
|
|
||||||
|
archivedArtifacts, err = build.ArchiveAllWithFormat(filesystem, artifacts, archiveFormatValue)
|
||||||
|
if err != nil {
|
||||||
|
if !req.CIMode {
|
||||||
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.archive_failed"), err)
|
||||||
}
|
}
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if verbose && !ciMode {
|
if req.Verbose && !req.CIMode {
|
||||||
for _, artifact := range archivedArtifacts {
|
for _, artifact := range archivedArtifacts {
|
||||||
relPath, err := filepath.Rel(projectDir, artifact.Path)
|
relPath, err := ax.Rel(projectDir, artifact.Path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
relPath = artifact.Path
|
relPath = artifact.Path
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s %s\n",
|
cli.Print(" %s %s %s\n",
|
||||||
buildSuccessStyle.Render("*"),
|
buildSuccessStyle.Render("*"),
|
||||||
buildTargetStyle.Render(relPath),
|
buildTargetStyle.Render(relPath),
|
||||||
buildDimStyle.Render(fmt.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)),
|
buildDimStyle.Render(core.Sprintf("(%s/%s)", artifact.OS, artifact.Arch)),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -230,79 +283,159 @@ func runProjectBuild(ctx context.Context, buildType string, ciMode bool, targets
|
||||||
|
|
||||||
// Compute checksums if enabled
|
// Compute checksums if enabled
|
||||||
var checksummedArtifacts []build.Artifact
|
var checksummedArtifacts []build.Artifact
|
||||||
if doChecksum && len(archivedArtifacts) > 0 {
|
if req.ChecksumOutput && len(archivedArtifacts) > 0 {
|
||||||
checksummedArtifacts, err = computeAndWriteChecksums(ctx, projectDir, outputDir, archivedArtifacts, signCfg, ciMode, verbose)
|
checksummedArtifacts, err = computeAndWriteChecksums(ctx, filesystem, projectDir, outputDir, archivedArtifacts, signCfg, req.CIMode, req.Verbose)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
} else if doChecksum && len(artifacts) > 0 && !doArchive {
|
} else if req.ChecksumOutput && len(artifacts) > 0 && !req.ArchiveOutput {
|
||||||
// Checksum raw binaries if archiving is disabled
|
// Checksum raw binaries if archiving is disabled
|
||||||
checksummedArtifacts, err = computeAndWriteChecksums(ctx, projectDir, outputDir, artifacts, signCfg, ciMode, verbose)
|
checksummedArtifacts, err = computeAndWriteChecksums(ctx, filesystem, projectDir, outputDir, artifacts, signCfg, req.CIMode, req.Verbose)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Output results
|
// Output results
|
||||||
if ciMode {
|
if req.CIMode {
|
||||||
// Determine which artifacts to output (prefer checksummed > archived > raw)
|
// Determine which artifacts to output (prefer checksummed > archived > raw).
|
||||||
var outputArtifacts []build.Artifact
|
outputArtifacts := selectOutputArtifacts(artifacts, archivedArtifacts, checksummedArtifacts)
|
||||||
if len(checksummedArtifacts) > 0 {
|
if err := writeArtifactMetadata(filesystem, binaryName, outputArtifacts); err != nil {
|
||||||
outputArtifacts = checksummedArtifacts
|
return err
|
||||||
} else if len(archivedArtifacts) > 0 {
|
|
||||||
outputArtifacts = archivedArtifacts
|
|
||||||
} else {
|
|
||||||
outputArtifacts = artifacts
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// JSON output for CI
|
// JSON output for CI
|
||||||
output, err := json.MarshalIndent(outputArtifacts, "", " ")
|
output, err := ax.JSONMarshal(outputArtifacts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.Run", "failed to marshal artifacts", err)
|
return coreerr.E("build.Run", "failed to marshal artifacts", err)
|
||||||
}
|
}
|
||||||
fmt.Println(string(output))
|
cli.Print("%s\n", output)
|
||||||
} else if !verbose {
|
} else if !req.Verbose {
|
||||||
// Minimal output: just success with artifact count
|
// Minimal output: just success with artifact count
|
||||||
fmt.Printf("%s %s %s\n",
|
cli.Print("%s %s %s\n",
|
||||||
buildSuccessStyle.Render(i18n.T("common.label.success")),
|
buildSuccessStyle.Render(i18n.T("common.label.success")),
|
||||||
i18n.T("cmd.build.built_artifacts", map[string]any{"Count": len(artifacts)}),
|
i18n.T("cmd.build.built_artifacts", map[string]any{"Count": len(artifacts)}),
|
||||||
buildDimStyle.Render(fmt.Sprintf("(%s)", outputDir)),
|
buildDimStyle.Render(core.Sprintf("(%s)", outputDir)),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// computeAndWriteChecksums computes checksums for artifacts and writes CHECKSUMS.txt.
|
// selectOutputArtifacts chooses the final artifact list for CI output.
|
||||||
func computeAndWriteChecksums(ctx context.Context, projectDir, outputDir string, artifacts []build.Artifact, signCfg signing.SignConfig, ciMode bool, verbose bool) ([]build.Artifact, error) {
|
//
|
||||||
fs := io.Local
|
// output := selectOutputArtifacts(rawArtifacts, archivedArtifacts, checksummedArtifacts)
|
||||||
if verbose && !ciMode {
|
func selectOutputArtifacts(rawArtifacts, archivedArtifacts, checksummedArtifacts []build.Artifact) []build.Artifact {
|
||||||
fmt.Println()
|
if len(checksummedArtifacts) > 0 {
|
||||||
fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.checksum")), i18n.T("cmd.build.computing_checksums"))
|
return checksummedArtifacts
|
||||||
|
}
|
||||||
|
if len(archivedArtifacts) > 0 {
|
||||||
|
return archivedArtifacts
|
||||||
|
}
|
||||||
|
return rawArtifacts
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeArtifactMetadata writes artifact_meta.json files next to built artifacts when CI metadata is available.
|
||||||
|
func writeArtifactMetadata(filesystem io.Medium, buildName string, artifacts []build.Artifact) error {
|
||||||
|
ci := build.DetectCI()
|
||||||
|
if ci == nil {
|
||||||
|
ci = build.DetectGitHubMetadata()
|
||||||
|
}
|
||||||
|
if ci == nil {
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
checksummedArtifacts, err := build.ChecksumAll(fs, artifacts)
|
for _, artifact := range artifacts {
|
||||||
|
metaPath := ax.Join(ax.Dir(artifact.Path), "artifact_meta.json")
|
||||||
|
if err := build.WriteArtifactMeta(filesystem, metaPath, buildName, build.Target{OS: artifact.OS, Arch: artifact.Arch}, ci); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildRuntimeConfig maps persisted build configuration onto the runtime builder config.
|
||||||
|
func buildRuntimeConfig(filesystem io.Medium, projectDir, outputDir, binaryName string, buildConfig *build.BuildConfig, push bool, imageName string, version string) *build.Config {
|
||||||
|
buildDefaults := buildConfig.Build
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: filesystem,
|
||||||
|
Project: buildConfig.Project,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: binaryName,
|
||||||
|
Version: version,
|
||||||
|
LDFlags: append([]string{}, buildDefaults.LDFlags...),
|
||||||
|
Flags: append([]string{}, buildDefaults.Flags...),
|
||||||
|
BuildTags: append([]string{}, buildDefaults.BuildTags...),
|
||||||
|
Env: append([]string{}, buildDefaults.Env...),
|
||||||
|
Cache: buildDefaults.Cache,
|
||||||
|
CGO: buildDefaults.CGO,
|
||||||
|
Obfuscate: buildDefaults.Obfuscate,
|
||||||
|
NSIS: buildDefaults.NSIS,
|
||||||
|
WebView2: buildDefaults.WebView2,
|
||||||
|
Dockerfile: buildDefaults.Dockerfile,
|
||||||
|
Registry: buildDefaults.Registry,
|
||||||
|
Image: buildDefaults.Image,
|
||||||
|
Tags: append([]string{}, buildDefaults.Tags...),
|
||||||
|
BuildArgs: build.CloneStringMap(buildDefaults.BuildArgs),
|
||||||
|
Push: buildDefaults.Push || push,
|
||||||
|
Load: buildDefaults.Load,
|
||||||
|
LinuxKitConfig: buildDefaults.LinuxKitConfig,
|
||||||
|
Formats: append([]string{}, buildDefaults.Formats...),
|
||||||
|
}
|
||||||
|
|
||||||
|
if imageName != "" {
|
||||||
|
cfg.Image = imageName
|
||||||
|
}
|
||||||
|
|
||||||
|
return cfg
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveArchiveFormat selects the archive format from CLI overrides or config defaults.
|
||||||
|
func resolveArchiveFormat(configFormat, cliFormat string) (build.ArchiveFormat, error) {
|
||||||
|
if cliFormat != "" {
|
||||||
|
return build.ParseArchiveFormat(cliFormat)
|
||||||
|
}
|
||||||
|
return build.ParseArchiveFormat(configFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveBuildVersion determines the version string embedded into build artifacts.
|
||||||
|
//
|
||||||
|
// version, err := resolveBuildVersion(ctx, ".")
|
||||||
|
func resolveBuildVersion(ctx context.Context, projectDir string) (string, error) {
|
||||||
|
return release.DetermineVersionWithContext(ctx, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// computeAndWriteChecksums computes checksums for artifacts and writes CHECKSUMS.txt.
|
||||||
|
func computeAndWriteChecksums(ctx context.Context, filesystem io.Medium, projectDir, outputDir string, artifacts []build.Artifact, signCfg signing.SignConfig, ciMode bool, verbose bool) ([]build.Artifact, error) {
|
||||||
|
if verbose && !ciMode {
|
||||||
|
cli.Blank()
|
||||||
|
cli.Print("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.label.checksum")), i18n.T("cmd.build.computing_checksums"))
|
||||||
|
}
|
||||||
|
|
||||||
|
checksummedArtifacts, err := build.ChecksumAll(filesystem, artifacts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if !ciMode {
|
if !ciMode {
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.checksum_failed"), err)
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.checksum_failed"), err)
|
||||||
}
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write CHECKSUMS.txt
|
// Write CHECKSUMS.txt
|
||||||
checksumPath := filepath.Join(outputDir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(outputDir, "CHECKSUMS.txt")
|
||||||
if err := build.WriteChecksumFile(fs, checksummedArtifacts, checksumPath); err != nil {
|
if err := build.WriteChecksumFile(filesystem, checksummedArtifacts, checksumPath); err != nil {
|
||||||
if !ciMode {
|
if !ciMode {
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("common.error.failed", map[string]any{"Action": "write CHECKSUMS.txt"}), err)
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("common.error.failed", map[string]any{"Action": "write CHECKSUMS.txt"}), err)
|
||||||
}
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sign checksums with GPG
|
// Sign checksums with GPG
|
||||||
if signCfg.Enabled {
|
if signCfg.Enabled {
|
||||||
if err := signing.SignChecksums(ctx, fs, signCfg, checksumPath); err != nil {
|
if err := signing.SignChecksums(ctx, filesystem, signCfg, checksumPath); err != nil {
|
||||||
if !ciMode {
|
if !ciMode {
|
||||||
fmt.Printf("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.gpg_signing_failed"), err)
|
cli.Print("%s %s: %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), i18n.T("cmd.build.error.gpg_signing_failed"), err)
|
||||||
}
|
}
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
@ -310,22 +443,22 @@ func computeAndWriteChecksums(ctx context.Context, projectDir, outputDir string,
|
||||||
|
|
||||||
if verbose && !ciMode {
|
if verbose && !ciMode {
|
||||||
for _, artifact := range checksummedArtifacts {
|
for _, artifact := range checksummedArtifacts {
|
||||||
relPath, err := filepath.Rel(projectDir, artifact.Path)
|
relPath, err := ax.Rel(projectDir, artifact.Path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
relPath = artifact.Path
|
relPath = artifact.Path
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s\n",
|
cli.Print(" %s %s\n",
|
||||||
buildSuccessStyle.Render("*"),
|
buildSuccessStyle.Render("*"),
|
||||||
buildTargetStyle.Render(relPath),
|
buildTargetStyle.Render(relPath),
|
||||||
)
|
)
|
||||||
fmt.Printf(" %s\n", buildDimStyle.Render(artifact.Checksum))
|
cli.Print(" %s\n", buildDimStyle.Render(artifact.Checksum))
|
||||||
}
|
}
|
||||||
|
|
||||||
relChecksumPath, err := filepath.Rel(projectDir, checksumPath)
|
relChecksumPath, err := ax.Rel(projectDir, checksumPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
relChecksumPath = checksumPath
|
relChecksumPath = checksumPath
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s\n",
|
cli.Print(" %s %s\n",
|
||||||
buildSuccessStyle.Render("*"),
|
buildSuccessStyle.Render("*"),
|
||||||
buildTargetStyle.Render(relChecksumPath),
|
buildTargetStyle.Render(relChecksumPath),
|
||||||
)
|
)
|
||||||
|
|
@ -336,23 +469,23 @@ func computeAndWriteChecksums(ctx context.Context, projectDir, outputDir string,
|
||||||
|
|
||||||
// parseTargets parses a comma-separated list of OS/arch pairs.
|
// parseTargets parses a comma-separated list of OS/arch pairs.
|
||||||
func parseTargets(targetsFlag string) ([]build.Target, error) {
|
func parseTargets(targetsFlag string) ([]build.Target, error) {
|
||||||
parts := strings.Split(targetsFlag, ",")
|
parts := core.Split(targetsFlag, ",")
|
||||||
var targets []build.Target
|
var targets []build.Target
|
||||||
|
|
||||||
for _, part := range parts {
|
for _, part := range parts {
|
||||||
part = strings.TrimSpace(part)
|
part = core.Trim(part)
|
||||||
if part == "" {
|
if part == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
osArch := strings.Split(part, "/")
|
osArch := core.Split(part, "/")
|
||||||
if len(osArch) != 2 {
|
if len(osArch) != 2 {
|
||||||
return nil, coreerr.E("build.parseTargets", "invalid target format (expected os/arch): "+part, nil)
|
return nil, coreerr.E("build.parseTargets", "invalid target format (expected os/arch): "+part, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
targets = append(targets, build.Target{
|
targets = append(targets, build.Target{
|
||||||
OS: strings.TrimSpace(osArch[0]),
|
OS: core.Trim(osArch[0]),
|
||||||
Arch: strings.TrimSpace(osArch[1]),
|
Arch: core.Trim(osArch[1]),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -369,7 +502,7 @@ func formatTargets(targets []build.Target) string {
|
||||||
for _, t := range targets {
|
for _, t := range targets {
|
||||||
parts = append(parts, t.String())
|
parts = append(parts, t.String())
|
||||||
}
|
}
|
||||||
return strings.Join(parts, ", ")
|
return core.Join(", ", parts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// getBuilder returns the appropriate builder for the project type.
|
// getBuilder returns the appropriate builder for the project type.
|
||||||
|
|
@ -388,9 +521,15 @@ func getBuilder(projectType build.ProjectType) (build.Builder, error) {
|
||||||
case build.ProjectTypeCPP:
|
case build.ProjectTypeCPP:
|
||||||
return builders.NewCPPBuilder(), nil
|
return builders.NewCPPBuilder(), nil
|
||||||
case build.ProjectTypeNode:
|
case build.ProjectTypeNode:
|
||||||
return nil, coreerr.E("build.getBuilder", "node.js builder not yet implemented", nil)
|
return builders.NewNodeBuilder(), nil
|
||||||
case build.ProjectTypePHP:
|
case build.ProjectTypePHP:
|
||||||
return nil, coreerr.E("build.getBuilder", "PHP builder not yet implemented", nil)
|
return builders.NewPHPBuilder(), nil
|
||||||
|
case build.ProjectTypePython:
|
||||||
|
return builders.NewPythonBuilder(), nil
|
||||||
|
case build.ProjectTypeRust:
|
||||||
|
return builders.NewRustBuilder(), nil
|
||||||
|
case build.ProjectTypeDocs:
|
||||||
|
return builders.NewDocsBuilder(), nil
|
||||||
default:
|
default:
|
||||||
return nil, coreerr.E("build.getBuilder", "unsupported project type: "+string(projectType), nil)
|
return nil, coreerr.E("build.getBuilder", "unsupported project type: "+string(projectType), nil)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
218
cmd/build/cmd_project_test.go
Normal file
218
cmd/build/cmd_project_test.go
Normal file
|
|
@ -0,0 +1,218 @@
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func runGit(t *testing.T, dir string, args ...string) {
|
||||||
|
t.Helper()
|
||||||
|
require.NoError(t, ax.ExecDir(context.Background(), dir, "git", args...))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_GetBuilder_Good(t *testing.T) {
|
||||||
|
t.Run("returns Python builder for python project type", func(t *testing.T) {
|
||||||
|
builder, err := getBuilder(build.ProjectTypePython)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotNil(t, builder)
|
||||||
|
assert.Equal(t, "python", builder.Name())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_buildRuntimeConfig_Good(t *testing.T) {
|
||||||
|
buildConfig := &build.BuildConfig{
|
||||||
|
Project: build.Project{
|
||||||
|
Name: "sample",
|
||||||
|
},
|
||||||
|
Build: build.Build{
|
||||||
|
LDFlags: []string{"-s", "-w"},
|
||||||
|
Flags: []string{"-trimpath"},
|
||||||
|
BuildTags: []string{"integration"},
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
CGO: true,
|
||||||
|
Obfuscate: true,
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "embed",
|
||||||
|
Dockerfile: "Dockerfile.custom",
|
||||||
|
Registry: "ghcr.io",
|
||||||
|
Image: "owner/repo",
|
||||||
|
Tags: []string{"latest", "{{.Version}}"},
|
||||||
|
BuildArgs: map[string]string{"VERSION": "{{.Version}}"},
|
||||||
|
Push: true,
|
||||||
|
Load: true,
|
||||||
|
LinuxKitConfig: ".core/linuxkit/server.yml",
|
||||||
|
Formats: []string{"iso", "qcow2"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := buildRuntimeConfig(io.Local, "/project", "/project/dist", "binary", buildConfig, false, "", "v1.2.3")
|
||||||
|
|
||||||
|
assert.Equal(t, []string{"-s", "-w"}, cfg.LDFlags)
|
||||||
|
assert.Equal(t, []string{"-trimpath"}, cfg.Flags)
|
||||||
|
assert.Equal(t, []string{"integration"}, cfg.BuildTags)
|
||||||
|
assert.Equal(t, []string{"FOO=bar"}, cfg.Env)
|
||||||
|
assert.True(t, cfg.CGO)
|
||||||
|
assert.True(t, cfg.Obfuscate)
|
||||||
|
assert.True(t, cfg.NSIS)
|
||||||
|
assert.Equal(t, "embed", cfg.WebView2)
|
||||||
|
assert.Equal(t, "Dockerfile.custom", cfg.Dockerfile)
|
||||||
|
assert.Equal(t, "ghcr.io", cfg.Registry)
|
||||||
|
assert.Equal(t, "owner/repo", cfg.Image)
|
||||||
|
assert.Equal(t, []string{"latest", "{{.Version}}"}, cfg.Tags)
|
||||||
|
assert.Equal(t, map[string]string{"VERSION": "{{.Version}}"}, cfg.BuildArgs)
|
||||||
|
assert.True(t, cfg.Push)
|
||||||
|
assert.True(t, cfg.Load)
|
||||||
|
assert.Equal(t, ".core/linuxkit/server.yml", cfg.LinuxKitConfig)
|
||||||
|
assert.Equal(t, []string{"iso", "qcow2"}, cfg.Formats)
|
||||||
|
assert.Equal(t, "v1.2.3", cfg.Version)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_buildRuntimeConfig_ImageOverride_Good(t *testing.T) {
|
||||||
|
buildConfig := &build.BuildConfig{
|
||||||
|
Build: build.Build{
|
||||||
|
Image: "owner/repo",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := buildRuntimeConfig(io.Local, "/project", "/project/dist", "binary", buildConfig, true, "cli/image", "v2.0.0")
|
||||||
|
|
||||||
|
assert.Equal(t, "cli/image", cfg.Image)
|
||||||
|
assert.True(t, cfg.Push)
|
||||||
|
assert.Equal(t, "v2.0.0", cfg.Version)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_buildRuntimeConfig_ClonesBuildArgs_Good(t *testing.T) {
|
||||||
|
buildConfig := &build.BuildConfig{
|
||||||
|
Build: build.Build{
|
||||||
|
BuildArgs: map[string]string{"VERSION": "v1.2.3"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := buildRuntimeConfig(io.Local, "/project", "/project/dist", "binary", buildConfig, false, "", "v1.2.3")
|
||||||
|
require.NotNil(t, cfg.BuildArgs)
|
||||||
|
|
||||||
|
cfg.BuildArgs["VERSION"] = "mutated"
|
||||||
|
assert.Equal(t, "v1.2.3", buildConfig.Build.BuildArgs["VERSION"])
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveArchiveFormat_Good(t *testing.T) {
|
||||||
|
t.Run("uses cli override when present", func(t *testing.T) {
|
||||||
|
format, err := resolveArchiveFormat("gz", "xz")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ArchiveFormatXZ, format)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("falls back to config when cli override is empty", func(t *testing.T) {
|
||||||
|
format, err := resolveArchiveFormat("zip", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ArchiveFormatZip, format)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveBuildVersion_Good(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
runGit(t, dir, "init")
|
||||||
|
runGit(t, dir, "config", "user.email", "test@example.com")
|
||||||
|
runGit(t, dir, "config", "user.name", "Test User")
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "README.md"), []byte("hello\n"), 0644))
|
||||||
|
runGit(t, dir, "add", ".")
|
||||||
|
runGit(t, dir, "commit", "-m", "feat: initial commit")
|
||||||
|
runGit(t, dir, "tag", "v1.4.2")
|
||||||
|
|
||||||
|
version, err := resolveBuildVersion(context.Background(), dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "v1.4.2", version)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_writeArtifactMetadata_Good(t *testing.T) {
|
||||||
|
t.Setenv("GITHUB_SHA", "abc1234def5678")
|
||||||
|
t.Setenv("GITHUB_REF", "refs/tags/v1.2.3")
|
||||||
|
t.Setenv("GITHUB_REPOSITORY", "owner/repo")
|
||||||
|
|
||||||
|
fs := io.Local
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
linuxDir := ax.Join(dir, "linux_amd64")
|
||||||
|
windowsDir := ax.Join(dir, "windows_amd64")
|
||||||
|
require.NoError(t, ax.MkdirAll(linuxDir, 0755))
|
||||||
|
require.NoError(t, ax.MkdirAll(windowsDir, 0755))
|
||||||
|
|
||||||
|
artifacts := []build.Artifact{
|
||||||
|
{Path: ax.Join(linuxDir, "sample"), OS: "linux", Arch: "amd64"},
|
||||||
|
{Path: ax.Join(windowsDir, "sample.exe"), OS: "windows", Arch: "amd64"},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := writeArtifactMetadata(fs, "sample", artifacts)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
verifyArtifactMeta := func(path string, expectedOS string, expectedArch string) {
|
||||||
|
content, readErr := ax.ReadFile(path)
|
||||||
|
require.NoError(t, readErr)
|
||||||
|
|
||||||
|
var meta map[string]any
|
||||||
|
require.NoError(t, json.Unmarshal(content, &meta))
|
||||||
|
|
||||||
|
assert.Equal(t, "sample", meta["name"])
|
||||||
|
assert.Equal(t, expectedOS, meta["os"])
|
||||||
|
assert.Equal(t, expectedArch, meta["arch"])
|
||||||
|
assert.Equal(t, "v1.2.3", meta["tag"])
|
||||||
|
assert.Equal(t, "owner/repo", meta["repo"])
|
||||||
|
}
|
||||||
|
|
||||||
|
verifyArtifactMeta(ax.Join(linuxDir, "artifact_meta.json"), "linux", "amd64")
|
||||||
|
verifyArtifactMeta(ax.Join(windowsDir, "artifact_meta.json"), "windows", "amd64")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_selectOutputArtifacts_Good(t *testing.T) {
|
||||||
|
rawArtifacts := []build.Artifact{{Path: "dist/raw"}}
|
||||||
|
archivedArtifacts := []build.Artifact{{Path: "dist/raw.tar.gz"}}
|
||||||
|
checksummedArtifacts := []build.Artifact{{Path: "dist/raw.tar.gz", Checksum: "abc123"}}
|
||||||
|
|
||||||
|
t.Run("prefers checksummed artifacts", func(t *testing.T) {
|
||||||
|
selected := selectOutputArtifacts(rawArtifacts, archivedArtifacts, checksummedArtifacts)
|
||||||
|
assert.Equal(t, checksummedArtifacts, selected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("falls back to archived artifacts", func(t *testing.T) {
|
||||||
|
selected := selectOutputArtifacts(rawArtifacts, archivedArtifacts, nil)
|
||||||
|
assert.Equal(t, archivedArtifacts, selected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("falls back to raw artifacts", func(t *testing.T) {
|
||||||
|
selected := selectOutputArtifacts(rawArtifacts, nil, nil)
|
||||||
|
assert.Equal(t, rawArtifacts, selected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_runProjectBuild_PwaOverride_Good(t *testing.T) {
|
||||||
|
expectedWD, err := ax.Getwd()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
original := runLocalPwaBuild
|
||||||
|
t.Cleanup(func() {
|
||||||
|
runLocalPwaBuild = original
|
||||||
|
})
|
||||||
|
|
||||||
|
called := false
|
||||||
|
runLocalPwaBuild = func(ctx context.Context, projectDir string) error {
|
||||||
|
called = true
|
||||||
|
assert.Equal(t, expectedWD, projectDir)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err = runProjectBuild(ProjectBuildRequest{
|
||||||
|
Context: context.Background(),
|
||||||
|
BuildType: "pwa",
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, called)
|
||||||
|
}
|
||||||
|
|
@ -7,18 +7,15 @@
|
||||||
package buildcmd
|
package buildcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"context"
|
||||||
"fmt"
|
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
coreio "dappco.re/go/core/io"
|
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"github.com/leaanthony/debme"
|
"github.com/leaanthony/debme"
|
||||||
"github.com/leaanthony/gosod"
|
"github.com/leaanthony/gosod"
|
||||||
|
|
@ -31,28 +28,32 @@ var (
|
||||||
errURLRequired = coreerr.E("buildcmd.Init", "the --url flag is required", nil)
|
errURLRequired = coreerr.E("buildcmd.Init", "the --url flag is required", nil)
|
||||||
)
|
)
|
||||||
|
|
||||||
// runPwaBuild downloads a PWA from URL and builds it.
|
// runLocalPwaBuild points at the local PWA build entrypoint.
|
||||||
func runPwaBuild(pwaURL string) error {
|
// Tests replace this to avoid invoking the real build toolchain.
|
||||||
fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.starting"), pwaURL)
|
var runLocalPwaBuild = runBuild
|
||||||
|
|
||||||
tempDir, err := os.MkdirTemp("", "core-pwa-build-*")
|
// runPwaBuild downloads a PWA from URL and builds it.
|
||||||
|
func runPwaBuild(ctx context.Context, pwaURL string) error {
|
||||||
|
core.Print(nil, "%s %s", i18n.T("cmd.build.pwa.starting"), pwaURL)
|
||||||
|
|
||||||
|
tempDir, err := ax.TempDir("core-pwa-build-*")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("pwa.runPwaBuild", i18n.T("common.error.failed", map[string]any{"Action": "create temporary directory"}), err)
|
return coreerr.E("pwa.runPwaBuild", i18n.T("common.error.failed", map[string]any{"Action": "create temporary directory"}), err)
|
||||||
}
|
}
|
||||||
// defer os.RemoveAll(tempDir) // Keep temp dir for debugging
|
// defer os.RemoveAll(tempDir) // Keep temp dir for debugging
|
||||||
fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.downloading_to"), tempDir)
|
core.Print(nil, "%s %s", i18n.T("cmd.build.pwa.downloading_to"), tempDir)
|
||||||
|
|
||||||
if err := downloadPWA(pwaURL, tempDir); err != nil {
|
if err := downloadPWA(ctx, pwaURL, tempDir); err != nil {
|
||||||
return coreerr.E("pwa.runPwaBuild", i18n.T("common.error.failed", map[string]any{"Action": "download PWA"}), err)
|
return coreerr.E("pwa.runPwaBuild", i18n.T("common.error.failed", map[string]any{"Action": "download PWA"}), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return runBuild(tempDir)
|
return runBuild(ctx, tempDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// downloadPWA fetches a PWA from a URL and saves assets locally.
|
// downloadPWA fetches a PWA from a URL and saves assets locally.
|
||||||
func downloadPWA(baseURL, destDir string) error {
|
func downloadPWA(ctx context.Context, baseURL, destDir string) error {
|
||||||
// Fetch the main HTML page
|
// Fetch the main HTML page
|
||||||
resp, err := http.Get(baseURL)
|
resp, err := getWithContext(ctx, baseURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "fetch URL"})+" "+baseURL, err)
|
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "fetch URL"})+" "+baseURL, err)
|
||||||
}
|
}
|
||||||
|
|
@ -67,17 +68,17 @@ func downloadPWA(baseURL, destDir string) error {
|
||||||
manifestURL, err := findManifestURL(string(body), baseURL)
|
manifestURL, err := findManifestURL(string(body), baseURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// If no manifest, it's not a PWA, but we can still try to package it as a simple site.
|
// If no manifest, it's not a PWA, but we can still try to package it as a simple site.
|
||||||
fmt.Printf("%s %s\n", i18n.T("common.label.warning"), i18n.T("cmd.build.pwa.no_manifest"))
|
core.Print(nil, "%s %s", i18n.T("common.label.warning"), i18n.T("cmd.build.pwa.no_manifest"))
|
||||||
if err := coreio.Local.Write(filepath.Join(destDir, "index.html"), string(body)); err != nil {
|
if err := ax.WriteString(ax.Join(destDir, "index.html"), string(body), 0o644); err != nil {
|
||||||
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err)
|
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("%s %s\n", i18n.T("cmd.build.pwa.found_manifest"), manifestURL)
|
core.Print(nil, "%s %s", i18n.T("cmd.build.pwa.found_manifest"), manifestURL)
|
||||||
|
|
||||||
// Fetch and parse the manifest
|
// Fetch and parse the manifest
|
||||||
manifest, err := fetchManifest(manifestURL)
|
manifest, err := fetchManifest(ctx, manifestURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "fetch or parse manifest"}), err)
|
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "fetch or parse manifest"}), err)
|
||||||
}
|
}
|
||||||
|
|
@ -85,23 +86,26 @@ func downloadPWA(baseURL, destDir string) error {
|
||||||
// Download all assets listed in the manifest
|
// Download all assets listed in the manifest
|
||||||
assets := collectAssets(manifest, manifestURL)
|
assets := collectAssets(manifest, manifestURL)
|
||||||
for _, assetURL := range assets {
|
for _, assetURL := range assets {
|
||||||
if err := downloadAsset(assetURL, destDir); err != nil {
|
if err := downloadAsset(ctx, assetURL, destDir); err != nil {
|
||||||
fmt.Printf("%s %s %s: %v\n", i18n.T("common.label.warning"), i18n.T("common.error.failed", map[string]any{"Action": "download asset"}), assetURL, err)
|
if ctx.Err() != nil {
|
||||||
|
return coreerr.E("pwa.downloadPWA", "download cancelled", ctx.Err())
|
||||||
|
}
|
||||||
|
core.Print(nil, "%s %s %s: %v", i18n.T("common.label.warning"), i18n.T("common.error.failed", map[string]any{"Action": "download asset"}), assetURL, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Also save the root index.html
|
// Also save the root index.html
|
||||||
if err := coreio.Local.Write(filepath.Join(destDir, "index.html"), string(body)); err != nil {
|
if err := ax.WriteString(ax.Join(destDir, "index.html"), string(body), 0o644); err != nil {
|
||||||
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err)
|
return coreerr.E("pwa.downloadPWA", i18n.T("common.error.failed", map[string]any{"Action": "write index.html"}), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println(i18n.T("cmd.build.pwa.download_complete"))
|
core.Println(i18n.T("cmd.build.pwa.download_complete"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// findManifestURL extracts the manifest URL from HTML content.
|
// findManifestURL extracts the manifest URL from HTML content.
|
||||||
func findManifestURL(htmlContent, baseURL string) (string, error) {
|
func findManifestURL(htmlContent, baseURL string) (string, error) {
|
||||||
doc, err := html.Parse(strings.NewReader(htmlContent))
|
doc, err := html.Parse(core.NewReader(htmlContent))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
@ -119,7 +123,7 @@ func findManifestURL(htmlContent, baseURL string) (string, error) {
|
||||||
href = a.Val
|
href = a.Val
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if rel == "manifest" && href != "" {
|
if relIncludesManifest(rel) && href != "" {
|
||||||
manifestPath = href
|
manifestPath = href
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
@ -147,16 +151,32 @@ func findManifestURL(htmlContent, baseURL string) (string, error) {
|
||||||
return manifestURL.String(), nil
|
return manifestURL.String(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// relIncludesManifest reports whether a rel attribute declares a manifest link.
|
||||||
|
// HTML allows multiple space-separated tokens and case-insensitive values.
|
||||||
|
func relIncludesManifest(rel string) bool {
|
||||||
|
for _, token := range strings.Fields(rel) {
|
||||||
|
if strings.EqualFold(token, "manifest") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
// fetchManifest downloads and parses a PWA manifest.
|
// fetchManifest downloads and parses a PWA manifest.
|
||||||
func fetchManifest(manifestURL string) (map[string]any, error) {
|
func fetchManifest(ctx context.Context, manifestURL string) (map[string]any, error) {
|
||||||
resp, err := http.Get(manifestURL)
|
resp, err := getWithContext(ctx, manifestURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer func() { _ = resp.Body.Close() }()
|
defer func() { _ = resp.Body.Close() }()
|
||||||
|
|
||||||
|
body, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
var manifest map[string]any
|
var manifest map[string]any
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&manifest); err != nil {
|
if err := ax.JSONUnmarshal(body, &manifest); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return manifest, nil
|
return manifest, nil
|
||||||
|
|
@ -191,8 +211,8 @@ func collectAssets(manifest map[string]any, manifestURL string) []string {
|
||||||
}
|
}
|
||||||
|
|
||||||
// downloadAsset fetches a single asset and saves it locally.
|
// downloadAsset fetches a single asset and saves it locally.
|
||||||
func downloadAsset(assetURL, destDir string) error {
|
func downloadAsset(ctx context.Context, assetURL, destDir string) error {
|
||||||
resp, err := http.Get(assetURL)
|
resp, err := getWithContext(ctx, assetURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -203,12 +223,13 @@ func downloadAsset(assetURL, destDir string) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
path := filepath.Join(destDir, filepath.FromSlash(u.Path))
|
assetPath := core.TrimPrefix(ax.FromSlash(u.Path), ax.DS())
|
||||||
if err := coreio.Local.EnsureDir(filepath.Dir(path)); err != nil {
|
path := ax.Join(destDir, assetPath)
|
||||||
|
if err := ax.MkdirAll(ax.Dir(path), 0o755); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
out, err := os.Create(path)
|
out, err := ax.Create(path)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -219,27 +240,27 @@ func downloadAsset(assetURL, destDir string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// runBuild builds a desktop application from a local directory.
|
// runBuild builds a desktop application from a local directory.
|
||||||
func runBuild(fromPath string) error {
|
func runBuild(ctx context.Context, fromPath string) error {
|
||||||
fmt.Printf("%s %s\n", i18n.T("cmd.build.from_path.starting"), fromPath)
|
core.Print(nil, "%s %s", i18n.T("cmd.build.from_path.starting"), fromPath)
|
||||||
|
|
||||||
if !coreio.Local.IsDir(fromPath) {
|
if !ax.IsDir(fromPath) {
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.must_be_directory"), nil)
|
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.must_be_directory"), nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
buildDir := ".core/build/app"
|
buildDir := ".core/build/app"
|
||||||
htmlDir := filepath.Join(buildDir, "html")
|
htmlDir := ax.Join(buildDir, "html")
|
||||||
appName := filepath.Base(fromPath)
|
appName := ax.Base(fromPath)
|
||||||
if strings.HasPrefix(appName, "core-pwa-build-") {
|
if core.HasPrefix(appName, "core-pwa-build-") {
|
||||||
appName = "pwa-app"
|
appName = "pwa-app"
|
||||||
}
|
}
|
||||||
outputExe := appName
|
outputExe := appName
|
||||||
|
|
||||||
if err := coreio.Local.DeleteAll(buildDir); err != nil {
|
if err := ax.RemoveAll(buildDir); err != nil {
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "clean build directory"}), err)
|
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "clean build directory"}), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// 1. Generate the project from the embedded template
|
// 1. Generate the project from the embedded template
|
||||||
fmt.Println(i18n.T("cmd.build.from_path.generating_template"))
|
core.Println(i18n.T("cmd.build.from_path.generating_template"))
|
||||||
templateFS, err := debme.FS(guiTemplate, "tmpl/gui")
|
templateFS, err := debme.FS(guiTemplate, "tmpl/gui")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "anchor template filesystem"}), err)
|
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "anchor template filesystem"}), err)
|
||||||
|
|
@ -255,67 +276,83 @@ func runBuild(fromPath string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// 2. Copy the user's web app files
|
// 2. Copy the user's web app files
|
||||||
fmt.Println(i18n.T("cmd.build.from_path.copying_files"))
|
core.Println(i18n.T("cmd.build.from_path.copying_files"))
|
||||||
if err := copyDir(fromPath, htmlDir); err != nil {
|
if err := copyDir(fromPath, htmlDir); err != nil {
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "copy application files"}), err)
|
return coreerr.E("pwa.runBuild", i18n.T("common.error.failed", map[string]any{"Action": "copy application files"}), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3. Compile the application
|
// 3. Compile the application
|
||||||
fmt.Println(i18n.T("cmd.build.from_path.compiling"))
|
core.Println(i18n.T("cmd.build.from_path.compiling"))
|
||||||
|
|
||||||
// Run go mod tidy
|
// Run go mod tidy
|
||||||
cmd := exec.Command("go", "mod", "tidy")
|
if err := ax.ExecDir(ctx, buildDir, "go", "mod", "tidy"); err != nil {
|
||||||
cmd.Dir = buildDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.go_mod_tidy"), err)
|
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.go_mod_tidy"), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Run go build
|
// Run go build
|
||||||
cmd = exec.Command("go", "build", "-o", outputExe)
|
if err := ax.ExecDir(ctx, buildDir, "go", "build", "-o", outputExe); err != nil {
|
||||||
cmd.Dir = buildDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.go_build"), err)
|
return coreerr.E("pwa.runBuild", i18n.T("cmd.build.from_path.error.go_build"), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("\n%s %s/%s\n", i18n.T("cmd.build.from_path.success"), buildDir, outputExe)
|
core.Println()
|
||||||
|
core.Print(nil, "%s %s/%s", i18n.T("cmd.build.from_path.success"), buildDir, outputExe)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getWithContext(ctx context.Context, targetURL string) (*http.Response, error) {
|
||||||
|
req, err := http.NewRequestWithContext(ctx, http.MethodGet, targetURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return http.DefaultClient.Do(req)
|
||||||
|
}
|
||||||
|
|
||||||
// copyDir recursively copies a directory from src to dst.
|
// copyDir recursively copies a directory from src to dst.
|
||||||
func copyDir(src, dst string) error {
|
func copyDir(src, dst string) error {
|
||||||
return filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
|
if err := ax.MkdirAll(dst, 0o755); err != nil {
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
relPath, err := filepath.Rel(src, path)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
dstPath := filepath.Join(dst, relPath)
|
|
||||||
|
|
||||||
if info.IsDir() {
|
|
||||||
return coreio.Local.EnsureDir(dstPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
srcFile, err := os.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer func() { _ = srcFile.Close() }()
|
|
||||||
|
|
||||||
dstFile, err := os.Create(dstPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer func() { _ = dstFile.Close() }()
|
|
||||||
|
|
||||||
_, err = io.Copy(dstFile, srcFile)
|
|
||||||
return err
|
return err
|
||||||
})
|
}
|
||||||
|
|
||||||
|
entries, err := ax.ReadDir(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
srcPath := ax.Join(src, entry.Name())
|
||||||
|
dstPath := ax.Join(dst, entry.Name())
|
||||||
|
|
||||||
|
if entry.IsDir() {
|
||||||
|
if err := copyDir(srcPath, dstPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
srcFile, err := ax.Open(srcPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
dstFile, err := ax.Create(dstPath)
|
||||||
|
if err != nil {
|
||||||
|
_ = srcFile.Close()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := io.Copy(dstFile, srcFile); err != nil {
|
||||||
|
_ = srcFile.Close()
|
||||||
|
_ = dstFile.Close()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := srcFile.Close(); err != nil {
|
||||||
|
_ = dstFile.Close()
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := dstFile.Close(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
37
cmd/build/cmd_pwa_test.go
Normal file
37
cmd/build/cmd_pwa_test.go
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestPwa_FindManifestURL_Good(t *testing.T) {
|
||||||
|
t.Run("accepts a standard manifest link", func(t *testing.T) {
|
||||||
|
htmlContent := `<html><head><link rel="manifest" href="/manifest.json"></head></html>`
|
||||||
|
|
||||||
|
got, err := findManifestURL(htmlContent, "https://example.test/app/")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "https://example.test/manifest.json", got)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts case-insensitive tokenised rel values", func(t *testing.T) {
|
||||||
|
htmlContent := `<html><head><link rel="Manifest icon" href="manifest.json"></head></html>`
|
||||||
|
|
||||||
|
got, err := findManifestURL(htmlContent, "https://example.test/app/")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "https://example.test/app/manifest.json", got)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPwa_FindManifestURL_Bad(t *testing.T) {
|
||||||
|
t.Run("returns an error when no manifest link exists", func(t *testing.T) {
|
||||||
|
htmlContent := `<html><head><link rel="icon" href="/icon.png"></head></html>`
|
||||||
|
|
||||||
|
got, err := findManifestURL(htmlContent, "https://example.test/app/")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, got)
|
||||||
|
assert.Contains(t, err.Error(), "pwa.findManifestURL")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -4,26 +4,27 @@ package buildcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/release"
|
"dappco.re/go/core/build/pkg/release"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Flag variables for release command
|
// Flag variables for release command.
|
||||||
var (
|
var (
|
||||||
releaseVersion string
|
releaseVersion string
|
||||||
releaseDraft bool
|
releaseDraft bool
|
||||||
releasePrerelease bool
|
releasePrerelease bool
|
||||||
releaseGoForLaunch bool
|
releaseLaunchMode bool
|
||||||
|
releaseArchiveFormat string
|
||||||
)
|
)
|
||||||
|
|
||||||
var releaseCmd = &cli.Command{
|
var releaseCmd = &cli.Command{
|
||||||
Use: "release",
|
Use: "release",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
return runRelease(cmd.Context(), !releaseGoForLaunch, releaseVersion, releaseDraft, releasePrerelease)
|
return runRelease(cmd.Context(), !releaseLaunchMode, releaseVersion, releaseDraft, releasePrerelease, releaseArchiveFormat)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -33,13 +34,16 @@ func setReleaseI18n() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func initReleaseFlags() {
|
func initReleaseFlags() {
|
||||||
releaseCmd.Flags().BoolVar(&releaseGoForLaunch, "we-are-go-for-launch", false, i18n.T("cmd.build.release.flag.go_for_launch"))
|
releaseCmd.Flags().BoolVar(&releaseLaunchMode, "we-are-go-for-launch", false, i18n.T("cmd.build.release.flag.go_for_launch"))
|
||||||
releaseCmd.Flags().StringVar(&releaseVersion, "version", "", i18n.T("cmd.build.release.flag.version"))
|
releaseCmd.Flags().StringVar(&releaseVersion, "version", "", i18n.T("cmd.build.release.flag.version"))
|
||||||
releaseCmd.Flags().BoolVar(&releaseDraft, "draft", false, i18n.T("cmd.build.release.flag.draft"))
|
releaseCmd.Flags().BoolVar(&releaseDraft, "draft", false, i18n.T("cmd.build.release.flag.draft"))
|
||||||
releaseCmd.Flags().BoolVar(&releasePrerelease, "prerelease", false, i18n.T("cmd.build.release.flag.prerelease"))
|
releaseCmd.Flags().BoolVar(&releasePrerelease, "prerelease", false, i18n.T("cmd.build.release.flag.prerelease"))
|
||||||
|
releaseCmd.Flags().StringVar(&releaseArchiveFormat, "archive-format", "", i18n.T("cmd.build.flag.archive_format"))
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddReleaseCommand adds the release subcommand to the build command.
|
// AddReleaseCommand adds the release subcommand to the build command.
|
||||||
|
//
|
||||||
|
// buildcmd.AddReleaseCommand(buildCmd)
|
||||||
func AddReleaseCommand(buildCmd *cli.Command) {
|
func AddReleaseCommand(buildCmd *cli.Command) {
|
||||||
setReleaseI18n()
|
setReleaseI18n()
|
||||||
initReleaseFlags()
|
initReleaseFlags()
|
||||||
|
|
@ -47,9 +51,9 @@ func AddReleaseCommand(buildCmd *cli.Command) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// runRelease executes the full release workflow: build + archive + checksum + publish.
|
// runRelease executes the full release workflow: build + archive + checksum + publish.
|
||||||
func runRelease(ctx context.Context, dryRun bool, version string, draft, prerelease bool) error {
|
func runRelease(ctx context.Context, dryRun bool, version string, draft, prerelease bool, archiveFormat string) error {
|
||||||
// Get current directory
|
// Get current directory
|
||||||
projectDir, err := os.Getwd()
|
projectDir, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("release", "get working directory", err)
|
return coreerr.E("release", "get working directory", err)
|
||||||
}
|
}
|
||||||
|
|
@ -74,6 +78,9 @@ func runRelease(ctx context.Context, dryRun bool, version string, draft, prerele
|
||||||
if version != "" {
|
if version != "" {
|
||||||
cfg.SetVersion(version)
|
cfg.SetVersion(version)
|
||||||
}
|
}
|
||||||
|
if err := applyReleaseArchiveFormatOverride(cfg, archiveFormat); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
// Apply draft/prerelease overrides to all publishers
|
// Apply draft/prerelease overrides to all publishers
|
||||||
if draft || prerelease {
|
if draft || prerelease {
|
||||||
|
|
@ -114,3 +121,20 @@ func runRelease(ctx context.Context, dryRun bool, version string, draft, prerele
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// applyReleaseArchiveFormatOverride applies the archive-format CLI override to the release config.
|
||||||
|
//
|
||||||
|
// applyReleaseArchiveFormatOverride(cfg, "xz") // cfg.Build.ArchiveFormat = "xz"
|
||||||
|
func applyReleaseArchiveFormatOverride(cfg *release.Config, archiveFormat string) error {
|
||||||
|
if cfg == nil || archiveFormat == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
formatValue, err := resolveArchiveFormat("", archiveFormat)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.Build.ArchiveFormat = string(formatValue)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
|
||||||
25
cmd/build/cmd_release_test.go
Normal file
25
cmd/build/cmd_release_test.go
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/pkg/release"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBuildCmd_applyReleaseArchiveFormatOverride_Good(t *testing.T) {
|
||||||
|
cfg := release.DefaultConfig()
|
||||||
|
|
||||||
|
err := applyReleaseArchiveFormatOverride(cfg, "xz")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "xz", cfg.Build.ArchiveFormat)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_applyReleaseArchiveFormatOverride_Bad(t *testing.T) {
|
||||||
|
cfg := release.DefaultConfig()
|
||||||
|
|
||||||
|
err := applyReleaseArchiveFormatOverride(cfg, "bogus")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Equal(t, "", cfg.Build.ArchiveFormat)
|
||||||
|
}
|
||||||
|
|
@ -7,24 +7,26 @@ package buildcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/sdk"
|
"dappco.re/go/core/build/pkg/sdk"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
)
|
)
|
||||||
|
|
||||||
// runBuildSDK handles the `core build sdk` command.
|
// runBuildSDK handles the `core build sdk` command.
|
||||||
func runBuildSDK(specPath, lang, version string, dryRun bool) error {
|
func runBuildSDK(ctx context.Context, specPath, lang, version string, dryRun bool) error {
|
||||||
ctx := context.Background()
|
projectDir, err := ax.Getwd()
|
||||||
|
|
||||||
projectDir, err := os.Getwd()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.SDK", "failed to get working directory", err)
|
return coreerr.E("build.SDK", "failed to get working directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return runBuildSDKInDir(ctx, projectDir, specPath, lang, version, dryRun)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runBuildSDKInDir(ctx context.Context, projectDir, specPath, lang, version string, dryRun bool) error {
|
||||||
// Load config
|
// Load config
|
||||||
config := sdk.DefaultConfig()
|
config := sdk.DefaultConfig()
|
||||||
if specPath != "" {
|
if specPath != "" {
|
||||||
|
|
@ -36,48 +38,48 @@ func runBuildSDK(specPath, lang, version string, dryRun bool) error {
|
||||||
s.SetVersion(version)
|
s.SetVersion(version)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.sdk.label")), i18n.T("cmd.build.sdk.generating"))
|
cli.Print("%s %s\n", buildHeaderStyle.Render(i18n.T("cmd.build.sdk.label")), i18n.T("cmd.build.sdk.generating"))
|
||||||
if dryRun {
|
if dryRun {
|
||||||
fmt.Printf(" %s\n", buildDimStyle.Render(i18n.T("cmd.build.sdk.dry_run_mode")))
|
cli.Print(" %s\n", buildDimStyle.Render(i18n.T("cmd.build.sdk.dry_run_mode")))
|
||||||
}
|
}
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
|
|
||||||
// Detect spec
|
// Validate the spec before generating anything.
|
||||||
detectedSpec, err := s.DetectSpec()
|
detectedSpec, err := s.ValidateSpec(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
cli.Print("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s\n", i18n.T("common.label.spec"), buildTargetStyle.Render(detectedSpec))
|
cli.Print(" %s %s\n", i18n.T("common.label.spec"), buildTargetStyle.Render(detectedSpec))
|
||||||
|
|
||||||
if dryRun {
|
if dryRun {
|
||||||
if lang != "" {
|
if lang != "" {
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.language_label"), buildTargetStyle.Render(lang))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.sdk.language_label"), buildTargetStyle.Render(lang))
|
||||||
} else {
|
} else {
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.languages_label"), buildTargetStyle.Render(strings.Join(config.Languages, ", ")))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.sdk.languages_label"), buildTargetStyle.Render(core.Join(", ", config.Languages...)))
|
||||||
}
|
}
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("cmd.build.label.ok")), i18n.T("cmd.build.sdk.would_generate"))
|
cli.Print("%s %s\n", buildSuccessStyle.Render(i18n.T("cmd.build.label.ok")), i18n.T("cmd.build.sdk.would_generate"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if lang != "" {
|
if lang != "" {
|
||||||
// Generate single language
|
// Generate single language
|
||||||
if err := s.GenerateLanguage(ctx, lang); err != nil {
|
if err := s.GenerateLanguage(ctx, lang); err != nil {
|
||||||
fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
cli.Print("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(lang))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(lang))
|
||||||
} else {
|
} else {
|
||||||
// Generate all
|
// Generate all
|
||||||
if err := s.Generate(ctx); err != nil {
|
if err := s.Generate(ctx); err != nil {
|
||||||
fmt.Printf("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
cli.Print("%s %v\n", buildErrorStyle.Render(i18n.T("common.label.error")), err)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
fmt.Printf(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(strings.Join(config.Languages, ", ")))
|
cli.Print(" %s %s\n", i18n.T("cmd.build.sdk.generated_label"), buildTargetStyle.Render(core.Join(", ", config.Languages...)))
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println()
|
cli.Blank()
|
||||||
fmt.Printf("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.sdk.complete"))
|
cli.Print("%s %s\n", buildSuccessStyle.Render(i18n.T("common.label.success")), i18n.T("cmd.build.sdk.complete"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
44
cmd/build/cmd_sdk_test.go
Normal file
44
cmd/build/cmd_sdk_test.go
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
const validBuildOpenAPISpec = `openapi: "3.0.0"
|
||||||
|
info:
|
||||||
|
title: Test API
|
||||||
|
version: "1.0.0"
|
||||||
|
paths:
|
||||||
|
/health:
|
||||||
|
get:
|
||||||
|
operationId: getHealth
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: OK
|
||||||
|
`
|
||||||
|
|
||||||
|
func TestRunBuildSDKInDir_ValidSpecDryRun_Good(t *testing.T) {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "openapi.yaml"), []byte(validBuildOpenAPISpec), 0o644))
|
||||||
|
|
||||||
|
err := runBuildSDKInDir(context.Background(), tmpDir, "", "go", "", true)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRunBuildSDKInDir_InvalidDocument_Bad(t *testing.T) {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "openapi.yaml"), []byte(`openapi: "3.0.0"
|
||||||
|
info:
|
||||||
|
title: Test API
|
||||||
|
paths: {}
|
||||||
|
`), 0o644))
|
||||||
|
|
||||||
|
err := runBuildSDKInDir(context.Background(), tmpDir, "", "", "", true)
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "invalid OpenAPI spec")
|
||||||
|
}
|
||||||
218
cmd/build/cmd_workflow.go
Normal file
218
cmd/build/cmd_workflow.go
Normal file
|
|
@ -0,0 +1,218 @@
|
||||||
|
// cmd_workflow.go implements the release workflow generation command.
|
||||||
|
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/i18n"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
releaseWorkflowPathInput string
|
||||||
|
releaseWorkflowPathAliasInput string
|
||||||
|
releaseWorkflowPathHyphenAliasInput string
|
||||||
|
releaseWorkflowPathSnakeAliasInput string
|
||||||
|
releaseWorkflowOutputPathHyphenInput string
|
||||||
|
releaseWorkflowOutputPathSnakeInput string
|
||||||
|
releaseWorkflowOutputPathInput string
|
||||||
|
releaseWorkflowOutputLegacyInput string
|
||||||
|
releaseWorkflowOutputPathAliasInput string
|
||||||
|
releaseWorkflowOutputPathHyphenAliasInput string
|
||||||
|
releaseWorkflowOutputPathSnakeAliasInput string
|
||||||
|
releaseWorkflowOutputHyphenAliasInput string
|
||||||
|
releaseWorkflowOutputSnakeAliasInput string
|
||||||
|
)
|
||||||
|
|
||||||
|
// releaseWorkflowRequestInputs keeps the workflow alias inputs grouped by the
|
||||||
|
// public request fields they represent, rather than by call-site position.
|
||||||
|
type releaseWorkflowRequestInputs struct {
|
||||||
|
pathInput string
|
||||||
|
workflowPathInput string
|
||||||
|
workflowPathSnakeInput string
|
||||||
|
workflowPathHyphenInput string
|
||||||
|
outputPathInput string
|
||||||
|
outputPathHyphenInput string
|
||||||
|
outputPathSnakeInput string
|
||||||
|
legacyOutputInput string
|
||||||
|
workflowOutputPathInput string
|
||||||
|
workflowOutputSnakeInput string
|
||||||
|
workflowOutputHyphenInput string
|
||||||
|
workflowOutputPathHyphenInput string
|
||||||
|
workflowOutputPathSnakeInput string
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowTargetPath merges the workflow path aliases and the
|
||||||
|
// workflow output aliases into one final target path.
|
||||||
|
//
|
||||||
|
// inputs := releaseWorkflowRequestInputs{pathInput: "ci/release.yml", outputPathInput: "ci/release.yml"}
|
||||||
|
// path, err := inputs.resolveReleaseWorkflowTargetPath("/tmp/project", io.Local)
|
||||||
|
func (inputs releaseWorkflowRequestInputs) resolveReleaseWorkflowTargetPath(projectDir string, medium io.Medium) (string, error) {
|
||||||
|
resolvedWorkflowPath, err := resolveReleaseWorkflowInputPathAliases(
|
||||||
|
projectDir,
|
||||||
|
inputs.pathInput,
|
||||||
|
inputs.workflowPathInput,
|
||||||
|
inputs.workflowPathSnakeInput,
|
||||||
|
inputs.workflowPathHyphenInput,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
resolvedWorkflowOutputPath, err := resolveReleaseWorkflowOutputPathAliases(
|
||||||
|
projectDir,
|
||||||
|
inputs.outputPathInput,
|
||||||
|
inputs.outputPathHyphenInput,
|
||||||
|
inputs.outputPathSnakeInput,
|
||||||
|
inputs.legacyOutputInput,
|
||||||
|
inputs.workflowOutputPathInput,
|
||||||
|
inputs.workflowOutputSnakeInput,
|
||||||
|
inputs.workflowOutputHyphenInput,
|
||||||
|
inputs.workflowOutputPathSnakeInput,
|
||||||
|
inputs.workflowOutputPathHyphenInput,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return build.ResolveReleaseWorkflowInputPathWithMedium(medium, projectDir, resolvedWorkflowPath, resolvedWorkflowOutputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
var releaseWorkflowCmd = &cli.Command{
|
||||||
|
Use: "workflow",
|
||||||
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
|
return runReleaseWorkflow(cmd.Context(), releaseWorkflowRequestInputs{
|
||||||
|
pathInput: releaseWorkflowPathInput,
|
||||||
|
workflowPathInput: releaseWorkflowPathAliasInput,
|
||||||
|
workflowPathSnakeInput: releaseWorkflowPathSnakeAliasInput,
|
||||||
|
workflowPathHyphenInput: releaseWorkflowPathHyphenAliasInput,
|
||||||
|
outputPathInput: releaseWorkflowOutputPathInput,
|
||||||
|
outputPathHyphenInput: releaseWorkflowOutputPathHyphenInput,
|
||||||
|
outputPathSnakeInput: releaseWorkflowOutputPathSnakeInput,
|
||||||
|
legacyOutputInput: releaseWorkflowOutputLegacyInput,
|
||||||
|
workflowOutputPathInput: releaseWorkflowOutputPathAliasInput,
|
||||||
|
workflowOutputSnakeInput: releaseWorkflowOutputSnakeAliasInput,
|
||||||
|
workflowOutputHyphenInput: releaseWorkflowOutputHyphenAliasInput,
|
||||||
|
workflowOutputPathHyphenInput: releaseWorkflowOutputPathHyphenAliasInput,
|
||||||
|
workflowOutputPathSnakeInput: releaseWorkflowOutputPathSnakeAliasInput,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
func setWorkflowI18n() {
|
||||||
|
releaseWorkflowCmd.Short = i18n.T("cmd.build.workflow.short")
|
||||||
|
releaseWorkflowCmd.Long = i18n.T("cmd.build.workflow.long")
|
||||||
|
}
|
||||||
|
|
||||||
|
func initWorkflowFlags() {
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowPathInput, "path", "", i18n.T("cmd.build.workflow.flag.path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowPathAliasInput, "workflowPath", "", i18n.T("cmd.build.workflow.flag.path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowPathHyphenAliasInput, "workflow-path", "", i18n.T("cmd.build.workflow.flag.path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowPathSnakeAliasInput, "workflow_path", "", i18n.T("cmd.build.workflow.flag.path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathInput, "outputPath", "", i18n.T("cmd.build.workflow.flag.output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathHyphenInput, "output-path", "", i18n.T("cmd.build.workflow.flag.output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathSnakeInput, "output_path", "", i18n.T("cmd.build.workflow.flag.output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputLegacyInput, "output", "", i18n.T("cmd.build.workflow.flag.output"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathAliasInput, "workflowOutputPath", "", i18n.T("cmd.build.workflow.flag.workflow_output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathHyphenAliasInput, "workflow-output-path", "", i18n.T("cmd.build.workflow.flag.workflow_output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputPathSnakeAliasInput, "workflow_output_path", "", i18n.T("cmd.build.workflow.flag.workflow_output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputHyphenAliasInput, "workflow-output", "", i18n.T("cmd.build.workflow.flag.workflow_output_path"))
|
||||||
|
releaseWorkflowCmd.Flags().StringVar(&releaseWorkflowOutputSnakeAliasInput, "workflow_output", "", i18n.T("cmd.build.workflow.flag.workflow_output_path"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildCmd := &cli.Command{Use: "build"}
|
||||||
|
// buildcmd.AddWorkflowCommand(buildCmd)
|
||||||
|
func AddWorkflowCommand(buildCmd *cli.Command) {
|
||||||
|
setWorkflowI18n()
|
||||||
|
initWorkflowFlags()
|
||||||
|
buildCmd.AddCommand(releaseWorkflowCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
// runReleaseWorkflow writes the embedded release workflow into the current
|
||||||
|
// project directory.
|
||||||
|
//
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{}) // writes .github/workflows/release.yml
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{pathInput: "ci/release.yml"}) // writes ./ci/release.yml under the project root
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowPathInput: "ci/release.yml"}) // uses the workflowPath alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowPathSnakeInput: "ci/release.yml"}) // uses the workflow_path alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowPathHyphenInput: "ci/release.yml"}) // uses the workflow-path alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{outputPathInput: "ci/release.yml"}) // uses the outputPath alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{legacyOutputInput: "ci/release.yml"}) // uses the legacy output alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowOutputPathInput: "ci/release.yml"}) // uses the workflowOutputPath alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowOutputHyphenInput: "ci/release.yml"}) // uses the workflow-output alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowOutputSnakeInput: "ci/release.yml"}) // uses the workflow_output alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowOutputPathSnakeInput: "ci/release.yml"}) // uses the workflow_output_path alias
|
||||||
|
// runReleaseWorkflow(ctx, releaseWorkflowRequestInputs{workflowOutputPathHyphenInput: "ci/release.yml"}) // uses the workflow-output-path alias
|
||||||
|
func runReleaseWorkflow(_ context.Context, inputs releaseWorkflowRequestInputs) error {
|
||||||
|
projectDir, err := ax.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("build.runReleaseWorkflow", "failed to get working directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
resolvedWorkflowPath, err := inputs.resolveReleaseWorkflowTargetPath(projectDir, io.Local)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return build.WriteReleaseWorkflow(io.Local, resolvedWorkflowPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowInputPathAliases("/tmp/project", "ci/release.yml", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
// resolveReleaseWorkflowInputPathAliases("/tmp/project", "", "ci/release.yml", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
func resolveReleaseWorkflowInputPathAliases(projectDir, pathInput, workflowPathInput, workflowPathSnakeInput, workflowPathHyphenInput string) (string, error) {
|
||||||
|
resolvedWorkflowPath, err := build.ResolveReleaseWorkflowInputPathAliases(
|
||||||
|
io.Local,
|
||||||
|
projectDir,
|
||||||
|
pathInput,
|
||||||
|
workflowPathInput,
|
||||||
|
workflowPathSnakeInput,
|
||||||
|
workflowPathHyphenInput,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("build.runReleaseWorkflow", "workflow path aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedWorkflowPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowOutputPathAliases("/tmp/project", "ci/release.yml", "", "", "", "", "", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
// resolveReleaseWorkflowOutputPathAliases("/tmp/project", "", "", "", "", "ci/release.yml", "", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
func resolveReleaseWorkflowOutputPathAliases(projectDir, outputPathInput, outputPathHyphenInput, outputPathSnakeInput, legacyOutputInput, workflowOutputPathInput, workflowOutputSnakeInput, workflowOutputHyphenInput, workflowOutputPathSnakeInput, workflowOutputPathHyphenInput string) (string, error) {
|
||||||
|
resolvedWorkflowOutputPath, err := build.ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(
|
||||||
|
io.Local,
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("build.runReleaseWorkflow", "workflow output aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedWorkflowOutputPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// runReleaseWorkflowInDir writes the embedded release workflow into projectDir.
|
||||||
|
//
|
||||||
|
// runReleaseWorkflowInDir("/tmp/project", "", "") // /tmp/project/.github/workflows/release.yml
|
||||||
|
// runReleaseWorkflowInDir("/tmp/project", "ci/release.yml", "") // /tmp/project/ci/release.yml
|
||||||
|
// runReleaseWorkflowInDir("/tmp/project", ".github/workflows", "") // /tmp/project/.github/workflows/release.yml
|
||||||
|
func runReleaseWorkflowInDir(projectDir, workflowPathInput, workflowOutputPathInput string) error {
|
||||||
|
resolvedPath, err := build.ResolveReleaseWorkflowInputPathWithMedium(io.Local, projectDir, workflowPathInput, workflowOutputPathInput)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return build.WriteReleaseWorkflow(io.Local, resolvedPath)
|
||||||
|
}
|
||||||
357
cmd/build/cmd_workflow_test.go
Normal file
357
cmd/build/cmd_workflow_test.go
Normal file
|
|
@ -0,0 +1,357 @@
|
||||||
|
package buildcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathInput_Good(t *testing.T) {
|
||||||
|
t.Run("accepts the preferred output path", func(t *testing.T) {
|
||||||
|
path, err := build.ResolveReleaseWorkflowOutputPath("ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the snake_case output path alias", func(t *testing.T) {
|
||||||
|
path, err := build.ResolveReleaseWorkflowOutputPath("", "ci/release.yml", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the legacy output alias", func(t *testing.T) {
|
||||||
|
path, err := build.ResolveReleaseWorkflowOutputPath("", "", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts matching output aliases", func(t *testing.T) {
|
||||||
|
path, err := build.ResolveReleaseWorkflowOutputPath("ci/release.yml", "ci/release.yml", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathInput_Bad(t *testing.T) {
|
||||||
|
_, err := build.ResolveReleaseWorkflowOutputPath("ci/release.yml", "ops/release.yml", "")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "output aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "ci/release.yml", "", "", "", "", "./ci/release.yml", "ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_CamelCaseGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "ci/release.yml", "", "", "", "", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_WorkflowCamelCaseGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "", "", "", "", "ci/release.yml", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_WorkflowHyphenGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "", "", "", "", "", "ci/release.yml", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_WorkflowSnakeGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "", "", "", "", "", "", "ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_Bad(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
_, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "ci/release.yml", "", "", "", "ops/release.yml", "", "", "", "")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "workflow output aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_HyphenatedGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "", "ci/release.yml", "", "", "", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_AbsoluteEquivalent_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
absolutePath := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "ci/release.yml", "", "", "", "", "", "", "", absolutePath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, absolutePath, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowOutputPathAliases_AbsoluteDirectory_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
absoluteDir := ax.Join(projectDir, "ops")
|
||||||
|
require.NoError(t, io.Local.EnsureDir(absoluteDir))
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowOutputPathAliases(projectDir, "", "", "", "", absoluteDir, "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(absoluteDir, "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowInputPathAliases_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowInputPathAliases(projectDir, "ci/release.yml", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowInputPathAliases_WorkflowPathGood(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := resolveReleaseWorkflowInputPathAliases(projectDir, "", "ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_resolveReleaseWorkflowInputPathAliases_Bad(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
_, err := resolveReleaseWorkflowInputPathAliases(projectDir, "ci/release.yml", "ops/release.yml", "", "")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "workflow path aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildCmd_RunReleaseWorkflow_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
t.Run("writes to the conventional workflow path by default", func(t *testing.T) {
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
assert.Contains(t, content, "--archive-format")
|
||||||
|
assert.Contains(t, content, "actions/download-artifact@v4")
|
||||||
|
assert.Contains(t, content, "command: ci")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("registers both path and output flags", func(t *testing.T) {
|
||||||
|
buildCmd := &cli.Command{Use: "build"}
|
||||||
|
AddWorkflowCommand(buildCmd)
|
||||||
|
|
||||||
|
pathFlag := releaseWorkflowCmd.Flags().Lookup("path")
|
||||||
|
workflowPathCamelFlag := releaseWorkflowCmd.Flags().Lookup("workflowPath")
|
||||||
|
workflowPathFlag := releaseWorkflowCmd.Flags().Lookup("workflow-path")
|
||||||
|
workflowPathSnakeFlag := releaseWorkflowCmd.Flags().Lookup("workflow_path")
|
||||||
|
outputPathCamelFlag := releaseWorkflowCmd.Flags().Lookup("outputPath")
|
||||||
|
outputPathFlag := releaseWorkflowCmd.Flags().Lookup("output-path")
|
||||||
|
outputPathSnakeFlag := releaseWorkflowCmd.Flags().Lookup("output_path")
|
||||||
|
outputFlag := releaseWorkflowCmd.Flags().Lookup("output")
|
||||||
|
workflowOutputPathCamelFlag := releaseWorkflowCmd.Flags().Lookup("workflowOutputPath")
|
||||||
|
workflowOutputPathFlag := releaseWorkflowCmd.Flags().Lookup("workflow-output-path")
|
||||||
|
workflowOutputPathSnakeFlag := releaseWorkflowCmd.Flags().Lookup("workflow_output_path")
|
||||||
|
workflowOutputFlag := releaseWorkflowCmd.Flags().Lookup("workflow-output")
|
||||||
|
workflowOutputSnakeFlag := releaseWorkflowCmd.Flags().Lookup("workflow_output")
|
||||||
|
|
||||||
|
assert.NotNil(t, pathFlag)
|
||||||
|
assert.NotNil(t, workflowPathCamelFlag)
|
||||||
|
assert.NotNil(t, workflowPathFlag)
|
||||||
|
assert.NotNil(t, workflowPathSnakeFlag)
|
||||||
|
assert.NotNil(t, outputPathCamelFlag)
|
||||||
|
assert.NotNil(t, outputPathFlag)
|
||||||
|
assert.NotNil(t, outputPathSnakeFlag)
|
||||||
|
assert.NotNil(t, outputFlag)
|
||||||
|
assert.NotNil(t, workflowOutputPathFlag)
|
||||||
|
assert.NotNil(t, workflowOutputPathSnakeFlag)
|
||||||
|
assert.NotEmpty(t, pathFlag.Usage)
|
||||||
|
assert.NotEmpty(t, workflowPathCamelFlag.Usage)
|
||||||
|
assert.NotEmpty(t, workflowPathFlag.Usage)
|
||||||
|
assert.NotEmpty(t, workflowPathSnakeFlag.Usage)
|
||||||
|
assert.NotEmpty(t, outputPathCamelFlag.Usage)
|
||||||
|
assert.NotEmpty(t, outputPathFlag.Usage)
|
||||||
|
assert.NotEmpty(t, outputPathSnakeFlag.Usage)
|
||||||
|
assert.NotEmpty(t, outputFlag.Usage)
|
||||||
|
assert.NotNil(t, workflowOutputPathCamelFlag)
|
||||||
|
assert.NotEmpty(t, workflowOutputPathFlag.Usage)
|
||||||
|
assert.NotEmpty(t, workflowOutputPathSnakeFlag.Usage)
|
||||||
|
assert.NotNil(t, workflowOutputFlag)
|
||||||
|
assert.NotNil(t, workflowOutputSnakeFlag)
|
||||||
|
assert.NotEmpty(t, workflowOutputFlag.Usage)
|
||||||
|
assert.NotEmpty(t, workflowOutputSnakeFlag.Usage)
|
||||||
|
assert.NotEqual(t, pathFlag.Usage, outputFlag.Usage)
|
||||||
|
assert.Equal(t, pathFlag.Usage, workflowPathCamelFlag.Usage)
|
||||||
|
assert.Equal(t, pathFlag.Usage, workflowPathFlag.Usage)
|
||||||
|
assert.Equal(t, workflowPathFlag.Usage, workflowPathSnakeFlag.Usage)
|
||||||
|
assert.Equal(t, outputPathFlag.Usage, outputPathCamelFlag.Usage)
|
||||||
|
assert.NotEqual(t, outputPathFlag.Usage, outputFlag.Usage)
|
||||||
|
assert.Equal(t, outputPathFlag.Usage, outputPathSnakeFlag.Usage)
|
||||||
|
assert.Equal(t, workflowOutputPathFlag.Usage, workflowOutputPathCamelFlag.Usage)
|
||||||
|
assert.Equal(t, workflowOutputPathFlag.Usage, workflowOutputPathSnakeFlag.Usage)
|
||||||
|
assert.Equal(t, workflowOutputPathFlag.Usage, workflowOutputFlag.Usage)
|
||||||
|
assert.Equal(t, workflowOutputPathFlag.Usage, workflowOutputSnakeFlag.Usage)
|
||||||
|
|
||||||
|
helpText, err := io.Local.Read("/workspace/locales/en.json")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, helpText, "--workflowPath/")
|
||||||
|
assert.Contains(t, helpText, "--outputPath/")
|
||||||
|
assert.Contains(t, helpText, "--workflow-output/")
|
||||||
|
assert.Contains(t, helpText, "--workflow_output/")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to a custom relative path", func(t *testing.T) {
|
||||||
|
customPath := "ci/release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, customPath, "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
assert.Contains(t, content, "--archive-format")
|
||||||
|
assert.Contains(t, content, "actions/download-artifact@v4")
|
||||||
|
assert.Contains(t, content, "command: ci")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside a directory-style relative path", func(t *testing.T) {
|
||||||
|
customPath := "ci/"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, customPath, "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, "ci", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside an existing directory without a trailing slash", func(t *testing.T) {
|
||||||
|
require.NoError(t, io.Local.EnsureDir(ax.Join(projectDir, "ops")))
|
||||||
|
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "ops", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, "ops", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside a bare directory-style path", func(t *testing.T) {
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, "ci", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside a current-directory-prefixed directory-style path", func(t *testing.T) {
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "./ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, "ci", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside the conventional workflows directory", func(t *testing.T) {
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, ".github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, ".github", "workflows", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes release.yml inside a current-directory-prefixed workflows directory", func(t *testing.T) {
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "./.github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, ".github", "workflows", "release.yml"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to the output alias", func(t *testing.T) {
|
||||||
|
customPath := "ci/alias-release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", customPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to the output-path alias", func(t *testing.T) {
|
||||||
|
customPath := "ci/output-path-release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", customPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to the output_path alias", func(t *testing.T) {
|
||||||
|
customPath := "ci/output_path-release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", customPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to the workflow-output alias", func(t *testing.T) {
|
||||||
|
customPath := "ci/workflow-output-release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", customPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes to the workflow_output alias", func(t *testing.T) {
|
||||||
|
customPath := "ci/workflow_output-release.yml"
|
||||||
|
err := runReleaseWorkflowInDir(projectDir, "", customPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, err := io.Local.Read(ax.Join(projectDir, customPath))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
})
|
||||||
|
}
|
||||||
68
cmd/ci/ci.go
68
cmd/ci/ci.go
|
|
@ -2,17 +2,16 @@ package ci
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/release"
|
"dappco.re/go/core/build/pkg/release"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Style aliases from shared
|
// Style aliases used by CI command output.
|
||||||
var (
|
var (
|
||||||
headerStyle = cli.RepoStyle
|
headerStyle = cli.RepoStyle
|
||||||
successStyle = cli.SuccessStyle
|
successStyle = cli.SuccessStyle
|
||||||
|
|
@ -21,15 +20,15 @@ var (
|
||||||
valueStyle = cli.ValueStyle
|
valueStyle = cli.ValueStyle
|
||||||
)
|
)
|
||||||
|
|
||||||
// Flag variables for ci command
|
// Flag variables for ci command.
|
||||||
var (
|
var (
|
||||||
ciGoForLaunch bool
|
ciLaunchMode bool
|
||||||
ciVersion string
|
ciVersion string
|
||||||
ciDraft bool
|
ciDraft bool
|
||||||
ciPrerelease bool
|
ciPrerelease bool
|
||||||
)
|
)
|
||||||
|
|
||||||
// Flag variables for changelog subcommand
|
// Flag variables for changelog subcommand.
|
||||||
var (
|
var (
|
||||||
changelogFromRef string
|
changelogFromRef string
|
||||||
changelogToRef string
|
changelogToRef string
|
||||||
|
|
@ -38,8 +37,8 @@ var (
|
||||||
var ciCmd = &cli.Command{
|
var ciCmd = &cli.Command{
|
||||||
Use: "ci",
|
Use: "ci",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
dryRun := !ciGoForLaunch
|
dryRun := !ciLaunchMode
|
||||||
return runCIPublish(dryRun, ciVersion, ciDraft, ciPrerelease)
|
return runCIPublish(cmd.Context(), dryRun, ciVersion, ciDraft, ciPrerelease)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -53,14 +52,14 @@ var ciInitCmd = &cli.Command{
|
||||||
var ciChangelogCmd = &cli.Command{
|
var ciChangelogCmd = &cli.Command{
|
||||||
Use: "changelog",
|
Use: "changelog",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
return runChangelog(changelogFromRef, changelogToRef)
|
return runChangelog(cmd.Context(), changelogFromRef, changelogToRef)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
var ciVersionCmd = &cli.Command{
|
var ciVersionCmd = &cli.Command{
|
||||||
Use: "version",
|
Use: "version",
|
||||||
RunE: func(cmd *cli.Command, args []string) error {
|
RunE: func(cmd *cli.Command, args []string) error {
|
||||||
return runCIReleaseVersion()
|
return runCIReleaseVersion(cmd.Context())
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -77,7 +76,7 @@ func setCII18n() {
|
||||||
|
|
||||||
func initCIFlags() {
|
func initCIFlags() {
|
||||||
// Main ci command flags
|
// Main ci command flags
|
||||||
ciCmd.Flags().BoolVar(&ciGoForLaunch, "we-are-go-for-launch", false, i18n.T("cmd.ci.flag.go_for_launch"))
|
ciCmd.Flags().BoolVar(&ciLaunchMode, "we-are-go-for-launch", false, i18n.T("cmd.ci.flag.go_for_launch"))
|
||||||
ciCmd.Flags().StringVar(&ciVersion, "version", "", i18n.T("cmd.ci.flag.version"))
|
ciCmd.Flags().StringVar(&ciVersion, "version", "", i18n.T("cmd.ci.flag.version"))
|
||||||
ciCmd.Flags().BoolVar(&ciDraft, "draft", false, i18n.T("cmd.ci.flag.draft"))
|
ciCmd.Flags().BoolVar(&ciDraft, "draft", false, i18n.T("cmd.ci.flag.draft"))
|
||||||
ciCmd.Flags().BoolVar(&ciPrerelease, "prerelease", false, i18n.T("cmd.ci.flag.prerelease"))
|
ciCmd.Flags().BoolVar(&ciPrerelease, "prerelease", false, i18n.T("cmd.ci.flag.prerelease"))
|
||||||
|
|
@ -93,10 +92,8 @@ func initCIFlags() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// runCIPublish publishes pre-built artifacts from dist/.
|
// runCIPublish publishes pre-built artifacts from dist/.
|
||||||
func runCIPublish(dryRun bool, version string, draft, prerelease bool) error {
|
func runCIPublish(ctx context.Context, dryRun bool, version string, draft, prerelease bool) error {
|
||||||
ctx := context.Background()
|
projectDir, err := ax.Getwd()
|
||||||
|
|
||||||
projectDir, err := os.Getwd()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.WrapVerb(err, "get", "working directory")
|
return cli.WrapVerb(err, "get", "working directory")
|
||||||
}
|
}
|
||||||
|
|
@ -155,11 +152,15 @@ func runCIPublish(dryRun bool, version string, draft, prerelease bool) error {
|
||||||
|
|
||||||
// runCIReleaseInit scaffolds a release config.
|
// runCIReleaseInit scaffolds a release config.
|
||||||
func runCIReleaseInit() error {
|
func runCIReleaseInit() error {
|
||||||
cwd, err := os.Getwd()
|
cwd, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err)
|
return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return runCIReleaseInitInDir(cwd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runCIReleaseInitInDir(cwd string) error {
|
||||||
cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("init")), i18n.T("cmd.ci.init.initializing"))
|
cli.Print("%s %s\n\n", dimStyle.Render(i18n.Label("init")), i18n.T("cmd.ci.init.initializing"))
|
||||||
|
|
||||||
if release.ConfigExists(cwd) {
|
if release.ConfigExists(cwd) {
|
||||||
|
|
@ -167,7 +168,7 @@ func runCIReleaseInit() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
cfg := release.DefaultConfig()
|
cfg := release.ScaffoldConfig()
|
||||||
if err := release.WriteConfig(cfg, cwd); err != nil {
|
if err := release.WriteConfig(cfg, cwd); err != nil {
|
||||||
return cli.Err("%s: %w", i18n.T("i18n.fail.create", "config"), err)
|
return cli.Err("%s: %w", i18n.T("i18n.fail.create", "config"), err)
|
||||||
}
|
}
|
||||||
|
|
@ -183,14 +184,14 @@ func runCIReleaseInit() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// runChangelog generates a changelog between two git refs.
|
// runChangelog generates a changelog between two git refs.
|
||||||
func runChangelog(fromRef, toRef string) error {
|
func runChangelog(ctx context.Context, fromRef, toRef string) error {
|
||||||
cwd, err := os.Getwd()
|
cwd, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err)
|
return cli.Err("%s: %w", i18n.T("i18n.fail.get", "working directory"), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if fromRef == "" || toRef == "" {
|
if fromRef == "" || toRef == "" {
|
||||||
tag, err := latestTag(cwd)
|
tag, err := latestTagWithContext(ctx, cwd)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
if fromRef == "" {
|
if fromRef == "" {
|
||||||
fromRef = tag
|
fromRef = tag
|
||||||
|
|
@ -199,6 +200,9 @@ func runChangelog(fromRef, toRef string) error {
|
||||||
toRef = "HEAD"
|
toRef = "HEAD"
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return ctx.Err()
|
||||||
|
}
|
||||||
cli.Text(i18n.T("cmd.ci.changelog.no_tags"))
|
cli.Text(i18n.T("cmd.ci.changelog.no_tags"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -206,7 +210,7 @@ func runChangelog(fromRef, toRef string) error {
|
||||||
|
|
||||||
cli.Print("%s %s..%s\n\n", dimStyle.Render(i18n.T("cmd.ci.changelog.generating")), fromRef, toRef)
|
cli.Print("%s %s..%s\n\n", dimStyle.Render(i18n.T("cmd.ci.changelog.generating")), fromRef, toRef)
|
||||||
|
|
||||||
changelog, err := release.Generate(cwd, fromRef, toRef)
|
changelog, err := release.GenerateWithContext(ctx, cwd, fromRef, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.Err("%s: %w", i18n.T("i18n.fail.generate", "changelog"), err)
|
return cli.Err("%s: %w", i18n.T("i18n.fail.generate", "changelog"), err)
|
||||||
}
|
}
|
||||||
|
|
@ -216,13 +220,13 @@ func runChangelog(fromRef, toRef string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// runCIReleaseVersion shows the determined version.
|
// runCIReleaseVersion shows the determined version.
|
||||||
func runCIReleaseVersion() error {
|
func runCIReleaseVersion(ctx context.Context) error {
|
||||||
projectDir, err := os.Getwd()
|
projectDir, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.WrapVerb(err, "get", "working directory")
|
return cli.WrapVerb(err, "get", "working directory")
|
||||||
}
|
}
|
||||||
|
|
||||||
version, err := release.DetermineVersion(projectDir)
|
version, err := release.DetermineVersionWithContext(ctx, projectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return cli.WrapVerb(err, "determine", "version")
|
return cli.WrapVerb(err, "determine", "version")
|
||||||
}
|
}
|
||||||
|
|
@ -231,12 +235,10 @@ func runCIReleaseVersion() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func latestTag(dir string) (string, error) {
|
func latestTagWithContext(ctx context.Context, dir string) (string, error) {
|
||||||
cmd := exec.Command("git", "describe", "--tags", "--abbrev=0")
|
out, err := ax.RunDir(ctx, dir, "git", "describe", "--tags", "--abbrev=0")
|
||||||
cmd.Dir = dir
|
|
||||||
out, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return strings.TrimSpace(string(out)), nil
|
return core.Trim(out), nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
26
cmd/ci/ci_test.go
Normal file
26
cmd/ci/ci_test.go
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
package ci
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/release"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCI_runCIReleaseInitInDir_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
err := runCIReleaseInitInDir(projectDir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
configPath := release.ConfigPath(projectDir)
|
||||||
|
content, err := ax.ReadFile(configPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Contains(t, string(content), "sdk:")
|
||||||
|
assert.Contains(t, string(content), "spec: api/openapi.yaml")
|
||||||
|
assert.Contains(t, string(content), "languages:")
|
||||||
|
assert.Contains(t, string(content), "- typescript")
|
||||||
|
}
|
||||||
|
|
@ -1,12 +1,6 @@
|
||||||
// Package ci provides release lifecycle commands for CI/CD pipelines.
|
// Package ci registers release lifecycle commands.
|
||||||
//
|
//
|
||||||
// Commands:
|
// ci.AddCICommands(root)
|
||||||
// - ci init: scaffold release config
|
|
||||||
// - ci changelog: generate changelog from git history
|
|
||||||
// - ci version: show determined version
|
|
||||||
// - ci publish: publish pre-built artifacts (dry-run by default)
|
|
||||||
//
|
|
||||||
// Configuration via .core/release.yaml.
|
|
||||||
package ci
|
package ci
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
|
@ -18,6 +12,8 @@ func init() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddCICommands registers the 'ci' command and all subcommands.
|
// AddCICommands registers the 'ci' command and all subcommands.
|
||||||
|
//
|
||||||
|
// ci.AddCICommands(root)
|
||||||
func AddCICommands(root *cli.Command) {
|
func AddCICommands(root *cli.Command) {
|
||||||
setCII18n()
|
setCII18n()
|
||||||
initCIFlags()
|
initCIFlags()
|
||||||
|
|
|
||||||
|
|
@ -8,8 +8,8 @@
|
||||||
package sdkcmd
|
package sdkcmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"context"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/sdk"
|
"dappco.re/go/core/build/pkg/sdk"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -61,6 +61,8 @@ func setSDKI18n() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddSDKCommands registers the 'sdk' command and all subcommands.
|
// AddSDKCommands registers the 'sdk' command and all subcommands.
|
||||||
|
//
|
||||||
|
// sdkcmd.AddSDKCommands(root)
|
||||||
func AddSDKCommands(root *cli.Command) {
|
func AddSDKCommands(root *cli.Command) {
|
||||||
setSDKI18n()
|
setSDKI18n()
|
||||||
|
|
||||||
|
|
@ -79,7 +81,7 @@ func AddSDKCommands(root *cli.Command) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func runSDKDiff(basePath, specPath string) error {
|
func runSDKDiff(basePath, specPath string) error {
|
||||||
projectDir, err := os.Getwd()
|
projectDir, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("sdk.Diff", "failed to get working directory", err)
|
return coreerr.E("sdk.Diff", "failed to get working directory", err)
|
||||||
}
|
}
|
||||||
|
|
@ -119,16 +121,20 @@ func runSDKDiff(basePath, specPath string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func runSDKValidate(specPath string) error {
|
func runSDKValidate(specPath string) error {
|
||||||
projectDir, err := os.Getwd()
|
projectDir, err := ax.Getwd()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("sdk.Validate", "failed to get working directory", err)
|
return coreerr.E("sdk.Validate", "failed to get working directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return runSDKValidateInDir(context.Background(), projectDir, specPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func runSDKValidateInDir(ctx context.Context, projectDir, specPath string) error {
|
||||||
s := sdk.New(projectDir, &sdk.Config{Spec: specPath})
|
s := sdk.New(projectDir, &sdk.Config{Spec: specPath})
|
||||||
|
|
||||||
cli.Print("%s %s\n", sdkHeaderStyle.Render(i18n.T("cmd.sdk.label.sdk")), i18n.T("cmd.sdk.validate.validating"))
|
cli.Print("%s %s\n", sdkHeaderStyle.Render(i18n.T("cmd.sdk.label.sdk")), i18n.T("cmd.sdk.validate.validating"))
|
||||||
|
|
||||||
detectedPath, err := s.DetectSpec()
|
detectedPath, err := s.ValidateSpec(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
cli.Print("%s %v\n", sdkErrorStyle.Render(i18n.Label("error")), err)
|
cli.Print("%s %v\n", sdkErrorStyle.Render(i18n.Label("error")), err)
|
||||||
return err
|
return err
|
||||||
|
|
|
||||||
45
cmd/sdk/cmd_test.go
Normal file
45
cmd/sdk/cmd_test.go
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
package sdkcmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
const validOpenAPISpec = `openapi: "3.0.0"
|
||||||
|
info:
|
||||||
|
title: Test API
|
||||||
|
version: "1.0.0"
|
||||||
|
paths:
|
||||||
|
/health:
|
||||||
|
get:
|
||||||
|
operationId: getHealth
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
description: OK
|
||||||
|
`
|
||||||
|
|
||||||
|
func TestRunSDKValidate_Good(t *testing.T) {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "openapi.yaml"), []byte(validOpenAPISpec), 0o644))
|
||||||
|
|
||||||
|
err := runSDKValidateInDir(context.Background(), tmpDir, "")
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRunSDKValidate_InvalidDocument_Bad(t *testing.T) {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "openapi.yaml"), []byte(`openapi: "3.0.0"
|
||||||
|
info:
|
||||||
|
title: Test API
|
||||||
|
paths: {}
|
||||||
|
`), 0o644))
|
||||||
|
|
||||||
|
err := runSDKValidateInDir(context.Background(), tmpDir, "")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "invalid OpenAPI spec")
|
||||||
|
}
|
||||||
440
docs/RFC-CORE-008-AGENT-EXPERIENCE.md
Normal file
440
docs/RFC-CORE-008-AGENT-EXPERIENCE.md
Normal file
|
|
@ -0,0 +1,440 @@
|
||||||
|
# RFC-025: Agent Experience (AX) Design Principles
|
||||||
|
|
||||||
|
- **Status:** Draft
|
||||||
|
- **Authors:** Snider, Cladius
|
||||||
|
- **Date:** 2026-03-19
|
||||||
|
- **Applies to:** All Core ecosystem packages (CoreGO, CorePHP, CoreTS, core-agent)
|
||||||
|
|
||||||
|
## Abstract
|
||||||
|
|
||||||
|
Agent Experience (AX) is a design paradigm for software systems where the primary code consumer is an AI agent, not a human developer. AX sits alongside User Experience (UX) and Developer Experience (DX) as the third era of interface design.
|
||||||
|
|
||||||
|
This RFC establishes AX as a formal design principle for the Core ecosystem and defines the conventions that follow from it.
|
||||||
|
|
||||||
|
## Motivation
|
||||||
|
|
||||||
|
As of early 2026, AI agents write, review, and maintain the majority of code in the Core ecosystem. The original author has not manually edited code (outside of Core struct design) since October 2025. Code is processed semantically — agents reason about intent, not characters.
|
||||||
|
|
||||||
|
Design patterns inherited from the human-developer era optimise for the wrong consumer:
|
||||||
|
|
||||||
|
- **Short names** save keystrokes but increase semantic ambiguity
|
||||||
|
- **Functional option chains** are fluent for humans but opaque for agents tracing configuration
|
||||||
|
- **Error-at-every-call-site** produces 50% boilerplate that obscures intent
|
||||||
|
- **Generic type parameters** force agents to carry type context that the runtime already has
|
||||||
|
- **Panic-hiding conventions** (`Must*`) create implicit control flow that agents must special-case
|
||||||
|
|
||||||
|
AX acknowledges this shift and provides principles for designing code, APIs, file structures, and conventions that serve AI agents as first-class consumers.
|
||||||
|
|
||||||
|
## The Three Eras
|
||||||
|
|
||||||
|
| Era | Primary Consumer | Optimises For | Key Metric |
|
||||||
|
|-----|-----------------|---------------|------------|
|
||||||
|
| UX | End users | Discoverability, forgiveness, visual clarity | Task completion time |
|
||||||
|
| DX | Developers | Typing speed, IDE support, convention familiarity | Time to first commit |
|
||||||
|
| AX | AI agents | Predictability, composability, semantic navigation | Correct-on-first-pass rate |
|
||||||
|
|
||||||
|
AX does not replace UX or DX. End users still need good UX. Developers still need good DX. But when the primary code author and maintainer is an AI agent, the codebase should be designed for that consumer first.
|
||||||
|
|
||||||
|
## Principles
|
||||||
|
|
||||||
|
### 1. Predictable Names Over Short Names
|
||||||
|
|
||||||
|
Names are tokens that agents pattern-match across languages and contexts. Abbreviations introduce mapping overhead.
|
||||||
|
|
||||||
|
```
|
||||||
|
Config not Cfg
|
||||||
|
Service not Srv
|
||||||
|
Embed not Emb
|
||||||
|
Error not Err (as a subsystem name; err for local variables is fine)
|
||||||
|
Options not Opts
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** If a name would require a comment to explain, it is too short.
|
||||||
|
|
||||||
|
**Exception:** Industry-standard abbreviations that are universally understood (`HTTP`, `URL`, `ID`, `IPC`, `I18n`) are acceptable. The test: would an agent trained on any mainstream language recognise it without context?
|
||||||
|
|
||||||
|
### 2. Comments as Usage Examples
|
||||||
|
|
||||||
|
The function signature tells WHAT. The comment shows HOW with real values.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Detect the project type from files present
|
||||||
|
setup.Detect("/path/to/project")
|
||||||
|
|
||||||
|
// Set up a workspace with auto-detected template
|
||||||
|
setup.Run(setup.Options{Path: ".", Template: "auto"})
|
||||||
|
|
||||||
|
// Scaffold a PHP module workspace
|
||||||
|
setup.Run(setup.Options{Path: "./my-module", Template: "php"})
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** If a comment restates what the type signature already says, delete it. If a comment shows a concrete usage with realistic values, keep it.
|
||||||
|
|
||||||
|
**Rationale:** Agents learn from examples more effectively than from descriptions. A comment like "Run executes the setup process" adds zero information. A comment like `setup.Run(setup.Options{Path: ".", Template: "auto"})` teaches an agent exactly how to call the function.
|
||||||
|
|
||||||
|
### 3. Path Is Documentation
|
||||||
|
|
||||||
|
File and directory paths should be self-describing. An agent navigating the filesystem should understand what it is looking at without reading a README.
|
||||||
|
|
||||||
|
```
|
||||||
|
flow/deploy/to/homelab.yaml — deploy TO the homelab
|
||||||
|
flow/deploy/from/github.yaml — deploy FROM GitHub
|
||||||
|
flow/code/review.yaml — code review flow
|
||||||
|
template/file/go/struct.go.tmpl — Go struct file template
|
||||||
|
template/dir/workspace/php/ — PHP workspace scaffold
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** If an agent needs to read a file to understand what a directory contains, the directory naming has failed.
|
||||||
|
|
||||||
|
**Corollary:** The unified path convention (folder structure = HTTP route = CLI command = test path) is AX-native. One path, every surface.
|
||||||
|
|
||||||
|
### 4. Templates Over Freeform
|
||||||
|
|
||||||
|
When an agent generates code from a template, the output is constrained to known-good shapes. When an agent writes freeform, the output varies.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Template-driven — consistent output
|
||||||
|
lib.RenderFile("php/action", data)
|
||||||
|
lib.ExtractDir("php", targetDir, data)
|
||||||
|
|
||||||
|
// Freeform — variance in output
|
||||||
|
"write a PHP action class that..."
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** For any code pattern that recurs, provide a template. Templates are guardrails for agents.
|
||||||
|
|
||||||
|
**Scope:** Templates apply to file generation, workspace scaffolding, config generation, and commit messages. They do NOT apply to novel logic — agents should write business logic freeform with the domain knowledge available.
|
||||||
|
|
||||||
|
### 5. Declarative Over Imperative
|
||||||
|
|
||||||
|
Agents reason better about declarations of intent than sequences of operations.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Declarative — agent sees what should happen
|
||||||
|
steps:
|
||||||
|
- name: build
|
||||||
|
flow: tools/docker-build
|
||||||
|
with:
|
||||||
|
context: "{{ .app_dir }}"
|
||||||
|
image_name: "{{ .image_name }}"
|
||||||
|
|
||||||
|
- name: deploy
|
||||||
|
flow: deploy/with/docker
|
||||||
|
with:
|
||||||
|
host: "{{ .host }}"
|
||||||
|
```
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Imperative — agent must trace execution
|
||||||
|
cmd := exec.Command("docker", "build", "--platform", "linux/amd64", "-t", imageName, ".")
|
||||||
|
cmd.Dir = appDir
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
return fmt.Errorf("docker build: %w", err)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** Orchestration, configuration, and pipeline logic should be declarative (YAML/JSON). Implementation logic should be imperative (Go/PHP/TS). The boundary is: if an agent needs to compose or modify the logic, make it declarative.
|
||||||
|
|
||||||
|
### 6. Universal Types (Core Primitives)
|
||||||
|
|
||||||
|
Every component in the ecosystem accepts and returns the same primitive types. An agent processing any level of the tree sees identical shapes.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Universal contract
|
||||||
|
setup.Run(core.Options{Path: ".", Template: "auto"})
|
||||||
|
brain.New(core.Options{Name: "openbrain"})
|
||||||
|
deploy.Run(core.Options{Flow: "deploy/to/homelab"})
|
||||||
|
|
||||||
|
// Fractal — Core itself is a Service
|
||||||
|
core.New(core.Options{
|
||||||
|
Services: []core.Service{
|
||||||
|
process.New(core.Options{Name: "process"}),
|
||||||
|
brain.New(core.Options{Name: "brain"}),
|
||||||
|
},
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
**Core primitive types:**
|
||||||
|
|
||||||
|
| Type | Purpose |
|
||||||
|
|------|---------|
|
||||||
|
| `core.Options` | Input configuration (what you want) |
|
||||||
|
| `core.Config` | Runtime settings (what is active) |
|
||||||
|
| `core.Data` | Embedded or stored content |
|
||||||
|
| `core.Service` | A managed component with lifecycle |
|
||||||
|
| `core.Result[T]` | Return value with OK/fail state |
|
||||||
|
|
||||||
|
**What this replaces:**
|
||||||
|
|
||||||
|
| Go Convention | Core AX | Why |
|
||||||
|
|--------------|---------|-----|
|
||||||
|
| `func With*(v) Option` | `core.Options{Field: v}` | Struct literal is parseable; option chain requires tracing |
|
||||||
|
| `func Must*(v) T` | `core.Result[T]` | No hidden panics; errors flow through Core |
|
||||||
|
| `func *For[T](c) T` | `c.Service("name")` | String lookup is greppable; generics require type context |
|
||||||
|
| `val, err :=` everywhere | Single return via `core.Result` | Intent not obscured by error handling |
|
||||||
|
| `_ = err` | Never needed | Core handles all errors internally |
|
||||||
|
|
||||||
|
### 7. Directory as Semantics
|
||||||
|
|
||||||
|
The directory structure tells an agent the intent before it reads a word. Top-level directories are semantic categories, not organisational bins.
|
||||||
|
|
||||||
|
```
|
||||||
|
plans/
|
||||||
|
├── code/ # Pure primitives — read for WHAT exists
|
||||||
|
├── project/ # Products — read for WHAT we're building and WHY
|
||||||
|
└── rfc/ # Contracts — read for constraints and rules
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** An agent should know what kind of document it's reading from the path alone. `code/core/go/io/RFC.md` = a lib primitive spec. `project/ofm/RFC.md` = a product spec that cross-references code/. `rfc/snider/borg/RFC-BORG-006-SMSG-FORMAT.md` = an immutable contract for the Borg SMSG protocol.
|
||||||
|
|
||||||
|
**Corollary:** The three-way split (code/project/rfc) extends principle 3 (Path Is Documentation) from files to entire subtrees. The path IS the metadata.
|
||||||
|
|
||||||
|
### 8. Lib Never Imports Consumer
|
||||||
|
|
||||||
|
Dependency flows one direction. Libraries define primitives. Consumers compose from them. A new feature in a consumer can never break a library.
|
||||||
|
|
||||||
|
```
|
||||||
|
code/core/go/* → lib tier (stable foundation)
|
||||||
|
code/core/agent/ → consumer tier (composes from go/*)
|
||||||
|
code/core/cli/ → consumer tier (composes from go/*)
|
||||||
|
code/core/gui/ → consumer tier (composes from go/*)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** If package A is in `go/` and package B is in the consumer tier, B may import A but A must never import B. The repo naming convention enforces this: `go-{name}` = lib, bare `{name}` = consumer.
|
||||||
|
|
||||||
|
**Why this matters for agents:** When an agent is dispatched to implement a feature in `core/agent`, it can freely import from `go-io`, `go-scm`, `go-process`. But if an agent is dispatched to `go-io`, it knows its changes are foundational — every consumer depends on it, so the contract must not break.
|
||||||
|
|
||||||
|
### 9. Issues Are N+(rounds) Deep
|
||||||
|
|
||||||
|
Problems in code and specs are layered. Surface issues mask deeper issues. Fixing the surface reveals the next layer. This is not a failure mode — it is the discovery process.
|
||||||
|
|
||||||
|
```
|
||||||
|
Pass 1: Find 16 issues (surface — naming, imports, obvious errors)
|
||||||
|
Pass 2: Find 11 issues (structural — contradictions, missing types)
|
||||||
|
Pass 3: Find 5 issues (architectural — signature mismatches, registration gaps)
|
||||||
|
Pass 4: Find 4 issues (contract — cross-spec API mismatches)
|
||||||
|
Pass 5: Find 2 issues (mechanical — path format, nil safety)
|
||||||
|
Pass N: Findings are trivial → spec/code is complete
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** Iteration is required, not a failure. Each pass sees what the previous pass could not, because the context changed. An agent dispatched with the same task on the same repo will find different things each time — this is correct behaviour.
|
||||||
|
|
||||||
|
**Corollary:** The cheapest model should do the most passes (surface work). The frontier model should arrive last, when only deep issues remain. Tiered iteration: grunt model grinds → mid model pre-warms → frontier model polishes.
|
||||||
|
|
||||||
|
**Anti-pattern:** One-shot generation expecting valid output. No model, no human, produces correct-on-first-pass for non-trivial work. Expecting it wastes the first pass on surface issues that a cheaper pass would have caught.
|
||||||
|
|
||||||
|
### 10. CLI Tests as Artifact Validation
|
||||||
|
|
||||||
|
Unit tests verify the code. CLI tests verify the binary. The directory structure IS the command structure — path maps to command, Taskfile runs the test.
|
||||||
|
|
||||||
|
```
|
||||||
|
tests/cli/
|
||||||
|
├── core/
|
||||||
|
│ └── lint/
|
||||||
|
│ ├── Taskfile.yaml ← test `core-lint` (root)
|
||||||
|
│ ├── run/
|
||||||
|
│ │ ├── Taskfile.yaml ← test `core-lint run`
|
||||||
|
│ │ └── fixtures/
|
||||||
|
│ ├── go/
|
||||||
|
│ │ ├── Taskfile.yaml ← test `core-lint go`
|
||||||
|
│ │ └── fixtures/
|
||||||
|
│ └── security/
|
||||||
|
│ ├── Taskfile.yaml ← test `core-lint security`
|
||||||
|
│ └── fixtures/
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rule:** Every CLI command has a matching `tests/cli/{path}/Taskfile.yaml`. The Taskfile runs the compiled binary against fixtures with known inputs and validates the output. If the CLI test passes, the underlying actions work — because CLI commands call actions, MCP tools call actions, API endpoints call actions. Test the CLI, trust the rest.
|
||||||
|
|
||||||
|
**Pattern:**
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# tests/cli/core/lint/go/Taskfile.yaml
|
||||||
|
version: '3'
|
||||||
|
tasks:
|
||||||
|
test:
|
||||||
|
cmds:
|
||||||
|
- core-lint go --output json fixtures/ > /tmp/result.json
|
||||||
|
- jq -e '.findings | length > 0' /tmp/result.json
|
||||||
|
- jq -e '.summary.passed == false' /tmp/result.json
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why this matters for agents:** An agent can validate its own work by running `task test` in the matching `tests/cli/` directory. No test framework, no mocking, no setup — just the binary, fixtures, and `jq` assertions. The agent builds the binary, runs the test, sees the result. If it fails, the agent can read the fixture, read the output, and fix the code.
|
||||||
|
|
||||||
|
**Corollary:** Fixtures are planted bugs. Each fixture file has a known issue that the linter must find. If the linter doesn't find it, the test fails. Fixtures are the spec for what the tool must detect — they ARE the test cases, not descriptions of test cases.
|
||||||
|
|
||||||
|
## Applying AX to Existing Patterns
|
||||||
|
|
||||||
|
### File Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
# AX-native: path describes content
|
||||||
|
core/agent/
|
||||||
|
├── go/ # Go source
|
||||||
|
├── php/ # PHP source
|
||||||
|
├── ui/ # Frontend source
|
||||||
|
├── claude/ # Claude Code plugin
|
||||||
|
└── codex/ # Codex plugin
|
||||||
|
|
||||||
|
# Not AX: generic names requiring README
|
||||||
|
src/
|
||||||
|
├── lib/
|
||||||
|
├── utils/
|
||||||
|
└── helpers/
|
||||||
|
```
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
```go
|
||||||
|
// AX-native: errors are infrastructure, not application logic
|
||||||
|
svc := c.Service("brain")
|
||||||
|
cfg := c.Config().Get("database.host")
|
||||||
|
// Errors logged by Core. Code reads like a spec.
|
||||||
|
|
||||||
|
// Not AX: errors dominate the code
|
||||||
|
svc, err := c.ServiceFor[brain.Service]()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("get brain service: %w", err)
|
||||||
|
}
|
||||||
|
cfg, err := c.Config().Get("database.host")
|
||||||
|
if err != nil {
|
||||||
|
_ = err // silenced because "it'll be fine"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### API Design
|
||||||
|
|
||||||
|
```go
|
||||||
|
// AX-native: one shape, every surface
|
||||||
|
core.New(core.Options{
|
||||||
|
Name: "my-app",
|
||||||
|
Services: []core.Service{...},
|
||||||
|
Config: core.Config{...},
|
||||||
|
})
|
||||||
|
|
||||||
|
// Not AX: multiple patterns for the same thing
|
||||||
|
core.New(
|
||||||
|
core.WithName("my-app"),
|
||||||
|
core.WithService(factory1),
|
||||||
|
core.WithService(factory2),
|
||||||
|
core.WithConfig(cfg),
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
## The Plans Convention — AX Development Lifecycle
|
||||||
|
|
||||||
|
The `plans/` directory structure encodes a development methodology designed for how generative AI actually works: iterative refinement across structured phases, not one-shot generation.
|
||||||
|
|
||||||
|
### The Three-Way Split
|
||||||
|
|
||||||
|
```
|
||||||
|
plans/
|
||||||
|
├── project/ # 1. WHAT and WHY — start here
|
||||||
|
├── rfc/ # 2. CONSTRAINTS — immutable contracts
|
||||||
|
└── code/ # 3. HOW — implementation specs
|
||||||
|
```
|
||||||
|
|
||||||
|
Each directory is a phase. Work flows from project → rfc → code. Each transition forces a refinement pass — you cannot write a code spec without discovering gaps in the project spec, and you cannot write an RFC without discovering assumptions in both.
|
||||||
|
|
||||||
|
**Three places for data that can't be written simultaneously = three guaranteed iterations of "actually, this needs changing."** Refinement is baked into the structure, not bolted on as a review step.
|
||||||
|
|
||||||
|
### Phase 1: Project (Vision)
|
||||||
|
|
||||||
|
Start with `project/`. No code exists yet. Define:
|
||||||
|
- What the product IS and who it serves
|
||||||
|
- What existing primitives it consumes (cross-ref to `code/`)
|
||||||
|
- What constraints it operates under (cross-ref to `rfc/`)
|
||||||
|
|
||||||
|
This is where creativity lives. Map features to building blocks. Connect systems. The project spec is integrative — it references everything else.
|
||||||
|
|
||||||
|
### Phase 2: RFC (Contracts)
|
||||||
|
|
||||||
|
Extract the immutable rules into `rfc/`. These are constraints that don't change with implementation:
|
||||||
|
- Wire formats, protocols, hash algorithms
|
||||||
|
- Security properties that must hold
|
||||||
|
- Compatibility guarantees
|
||||||
|
|
||||||
|
RFCs are numbered per component (`RFC-BORG-006-SMSG-FORMAT.md`) and never modified after acceptance. If the contract changes, write a new RFC.
|
||||||
|
|
||||||
|
### Phase 3: Code (Implementation Specs)
|
||||||
|
|
||||||
|
Define the implementation in `code/`. Each component gets an RFC.md that an agent can implement from:
|
||||||
|
- Struct definitions (the DTOs — see principle 6)
|
||||||
|
- Method signatures and behaviour
|
||||||
|
- Error conditions and edge cases
|
||||||
|
- Cross-references to other code/ specs
|
||||||
|
|
||||||
|
The code spec IS the product. Write the spec → dispatch to an agent → review output → iterate.
|
||||||
|
|
||||||
|
### Pre-Launch: Alignment Protocol
|
||||||
|
|
||||||
|
Before dispatching for implementation, verify spec-model alignment:
|
||||||
|
|
||||||
|
```
|
||||||
|
1. REVIEW — The implementation model (Codex/Jules) reads the spec
|
||||||
|
and reports missing elements. This surfaces the delta between
|
||||||
|
the model's training and the spec's assumptions.
|
||||||
|
|
||||||
|
"I need X, Y, Z to implement this" is the model saying
|
||||||
|
"I hear you but I'm missing context" — without asking.
|
||||||
|
|
||||||
|
2. ADJUST — Update the spec to close the gaps. Add examples,
|
||||||
|
clarify ambiguities, provide the context the model needs.
|
||||||
|
This is shared alignment, not compromise.
|
||||||
|
|
||||||
|
3. VERIFY — A different model (or sub-agent) reviews the adjusted
|
||||||
|
spec without the planner's bias. Fresh eyes on the contract.
|
||||||
|
"Does this make sense to someone who wasn't in the room?"
|
||||||
|
|
||||||
|
4. READY — When the review findings are trivial or deployment-
|
||||||
|
related (not architectural), the spec is ready to dispatch.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Implementation: Iterative Dispatch
|
||||||
|
|
||||||
|
Same prompt, multiple runs. Each pass sees deeper because the context evolved:
|
||||||
|
|
||||||
|
```
|
||||||
|
Round 1: Build features (the obvious gaps)
|
||||||
|
Round 2: Write tests (verify what was built)
|
||||||
|
Round 3: Harden security (what can go wrong?)
|
||||||
|
Round 4: Next RFC section (what's still missing?)
|
||||||
|
Round N: Findings are trivial → implementation is complete
|
||||||
|
```
|
||||||
|
|
||||||
|
Re-running is not failure. It is the process. Each pass changes the codebase, which changes what the next pass can see. The iteration IS the refinement.
|
||||||
|
|
||||||
|
### Post-Implementation: Auto-Documentation
|
||||||
|
|
||||||
|
The QA/verify chain produces artefacts that feed forward:
|
||||||
|
- Test results document the contract (what works, what doesn't)
|
||||||
|
- Coverage reports surface untested paths
|
||||||
|
- Diff summaries prep the changelog for the next release
|
||||||
|
- Doc site updates from the spec (the spec IS the documentation)
|
||||||
|
|
||||||
|
The output of one cycle is the input to the next. The plans repo stays current because the specs drive the code, not the other way round.
|
||||||
|
|
||||||
|
## Compatibility
|
||||||
|
|
||||||
|
AX conventions are valid, idiomatic Go/PHP/TS. They do not require language extensions, code generation, or non-standard tooling. An AX-designed codebase compiles, tests, and deploys with standard toolchains.
|
||||||
|
|
||||||
|
The conventions diverge from community patterns (functional options, Must/For, etc.) but do not violate language specifications. This is a style choice, not a fork.
|
||||||
|
|
||||||
|
## Adoption
|
||||||
|
|
||||||
|
AX applies to all new code in the Core ecosystem. Existing code migrates incrementally as it is touched — no big-bang rewrite.
|
||||||
|
|
||||||
|
Priority order:
|
||||||
|
1. **Public APIs** (package-level functions, struct constructors)
|
||||||
|
2. **File structure** (path naming, template locations)
|
||||||
|
3. **Internal fields** (struct field names, local variables)
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- dAppServer unified path convention (2024)
|
||||||
|
- CoreGO DTO pattern refactor (2026-03-18)
|
||||||
|
- Core primitives design (2026-03-19)
|
||||||
|
- Go Proverbs, Rob Pike (2015) — AX provides an updated lens
|
||||||
|
|
||||||
|
## Changelog
|
||||||
|
|
||||||
|
- 2026-03-19: Initial draft
|
||||||
|
|
@ -19,6 +19,7 @@ Detection order:
|
||||||
2. `go.mod` -- `ProjectTypeGo`
|
2. `go.mod` -- `ProjectTypeGo`
|
||||||
3. `package.json` -- `ProjectTypeNode`
|
3. `package.json` -- `ProjectTypeNode`
|
||||||
4. `composer.json` -- `ProjectTypePHP`
|
4. `composer.json` -- `ProjectTypePHP`
|
||||||
|
5. `mkdocs.yml` -- `ProjectTypeDocs`
|
||||||
|
|
||||||
Docker (`Dockerfile`), LinuxKit (`linuxkit.yml` or `.core/linuxkit/*.yml`), C++ (`CMakeLists.txt`), and Taskfile (`Taskfile.yml`) are detected by their respective builders' `Detect()` methods rather than the central discovery function.
|
Docker (`Dockerfile`), LinuxKit (`linuxkit.yml` or `.core/linuxkit/*.yml`), C++ (`CMakeLists.txt`), and Taskfile (`Taskfile.yml`) are detected by their respective builders' `Detect()` methods rather than the central discovery function.
|
||||||
|
|
||||||
|
|
@ -53,6 +54,11 @@ type Artifact struct {
|
||||||
|---|---|---|
|
|---|---|---|
|
||||||
| **GoBuilder** | `go.mod` or `wails.json` | Sets `GOOS`/`GOARCH`/`CGO_ENABLED=0`, runs `go build -trimpath` with ldflags. Output per target: `dist/{os}_{arch}/{binary}`. |
|
| **GoBuilder** | `go.mod` or `wails.json` | Sets `GOOS`/`GOARCH`/`CGO_ENABLED=0`, runs `go build -trimpath` with ldflags. Output per target: `dist/{os}_{arch}/{binary}`. |
|
||||||
| **WailsBuilder** | `wails.json` | Checks `go.mod` for Wails v3 vs v2. V3 delegates to TaskfileBuilder; V2 runs `wails build -platform` then copies from `build/bin/` to `dist/`. |
|
| **WailsBuilder** | `wails.json` | Checks `go.mod` for Wails v3 vs v2. V3 delegates to TaskfileBuilder; V2 runs `wails build -platform` then copies from `build/bin/` to `dist/`. |
|
||||||
|
| **NodeBuilder** | `package.json` | Detects the active package manager from lockfiles, runs the build script once per target, and collects artifacts from `dist/{os}_{arch}/`. |
|
||||||
|
| **PHPBuilder** | `composer.json` | Runs `composer install`, then `composer run-script build` when present. Falls back to a deterministic zip bundle in `dist/{os}_{arch}/`. |
|
||||||
|
| **PythonBuilder** | `pyproject.toml` or `requirements.txt` | Packages the project tree into a deterministic zip bundle in `dist/{os}_{arch}/`. |
|
||||||
|
| **RustBuilder** | `Cargo.toml` | Runs `cargo build --release --target` per platform and collects executables from `target/{triple}/release/`. |
|
||||||
|
| **DocsBuilder** | `mkdocs.yml` | Runs `mkdocs build --clean --site-dir` and packages the generated `site/` tree into a zip bundle per target. |
|
||||||
| **DockerBuilder** | `Dockerfile` | Validates `docker` and `buildx`, builds multi-platform images with `docker buildx build --platform`. Supports `--push` or local load/OCI tarball. |
|
| **DockerBuilder** | `Dockerfile` | Validates `docker` and `buildx`, builds multi-platform images with `docker buildx build --platform`. Supports `--push` or local load/OCI tarball. |
|
||||||
| **LinuxKitBuilder** | `linuxkit.yml` or `.core/linuxkit/*.yml` | Validates `linuxkit` CLI, runs `linuxkit build --format --name --dir --arch`. Outputs qcow2, iso, raw, vmdk, vhd, or cloud images. Linux-only targets. |
|
| **LinuxKitBuilder** | `linuxkit.yml` or `.core/linuxkit/*.yml` | Validates `linuxkit` CLI, runs `linuxkit build --format --name --dir --arch`. Outputs qcow2, iso, raw, vmdk, vhd, or cloud images. Linux-only targets. |
|
||||||
| **CPPBuilder** | `CMakeLists.txt` | Validates `make`, runs `make configure` then `make build` then `make package` for host builds. Cross-compilation uses Conan profile targets (e.g. `make gcc-linux-armv8`). Finds artifacts in `build/packages/` or `build/release/src/`. |
|
| **CPPBuilder** | `CMakeLists.txt` | Validates `make`, runs `make configure` then `make build` then `make package` for host builds. Cross-compilation uses Conan profile targets (e.g. `make gcc-linux-armv8`). Finds artifacts in `build/packages/` or `build/release/src/`. |
|
||||||
|
|
@ -84,7 +90,7 @@ Three implementations:
|
||||||
|
|
||||||
- **GPGSigner** -- `gpg --detach-sign --armor --local-user {key}`. Produces `.asc` files.
|
- **GPGSigner** -- `gpg --detach-sign --armor --local-user {key}`. Produces `.asc` files.
|
||||||
- **MacOSSigner** -- `codesign --sign {identity} --timestamp --options runtime --force`. Notarisation via `xcrun notarytool submit --wait` then `xcrun stapler staple`.
|
- **MacOSSigner** -- `codesign --sign {identity} --timestamp --options runtime --force`. Notarisation via `xcrun notarytool submit --wait` then `xcrun stapler staple`.
|
||||||
- **WindowsSigner** -- Placeholder (returns `Available() == false`).
|
- **WindowsSigner** -- Uses `signtool` on Windows when a certificate is configured.
|
||||||
|
|
||||||
Configuration supports `$ENV` expansion in all credential fields, so secrets can come from environment variables without being written to YAML.
|
Configuration supports `$ENV` expansion in all credential fields, so secrets can come from environment variables without being written to YAML.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,9 +7,9 @@ description: Build system, release pipeline, and SDK generation for the Core eco
|
||||||
|
|
||||||
`forge.lthn.ai/core/go-build` is the build, release, and SDK generation toolkit for Core projects. It provides:
|
`forge.lthn.ai/core/go-build` is the build, release, and SDK generation toolkit for Core projects. It provides:
|
||||||
|
|
||||||
- **Auto-detecting builders** for Go, Wails, Docker, LinuxKit, C++, and Taskfile projects
|
- **Auto-detecting builders** for Go, Wails, Node, PHP, Python, Rust, Docs, Docker, LinuxKit, C++, and Taskfile projects
|
||||||
- **Cross-compilation** with per-target archiving (tar.gz, tar.xz, zip) and SHA-256 checksums
|
- **Cross-compilation** with per-target archiving (tar.gz, tar.xz, zip) and SHA-256 checksums
|
||||||
- **Code signing** -- macOS codesign with notarisation, GPG detached signatures, Windows signtool (placeholder)
|
- **Code signing** -- macOS codesign with notarisation, GPG detached signatures, Windows signtool
|
||||||
- **Release automation** -- semantic versioning from git tags, conventional-commit changelogs, multi-target publishing
|
- **Release automation** -- semantic versioning from git tags, conventional-commit changelogs, multi-target publishing
|
||||||
- **SDK generation** -- OpenAPI spec diffing for breaking-change detection, code generation for TypeScript, Python, Go, and PHP
|
- **SDK generation** -- OpenAPI spec diffing for breaking-change detection, code generation for TypeScript, Python, Go, and PHP
|
||||||
- **CLI integration** -- registers `core build`, `core ci`, and `core sdk` commands via the Core CLI framework
|
- **CLI integration** -- registers `core build`, `core ci`, and `core sdk` commands via the Core CLI framework
|
||||||
|
|
@ -41,8 +41,11 @@ The builder is chosen by marker-file priority:
|
||||||
|-------------------|------------|
|
|-------------------|------------|
|
||||||
| `wails.json` | Wails |
|
| `wails.json` | Wails |
|
||||||
| `go.mod` | Go |
|
| `go.mod` | Go |
|
||||||
| `package.json` | Node (stub)|
|
| `package.json` | Node |
|
||||||
| `composer.json` | PHP (stub) |
|
| `composer.json` | PHP |
|
||||||
|
| `pyproject.toml` | Python |
|
||||||
|
| `Cargo.toml` | Rust |
|
||||||
|
| `mkdocs.yml` | Docs |
|
||||||
| `CMakeLists.txt` | C++ |
|
| `CMakeLists.txt` | C++ |
|
||||||
| `Dockerfile` | Docker |
|
| `Dockerfile` | Docker |
|
||||||
| `linuxkit.yml` | LinuxKit |
|
| `linuxkit.yml` | LinuxKit |
|
||||||
|
|
@ -96,7 +99,7 @@ forge.lthn.ai/core/go-build/
|
||||||
|
|
|
|
||||||
+-- pkg/
|
+-- pkg/
|
||||||
|-- build/ Core build types, config loading, discovery, archiving, checksums
|
|-- build/ Core build types, config loading, discovery, archiving, checksums
|
||||||
| |-- builders/ Builder implementations (Go, Wails, Docker, LinuxKit, C++, Taskfile)
|
| |-- builders/ Builder implementations (Go, Wails, Node, PHP, Python, Docs, Docker, LinuxKit, C++, Taskfile)
|
||||||
| +-- signing/ Code-signing implementations (macOS codesign, GPG, Windows stub)
|
| +-- signing/ Code-signing implementations (macOS codesign, GPG, Windows stub)
|
||||||
|
|
|
|
||||||
|-- release/ Release orchestration, versioning, changelog, config
|
|-- release/ Release orchestration, versioning, changelog, config
|
||||||
|
|
|
||||||
24
go.mod
24
go.mod
|
|
@ -3,12 +3,14 @@ module dappco.re/go/core/build
|
||||||
go 1.26.0
|
go 1.26.0
|
||||||
|
|
||||||
require (
|
require (
|
||||||
|
dappco.re/go/core v0.8.0-alpha.1
|
||||||
dappco.re/go/core/api v0.2.0
|
dappco.re/go/core/api v0.2.0
|
||||||
dappco.re/go/core/i18n v0.2.0
|
dappco.re/go/core/i18n v0.2.0
|
||||||
dappco.re/go/core/io v0.2.0
|
dappco.re/go/core/io v0.2.0
|
||||||
dappco.re/go/core/log v0.1.0
|
dappco.re/go/core/log v0.1.0
|
||||||
|
dappco.re/go/core/process v0.3.0
|
||||||
dappco.re/go/core/ws v0.3.0
|
dappco.re/go/core/ws v0.3.0
|
||||||
forge.lthn.ai/core/cli v0.3.7
|
dappco.re/go/core/cli v0.3.7
|
||||||
github.com/Snider/Borg v0.2.0
|
github.com/Snider/Borg v0.2.0
|
||||||
github.com/gin-gonic/gin v1.12.0
|
github.com/gin-gonic/gin v1.12.0
|
||||||
github.com/leaanthony/debme v1.2.1
|
github.com/leaanthony/debme v1.2.1
|
||||||
|
|
@ -23,11 +25,10 @@ require (
|
||||||
|
|
||||||
require (
|
require (
|
||||||
cloud.google.com/go v0.123.0 // indirect
|
cloud.google.com/go v0.123.0 // indirect
|
||||||
dappco.re/go/core v0.6.0 // indirect
|
dappco.re/go/core v0.3.2 // indirect
|
||||||
forge.lthn.ai/core/go v0.3.2 // indirect
|
dappco.re/go/core/i18n v0.1.7 // indirect
|
||||||
forge.lthn.ai/core/go-i18n v0.1.7 // indirect
|
dappco.re/go/core/inference v0.1.7 // indirect
|
||||||
forge.lthn.ai/core/go-inference v0.1.7 // indirect
|
dappco.re/go/core/log v0.0.4 // indirect
|
||||||
forge.lthn.ai/core/go-log v0.0.4 // indirect
|
|
||||||
github.com/99designs/gqlgen v0.17.88 // indirect
|
github.com/99designs/gqlgen v0.17.88 // indirect
|
||||||
github.com/KyleBanks/depth v1.2.1 // indirect
|
github.com/KyleBanks/depth v1.2.1 // indirect
|
||||||
github.com/TwiN/go-color v1.4.1 // indirect
|
github.com/TwiN/go-color v1.4.1 // indirect
|
||||||
|
|
@ -155,14 +156,3 @@ require (
|
||||||
golang.org/x/tools v0.43.0 // indirect
|
golang.org/x/tools v0.43.0 // indirect
|
||||||
google.golang.org/protobuf v1.36.11 // indirect
|
google.golang.org/protobuf v1.36.11 // indirect
|
||||||
)
|
)
|
||||||
|
|
||||||
replace (
|
|
||||||
dappco.re/go/core => /Users/snider/Code/core/go
|
|
||||||
dappco.re/go/core/api => /Users/snider/Code/core/api
|
|
||||||
dappco.re/go/core/i18n => /Users/snider/Code/core/go-i18n
|
|
||||||
dappco.re/go/core/io => /Users/snider/Code/core/go-io
|
|
||||||
dappco.re/go/core/log => /Users/snider/Code/core/go-log
|
|
||||||
dappco.re/go/core/ws => /Users/snider/Code/core/go-ws
|
|
||||||
forge.lthn.ai/core/cli => /Users/snider/Code/core/cli
|
|
||||||
forge.lthn.ai/core/go-inference => /Users/snider/Code/core/go-inference
|
|
||||||
)
|
|
||||||
|
|
|
||||||
18
go.sum
18
go.sum
|
|
@ -1,9 +1,27 @@
|
||||||
cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE=
|
cloud.google.com/go v0.123.0 h1:2NAUJwPR47q+E35uaJeYoNhuNEM9kM8SjgRgdeOJUSE=
|
||||||
cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU=
|
cloud.google.com/go v0.123.0/go.mod h1:xBoMV08QcqUGuPW65Qfm1o9Y4zKZBpGS+7bImXLTAZU=
|
||||||
|
dappco.re/go/core v0.8.0-alpha.1 h1:gj7+Scv+L63Z7wMxbJYHhaRFkHJo2u4MMPuUSv/Dhtk=
|
||||||
|
dappco.re/go/core v0.8.0-alpha.1/go.mod h1:f2/tBZ3+3IqDrg2F5F598llv0nmb/4gJVCFzM5geE4A=
|
||||||
|
dappco.re/go/core/api v0.2.0 h1:5OcN9nawpp18Jp6dB1OwI2CBfs0Tacb0y0zqxFB6TJ0=
|
||||||
|
dappco.re/go/core/api v0.2.0/go.mod h1:AtgNAx8lDY+qhVObFdNQOjSUQrHX1BeiDdMuA6RIfzo=
|
||||||
|
dappco.re/go/core/i18n v0.2.0 h1:NHzk6RCU93/qVRA3f2jvMr9P1R6FYheR/sHL+TnvKbI=
|
||||||
|
dappco.re/go/core/i18n v0.2.0/go.mod h1:9eSVJXr3OpIGWQvDynfhqcp27xnLMwlYLgsByU+p7ok=
|
||||||
|
dappco.re/go/core/io v0.2.0 h1:zuudgIiTsQQ5ipVt97saWdGLROovbEB/zdVyy9/l+I4=
|
||||||
|
dappco.re/go/core/io v0.2.0/go.mod h1:1QnQV6X9LNgFKfm8SkOtR9LLaj3bDcsOIeJOOyjbL5E=
|
||||||
|
dappco.re/go/core/log v0.1.0 h1:pa71Vq2TD2aoEUQWFKwNcaJ3GBY8HbaNGqtE688Unyc=
|
||||||
|
dappco.re/go/core/log v0.1.0/go.mod h1:Nkqb8gsXhZAO8VLpx7B8i1iAmohhzqA20b9Zr8VUcJs=
|
||||||
|
dappco.re/go/core/process v0.3.0 h1:BPF9R79+8ZWe34qCIy/sZy+P4HwbaO95js2oPJL7IqM=
|
||||||
|
dappco.re/go/core/process v0.3.0/go.mod h1:qwx8kt6x+J9gn7fu8lavuess72Ye9jPBODqDZQ9K0as=
|
||||||
|
dappco.re/go/core/ws v0.3.0 h1:ZxR8y5pfrWvnCHVN7qExXz7fdP5a063uNqyqE0Ab8pQ=
|
||||||
|
dappco.re/go/core/ws v0.3.0/go.mod h1:aLyXrJnbCOGL0SW9rC1EHAAIS83w3djO374gHIz4Nic=
|
||||||
|
forge.lthn.ai/core/cli v0.3.7 h1:1GrbaGg0wDGHr6+klSbbGyN/9sSbHvFbdySJznymhwg=
|
||||||
|
forge.lthn.ai/core/cli v0.3.7/go.mod h1:DBUppJkA9P45ZFGgI2B8VXw1rAZxamHoI/KG7fRvTNs=
|
||||||
forge.lthn.ai/core/go v0.3.2 h1:VB9pW6ggqBhe438cjfE2iSI5Lg+62MmRbaOFglZM+nQ=
|
forge.lthn.ai/core/go v0.3.2 h1:VB9pW6ggqBhe438cjfE2iSI5Lg+62MmRbaOFglZM+nQ=
|
||||||
forge.lthn.ai/core/go v0.3.2/go.mod h1:f7/zb3Labn4ARfwTq5Bi2AFHY+uxyPHozO+hLb54eFo=
|
forge.lthn.ai/core/go v0.3.2/go.mod h1:f7/zb3Labn4ARfwTq5Bi2AFHY+uxyPHozO+hLb54eFo=
|
||||||
forge.lthn.ai/core/go-i18n v0.1.7 h1:aHkAoc3W8fw3RPNvw/UszQbjyFWXHszzbZgty3SwyAA=
|
forge.lthn.ai/core/go-i18n v0.1.7 h1:aHkAoc3W8fw3RPNvw/UszQbjyFWXHszzbZgty3SwyAA=
|
||||||
forge.lthn.ai/core/go-i18n v0.1.7/go.mod h1:0VDjwtY99NSj2iqwrI09h5GUsJeM9s48MLkr+/Dn4G8=
|
forge.lthn.ai/core/go-i18n v0.1.7/go.mod h1:0VDjwtY99NSj2iqwrI09h5GUsJeM9s48MLkr+/Dn4G8=
|
||||||
|
forge.lthn.ai/core/go-inference v0.1.7 h1:9Dy6v03jX5ZRH3n5iTzlYyGtucuBIgSe+S7GWvBzx9Q=
|
||||||
|
forge.lthn.ai/core/go-inference v0.1.7/go.mod h1:jfWz+IJX55wAH98+ic6FEqqGB6/P31CHlg7VY7pxREw=
|
||||||
forge.lthn.ai/core/go-log v0.0.4 h1:KTuCEPgFmuM8KJfnyQ8vPOU1Jg654W74h8IJvfQMfv0=
|
forge.lthn.ai/core/go-log v0.0.4 h1:KTuCEPgFmuM8KJfnyQ8vPOU1Jg654W74h8IJvfQMfv0=
|
||||||
forge.lthn.ai/core/go-log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
forge.lthn.ai/core/go-log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
||||||
github.com/99designs/gqlgen v0.17.88 h1:neMQDgehMwT1vYIOx/w5ZYPUU/iMNAJzRO44I5Intoc=
|
github.com/99designs/gqlgen v0.17.88 h1:neMQDgehMwT1vYIOx/w5ZYPUU/iMNAJzRO44I5Intoc=
|
||||||
|
|
|
||||||
422
internal/ax/ax.go
Normal file
422
internal/ax/ax.go
Normal file
|
|
@ -0,0 +1,422 @@
|
||||||
|
package ax
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"io/fs"
|
||||||
|
"runtime"
|
||||||
|
"syscall"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
coreio "dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
process "dappco.re/go/core/process"
|
||||||
|
processexec "dappco.re/go/core/process/exec"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DS returns the current platform directory separator.
|
||||||
|
//
|
||||||
|
// Usage example: read ax.DS() when building Core-aware filesystem paths.
|
||||||
|
func DS() string {
|
||||||
|
if sep := core.Env("DS"); sep != "" {
|
||||||
|
return sep
|
||||||
|
}
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
return "\\"
|
||||||
|
}
|
||||||
|
return "/"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean normalises a filesystem path using Core path primitives.
|
||||||
|
//
|
||||||
|
// Usage example: clean := ax.Clean("./dist/../dist/output")
|
||||||
|
func Clean(path string) string {
|
||||||
|
return core.CleanPath(path, DS())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Join combines path segments without relying on path/filepath.
|
||||||
|
//
|
||||||
|
// Usage example: path := ax.Join(projectDir, ".core", "build.yaml")
|
||||||
|
func Join(parts ...string) string {
|
||||||
|
return Clean(core.Join(DS(), parts...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Abs resolves a path against the current working directory.
|
||||||
|
//
|
||||||
|
// Usage example: abs, err := ax.Abs("./testdata")
|
||||||
|
func Abs(path string) (string, error) {
|
||||||
|
if core.PathIsAbs(path) {
|
||||||
|
return Clean(path), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
cwd, err := Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return Join(cwd, path), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rel returns target relative to base when target is inside base.
|
||||||
|
//
|
||||||
|
// Usage example: rel, err := ax.Rel(projectDir, artifactPath)
|
||||||
|
func Rel(base, target string) (string, error) {
|
||||||
|
base = Clean(base)
|
||||||
|
target = Clean(target)
|
||||||
|
|
||||||
|
if base == target {
|
||||||
|
return ".", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
prefix := base
|
||||||
|
if !core.HasSuffix(prefix, DS()) {
|
||||||
|
prefix = core.Concat(prefix, DS())
|
||||||
|
}
|
||||||
|
|
||||||
|
if core.HasPrefix(target, prefix) {
|
||||||
|
return core.TrimPrefix(target, prefix), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", coreerr.E("ax.Rel", "path is outside base: "+target, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base returns the last path element.
|
||||||
|
//
|
||||||
|
// Usage example: name := ax.Base("/tmp/dist/app.tar.gz")
|
||||||
|
func Base(path string) string {
|
||||||
|
return core.PathBase(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Dir returns the parent directory for a path.
|
||||||
|
//
|
||||||
|
// Usage example: dir := ax.Dir("/tmp/dist/app.tar.gz")
|
||||||
|
func Dir(path string) string {
|
||||||
|
return core.PathDir(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ext returns the filename extension including the dot.
|
||||||
|
//
|
||||||
|
// Usage example: ext := ax.Ext("app.tar.gz")
|
||||||
|
func Ext(path string) string {
|
||||||
|
return core.PathExt(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsAbs reports whether a path is absolute.
|
||||||
|
//
|
||||||
|
// Usage example: if ax.IsAbs(outputDir) { ... }
|
||||||
|
func IsAbs(path string) bool {
|
||||||
|
return core.PathIsAbs(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FromSlash rewrites slash-separated paths for the current platform.
|
||||||
|
//
|
||||||
|
// Usage example: path := ax.FromSlash("ui/dist/index.html")
|
||||||
|
func FromSlash(path string) string {
|
||||||
|
if DS() == "/" {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
return core.Replace(path, "/", DS())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getwd returns the current working directory from Core environment metadata.
|
||||||
|
//
|
||||||
|
// Usage example: cwd, err := ax.Getwd()
|
||||||
|
func Getwd() (string, error) {
|
||||||
|
cwd := core.Env("DIR_CWD")
|
||||||
|
if cwd == "" {
|
||||||
|
wd, err := syscall.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("ax.Getwd", "failed to get current working directory", err)
|
||||||
|
}
|
||||||
|
return wd, nil
|
||||||
|
}
|
||||||
|
return cwd, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TempDir creates a temporary directory via Core's filesystem primitive.
|
||||||
|
//
|
||||||
|
// Usage example: dir, err := ax.TempDir("core-build-*")
|
||||||
|
func TempDir(prefix string) (string, error) {
|
||||||
|
dir := (&core.Fs{}).NewUnrestricted().TempDir(prefix)
|
||||||
|
if dir == "" {
|
||||||
|
return "", coreerr.E("ax.TempDir", "failed to create temporary directory", nil)
|
||||||
|
}
|
||||||
|
return dir, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadFile reads a file into bytes via io.Local.
|
||||||
|
//
|
||||||
|
// Usage example: data, err := ax.ReadFile("go.mod")
|
||||||
|
func ReadFile(path string) ([]byte, error) {
|
||||||
|
content, err := coreio.Local.Read(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, coreerr.E("ax.ReadFile", "failed to read file "+path, err)
|
||||||
|
}
|
||||||
|
return []byte(content), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteFile writes bytes via io.Local with an explicit mode.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.WriteFile("README.md", []byte("hi"), 0o644)
|
||||||
|
func WriteFile(path string, data []byte, mode fs.FileMode) error {
|
||||||
|
if err := coreio.Local.WriteMode(path, string(data), mode); err != nil {
|
||||||
|
return coreerr.E("ax.WriteFile", "failed to write file "+path, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteString writes text via io.Local with an explicit mode.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.WriteString("README.md", "hi", 0o644)
|
||||||
|
func WriteString(path, data string, mode fs.FileMode) error {
|
||||||
|
if err := coreio.Local.WriteMode(path, data, mode); err != nil {
|
||||||
|
return coreerr.E("ax.WriteString", "failed to write file "+path, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MkdirAll ensures a directory exists.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.MkdirAll("dist/linux_arm64", 0o755)
|
||||||
|
func MkdirAll(path string, _ fs.FileMode) error {
|
||||||
|
if err := coreio.Local.EnsureDir(path); err != nil {
|
||||||
|
return coreerr.E("ax.MkdirAll", "failed to create directory "+path, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mkdir ensures a directory exists.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.Mkdir(".core", 0o755)
|
||||||
|
func Mkdir(path string, mode fs.FileMode) error {
|
||||||
|
return MkdirAll(path, mode)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemoveAll removes a file or directory tree.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.RemoveAll("dist")
|
||||||
|
func RemoveAll(path string) error {
|
||||||
|
if err := coreio.Local.DeleteAll(path); err != nil {
|
||||||
|
return coreerr.E("ax.RemoveAll", "failed to remove path "+path, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stat returns file metadata from io.Local.
|
||||||
|
//
|
||||||
|
// Usage example: info, err := ax.Stat("go.mod")
|
||||||
|
func Stat(path string) (fs.FileInfo, error) {
|
||||||
|
info, err := coreio.Local.Stat(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, coreerr.E("ax.Stat", "failed to stat path "+path, err)
|
||||||
|
}
|
||||||
|
return info, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadDir lists directory entries via io.Local.
|
||||||
|
//
|
||||||
|
// Usage example: entries, err := ax.ReadDir("dist")
|
||||||
|
func ReadDir(path string) ([]fs.DirEntry, error) {
|
||||||
|
entries, err := coreio.Local.List(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, coreerr.E("ax.ReadDir", "failed to list directory "+path, err)
|
||||||
|
}
|
||||||
|
return entries, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open opens a file for reading via io.Local.
|
||||||
|
//
|
||||||
|
// Usage example: file, err := ax.Open("README.md")
|
||||||
|
func Open(path string) (fs.File, error) {
|
||||||
|
file, err := coreio.Local.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, coreerr.E("ax.Open", "failed to open file "+path, err)
|
||||||
|
}
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create opens a file for writing via io.Local.
|
||||||
|
//
|
||||||
|
// Usage example: file, err := ax.Create("dist/output.txt")
|
||||||
|
func Create(path string) (io.WriteCloser, error) {
|
||||||
|
file, err := coreio.Local.Create(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, coreerr.E("ax.Create", "failed to create file "+path, err)
|
||||||
|
}
|
||||||
|
return file, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exists reports whether a path exists.
|
||||||
|
//
|
||||||
|
// Usage example: if ax.Exists("dist") { ... }
|
||||||
|
func Exists(path string) bool {
|
||||||
|
return coreio.Local.Exists(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsFile reports whether a path is a regular file.
|
||||||
|
//
|
||||||
|
// Usage example: if ax.IsFile("go.mod") { ... }
|
||||||
|
func IsFile(path string) bool {
|
||||||
|
return coreio.Local.IsFile(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsDir reports whether a path is a directory.
|
||||||
|
//
|
||||||
|
// Usage example: if ax.IsDir(".core") { ... }
|
||||||
|
func IsDir(path string) bool {
|
||||||
|
return coreio.Local.IsDir(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Chmod updates file permissions without importing os.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.Chmod("dist/app", 0o755)
|
||||||
|
func Chmod(path string, mode fs.FileMode) error {
|
||||||
|
if err := syscall.Chmod(path, uint32(mode)); err != nil {
|
||||||
|
return coreerr.E("ax.Chmod", "failed to change permissions on "+path, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getuid returns the current process UID.
|
||||||
|
//
|
||||||
|
// Usage example: uid := ax.Getuid()
|
||||||
|
func Getuid() int {
|
||||||
|
return syscall.Getuid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getgid returns the current process GID.
|
||||||
|
//
|
||||||
|
// Usage example: gid := ax.Getgid()
|
||||||
|
func Getgid() int {
|
||||||
|
return syscall.Getgid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Geteuid returns the effective UID.
|
||||||
|
//
|
||||||
|
// Usage example: if ax.Geteuid() == 0 { ... }
|
||||||
|
func Geteuid() int {
|
||||||
|
return syscall.Geteuid()
|
||||||
|
}
|
||||||
|
|
||||||
|
// JSONMarshal returns a JSON string using Core's JSON wrapper.
|
||||||
|
//
|
||||||
|
// Usage example: data, err := ax.JSONMarshal(cfg)
|
||||||
|
func JSONMarshal(value any) (string, error) {
|
||||||
|
result := core.JSONMarshal(value)
|
||||||
|
if !result.OK {
|
||||||
|
err, ok := result.Value.(error)
|
||||||
|
if !ok {
|
||||||
|
return "", coreerr.E("ax.JSONMarshal", "failed to marshal JSON", nil)
|
||||||
|
}
|
||||||
|
return "", coreerr.E("ax.JSONMarshal", "failed to marshal JSON", err)
|
||||||
|
}
|
||||||
|
encoded, ok := result.Value.([]byte)
|
||||||
|
if !ok {
|
||||||
|
return "", coreerr.E("ax.JSONMarshal", "failed to marshal JSON", nil)
|
||||||
|
}
|
||||||
|
return string(encoded), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// JSONUnmarshal decodes JSON into target using Core's JSON wrapper.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.JSONUnmarshal(data, &cfg)
|
||||||
|
func JSONUnmarshal(data []byte, target any) error {
|
||||||
|
result := core.JSONUnmarshal(data, target)
|
||||||
|
if !result.OK {
|
||||||
|
err, ok := result.Value.(error)
|
||||||
|
if !ok {
|
||||||
|
return coreerr.E("ax.JSONUnmarshal", "failed to unmarshal JSON", nil)
|
||||||
|
}
|
||||||
|
return coreerr.E("ax.JSONUnmarshal", "failed to unmarshal JSON", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// LookPath resolves a program on PATH via the Core process package.
|
||||||
|
//
|
||||||
|
// Usage example: path, err := ax.LookPath("git")
|
||||||
|
func LookPath(name string) (string, error) {
|
||||||
|
program := process.Program{Name: name}
|
||||||
|
if err := program.Find(); err != nil {
|
||||||
|
return "", coreerr.E("ax.LookPath", "failed to locate command "+name, err)
|
||||||
|
}
|
||||||
|
return program.Path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveCommand resolves a program from PATH or a list of fallback paths.
|
||||||
|
//
|
||||||
|
// Usage example: path, err := ax.ResolveCommand("task", "/opt/homebrew/bin/task")
|
||||||
|
func ResolveCommand(name string, fallbackPaths ...string) (string, error) {
|
||||||
|
path, err := LookPath(name)
|
||||||
|
if err == nil {
|
||||||
|
return path, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fallbackPath := range fallbackPaths {
|
||||||
|
if IsFile(fallbackPath) {
|
||||||
|
return fallbackPath, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", coreerr.E("ax.ResolveCommand", "failed to locate command "+name, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run executes a command and returns trimmed combined output.
|
||||||
|
//
|
||||||
|
// Usage example: output, err := ax.Run(ctx, "git", "status", "--short")
|
||||||
|
func Run(ctx context.Context, command string, args ...string) (string, error) {
|
||||||
|
program := process.Program{Name: command}
|
||||||
|
return program.Run(ctx, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// RunDir executes a command in the provided directory and returns combined output.
|
||||||
|
//
|
||||||
|
// Usage example: output, err := ax.RunDir(ctx, repoDir, "git", "log", "--oneline")
|
||||||
|
func RunDir(ctx context.Context, dir, command string, args ...string) (string, error) {
|
||||||
|
program := process.Program{Name: command}
|
||||||
|
return program.RunDir(ctx, dir, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes a command without capturing output.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.Exec(ctx, "go", "test", "./...")
|
||||||
|
func Exec(ctx context.Context, command string, args ...string) error {
|
||||||
|
return processexec.Command(ctx, command, args...).Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecDir executes a command in a specific directory without capturing output.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.ExecDir(ctx, repoDir, "go", "test", "./...")
|
||||||
|
func ExecDir(ctx context.Context, dir, command string, args ...string) error {
|
||||||
|
return processexec.Command(ctx, command, args...).WithDir(dir).Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecWithEnv executes a command with additional environment variables.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.ExecWithEnv(ctx, repoDir, []string{"GOOS=linux"}, "go", "build")
|
||||||
|
func ExecWithEnv(ctx context.Context, dir string, env []string, command string, args ...string) error {
|
||||||
|
return processexec.Command(ctx, command, args...).WithDir(dir).WithEnv(env).Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecWithWriters executes a command and streams output to the provided writers.
|
||||||
|
//
|
||||||
|
// Usage example: err := ax.ExecWithWriters(ctx, repoDir, nil, w, w, "docker", "build", ".")
|
||||||
|
func ExecWithWriters(ctx context.Context, dir string, env []string, stdout, stderr io.Writer, command string, args ...string) error {
|
||||||
|
cmd := processexec.Command(ctx, command, args...).WithDir(dir).WithEnv(env)
|
||||||
|
if stdout != nil {
|
||||||
|
cmd = cmd.WithStdout(stdout)
|
||||||
|
}
|
||||||
|
if stderr != nil {
|
||||||
|
cmd = cmd.WithStderr(stderr)
|
||||||
|
}
|
||||||
|
return cmd.Run()
|
||||||
|
}
|
||||||
|
|
||||||
|
// CombinedOutput executes a command and returns combined output.
|
||||||
|
//
|
||||||
|
// Usage example: output, err := ax.CombinedOutput(ctx, repoDir, nil, "go", "test", "./...")
|
||||||
|
func CombinedOutput(ctx context.Context, dir string, env []string, command string, args ...string) (string, error) {
|
||||||
|
cmd := processexec.Command(ctx, command, args...).WithDir(dir).WithEnv(env)
|
||||||
|
output, err := cmd.CombinedOutput()
|
||||||
|
return core.Trim(string(output)), err
|
||||||
|
}
|
||||||
44
internal/projectdetect/projectdetect.go
Normal file
44
internal/projectdetect/projectdetect.go
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
package projectdetect
|
||||||
|
|
||||||
|
import (
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/build/pkg/build/builders"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
)
|
||||||
|
|
||||||
|
type detector struct {
|
||||||
|
projectType build.ProjectType
|
||||||
|
builder build.Builder
|
||||||
|
}
|
||||||
|
|
||||||
|
var fallbackDetectors = []detector{
|
||||||
|
{projectType: build.ProjectTypeDocker, builder: builders.NewDockerBuilder()},
|
||||||
|
{projectType: build.ProjectTypeLinuxKit, builder: builders.NewLinuxKitBuilder()},
|
||||||
|
{projectType: build.ProjectTypeCPP, builder: builders.NewCPPBuilder()},
|
||||||
|
{projectType: build.ProjectTypeTaskfile, builder: builders.NewTaskfileBuilder()},
|
||||||
|
}
|
||||||
|
|
||||||
|
// DetectProjectType returns the first buildable project type in detection order.
|
||||||
|
//
|
||||||
|
// projectType, err := projectdetect.DetectProjectType(io.Local, ".")
|
||||||
|
func DetectProjectType(fs io.Medium, dir string) (build.ProjectType, error) {
|
||||||
|
projectType, err := build.PrimaryType(fs, dir)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if projectType != "" {
|
||||||
|
return projectType, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fallback := range fallbackDetectors {
|
||||||
|
detected, err := fallback.builder.Detect(fs, dir)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if detected {
|
||||||
|
return fallback.projectType, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
93
internal/projectdetect/projectdetect_test.go
Normal file
93
internal/projectdetect/projectdetect_test.go
Normal file
|
|
@ -0,0 +1,93 @@
|
||||||
|
package projectdetect
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestDetectProjectType_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("prefers core marker types over fallback builders", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module example"), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "Dockerfile"), []byte("FROM alpine"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeGo, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Go workspaces", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "go.work"), []byte("go 1.22\nuse ./app"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeGo, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Docker projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "Dockerfile"), []byte("FROM alpine"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeDocker, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects LinuxKit projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
linuxkitDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
|
require.NoError(t, ax.MkdirAll(linuxkitDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(linuxkitDir, "server.yml"), []byte("kernel:\n image: test"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeLinuxKit, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects C++ projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "CMakeLists.txt"), []byte("cmake_minimum_required(VERSION 3.16)"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeCPP, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Taskfile projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "Taskfile.yml"), []byte("version: '3'"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeTaskfile, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects nested Node.js projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nested := ax.Join(dir, "apps", "web")
|
||||||
|
require.NoError(t, ax.MkdirAll(nested, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := DetectProjectType(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeNode, projectType)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDetectProjectType_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("returns empty type for empty directory", func(t *testing.T) {
|
||||||
|
projectType, err := DetectProjectType(fs, t.TempDir())
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, projectType)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -7,6 +7,8 @@ import (
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Usage example: use locales.FS from package consumers as needed.
|
||||||
|
//
|
||||||
//go:embed *.json
|
//go:embed *.json
|
||||||
var FS embed.FS
|
var FS embed.FS
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -32,6 +32,7 @@
|
||||||
},
|
},
|
||||||
"flag": {
|
"flag": {
|
||||||
"archive": "Create release archives",
|
"archive": "Create release archives",
|
||||||
|
"archive_format": "Override the archive format (gz, xz, or zip)",
|
||||||
"checksum": "Generate checksums for artefacts",
|
"checksum": "Generate checksums for artefacts",
|
||||||
"ci": "Run in CI mode (all targets, JSON output)",
|
"ci": "Run in CI mode (all targets, JSON output)",
|
||||||
"config": "Path to build configuration file",
|
"config": "Path to build configuration file",
|
||||||
|
|
@ -110,6 +111,17 @@
|
||||||
"release": "Release"
|
"release": "Release"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"workflow": {
|
||||||
|
"short": "Generate the release workflow",
|
||||||
|
"long": "Write the embedded GitHub Actions release workflow into .github/workflows/release.yml, or pass --path/--workflowPath/--workflow-path/--workflow_path/--outputPath/--output-path/--output_path/--output/--workflowOutputPath/--workflow-output/--workflow_output/--workflow-output-path/--workflow_output_path for a custom location.",
|
||||||
|
"flag": {
|
||||||
|
"path": "Preferred workflow path input.",
|
||||||
|
"workflow_path": "Predictable workflow path alias.",
|
||||||
|
"output_path": "Preferred explicit workflow output path.",
|
||||||
|
"workflow_output_path": "Predictable workflow output path alias.",
|
||||||
|
"output": "Legacy alias for --output-path."
|
||||||
|
}
|
||||||
|
},
|
||||||
"sdk": {
|
"sdk": {
|
||||||
"short": "Generate SDK client libraries",
|
"short": "Generate SDK client libraries",
|
||||||
"long": "Generate typed SDK client libraries from an OpenAPI spec. Supports multiple languages and versioned output.",
|
"long": "Generate typed SDK client libraries from an OpenAPI spec. Supports multiple languages and versioned output.",
|
||||||
|
|
|
||||||
|
|
@ -7,5 +7,7 @@ import "embed"
|
||||||
// Assets holds the built UI bundle (core-build.js and related files).
|
// Assets holds the built UI bundle (core-build.js and related files).
|
||||||
// The directory is populated by running `npm run build` in the ui/ directory.
|
// The directory is populated by running `npm run build` in the ui/ directory.
|
||||||
//
|
//
|
||||||
|
// Usage example: use api.Assets from package consumers as needed.
|
||||||
|
//
|
||||||
//go:embed all:ui/dist
|
//go:embed all:ui/dist
|
||||||
var Assets embed.FS
|
var Assets embed.FS
|
||||||
|
|
|
||||||
|
|
@ -6,17 +6,21 @@
|
||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"errors"
|
||||||
|
stdio "io"
|
||||||
|
"io/fs"
|
||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
"dappco.re/go/core/api"
|
"dappco.re/go/core/api"
|
||||||
"dappco.re/go/core/api/pkg/provider"
|
"dappco.re/go/core/api/pkg/provider"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/internal/projectdetect"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/build/pkg/build/builders"
|
"dappco.re/go/core/build/pkg/build/builders"
|
||||||
"dappco.re/go/core/build/pkg/release"
|
"dappco.re/go/core/build/pkg/release"
|
||||||
"dappco.re/go/core/build/pkg/sdk"
|
"dappco.re/go/core/build/pkg/sdk"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
"dappco.re/go/core/ws"
|
"dappco.re/go/core/ws"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
)
|
)
|
||||||
|
|
@ -24,6 +28,8 @@ import (
|
||||||
// BuildProvider wraps go-build's build, release, and SDK operations as a
|
// BuildProvider wraps go-build's build, release, and SDK operations as a
|
||||||
// service provider. It implements Provider, Streamable, Describable, and
|
// service provider. It implements Provider, Streamable, Describable, and
|
||||||
// Renderable.
|
// Renderable.
|
||||||
|
//
|
||||||
|
// p := api.NewProvider(".", hub)
|
||||||
type BuildProvider struct {
|
type BuildProvider struct {
|
||||||
hub *ws.Hub
|
hub *ws.Hub
|
||||||
projectDir string
|
projectDir string
|
||||||
|
|
@ -41,6 +47,8 @@ var (
|
||||||
// NewProvider creates a BuildProvider for the given project directory.
|
// NewProvider creates a BuildProvider for the given project directory.
|
||||||
// If projectDir is empty, the current working directory is used.
|
// If projectDir is empty, the current working directory is used.
|
||||||
// The WS hub is used to emit real-time build events; pass nil if not available.
|
// The WS hub is used to emit real-time build events; pass nil if not available.
|
||||||
|
//
|
||||||
|
// p := api.NewProvider(".", hub)
|
||||||
func NewProvider(projectDir string, hub *ws.Hub) *BuildProvider {
|
func NewProvider(projectDir string, hub *ws.Hub) *BuildProvider {
|
||||||
if projectDir == "" {
|
if projectDir == "" {
|
||||||
projectDir = "."
|
projectDir = "."
|
||||||
|
|
@ -53,12 +61,18 @@ func NewProvider(projectDir string, hub *ws.Hub) *BuildProvider {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name implements api.RouteGroup.
|
// Name implements api.RouteGroup.
|
||||||
|
//
|
||||||
|
// name := p.Name() // → "build"
|
||||||
func (p *BuildProvider) Name() string { return "build" }
|
func (p *BuildProvider) Name() string { return "build" }
|
||||||
|
|
||||||
// BasePath implements api.RouteGroup.
|
// BasePath implements api.RouteGroup.
|
||||||
|
//
|
||||||
|
// path := p.BasePath() // → "/api/v1/build"
|
||||||
func (p *BuildProvider) BasePath() string { return "/api/v1/build" }
|
func (p *BuildProvider) BasePath() string { return "/api/v1/build" }
|
||||||
|
|
||||||
// Element implements provider.Renderable.
|
// Element implements provider.Renderable.
|
||||||
|
//
|
||||||
|
// spec := p.Element() // → {Tag: "core-build-panel", Source: "/assets/core-build.js"}
|
||||||
func (p *BuildProvider) Element() provider.ElementSpec {
|
func (p *BuildProvider) Element() provider.ElementSpec {
|
||||||
return provider.ElementSpec{
|
return provider.ElementSpec{
|
||||||
Tag: "core-build-panel",
|
Tag: "core-build-panel",
|
||||||
|
|
@ -67,6 +81,8 @@ func (p *BuildProvider) Element() provider.ElementSpec {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Channels implements provider.Streamable.
|
// Channels implements provider.Streamable.
|
||||||
|
//
|
||||||
|
// channels := p.Channels() // → ["build.started", "build.complete", ...]
|
||||||
func (p *BuildProvider) Channels() []string {
|
func (p *BuildProvider) Channels() []string {
|
||||||
return []string{
|
return []string{
|
||||||
"build.started",
|
"build.started",
|
||||||
|
|
@ -74,11 +90,14 @@ func (p *BuildProvider) Channels() []string {
|
||||||
"build.failed",
|
"build.failed",
|
||||||
"release.started",
|
"release.started",
|
||||||
"release.complete",
|
"release.complete",
|
||||||
|
"workflow.generated",
|
||||||
"sdk.generated",
|
"sdk.generated",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// RegisterRoutes implements api.RouteGroup.
|
// RegisterRoutes implements api.RouteGroup.
|
||||||
|
//
|
||||||
|
// p.RegisterRoutes(rg)
|
||||||
func (p *BuildProvider) RegisterRoutes(rg *gin.RouterGroup) {
|
func (p *BuildProvider) RegisterRoutes(rg *gin.RouterGroup) {
|
||||||
// Build
|
// Build
|
||||||
rg.GET("/config", p.getConfig)
|
rg.GET("/config", p.getConfig)
|
||||||
|
|
@ -90,6 +109,7 @@ func (p *BuildProvider) RegisterRoutes(rg *gin.RouterGroup) {
|
||||||
rg.GET("/release/version", p.getVersion)
|
rg.GET("/release/version", p.getVersion)
|
||||||
rg.GET("/release/changelog", p.getChangelog)
|
rg.GET("/release/changelog", p.getChangelog)
|
||||||
rg.POST("/release", p.triggerRelease)
|
rg.POST("/release", p.triggerRelease)
|
||||||
|
rg.POST("/release/workflow", p.generateReleaseWorkflow)
|
||||||
|
|
||||||
// SDK
|
// SDK
|
||||||
rg.GET("/sdk/diff", p.getSdkDiff)
|
rg.GET("/sdk/diff", p.getSdkDiff)
|
||||||
|
|
@ -97,6 +117,8 @@ func (p *BuildProvider) RegisterRoutes(rg *gin.RouterGroup) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Describe implements api.DescribableGroup.
|
// Describe implements api.DescribableGroup.
|
||||||
|
//
|
||||||
|
// routes := p.Describe() // → [{Method: "GET", Path: "/config", ...}, ...]
|
||||||
func (p *BuildProvider) Describe() []api.RouteDescription {
|
func (p *BuildProvider) Describe() []api.RouteDescription {
|
||||||
return []api.RouteDescription{
|
return []api.RouteDescription{
|
||||||
{
|
{
|
||||||
|
|
@ -110,7 +132,7 @@ func (p *BuildProvider) Describe() []api.RouteDescription {
|
||||||
Method: "GET",
|
Method: "GET",
|
||||||
Path: "/discover",
|
Path: "/discover",
|
||||||
Summary: "Detect project type",
|
Summary: "Detect project type",
|
||||||
Description: "Scans the project directory for marker files and returns detected project types.",
|
Description: "Scans the project directory for marker files and returns detected project types plus frontend and distro metadata.",
|
||||||
Tags: []string{"build", "discovery"},
|
Tags: []string{"build", "discovery"},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|
@ -148,6 +170,70 @@ func (p *BuildProvider) Describe() []api.RouteDescription {
|
||||||
Description: "Publishes pre-built artifacts from dist/ to configured targets.",
|
Description: "Publishes pre-built artifacts from dist/ to configured targets.",
|
||||||
Tags: []string{"release"},
|
Tags: []string{"release"},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
Method: "POST",
|
||||||
|
Path: "/release/workflow",
|
||||||
|
Summary: "Generate release workflow",
|
||||||
|
Description: "Writes the embedded GitHub Actions release workflow into .github/workflows/release.yml or a custom path.",
|
||||||
|
Tags: []string{"release", "workflow"},
|
||||||
|
RequestBody: map[string]any{
|
||||||
|
"type": "object",
|
||||||
|
"properties": map[string]any{
|
||||||
|
"path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Preferred workflow path input, relative to the project directory or absolute.",
|
||||||
|
},
|
||||||
|
"workflowPath": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Predictable alias for path, relative to the project directory or absolute.",
|
||||||
|
},
|
||||||
|
"workflow_path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Snake_case alias for workflowPath.",
|
||||||
|
},
|
||||||
|
"workflow-path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Hyphenated alias for workflowPath.",
|
||||||
|
},
|
||||||
|
"workflowOutputPath": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Predictable alias for outputPath, relative to the project directory or absolute.",
|
||||||
|
},
|
||||||
|
"workflow_output": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Snake_case alias for workflowOutputPath.",
|
||||||
|
},
|
||||||
|
"workflow-output": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Hyphenated alias for workflowOutputPath.",
|
||||||
|
},
|
||||||
|
"workflow_output_path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Snake_case alias for workflowOutputPath.",
|
||||||
|
},
|
||||||
|
"workflow-output-path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Hyphenated alias for workflowOutputPath.",
|
||||||
|
},
|
||||||
|
"outputPath": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Preferred explicit workflow output path, relative to the project directory or absolute.",
|
||||||
|
},
|
||||||
|
"output-path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Hyphenated alias for outputPath.",
|
||||||
|
},
|
||||||
|
"output_path": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Snake_case alias for outputPath.",
|
||||||
|
},
|
||||||
|
"output": map[string]any{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Legacy alias for outputPath.",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
{
|
{
|
||||||
Method: "GET",
|
Method: "GET",
|
||||||
Path: "/sdk/diff",
|
Path: "/sdk/diff",
|
||||||
|
|
@ -180,7 +266,7 @@ func (p *BuildProvider) Describe() []api.RouteDescription {
|
||||||
|
|
||||||
// resolveDir returns the absolute project directory.
|
// resolveDir returns the absolute project directory.
|
||||||
func (p *BuildProvider) resolveDir() (string, error) {
|
func (p *BuildProvider) resolveDir() (string, error) {
|
||||||
return filepath.Abs(p.projectDir)
|
return ax.Abs(p.projectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// -- Build Handlers -----------------------------------------------------------
|
// -- Build Handlers -----------------------------------------------------------
|
||||||
|
|
@ -214,27 +300,32 @@ func (p *BuildProvider) discoverProject(c *gin.Context) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
types, err := build.Discover(p.medium, dir)
|
discovery, err := build.DiscoverFull(p.medium, dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.JSON(http.StatusInternalServerError, api.Fail("discover_failed", err.Error()))
|
c.JSON(http.StatusInternalServerError, api.Fail("discover_failed", err.Error()))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert to string slice for JSON
|
// Convert to string slice for JSON
|
||||||
typeStrings := make([]string, len(types))
|
typeStrings := make([]string, len(discovery.Types))
|
||||||
for i, t := range types {
|
for i, t := range discovery.Types {
|
||||||
typeStrings[i] = string(t)
|
typeStrings[i] = string(t)
|
||||||
}
|
}
|
||||||
|
|
||||||
primary := ""
|
primary := ""
|
||||||
if len(types) > 0 {
|
if len(discovery.Types) > 0 {
|
||||||
primary = string(types[0])
|
primary = string(discovery.Types[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, api.OK(map[string]any{
|
c.JSON(http.StatusOK, api.OK(map[string]any{
|
||||||
"types": typeStrings,
|
"types": typeStrings,
|
||||||
"primary": primary,
|
"primary": primary,
|
||||||
"dir": dir,
|
"primary_stack": discovery.PrimaryStack,
|
||||||
|
"dir": dir,
|
||||||
|
"has_frontend": discovery.HasFrontend,
|
||||||
|
"has_subtree_npm": discovery.HasSubtreeNpm,
|
||||||
|
"markers": discovery.Markers,
|
||||||
|
"distro": discovery.Distro,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -252,8 +343,14 @@ func (p *BuildProvider) triggerBuild(c *gin.Context) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect project type
|
discovery, err := build.DiscoverFull(p.medium, dir)
|
||||||
projectType, err := build.PrimaryType(p.medium, dir)
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, api.Fail("discover_failed", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect project type, honouring an explicit build.type override.
|
||||||
|
projectType, err := resolveProjectType(p.medium, dir, cfg.Build.Type)
|
||||||
if err != nil || projectType == "" {
|
if err != nil || projectType == "" {
|
||||||
c.JSON(http.StatusBadRequest, api.Fail("no_project", "no buildable project detected"))
|
c.JSON(http.StatusBadRequest, api.Fail("no_project", "no buildable project detected"))
|
||||||
return
|
return
|
||||||
|
|
@ -267,7 +364,7 @@ func (p *BuildProvider) triggerBuild(c *gin.Context) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine version
|
// Determine version
|
||||||
version, verr := release.DetermineVersion(dir)
|
version, verr := release.DetermineVersionWithContext(c.Request.Context(), dir)
|
||||||
if verr != nil {
|
if verr != nil {
|
||||||
version = "dev"
|
version = "dev"
|
||||||
}
|
}
|
||||||
|
|
@ -278,10 +375,10 @@ func (p *BuildProvider) triggerBuild(c *gin.Context) {
|
||||||
binaryName = cfg.Project.Name
|
binaryName = cfg.Project.Name
|
||||||
}
|
}
|
||||||
if binaryName == "" {
|
if binaryName == "" {
|
||||||
binaryName = filepath.Base(dir)
|
binaryName = ax.Base(dir)
|
||||||
}
|
}
|
||||||
|
|
||||||
outputDir := filepath.Join(dir, "dist")
|
outputDir := ax.Join(dir, "dist")
|
||||||
|
|
||||||
buildConfig := &build.Config{
|
buildConfig := &build.Config{
|
||||||
FS: p.medium,
|
FS: p.medium,
|
||||||
|
|
@ -292,6 +389,7 @@ func (p *BuildProvider) triggerBuild(c *gin.Context) {
|
||||||
LDFlags: cfg.Build.LDFlags,
|
LDFlags: cfg.Build.LDFlags,
|
||||||
CGO: cfg.Build.CGO,
|
CGO: cfg.Build.CGO,
|
||||||
}
|
}
|
||||||
|
build.ApplyOptions(buildConfig, build.ComputeOptions(cfg, discovery))
|
||||||
|
|
||||||
targets := cfg.ToTargets()
|
targets := cfg.ToTargets()
|
||||||
|
|
||||||
|
|
@ -334,6 +432,15 @@ func (p *BuildProvider) triggerBuild(c *gin.Context) {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// resolveProjectType returns the configured build type when present, otherwise it falls back to detection.
|
||||||
|
func resolveProjectType(filesystem io.Medium, projectDir, buildType string) (build.ProjectType, error) {
|
||||||
|
if buildType != "" {
|
||||||
|
return build.ProjectType(buildType), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return projectdetect.DetectProjectType(filesystem, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
// artifactInfo holds JSON-friendly metadata about a dist/ file.
|
// artifactInfo holds JSON-friendly metadata about a dist/ file.
|
||||||
type artifactInfo struct {
|
type artifactInfo struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
|
|
@ -348,7 +455,7 @@ func (p *BuildProvider) listArtifacts(c *gin.Context) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
distDir := filepath.Join(dir, "dist")
|
distDir := ax.Join(dir, "dist")
|
||||||
if !p.medium.IsDir(distDir) {
|
if !p.medium.IsDir(distDir) {
|
||||||
c.JSON(http.StatusOK, api.OK(map[string]any{
|
c.JSON(http.StatusOK, api.OK(map[string]any{
|
||||||
"artifacts": []artifactInfo{},
|
"artifacts": []artifactInfo{},
|
||||||
|
|
@ -374,7 +481,7 @@ func (p *BuildProvider) listArtifacts(c *gin.Context) {
|
||||||
}
|
}
|
||||||
artifacts = append(artifacts, artifactInfo{
|
artifacts = append(artifacts, artifactInfo{
|
||||||
Name: entry.Name(),
|
Name: entry.Name(),
|
||||||
Path: filepath.Join(distDir, entry.Name()),
|
Path: ax.Join(distDir, entry.Name()),
|
||||||
Size: info.Size(),
|
Size: info.Size(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
@ -398,7 +505,7 @@ func (p *BuildProvider) getVersion(c *gin.Context) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
version, err := release.DetermineVersion(dir)
|
version, err := release.DetermineVersionWithContext(c.Request.Context(), dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.JSON(http.StatusInternalServerError, api.Fail("version_failed", err.Error()))
|
c.JSON(http.StatusInternalServerError, api.Fail("version_failed", err.Error()))
|
||||||
return
|
return
|
||||||
|
|
@ -420,7 +527,7 @@ func (p *BuildProvider) getChangelog(c *gin.Context) {
|
||||||
fromRef := c.Query("from")
|
fromRef := c.Query("from")
|
||||||
toRef := c.Query("to")
|
toRef := c.Query("to")
|
||||||
|
|
||||||
changelog, err := release.Generate(dir, fromRef, toRef)
|
changelog, err := release.GenerateWithContext(c.Request.Context(), dir, fromRef, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.JSON(http.StatusInternalServerError, api.Fail("changelog_failed", err.Error()))
|
c.JSON(http.StatusInternalServerError, api.Fail("changelog_failed", err.Error()))
|
||||||
return
|
return
|
||||||
|
|
@ -471,6 +578,127 @@ func (p *BuildProvider) triggerRelease(c *gin.Context) {
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ReleaseWorkflowRequest captures the workflow-generation inputs exposed by the API.
|
||||||
|
//
|
||||||
|
// request := ReleaseWorkflowRequest{Path: "ci/release.yml"} // writes ./ci/release.yml
|
||||||
|
// request := ReleaseWorkflowRequest{WorkflowOutputPath: "ops/release.yml"} // writes ./ops/release.yml
|
||||||
|
type ReleaseWorkflowRequest struct {
|
||||||
|
Path string `json:"path"`
|
||||||
|
WorkflowPath string `json:"workflowPath"`
|
||||||
|
WorkflowPathSnake string `json:"workflow_path"`
|
||||||
|
WorkflowPathHyphen string `json:"workflow-path"`
|
||||||
|
OutputPath string `json:"outputPath"`
|
||||||
|
OutputPathHyphen string `json:"output-path"`
|
||||||
|
OutputPathSnake string `json:"output_path"`
|
||||||
|
LegacyOutputPath string `json:"output"`
|
||||||
|
WorkflowOutputPath string `json:"workflowOutputPath"`
|
||||||
|
WorkflowOutputSnake string `json:"workflow_output"`
|
||||||
|
WorkflowOutputHyphen string `json:"workflow-output"`
|
||||||
|
WorkflowOutputPathSnake string `json:"workflow_output_path"`
|
||||||
|
WorkflowOutputPathHyphen string `json:"workflow-output-path"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveWorkflowTargetPath merges the workflow path and workflow output aliases into one final target path.
|
||||||
|
//
|
||||||
|
// request := ReleaseWorkflowRequest{Path: "ci/release.yml"}
|
||||||
|
// path, err := request.resolveWorkflowTargetPath("/tmp/project", io.Local)
|
||||||
|
func (r ReleaseWorkflowRequest) resolveWorkflowTargetPath(dir string, medium io.Medium) (string, error) {
|
||||||
|
outputPath, err := r.resolveOutputPath(dir, medium)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
workflowPath, err := r.resolveWorkflowPath(dir, medium)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return build.ResolveReleaseWorkflowInputPathWithMedium(medium, dir, workflowPath, outputPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveWorkflowPath("ci/release.yml") and resolveWorkflowPath("workflow-path") both resolve to the same file path.
|
||||||
|
//
|
||||||
|
// request := ReleaseWorkflowRequest{WorkflowPath: "ci/release.yml"}
|
||||||
|
// workflowPath, err := request.resolveWorkflowPath("/tmp/project", io.Local)
|
||||||
|
func (r ReleaseWorkflowRequest) resolveWorkflowPath(dir string, medium io.Medium) (string, error) {
|
||||||
|
workflowPath, err := build.ResolveReleaseWorkflowInputPathAliases(
|
||||||
|
medium,
|
||||||
|
dir,
|
||||||
|
r.Path,
|
||||||
|
r.WorkflowPath,
|
||||||
|
r.WorkflowPathSnake,
|
||||||
|
r.WorkflowPathHyphen,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("api.ReleaseWorkflowRequest", "workflow path aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
return workflowPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveOutputPath("ci/release.yml") and resolveOutputPath("workflow-output-path") both resolve to the same file path.
|
||||||
|
//
|
||||||
|
// request := ReleaseWorkflowRequest{WorkflowOutputPath: "ci/release.yml"}
|
||||||
|
// outputPath, err := request.resolveOutputPath("/tmp/project")
|
||||||
|
func (r ReleaseWorkflowRequest) resolveOutputPath(dir string, medium io.Medium) (string, error) {
|
||||||
|
resolvedOutputPath, err := build.ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(
|
||||||
|
medium,
|
||||||
|
dir,
|
||||||
|
r.OutputPath,
|
||||||
|
r.OutputPathHyphen,
|
||||||
|
r.OutputPathSnake,
|
||||||
|
r.LegacyOutputPath,
|
||||||
|
r.WorkflowOutputPath,
|
||||||
|
r.WorkflowOutputSnake,
|
||||||
|
r.WorkflowOutputHyphen,
|
||||||
|
r.WorkflowOutputPathSnake,
|
||||||
|
r.WorkflowOutputPathHyphen,
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("api.ReleaseWorkflowRequest", "workflow output aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedOutputPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *BuildProvider) generateReleaseWorkflow(c *gin.Context) {
|
||||||
|
dir, err := p.resolveDir()
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, api.Fail("resolve_failed", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var request ReleaseWorkflowRequest
|
||||||
|
if err := c.ShouldBindJSON(&request); err != nil {
|
||||||
|
// Empty bodies are valid; malformed JSON is not.
|
||||||
|
if !errors.Is(err, stdio.EOF) {
|
||||||
|
c.JSON(http.StatusBadRequest, api.Fail("invalid_request", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
workflowPath, err := request.resolveWorkflowTargetPath(dir, p.medium)
|
||||||
|
if err != nil {
|
||||||
|
c.JSON(http.StatusBadRequest, api.Fail("invalid_request", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := build.WriteReleaseWorkflow(p.medium, workflowPath); err != nil {
|
||||||
|
c.JSON(http.StatusInternalServerError, api.Fail("workflow_write_failed", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p.emitEvent("workflow.generated", map[string]any{
|
||||||
|
"path": workflowPath,
|
||||||
|
"generated": true,
|
||||||
|
})
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, api.OK(map[string]any{
|
||||||
|
"generated": true,
|
||||||
|
"path": workflowPath,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
// -- SDK Handlers -------------------------------------------------------------
|
// -- SDK Handlers -------------------------------------------------------------
|
||||||
|
|
||||||
func (p *BuildProvider) getSdkDiff(c *gin.Context) {
|
func (p *BuildProvider) getSdkDiff(c *gin.Context) {
|
||||||
|
|
@ -529,6 +757,10 @@ func (p *BuildProvider) generateSdk(c *gin.Context) {
|
||||||
Enabled: relCfg.SDK.Diff.Enabled,
|
Enabled: relCfg.SDK.Diff.Enabled,
|
||||||
FailOnBreaking: relCfg.SDK.Diff.FailOnBreaking,
|
FailOnBreaking: relCfg.SDK.Diff.FailOnBreaking,
|
||||||
},
|
},
|
||||||
|
Publish: sdk.PublishConfig{
|
||||||
|
Repo: relCfg.SDK.Publish.Repo,
|
||||||
|
Path: relCfg.SDK.Publish.Path,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -565,8 +797,26 @@ func getBuilder(projectType build.ProjectType) (build.Builder, error) {
|
||||||
return builders.NewWailsBuilder(), nil
|
return builders.NewWailsBuilder(), nil
|
||||||
case build.ProjectTypeGo:
|
case build.ProjectTypeGo:
|
||||||
return builders.NewGoBuilder(), nil
|
return builders.NewGoBuilder(), nil
|
||||||
|
case build.ProjectTypeNode:
|
||||||
|
return builders.NewNodeBuilder(), nil
|
||||||
|
case build.ProjectTypePHP:
|
||||||
|
return builders.NewPHPBuilder(), nil
|
||||||
|
case build.ProjectTypePython:
|
||||||
|
return builders.NewPythonBuilder(), nil
|
||||||
|
case build.ProjectTypeRust:
|
||||||
|
return builders.NewRustBuilder(), nil
|
||||||
|
case build.ProjectTypeDocs:
|
||||||
|
return builders.NewDocsBuilder(), nil
|
||||||
|
case build.ProjectTypeCPP:
|
||||||
|
return builders.NewCPPBuilder(), nil
|
||||||
|
case build.ProjectTypeDocker:
|
||||||
|
return builders.NewDockerBuilder(), nil
|
||||||
|
case build.ProjectTypeLinuxKit:
|
||||||
|
return builders.NewLinuxKitBuilder(), nil
|
||||||
|
case build.ProjectTypeTaskfile:
|
||||||
|
return builders.NewTaskfileBuilder(), nil
|
||||||
default:
|
default:
|
||||||
return nil, os.ErrNotExist
|
return nil, fs.ErrNotExist
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,22 +3,30 @@
|
||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"bytes"
|
||||||
|
"io/fs"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
|
||||||
|
coreapi "dappco.re/go/core/api"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestBuildProvider_Good_Identity(t *testing.T) {
|
func TestProvider_BuildProviderIdentity_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
|
|
||||||
assert.Equal(t, "build", p.Name())
|
assert.Equal(t, "build", p.Name())
|
||||||
assert.Equal(t, "/api/v1/build", p.BasePath())
|
assert.Equal(t, "/api/v1/build", p.BasePath())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_Element(t *testing.T) {
|
func TestProvider_BuildProviderElement_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
el := p.Element()
|
el := p.Element()
|
||||||
|
|
||||||
|
|
@ -26,7 +34,7 @@ func TestBuildProvider_Good_Element(t *testing.T) {
|
||||||
assert.Equal(t, "/assets/core-build.js", el.Source)
|
assert.Equal(t, "/assets/core-build.js", el.Source)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_Channels(t *testing.T) {
|
func TestProvider_BuildProviderChannels_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
channels := p.Channels()
|
channels := p.Channels()
|
||||||
|
|
||||||
|
|
@ -35,16 +43,17 @@ func TestBuildProvider_Good_Channels(t *testing.T) {
|
||||||
assert.Contains(t, channels, "build.failed")
|
assert.Contains(t, channels, "build.failed")
|
||||||
assert.Contains(t, channels, "release.started")
|
assert.Contains(t, channels, "release.started")
|
||||||
assert.Contains(t, channels, "release.complete")
|
assert.Contains(t, channels, "release.complete")
|
||||||
|
assert.Contains(t, channels, "workflow.generated")
|
||||||
assert.Contains(t, channels, "sdk.generated")
|
assert.Contains(t, channels, "sdk.generated")
|
||||||
assert.Len(t, channels, 6)
|
assert.Len(t, channels, 7)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_Describe(t *testing.T) {
|
func TestProvider_BuildProviderDescribe_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
routes := p.Describe()
|
routes := p.Describe()
|
||||||
|
|
||||||
// Should have 9 endpoint descriptions
|
// Should have 10 endpoint descriptions
|
||||||
assert.Len(t, routes, 9)
|
assert.Len(t, routes, 10)
|
||||||
|
|
||||||
// Verify key routes exist
|
// Verify key routes exist
|
||||||
paths := make(map[string]string)
|
paths := make(map[string]string)
|
||||||
|
|
@ -59,49 +68,173 @@ func TestBuildProvider_Good_Describe(t *testing.T) {
|
||||||
assert.Equal(t, "GET", paths["/release/version"])
|
assert.Equal(t, "GET", paths["/release/version"])
|
||||||
assert.Equal(t, "GET", paths["/release/changelog"])
|
assert.Equal(t, "GET", paths["/release/changelog"])
|
||||||
assert.Equal(t, "POST", paths["/release"])
|
assert.Equal(t, "POST", paths["/release"])
|
||||||
|
assert.Equal(t, "POST", paths["/release/workflow"])
|
||||||
assert.Equal(t, "GET", paths["/sdk/diff"])
|
assert.Equal(t, "GET", paths["/sdk/diff"])
|
||||||
assert.Equal(t, "POST", paths["/sdk/generate"])
|
assert.Equal(t, "POST", paths["/sdk/generate"])
|
||||||
|
|
||||||
|
var workflowRoute *coreapi.RouteDescription
|
||||||
|
for i := range routes {
|
||||||
|
if routes[i].Path == "/release/workflow" {
|
||||||
|
workflowRoute = &routes[i]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NotNil(t, workflowRoute)
|
||||||
|
require.NotNil(t, workflowRoute.RequestBody)
|
||||||
|
|
||||||
|
properties, ok := workflowRoute.RequestBody["properties"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
|
||||||
|
pathSchema, ok := properties["path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", pathSchema["type"])
|
||||||
|
assert.Equal(t, "Preferred workflow path input, relative to the project directory or absolute.", pathSchema["description"])
|
||||||
|
|
||||||
|
workflowPathSchema, ok := properties["workflowPath"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowPathSchema["type"])
|
||||||
|
assert.Equal(t, "Predictable alias for path, relative to the project directory or absolute.", workflowPathSchema["description"])
|
||||||
|
|
||||||
|
workflowPathSnakeSchema, ok := properties["workflow_path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowPathSnakeSchema["type"])
|
||||||
|
assert.Equal(t, "Snake_case alias for workflowPath.", workflowPathSnakeSchema["description"])
|
||||||
|
|
||||||
|
workflowPathHyphenSchema, ok := properties["workflow-path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowPathHyphenSchema["type"])
|
||||||
|
assert.Equal(t, "Hyphenated alias for workflowPath.", workflowPathHyphenSchema["description"])
|
||||||
|
|
||||||
|
outputSchema, ok := properties["output"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", outputSchema["type"])
|
||||||
|
assert.Equal(t, "Legacy alias for outputPath.", outputSchema["description"])
|
||||||
|
|
||||||
|
outputPathSchema, ok := properties["outputPath"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", outputPathSchema["type"])
|
||||||
|
assert.Equal(t, "Preferred explicit workflow output path, relative to the project directory or absolute.", outputPathSchema["description"])
|
||||||
|
|
||||||
|
outputPathHyphenSchema, ok := properties["output-path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", outputPathHyphenSchema["type"])
|
||||||
|
assert.Equal(t, "Hyphenated alias for outputPath.", outputPathHyphenSchema["description"])
|
||||||
|
|
||||||
|
workflowOutputPathSchema, ok := properties["workflowOutputPath"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowOutputPathSchema["type"])
|
||||||
|
assert.Equal(t, "Predictable alias for outputPath, relative to the project directory or absolute.", workflowOutputPathSchema["description"])
|
||||||
|
|
||||||
|
workflowOutputSnakeSchema, ok := properties["workflow_output"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowOutputSnakeSchema["type"])
|
||||||
|
assert.Equal(t, "Snake_case alias for workflowOutputPath.", workflowOutputSnakeSchema["description"])
|
||||||
|
|
||||||
|
workflowOutputHyphenSchema, ok := properties["workflow-output"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowOutputHyphenSchema["type"])
|
||||||
|
assert.Equal(t, "Hyphenated alias for workflowOutputPath.", workflowOutputHyphenSchema["description"])
|
||||||
|
|
||||||
|
workflowOutputPathSnakeSchema, ok := properties["workflow_output_path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowOutputPathSnakeSchema["type"])
|
||||||
|
assert.Equal(t, "Snake_case alias for workflowOutputPath.", workflowOutputPathSnakeSchema["description"])
|
||||||
|
|
||||||
|
workflowOutputPathHyphenSchema, ok := properties["workflow-output-path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", workflowOutputPathHyphenSchema["type"])
|
||||||
|
assert.Equal(t, "Hyphenated alias for workflowOutputPath.", workflowOutputPathHyphenSchema["description"])
|
||||||
|
|
||||||
|
outputPathSnakeSchema, ok := properties["output_path"].(map[string]any)
|
||||||
|
require.True(t, ok)
|
||||||
|
assert.Equal(t, "string", outputPathSnakeSchema["type"])
|
||||||
|
assert.Equal(t, "Snake_case alias for outputPath.", outputPathSnakeSchema["description"])
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_DefaultProjectDir(t *testing.T) {
|
func TestProvider_ReleaseWorkflowRequestResolvedOutputPath_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
absoluteDir := ax.Join(projectDir, "ops")
|
||||||
|
require.NoError(t, io.Local.EnsureDir(absoluteDir))
|
||||||
|
|
||||||
|
req := ReleaseWorkflowRequest{
|
||||||
|
WorkflowOutputPath: absoluteDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
path, err := req.resolveOutputPath(projectDir, io.Local)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(absoluteDir, "release.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_ReleaseWorkflowRequestResolvedOutputPathAliases_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
req := ReleaseWorkflowRequest{
|
||||||
|
WorkflowOutputSnake: "ci/workflow-output.yml",
|
||||||
|
WorkflowOutputHyphen: "ci/workflow-output.yml",
|
||||||
|
}
|
||||||
|
|
||||||
|
path, err := req.resolveOutputPath(projectDir, io.Local)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "ci", "workflow-output.yml"), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_BuildProviderDefaultProjectDir_Good(t *testing.T) {
|
||||||
p := NewProvider("", nil)
|
p := NewProvider("", nil)
|
||||||
assert.Equal(t, ".", p.projectDir)
|
assert.Equal(t, ".", p.projectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_CustomProjectDir(t *testing.T) {
|
func TestProvider_BuildProviderCustomProjectDir_Good(t *testing.T) {
|
||||||
p := NewProvider("/tmp/myproject", nil)
|
p := NewProvider("/tmp/myproject", nil)
|
||||||
assert.Equal(t, "/tmp/myproject", p.projectDir)
|
assert.Equal(t, "/tmp/myproject", p.projectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_NilHub(t *testing.T) {
|
func TestProvider_BuildProviderNilHub_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
// emitEvent should not panic with nil hub
|
// emitEvent should not panic with nil hub
|
||||||
p.emitEvent("build.started", map[string]any{"test": true})
|
p.emitEvent("build.started", map[string]any{"test": true})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetBuilder_Good_SupportedTypes(t *testing.T) {
|
func TestProvider_GetBuilderSupportedTypes_Good(t *testing.T) {
|
||||||
b, err := getBuilder(build.ProjectTypeGo)
|
cases := []struct {
|
||||||
require.NoError(t, err)
|
projectType build.ProjectType
|
||||||
assert.Equal(t, "go", b.Name())
|
name string
|
||||||
|
}{
|
||||||
|
{build.ProjectTypeGo, "go"},
|
||||||
|
{build.ProjectTypeWails, "wails"},
|
||||||
|
{build.ProjectTypeNode, "node"},
|
||||||
|
{build.ProjectTypePHP, "php"},
|
||||||
|
{build.ProjectTypePython, "python"},
|
||||||
|
{build.ProjectTypeRust, "rust"},
|
||||||
|
{build.ProjectTypeDocs, "docs"},
|
||||||
|
{build.ProjectTypeCPP, "cpp"},
|
||||||
|
{build.ProjectTypeDocker, "docker"},
|
||||||
|
{build.ProjectTypeLinuxKit, "linuxkit"},
|
||||||
|
{build.ProjectTypeTaskfile, "taskfile"},
|
||||||
|
}
|
||||||
|
|
||||||
b, err = getBuilder(build.ProjectTypeWails)
|
for _, tc := range cases {
|
||||||
require.NoError(t, err)
|
t.Run(string(tc.projectType), func(t *testing.T) {
|
||||||
assert.Equal(t, "wails", b.Name())
|
b, err := getBuilder(tc.projectType)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, tc.name, b.Name())
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetBuilder_Bad_UnsupportedType(t *testing.T) {
|
func TestProvider_GetBuilderUnsupportedType_Bad(t *testing.T) {
|
||||||
_, err := getBuilder(build.ProjectType("unknown"))
|
_, err := getBuilder(build.ProjectType("unknown"))
|
||||||
assert.ErrorIs(t, err, os.ErrNotExist)
|
assert.ErrorIs(t, err, fs.ErrNotExist)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_ResolveDir(t *testing.T) {
|
func TestProvider_BuildProviderResolveDir_Good(t *testing.T) {
|
||||||
p := NewProvider("/tmp", nil)
|
p := NewProvider("/tmp", nil)
|
||||||
dir, err := p.resolveDir()
|
dir, err := p.resolveDir()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "/tmp", dir)
|
assert.Equal(t, "/tmp", dir)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_ResolveDirRelative(t *testing.T) {
|
func TestProvider_BuildProviderResolveDirRelative_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
dir, err := p.resolveDir()
|
dir, err := p.resolveDir()
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
@ -109,7 +242,648 @@ func TestBuildProvider_Good_ResolveDirRelative(t *testing.T) {
|
||||||
assert.True(t, len(dir) > 1 && dir[0] == '/')
|
assert.True(t, len(dir) > 1 && dir[0] == '/')
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildProvider_Good_MediumSet(t *testing.T) {
|
func TestProvider_BuildProviderMediumSet_Good(t *testing.T) {
|
||||||
p := NewProvider(".", nil)
|
p := NewProvider(".", nil)
|
||||||
assert.NotNil(t, p.medium, "medium should be set to io.Local")
|
assert.NotNil(t, p.medium, "medium should be set to io.Local")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestProvider_ResolveProjectType_Good(t *testing.T) {
|
||||||
|
t.Run("honours explicit build type override", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module example"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := resolveProjectType(io.Local, dir, "docker")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeDocker, projectType)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("falls back to detection when build type is empty", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module example"), 0o644))
|
||||||
|
|
||||||
|
projectType, err := resolveProjectType(io.Local, dir, "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, build.ProjectTypeGo, projectType)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_CustomPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"ci/release.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflowPath":"ci/workflow-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowPathSnake_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow_path":"ci/workflow-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowPathHyphen_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow-path":"ci/workflow-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ConflictingWorkflowPathAliases_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"ci/workflow-path.yml","workflowPath":"ops/workflow-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_OutputAlias_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"output":"ci/release.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_OutputPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"outputPath":"ci/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_OutputPathHyphen_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"output-path":"ci/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_OutputPathSnake_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"output_path":"ci/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflowOutputPath":"ci/workflow-output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputSnake_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow_output":"ci/workflow-output.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputPathSnake_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow_output_path":"ci/workflow-output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputPathAbsoluteEquivalent_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
absolutePath := ax.Join(projectDir, "ci", "workflow-output-path.yml")
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"outputPath":"ci/workflow-output-path.yml","workflowOutputPath":"`+absolutePath+`"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputPathHyphen_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow-output-path":"ci/workflow-output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output-path.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowOutputHyphen_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"workflow-output":"ci/workflow-output.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "workflow-output.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ConflictingWorkflowOutputAliases_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"outputPath":"ci/output-path.yml","workflowOutputPath":"ops/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ConflictingOutputAliases_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"outputPath":"ci/output-path.yml","output_path":"ops/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ConflictingOutputPathHyphenAliases_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"outputPath":"ci/output-path.yml","output-path":"ops/output-path.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_BareDirectoryPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"ci"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_CurrentDirectoryPrefixedPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"./ci"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_WorkflowsDirectory_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":".github/workflows"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, ".github", "workflows", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ExistingDirectoryPath_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(projectDir, "ci"), 0o755))
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
p.medium = io.Local
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"ci"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
|
||||||
|
path := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
content, err := io.Local.Read(path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, content, "workflow_call:")
|
||||||
|
assert.Contains(t, content, "workflow_dispatch:")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_ConflictingPathAndOutput_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":"ci/release.yml","output":"ops/release.yml"}`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_GenerateReleaseWorkflow_InvalidJSON_Bad(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodPost, "/release/workflow", bytes.NewBufferString(`{"path":`))
|
||||||
|
request.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.generateReleaseWorkflow(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusBadRequest, recorder.Code)
|
||||||
|
|
||||||
|
path := build.ReleaseWorkflowPath(projectDir)
|
||||||
|
_, err := io.Local.Read(path)
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProvider_DiscoverProject_Good(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "go.mod"), []byte("module example"), 0o644))
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(projectDir, "frontend"), 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "frontend", "package.json"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
p := NewProvider(projectDir, nil)
|
||||||
|
|
||||||
|
recorder := httptest.NewRecorder()
|
||||||
|
request := httptest.NewRequest(http.MethodGet, "/discover", nil)
|
||||||
|
|
||||||
|
ctx, _ := gin.CreateTestContext(recorder)
|
||||||
|
ctx.Request = request
|
||||||
|
|
||||||
|
p.discoverProject(ctx)
|
||||||
|
|
||||||
|
assert.Equal(t, http.StatusOK, recorder.Code)
|
||||||
|
body := recorder.Body.String()
|
||||||
|
assert.Contains(t, body, `"types":["go","node"]`)
|
||||||
|
assert.Contains(t, body, `"primary":"go"`)
|
||||||
|
assert.Contains(t, body, `"primary_stack":"go"`)
|
||||||
|
assert.Contains(t, body, `"has_frontend":true`)
|
||||||
|
assert.Contains(t, body, `"has_subtree_npm":true`)
|
||||||
|
assert.Contains(t, body, `"go.mod":true`)
|
||||||
|
assert.Contains(t, body, `"frontend/package.json":true`)
|
||||||
|
}
|
||||||
|
|
|
||||||
737
pkg/api/ui/dist/core-build.js
vendored
737
pkg/api/ui/dist/core-build.js
vendored
File diff suppressed because it is too large
Load diff
|
|
@ -6,17 +6,19 @@ import (
|
||||||
"archive/zip"
|
"archive/zip"
|
||||||
"bytes"
|
"bytes"
|
||||||
"compress/gzip"
|
"compress/gzip"
|
||||||
"fmt"
|
|
||||||
"io"
|
"io"
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
io_interface "dappco.re/go/core/io"
|
io_interface "dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"github.com/Snider/Borg/pkg/compress"
|
"github.com/Snider/Borg/pkg/compress"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ArchiveFormat specifies the compression format for archives.
|
// ArchiveFormat specifies the compression format for archives.
|
||||||
|
//
|
||||||
|
// var fmt build.ArchiveFormat = build.ArchiveFormatGzip
|
||||||
type ArchiveFormat string
|
type ArchiveFormat string
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
|
@ -24,14 +26,33 @@ const (
|
||||||
ArchiveFormatGzip ArchiveFormat = "gz"
|
ArchiveFormatGzip ArchiveFormat = "gz"
|
||||||
// ArchiveFormatXZ uses tar.xz (xz/LZMA2 compression) - better compression ratio.
|
// ArchiveFormatXZ uses tar.xz (xz/LZMA2 compression) - better compression ratio.
|
||||||
ArchiveFormatXZ ArchiveFormat = "xz"
|
ArchiveFormatXZ ArchiveFormat = "xz"
|
||||||
// ArchiveFormatZip uses zip - for Windows.
|
// ArchiveFormatZip uses zip archives on any platform.
|
||||||
ArchiveFormatZip ArchiveFormat = "zip"
|
ArchiveFormatZip ArchiveFormat = "zip"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// ParseArchiveFormat converts a user-facing archive format string into an ArchiveFormat.
|
||||||
|
//
|
||||||
|
// format, err := build.ParseArchiveFormat("xz") // → build.ArchiveFormatXZ
|
||||||
|
// format, err := build.ParseArchiveFormat("zip") // → build.ArchiveFormatZip
|
||||||
|
func ParseArchiveFormat(value string) (ArchiveFormat, error) {
|
||||||
|
switch core.Trim(strings.ToLower(value)) {
|
||||||
|
case "", "gz", "gzip", "tgz", "tar.gz", "tar-gz":
|
||||||
|
return ArchiveFormatGzip, nil
|
||||||
|
case "xz", "txz", "tar.xz", "tar-xz":
|
||||||
|
return ArchiveFormatXZ, nil
|
||||||
|
case "zip":
|
||||||
|
return ArchiveFormatZip, nil
|
||||||
|
default:
|
||||||
|
return "", coreerr.E("build.ParseArchiveFormat", "unsupported archive format: "+value, nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Archive creates an archive for a single artifact using gzip compression.
|
// Archive creates an archive for a single artifact using gzip compression.
|
||||||
// Uses tar.gz for linux/darwin and zip for windows.
|
// Uses tar.gz for linux/darwin and zip for windows.
|
||||||
// The archive is created alongside the binary (e.g., dist/myapp_linux_amd64.tar.gz).
|
// The archive is created alongside the binary (e.g., dist/myapp_linux_amd64.tar.gz).
|
||||||
// Returns a new Artifact with Path pointing to the archive.
|
// Returns a new Artifact with Path pointing to the archive.
|
||||||
|
//
|
||||||
|
// archived, err := build.Archive(io.Local, artifact)
|
||||||
func Archive(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
func Archive(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
||||||
return ArchiveWithFormat(fs, artifact, ArchiveFormatGzip)
|
return ArchiveWithFormat(fs, artifact, ArchiveFormatGzip)
|
||||||
}
|
}
|
||||||
|
|
@ -39,14 +60,19 @@ func Archive(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
||||||
// ArchiveXZ creates an archive for a single artifact using xz compression.
|
// ArchiveXZ creates an archive for a single artifact using xz compression.
|
||||||
// Uses tar.xz for linux/darwin and zip for windows.
|
// Uses tar.xz for linux/darwin and zip for windows.
|
||||||
// Returns a new Artifact with Path pointing to the archive.
|
// Returns a new Artifact with Path pointing to the archive.
|
||||||
|
//
|
||||||
|
// archived, err := build.ArchiveXZ(io.Local, artifact)
|
||||||
func ArchiveXZ(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
func ArchiveXZ(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
||||||
return ArchiveWithFormat(fs, artifact, ArchiveFormatXZ)
|
return ArchiveWithFormat(fs, artifact, ArchiveFormatXZ)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ArchiveWithFormat creates an archive for a single artifact with the specified format.
|
// ArchiveWithFormat creates an archive for a single artifact with the specified format.
|
||||||
// Uses tar.gz or tar.xz for linux/darwin and zip for windows.
|
// Uses tar.gz, tar.xz, or zip depending on the requested format.
|
||||||
|
// Windows artifacts always use zip unless zip is requested explicitly.
|
||||||
// The archive is created alongside the binary (e.g., dist/myapp_linux_amd64.tar.xz).
|
// The archive is created alongside the binary (e.g., dist/myapp_linux_amd64.tar.xz).
|
||||||
// Returns a new Artifact with Path pointing to the archive.
|
// Returns a new Artifact with Path pointing to the archive.
|
||||||
|
//
|
||||||
|
// archived, err := build.ArchiveWithFormat(io.Local, artifact, build.ArchiveFormatXZ)
|
||||||
func ArchiveWithFormat(fs io_interface.Medium, artifact Artifact, format ArchiveFormat) (Artifact, error) {
|
func ArchiveWithFormat(fs io_interface.Medium, artifact Artifact, format ArchiveFormat) (Artifact, error) {
|
||||||
if artifact.Path == "" {
|
if artifact.Path == "" {
|
||||||
return Artifact{}, coreerr.E("build.Archive", "artifact path is empty", nil)
|
return Artifact{}, coreerr.E("build.Archive", "artifact path is empty", nil)
|
||||||
|
|
@ -61,22 +87,20 @@ func ArchiveWithFormat(fs io_interface.Medium, artifact Artifact, format Archive
|
||||||
return Artifact{}, coreerr.E("build.Archive", "source path is a directory, expected file", nil)
|
return Artifact{}, coreerr.E("build.Archive", "source path is a directory, expected file", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine archive type based on OS and format
|
// Determine archive type based on OS and format.
|
||||||
var archivePath string
|
var archivePath string
|
||||||
var archiveFunc func(fs io_interface.Medium, src, dst string) error
|
var archiveFunc func(fs io_interface.Medium, src, dst string) error
|
||||||
|
|
||||||
if artifact.OS == "windows" {
|
switch {
|
||||||
|
case format == ArchiveFormatZip || artifact.OS == "windows":
|
||||||
archivePath = archiveFilename(artifact, ".zip")
|
archivePath = archiveFilename(artifact, ".zip")
|
||||||
archiveFunc = createZipArchive
|
archiveFunc = createZipArchive
|
||||||
} else {
|
case format == ArchiveFormatXZ:
|
||||||
switch format {
|
archivePath = archiveFilename(artifact, ".tar.xz")
|
||||||
case ArchiveFormatXZ:
|
archiveFunc = createTarXzArchive
|
||||||
archivePath = archiveFilename(artifact, ".tar.xz")
|
default:
|
||||||
archiveFunc = createTarXzArchive
|
archivePath = archiveFilename(artifact, ".tar.gz")
|
||||||
default:
|
archiveFunc = createTarGzArchive
|
||||||
archivePath = archiveFilename(artifact, ".tar.gz")
|
|
||||||
archiveFunc = createTarGzArchive
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create the archive
|
// Create the archive
|
||||||
|
|
@ -94,18 +118,24 @@ func ArchiveWithFormat(fs io_interface.Medium, artifact Artifact, format Archive
|
||||||
|
|
||||||
// ArchiveAll archives all artifacts using gzip compression.
|
// ArchiveAll archives all artifacts using gzip compression.
|
||||||
// Returns a slice of new artifacts pointing to the archives.
|
// Returns a slice of new artifacts pointing to the archives.
|
||||||
|
//
|
||||||
|
// archived, err := build.ArchiveAll(io.Local, artifacts)
|
||||||
func ArchiveAll(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
func ArchiveAll(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
||||||
return ArchiveAllWithFormat(fs, artifacts, ArchiveFormatGzip)
|
return ArchiveAllWithFormat(fs, artifacts, ArchiveFormatGzip)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ArchiveAllXZ archives all artifacts using xz compression.
|
// ArchiveAllXZ archives all artifacts using xz compression.
|
||||||
// Returns a slice of new artifacts pointing to the archives.
|
// Returns a slice of new artifacts pointing to the archives.
|
||||||
|
//
|
||||||
|
// archived, err := build.ArchiveAllXZ(io.Local, artifacts)
|
||||||
func ArchiveAllXZ(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
func ArchiveAllXZ(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
||||||
return ArchiveAllWithFormat(fs, artifacts, ArchiveFormatXZ)
|
return ArchiveAllWithFormat(fs, artifacts, ArchiveFormatXZ)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ArchiveAllWithFormat archives all artifacts with the specified format.
|
// ArchiveAllWithFormat archives all artifacts with the specified format.
|
||||||
// Returns a slice of new artifacts pointing to the archives.
|
// Returns a slice of new artifacts pointing to the archives.
|
||||||
|
//
|
||||||
|
// archived, err := build.ArchiveAllWithFormat(io.Local, artifacts, build.ArchiveFormatXZ)
|
||||||
func ArchiveAllWithFormat(fs io_interface.Medium, artifacts []Artifact, format ArchiveFormat) ([]Artifact, error) {
|
func ArchiveAllWithFormat(fs io_interface.Medium, artifacts []Artifact, format ArchiveFormat) ([]Artifact, error) {
|
||||||
if len(artifacts) == 0 {
|
if len(artifacts) == 0 {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
|
|
@ -127,18 +157,18 @@ func ArchiveAllWithFormat(fs io_interface.Medium, artifacts []Artifact, format A
|
||||||
// Format: dist/myapp_linux_amd64.tar.gz (binary name taken from artifact path).
|
// Format: dist/myapp_linux_amd64.tar.gz (binary name taken from artifact path).
|
||||||
func archiveFilename(artifact Artifact, ext string) string {
|
func archiveFilename(artifact Artifact, ext string) string {
|
||||||
// Get the directory containing the binary (e.g., dist/linux_amd64)
|
// Get the directory containing the binary (e.g., dist/linux_amd64)
|
||||||
dir := filepath.Dir(artifact.Path)
|
dir := ax.Dir(artifact.Path)
|
||||||
// Go up one level to the output directory (e.g., dist)
|
// Go up one level to the output directory (e.g., dist)
|
||||||
outputDir := filepath.Dir(dir)
|
outputDir := ax.Dir(dir)
|
||||||
|
|
||||||
// Get the binary name without extension
|
// Get the binary name without extension
|
||||||
binaryName := filepath.Base(artifact.Path)
|
binaryName := ax.Base(artifact.Path)
|
||||||
binaryName = strings.TrimSuffix(binaryName, ".exe")
|
binaryName = core.TrimSuffix(binaryName, ".exe")
|
||||||
|
|
||||||
// Construct archive name: myapp_linux_amd64.tar.gz
|
// Construct archive name: myapp_linux_amd64.tar.gz
|
||||||
archiveName := fmt.Sprintf("%s_%s_%s%s", binaryName, artifact.OS, artifact.Arch, ext)
|
archiveName := core.Sprintf("%s_%s_%s%s", binaryName, artifact.OS, artifact.Arch, ext)
|
||||||
|
|
||||||
return filepath.Join(outputDir, archiveName)
|
return ax.Join(outputDir, archiveName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// createTarXzArchive creates a tar.xz archive containing a single file.
|
// createTarXzArchive creates a tar.xz archive containing a single file.
|
||||||
|
|
@ -165,7 +195,7 @@ func createTarXzArchive(fs io_interface.Medium, src, dst string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("build.createTarXzArchive", "failed to create tar header", err)
|
return coreerr.E("build.createTarXzArchive", "failed to create tar header", err)
|
||||||
}
|
}
|
||||||
header.Name = filepath.Base(src)
|
header.Name = ax.Base(src)
|
||||||
|
|
||||||
if err := tarWriter.WriteHeader(header); err != nil {
|
if err := tarWriter.WriteHeader(header); err != nil {
|
||||||
return coreerr.E("build.createTarXzArchive", "failed to write tar header", err)
|
return coreerr.E("build.createTarXzArchive", "failed to write tar header", err)
|
||||||
|
|
@ -234,7 +264,7 @@ func createTarGzArchive(fs io_interface.Medium, src, dst string) error {
|
||||||
return coreerr.E("build.createTarGzArchive", "failed to create tar header", err)
|
return coreerr.E("build.createTarGzArchive", "failed to create tar header", err)
|
||||||
}
|
}
|
||||||
// Use just the filename, not the full path
|
// Use just the filename, not the full path
|
||||||
header.Name = filepath.Base(src)
|
header.Name = ax.Base(src)
|
||||||
|
|
||||||
// Write header
|
// Write header
|
||||||
if err := tarWriter.WriteHeader(header); err != nil {
|
if err := tarWriter.WriteHeader(header); err != nil {
|
||||||
|
|
@ -280,7 +310,7 @@ func createZipArchive(fs io_interface.Medium, src, dst string) error {
|
||||||
return coreerr.E("build.createZipArchive", "failed to create zip header", err)
|
return coreerr.E("build.createZipArchive", "failed to create zip header", err)
|
||||||
}
|
}
|
||||||
// Use just the filename, not the full path
|
// Use just the filename, not the full path
|
||||||
header.Name = filepath.Base(src)
|
header.Name = ax.Base(src)
|
||||||
header.Method = zip.Deflate
|
header.Method = zip.Deflate
|
||||||
|
|
||||||
// Create file in archive
|
// Create file in archive
|
||||||
|
|
|
||||||
|
|
@ -6,10 +6,11 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"compress/gzip"
|
"compress/gzip"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
stdfs "io/fs"
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
io_interface "dappco.re/go/core/io"
|
io_interface "dappco.re/go/core/io"
|
||||||
"github.com/Snider/Borg/pkg/compress"
|
"github.com/Snider/Borg/pkg/compress"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
@ -24,20 +25,20 @@ func setupArchiveTestFile(t *testing.T, name, os_, arch string) (binaryPath stri
|
||||||
outputDir = t.TempDir()
|
outputDir = t.TempDir()
|
||||||
|
|
||||||
// Create platform directory: dist/os_arch
|
// Create platform directory: dist/os_arch
|
||||||
platformDir := filepath.Join(outputDir, os_+"_"+arch)
|
platformDir := ax.Join(outputDir, os_+"_"+arch)
|
||||||
err := os.MkdirAll(platformDir, 0755)
|
err := ax.MkdirAll(platformDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create test binary
|
// Create test binary
|
||||||
binaryPath = filepath.Join(platformDir, name)
|
binaryPath = ax.Join(platformDir, name)
|
||||||
content := []byte("#!/bin/bash\necho 'Hello, World!'\n")
|
content := []byte("#!/bin/bash\necho 'Hello, World!'\n")
|
||||||
err = os.WriteFile(binaryPath, content, 0755)
|
err = ax.WriteFile(binaryPath, content, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
return binaryPath, outputDir
|
return binaryPath, outputDir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArchive_Good(t *testing.T) {
|
func TestArchive_Archive_Good(t *testing.T) {
|
||||||
fs := io_interface.Local
|
fs := io_interface.Local
|
||||||
t.Run("creates tar.gz for linux", func(t *testing.T) {
|
t.Run("creates tar.gz for linux", func(t *testing.T) {
|
||||||
binaryPath, outputDir := setupArchiveTestFile(t, "myapp", "linux", "amd64")
|
binaryPath, outputDir := setupArchiveTestFile(t, "myapp", "linux", "amd64")
|
||||||
|
|
@ -52,7 +53,7 @@ func TestArchive_Good(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Verify archive was created
|
// Verify archive was created
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_linux_amd64.tar.gz")
|
expectedPath := ax.Join(outputDir, "myapp_linux_amd64.tar.gz")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
|
@ -76,7 +77,7 @@ func TestArchive_Good(t *testing.T) {
|
||||||
result, err := Archive(fs, artifact)
|
result, err := Archive(fs, artifact)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_darwin_arm64.tar.gz")
|
expectedPath := ax.Join(outputDir, "myapp_darwin_arm64.tar.gz")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
|
@ -96,7 +97,7 @@ func TestArchive_Good(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Windows archives should strip .exe from archive name
|
// Windows archives should strip .exe from archive name
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_windows_amd64.zip")
|
expectedPath := ax.Join(outputDir, "myapp_windows_amd64.zip")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
|
@ -130,7 +131,7 @@ func TestArchive_Good(t *testing.T) {
|
||||||
result, err := ArchiveXZ(fs, artifact)
|
result, err := ArchiveXZ(fs, artifact)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_linux_amd64.tar.xz")
|
expectedPath := ax.Join(outputDir, "myapp_linux_amd64.tar.xz")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
|
@ -149,7 +150,7 @@ func TestArchive_Good(t *testing.T) {
|
||||||
result, err := ArchiveWithFormat(fs, artifact, ArchiveFormatXZ)
|
result, err := ArchiveWithFormat(fs, artifact, ArchiveFormatXZ)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_darwin_arm64.tar.xz")
|
expectedPath := ax.Join(outputDir, "myapp_darwin_arm64.tar.xz")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
|
@ -169,15 +170,71 @@ func TestArchive_Good(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Windows should still get .zip regardless of format
|
// Windows should still get .zip regardless of format
|
||||||
expectedPath := filepath.Join(outputDir, "myapp_windows_amd64.zip")
|
expectedPath := ax.Join(outputDir, "myapp_windows_amd64.zip")
|
||||||
assert.Equal(t, expectedPath, result.Path)
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
assert.FileExists(t, result.Path)
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
verifyZipContent(t, result.Path, "myapp.exe")
|
verifyZipContent(t, result.Path, "myapp.exe")
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("creates zip for linux when explicitly requested", func(t *testing.T) {
|
||||||
|
binaryPath, outputDir := setupArchiveTestFile(t, "myapp", "linux", "amd64")
|
||||||
|
|
||||||
|
artifact := Artifact{
|
||||||
|
Path: binaryPath,
|
||||||
|
OS: "linux",
|
||||||
|
Arch: "amd64",
|
||||||
|
}
|
||||||
|
|
||||||
|
result, err := ArchiveWithFormat(fs, artifact, ArchiveFormatZip)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
expectedPath := ax.Join(outputDir, "myapp_linux_amd64.zip")
|
||||||
|
assert.Equal(t, expectedPath, result.Path)
|
||||||
|
assert.FileExists(t, result.Path)
|
||||||
|
|
||||||
|
verifyZipContent(t, result.Path, "myapp")
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArchive_Bad(t *testing.T) {
|
func TestArchive_ParseArchiveFormat_Good(t *testing.T) {
|
||||||
|
t.Run("defaults to gzip when empty", func(t *testing.T) {
|
||||||
|
format, err := ParseArchiveFormat("")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ArchiveFormatGzip, format)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts xz aliases", func(t *testing.T) {
|
||||||
|
for _, input := range []string{"xz", "txz", "tar.xz", "tar-xz"} {
|
||||||
|
format, err := ParseArchiveFormat(input)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ArchiveFormatXZ, format)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts zip", func(t *testing.T) {
|
||||||
|
format, err := ParseArchiveFormat("zip")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ArchiveFormatZip, format)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts gzip aliases", func(t *testing.T) {
|
||||||
|
for _, input := range []string{"gz", "gzip", "tgz", "tar.gz", "tar-gz"} {
|
||||||
|
format, err := ParseArchiveFormat(input)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ArchiveFormatGzip, format)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("rejects unsupported formats", func(t *testing.T) {
|
||||||
|
format, err := ParseArchiveFormat("bzip2")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, format)
|
||||||
|
assert.Contains(t, err.Error(), "unsupported archive format")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestArchive_Archive_Bad(t *testing.T) {
|
||||||
fs := io_interface.Local
|
fs := io_interface.Local
|
||||||
t.Run("returns error for empty path", func(t *testing.T) {
|
t.Run("returns error for empty path", func(t *testing.T) {
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
|
|
@ -221,7 +278,7 @@ func TestArchive_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArchiveAll_Good(t *testing.T) {
|
func TestArchive_ArchiveAll_Good(t *testing.T) {
|
||||||
fs := io_interface.Local
|
fs := io_interface.Local
|
||||||
t.Run("archives multiple artifacts", func(t *testing.T) {
|
t.Run("archives multiple artifacts", func(t *testing.T) {
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
|
|
@ -239,8 +296,8 @@ func TestArchiveAll_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, target := range targets {
|
for _, target := range targets {
|
||||||
platformDir := filepath.Join(outputDir, target.os_+"_"+target.arch)
|
platformDir := ax.Join(outputDir, target.os_+"_"+target.arch)
|
||||||
err := os.MkdirAll(platformDir, 0755)
|
err := ax.MkdirAll(platformDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
name := "myapp"
|
name := "myapp"
|
||||||
|
|
@ -248,8 +305,8 @@ func TestArchiveAll_Good(t *testing.T) {
|
||||||
name = "myapp.exe"
|
name = "myapp.exe"
|
||||||
}
|
}
|
||||||
|
|
||||||
binaryPath := filepath.Join(platformDir, name)
|
binaryPath := ax.Join(platformDir, name)
|
||||||
err = os.WriteFile(binaryPath, []byte("binary content"), 0755)
|
err = ax.WriteFile(binaryPath, []byte("binary content"), 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
artifacts = append(artifacts, Artifact{
|
artifacts = append(artifacts, Artifact{
|
||||||
|
|
@ -284,7 +341,7 @@ func TestArchiveAll_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArchiveAll_Bad(t *testing.T) {
|
func TestArchive_ArchiveAll_Bad(t *testing.T) {
|
||||||
fs := io_interface.Local
|
fs := io_interface.Local
|
||||||
t.Run("returns partial results on error", func(t *testing.T) {
|
t.Run("returns partial results on error", func(t *testing.T) {
|
||||||
binaryPath, _ := setupArchiveTestFile(t, "myapp", "linux", "amd64")
|
binaryPath, _ := setupArchiveTestFile(t, "myapp", "linux", "amd64")
|
||||||
|
|
@ -302,7 +359,7 @@ func TestArchiveAll_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestArchiveFilename_Good(t *testing.T) {
|
func TestArchive_ArchiveFilename_Good(t *testing.T) {
|
||||||
t.Run("generates correct tar.gz filename", func(t *testing.T) {
|
t.Run("generates correct tar.gz filename", func(t *testing.T) {
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
Path: "/output/linux_amd64/myapp",
|
Path: "/output/linux_amd64/myapp",
|
||||||
|
|
@ -344,7 +401,7 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
binaryPath, _ := setupArchiveTestFile(t, "roundtrip-app", "linux", "amd64")
|
binaryPath, _ := setupArchiveTestFile(t, "roundtrip-app", "linux", "amd64")
|
||||||
|
|
||||||
// Read original content
|
// Read original content
|
||||||
originalContent, err := os.ReadFile(binaryPath)
|
originalContent, err := ax.ReadFile(binaryPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
|
|
@ -366,7 +423,7 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
t.Run("tar.xz round trip preserves content", func(t *testing.T) {
|
t.Run("tar.xz round trip preserves content", func(t *testing.T) {
|
||||||
binaryPath, _ := setupArchiveTestFile(t, "roundtrip-xz", "linux", "arm64")
|
binaryPath, _ := setupArchiveTestFile(t, "roundtrip-xz", "linux", "arm64")
|
||||||
|
|
||||||
originalContent, err := os.ReadFile(binaryPath)
|
originalContent, err := ax.ReadFile(binaryPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
|
|
@ -386,7 +443,7 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
t.Run("zip round trip preserves content", func(t *testing.T) {
|
t.Run("zip round trip preserves content", func(t *testing.T) {
|
||||||
binaryPath, _ := setupArchiveTestFile(t, "roundtrip.exe", "windows", "amd64")
|
binaryPath, _ := setupArchiveTestFile(t, "roundtrip.exe", "windows", "amd64")
|
||||||
|
|
||||||
originalContent, err := os.ReadFile(binaryPath)
|
originalContent, err := ax.ReadFile(binaryPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
|
|
@ -418,21 +475,21 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
// Extract and verify permissions are preserved
|
// Extract and verify permissions are preserved
|
||||||
mode := extractTarGzFileMode(t, archiveArtifact.Path, "perms-app")
|
mode := extractTarGzFileMode(t, archiveArtifact.Path, "perms-app")
|
||||||
// The original file was written with 0755
|
// The original file was written with 0755
|
||||||
assert.Equal(t, os.FileMode(0755), mode&os.ModePerm)
|
assert.Equal(t, stdfs.FileMode(0o755), mode&stdfs.ModePerm)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("round trip with large binary content", func(t *testing.T) {
|
t.Run("round trip with large binary content", func(t *testing.T) {
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
platformDir := filepath.Join(outputDir, "linux_amd64")
|
platformDir := ax.Join(outputDir, "linux_amd64")
|
||||||
require.NoError(t, os.MkdirAll(platformDir, 0755))
|
require.NoError(t, ax.MkdirAll(platformDir, 0755))
|
||||||
|
|
||||||
// Create a larger file (64KB)
|
// Create a larger file (64KB)
|
||||||
largeContent := make([]byte, 64*1024)
|
largeContent := make([]byte, 64*1024)
|
||||||
for i := range largeContent {
|
for i := range largeContent {
|
||||||
largeContent[i] = byte(i % 256)
|
largeContent[i] = byte(i % 256)
|
||||||
}
|
}
|
||||||
binaryPath := filepath.Join(platformDir, "large-app")
|
binaryPath := ax.Join(platformDir, "large-app")
|
||||||
require.NoError(t, os.WriteFile(binaryPath, largeContent, 0755))
|
require.NoError(t, ax.WriteFile(binaryPath, largeContent, 0755))
|
||||||
|
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
Path: binaryPath,
|
Path: binaryPath,
|
||||||
|
|
@ -449,16 +506,16 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("archive is smaller than original for tar.gz", func(t *testing.T) {
|
t.Run("archive is smaller than original for tar.gz", func(t *testing.T) {
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
platformDir := filepath.Join(outputDir, "linux_amd64")
|
platformDir := ax.Join(outputDir, "linux_amd64")
|
||||||
require.NoError(t, os.MkdirAll(platformDir, 0755))
|
require.NoError(t, ax.MkdirAll(platformDir, 0755))
|
||||||
|
|
||||||
// Create a compressible file (repeated pattern)
|
// Create a compressible file (repeated pattern)
|
||||||
compressibleContent := make([]byte, 4096)
|
compressibleContent := make([]byte, 4096)
|
||||||
for i := range compressibleContent {
|
for i := range compressibleContent {
|
||||||
compressibleContent[i] = 'A'
|
compressibleContent[i] = 'A'
|
||||||
}
|
}
|
||||||
binaryPath := filepath.Join(platformDir, "compressible-app")
|
binaryPath := ax.Join(platformDir, "compressible-app")
|
||||||
require.NoError(t, os.WriteFile(binaryPath, compressibleContent, 0755))
|
require.NoError(t, ax.WriteFile(binaryPath, compressibleContent, 0755))
|
||||||
|
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
Path: binaryPath,
|
Path: binaryPath,
|
||||||
|
|
@ -469,9 +526,9 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
archiveArtifact, err := Archive(fs, artifact)
|
archiveArtifact, err := Archive(fs, artifact)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
originalInfo, err := os.Stat(binaryPath)
|
originalInfo, err := ax.Stat(binaryPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
archiveInfo, err := os.Stat(archiveArtifact.Path)
|
archiveInfo, err := ax.Stat(archiveArtifact.Path)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Compressed archive should be smaller than original
|
// Compressed archive should be smaller than original
|
||||||
|
|
@ -483,7 +540,7 @@ func TestArchive_RoundTrip_Good(t *testing.T) {
|
||||||
func extractTarGzFile(t *testing.T, archivePath, fileName string) []byte {
|
func extractTarGzFile(t *testing.T, archivePath, fileName string) []byte {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
file, err := os.Open(archivePath)
|
file, err := ax.Open(archivePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer func() { _ = file.Close() }()
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
|
@ -509,10 +566,10 @@ func extractTarGzFile(t *testing.T, archivePath, fileName string) []byte {
|
||||||
}
|
}
|
||||||
|
|
||||||
// extractTarGzFileMode extracts the file mode of a named file from a tar.gz archive.
|
// extractTarGzFileMode extracts the file mode of a named file from a tar.gz archive.
|
||||||
func extractTarGzFileMode(t *testing.T, archivePath, fileName string) os.FileMode {
|
func extractTarGzFileMode(t *testing.T, archivePath, fileName string) stdfs.FileMode {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
file, err := os.Open(archivePath)
|
file, err := ax.Open(archivePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer func() { _ = file.Close() }()
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
|
@ -539,7 +596,7 @@ func extractTarGzFileMode(t *testing.T, archivePath, fileName string) os.FileMod
|
||||||
func extractTarXzFile(t *testing.T, archivePath, fileName string) []byte {
|
func extractTarXzFile(t *testing.T, archivePath, fileName string) []byte {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
xzData, err := os.ReadFile(archivePath)
|
xzData, err := ax.ReadFile(archivePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
tarData, err := compress.Decompress(xzData)
|
tarData, err := compress.Decompress(xzData)
|
||||||
|
|
@ -590,7 +647,7 @@ func extractZipFile(t *testing.T, archivePath, fileName string) []byte {
|
||||||
func verifyTarGzContent(t *testing.T, archivePath, expectedName string) {
|
func verifyTarGzContent(t *testing.T, archivePath, expectedName string) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
file, err := os.Open(archivePath)
|
file, err := ax.Open(archivePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer func() { _ = file.Close() }()
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
|
@ -626,7 +683,7 @@ func verifyTarXzContent(t *testing.T, archivePath, expectedName string) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
// Read the xz-compressed file
|
// Read the xz-compressed file
|
||||||
xzData, err := os.ReadFile(archivePath)
|
xzData, err := ax.ReadFile(archivePath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Decompress with Borg
|
// Decompress with Borg
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
// Package build provides project type detection and cross-compilation for the Core build system.
|
// Package build provides project type detection and cross-compilation for the Core build system.
|
||||||
// It supports Go, Wails, Node.js, and PHP projects with automatic detection based on
|
// It supports Go, Wails, Node.js, PHP, Python, Rust, Docs, Docker, LinuxKit, C++, and Taskfile
|
||||||
// marker files (go.mod, wails.json, package.json, composer.json).
|
// projects with automatic detection based on marker files and builder-specific probes.
|
||||||
package build
|
package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
|
@ -10,11 +10,13 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// ProjectType represents a detected project type.
|
// ProjectType represents a detected project type.
|
||||||
|
//
|
||||||
|
// var t build.ProjectType = build.ProjectTypeGo
|
||||||
type ProjectType string
|
type ProjectType string
|
||||||
|
|
||||||
// Project type constants for build detection.
|
// Project type constants for build detection.
|
||||||
const (
|
const (
|
||||||
// ProjectTypeGo indicates a standard Go project with go.mod.
|
// ProjectTypeGo indicates a standard Go project with go.mod or go.work.
|
||||||
ProjectTypeGo ProjectType = "go"
|
ProjectTypeGo ProjectType = "go"
|
||||||
// ProjectTypeWails indicates a Wails desktop application.
|
// ProjectTypeWails indicates a Wails desktop application.
|
||||||
ProjectTypeWails ProjectType = "wails"
|
ProjectTypeWails ProjectType = "wails"
|
||||||
|
|
@ -30,20 +32,32 @@ const (
|
||||||
ProjectTypeLinuxKit ProjectType = "linuxkit"
|
ProjectTypeLinuxKit ProjectType = "linuxkit"
|
||||||
// ProjectTypeTaskfile indicates a project using Taskfile automation.
|
// ProjectTypeTaskfile indicates a project using Taskfile automation.
|
||||||
ProjectTypeTaskfile ProjectType = "taskfile"
|
ProjectTypeTaskfile ProjectType = "taskfile"
|
||||||
|
// ProjectTypeDocs indicates a documentation project with mkdocs.yml.
|
||||||
|
ProjectTypeDocs ProjectType = "docs"
|
||||||
|
// ProjectTypePython indicates a Python project with pyproject.toml or requirements.txt.
|
||||||
|
ProjectTypePython ProjectType = "python"
|
||||||
|
// ProjectTypeRust indicates a Rust project with Cargo.toml.
|
||||||
|
ProjectTypeRust ProjectType = "rust"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Target represents a build target platform.
|
// Target represents a build target platform.
|
||||||
|
//
|
||||||
|
// t := build.Target{OS: "linux", Arch: "amd64"}
|
||||||
type Target struct {
|
type Target struct {
|
||||||
OS string
|
OS string
|
||||||
Arch string
|
Arch string
|
||||||
}
|
}
|
||||||
|
|
||||||
// String returns the target in GOOS/GOARCH format.
|
// String returns the target in GOOS/GOARCH format.
|
||||||
|
//
|
||||||
|
// s := t.String() // → "linux/amd64"
|
||||||
func (t Target) String() string {
|
func (t Target) String() string {
|
||||||
return t.OS + "/" + t.Arch
|
return t.OS + "/" + t.Arch
|
||||||
}
|
}
|
||||||
|
|
||||||
// Artifact represents a build output file.
|
// Artifact represents a build output file.
|
||||||
|
//
|
||||||
|
// a := build.Artifact{Path: "dist/linux_amd64/myapp", OS: "linux", Arch: "amd64"}
|
||||||
type Artifact struct {
|
type Artifact struct {
|
||||||
Path string
|
Path string
|
||||||
OS string
|
OS string
|
||||||
|
|
@ -52,9 +66,13 @@ type Artifact struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Config holds build configuration.
|
// Config holds build configuration.
|
||||||
|
//
|
||||||
|
// cfg := &build.Config{FS: io.Local, ProjectDir: ".", OutputDir: "dist", Name: "myapp"}
|
||||||
type Config struct {
|
type Config struct {
|
||||||
// FS is the medium used for file operations.
|
// FS is the medium used for file operations.
|
||||||
FS io.Medium
|
FS io.Medium
|
||||||
|
// Project holds build-time project metadata.
|
||||||
|
Project Project
|
||||||
// ProjectDir is the root directory of the project.
|
// ProjectDir is the root directory of the project.
|
||||||
ProjectDir string
|
ProjectDir string
|
||||||
// OutputDir is where build artifacts are placed.
|
// OutputDir is where build artifacts are placed.
|
||||||
|
|
@ -65,8 +83,22 @@ type Config struct {
|
||||||
Version string
|
Version string
|
||||||
// LDFlags are additional linker flags.
|
// LDFlags are additional linker flags.
|
||||||
LDFlags []string
|
LDFlags []string
|
||||||
|
// Flags are additional build flags.
|
||||||
|
Flags []string
|
||||||
|
// BuildTags are Go build tags passed through to `go build`.
|
||||||
|
BuildTags []string
|
||||||
|
// Env are additional environment variables.
|
||||||
|
Env []string
|
||||||
|
// Cache holds build cache configuration for builders that can use it.
|
||||||
|
Cache CacheConfig
|
||||||
// CGO enables CGO for the build (required for Wails, FrankenPHP, etc).
|
// CGO enables CGO for the build (required for Wails, FrankenPHP, etc).
|
||||||
CGO bool
|
CGO bool
|
||||||
|
// Obfuscate uses garble instead of go build for binary obfuscation.
|
||||||
|
Obfuscate bool
|
||||||
|
// NSIS enables Windows NSIS installer generation (Wails projects only).
|
||||||
|
NSIS bool
|
||||||
|
// WebView2 sets the WebView2 delivery method: download|embed|browser|error.
|
||||||
|
WebView2 string
|
||||||
|
|
||||||
// Docker-specific config
|
// Docker-specific config
|
||||||
Dockerfile string // Path to Dockerfile (default: Dockerfile)
|
Dockerfile string // Path to Dockerfile (default: Dockerfile)
|
||||||
|
|
@ -75,13 +107,17 @@ type Config struct {
|
||||||
Tags []string // Additional tags to apply
|
Tags []string // Additional tags to apply
|
||||||
BuildArgs map[string]string // Docker build arguments
|
BuildArgs map[string]string // Docker build arguments
|
||||||
Push bool // Whether to push after build
|
Push bool // Whether to push after build
|
||||||
|
Load bool // Whether to load a single-platform image into the local daemon after build
|
||||||
|
|
||||||
// LinuxKit-specific config
|
// LinuxKit-specific config
|
||||||
LinuxKitConfig string // Path to LinuxKit YAML config
|
LinuxKitConfig string // Path to LinuxKit YAML config, relative to ProjectDir or absolute.
|
||||||
Formats []string // Output formats (iso, qcow2, raw, vmdk)
|
Formats []string // Output formats (iso, qcow2, raw, vmdk)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Builder defines the interface for project-specific build implementations.
|
// Builder defines the interface for project-specific build implementations.
|
||||||
|
//
|
||||||
|
// var b build.Builder = builders.NewGoBuilder()
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, targets)
|
||||||
type Builder interface {
|
type Builder interface {
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
Name() string
|
Name() string
|
||||||
|
|
|
||||||
|
|
@ -3,13 +3,10 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -17,26 +14,35 @@ import (
|
||||||
|
|
||||||
// CPPBuilder implements the Builder interface for C++ projects using CMake + Conan.
|
// CPPBuilder implements the Builder interface for C++ projects using CMake + Conan.
|
||||||
// It wraps the Makefile-based build system from the .core/build submodule.
|
// It wraps the Makefile-based build system from the .core/build submodule.
|
||||||
|
//
|
||||||
|
// b := builders.NewCPPBuilder()
|
||||||
type CPPBuilder struct{}
|
type CPPBuilder struct{}
|
||||||
|
|
||||||
// NewCPPBuilder creates a new CPPBuilder instance.
|
// NewCPPBuilder creates a new CPPBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewCPPBuilder()
|
||||||
func NewCPPBuilder() *CPPBuilder {
|
func NewCPPBuilder() *CPPBuilder {
|
||||||
return &CPPBuilder{}
|
return &CPPBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "cpp"
|
||||||
func (b *CPPBuilder) Name() string {
|
func (b *CPPBuilder) Name() string {
|
||||||
return "cpp"
|
return "cpp"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if this builder can handle the project in the given directory.
|
// Detect checks if this builder can handle the project (checks for CMakeLists.txt).
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *CPPBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *CPPBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
return build.IsCPPProject(fs, dir), nil
|
return build.IsCPPProject(fs, dir), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build compiles the C++ project using Make targets.
|
// Build compiles the C++ project using Make targets.
|
||||||
// The build flow is: make configure → make build → make package.
|
// The build flow is: make configure → make build → make package.
|
||||||
// Cross-compilation is handled via Conan profiles specified in .core/build.yaml.
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
func (b *CPPBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *CPPBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
if cfg == nil {
|
if cfg == nil {
|
||||||
return nil, coreerr.E("CPPBuilder.Build", "config is nil", nil)
|
return nil, coreerr.E("CPPBuilder.Build", "config is nil", nil)
|
||||||
|
|
@ -83,20 +89,20 @@ func (b *CPPBuilder) buildTarget(ctx context.Context, cfg *build.Config, target
|
||||||
|
|
||||||
// buildHost runs the standard make configure → make build → make package flow.
|
// buildHost runs the standard make configure → make build → make package flow.
|
||||||
func (b *CPPBuilder) buildHost(ctx context.Context, cfg *build.Config, target build.Target) ([]build.Artifact, error) {
|
func (b *CPPBuilder) buildHost(ctx context.Context, cfg *build.Config, target build.Target) ([]build.Artifact, error) {
|
||||||
fmt.Printf("Building C++ project for %s/%s (host)\n", target.OS, target.Arch)
|
core.Print(nil, "Building C++ project for %s/%s (host)", target.OS, target.Arch)
|
||||||
|
|
||||||
// Step 1: Configure (runs conan install + cmake configure)
|
// Step 1: Configure (runs conan install + cmake configure)
|
||||||
if err := b.runMake(ctx, cfg.ProjectDir, "configure"); err != nil {
|
if err := b.runMake(ctx, cfg, "configure"); err != nil {
|
||||||
return nil, coreerr.E("CPPBuilder.buildHost", "configure failed", err)
|
return nil, coreerr.E("CPPBuilder.buildHost", "configure failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Step 2: Build
|
// Step 2: Build
|
||||||
if err := b.runMake(ctx, cfg.ProjectDir, "build"); err != nil {
|
if err := b.runMake(ctx, cfg, "build"); err != nil {
|
||||||
return nil, coreerr.E("CPPBuilder.buildHost", "build failed", err)
|
return nil, coreerr.E("CPPBuilder.buildHost", "build failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Step 3: Package
|
// Step 3: Package
|
||||||
if err := b.runMake(ctx, cfg.ProjectDir, "package"); err != nil {
|
if err := b.runMake(ctx, cfg, "package"); err != nil {
|
||||||
return nil, coreerr.E("CPPBuilder.buildHost", "package failed", err)
|
return nil, coreerr.E("CPPBuilder.buildHost", "package failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -113,10 +119,10 @@ func (b *CPPBuilder) buildCross(ctx context.Context, cfg *build.Config, target b
|
||||||
return nil, coreerr.E("CPPBuilder.buildCross", "no Conan profile mapped for target "+target.OS+"/"+target.Arch, nil)
|
return nil, coreerr.E("CPPBuilder.buildCross", "no Conan profile mapped for target "+target.OS+"/"+target.Arch, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("Building C++ project for %s/%s (cross: %s)\n", target.OS, target.Arch, profile)
|
core.Print(nil, "Building C++ project for %s/%s (cross: %s)", target.OS, target.Arch, profile)
|
||||||
|
|
||||||
// The Makefile exposes each profile as a top-level target
|
// The Makefile exposes each profile as a top-level target
|
||||||
if err := b.runMake(ctx, cfg.ProjectDir, profile); err != nil {
|
if err := b.runMake(ctx, cfg, profile); err != nil {
|
||||||
return nil, coreerr.E("CPPBuilder.buildCross", "cross-compile for "+profile+" failed", err)
|
return nil, coreerr.E("CPPBuilder.buildCross", "cross-compile for "+profile+" failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -124,14 +130,13 @@ func (b *CPPBuilder) buildCross(ctx context.Context, cfg *build.Config, target b
|
||||||
}
|
}
|
||||||
|
|
||||||
// runMake executes a make target in the project directory.
|
// runMake executes a make target in the project directory.
|
||||||
func (b *CPPBuilder) runMake(ctx context.Context, projectDir string, target string) error {
|
func (b *CPPBuilder) runMake(ctx context.Context, cfg *build.Config, target string) error {
|
||||||
cmd := exec.CommandContext(ctx, "make", target)
|
makeCommand, err := b.resolveMakeCli()
|
||||||
cmd.Dir = projectDir
|
if err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
return err
|
||||||
cmd.Stderr = os.Stderr
|
}
|
||||||
cmd.Env = os.Environ()
|
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
if err := ax.ExecWithEnv(ctx, cfg.ProjectDir, cfg.Env, makeCommand, target); err != nil {
|
||||||
return coreerr.E("CPPBuilder.runMake", "make "+target+" failed", err)
|
return coreerr.E("CPPBuilder.runMake", "make "+target+" failed", err)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -139,7 +144,7 @@ func (b *CPPBuilder) runMake(ctx context.Context, projectDir string, target stri
|
||||||
|
|
||||||
// findArtifacts searches for built packages in build/packages/.
|
// findArtifacts searches for built packages in build/packages/.
|
||||||
func (b *CPPBuilder) findArtifacts(fs io.Medium, projectDir string, target build.Target) ([]build.Artifact, error) {
|
func (b *CPPBuilder) findArtifacts(fs io.Medium, projectDir string, target build.Target) ([]build.Artifact, error) {
|
||||||
packagesDir := filepath.Join(projectDir, "build", "packages")
|
packagesDir := ax.Join(projectDir, "build", "packages")
|
||||||
|
|
||||||
if !fs.IsDir(packagesDir) {
|
if !fs.IsDir(packagesDir) {
|
||||||
// Fall back to searching build/release/src/ for raw binaries
|
// Fall back to searching build/release/src/ for raw binaries
|
||||||
|
|
@ -159,12 +164,12 @@ func (b *CPPBuilder) findArtifacts(fs io.Medium, projectDir string, target build
|
||||||
|
|
||||||
name := entry.Name()
|
name := entry.Name()
|
||||||
// Skip checksum files and hidden files
|
// Skip checksum files and hidden files
|
||||||
if strings.HasSuffix(name, ".sha256") || strings.HasPrefix(name, ".") {
|
if core.HasSuffix(name, ".sha256") || core.HasPrefix(name, ".") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
artifacts = append(artifacts, build.Artifact{
|
artifacts = append(artifacts, build.Artifact{
|
||||||
Path: filepath.Join(packagesDir, name),
|
Path: ax.Join(packagesDir, name),
|
||||||
OS: target.OS,
|
OS: target.OS,
|
||||||
Arch: target.Arch,
|
Arch: target.Arch,
|
||||||
})
|
})
|
||||||
|
|
@ -175,7 +180,7 @@ func (b *CPPBuilder) findArtifacts(fs io.Medium, projectDir string, target build
|
||||||
|
|
||||||
// findBinaries searches for compiled binaries in build/release/src/.
|
// findBinaries searches for compiled binaries in build/release/src/.
|
||||||
func (b *CPPBuilder) findBinaries(fs io.Medium, projectDir string, target build.Target) ([]build.Artifact, error) {
|
func (b *CPPBuilder) findBinaries(fs io.Medium, projectDir string, target build.Target) ([]build.Artifact, error) {
|
||||||
binDir := filepath.Join(projectDir, "build", "release", "src")
|
binDir := ax.Join(projectDir, "build", "release", "src")
|
||||||
|
|
||||||
if !fs.IsDir(binDir) {
|
if !fs.IsDir(binDir) {
|
||||||
return nil, coreerr.E("CPPBuilder.findBinaries", "no build output found in "+binDir, nil)
|
return nil, coreerr.E("CPPBuilder.findBinaries", "no build output found in "+binDir, nil)
|
||||||
|
|
@ -194,16 +199,16 @@ func (b *CPPBuilder) findBinaries(fs io.Medium, projectDir string, target build.
|
||||||
|
|
||||||
name := entry.Name()
|
name := entry.Name()
|
||||||
// Skip non-executable files (libraries, cmake files, etc.)
|
// Skip non-executable files (libraries, cmake files, etc.)
|
||||||
if strings.HasSuffix(name, ".a") || strings.HasSuffix(name, ".o") ||
|
if core.HasSuffix(name, ".a") || core.HasSuffix(name, ".o") ||
|
||||||
strings.HasSuffix(name, ".cmake") || strings.HasPrefix(name, ".") {
|
core.HasSuffix(name, ".cmake") || core.HasPrefix(name, ".") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
fullPath := filepath.Join(binDir, name)
|
fullPath := ax.Join(binDir, name)
|
||||||
|
|
||||||
// On Unix, check if file is executable
|
// On Unix, check if file is executable
|
||||||
if target.OS != "windows" {
|
if target.OS != "windows" {
|
||||||
info, err := os.Stat(fullPath)
|
info, err := fs.Stat(fullPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
@ -244,10 +249,28 @@ func (b *CPPBuilder) targetToProfile(target build.Target) string {
|
||||||
|
|
||||||
// validateMake checks if make is available.
|
// validateMake checks if make is available.
|
||||||
func (b *CPPBuilder) validateMake() error {
|
func (b *CPPBuilder) validateMake() error {
|
||||||
if _, err := exec.LookPath("make"); err != nil {
|
_, err := b.resolveMakeCli()
|
||||||
return coreerr.E("CPPBuilder.validateMake", "make not found. Install build-essential (Linux) or Xcode Command Line Tools (macOS)", nil)
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveMakeCli returns the executable path for make or gmake.
|
||||||
|
func (b *CPPBuilder) resolveMakeCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/bin/make",
|
||||||
|
"/usr/local/bin/make",
|
||||||
|
"/opt/homebrew/bin/make",
|
||||||
|
"/usr/local/bin/gmake",
|
||||||
|
"/opt/homebrew/bin/gmake",
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
|
||||||
|
command, err := ax.ResolveCommand("make", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("CPPBuilder.resolveMakeCli", "make not found. Install build-essential (Linux) or Xcode Command Line Tools (macOS)", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure CPPBuilder implements the Builder interface.
|
// Ensure CPPBuilder implements the Builder interface.
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,27 @@
|
||||||
package builders
|
package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestCPPBuilder_Name_Good(t *testing.T) {
|
func TestCPP_CPPBuilderName_Good(t *testing.T) {
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
assert.Equal(t, "cpp", builder.Name())
|
assert.Equal(t, "cpp", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_Detect_Good(t *testing.T) {
|
func TestCPP_CPPBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
|
|
||||||
t.Run("detects C++ project with CMakeLists.txt", func(t *testing.T) {
|
t.Run("detects C++ project with CMakeLists.txt", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "CMakeLists.txt"), []byte("cmake_minimum_required(VERSION 3.16)"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "CMakeLists.txt"), []byte("cmake_minimum_required(VERSION 3.16)"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
|
|
@ -32,7 +32,7 @@ func TestCPPBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for non-C++ project", func(t *testing.T) {
|
t.Run("returns false for non-C++ project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
|
|
@ -51,7 +51,7 @@ func TestCPPBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_Build_Bad(t *testing.T) {
|
func TestCPP_CPPBuilderBuild_Bad(t *testing.T) {
|
||||||
t.Run("returns error for nil config", func(t *testing.T) {
|
t.Run("returns error for nil config", func(t *testing.T) {
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
artifacts, err := builder.Build(nil, nil, []build.Target{{OS: "linux", Arch: "amd64"}})
|
artifacts, err := builder.Build(nil, nil, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
|
@ -61,7 +61,7 @@ func TestCPPBuilder_Build_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_TargetToProfile_Good(t *testing.T) {
|
func TestCPP_CPPBuilderTargetToProfile_Good(t *testing.T) {
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
|
|
@ -84,7 +84,7 @@ func TestCPPBuilder_TargetToProfile_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_TargetToProfile_Bad(t *testing.T) {
|
func TestCPP_CPPBuilderTargetToProfile_Bad(t *testing.T) {
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
|
|
||||||
t.Run("returns empty for unknown target", func(t *testing.T) {
|
t.Run("returns empty for unknown target", func(t *testing.T) {
|
||||||
|
|
@ -93,18 +93,18 @@ func TestCPPBuilder_TargetToProfile_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_FindArtifacts_Good(t *testing.T) {
|
func TestCPP_CPPBuilderFindArtifacts_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
|
|
||||||
t.Run("finds packages in build/packages", func(t *testing.T) {
|
t.Run("finds packages in build/packages", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
packagesDir := filepath.Join(dir, "build", "packages")
|
packagesDir := ax.Join(dir, "build", "packages")
|
||||||
require.NoError(t, os.MkdirAll(packagesDir, 0755))
|
require.NoError(t, ax.MkdirAll(packagesDir, 0755))
|
||||||
|
|
||||||
// Create mock package files
|
// Create mock package files
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(packagesDir, "test-1.0-linux-x86_64.tar.xz"), []byte("pkg"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(packagesDir, "test-1.0-linux-x86_64.tar.xz"), []byte("pkg"), 0644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(packagesDir, "test-1.0-linux-x86_64.tar.xz.sha256"), []byte("checksum"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(packagesDir, "test-1.0-linux-x86_64.tar.xz.sha256"), []byte("checksum"), 0644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(packagesDir, "test-1.0-linux-x86_64.rpm"), []byte("rpm"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(packagesDir, "test-1.0-linux-x86_64.rpm"), []byte("rpm"), 0644))
|
||||||
|
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
target := build.Target{OS: "linux", Arch: "amd64"}
|
target := build.Target{OS: "linux", Arch: "amd64"}
|
||||||
|
|
@ -116,21 +116,21 @@ func TestCPPBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
for _, a := range artifacts {
|
for _, a := range artifacts {
|
||||||
assert.Equal(t, "linux", a.OS)
|
assert.Equal(t, "linux", a.OS)
|
||||||
assert.Equal(t, "amd64", a.Arch)
|
assert.Equal(t, "amd64", a.Arch)
|
||||||
assert.False(t, filepath.Ext(a.Path) == ".sha256")
|
assert.False(t, ax.Ext(a.Path) == ".sha256")
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("falls back to binaries in build/release/src", func(t *testing.T) {
|
t.Run("falls back to binaries in build/release/src", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
binDir := filepath.Join(dir, "build", "release", "src")
|
binDir := ax.Join(dir, "build", "release", "src")
|
||||||
require.NoError(t, os.MkdirAll(binDir, 0755))
|
require.NoError(t, ax.MkdirAll(binDir, 0755))
|
||||||
|
|
||||||
// Create mock binary (executable)
|
// Create mock binary (executable)
|
||||||
binPath := filepath.Join(binDir, "test-daemon")
|
binPath := ax.Join(binDir, "test-daemon")
|
||||||
require.NoError(t, os.WriteFile(binPath, []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(binPath, []byte("binary"), 0755))
|
||||||
|
|
||||||
// Create a library (should be skipped)
|
// Create a library (should be skipped)
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(binDir, "libcrypto.a"), []byte("lib"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(binDir, "libcrypto.a"), []byte("lib"), 0644))
|
||||||
|
|
||||||
builder := NewCPPBuilder()
|
builder := NewCPPBuilder()
|
||||||
target := build.Target{OS: "linux", Arch: "amd64"}
|
target := build.Target{OS: "linux", Arch: "amd64"}
|
||||||
|
|
@ -143,7 +143,28 @@ func TestCPPBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCPPBuilder_Interface_Good(t *testing.T) {
|
func TestCPP_CPPBuilderResolveMakeCli_Good(t *testing.T) {
|
||||||
|
builder := NewCPPBuilder()
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "make")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := builder.resolveMakeCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCPP_CPPBuilderResolveMakeCli_Bad(t *testing.T) {
|
||||||
|
builder := NewCPPBuilder()
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := builder.resolveMakeCli(ax.Join(t.TempDir(), "missing-make"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "make not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCPP_CPPBuilderInterface_Good(t *testing.T) {
|
||||||
var _ build.Builder = (*CPPBuilder)(nil)
|
var _ build.Builder = (*CPPBuilder)(nil)
|
||||||
var _ build.Builder = NewCPPBuilder()
|
var _ build.Builder = NewCPPBuilder()
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,60 +3,70 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"runtime"
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DockerBuilder builds Docker images.
|
// DockerBuilder builds Docker images.
|
||||||
|
//
|
||||||
|
// b := builders.NewDockerBuilder()
|
||||||
type DockerBuilder struct{}
|
type DockerBuilder struct{}
|
||||||
|
|
||||||
// NewDockerBuilder creates a new Docker builder.
|
// NewDockerBuilder creates a new Docker builder.
|
||||||
|
//
|
||||||
|
// b := builders.NewDockerBuilder()
|
||||||
func NewDockerBuilder() *DockerBuilder {
|
func NewDockerBuilder() *DockerBuilder {
|
||||||
return &DockerBuilder{}
|
return &DockerBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "docker"
|
||||||
func (b *DockerBuilder) Name() string {
|
func (b *DockerBuilder) Name() string {
|
||||||
return "docker"
|
return "docker"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if a Dockerfile exists in the directory.
|
// Detect checks if a Dockerfile or Containerfile exists in the directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *DockerBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *DockerBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
dockerfilePath := filepath.Join(dir, "Dockerfile")
|
if build.ResolveDockerfilePath(fs, dir) != "" {
|
||||||
if fs.IsFile(dockerfilePath) {
|
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build builds Docker images for the specified targets.
|
// Build builds Docker images for the specified targets.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
// Validate docker CLI is available
|
dockerCommand, err := b.resolveDockerCli()
|
||||||
if err := b.validateDockerCli(); err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure buildx is available
|
// Ensure buildx is available
|
||||||
if err := b.ensureBuildx(ctx); err != nil {
|
if err := b.ensureBuildx(ctx, dockerCommand); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine Dockerfile path
|
// Determine Docker manifest path
|
||||||
dockerfile := cfg.Dockerfile
|
dockerfile := cfg.Dockerfile
|
||||||
if dockerfile == "" {
|
if dockerfile == "" {
|
||||||
dockerfile = filepath.Join(cfg.ProjectDir, "Dockerfile")
|
dockerfile = build.ResolveDockerfilePath(cfg.FS, cfg.ProjectDir)
|
||||||
|
} else if !ax.IsAbs(dockerfile) {
|
||||||
|
dockerfile = ax.Join(cfg.ProjectDir, dockerfile)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate Dockerfile exists
|
// Validate Dockerfile exists
|
||||||
if !cfg.FS.IsFile(dockerfile) {
|
if dockerfile == "" || !cfg.FS.IsFile(dockerfile) {
|
||||||
return nil, coreerr.E("DockerBuilder.Build", "Dockerfile not found: "+dockerfile, nil)
|
return nil, coreerr.E("DockerBuilder.Build", "Dockerfile or Containerfile not found", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine image name
|
// Determine image name
|
||||||
|
|
@ -65,18 +75,18 @@ func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []
|
||||||
imageName = cfg.Name
|
imageName = cfg.Name
|
||||||
}
|
}
|
||||||
if imageName == "" {
|
if imageName == "" {
|
||||||
imageName = filepath.Base(cfg.ProjectDir)
|
imageName = ax.Base(cfg.ProjectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build platform string from targets
|
// Build platform string from targets
|
||||||
var platforms []string
|
buildTargets := targets
|
||||||
for _, t := range targets {
|
if len(buildTargets) == 0 {
|
||||||
platforms = append(platforms, fmt.Sprintf("%s/%s", t.OS, t.Arch))
|
buildTargets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If no targets specified, use current platform
|
var platforms []string
|
||||||
if len(platforms) == 0 {
|
for _, t := range buildTargets {
|
||||||
platforms = []string{"linux/amd64"}
|
platforms = append(platforms, core.Sprintf("%s/%s", t.OS, t.Arch))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Determine registry
|
// Determine registry
|
||||||
|
|
@ -98,13 +108,13 @@ func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []
|
||||||
var imageRefs []string
|
var imageRefs []string
|
||||||
for _, tag := range tags {
|
for _, tag := range tags {
|
||||||
// Expand version template
|
// Expand version template
|
||||||
expandedTag := strings.ReplaceAll(tag, "{{.Version}}", cfg.Version)
|
expandedTag := core.Replace(tag, "{{.Version}}", cfg.Version)
|
||||||
expandedTag = strings.ReplaceAll(expandedTag, "{{Version}}", cfg.Version)
|
expandedTag = core.Replace(expandedTag, "{{Version}}", cfg.Version)
|
||||||
|
|
||||||
if registry != "" {
|
if registry != "" {
|
||||||
imageRefs = append(imageRefs, fmt.Sprintf("%s/%s:%s", registry, imageName, expandedTag))
|
imageRefs = append(imageRefs, core.Sprintf("%s/%s:%s", registry, imageName, expandedTag))
|
||||||
} else {
|
} else {
|
||||||
imageRefs = append(imageRefs, fmt.Sprintf("%s:%s", imageName, expandedTag))
|
imageRefs = append(imageRefs, core.Sprintf("%s:%s", imageName, expandedTag))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -112,7 +122,7 @@ func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []
|
||||||
args := []string{"buildx", "build"}
|
args := []string{"buildx", "build"}
|
||||||
|
|
||||||
// Multi-platform support
|
// Multi-platform support
|
||||||
args = append(args, "--platform", strings.Join(platforms, ","))
|
args = append(args, "--platform", core.Join(",", platforms...))
|
||||||
|
|
||||||
// Add all tags
|
// Add all tags
|
||||||
for _, ref := range imageRefs {
|
for _, ref := range imageRefs {
|
||||||
|
|
@ -124,28 +134,30 @@ func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []
|
||||||
|
|
||||||
// Build arguments
|
// Build arguments
|
||||||
for k, v := range cfg.BuildArgs {
|
for k, v := range cfg.BuildArgs {
|
||||||
expandedValue := strings.ReplaceAll(v, "{{.Version}}", cfg.Version)
|
expandedValue := core.Replace(v, "{{.Version}}", cfg.Version)
|
||||||
expandedValue = strings.ReplaceAll(expandedValue, "{{Version}}", cfg.Version)
|
expandedValue = core.Replace(expandedValue, "{{Version}}", cfg.Version)
|
||||||
args = append(args, "--build-arg", fmt.Sprintf("%s=%s", k, expandedValue))
|
args = append(args, "--build-arg", core.Sprintf("%s=%s", k, expandedValue))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Always add VERSION build arg if version is set
|
// Always add VERSION build arg if version is set
|
||||||
if cfg.Version != "" {
|
if cfg.Version != "" {
|
||||||
args = append(args, "--build-arg", fmt.Sprintf("VERSION=%s", cfg.Version))
|
args = append(args, "--build-arg", core.Sprintf("VERSION=%s", cfg.Version))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Output to local docker images or push
|
safeImageName := strings.ReplaceAll(imageName, "/", "_")
|
||||||
|
|
||||||
|
// Output to local docker images or push.
|
||||||
|
// `--load` only works for a single target, so multi-platform local builds
|
||||||
|
// fall back to an OCI archive on disk.
|
||||||
|
useLoad := cfg.Load && !cfg.Push && len(buildTargets) == 1
|
||||||
if cfg.Push {
|
if cfg.Push {
|
||||||
args = append(args, "--push")
|
args = append(args, "--push")
|
||||||
|
} else if useLoad {
|
||||||
|
args = append(args, "--load")
|
||||||
} else {
|
} else {
|
||||||
// For multi-platform builds without push, we need to load or output somewhere
|
// Local Docker builds emit an OCI archive so the build output is a file.
|
||||||
if len(platforms) == 1 {
|
outputPath := ax.Join(cfg.OutputDir, core.Sprintf("%s.tar", safeImageName))
|
||||||
args = append(args, "--load")
|
args = append(args, "--output", core.Sprintf("type=oci,dest=%s", outputPath))
|
||||||
} else {
|
|
||||||
// Multi-platform builds can't use --load, output to tarball
|
|
||||||
outputPath := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s.tar", imageName))
|
|
||||||
args = append(args, "--output", fmt.Sprintf("type=oci,dest=%s", outputPath))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build context (project directory)
|
// Build context (project directory)
|
||||||
|
|
@ -156,58 +168,58 @@ func (b *DockerBuilder) Build(ctx context.Context, cfg *build.Config, targets []
|
||||||
return nil, coreerr.E("DockerBuilder.Build", "failed to create output directory", err)
|
return nil, coreerr.E("DockerBuilder.Build", "failed to create output directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute build
|
core.Print(nil, "Building Docker image: %s", imageName)
|
||||||
cmd := exec.CommandContext(ctx, "docker", args...)
|
core.Print(nil, " Platforms: %s", core.Join(", ", platforms...))
|
||||||
cmd.Dir = cfg.ProjectDir
|
core.Print(nil, " Tags: %s", core.Join(", ", imageRefs...))
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
fmt.Printf("Building Docker image: %s\n", imageName)
|
// Build once for the full platform set. Docker buildx produces a single
|
||||||
fmt.Printf(" Platforms: %s\n", strings.Join(platforms, ", "))
|
// multi-arch image or OCI archive from the combined platform list.
|
||||||
fmt.Printf(" Tags: %s\n", strings.Join(imageRefs, ", "))
|
if err := ax.ExecWithEnv(ctx, cfg.ProjectDir, cfg.Env, dockerCommand, args...); err != nil {
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return nil, coreerr.E("DockerBuilder.Build", "buildx build failed", err)
|
return nil, coreerr.E("DockerBuilder.Build", "buildx build failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create artifacts for each platform
|
artifactPath := imageRefs[0]
|
||||||
var artifacts []build.Artifact
|
if !cfg.Push && !useLoad {
|
||||||
for _, t := range targets {
|
artifactPath = ax.Join(cfg.OutputDir, core.Sprintf("%s.tar", safeImageName))
|
||||||
artifacts = append(artifacts, build.Artifact{
|
|
||||||
Path: imageRefs[0], // Primary image reference
|
|
||||||
OS: t.OS,
|
|
||||||
Arch: t.Arch,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return artifacts, nil
|
primaryTarget := buildTargets[0]
|
||||||
|
return []build.Artifact{{
|
||||||
|
Path: artifactPath,
|
||||||
|
OS: primaryTarget.OS,
|
||||||
|
Arch: primaryTarget.Arch,
|
||||||
|
}}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// validateDockerCli checks if the docker CLI is available.
|
// resolveDockerCli returns the executable path for the docker CLI.
|
||||||
func (b *DockerBuilder) validateDockerCli() error {
|
func (b *DockerBuilder) resolveDockerCli(paths ...string) (string, error) {
|
||||||
cmd := exec.Command("docker", "--version")
|
if len(paths) == 0 {
|
||||||
if err := cmd.Run(); err != nil {
|
paths = []string{
|
||||||
return coreerr.E("DockerBuilder.validateDockerCli", "docker CLI not found. Install it from https://docs.docker.com/get-docker/", err)
|
"/usr/local/bin/docker",
|
||||||
|
"/opt/homebrew/bin/docker",
|
||||||
|
"/Applications/Docker.app/Contents/Resources/bin/docker",
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
|
||||||
|
command, err := ax.ResolveCommand("docker", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("DockerBuilder.resolveDockerCli", "docker CLI not found. Install it from https://docs.docker.com/get-docker/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// ensureBuildx ensures docker buildx is available and has a builder.
|
// ensureBuildx ensures docker buildx is available and has a builder.
|
||||||
func (b *DockerBuilder) ensureBuildx(ctx context.Context) error {
|
func (b *DockerBuilder) ensureBuildx(ctx context.Context, dockerCommand string) error {
|
||||||
// Check if buildx is available
|
// Check if buildx is available
|
||||||
cmd := exec.CommandContext(ctx, "docker", "buildx", "version")
|
if err := ax.Exec(ctx, dockerCommand, "buildx", "version"); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("DockerBuilder.ensureBuildx", "buildx is not available. Install it from https://docs.docker.com/buildx/working-with-buildx/", err)
|
return coreerr.E("DockerBuilder.ensureBuildx", "buildx is not available. Install it from https://docs.docker.com/buildx/working-with-buildx/", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if we have a builder, create one if not
|
// Check if we have a builder, create one if not
|
||||||
cmd = exec.CommandContext(ctx, "docker", "buildx", "inspect", "--bootstrap")
|
if err := ax.Exec(ctx, dockerCommand, "buildx", "inspect", "--bootstrap"); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
// Try to create a builder
|
// Try to create a builder
|
||||||
cmd = exec.CommandContext(ctx, "docker", "buildx", "create", "--use", "--bootstrap")
|
if err := ax.Exec(ctx, dockerCommand, "buildx", "create", "--use", "--bootstrap"); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("DockerBuilder.ensureBuildx", "failed to create buildx builder", err)
|
return coreerr.E("DockerBuilder.ensureBuildx", "failed to create buildx builder", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,73 @@
|
||||||
package builders
|
package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"runtime"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
coreio "dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestDockerBuilder_Name_Good(t *testing.T) {
|
func setupFakeDockerToolchain(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${DOCKER_BUILD_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$*" >> "$log_file"
|
||||||
|
env | sort >> "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${1:-}" = "buildx" ] && [ "${2:-}" = "build" ]; then
|
||||||
|
dest=""
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
if [ "$1" = "--output" ]; then
|
||||||
|
shift
|
||||||
|
dest="$(printf '%s' "$1" | sed -n 's#type=oci,dest=##p')"
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
if [ -n "$dest" ]; then
|
||||||
|
mkdir -p "$(dirname "$dest")"
|
||||||
|
printf 'oci archive\n' > "$dest"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
`
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(binDir, "docker"), []byte(script), 0o755))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderName_Good(t *testing.T) {
|
||||||
builder := NewDockerBuilder()
|
builder := NewDockerBuilder()
|
||||||
assert.Equal(t, "docker", builder.Name())
|
assert.Equal(t, "docker", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerBuilder_Detect_Good(t *testing.T) {
|
func TestDocker_DockerBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := coreio.Local
|
||||||
|
|
||||||
t.Run("detects Dockerfile", func(t *testing.T) {
|
t.Run("detects Dockerfile", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "Dockerfile"), []byte("FROM alpine\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "Dockerfile"), []byte("FROM alpine\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Containerfile", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "Containerfile"), []byte("FROM alpine\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewDockerBuilder()
|
builder := NewDockerBuilder()
|
||||||
|
|
@ -42,7 +88,7 @@ func TestDockerBuilder_Detect_Good(t *testing.T) {
|
||||||
t.Run("returns false for non-Docker project", func(t *testing.T) {
|
t.Run("returns false for non-Docker project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create a Go project instead
|
// Create a Go project instead
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewDockerBuilder()
|
builder := NewDockerBuilder()
|
||||||
|
|
@ -53,7 +99,7 @@ func TestDockerBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("does not match docker-compose.yml", func(t *testing.T) {
|
t.Run("does not match docker-compose.yml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "docker-compose.yml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "docker-compose.yml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewDockerBuilder()
|
builder := NewDockerBuilder()
|
||||||
|
|
@ -64,9 +110,9 @@ func TestDockerBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("does not match Dockerfile in subdirectory", func(t *testing.T) {
|
t.Run("does not match Dockerfile in subdirectory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
subDir := filepath.Join(dir, "subdir")
|
subDir := ax.Join(dir, "subdir")
|
||||||
require.NoError(t, os.MkdirAll(subDir, 0755))
|
require.NoError(t, ax.MkdirAll(subDir, 0755))
|
||||||
err := os.WriteFile(filepath.Join(subDir, "Dockerfile"), []byte("FROM alpine\n"), 0644)
|
err := ax.WriteFile(ax.Join(subDir, "Dockerfile"), []byte("FROM alpine\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewDockerBuilder()
|
builder := NewDockerBuilder()
|
||||||
|
|
@ -76,8 +122,205 @@ func TestDockerBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerBuilder_Interface_Good(t *testing.T) {
|
func TestDocker_DockerBuilderInterface_Good(t *testing.T) {
|
||||||
// Verify DockerBuilder implements Builder interface
|
// Verify DockerBuilder implements Builder interface
|
||||||
var _ build.Builder = (*DockerBuilder)(nil)
|
var _ build.Builder = (*DockerBuilder)(nil)
|
||||||
var _ build.Builder = NewDockerBuilder()
|
var _ build.Builder = NewDockerBuilder()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderResolveDockerCli_Good(t *testing.T) {
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "docker")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := builder.resolveDockerCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderResolveDockerCli_Bad(t *testing.T) {
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := builder.resolveDockerCli(ax.Join(t.TempDir(), "missing-docker"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "docker CLI not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderBuild_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeDockerToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Containerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "docker.log")
|
||||||
|
t.Setenv("DOCKER_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: coreio.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "sample-app",
|
||||||
|
Image: "owner/repo",
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: "linux", Arch: "amd64"},
|
||||||
|
{OS: "linux", Arch: "arm64"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
expectedPath := ax.Join(outputDir, "owner_repo.tar")
|
||||||
|
assert.Equal(t, expectedPath, artifacts[0].Path)
|
||||||
|
assert.Equal(t, "linux", artifacts[0].OS)
|
||||||
|
assert.Equal(t, "amd64", artifacts[0].Arch)
|
||||||
|
assert.FileExists(t, expectedPath)
|
||||||
|
|
||||||
|
logContent, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
log := string(logContent)
|
||||||
|
assert.Equal(t, 1, strings.Count(log, "buildx build"))
|
||||||
|
assert.Contains(t, log, "--platform")
|
||||||
|
assert.Contains(t, log, "linux/amd64,linux/arm64")
|
||||||
|
assert.Contains(t, log, "--output")
|
||||||
|
assert.Contains(t, log, "type=oci,dest="+expectedPath)
|
||||||
|
|
||||||
|
assert.Contains(t, log, "FOO=bar")
|
||||||
|
|
||||||
|
artifacts, err = builder.Build(context.Background(), cfg, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderBuild_ResolvesRelativeDockerfile_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeDockerToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
dockerfilePath := ax.Join(projectDir, "dockerfiles", "Dockerfile.app")
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Dir(dockerfilePath), 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "docker.log")
|
||||||
|
t.Setenv("DOCKER_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: coreio.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Dockerfile: "dockerfiles/Dockerfile.app",
|
||||||
|
Image: "owner/repo",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, ax.Join(outputDir, "owner_repo.tar"))
|
||||||
|
|
||||||
|
logContent, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
log := string(logContent)
|
||||||
|
|
||||||
|
assert.Contains(t, log, "-f")
|
||||||
|
assert.Contains(t, log, dockerfilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderBuild_Containerfile_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeDockerToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Containerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: coreio.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Image: "owner/repo",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, ax.Join(outputDir, "owner_repo.tar"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerBuilderBuild_Load_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeDockerToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Dockerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "docker.log")
|
||||||
|
t.Setenv("DOCKER_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewDockerBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: coreio.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Image: "owner/repo",
|
||||||
|
Load: true,
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: "linux", Arch: "amd64"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
assert.Equal(t, "ghcr.io/owner/repo:latest", artifacts[0].Path)
|
||||||
|
assert.Equal(t, "linux", artifacts[0].OS)
|
||||||
|
assert.Equal(t, "amd64", artifacts[0].Arch)
|
||||||
|
assert.DirExists(t, outputDir)
|
||||||
|
|
||||||
|
logContent, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
log := string(logContent)
|
||||||
|
assert.Contains(t, log, "buildx build")
|
||||||
|
assert.Contains(t, log, "--load")
|
||||||
|
assert.NotContains(t, log, "--output")
|
||||||
|
}
|
||||||
|
|
|
||||||
236
pkg/build/builders/docs.go
Normal file
236
pkg/build/builders/docs.go
Normal file
|
|
@ -0,0 +1,236 @@
|
||||||
|
// Package builders provides build implementations for different project types.
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
stdio "io"
|
||||||
|
"runtime"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DocsBuilder builds MkDocs projects.
|
||||||
|
//
|
||||||
|
// b := builders.NewDocsBuilder()
|
||||||
|
type DocsBuilder struct{}
|
||||||
|
|
||||||
|
// NewDocsBuilder creates a new DocsBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewDocsBuilder()
|
||||||
|
func NewDocsBuilder() *DocsBuilder {
|
||||||
|
return &DocsBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "docs"
|
||||||
|
func (b *DocsBuilder) Name() string {
|
||||||
|
return "docs"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect checks if this builder can handle the project in the given directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
|
func (b *DocsBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
|
return build.IsMkDocsProject(fs, dir), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build runs mkdocs build and packages the generated site into a zip archive.
//
// When targets is empty it falls back to the current host platform. For each
// target it creates <outputDir>/<os>_<arch>/site, runs the mkdocs CLI with a
// target-specific environment, and zips the generated site into
// <os>_<arch>/<name>.zip. On a mid-loop failure the artifacts built so far
// are returned alongside the error.
//
//	artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
func (b *DocsBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
	if cfg == nil {
		return nil, coreerr.E("DocsBuilder.Build", "config is nil", nil)
	}

	// Default to the host platform when no explicit targets are supplied.
	if len(targets) == 0 {
		targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
	}

	// Default the output directory to <project>/dist and ensure it exists.
	outputDir := cfg.OutputDir
	if outputDir == "" {
		outputDir = ax.Join(cfg.ProjectDir, "dist")
	}
	if err := cfg.FS.EnsureDir(outputDir); err != nil {
		return nil, coreerr.E("DocsBuilder.Build", "failed to create output directory", err)
	}

	configPath := b.resolveMkDocsConfigPath(cfg.FS, cfg.ProjectDir)
	if configPath == "" {
		return nil, coreerr.E("DocsBuilder.Build", "mkdocs.yml or mkdocs.yaml not found", nil)
	}

	mkdocsCommand, err := b.resolveMkDocsCli()
	if err != nil {
		return nil, err
	}

	var artifacts []build.Artifact
	for _, target := range targets {
		// Each target gets its own <os>_<arch> directory under outputDir.
		platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
		if err := cfg.FS.EnsureDir(platformDir); err != nil {
			return artifacts, coreerr.E("DocsBuilder.Build", "failed to create platform directory", err)
		}

		siteDir := ax.Join(platformDir, "site")
		if err := cfg.FS.EnsureDir(siteDir); err != nil {
			return artifacts, coreerr.E("DocsBuilder.Build", "failed to create site directory", err)
		}

		// Expose target/output metadata to the mkdocs process environment.
		env := appendConfiguredEnv(cfg.Env,
			core.Sprintf("GOOS=%s", target.OS),
			core.Sprintf("GOARCH=%s", target.Arch),
			core.Sprintf("TARGET_OS=%s", target.OS),
			core.Sprintf("TARGET_ARCH=%s", target.Arch),
			core.Sprintf("OUTPUT_DIR=%s", outputDir),
			core.Sprintf("TARGET_DIR=%s", platformDir),
		)
		if cfg.Name != "" {
			env = append(env, core.Sprintf("NAME=%s", cfg.Name))
		}
		if cfg.Version != "" {
			env = append(env, core.Sprintf("VERSION=%s", cfg.Version))
		}

		args := []string{"build", "--clean", "--site-dir", siteDir, "--config-file", configPath}
		output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, env, mkdocsCommand, args...)
		if err != nil {
			return artifacts, coreerr.E("DocsBuilder.Build", "mkdocs build failed: "+output, err)
		}

		// Package the generated site into <platformDir>/<name>.zip.
		bundlePath := ax.Join(platformDir, b.bundleName(cfg)+".zip")
		if err := b.bundleSite(cfg.FS, siteDir, bundlePath); err != nil {
			return artifacts, err
		}

		artifacts = append(artifacts, build.Artifact{
			Path: bundlePath,
			OS:   target.OS,
			Arch: target.Arch,
		})
	}

	return artifacts, nil
}
// resolveMkDocsConfigPath returns the MkDocs config file path if present.
|
||||||
|
func (b *DocsBuilder) resolveMkDocsConfigPath(fs io.Medium, projectDir string) string {
|
||||||
|
return build.ResolveMkDocsConfigPath(fs, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveMkDocsCli returns the executable path for the mkdocs CLI.
|
||||||
|
func (b *DocsBuilder) resolveMkDocsCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/mkdocs",
|
||||||
|
"/opt/homebrew/bin/mkdocs",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("mkdocs", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("DocsBuilder.resolveMkDocsCli", "mkdocs CLI not found. Install it with: pip install mkdocs", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleName returns the bundle filename stem.
|
||||||
|
func (b *DocsBuilder) bundleName(cfg *build.Config) string {
|
||||||
|
if cfg.Name != "" {
|
||||||
|
return cfg.Name
|
||||||
|
}
|
||||||
|
if cfg.ProjectDir != "" {
|
||||||
|
return ax.Base(cfg.ProjectDir)
|
||||||
|
}
|
||||||
|
return "docs-site"
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleSite creates a zip bundle containing the generated MkDocs site.
|
||||||
|
func (b *DocsBuilder) bundleSite(fs io.Medium, siteDir, bundlePath string) error {
|
||||||
|
if err := fs.EnsureDir(ax.Dir(bundlePath)); err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.bundleSite", "failed to create bundle directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := fs.Create(bundlePath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.bundleSite", "failed to create bundle file", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
writer := zip.NewWriter(file)
|
||||||
|
defer func() { _ = writer.Close() }()
|
||||||
|
|
||||||
|
return b.writeZipTree(fs, writer, siteDir, siteDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeZipTree walks a directory and writes files into the zip bundle.
|
||||||
|
func (b *DocsBuilder) writeZipTree(fs io.Medium, writer *zip.Writer, rootDir, currentDir string) error {
|
||||||
|
entries, err := fs.List(currentDir)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to list directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(entries, func(i, j int) bool {
|
||||||
|
return entries[i].Name() < entries[j].Name()
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
entryPath := ax.Join(currentDir, entry.Name())
|
||||||
|
|
||||||
|
if entry.IsDir() {
|
||||||
|
if err := b.writeZipTree(fs, writer, rootDir, entryPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
relPath, err := ax.Rel(rootDir, entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to relativise bundle path", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
info, err := fs.Stat(entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to stat bundle entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
header, err := zip.FileInfoHeader(info)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to create zip header", err)
|
||||||
|
}
|
||||||
|
header.Name = strings.ReplaceAll(relPath, ax.DS(), "/")
|
||||||
|
header.Method = zip.Deflate
|
||||||
|
header.SetModTime(deterministicZipTime)
|
||||||
|
|
||||||
|
zipEntry, err := writer.CreateHeader(header)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to create zip entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
source, err := fs.Open(entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to open bundle entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := stdio.Copy(zipEntry, source); err != nil {
|
||||||
|
_ = source.Close()
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to write bundle entry", err)
|
||||||
|
}
|
||||||
|
if err := source.Close(); err != nil {
|
||||||
|
return coreerr.E("DocsBuilder.writeZipTree", "failed to close bundle entry", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile-time check that DocsBuilder satisfies the build.Builder interface.
var _ build.Builder = (*DocsBuilder)(nil)
172
pkg/build/builders/docs_test.go
Normal file
172
pkg/build/builders/docs_test.go
Normal file
|
|
@ -0,0 +1,172 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
stdio "io"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestDocs_DocsBuilderName_Good verifies the builder reports its "docs" identifier.
func TestDocs_DocsBuilderName_Good(t *testing.T) {
	builder := NewDocsBuilder()
	assert.Equal(t, "docs", builder.Name())
}
// TestDocs_DocsBuilderDetect_Good verifies detection of MkDocs projects via
// either config filename, and rejection of directories without one.
func TestDocs_DocsBuilderDetect_Good(t *testing.T) {
	fs := io.Local

	t.Run("detects mkdocs.yml", func(t *testing.T) {
		dir := t.TempDir()
		err := ax.WriteFile(ax.Join(dir, "mkdocs.yml"), []byte("site_name: Demo\n"), 0o644)
		require.NoError(t, err)

		builder := NewDocsBuilder()
		detected, err := builder.Detect(fs, dir)
		require.NoError(t, err)
		assert.True(t, detected)
	})

	t.Run("detects mkdocs.yaml", func(t *testing.T) {
		dir := t.TempDir()
		err := ax.WriteFile(ax.Join(dir, "mkdocs.yaml"), []byte("site_name: Demo\n"), 0o644)
		require.NoError(t, err)

		builder := NewDocsBuilder()
		detected, err := builder.Detect(fs, dir)
		require.NoError(t, err)
		assert.True(t, detected)
	})

	t.Run("returns false without mkdocs.yml", func(t *testing.T) {
		builder := NewDocsBuilder()
		detected, err := builder.Detect(fs, t.TempDir())
		require.NoError(t, err)
		assert.False(t, detected)
	})
}
// TestDocs_DocsBuilderBuild_Good builds a demo site with a fake mkdocs shell
// script on PATH, then asserts the produced zip artifact contents and the
// environment variables the builder passed to the mkdocs process.
func TestDocs_DocsBuilderBuild_Good(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("mkdocs test fixture uses a shell script")
	}

	// Arrange: a project directory with an mkdocs config.
	dir := t.TempDir()
	require.NoError(t, ax.WriteFile(ax.Join(dir, "mkdocs.yaml"), []byte("site_name: Demo\n"), 0o644))

	// Arrange: a fake mkdocs CLI that logs its environment and writes a
	// single index.html into the requested --site-dir.
	binDir := t.TempDir()
	mkdocsPath := ax.Join(binDir, "mkdocs")
	script := "#!/bin/sh\nset -eu\nif [ -n \"${DOCS_BUILD_LOG_FILE:-}\" ]; then\n env | sort > \"${DOCS_BUILD_LOG_FILE}\"\nfi\nsite_dir=\"\"\nwhile [ $# -gt 0 ]; do\n if [ \"$1\" = \"--site-dir\" ]; then\n shift\n site_dir=\"$1\"\n fi\n shift\ndone\nmkdir -p \"$site_dir\"\nprintf '%s' 'demo docs' > \"$site_dir/index.html\"\n"
	require.NoError(t, ax.WriteFile(mkdocsPath, []byte(script), 0o755))
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
	logPath := ax.Join(t.TempDir(), "docs.env")

	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: dir,
		OutputDir:  ax.Join(dir, "dist"),
		Name:       "demo-site",
		Env:        []string{"FOO=bar", "DOCS_BUILD_LOG_FILE=" + logPath},
	}

	// Act: build for a single explicit target.
	builder := NewDocsBuilder()
	artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
	require.NoError(t, err)
	require.Len(t, artifacts, 1)

	// Assert: artifact metadata and file presence.
	artifact := artifacts[0]
	assert.Equal(t, "linux", artifact.OS)
	assert.Equal(t, "amd64", artifact.Arch)
	assert.FileExists(t, artifact.Path)

	// Assert: the zip contains exactly the generated index.html.
	reader, err := zip.OpenReader(artifact.Path)
	require.NoError(t, err)
	defer func() { _ = reader.Close() }()

	require.Len(t, reader.File, 1)
	assert.Equal(t, "index.html", reader.File[0].Name)

	file, err := reader.File[0].Open()
	require.NoError(t, err)
	defer func() { _ = file.Close() }()

	data, err := stdio.ReadAll(file)
	require.NoError(t, err)
	assert.Equal(t, "demo docs", string(data))

	// Assert: the builder exposed configured and target env vars to mkdocs.
	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)
	assert.Contains(t, string(content), "FOO=bar")
	assert.Contains(t, string(content), "GOOS=linux")
	assert.Contains(t, string(content), "GOARCH=amd64")
	assert.Contains(t, string(content), "TARGET_OS=linux")
	assert.Contains(t, string(content), "TARGET_ARCH=amd64")
	assert.Contains(t, string(content), "OUTPUT_DIR="+ax.Join(dir, "dist"))
	assert.Contains(t, string(content), "TARGET_DIR="+ax.Join(dir, "dist", "linux_amd64"))
	assert.Contains(t, string(content), "NAME=demo-site")
}
// TestDocs_DocsBuilderBuild_Good_NestedConfig verifies that a config file in a
// docs/ subdirectory is resolved and passed to mkdocs via --config-file.
func TestDocs_DocsBuilderBuild_Good_NestedConfig(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("mkdocs test fixture uses a shell script")
	}

	// Arrange: config nested under docs/ rather than the project root.
	dir := t.TempDir()
	require.NoError(t, ax.MkdirAll(ax.Join(dir, "docs"), 0o755))
	require.NoError(t, ax.WriteFile(ax.Join(dir, "docs", "mkdocs.yaml"), []byte("site_name: Demo\n"), 0o644))

	// Arrange: fake mkdocs CLI that appends env and argv to the log file.
	binDir := t.TempDir()
	mkdocsPath := ax.Join(binDir, "mkdocs")
	script := "#!/bin/sh\nset -eu\nif [ -n \"${DOCS_BUILD_LOG_FILE:-}\" ]; then\n env | sort >> \"${DOCS_BUILD_LOG_FILE}\"\n printf '%s\\n' \"$@\" >> \"${DOCS_BUILD_LOG_FILE}\"\nfi\nsite_dir=\"\"\nwhile [ $# -gt 0 ]; do\n if [ \"$1\" = \"--site-dir\" ]; then\n shift\n site_dir=\"$1\"\n fi\n shift\ndone\nmkdir -p \"$site_dir\"\nprintf '%s' 'demo docs' > \"$site_dir/index.html\"\n"
	require.NoError(t, ax.WriteFile(mkdocsPath, []byte(script), 0o755))
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
	logPath := ax.Join(t.TempDir(), "docs.args")

	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: dir,
		OutputDir:  ax.Join(dir, "dist"),
		Name:       "demo-site",
		Env:        []string{"DOCS_BUILD_LOG_FILE=" + logPath},
	}

	// Act.
	builder := NewDocsBuilder()
	artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
	require.NoError(t, err)
	require.Len(t, artifacts, 1)

	// Assert: mkdocs received the nested config path and target directory.
	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)
	assert.Contains(t, string(content), "--config-file")
	assert.Contains(t, string(content), "docs/mkdocs.yaml")
	assert.Contains(t, string(content), "TARGET_DIR="+ax.Join(dir, "dist", "linux_amd64"))
}
// TestDocs_DocsBuilderBuild_Bad covers the builder's error paths: a nil
// config and a project directory without any mkdocs config file.
func TestDocs_DocsBuilderBuild_Bad(t *testing.T) {
	builder := NewDocsBuilder()

	t.Run("returns error when config is nil", func(t *testing.T) {
		artifacts, err := builder.Build(context.Background(), nil, []build.Target{{OS: "linux", Arch: "amd64"}})
		require.Error(t, err)
		assert.Nil(t, artifacts)
	})

	t.Run("returns error when mkdocs.yml is missing", func(t *testing.T) {
		cfg := &build.Config{
			FS:         io.Local,
			ProjectDir: t.TempDir(),
			OutputDir:  t.TempDir(),
		}

		artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
		require.Error(t, err)
		assert.Nil(t, artifacts)
	})
}
13
pkg/build/builders/env.go
Normal file
13
pkg/build/builders/env.go
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
// appendConfiguredEnv returns a fresh environment slice with the configured
// build environment (base) first, followed by any builder-specific values
// (extra). The result never aliases base, so callers may append to it freely.
// Returns nil when there is nothing to combine.
func appendConfiguredEnv(base []string, extra ...string) []string {
	if len(base) == 0 && len(extra) == 0 {
		return nil
	}

	// Pre-size to the final length to avoid two growth rounds.
	env := make([]string, 0, len(base)+len(extra))
	env = append(env, base...)
	env = append(env, extra...)
	return env
}
|
|
@ -3,57 +3,73 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"runtime"
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// GoBuilder implements the Builder interface for Go projects.
|
// GoBuilder implements the Builder interface for Go projects.
|
||||||
|
//
|
||||||
|
// b := builders.NewGoBuilder()
|
||||||
type GoBuilder struct{}
|
type GoBuilder struct{}
|
||||||
|
|
||||||
// NewGoBuilder creates a new GoBuilder instance.
|
// NewGoBuilder creates a new GoBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewGoBuilder()
|
||||||
func NewGoBuilder() *GoBuilder {
|
func NewGoBuilder() *GoBuilder {
|
||||||
return &GoBuilder{}
|
return &GoBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "go"
|
||||||
func (b *GoBuilder) Name() string {
|
func (b *GoBuilder) Name() string {
|
||||||
return "go"
|
return "go"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if this builder can handle the project in the given directory.
|
// Detect checks if this builder can handle the project in the given directory.
|
||||||
// Uses IsGoProject from the build package which checks for go.mod or wails.json.
|
// Uses IsGoProject from the build package which checks for go.mod, go.work, or wails.json.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *GoBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *GoBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
return build.IsGoProject(fs, dir), nil
|
return build.IsGoProject(fs, dir), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build compiles the Go project for the specified targets.
|
// Build compiles the Go project for the specified targets.
|
||||||
// It sets GOOS, GOARCH, and CGO_ENABLED environment variables,
|
// If targets is empty, it falls back to the current host platform.
|
||||||
// applies ldflags and trimpath, and runs go build.
|
// It sets GOOS, GOARCH, and CGO_ENABLED, applies config-defined build flags
|
||||||
|
// and ldflags, and uses garble when obfuscation is enabled.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
func (b *GoBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *GoBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
if cfg == nil {
|
if cfg == nil {
|
||||||
return nil, coreerr.E("GoBuilder.Build", "config is nil", nil)
|
return nil, coreerr.E("GoBuilder.Build", "config is nil", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(targets) == 0 {
|
if len(targets) == 0 {
|
||||||
return nil, coreerr.E("GoBuilder.Build", "no targets specified", nil)
|
targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
}
|
||||||
|
|
||||||
|
outputDir := cfg.OutputDir
|
||||||
|
if outputDir == "" {
|
||||||
|
outputDir = ax.Join(cfg.ProjectDir, "dist")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure output directory exists
|
// Ensure output directory exists
|
||||||
if err := cfg.FS.EnsureDir(cfg.OutputDir); err != nil {
|
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
||||||
return nil, coreerr.E("GoBuilder.Build", "failed to create output directory", err)
|
return nil, coreerr.E("GoBuilder.Build", "failed to create output directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var artifacts []build.Artifact
|
var artifacts []build.Artifact
|
||||||
|
|
||||||
for _, target := range targets {
|
for _, target := range targets {
|
||||||
artifact, err := b.buildTarget(ctx, cfg, target)
|
artifact, err := b.buildTarget(ctx, cfg, outputDir, target)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return artifacts, coreerr.E("GoBuilder.Build", "failed to build "+target.String(), err)
|
return artifacts, coreerr.E("GoBuilder.Build", "failed to build "+target.String(), err)
|
||||||
}
|
}
|
||||||
|
|
@ -64,63 +80,100 @@ func (b *GoBuilder) Build(ctx context.Context, cfg *build.Config, targets []buil
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildTarget compiles for a single target platform.
|
// buildTarget compiles for a single target platform.
|
||||||
func (b *GoBuilder) buildTarget(ctx context.Context, cfg *build.Config, target build.Target) (build.Artifact, error) {
|
func (b *GoBuilder) buildTarget(ctx context.Context, cfg *build.Config, outputDir string, target build.Target) (build.Artifact, error) {
|
||||||
// Determine output binary name
|
// Determine output binary name
|
||||||
binaryName := cfg.Name
|
binaryName := cfg.Name
|
||||||
if binaryName == "" {
|
if binaryName == "" {
|
||||||
binaryName = filepath.Base(cfg.ProjectDir)
|
binaryName = cfg.Project.Binary
|
||||||
|
}
|
||||||
|
if binaryName == "" {
|
||||||
|
binaryName = cfg.Project.Name
|
||||||
|
}
|
||||||
|
if binaryName == "" {
|
||||||
|
binaryName = ax.Base(cfg.ProjectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add .exe extension for Windows
|
// Add .exe extension for Windows
|
||||||
if target.OS == "windows" && !strings.HasSuffix(binaryName, ".exe") {
|
if target.OS == "windows" && !core.HasSuffix(binaryName, ".exe") {
|
||||||
binaryName += ".exe"
|
binaryName += ".exe"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create platform-specific output path: output/os_arch/binary
|
// Create platform-specific output path: output/os_arch/binary
|
||||||
platformDir := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch))
|
platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
||||||
return build.Artifact{}, coreerr.E("GoBuilder.buildTarget", "failed to create platform directory", err)
|
return build.Artifact{}, coreerr.E("GoBuilder.buildTarget", "failed to create platform directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
outputPath := filepath.Join(platformDir, binaryName)
|
outputPath := ax.Join(platformDir, binaryName)
|
||||||
|
|
||||||
// Build the go build arguments
|
// Build the go/garble arguments.
|
||||||
args := []string{"build"}
|
args := []string{"build"}
|
||||||
|
if !containsString(cfg.Flags, "-trimpath") {
|
||||||
|
args = append(args, "-trimpath")
|
||||||
|
}
|
||||||
|
if len(cfg.Flags) > 0 {
|
||||||
|
args = append(args, cfg.Flags...)
|
||||||
|
}
|
||||||
|
|
||||||
// Add trimpath flag
|
if len(cfg.BuildTags) > 0 {
|
||||||
args = append(args, "-trimpath")
|
args = append(args, "-tags", core.Join(",", cfg.BuildTags...))
|
||||||
|
}
|
||||||
|
|
||||||
// Add ldflags if specified
|
// Add ldflags if specified, and inject the build version when needed.
|
||||||
if len(cfg.LDFlags) > 0 {
|
ldflags := append([]string{}, cfg.LDFlags...)
|
||||||
ldflags := strings.Join(cfg.LDFlags, " ")
|
if cfg.Version != "" && !hasVersionLDFlag(ldflags) {
|
||||||
args = append(args, "-ldflags", ldflags)
|
ldflags = append(ldflags, core.Sprintf("-X main.version=%s", cfg.Version))
|
||||||
|
}
|
||||||
|
if len(ldflags) > 0 {
|
||||||
|
args = append(args, "-ldflags", core.Join(" ", ldflags...))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add output path
|
// Add output path
|
||||||
args = append(args, "-o", outputPath)
|
args = append(args, "-o", outputPath)
|
||||||
|
|
||||||
// Add the project directory as the build target (current directory)
|
// Build the configured main package path, defaulting to the project root.
|
||||||
args = append(args, ".")
|
mainPackage := cfg.Project.Main
|
||||||
|
if mainPackage == "" {
|
||||||
|
mainPackage = "."
|
||||||
|
}
|
||||||
|
args = append(args, mainPackage)
|
||||||
|
|
||||||
// Create the command
|
// Set up environment.
|
||||||
cmd := exec.CommandContext(ctx, "go", args...)
|
env := append([]string{}, cfg.Env...)
|
||||||
cmd.Dir = cfg.ProjectDir
|
env = append(env, build.CacheEnvironment(&cfg.Cache)...)
|
||||||
|
env = append(env,
|
||||||
// Set up environment
|
core.Sprintf("TARGET_OS=%s", target.OS),
|
||||||
env := os.Environ()
|
core.Sprintf("TARGET_ARCH=%s", target.Arch),
|
||||||
env = append(env, fmt.Sprintf("GOOS=%s", target.OS))
|
core.Sprintf("OUTPUT_DIR=%s", outputDir),
|
||||||
env = append(env, fmt.Sprintf("GOARCH=%s", target.Arch))
|
core.Sprintf("TARGET_DIR=%s", platformDir),
|
||||||
|
core.Sprintf("GOOS=%s", target.OS),
|
||||||
|
core.Sprintf("GOARCH=%s", target.Arch),
|
||||||
|
)
|
||||||
|
if binaryName != "" {
|
||||||
|
env = append(env, core.Sprintf("NAME=%s", binaryName))
|
||||||
|
}
|
||||||
|
if cfg.Version != "" {
|
||||||
|
env = append(env, core.Sprintf("VERSION=%s", cfg.Version))
|
||||||
|
}
|
||||||
if cfg.CGO {
|
if cfg.CGO {
|
||||||
env = append(env, "CGO_ENABLED=1")
|
env = append(env, "CGO_ENABLED=1")
|
||||||
} else {
|
} else {
|
||||||
env = append(env, "CGO_ENABLED=0")
|
env = append(env, "CGO_ENABLED=0")
|
||||||
}
|
}
|
||||||
cmd.Env = env
|
|
||||||
|
command := "go"
|
||||||
|
var err error
|
||||||
|
if cfg.Obfuscate {
|
||||||
|
command, err = b.resolveGarbleCli()
|
||||||
|
if err != nil {
|
||||||
|
return build.Artifact{}, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Capture output for error messages
|
// Capture output for error messages
|
||||||
output, err := cmd.CombinedOutput()
|
output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, env, command, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return build.Artifact{}, coreerr.E("GoBuilder.buildTarget", "go build failed: "+string(output), err)
|
return build.Artifact{}, coreerr.E("GoBuilder.buildTarget", command+" build failed: "+output, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return build.Artifact{
|
return build.Artifact{
|
||||||
|
|
@ -130,5 +183,71 @@ func (b *GoBuilder) buildTarget(ctx context.Context, cfg *build.Config, target b
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// resolveGarbleCli returns the executable path for the garble CLI.
|
||||||
|
//
|
||||||
|
// command, err := b.resolveGarbleCli()
|
||||||
|
func (b *GoBuilder) resolveGarbleCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/garble",
|
||||||
|
"/opt/homebrew/bin/garble",
|
||||||
|
}
|
||||||
|
|
||||||
|
paths = append(paths, garbleInstallPaths()...)
|
||||||
|
|
||||||
|
if home := core.Env("HOME"); home != "" {
|
||||||
|
paths = append(paths, ax.Join(home, "go", "bin", "garble"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("garble", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("GoBuilder.resolveGarbleCli", "garble CLI not found. Install it with: go install mvdan.cc/garble@latest", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// garbleInstallPaths returns the standard Go install locations for garble.
|
||||||
|
func garbleInstallPaths() []string {
|
||||||
|
var paths []string
|
||||||
|
|
||||||
|
if gobin := core.Env("GOBIN"); gobin != "" {
|
||||||
|
paths = append(paths, ax.Join(gobin, "garble"))
|
||||||
|
}
|
||||||
|
|
||||||
|
if gopath := core.Env("GOPATH"); gopath != "" {
|
||||||
|
for _, root := range strings.Split(gopath, string(os.PathListSeparator)) {
|
||||||
|
root = strings.TrimSpace(root)
|
||||||
|
if root == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
paths = append(paths, ax.Join(root, "bin", "garble"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return paths
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasVersionLDFlag reports whether a version linker flag is already present
// (either main.version= or main.Version=).
func hasVersionLDFlag(ldflags []string) bool {
	for _, entry := range ldflags {
		if !strings.Contains(entry, "main.version=") && !strings.Contains(entry, "main.Version=") {
			continue
		}
		return true
	}
	return false
}
// containsString reports whether a slice contains the given string.
func containsString(values []string, needle string) bool {
	for index := range values {
		if values[index] == needle {
			return true
		}
	}
	return false
}
// Ensure GoBuilder implements the Builder interface.
|
// Ensure GoBuilder implements the Builder interface.
|
||||||
var _ build.Builder = (*GoBuilder)(nil)
|
var _ build.Builder = (*GoBuilder)(nil)
|
||||||
|
|
|
||||||
|
|
@ -3,10 +3,12 @@ package builders
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
@ -23,7 +25,7 @@ func setupGoTestProject(t *testing.T) string {
|
||||||
|
|
||||||
go 1.21
|
go 1.21
|
||||||
`
|
`
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte(goMod), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create a minimal main.go
|
// Create a minimal main.go
|
||||||
|
|
@ -33,22 +35,144 @@ func main() {
|
||||||
println("hello")
|
println("hello")
|
||||||
}
|
}
|
||||||
`
|
`
|
||||||
err = os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainGo), 0644)
|
err = ax.WriteFile(ax.Join(dir, "main.go"), []byte(mainGo), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGoBuilder_Name_Good(t *testing.T) {
|
func setupFakeBuildToolchain(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
goScript := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${GO_BUILD_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$@" > "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
env_log_file="${GO_BUILD_ENV_LOG_FILE:-}"
|
||||||
|
if [ -n "$env_log_file" ]; then
|
||||||
|
env | sort > "$env_log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${GOARCH:-}" = "invalid_arch" ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -f main.go ] && grep -q "not valid go code" main.go; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
output=""
|
||||||
|
previous=""
|
||||||
|
for argument in "$@"; do
|
||||||
|
if [ "$previous" = "-o" ]; then
|
||||||
|
output="$argument"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
previous="$argument"
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -n "$output" ]; then
|
||||||
|
mkdir -p "$(dirname "$output")"
|
||||||
|
printf 'fake binary\n' > "$output"
|
||||||
|
chmod +x "$output"
|
||||||
|
fi
|
||||||
|
`
|
||||||
|
|
||||||
|
err := ax.WriteFile(ax.Join(binDir, "go"), []byte(goScript), 0o755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
garbleScript := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${GARBLE_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$@" > "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec go "$@"
|
||||||
|
`
|
||||||
|
|
||||||
|
err = ax.WriteFile(ax.Join(binDir, "garble"), []byte(garbleScript), 0o755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func setupFakeGoBinary(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
goScript := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${GO_BUILD_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$@" > "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
env_log_file="${GO_BUILD_ENV_LOG_FILE:-}"
|
||||||
|
if [ -n "$env_log_file" ]; then
|
||||||
|
env | sort > "$env_log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${GOARCH:-}" = "invalid_arch" ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -f main.go ] && grep -q "not valid go code" main.go; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
output=""
|
||||||
|
previous=""
|
||||||
|
for argument in "$@"; do
|
||||||
|
if [ "$previous" = "-o" ]; then
|
||||||
|
output="$argument"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
previous="$argument"
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -n "$output" ]; then
|
||||||
|
mkdir -p "$(dirname "$output")"
|
||||||
|
printf 'fake binary\n' > "$output"
|
||||||
|
chmod +x "$output"
|
||||||
|
fi
|
||||||
|
`
|
||||||
|
|
||||||
|
err := ax.WriteFile(ax.Join(binDir, "go"), []byte(goScript), 0o755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func setupFakeGarbleBinary(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
garbleScript := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${GARBLE_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$@" > "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
exec go "$@"
|
||||||
|
`
|
||||||
|
|
||||||
|
err := ax.WriteFile(ax.Join(binDir, "garble"), []byte(garbleScript), 0o755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGo_GoBuilderName_Good(t *testing.T) {
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
assert.Equal(t, "go", builder.Name())
|
assert.Equal(t, "go", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGoBuilder_Detect_Good(t *testing.T) {
|
func TestGo_GoBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("detects Go project with go.mod", func(t *testing.T) {
|
t.Run("detects Go project with go.mod", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
@ -59,7 +183,7 @@ func TestGoBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects Wails project", func(t *testing.T) {
|
t.Run("detects Wails project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte("{}"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
@ -71,7 +195,7 @@ func TestGoBuilder_Detect_Good(t *testing.T) {
|
||||||
t.Run("returns false for non-Go project", func(t *testing.T) {
|
t.Run("returns false for non-Go project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create a Node.js project instead
|
// Create a Node.js project instead
|
||||||
err := os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "package.json"), []byte("{}"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
@ -90,11 +214,15 @@ func TestGoBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGoBuilder_Build_Good(t *testing.T) {
|
func TestGo_GoBuilderBuild_Good(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeBuildToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
t.Run("builds for current platform", func(t *testing.T) {
|
t.Run("builds for current platform", func(t *testing.T) {
|
||||||
projectDir := setupGoTestProject(t)
|
projectDir := setupGoTestProject(t)
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
|
|
@ -130,6 +258,43 @@ func TestGoBuilder_Build_Good(t *testing.T) {
|
||||||
assert.Contains(t, artifact.Path, expectedName)
|
assert.Contains(t, artifact.Path, expectedName)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("defaults to current platform when targets are empty", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "fallback",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("does not mutate the caller output directory when using defaults", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
Name: "mutability",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Empty(t, cfg.OutputDir)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "dist"), ax.Dir(ax.Dir(artifacts[0].Path)))
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("builds multiple targets", func(t *testing.T) {
|
t.Run("builds multiple targets", func(t *testing.T) {
|
||||||
projectDir := setupGoTestProject(t)
|
projectDir := setupGoTestProject(t)
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
|
|
@ -178,7 +343,7 @@ func TestGoBuilder_Build_Good(t *testing.T) {
|
||||||
require.Len(t, artifacts, 1)
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
// Verify .exe extension
|
// Verify .exe extension
|
||||||
assert.True(t, filepath.Ext(artifacts[0].Path) == ".exe")
|
assert.True(t, ax.Ext(artifacts[0].Path) == ".exe")
|
||||||
assert.FileExists(t, artifacts[0].Path)
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -202,13 +367,65 @@ func TestGoBuilder_Build_Good(t *testing.T) {
|
||||||
require.Len(t, artifacts, 1)
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
// Binary should use the project directory base name
|
// Binary should use the project directory base name
|
||||||
baseName := filepath.Base(projectDir)
|
baseName := ax.Base(projectDir)
|
||||||
if runtime.GOOS == "windows" {
|
if runtime.GOOS == "windows" {
|
||||||
baseName += ".exe"
|
baseName += ".exe"
|
||||||
}
|
}
|
||||||
assert.Contains(t, artifacts[0].Path, baseName)
|
assert.Contains(t, artifacts[0].Path, baseName)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("uses configured project binary when Name not specified", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
}
|
||||||
|
cfg.Project.Binary = "example-binary"
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
expectedName := "example-binary"
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
expectedName += ".exe"
|
||||||
|
}
|
||||||
|
assert.Contains(t, artifacts[0].Path, expectedName)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses configured project name when Binary not specified", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
}
|
||||||
|
cfg.Project.Name = "example-name"
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
expectedName := "example-name"
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
expectedName += ".exe"
|
||||||
|
}
|
||||||
|
assert.Contains(t, artifacts[0].Path, expectedName)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("applies ldflags", func(t *testing.T) {
|
t.Run("applies ldflags", func(t *testing.T) {
|
||||||
projectDir := setupGoTestProject(t)
|
projectDir := setupGoTestProject(t)
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
|
|
@ -231,9 +448,289 @@ func TestGoBuilder_Build_Good(t *testing.T) {
|
||||||
assert.FileExists(t, artifacts[0].Path)
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("applies config flags and env", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
argsLogPath := ax.Join(logDir, "go-args.log")
|
||||||
|
envLogPath := ax.Join(logDir, "go-env.log")
|
||||||
|
|
||||||
|
t.Setenv("GO_BUILD_LOG_FILE", argsLogPath)
|
||||||
|
t.Setenv("GO_BUILD_ENV_LOG_FILE", envLogPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "envflags",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
Flags: []string{"-race"},
|
||||||
|
Env: []string{"FOO=bar", "BAR=baz"},
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
argsContent, err := ax.ReadFile(argsLogPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
args := strings.Split(strings.TrimSpace(string(argsContent)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Equal(t, "build", args[0])
|
||||||
|
assert.Contains(t, args, "-trimpath")
|
||||||
|
assert.Contains(t, args, "-race")
|
||||||
|
|
||||||
|
envContent, err := ax.ReadFile(envLogPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
envLines := strings.Split(strings.TrimSpace(string(envContent)), "\n")
|
||||||
|
assert.Contains(t, envLines, "BAR=baz")
|
||||||
|
assert.Contains(t, envLines, "FOO=bar")
|
||||||
|
assert.Contains(t, envLines, "TARGET_OS="+runtime.GOOS)
|
||||||
|
assert.Contains(t, envLines, "TARGET_ARCH="+runtime.GOARCH)
|
||||||
|
assert.Contains(t, envLines, "OUTPUT_DIR="+outputDir)
|
||||||
|
assert.Contains(t, envLines, "TARGET_DIR="+ax.Join(outputDir, runtime.GOOS+"_"+runtime.GOARCH))
|
||||||
|
assert.Contains(t, envLines, "GOOS="+runtime.GOOS)
|
||||||
|
assert.Contains(t, envLines, "GOARCH="+runtime.GOARCH)
|
||||||
|
assert.Contains(t, envLines, "NAME=envflags")
|
||||||
|
assert.Contains(t, envLines, "VERSION=v1.2.3")
|
||||||
|
assert.Contains(t, envLines, "CGO_ENABLED=0")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("applies configured cache paths to go cache env vars", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
envLogPath := ax.Join(logDir, "go-cache-env.log")
|
||||||
|
|
||||||
|
t.Setenv("GO_BUILD_ENV_LOG_FILE", envLogPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "cachetest",
|
||||||
|
Cache: build.CacheConfig{
|
||||||
|
Enabled: true,
|
||||||
|
Paths: []string{
|
||||||
|
ax.Join(outputDir, "cache", "go-build"),
|
||||||
|
ax.Join(outputDir, "cache", "go-mod"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
targets := []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
envContent, err := ax.ReadFile(envLogPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
envLines := strings.Split(strings.TrimSpace(string(envContent)), "\n")
|
||||||
|
assert.Contains(t, envLines, "GOCACHE="+ax.Join(outputDir, "cache", "go-build"))
|
||||||
|
assert.Contains(t, envLines, "GOMODCACHE="+ax.Join(outputDir, "cache", "go-mod"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("passes build tags through to go build", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logPath := ax.Join(t.TempDir(), "go-tags.log")
|
||||||
|
t.Setenv("GO_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "tagged",
|
||||||
|
BuildTags: []string{"webkit2_41", "integration"},
|
||||||
|
}
|
||||||
|
targets := []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Equal(t, "build", args[0])
|
||||||
|
assert.Contains(t, args, "-tags")
|
||||||
|
assert.Contains(t, args, "webkit2_41,integration")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("injects version into ldflags and environment", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
argsLogPath := ax.Join(t.TempDir(), "go-version-args.log")
|
||||||
|
envLogPath := ax.Join(t.TempDir(), "go-version-env.log")
|
||||||
|
|
||||||
|
t.Setenv("GO_BUILD_LOG_FILE", argsLogPath)
|
||||||
|
t.Setenv("GO_BUILD_ENV_LOG_FILE", envLogPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "versioned",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
}
|
||||||
|
targets := []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
argsContent, err := ax.ReadFile(argsLogPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(argsContent)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Contains(t, args, "-ldflags")
|
||||||
|
assert.Contains(t, args, "-X main.version=v1.2.3")
|
||||||
|
|
||||||
|
envContent, err := ax.ReadFile(envLogPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
envLines := strings.Split(strings.TrimSpace(string(envContent)), "\n")
|
||||||
|
assert.Contains(t, envLines, "VERSION=v1.2.3")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses garble when obfuscation is enabled", func(t *testing.T) {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
t.Skip("garble test helper uses a shell script")
|
||||||
|
}
|
||||||
|
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "garble.log")
|
||||||
|
|
||||||
|
t.Setenv("GARBLE_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "obfuscated",
|
||||||
|
Obfuscate: true,
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Equal(t, "build", args[0])
|
||||||
|
assert.Contains(t, args, "-trimpath")
|
||||||
|
assert.Contains(t, args, "-o")
|
||||||
|
assert.Contains(t, args, ".")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("finds garble in GOBIN when it is not on PATH", func(t *testing.T) {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
t.Skip("garble test helper uses a shell script")
|
||||||
|
}
|
||||||
|
|
||||||
|
goDir := t.TempDir()
|
||||||
|
setupFakeGoBinary(t, goDir)
|
||||||
|
t.Setenv("PATH", goDir+string(os.PathListSeparator)+"/usr/bin"+string(os.PathListSeparator)+"/bin")
|
||||||
|
|
||||||
|
garbleDir := t.TempDir()
|
||||||
|
setupFakeGarbleBinary(t, garbleDir)
|
||||||
|
t.Setenv("GOBIN", garbleDir)
|
||||||
|
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "garble-gobin.log")
|
||||||
|
|
||||||
|
t.Setenv("GARBLE_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "obfuscated-gobin",
|
||||||
|
Obfuscate: true,
|
||||||
|
}
|
||||||
|
targets := []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Equal(t, "build", args[0])
|
||||||
|
assert.Contains(t, args, "-trimpath")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("builds the configured main package path", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
err := ax.MkdirAll(ax.Join(projectDir, "cmd", "myapp"), 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(projectDir, "cmd", "myapp", "main.go"), []byte("package main\n\nfunc main() {}\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logPath := ax.Join(t.TempDir(), "go-build-args.log")
|
||||||
|
t.Setenv("GO_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "mainpackage",
|
||||||
|
}
|
||||||
|
cfg.Project.Main = "./cmd/myapp"
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Contains(t, args, "./cmd/myapp")
|
||||||
|
assert.NotContains(t, args, ".")
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("creates output directory if missing", func(t *testing.T) {
|
t.Run("creates output directory if missing", func(t *testing.T) {
|
||||||
projectDir := setupGoTestProject(t)
|
projectDir := setupGoTestProject(t)
|
||||||
outputDir := filepath.Join(t.TempDir(), "nested", "output")
|
outputDir := ax.Join(t.TempDir(), "nested", "output")
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
cfg := &build.Config{
|
cfg := &build.Config{
|
||||||
|
|
@ -252,9 +749,36 @@ func TestGoBuilder_Build_Good(t *testing.T) {
|
||||||
assert.FileExists(t, artifacts[0].Path)
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
assert.DirExists(t, outputDir)
|
assert.DirExists(t, outputDir)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("defaults output directory to project dist when not specified", func(t *testing.T) {
|
||||||
|
projectDir := setupGoTestProject(t)
|
||||||
|
|
||||||
|
builder := NewGoBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
Name: "defaultoutput",
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
expectedDir := ax.Join(projectDir, "dist")
|
||||||
|
assert.DirExists(t, expectedDir)
|
||||||
|
assert.Contains(t, artifacts[0].Path, expectedDir)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGoBuilder_Build_Bad(t *testing.T) {
|
func TestGo_GoBuilderBuild_Bad(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeBuildToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
t.Run("returns error for nil config", func(t *testing.T) {
|
t.Run("returns error for nil config", func(t *testing.T) {
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
||||||
|
|
@ -264,7 +788,7 @@ func TestGoBuilder_Build_Bad(t *testing.T) {
|
||||||
assert.Contains(t, err.Error(), "config is nil")
|
assert.Contains(t, err.Error(), "config is nil")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("returns error for empty targets", func(t *testing.T) {
|
t.Run("defaults to current platform when targets are empty", func(t *testing.T) {
|
||||||
projectDir := setupGoTestProject(t)
|
projectDir := setupGoTestProject(t)
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
@ -276,9 +800,11 @@ func TestGoBuilder_Build_Bad(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{})
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{})
|
||||||
assert.Error(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Nil(t, artifacts)
|
require.Len(t, artifacts, 1)
|
||||||
assert.Contains(t, err.Error(), "no targets specified")
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("returns error for invalid project directory", func(t *testing.T) {
|
t.Run("returns error for invalid project directory", func(t *testing.T) {
|
||||||
|
|
@ -310,11 +836,11 @@ func TestGoBuilder_Build_Bad(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
// Create go.mod
|
// Create go.mod
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test\n\ngo 1.21"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test\n\ngo 1.21"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create invalid Go code
|
// Create invalid Go code
|
||||||
err = os.WriteFile(filepath.Join(dir, "main.go"), []byte("this is not valid go code"), 0644)
|
err = ax.WriteFile(ax.Join(dir, "main.go"), []byte("this is not valid go code"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewGoBuilder()
|
builder := NewGoBuilder()
|
||||||
|
|
@ -391,7 +917,7 @@ func TestGoBuilder_Build_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGoBuilder_Interface_Good(t *testing.T) {
|
func TestGo_GoBuilderInterface_Good(t *testing.T) {
|
||||||
// Verify GoBuilder implements Builder interface
|
// Verify GoBuilder implements Builder interface
|
||||||
var _ build.Builder = (*GoBuilder)(nil)
|
var _ build.Builder = (*GoBuilder)(nil)
|
||||||
var _ build.Builder = NewGoBuilder()
|
var _ build.Builder = NewGoBuilder()
|
||||||
|
|
|
||||||
|
|
@ -3,43 +3,52 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// LinuxKitBuilder builds LinuxKit images.
|
// LinuxKitBuilder builds LinuxKit images.
|
||||||
|
//
|
||||||
|
// b := builders.NewLinuxKitBuilder()
|
||||||
type LinuxKitBuilder struct{}
|
type LinuxKitBuilder struct{}
|
||||||
|
|
||||||
// NewLinuxKitBuilder creates a new LinuxKit builder.
|
// NewLinuxKitBuilder creates a new LinuxKit builder.
|
||||||
|
//
|
||||||
|
// b := builders.NewLinuxKitBuilder()
|
||||||
func NewLinuxKitBuilder() *LinuxKitBuilder {
|
func NewLinuxKitBuilder() *LinuxKitBuilder {
|
||||||
return &LinuxKitBuilder{}
|
return &LinuxKitBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "linuxkit"
|
||||||
func (b *LinuxKitBuilder) Name() string {
|
func (b *LinuxKitBuilder) Name() string {
|
||||||
return "linuxkit"
|
return "linuxkit"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if a linuxkit.yml or .yml config exists in the directory.
|
// Detect checks if a linuxkit.yml, linuxkit.yaml, or nested YAML config exists in the directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *LinuxKitBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *LinuxKitBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
// Check for linuxkit.yml
|
// Check for linuxkit.yml
|
||||||
if fs.IsFile(filepath.Join(dir, "linuxkit.yml")) {
|
if fs.IsFile(ax.Join(dir, "linuxkit.yml")) || fs.IsFile(ax.Join(dir, "linuxkit.yaml")) {
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
// Check for .core/linuxkit/
|
// Check for .core/linuxkit/
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
if fs.IsDir(lkDir) {
|
if fs.IsDir(lkDir) {
|
||||||
entries, err := fs.List(lkDir)
|
entries, err := fs.List(lkDir)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".yml") {
|
if entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
name := entry.Name()
|
||||||
|
if core.HasSuffix(name, ".yml") || core.HasSuffix(name, ".yaml") {
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -49,9 +58,11 @@ func (b *LinuxKitBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build builds LinuxKit images for the specified targets.
|
// Build builds LinuxKit images for the specified targets.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
// Validate linuxkit CLI is available
|
linuxkitCommand, err := b.resolveLinuxKitCli()
|
||||||
if err := b.validateLinuxKitCli(); err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -59,23 +70,31 @@ func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets
|
||||||
configPath := cfg.LinuxKitConfig
|
configPath := cfg.LinuxKitConfig
|
||||||
if configPath == "" {
|
if configPath == "" {
|
||||||
// Auto-detect
|
// Auto-detect
|
||||||
if cfg.FS.IsFile(filepath.Join(cfg.ProjectDir, "linuxkit.yml")) {
|
if cfg.FS.IsFile(ax.Join(cfg.ProjectDir, "linuxkit.yml")) {
|
||||||
configPath = filepath.Join(cfg.ProjectDir, "linuxkit.yml")
|
configPath = ax.Join(cfg.ProjectDir, "linuxkit.yml")
|
||||||
|
} else if cfg.FS.IsFile(ax.Join(cfg.ProjectDir, "linuxkit.yaml")) {
|
||||||
|
configPath = ax.Join(cfg.ProjectDir, "linuxkit.yaml")
|
||||||
} else {
|
} else {
|
||||||
// Look in .core/linuxkit/
|
// Look in .core/linuxkit/
|
||||||
lkDir := filepath.Join(cfg.ProjectDir, ".core", "linuxkit")
|
lkDir := ax.Join(cfg.ProjectDir, ".core", "linuxkit")
|
||||||
if cfg.FS.IsDir(lkDir) {
|
if cfg.FS.IsDir(lkDir) {
|
||||||
entries, err := cfg.FS.List(lkDir)
|
entries, err := cfg.FS.List(lkDir)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".yml") {
|
if entry.IsDir() {
|
||||||
configPath = filepath.Join(lkDir, entry.Name())
|
continue
|
||||||
|
}
|
||||||
|
name := entry.Name()
|
||||||
|
if core.HasSuffix(name, ".yml") || core.HasSuffix(name, ".yaml") {
|
||||||
|
configPath = ax.Join(lkDir, entry.Name())
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else if !ax.IsAbs(configPath) {
|
||||||
|
configPath = ax.Join(cfg.ProjectDir, configPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
if configPath == "" {
|
if configPath == "" {
|
||||||
|
|
@ -96,7 +115,7 @@ func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets
|
||||||
// Create output directory
|
// Create output directory
|
||||||
outputDir := cfg.OutputDir
|
outputDir := cfg.OutputDir
|
||||||
if outputDir == "" {
|
if outputDir == "" {
|
||||||
outputDir = filepath.Join(cfg.ProjectDir, "dist")
|
outputDir = ax.Join(cfg.ProjectDir, "dist")
|
||||||
}
|
}
|
||||||
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
||||||
return nil, coreerr.E("LinuxKitBuilder.Build", "failed to create output directory", err)
|
return nil, coreerr.E("LinuxKitBuilder.Build", "failed to create output directory", err)
|
||||||
|
|
@ -105,7 +124,8 @@ func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets
|
||||||
// Determine base name from config file or project name
|
// Determine base name from config file or project name
|
||||||
baseName := cfg.Name
|
baseName := cfg.Name
|
||||||
if baseName == "" {
|
if baseName == "" {
|
||||||
baseName = strings.TrimSuffix(filepath.Base(configPath), ".yml")
|
baseName = core.TrimSuffix(ax.Base(configPath), ".yml")
|
||||||
|
baseName = core.TrimSuffix(baseName, ".yaml")
|
||||||
}
|
}
|
||||||
|
|
||||||
// If no targets, default to linux/amd64
|
// If no targets, default to linux/amd64
|
||||||
|
|
@ -119,23 +139,17 @@ func (b *LinuxKitBuilder) Build(ctx context.Context, cfg *build.Config, targets
|
||||||
for _, target := range targets {
|
for _, target := range targets {
|
||||||
// LinuxKit only supports Linux
|
// LinuxKit only supports Linux
|
||||||
if target.OS != "linux" {
|
if target.OS != "linux" {
|
||||||
fmt.Printf("Skipping %s/%s (LinuxKit only supports Linux)\n", target.OS, target.Arch)
|
core.Print(nil, "Skipping %s/%s (LinuxKit only supports Linux)", target.OS, target.Arch)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, format := range formats {
|
for _, format := range formats {
|
||||||
outputName := fmt.Sprintf("%s-%s", baseName, target.Arch)
|
outputName := core.Sprintf("%s-%s", baseName, target.Arch)
|
||||||
|
|
||||||
args := b.buildLinuxKitArgs(configPath, format, outputName, outputDir, target.Arch)
|
args := b.buildLinuxKitArgs(configPath, format, outputName, outputDir, target.Arch)
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "linuxkit", args...)
|
core.Print(nil, "Building LinuxKit image: %s (%s, %s)", outputName, format, target.Arch)
|
||||||
cmd.Dir = cfg.ProjectDir
|
if err := ax.ExecWithEnv(ctx, cfg.ProjectDir, cfg.Env, linuxkitCommand, args...); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
fmt.Printf("Building LinuxKit image: %s (%s, %s)\n", outputName, format, target.Arch)
|
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return nil, coreerr.E("LinuxKitBuilder.Build", "build failed for "+target.Arch+"/"+format, err)
|
return nil, coreerr.E("LinuxKitBuilder.Build", "build failed for "+target.Arch+"/"+format, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -189,7 +203,7 @@ func (b *LinuxKitBuilder) buildLinuxKitArgs(configPath, format, outputName, outp
|
||||||
// getArtifactPath returns the expected path of the built artifact.
|
// getArtifactPath returns the expected path of the built artifact.
|
||||||
func (b *LinuxKitBuilder) getArtifactPath(outputDir, outputName, format string) string {
|
func (b *LinuxKitBuilder) getArtifactPath(outputDir, outputName, format string) string {
|
||||||
ext := b.getFormatExtension(format)
|
ext := b.getFormatExtension(format)
|
||||||
return filepath.Join(outputDir, outputName+ext)
|
return ax.Join(outputDir, outputName+ext)
|
||||||
}
|
}
|
||||||
|
|
||||||
// findArtifact searches for the built artifact with various naming conventions.
|
// findArtifact searches for the built artifact with various naming conventions.
|
||||||
|
|
@ -202,7 +216,7 @@ func (b *LinuxKitBuilder) findArtifact(fs io.Medium, outputDir, outputName, form
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, ext := range extensions {
|
for _, ext := range extensions {
|
||||||
path := filepath.Join(outputDir, outputName+ext)
|
path := ax.Join(outputDir, outputName+ext)
|
||||||
if fs.Exists(path) {
|
if fs.Exists(path) {
|
||||||
return path
|
return path
|
||||||
}
|
}
|
||||||
|
|
@ -212,11 +226,10 @@ func (b *LinuxKitBuilder) findArtifact(fs io.Medium, outputDir, outputName, form
|
||||||
entries, err := fs.List(outputDir)
|
entries, err := fs.List(outputDir)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
if strings.HasPrefix(entry.Name(), outputName) {
|
if core.HasPrefix(entry.Name(), outputName) {
|
||||||
match := filepath.Join(outputDir, entry.Name())
|
match := ax.Join(outputDir, entry.Name())
|
||||||
// Return first match that looks like an image
|
// Return first match that looks like an image
|
||||||
ext := filepath.Ext(match)
|
if isLinuxKitArtifact(match) {
|
||||||
if ext == ".iso" || ext == ".qcow2" || ext == ".raw" || ext == ".vmdk" || ext == ".vhd" {
|
|
||||||
return match
|
return match
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -243,29 +256,56 @@ func (b *LinuxKitBuilder) getFormatExtension(format string) string {
|
||||||
return ".img.tar.gz"
|
return ".img.tar.gz"
|
||||||
case "aws":
|
case "aws":
|
||||||
return ".raw"
|
return ".raw"
|
||||||
|
case "docker":
|
||||||
|
return ".docker.tar"
|
||||||
|
case "tar":
|
||||||
|
return ".tar"
|
||||||
|
case "kernel+initrd":
|
||||||
|
return "-initrd.img"
|
||||||
default:
|
default:
|
||||||
return "." + strings.TrimSuffix(format, "-bios")
|
return "." + core.TrimSuffix(format, "-bios")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// validateLinuxKitCli checks if the linuxkit CLI is available.
|
// isLinuxKitArtifact reports whether a file path looks like a LinuxKit build output.
|
||||||
func (b *LinuxKitBuilder) validateLinuxKitCli() error {
|
func isLinuxKitArtifact(path string) bool {
|
||||||
// Check PATH first
|
switch {
|
||||||
if _, err := exec.LookPath("linuxkit"); err == nil {
|
case core.HasSuffix(path, ".img.tar.gz"):
|
||||||
return nil
|
return true
|
||||||
|
case core.HasSuffix(path, ".docker.tar"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, "-initrd.img"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".tar"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".iso"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".qcow2"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".raw"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".vmdk"):
|
||||||
|
return true
|
||||||
|
case core.HasSuffix(path, ".vhd"):
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Check common locations
|
// resolveLinuxKitCli returns the executable path for the linuxkit CLI.
|
||||||
paths := []string{
|
func (b *LinuxKitBuilder) resolveLinuxKitCli(paths ...string) (string, error) {
|
||||||
"/usr/local/bin/linuxkit",
|
if len(paths) == 0 {
|
||||||
"/opt/homebrew/bin/linuxkit",
|
paths = []string{
|
||||||
}
|
"/usr/local/bin/linuxkit",
|
||||||
|
"/opt/homebrew/bin/linuxkit",
|
||||||
for _, p := range paths {
|
|
||||||
if io.Local.IsFile(p) {
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return coreerr.E("LinuxKitBuilder.validateLinuxKitCli", "linuxkit CLI not found. Install with: brew install linuxkit (macOS) or see https://github.com/linuxkit/linuxkit", nil)
|
command, err := ax.ResolveCommand("linuxkit", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("LinuxKitBuilder.resolveLinuxKitCli", "linuxkit CLI not found. Install with: brew install linuxkit (macOS) or see https://github.com/linuxkit/linuxkit", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,73 @@
|
||||||
package builders
|
package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestLinuxKitBuilder_Name_Good(t *testing.T) {
|
func setupFakeLinuxKitToolchain(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
if [ "${1:-}" != "build" ]; then
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
config=""
|
||||||
|
dir=""
|
||||||
|
name=""
|
||||||
|
while [ $# -gt 0 ]; do
|
||||||
|
if [ "$1" = "--dir" ]; then
|
||||||
|
shift
|
||||||
|
dir="${1:-}"
|
||||||
|
elif [ "$1" = "--name" ]; then
|
||||||
|
shift
|
||||||
|
name="${1:-}"
|
||||||
|
fi
|
||||||
|
shift
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ -n "$dir" ] && [ -n "$name" ]; then
|
||||||
|
mkdir -p "$dir"
|
||||||
|
printf 'linuxkit image\n' > "$dir/$name.iso"
|
||||||
|
fi
|
||||||
|
`
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(binDir, "linuxkit"), []byte(script), 0o755))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLinuxKit_LinuxKitBuilderName_Good(t *testing.T) {
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
assert.Equal(t, "linuxkit", builder.Name())
|
assert.Equal(t, "linuxkit", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
|
|
||||||
t.Run("detects linuxkit.yml in root", func(t *testing.T) {
|
t.Run("detects linuxkit.yml in root", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "linuxkit.yml"), []byte("kernel:\n image: test\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "linuxkit.yml"), []byte("kernel:\n image: test\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects linuxkit.yaml in root", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "linuxkit.yaml"), []byte("kernel:\n image: test\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -32,9 +78,22 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects .core/linuxkit/*.yml", func(t *testing.T) {
|
t.Run("detects .core/linuxkit/*.yml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
require.NoError(t, os.MkdirAll(lkDir, 0755))
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
err := os.WriteFile(filepath.Join(lkDir, "server.yml"), []byte("kernel:\n image: test\n"), 0644)
|
err := ax.WriteFile(ax.Join(lkDir, "server.yml"), []byte("kernel:\n image: test\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects .core/linuxkit/*.yaml", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
|
err := ax.WriteFile(ax.Join(lkDir, "server.yaml"), []byte("kernel:\n image: test\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -45,11 +104,11 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects .core/linuxkit with multiple yml files", func(t *testing.T) {
|
t.Run("detects .core/linuxkit with multiple yml files", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
require.NoError(t, os.MkdirAll(lkDir, 0755))
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
err := os.WriteFile(filepath.Join(lkDir, "server.yml"), []byte("kernel:\n"), 0644)
|
err := ax.WriteFile(ax.Join(lkDir, "server.yml"), []byte("kernel:\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
err = os.WriteFile(filepath.Join(lkDir, "desktop.yml"), []byte("kernel:\n"), 0644)
|
err = ax.WriteFile(ax.Join(lkDir, "desktop.yml"), []byte("kernel:\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -69,7 +128,7 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for non-LinuxKit project", func(t *testing.T) {
|
t.Run("returns false for non-LinuxKit project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -80,8 +139,8 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for empty .core/linuxkit directory", func(t *testing.T) {
|
t.Run("returns false for empty .core/linuxkit directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
require.NoError(t, os.MkdirAll(lkDir, 0755))
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
detected, err := builder.Detect(fs, dir)
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
|
@ -91,9 +150,22 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false when .core/linuxkit has only non-yml files", func(t *testing.T) {
|
t.Run("returns false when .core/linuxkit has only non-yml files", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
require.NoError(t, os.MkdirAll(lkDir, 0755))
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
err := os.WriteFile(filepath.Join(lkDir, "README.md"), []byte("# LinuxKit\n"), 0644)
|
err := ax.WriteFile(ax.Join(lkDir, "README.md"), []byte("# LinuxKit\n"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.False(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns false when .core/linuxkit has only non-yaml files", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
|
err := ax.WriteFile(ax.Join(lkDir, "README.md"), []byte("# LinuxKit\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -104,11 +176,11 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("ignores subdirectories in .core/linuxkit", func(t *testing.T) {
|
t.Run("ignores subdirectories in .core/linuxkit", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
lkDir := filepath.Join(dir, ".core", "linuxkit")
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
subDir := filepath.Join(lkDir, "subdir")
|
subDir := ax.Join(lkDir, "subdir")
|
||||||
require.NoError(t, os.MkdirAll(subDir, 0755))
|
require.NoError(t, ax.MkdirAll(subDir, 0755))
|
||||||
// Put yml in subdir only, not in lkDir itself
|
// Put yml in subdir only, not in lkDir itself
|
||||||
err := os.WriteFile(filepath.Join(subDir, "server.yml"), []byte("kernel:\n"), 0644)
|
err := ax.WriteFile(ax.Join(subDir, "server.yml"), []byte("kernel:\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
@ -118,7 +190,7 @@ func TestLinuxKitBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_GetFormatExtension_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderGetFormatExtension_Good(t *testing.T) {
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
|
|
@ -138,6 +210,9 @@ func TestLinuxKitBuilder_GetFormatExtension_Good(t *testing.T) {
|
||||||
{"vhd", ".vhd"},
|
{"vhd", ".vhd"},
|
||||||
{"gcp", ".img.tar.gz"},
|
{"gcp", ".img.tar.gz"},
|
||||||
{"aws", ".raw"},
|
{"aws", ".raw"},
|
||||||
|
{"docker", ".docker.tar"},
|
||||||
|
{"tar", ".tar"},
|
||||||
|
{"kernel+initrd", "-initrd.img"},
|
||||||
{"custom", ".custom"},
|
{"custom", ".custom"},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -149,7 +224,7 @@ func TestLinuxKitBuilder_GetFormatExtension_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_GetArtifactPath_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderGetArtifactPath_Good(t *testing.T) {
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
||||||
t.Run("constructs correct path", func(t *testing.T) {
|
t.Run("constructs correct path", func(t *testing.T) {
|
||||||
|
|
@ -161,9 +236,19 @@ func TestLinuxKitBuilder_GetArtifactPath_Good(t *testing.T) {
|
||||||
path := builder.getArtifactPath("/output/linuxkit", "server-arm64", "qcow2-bios")
|
path := builder.getArtifactPath("/output/linuxkit", "server-arm64", "qcow2-bios")
|
||||||
assert.Equal(t, "/output/linuxkit/server-arm64.qcow2", path)
|
assert.Equal(t, "/output/linuxkit/server-arm64.qcow2", path)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("constructs correct path for docker images", func(t *testing.T) {
|
||||||
|
path := builder.getArtifactPath("/output/linuxkit", "server-amd64", "docker")
|
||||||
|
assert.Equal(t, "/output/linuxkit/server-amd64.docker.tar", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("constructs correct path for kernel+initrd images", func(t *testing.T) {
|
||||||
|
path := builder.getArtifactPath("/output/linuxkit", "server-amd64", "kernel+initrd")
|
||||||
|
assert.Equal(t, "/output/linuxkit/server-amd64-initrd.img", path)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_BuildLinuxKitArgs_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderBuildLinuxKitArgs_Good(t *testing.T) {
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
||||||
t.Run("builds args for amd64 without --arch", func(t *testing.T) {
|
t.Run("builds args for amd64 without --arch", func(t *testing.T) {
|
||||||
|
|
@ -186,14 +271,48 @@ func TestLinuxKitBuilder_BuildLinuxKitArgs_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_FindArtifact_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderBuild_ResolvesRelativeConfigPath_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeLinuxKitToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
configPath := ax.Join(projectDir, "deploy", "linuxkit.yml")
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Dir(configPath), 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(configPath, []byte("kernel:\n image: test\n"), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "sample",
|
||||||
|
LinuxKitConfig: "deploy/linuxkit.yml",
|
||||||
|
Formats: []string{"iso"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
expectedPath := ax.Join(outputDir, "sample-amd64.iso")
|
||||||
|
assert.Equal(t, expectedPath, artifacts[0].Path)
|
||||||
|
assert.FileExists(t, expectedPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLinuxKit_LinuxKitBuilderFindArtifact_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
builder := NewLinuxKitBuilder()
|
builder := NewLinuxKitBuilder()
|
||||||
|
|
||||||
t.Run("finds artifact with exact extension", func(t *testing.T) {
|
t.Run("finds artifact with exact extension", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
artifactPath := filepath.Join(dir, "server-amd64.iso")
|
artifactPath := ax.Join(dir, "server-amd64.iso")
|
||||||
require.NoError(t, os.WriteFile(artifactPath, []byte("fake iso"), 0644))
|
require.NoError(t, ax.WriteFile(artifactPath, []byte("fake iso"), 0644))
|
||||||
|
|
||||||
found := builder.findArtifact(fs, dir, "server-amd64", "iso")
|
found := builder.findArtifact(fs, dir, "server-amd64", "iso")
|
||||||
assert.Equal(t, artifactPath, found)
|
assert.Equal(t, artifactPath, found)
|
||||||
|
|
@ -209,16 +328,64 @@ func TestLinuxKitBuilder_FindArtifact_Good(t *testing.T) {
|
||||||
t.Run("finds artifact with alternate naming", func(t *testing.T) {
|
t.Run("finds artifact with alternate naming", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create file matching the name prefix + known image extension
|
// Create file matching the name prefix + known image extension
|
||||||
artifactPath := filepath.Join(dir, "server-amd64.qcow2")
|
artifactPath := ax.Join(dir, "server-amd64.qcow2")
|
||||||
require.NoError(t, os.WriteFile(artifactPath, []byte("fake qcow2"), 0644))
|
require.NoError(t, ax.WriteFile(artifactPath, []byte("fake qcow2"), 0644))
|
||||||
|
|
||||||
found := builder.findArtifact(fs, dir, "server-amd64", "qcow2")
|
found := builder.findArtifact(fs, dir, "server-amd64", "qcow2")
|
||||||
assert.Equal(t, artifactPath, found)
|
assert.Equal(t, artifactPath, found)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("finds cloud image artifacts", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
artifactPath := ax.Join(dir, "server-amd64-gcp.img.tar.gz")
|
||||||
|
require.NoError(t, ax.WriteFile(artifactPath, []byte("fake gcp image"), 0644))
|
||||||
|
|
||||||
|
found := builder.findArtifact(fs, dir, "server-amd64", "gcp")
|
||||||
|
assert.Equal(t, artifactPath, found)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("finds docker artifacts", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
artifactPath := ax.Join(dir, "server-amd64.docker.tar")
|
||||||
|
require.NoError(t, ax.WriteFile(artifactPath, []byte("fake docker tar"), 0644))
|
||||||
|
|
||||||
|
found := builder.findArtifact(fs, dir, "server-amd64", "docker")
|
||||||
|
assert.Equal(t, artifactPath, found)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("finds kernel+initrd artifacts", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
artifactPath := ax.Join(dir, "server-amd64-initrd.img")
|
||||||
|
require.NoError(t, ax.WriteFile(artifactPath, []byte("fake initrd"), 0644))
|
||||||
|
|
||||||
|
found := builder.findArtifact(fs, dir, "server-amd64", "kernel+initrd")
|
||||||
|
assert.Equal(t, artifactPath, found)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLinuxKitBuilder_Interface_Good(t *testing.T) {
|
func TestLinuxKit_LinuxKitBuilderInterface_Good(t *testing.T) {
|
||||||
// Verify LinuxKitBuilder implements Builder interface
|
// Verify LinuxKitBuilder implements Builder interface
|
||||||
var _ build.Builder = (*LinuxKitBuilder)(nil)
|
var _ build.Builder = (*LinuxKitBuilder)(nil)
|
||||||
var _ build.Builder = NewLinuxKitBuilder()
|
var _ build.Builder = NewLinuxKitBuilder()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestLinuxKit_LinuxKitBuilderResolveLinuxKitCli_Good(t *testing.T) {
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "linuxkit")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := builder.resolveLinuxKitCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestLinuxKit_LinuxKitBuilderResolveLinuxKitCli_Bad(t *testing.T) {
|
||||||
|
builder := NewLinuxKitBuilder()
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := builder.resolveLinuxKitCli(ax.Join(t.TempDir(), "missing-linuxkit"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "linuxkit CLI not found")
|
||||||
|
}
|
||||||
|
|
|
||||||
284
pkg/build/builders/node.go
Normal file
284
pkg/build/builders/node.go
Normal file
|
|
@ -0,0 +1,284 @@
|
||||||
|
// Package builders provides build implementations for different project types.
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"path"
|
||||||
|
"runtime"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NodeBuilder builds Node.js projects with the detected package manager.
|
||||||
|
//
|
||||||
|
// b := builders.NewNodeBuilder()
|
||||||
|
type NodeBuilder struct{}
|
||||||
|
|
||||||
|
// NewNodeBuilder creates a new NodeBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewNodeBuilder()
|
||||||
|
func NewNodeBuilder() *NodeBuilder {
|
||||||
|
return &NodeBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "node"
|
||||||
|
func (b *NodeBuilder) Name() string {
|
||||||
|
return "node"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect checks if this builder can handle the project in the given directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
|
func (b *NodeBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
|
return build.IsNodeProject(fs, dir) || b.resolveNodeProjectDir(fs, dir) != "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build runs the project build script once per target and collects artifacts
|
||||||
|
// from the target-specific output directory.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
func (b *NodeBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
|
if cfg == nil {
|
||||||
|
return nil, coreerr.E("NodeBuilder.Build", "config is nil", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(targets) == 0 {
|
||||||
|
targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
}
|
||||||
|
|
||||||
|
outputDir := cfg.OutputDir
|
||||||
|
if outputDir == "" {
|
||||||
|
outputDir = ax.Join(cfg.ProjectDir, "dist")
|
||||||
|
}
|
||||||
|
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
||||||
|
return nil, coreerr.E("NodeBuilder.Build", "failed to create output directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
projectDir := b.resolveNodeProjectDir(cfg.FS, cfg.ProjectDir)
|
||||||
|
if projectDir == "" {
|
||||||
|
projectDir = cfg.ProjectDir
|
||||||
|
}
|
||||||
|
|
||||||
|
packageManager, err := b.resolvePackageManager(cfg.FS, projectDir)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
command, args, err := b.resolveBuildCommand(packageManager)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var artifacts []build.Artifact
|
||||||
|
for _, target := range targets {
|
||||||
|
platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
|
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
||||||
|
return artifacts, coreerr.E("NodeBuilder.Build", "failed to create platform directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
env := appendConfiguredEnv(cfg.Env,
|
||||||
|
core.Sprintf("GOOS=%s", target.OS),
|
||||||
|
core.Sprintf("GOARCH=%s", target.Arch),
|
||||||
|
core.Sprintf("TARGET_OS=%s", target.OS),
|
||||||
|
core.Sprintf("TARGET_ARCH=%s", target.Arch),
|
||||||
|
core.Sprintf("OUTPUT_DIR=%s", outputDir),
|
||||||
|
core.Sprintf("TARGET_DIR=%s", platformDir),
|
||||||
|
)
|
||||||
|
if cfg.Name != "" {
|
||||||
|
env = append(env, core.Sprintf("NAME=%s", cfg.Name))
|
||||||
|
}
|
||||||
|
if cfg.Version != "" {
|
||||||
|
env = append(env, core.Sprintf("VERSION=%s", cfg.Version))
|
||||||
|
}
|
||||||
|
|
||||||
|
output, err := ax.CombinedOutput(ctx, projectDir, env, command, args...)
|
||||||
|
if err != nil {
|
||||||
|
return artifacts, coreerr.E("NodeBuilder.Build", command+" build failed: "+output, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
found := b.findArtifactsForTarget(cfg.FS, outputDir, target)
|
||||||
|
artifacts = append(artifacts, found...)
|
||||||
|
}
|
||||||
|
|
||||||
|
return artifacts, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveNodeProjectDir locates the directory containing package.json.
|
||||||
|
// It prefers the project root, then searches nested directories to depth 2.
|
||||||
|
func (b *NodeBuilder) resolveNodeProjectDir(fs io.Medium, projectDir string) string {
|
||||||
|
if fs.IsFile(ax.Join(projectDir, "package.json")) {
|
||||||
|
return projectDir
|
||||||
|
}
|
||||||
|
|
||||||
|
return b.findNodeProjectDir(fs, projectDir, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// findNodeProjectDir searches for a package.json within nested directories.
|
||||||
|
func (b *NodeBuilder) findNodeProjectDir(fs io.Medium, dir string, depth int) string {
|
||||||
|
if depth >= 2 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := fs.List(dir)
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if !entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
name := entry.Name()
|
||||||
|
if name == "node_modules" || core.HasPrefix(name, ".") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
candidateDir := ax.Join(dir, name)
|
||||||
|
if fs.IsFile(ax.Join(candidateDir, "package.json")) {
|
||||||
|
return candidateDir
|
||||||
|
}
|
||||||
|
|
||||||
|
if nested := b.findNodeProjectDir(fs, candidateDir, depth+1); nested != "" {
|
||||||
|
return nested
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolvePackageManager selects the package manager from lockfiles.
|
||||||
|
//
|
||||||
|
// packageManager := b.resolvePackageManager(io.Local, ".")
|
||||||
|
func (b *NodeBuilder) resolvePackageManager(fs io.Medium, projectDir string) (string, error) {
|
||||||
|
if declared := detectDeclaredPackageManager(fs, projectDir); declared != "" {
|
||||||
|
return declared, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case fs.IsFile(ax.Join(projectDir, "bun.lockb")) || fs.IsFile(ax.Join(projectDir, "bun.lock")):
|
||||||
|
return "bun", nil
|
||||||
|
case fs.IsFile(ax.Join(projectDir, "pnpm-lock.yaml")):
|
||||||
|
return "pnpm", nil
|
||||||
|
case fs.IsFile(ax.Join(projectDir, "yarn.lock")):
|
||||||
|
return "yarn", nil
|
||||||
|
case fs.IsFile(ax.Join(projectDir, "package-lock.json")):
|
||||||
|
return "npm", nil
|
||||||
|
default:
|
||||||
|
return "npm", nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveBuildCommand returns the executable and arguments for the selected package manager.
|
||||||
|
//
|
||||||
|
// command, args, err := b.resolveBuildCommand("npm")
|
||||||
|
func (b *NodeBuilder) resolveBuildCommand(packageManager string) (string, []string, error) {
|
||||||
|
var paths []string
|
||||||
|
switch packageManager {
|
||||||
|
case "bun":
|
||||||
|
paths = []string{"/usr/local/bin/bun", "/opt/homebrew/bin/bun"}
|
||||||
|
case "pnpm":
|
||||||
|
paths = []string{"/usr/local/bin/pnpm", "/opt/homebrew/bin/pnpm"}
|
||||||
|
case "yarn":
|
||||||
|
paths = []string{"/usr/local/bin/yarn", "/opt/homebrew/bin/yarn"}
|
||||||
|
default:
|
||||||
|
paths = []string{"/usr/local/bin/npm", "/opt/homebrew/bin/npm"}
|
||||||
|
packageManager = "npm"
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand(packageManager, paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, coreerr.E("NodeBuilder.resolveBuildCommand", packageManager+" CLI not found", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch packageManager {
|
||||||
|
case "yarn":
|
||||||
|
return command, []string{"build"}, nil
|
||||||
|
default:
|
||||||
|
return command, []string{"run", "build"}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// findArtifactsForTarget searches for build outputs in the target-specific output directory.
|
||||||
|
//
|
||||||
|
// artifacts := b.findArtifactsForTarget(io.Local, "dist", build.Target{OS: "linux", Arch: "amd64"})
|
||||||
|
func (b *NodeBuilder) findArtifactsForTarget(fs io.Medium, outputDir string, target build.Target) []build.Artifact {
|
||||||
|
var artifacts []build.Artifact
|
||||||
|
|
||||||
|
platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
|
if fs.IsDir(platformDir) {
|
||||||
|
entries, err := fs.List(platformDir)
|
||||||
|
if err == nil {
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
if target.OS == "darwin" && core.HasSuffix(entry.Name(), ".app") {
|
||||||
|
artifacts = append(artifacts, build.Artifact{
|
||||||
|
Path: ax.Join(platformDir, entry.Name()),
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
name := entry.Name()
|
||||||
|
if core.HasPrefix(name, ".") || name == "CHECKSUMS.txt" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts = append(artifacts, build.Artifact{
|
||||||
|
Path: ax.Join(platformDir, name),
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(artifacts) > 0 {
|
||||||
|
return artifacts
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
patterns := []string{
|
||||||
|
core.Sprintf("*-%s-%s*", target.OS, target.Arch),
|
||||||
|
core.Sprintf("*_%s_%s*", target.OS, target.Arch),
|
||||||
|
core.Sprintf("*-%s*", target.Arch),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pattern := range patterns {
|
||||||
|
entries, err := fs.List(outputDir)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, entry := range entries {
|
||||||
|
match := entry.Name()
|
||||||
|
matched, _ := path.Match(pattern, match)
|
||||||
|
if !matched {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fullPath := ax.Join(outputDir, match)
|
||||||
|
if fs.IsDir(fullPath) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts = append(artifacts, build.Artifact{
|
||||||
|
Path: fullPath,
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if len(artifacts) > 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return artifacts
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile-time assertion that *NodeBuilder satisfies the build.Builder
// interface; fails the build (not a test) if a method is missing.
var _ build.Builder = (*NodeBuilder)(nil)
|
||||||
283
pkg/build/builders/node_test.go
Normal file
283
pkg/build/builders/node_test.go
Normal file
|
|
@ -0,0 +1,283 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupFakeNodeToolchain writes executable shell shims named npm/pnpm/yarn/bun
// into binDir. Each shim appends its invocation (name, args, selected env
// vars, then the full sorted environment) to $NODE_BUILD_LOG_FILE when set,
// and fabricates one executable artifact in $TARGET_DIR (or a dir derived
// from $OUTPUT_DIR/$GOOS/$GOARCH) named after $NAME, defaulting to "nodeapp".
func setupFakeNodeToolchain(t *testing.T, binDir string) {
	t.Helper()

	// NOTE: this is a runtime shell script, executed by the builder under
	// test; its content must stay byte-exact.
	script := `#!/bin/sh
set -eu

log_file="${NODE_BUILD_LOG_FILE:-}"
if [ -n "$log_file" ]; then
printf '%s\n' "$(basename "$0")" >> "$log_file"
printf '%s\n' "$@" >> "$log_file"
printf '%s\n' "GOOS=${GOOS:-}" >> "$log_file"
printf '%s\n' "GOARCH=${GOARCH:-}" >> "$log_file"
printf '%s\n' "OUTPUT_DIR=${OUTPUT_DIR:-}" >> "$log_file"
printf '%s\n' "TARGET_DIR=${TARGET_DIR:-}" >> "$log_file"
env | sort >> "$log_file"
fi

output_dir="${OUTPUT_DIR:-dist}"
platform_dir="${TARGET_DIR:-$output_dir/${GOOS:-}_${GOARCH:-}}"
mkdir -p "$platform_dir"

name="${NAME:-nodeapp}"
printf 'fake node artifact\n' > "$platform_dir/$name"
chmod +x "$platform_dir/$name"
`

	// Install the same shim under every package-manager name the builder
	// may resolve.
	for _, name := range []string{"npm", "pnpm", "yarn", "bun"} {
		require.NoError(t, ax.WriteFile(ax.Join(binDir, name), []byte(script), 0o755))
	}
}
|
||||||
|
|
||||||
|
func setupNodeTestProject(t *testing.T) string {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte(`{"name":"testapp","scripts":{"build":"node build.js"}}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "build.js"), []byte(`console.log("build")`), 0o644))
|
||||||
|
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNode_NodeBuilderName_Good(t *testing.T) {
|
||||||
|
builder := NewNodeBuilder()
|
||||||
|
assert.Equal(t, "node", builder.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestNode_NodeBuilderDetect_Good exercises project detection: a root-level
// package.json, an empty directory, and a package.json nested below the root.
func TestNode_NodeBuilderDetect_Good(t *testing.T) {
	fs := io.Local

	t.Run("detects package.json projects", func(t *testing.T) {
		dir := t.TempDir()
		require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte("{}"), 0o644))

		builder := NewNodeBuilder()
		detected, err := builder.Detect(fs, dir)
		assert.NoError(t, err)
		assert.True(t, detected)
	})

	t.Run("returns false for empty directory", func(t *testing.T) {
		builder := NewNodeBuilder()
		detected, err := builder.Detect(fs, t.TempDir())
		assert.NoError(t, err)
		assert.False(t, detected)
	})

	t.Run("detects nested package.json projects", func(t *testing.T) {
		// Manifest two levels down — detection must recurse.
		dir := t.TempDir()
		nested := ax.Join(dir, "apps", "web")
		require.NoError(t, ax.MkdirAll(nested, 0o755))
		require.NoError(t, ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0o644))

		builder := NewNodeBuilder()
		detected, err := builder.Detect(fs, dir)
		assert.NoError(t, err)
		assert.True(t, detected)
	})
}
|
||||||
|
|
||||||
|
// TestNode_NodeBuilderBuild_Good runs a full build against stubbed package
// managers and asserts the produced artifact, the chosen manager (pnpm via
// lockfile), the command line, and the environment passed to the toolchain.
func TestNode_NodeBuilderBuild_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	// Prepend stub shims to PATH so no real Node toolchain is required.
	binDir := t.TempDir()
	setupFakeNodeToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	projectDir := setupNodeTestProject(t)
	outputDir := t.TempDir()
	logDir := t.TempDir()
	logPath := ax.Join(logDir, "node.log")
	// The shims append every invocation to this file for later inspection.
	t.Setenv("NODE_BUILD_LOG_FILE", logPath)

	// A pnpm lockfile should make the builder select pnpm.
	require.NoError(t, ax.WriteFile(ax.Join(projectDir, "pnpm-lock.yaml"), []byte("lockfile"), 0o644))

	builder := NewNodeBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Name:       "testapp",
		Version:    "v1.2.3",
		Env:        []string{"FOO=bar"},
	}

	targets := []build.Target{
		{OS: "linux", Arch: "amd64"},
	}

	artifacts, err := builder.Build(context.Background(), cfg, targets)
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.FileExists(t, artifacts[0].Path)
	assert.Equal(t, "linux", artifacts[0].OS)
	assert.Equal(t, "amd64", artifacts[0].Arch)

	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)

	// Log layout (written by the shim): tool name, args, then env lines.
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	require.GreaterOrEqual(t, len(lines), 5)
	assert.Equal(t, "pnpm", lines[0])
	assert.Equal(t, "run", lines[1])
	assert.Equal(t, "build", lines[2])
	assert.Equal(t, "GOOS=linux", lines[3])
	assert.Equal(t, "GOARCH=amd64", lines[4])
	assert.Contains(t, lines, "OUTPUT_DIR="+outputDir)
	assert.Contains(t, lines, "TARGET_DIR="+ax.Join(outputDir, "linux_amd64"))
	// Caller-supplied env must be forwarded to the build command.
	assert.Contains(t, string(content), "FOO=bar")
}
|
||||||
|
|
||||||
|
// TestNode_ResolvePackageManager_Good checks that an explicit packageManager
// declaration in package.json beats lockfile detection, and that version pins
// are stripped from the declared value.
func TestNode_ResolvePackageManager_Good(t *testing.T) {
	fs := io.Local
	builder := NewNodeBuilder()

	t.Run("prefers packageManager declaration over lockfiles", func(t *testing.T) {
		dir := t.TempDir()
		// Declaration says pnpm even though a bun lockfile is present.
		require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte(`{"packageManager":"pnpm@9.12.0"}`), 0o644))
		require.NoError(t, ax.WriteFile(ax.Join(dir, "bun.lockb"), []byte(""), 0o644))

		result, err := builder.resolvePackageManager(fs, dir)
		require.NoError(t, err)
		assert.Equal(t, "pnpm", result)
	})

	t.Run("normalises package manager version pins", func(t *testing.T) {
		dir := t.TempDir()
		require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte(`{"packageManager":"bun@1.1.38"}`), 0o644))

		result, err := builder.resolvePackageManager(fs, dir)
		require.NoError(t, err)
		// "@1.1.38" must be stripped.
		assert.Equal(t, "bun", result)
	})
}
|
||||||
|
|
||||||
|
// TestNode_NodeBuilderFindArtifactsForTarget_Good covers the three discovery
// strategies: files inside the <os>_<arch> subdirectory, darwin .app bundle
// directories, and pattern-matched filenames at the output root.
func TestNode_NodeBuilderFindArtifactsForTarget_Good(t *testing.T) {
	fs := io.Local
	builder := NewNodeBuilder()

	t.Run("finds files in platform subdirectory", func(t *testing.T) {
		dir := t.TempDir()
		platformDir := ax.Join(dir, "linux_amd64")
		require.NoError(t, ax.MkdirAll(platformDir, 0o755))
		artifactPath := ax.Join(platformDir, "testapp")
		require.NoError(t, ax.WriteFile(artifactPath, []byte("binary"), 0o755))

		artifacts := builder.findArtifactsForTarget(fs, dir, build.Target{OS: "linux", Arch: "amd64"})
		require.Len(t, artifacts, 1)
		assert.Equal(t, artifactPath, artifacts[0].Path)
	})

	t.Run("finds darwin app bundles", func(t *testing.T) {
		// An .app directory counts as an artifact on darwin.
		dir := t.TempDir()
		platformDir := ax.Join(dir, "darwin_arm64")
		appDir := ax.Join(platformDir, "TestApp.app")
		require.NoError(t, ax.MkdirAll(appDir, 0o755))

		artifacts := builder.findArtifactsForTarget(fs, dir, build.Target{OS: "darwin", Arch: "arm64"})
		require.Len(t, artifacts, 1)
		assert.Equal(t, appDir, artifacts[0].Path)
	})

	t.Run("falls back to name patterns in root", func(t *testing.T) {
		// No platform subdirectory: the "*-<os>-<arch>*" pattern should hit.
		dir := t.TempDir()
		artifactPath := ax.Join(dir, "testapp-linux-amd64")
		require.NoError(t, ax.WriteFile(artifactPath, []byte("binary"), 0o755))

		artifacts := builder.findArtifactsForTarget(fs, dir, build.Target{OS: "linux", Arch: "amd64"})
		require.NotEmpty(t, artifacts)
		assert.Equal(t, artifactPath, artifacts[0].Path)
	})
}
|
||||||
|
|
||||||
|
func TestNode_NodeBuilderInterface_Good(t *testing.T) {
|
||||||
|
var _ build.Builder = (*NodeBuilder)(nil)
|
||||||
|
var _ build.Builder = NewNodeBuilder()
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestNode_NodeBuilderBuildDefaults_Good builds with nil targets and no Name,
// expecting the builder to default to the host OS/arch.
func TestNode_NodeBuilderBuildDefaults_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	// Stub package managers on PATH so no real toolchain is needed.
	binDir := t.TempDir()
	setupFakeNodeToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	projectDir := setupNodeTestProject(t)
	outputDir := t.TempDir()

	builder := NewNodeBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Env:        []string{"FOO=bar"},
	}

	// nil targets: the builder should fall back to runtime.GOOS/GOARCH.
	artifacts, err := builder.Build(context.Background(), cfg, nil)
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.Equal(t, runtime.GOOS, artifacts[0].OS)
	assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
}
|
||||||
|
|
||||||
|
// TestNode_NodeBuilderBuild_Good_NestedProject verifies that when the only
// package.json lives below the project root (apps/web), the build command is
// executed from that nested directory (checked via PWD in the shim log).
func TestNode_NodeBuilderBuild_Good_NestedProject(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	binDir := t.TempDir()
	setupFakeNodeToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	// Project root has no manifest; the Node project sits in apps/web.
	projectDir := t.TempDir()
	nestedDir := ax.Join(projectDir, "apps", "web")
	require.NoError(t, ax.MkdirAll(nestedDir, 0o755))
	require.NoError(t, ax.WriteFile(ax.Join(nestedDir, "package.json"), []byte(`{"name":"nested-app","scripts":{"build":"node build.js"}}`), 0o644))
	require.NoError(t, ax.WriteFile(ax.Join(nestedDir, "build.js"), []byte(`console.log("nested build")`), 0o644))

	outputDir := t.TempDir()
	logDir := t.TempDir()
	logPath := ax.Join(logDir, "node-nested.log")
	t.Setenv("NODE_BUILD_LOG_FILE", logPath)

	builder := NewNodeBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Name:       "nested-app",
		Version:    "v1.2.3",
	}

	artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.FileExists(t, artifacts[0].Path)

	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)
	// PWD in the shim's env dump proves the command ran in the nested dir.
	assert.Contains(t, string(content), "PWD="+nestedDir)
	assert.Contains(t, string(content), "GOOS=linux")
	assert.Contains(t, string(content), "GOARCH=amd64")
}
|
||||||
52
pkg/build/builders/package_manager.go
Normal file
52
pkg/build/builders/package_manager.go
Normal file
|
|
@ -0,0 +1,52 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// packageJSONManifest is the subset of package.json this package reads.
type packageJSONManifest struct {
	// PackageManager mirrors the "packageManager" field, which may carry a
	// version pin, e.g. "pnpm@9.12.0".
	PackageManager string `json:"packageManager"`
}
|
||||||
|
|
||||||
|
// detectDeclaredPackageManager reads package.json and returns the declared package manager.
|
||||||
|
//
|
||||||
|
// manager := detectDeclaredPackageManager(io.Local, ".")
|
||||||
|
func detectDeclaredPackageManager(fs io.Medium, dir string) string {
|
||||||
|
content, err := fs.Read(ax.Join(dir, "package.json"))
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var manifest packageJSONManifest
|
||||||
|
if err := json.Unmarshal([]byte(content), &manifest); err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalisePackageManager(manifest.PackageManager)
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalisePackageManager trims any pinned version from a packageManager
// declaration (e.g. "pnpm@9.12.0" -> "pnpm") and validates the result.
// It returns "" for blank input or an unrecognised manager.
//
// manager := normalisePackageManager("pnpm@9.12.0")
func normalisePackageManager(value string) string {
	value = strings.TrimSpace(value)
	if value == "" {
		return ""
	}

	// strings.Cut returns the whole input as the prefix when "@" is absent,
	// so no separate "not found" branch is needed.
	manager, _, _ := strings.Cut(value, "@")

	switch manager {
	case "bun", "pnpm", "yarn", "npm":
		return manager
	default:
		return ""
	}
}
|
||||||
291
pkg/build/builders/php.go
Normal file
291
pkg/build/builders/php.go
Normal file
|
|
@ -0,0 +1,291 @@
|
||||||
|
// Package builders provides build implementations for different project types.
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
stdio "io"
|
||||||
|
"runtime"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PHPBuilder builds PHP projects with composer.json manifests.
// It holds no state, so the zero value is usable.
//
// b := builders.NewPHPBuilder()
type PHPBuilder struct{}
|
||||||
|
|
||||||
|
// NewPHPBuilder creates a new PHP builder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewPHPBuilder()
|
||||||
|
func NewPHPBuilder() *PHPBuilder {
|
||||||
|
return &PHPBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the builder's identifier, the constant string "php".
//
// name := b.Name() // → "php"
func (b *PHPBuilder) Name() string {
	return "php"
}
|
||||||
|
|
||||||
|
// Detect checks if this builder can handle the project in the given directory.
// It delegates entirely to build.IsPHPProject and never returns a non-nil
// error.
//
// ok, err := b.Detect(io.Local, ".")
func (b *PHPBuilder) Detect(fs io.Medium, dir string) (bool, error) {
	return build.IsPHPProject(fs, dir), nil
}
|
||||||
|
|
||||||
|
// Build installs dependencies and produces either composer-generated artifacts
// or a deterministic bundle when the project does not emit build outputs.
//
// Flow: validate config → default targets to the host platform → ensure the
// output dir → resolve composer → one composer install → then, per target,
// optionally run "composer run-script build" and collect artifacts, falling
// back to zipping the project tree when nothing was produced.
//
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
func (b *PHPBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
	if cfg == nil {
		return nil, coreerr.E("PHPBuilder.Build", "config is nil", nil)
	}

	// No explicit targets: build for the host platform.
	if len(targets) == 0 {
		targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
	}

	outputDir := cfg.OutputDir
	if outputDir == "" {
		outputDir = ax.Join(cfg.ProjectDir, "dist")
	}
	if err := cfg.FS.EnsureDir(outputDir); err != nil {
		return nil, coreerr.E("PHPBuilder.Build", "failed to create output directory", err)
	}

	composerCommand, err := b.resolveComposerCli()
	if err != nil {
		return nil, err
	}

	// Dependencies are installed once, not per target.
	if err := b.installDependencies(ctx, cfg, composerCommand); err != nil {
		return nil, err
	}

	hasBuildScript, err := b.hasBuildScript(cfg.FS, cfg.ProjectDir)
	if err != nil {
		return nil, err
	}

	var artifacts []build.Artifact
	for _, target := range targets {
		platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
		if err := cfg.FS.EnsureDir(platformDir); err != nil {
			return artifacts, coreerr.E("PHPBuilder.Build", "failed to create platform directory", err)
		}

		// Expose the target and output locations to the build script via env.
		env := appendConfiguredEnv(cfg.Env,
			core.Sprintf("GOOS=%s", target.OS),
			core.Sprintf("GOARCH=%s", target.Arch),
			core.Sprintf("TARGET_OS=%s", target.OS),
			core.Sprintf("TARGET_ARCH=%s", target.Arch),
			core.Sprintf("OUTPUT_DIR=%s", outputDir),
			core.Sprintf("TARGET_DIR=%s", platformDir),
		)
		if cfg.Name != "" {
			env = append(env, core.Sprintf("NAME=%s", cfg.Name))
		}
		if cfg.Version != "" {
			env = append(env, core.Sprintf("VERSION=%s", cfg.Version))
		}

		if hasBuildScript {
			output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, env, composerCommand, "run-script", "build")
			if err != nil {
				return artifacts, coreerr.E("PHPBuilder.Build", "composer build failed: "+output, err)
			}
		}

		// Artifact discovery is shared with the Node builder (same layout).
		found := (&NodeBuilder{}).findArtifactsForTarget(cfg.FS, outputDir, target)
		if len(found) == 0 {
			// No build outputs: fall back to zipping the project tree.
			bundlePath := ax.Join(platformDir, b.bundleName(cfg)+".zip")
			if err := b.bundleProject(cfg.FS, cfg.ProjectDir, outputDir, bundlePath); err != nil {
				return artifacts, err
			}

			found = append(found, build.Artifact{
				Path: bundlePath,
				OS:   target.OS,
				Arch: target.Arch,
			})
		}

		artifacts = append(artifacts, found...)
	}

	return artifacts, nil
}
|
||||||
|
|
||||||
|
// installDependencies runs composer install once before the per-target build.
|
||||||
|
func (b *PHPBuilder) installDependencies(ctx context.Context, cfg *build.Config, composerCommand string) error {
|
||||||
|
args := []string{"install", "--no-interaction", "--no-dev", "--prefer-dist", "--optimize-autoloader"}
|
||||||
|
output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, cfg.Env, composerCommand, args...)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PHPBuilder.installDependencies", "composer install failed: "+output, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasBuildScript reports whether composer.json defines a build script.
|
||||||
|
func (b *PHPBuilder) hasBuildScript(fs io.Medium, projectDir string) (bool, error) {
|
||||||
|
content, err := fs.Read(ax.Join(projectDir, "composer.json"))
|
||||||
|
if err != nil {
|
||||||
|
return false, coreerr.E("PHPBuilder.hasBuildScript", "failed to read composer.json", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var manifest struct {
|
||||||
|
Scripts map[string]any `json:"scripts"`
|
||||||
|
}
|
||||||
|
if err := ax.JSONUnmarshal([]byte(content), &manifest); err != nil {
|
||||||
|
return false, coreerr.E("PHPBuilder.hasBuildScript", "failed to parse composer.json", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, ok := manifest.Scripts["build"]
|
||||||
|
return ok, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleName returns the bundle filename stem.
|
||||||
|
func (b *PHPBuilder) bundleName(cfg *build.Config) string {
|
||||||
|
if cfg.Name != "" {
|
||||||
|
return cfg.Name
|
||||||
|
}
|
||||||
|
if cfg.ProjectDir != "" {
|
||||||
|
return ax.Base(cfg.ProjectDir)
|
||||||
|
}
|
||||||
|
return "php-app"
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleProject creates a zip bundle containing the project tree.
|
||||||
|
func (b *PHPBuilder) bundleProject(fs io.Medium, projectDir, outputDir, bundlePath string) error {
|
||||||
|
if err := fs.EnsureDir(ax.Dir(bundlePath)); err != nil {
|
||||||
|
return coreerr.E("PHPBuilder.bundleProject", "failed to create bundle directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := fs.Create(bundlePath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PHPBuilder.bundleProject", "failed to create bundle file", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
writer := zip.NewWriter(file)
|
||||||
|
defer func() { _ = writer.Close() }()
|
||||||
|
|
||||||
|
return b.writeZipTree(fs, writer, projectDir, projectDir, outputDir, bundlePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeZipTree walks the project directory recursively and writes every file
// into the zip bundle. Entries are sorted by name per directory so the
// archive layout is deterministic; excluded paths (output dir, the bundle
// file itself, VCS/tool metadata) are skipped via isExcludedPath.
func (b *PHPBuilder) writeZipTree(fs io.Medium, writer *zip.Writer, rootDir, currentDir, outputDir, bundlePath string) error {
	entries, err := fs.List(currentDir)
	if err != nil {
		return coreerr.E("PHPBuilder.writeZipTree", "failed to list directory", err)
	}

	// Stable ordering for reproducible archives.
	sort.Slice(entries, func(i, j int) bool {
		return entries[i].Name() < entries[j].Name()
	})

	for _, entry := range entries {
		entryPath := ax.Join(currentDir, entry.Name())
		if b.isExcludedPath(entryPath, outputDir, bundlePath) {
			continue
		}

		if entry.IsDir() {
			// Recurse; directories get no explicit zip entry of their own.
			if err := b.writeZipTree(fs, writer, rootDir, entryPath, outputDir, bundlePath); err != nil {
				return err
			}
			continue
		}

		// Archive names are relative to the project root, slash-separated.
		relPath, err := ax.Rel(rootDir, entryPath)
		if err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to relativise bundle path", err)
		}

		info, err := fs.Stat(entryPath)
		if err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to stat bundle entry", err)
		}

		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to create zip header", err)
		}
		header.Name = strings.ReplaceAll(relPath, ax.DS(), "/")
		header.Method = zip.Deflate
		// Fixed timestamp keeps the archive byte-for-byte reproducible.
		// NOTE(review): SetModTime is deprecated in archive/zip; switching to
		// assigning header.Modified may change the emitted extra fields —
		// confirm determinism before changing.
		header.SetModTime(deterministicZipTime)

		zipEntry, err := writer.CreateHeader(header)
		if err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to create zip entry", err)
		}

		source, err := fs.Open(entryPath)
		if err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to open bundle entry", err)
		}

		// Close explicitly (not deferred) so file descriptors are released
		// per entry rather than accumulating across the whole recursion.
		if _, err := stdio.Copy(zipEntry, source); err != nil {
			_ = source.Close()
			return coreerr.E("PHPBuilder.writeZipTree", "failed to write bundle entry", err)
		}
		if err := source.Close(); err != nil {
			return coreerr.E("PHPBuilder.writeZipTree", "failed to close bundle entry", err)
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// isExcludedPath reports whether a path should be omitted from the bundle.
|
||||||
|
func (b *PHPBuilder) isExcludedPath(path, outputDir, bundlePath string) bool {
|
||||||
|
cleanPath := ax.Clean(path)
|
||||||
|
cleanOutputDir := ax.Clean(outputDir)
|
||||||
|
cleanBundlePath := ax.Clean(bundlePath)
|
||||||
|
|
||||||
|
if cleanPath == cleanOutputDir || strings.HasPrefix(cleanPath, cleanOutputDir+ax.DS()) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if cleanPath == cleanBundlePath {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
base := ax.Base(cleanPath)
|
||||||
|
switch base {
|
||||||
|
case ".git", ".core":
|
||||||
|
return true
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveComposerCli returns the executable path for the composer CLI.
|
||||||
|
func (b *PHPBuilder) resolveComposerCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/composer",
|
||||||
|
"/opt/homebrew/bin/composer",
|
||||||
|
"/usr/bin/composer",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("composer", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("PHPBuilder.resolveComposerCli", "composer CLI not found. Install it from https://getcomposer.org/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile-time assertion that *PHPBuilder satisfies the build.Builder
// interface; fails the build (not a test) if a method is missing.
var _ build.Builder = (*PHPBuilder)(nil)
|
||||||
214
pkg/build/builders/php_test.go
Normal file
214
pkg/build/builders/php_test.go
Normal file
|
|
@ -0,0 +1,214 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupFakePHPToolchain writes an executable "composer" shim into binDir.
// The shim appends its invocation (name, args, selected env vars, then the
// full sorted environment) to $PHP_BUILD_LOG_FILE when set, and fabricates an
// executable artifact named $NAME (default "phpapp") in $TARGET_DIR — but
// only for the "run-script build" invocation, so a plain "install" produces
// no outputs and exercises the bundle fallback.
func setupFakePHPToolchain(t *testing.T, binDir string) {
	t.Helper()

	// NOTE: this is a runtime shell script, executed by the builder under
	// test; its content must stay byte-exact.
	script := `#!/bin/sh
set -eu

log_file="${PHP_BUILD_LOG_FILE:-}"
if [ -n "$log_file" ]; then
printf '%s\n' "$(basename "$0")" >> "$log_file"
printf '%s\n' "$@" >> "$log_file"
printf '%s\n' "GOOS=${GOOS:-}" >> "$log_file"
printf '%s\n' "GOARCH=${GOARCH:-}" >> "$log_file"
printf '%s\n' "OUTPUT_DIR=${OUTPUT_DIR:-}" >> "$log_file"
printf '%s\n' "TARGET_DIR=${TARGET_DIR:-}" >> "$log_file"
env | sort >> "$log_file"
fi

output_dir="${OUTPUT_DIR:-dist}"
platform_dir="${TARGET_DIR:-$output_dir/${GOOS:-}_${GOARCH:-}}"
mkdir -p "$platform_dir"

if [ "${1:-}" = "run-script" ] && [ "${2:-}" = "build" ]; then
artifact="${platform_dir}/${NAME:-phpapp}"
printf 'fake php artifact\n' > "$artifact"
chmod +x "$artifact"
fi
`

	require.NoError(t, ax.WriteFile(ax.Join(binDir, "composer"), []byte(script), 0o755))
}
|
||||||
|
|
||||||
|
func setupPHPTestProject(t *testing.T, withBuildScript bool) string {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
composerJSON := `{"name":"test/php-app"}`
|
||||||
|
if withBuildScript {
|
||||||
|
composerJSON = `{"name":"test/php-app","scripts":{"build":"php build.php"}}`
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "composer.json"), []byte(composerJSON), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "index.php"), []byte("<?php echo 'hello';"), 0o644))
|
||||||
|
if withBuildScript {
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "build.php"), []byte("<?php echo 'build';"), 0o644))
|
||||||
|
}
|
||||||
|
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPHP_PHPBuilderName_Good(t *testing.T) {
|
||||||
|
builder := NewPHPBuilder()
|
||||||
|
assert.Equal(t, "php", builder.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestPHP_PHPBuilderDetect_Good checks composer.json-based project detection
// against a directory with a manifest and an empty directory.
func TestPHP_PHPBuilderDetect_Good(t *testing.T) {
	fs := io.Local

	t.Run("detects composer.json projects", func(t *testing.T) {
		dir := t.TempDir()
		require.NoError(t, ax.WriteFile(ax.Join(dir, "composer.json"), []byte("{}"), 0o644))

		builder := NewPHPBuilder()
		detected, err := builder.Detect(fs, dir)
		assert.NoError(t, err)
		assert.True(t, detected)
	})

	t.Run("returns false for empty directory", func(t *testing.T) {
		builder := NewPHPBuilder()
		detected, err := builder.Detect(fs, t.TempDir())
		assert.NoError(t, err)
		assert.False(t, detected)
	})
}
|
||||||
|
|
||||||
|
// TestPHP_PHPBuilderBuild_Good runs a full build against the fake composer
// toolchain and asserts both the produced artifact metadata and the exact
// invocation (argv and environment) the builder handed to composer, as
// captured in the log file the fake script writes to $PHP_BUILD_LOG_FILE.
func TestPHP_PHPBuilderBuild_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	// Put the fake composer ahead of any real one on PATH.
	binDir := t.TempDir()
	setupFakePHPToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	// Project with a composer "build" script so the builder takes the
	// script path rather than the fallback bundle path.
	projectDir := setupPHPTestProject(t, true)
	outputDir := t.TempDir()
	logDir := t.TempDir()
	logPath := ax.Join(logDir, "php.log")
	t.Setenv("PHP_BUILD_LOG_FILE", logPath)

	builder := NewPHPBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Name:       "testapp",
		Version:    "v1.2.3",
		Env:        []string{"FOO=bar"},
	}

	targets := []build.Target{{OS: "linux", Arch: "amd64"}}

	artifacts, err := builder.Build(context.Background(), cfg, targets)
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.FileExists(t, artifacts[0].Path)
	assert.Equal(t, "linux", artifacts[0].OS)
	assert.Equal(t, "amd64", artifacts[0].Arch)

	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)

	// The fake script logs its own name first, then argv, then selected
	// environment variables — so the first two lines are positional.
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	require.GreaterOrEqual(t, len(lines), 6)
	assert.Equal(t, "composer", lines[0])
	assert.Equal(t, "install", lines[1])
	assert.Contains(t, lines, "GOOS=linux")
	assert.Contains(t, lines, "GOARCH=amd64")
	assert.Contains(t, lines, "OUTPUT_DIR="+outputDir)
	assert.Contains(t, lines, "TARGET_DIR="+ax.Join(outputDir, "linux_amd64"))
	// Caller-supplied env must be forwarded to the toolchain.
	assert.Contains(t, string(content), "FOO=bar")
}
|
||||||
|
|
||||||
|
// TestPHP_PHPBuilderBuildFallbackBundle_Good exercises the path taken when
// the project has no composer "build" script: the builder falls back to
// producing a zip bundle of the project tree. It verifies the artifact is
// a .zip, that entries carry the fixed deterministic timestamp, and that
// composer.json made it into the archive.
func TestPHP_PHPBuilderBuildFallbackBundle_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	binDir := t.TempDir()
	setupFakePHPToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	// withBuildScript=false → no "build" script → fallback bundling.
	projectDir := setupPHPTestProject(t, false)
	outputDir := t.TempDir()

	builder := NewPHPBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Name:       "testapp",
		Env:        []string{"FOO=bar"},
	}

	artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.FileExists(t, artifacts[0].Path)
	assert.Equal(t, ".zip", ax.Ext(artifacts[0].Path))

	reader, err := zip.OpenReader(artifacts[0].Path)
	require.NoError(t, err)
	defer func() { _ = reader.Close() }()

	var foundComposer bool
	for _, file := range reader.File {
		// Every entry must be stamped with the fixed timestamp so the
		// bundle is reproducible across builds.
		assert.True(t, file.Modified.Equal(deterministicZipTime))
		if file.Name == "composer.json" {
			foundComposer = true
			break
		}
	}
	assert.True(t, foundComposer)
}
|
||||||
|
|
||||||
|
func TestPHP_PHPBuilderBuildDefaults_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakePHPToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupPHPTestProject(t, false)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewPHPBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestPHP_PHPBuilderInterface_Good is a compile-time assertion that
// PHPBuilder satisfies build.Builder, both as a pointer type and as the
// concrete value returned by the constructor.
func TestPHP_PHPBuilderInterface_Good(t *testing.T) {
	var _ build.Builder = (*PHPBuilder)(nil)
	var _ build.Builder = NewPHPBuilder()
}
|
||||||
186
pkg/build/builders/python.go
Normal file
186
pkg/build/builders/python.go
Normal file
|
|
@ -0,0 +1,186 @@
|
||||||
|
// Package builders provides build implementations for different project types.
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
stdio "io"
|
||||||
|
"runtime"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PythonBuilder builds Python projects with pyproject.toml or requirements.txt markers.
// It does not invoke a Python toolchain; Build packages the project tree
// into a deterministic zip bundle per target. The struct is stateless, so
// a single instance can be reused.
//
// b := builders.NewPythonBuilder()
type PythonBuilder struct{}
|
||||||
|
|
||||||
|
// NewPythonBuilder creates a new PythonBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewPythonBuilder()
|
||||||
|
func NewPythonBuilder() *PythonBuilder {
|
||||||
|
return &PythonBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "python"
|
||||||
|
func (b *PythonBuilder) Name() string {
|
||||||
|
return "python"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect checks if this builder can handle the project in the given directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
|
func (b *PythonBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
|
return build.IsPythonProject(fs, dir), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build packages the Python project into a deterministic zip bundle per target.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
func (b *PythonBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
|
if cfg == nil {
|
||||||
|
return nil, coreerr.E("PythonBuilder.Build", "config is nil", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(targets) == 0 {
|
||||||
|
targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
}
|
||||||
|
|
||||||
|
outputDir := cfg.OutputDir
|
||||||
|
if outputDir == "" {
|
||||||
|
outputDir = ax.Join(cfg.ProjectDir, "dist")
|
||||||
|
}
|
||||||
|
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
||||||
|
return nil, coreerr.E("PythonBuilder.Build", "failed to create output directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var artifacts []build.Artifact
|
||||||
|
for _, target := range targets {
|
||||||
|
platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
|
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
||||||
|
return artifacts, coreerr.E("PythonBuilder.Build", "failed to create platform directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
bundlePath := ax.Join(platformDir, b.bundleName(cfg)+".zip")
|
||||||
|
if err := b.bundleProject(cfg.FS, cfg.ProjectDir, outputDir, bundlePath); err != nil {
|
||||||
|
return artifacts, err
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts = append(artifacts, build.Artifact{
|
||||||
|
Path: bundlePath,
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return artifacts, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleName returns the bundle filename stem.
|
||||||
|
func (b *PythonBuilder) bundleName(cfg *build.Config) string {
|
||||||
|
if cfg.Name != "" {
|
||||||
|
return cfg.Name
|
||||||
|
}
|
||||||
|
if cfg.ProjectDir != "" {
|
||||||
|
return ax.Base(cfg.ProjectDir)
|
||||||
|
}
|
||||||
|
return "python-app"
|
||||||
|
}
|
||||||
|
|
||||||
|
// bundleProject creates a zip bundle containing the Python project tree.
|
||||||
|
func (b *PythonBuilder) bundleProject(fs io.Medium, projectDir, outputDir, bundlePath string) error {
|
||||||
|
if err := fs.EnsureDir(ax.Dir(bundlePath)); err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.bundleProject", "failed to create bundle directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
file, err := fs.Create(bundlePath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.bundleProject", "failed to create bundle file", err)
|
||||||
|
}
|
||||||
|
defer func() { _ = file.Close() }()
|
||||||
|
|
||||||
|
writer := zip.NewWriter(file)
|
||||||
|
defer func() { _ = writer.Close() }()
|
||||||
|
|
||||||
|
return b.writeZipTree(fs, writer, projectDir, projectDir, outputDir, bundlePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeZipTree walks the project directory and writes files into the zip bundle.
|
||||||
|
func (b *PythonBuilder) writeZipTree(fs io.Medium, writer *zip.Writer, rootDir, currentDir, outputDir, bundlePath string) error {
|
||||||
|
entries, err := fs.List(currentDir)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to list directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(entries, func(i, j int) bool {
|
||||||
|
return entries[i].Name() < entries[j].Name()
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
entryPath := ax.Join(currentDir, entry.Name())
|
||||||
|
if b.isExcludedPath(entryPath, outputDir, bundlePath) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.IsDir() {
|
||||||
|
if err := b.writeZipTree(fs, writer, rootDir, entryPath, outputDir, bundlePath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
relPath, err := ax.Rel(rootDir, entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to relativise bundle path", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
info, err := fs.Stat(entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to stat bundle entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
header, err := zip.FileInfoHeader(info)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to create zip header", err)
|
||||||
|
}
|
||||||
|
header.Name = strings.ReplaceAll(relPath, ax.DS(), "/")
|
||||||
|
header.Method = zip.Deflate
|
||||||
|
header.SetModTime(deterministicZipTime)
|
||||||
|
|
||||||
|
zipEntry, err := writer.CreateHeader(header)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to create zip entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
source, err := fs.Open(entryPath)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to open bundle entry", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := stdio.Copy(zipEntry, source); err != nil {
|
||||||
|
_ = source.Close()
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to write bundle entry", err)
|
||||||
|
}
|
||||||
|
if err := source.Close(); err != nil {
|
||||||
|
return coreerr.E("PythonBuilder.writeZipTree", "failed to close bundle entry", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// isExcludedPath excludes generated output from the archive.
|
||||||
|
func (b *PythonBuilder) isExcludedPath(path, outputDir, bundlePath string) bool {
|
||||||
|
return path == bundlePath || path == outputDir || core.HasPrefix(path, outputDir+ax.DS())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile-time assertion that PythonBuilder implements the Builder interface.
var _ build.Builder = (*PythonBuilder)(nil)
|
||||||
153
pkg/build/builders/python_test.go
Normal file
153
pkg/build/builders/python_test.go
Normal file
|
|
@ -0,0 +1,153 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"context"
|
||||||
|
"runtime"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func setupPythonTestProject(t *testing.T) string {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "pyproject.toml"), []byte("[build-system]\nrequires = []\n"), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "app.py"), []byte("print('hello')\n"), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "README.md"), []byte("demo"), 0o644))
|
||||||
|
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPython_PythonBuilderName_Good(t *testing.T) {
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
assert.Equal(t, "python", builder.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPython_PythonBuilderDetect_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("detects pyproject.toml projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "pyproject.toml"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects requirements.txt projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "requirements.txt"), []byte("requests"), 0o644))
|
||||||
|
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns false for empty directory", func(t *testing.T) {
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
detected, err := builder.Detect(fs, t.TempDir())
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, detected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPython_PythonBuilderBuild_Good(t *testing.T) {
|
||||||
|
projectDir := setupPythonTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "demo-app",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
artifact := artifacts[0]
|
||||||
|
assert.Equal(t, "linux", artifact.OS)
|
||||||
|
assert.Equal(t, "amd64", artifact.Arch)
|
||||||
|
assert.FileExists(t, artifact.Path)
|
||||||
|
|
||||||
|
reader, err := zip.OpenReader(artifact.Path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
defer func() { _ = reader.Close() }()
|
||||||
|
|
||||||
|
var foundPyProject, foundApp bool
|
||||||
|
for _, file := range reader.File {
|
||||||
|
switch file.Name {
|
||||||
|
case "pyproject.toml":
|
||||||
|
foundPyProject = true
|
||||||
|
case "app.py":
|
||||||
|
foundApp = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.True(t, foundPyProject)
|
||||||
|
assert.True(t, foundApp)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPython_PythonBuilderBuildDefaults_Good(t *testing.T) {
|
||||||
|
projectDir := setupPythonTestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestPython_PythonBuilderBuildIsDeterministic_Good(t *testing.T) {
|
||||||
|
projectDir := setupPythonTestProject(t)
|
||||||
|
|
||||||
|
builder := NewPythonBuilder()
|
||||||
|
buildOnce := func(outputDir string) []byte {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "demo-app",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(artifacts[0].Path)
|
||||||
|
require.NoError(t, err)
|
||||||
|
return content
|
||||||
|
}
|
||||||
|
|
||||||
|
first := buildOnce(t.TempDir())
|
||||||
|
second := buildOnce(t.TempDir())
|
||||||
|
|
||||||
|
assert.Equal(t, first, second)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestPython_PythonBuilderInterface_Good is a compile-time assertion that
// PythonBuilder satisfies build.Builder, both as a pointer type and as
// the concrete value returned by the constructor.
func TestPython_PythonBuilderInterface_Good(t *testing.T) {
	var _ build.Builder = (*PythonBuilder)(nil)
	var _ build.Builder = NewPythonBuilder()
}
|
||||||
194
pkg/build/builders/rust.go
Normal file
194
pkg/build/builders/rust.go
Normal file
|
|
@ -0,0 +1,194 @@
|
||||||
|
// Package builders provides build implementations for different project types.
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"runtime"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// RustBuilder implements the Builder interface for Rust projects. It
// shells out to the cargo CLI for compilation; the struct carries no
// state, so a single instance can be reused.
//
// b := builders.NewRustBuilder()
type RustBuilder struct{}
|
||||||
|
|
||||||
|
// NewRustBuilder creates a new RustBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewRustBuilder()
|
||||||
|
func NewRustBuilder() *RustBuilder {
|
||||||
|
return &RustBuilder{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "rust"
|
||||||
|
func (b *RustBuilder) Name() string {
|
||||||
|
return "rust"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect checks if this builder can handle the project in the given directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
|
func (b *RustBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
|
return build.IsRustProject(fs, dir), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build compiles the Rust project for the specified targets via
// `cargo build --release --target <triple>`, one invocation per target.
// With no targets, the host GOOS/GOARCH is used; with an empty OutputDir,
// output goes under <ProjectDir>/dist. Each target gets its own
// CARGO_TARGET_DIR under <outputDir>/<os>_<arch>, and the resulting
// release binaries are collected as artifacts.
//
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
func (b *RustBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
	if cfg == nil {
		return nil, coreerr.E("RustBuilder.Build", "config is nil", nil)
	}

	// Fail fast if cargo is not installed, before touching the filesystem.
	cargoCommand, err := b.resolveCargoCli()
	if err != nil {
		return nil, err
	}

	if len(targets) == 0 {
		// Default to the host platform when the caller did not pick one.
		targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
	}

	outputDir := cfg.OutputDir
	if outputDir == "" {
		outputDir = ax.Join(cfg.ProjectDir, "dist")
	}
	if err := cfg.FS.EnsureDir(outputDir); err != nil {
		return nil, coreerr.E("RustBuilder.Build", "failed to create output directory", err)
	}

	var artifacts []build.Artifact
	for _, target := range targets {
		targetTriple, err := rustTargetTriple(target)
		if err != nil {
			return artifacts, err
		}

		platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
		if err := cfg.FS.EnsureDir(platformDir); err != nil {
			return artifacts, coreerr.E("RustBuilder.Build", "failed to create platform directory", err)
		}

		// Caller env first, then the builder-controlled variables; NAME
		// and VERSION are only exported when configured. Tests assert on
		// these exact entries, so keep the order stable.
		env := appendConfiguredEnv(cfg.Env,
			core.Sprintf("CARGO_TARGET_DIR=%s", platformDir),
			core.Sprintf("TARGET_OS=%s", target.OS),
			core.Sprintf("TARGET_ARCH=%s", target.Arch),
		)
		if cfg.Name != "" {
			env = append(env, core.Sprintf("NAME=%s", cfg.Name))
		}
		if cfg.Version != "" {
			env = append(env, core.Sprintf("VERSION=%s", cfg.Version))
		}

		args := []string{"build", "--release", "--target", targetTriple}
		output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, env, cargoCommand, args...)
		if err != nil {
			// Include cargo's combined output so the compile error is visible.
			return artifacts, coreerr.E("RustBuilder.Build", "cargo build failed: "+output, err)
		}

		found := b.findArtifactsForTarget(cfg.FS, platformDir, targetTriple, target)
		if len(found) == 0 {
			// A successful cargo run with no binaries is still a failure.
			return artifacts, coreerr.E("RustBuilder.Build", "no build artifacts found for "+target.String(), nil)
		}

		artifacts = append(artifacts, found...)
	}

	return artifacts, nil
}
|
||||||
|
|
||||||
|
// resolveCargoCli returns the executable path for cargo.
|
||||||
|
//
|
||||||
|
// command, err := b.resolveCargoCli()
|
||||||
|
func (b *RustBuilder) resolveCargoCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/cargo",
|
||||||
|
"/opt/homebrew/bin/cargo",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("cargo", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("RustBuilder.resolveCargoCli", "cargo CLI not found. Install Rust from https://www.rust-lang.org/tools/install", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// findArtifactsForTarget looks for compiled binaries in the cargo target directory.
|
||||||
|
func (b *RustBuilder) findArtifactsForTarget(fs io.Medium, targetDir, targetTriple string, target build.Target) []build.Artifact {
|
||||||
|
releaseDir := ax.Join(targetDir, targetTriple, "release")
|
||||||
|
if !fs.IsDir(releaseDir) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := fs.List(releaseDir)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var artifacts []build.Artifact
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
name := entry.Name()
|
||||||
|
if core.HasPrefix(name, ".") ||
|
||||||
|
core.HasSuffix(name, ".d") ||
|
||||||
|
core.HasSuffix(name, ".rlib") ||
|
||||||
|
core.HasSuffix(name, ".rmeta") ||
|
||||||
|
core.HasSuffix(name, ".a") ||
|
||||||
|
core.HasSuffix(name, ".lib") ||
|
||||||
|
core.HasSuffix(name, ".pdb") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fullPath := ax.Join(releaseDir, name)
|
||||||
|
if target.OS != "windows" {
|
||||||
|
info, statErr := fs.Stat(fullPath)
|
||||||
|
if statErr != nil || info.Mode()&0o111 == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts = append(artifacts, build.Artifact{
|
||||||
|
Path: fullPath,
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return artifacts
|
||||||
|
}
|
||||||
|
|
||||||
|
// rustTargetTriple maps a build target to a Rust target triple.
|
||||||
|
func rustTargetTriple(target build.Target) (string, error) {
|
||||||
|
switch target.OS + "/" + target.Arch {
|
||||||
|
case "linux/amd64":
|
||||||
|
return "x86_64-unknown-linux-gnu", nil
|
||||||
|
case "linux/arm64":
|
||||||
|
return "aarch64-unknown-linux-gnu", nil
|
||||||
|
case "darwin/amd64":
|
||||||
|
return "x86_64-apple-darwin", nil
|
||||||
|
case "darwin/arm64":
|
||||||
|
return "aarch64-apple-darwin", nil
|
||||||
|
case "windows/amd64":
|
||||||
|
return "x86_64-pc-windows-msvc", nil
|
||||||
|
case "windows/arm64":
|
||||||
|
return "aarch64-pc-windows-msvc", nil
|
||||||
|
default:
|
||||||
|
return "", coreerr.E("RustBuilder.rustTargetTriple", "unsupported Rust target: "+target.String(), nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compile-time assertion that RustBuilder implements the Builder interface.
var _ build.Builder = (*RustBuilder)(nil)
|
||||||
151
pkg/build/builders/rust_test.go
Normal file
151
pkg/build/builders/rust_test.go
Normal file
|
|
@ -0,0 +1,151 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setupFakeRustToolchain installs a fake "cargo" shell script into binDir.
// When $RUST_BUILD_LOG_FILE is set, the script appends its own name, its
// argv, selected env vars, and the full sorted environment to that file;
// it then fabricates the release binary that a real
// `cargo build --release --target <triple>` would have produced (with a
// .exe suffix for Windows triples), so builder tests can run without a
// Rust toolchain. The script text is asserted on byte-for-byte by the
// build tests — do not reformat it.
func setupFakeRustToolchain(t *testing.T, binDir string) {
	t.Helper()

	script := `#!/bin/sh
set -eu

log_file="${RUST_BUILD_LOG_FILE:-}"
if [ -n "$log_file" ]; then
printf '%s\n' "$(basename "$0")" >> "$log_file"
printf '%s\n' "$@" >> "$log_file"
printf '%s\n' "CARGO_TARGET_DIR=${CARGO_TARGET_DIR:-}" >> "$log_file"
printf '%s\n' "TARGET_OS=${TARGET_OS:-}" >> "$log_file"
printf '%s\n' "TARGET_ARCH=${TARGET_ARCH:-}" >> "$log_file"
env | sort >> "$log_file"
fi

target_triple=""
prev=""
for arg in "$@"; do
if [ "$prev" = "--target" ]; then
target_triple="$arg"
prev=""
continue
fi
if [ "$arg" = "--target" ]; then
prev="--target"
fi
done

target_dir="${CARGO_TARGET_DIR:-target}"
release_dir="$target_dir/$target_triple/release"
mkdir -p "$release_dir"

name="${NAME:-rustapp}"
artifact="$release_dir/$name"
case "$target_triple" in
*-windows-*)
artifact="$artifact.exe"
;;
esac

printf 'fake rust artifact\n' > "$artifact"
chmod +x "$artifact" 2>/dev/null || true
`

	require.NoError(t, ax.WriteFile(ax.Join(binDir, "cargo"), []byte(script), 0o755))
}
|
||||||
|
|
||||||
|
func setupRustTestProject(t *testing.T) string {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "Cargo.toml"), []byte("[package]\nname = \"testapp\"\nversion = \"0.1.0\""), 0o644))
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(dir, "src"), 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "src", "main.rs"), []byte("fn main() {}"), 0o644))
|
||||||
|
return dir
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRust_RustBuilderName_Good(t *testing.T) {
|
||||||
|
builder := NewRustBuilder()
|
||||||
|
assert.Equal(t, "rust", builder.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestRust_RustBuilderDetect_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("detects Cargo.toml projects", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "Cargo.toml"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
builder := NewRustBuilder()
|
||||||
|
detected, err := builder.Detect(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.True(t, detected)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns false for empty directory", func(t *testing.T) {
|
||||||
|
builder := NewRustBuilder()
|
||||||
|
detected, err := builder.Detect(fs, t.TempDir())
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.False(t, detected)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestRust_RustBuilderBuild_Good runs a full build against the fake cargo
// toolchain and asserts the produced artifact metadata plus the exact
// cargo invocation (argv positions and environment) captured in the log
// file the fake script writes to $RUST_BUILD_LOG_FILE.
func TestRust_RustBuilderBuild_Good(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test in short mode")
	}

	// Put the fake cargo ahead of any real one on PATH.
	binDir := t.TempDir()
	setupFakeRustToolchain(t, binDir)
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	projectDir := setupRustTestProject(t)
	outputDir := t.TempDir()
	logDir := t.TempDir()
	logPath := ax.Join(logDir, "rust.log")
	t.Setenv("RUST_BUILD_LOG_FILE", logPath)

	builder := NewRustBuilder()
	cfg := &build.Config{
		FS:         io.Local,
		ProjectDir: projectDir,
		OutputDir:  outputDir,
		Name:       "testapp",
		Version:    "v1.2.3",
		Env:        []string{"FOO=bar"},
	}

	targets := []build.Target{{OS: "linux", Arch: "amd64"}}

	artifacts, err := builder.Build(context.Background(), cfg, targets)
	require.NoError(t, err)
	require.Len(t, artifacts, 1)
	assert.FileExists(t, artifacts[0].Path)
	assert.Equal(t, "linux", artifacts[0].OS)
	assert.Equal(t, "amd64", artifacts[0].Arch)

	content, err := ax.ReadFile(logPath)
	require.NoError(t, err)

	// The fake script logs its own name first and then each argv element
	// on its own line, so lines 0-4 are positional.
	lines := strings.Split(strings.TrimSpace(string(content)), "\n")
	require.GreaterOrEqual(t, len(lines), 5)
	assert.Equal(t, "cargo", lines[0])
	assert.Equal(t, "build", lines[1])
	assert.Equal(t, "--release", lines[2])
	assert.Equal(t, "--target", lines[3])
	assert.Equal(t, "x86_64-unknown-linux-gnu", lines[4])
	assert.Contains(t, lines, "CARGO_TARGET_DIR="+ax.Join(outputDir, "linux_amd64"))
	// Caller-supplied env must be forwarded to cargo.
	assert.Contains(t, string(content), "FOO=bar")
}
|
||||||
|
|
||||||
|
// TestRust_RustBuilderInterface_Good is a compile-time assertion that
// RustBuilder satisfies build.Builder, both as a pointer type and as the
// concrete value returned by the constructor.
func TestRust_RustBuilderInterface_Good(t *testing.T) {
	var _ build.Builder = (*RustBuilder)(nil)
	var _ build.Builder = NewRustBuilder()
}
|
||||||
|
|
@ -3,12 +3,11 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"path"
|
||||||
"os"
|
"runtime"
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -16,19 +15,27 @@ import (
|
||||||
|
|
||||||
// TaskfileBuilder builds projects using Taskfile (https://taskfile.dev/).
|
// TaskfileBuilder builds projects using Taskfile (https://taskfile.dev/).
|
||||||
// This is a generic builder that can handle any project type that has a Taskfile.
|
// This is a generic builder that can handle any project type that has a Taskfile.
|
||||||
|
//
|
||||||
|
// b := builders.NewTaskfileBuilder()
|
||||||
type TaskfileBuilder struct{}
|
type TaskfileBuilder struct{}
|
||||||
|
|
||||||
// NewTaskfileBuilder creates a new Taskfile builder.
|
// NewTaskfileBuilder creates a new Taskfile builder.
|
||||||
|
//
|
||||||
|
// b := builders.NewTaskfileBuilder()
|
||||||
func NewTaskfileBuilder() *TaskfileBuilder {
|
func NewTaskfileBuilder() *TaskfileBuilder {
|
||||||
return &TaskfileBuilder{}
|
return &TaskfileBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "taskfile"
|
||||||
func (b *TaskfileBuilder) Name() string {
|
func (b *TaskfileBuilder) Name() string {
|
||||||
return "taskfile"
|
return "taskfile"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if a Taskfile exists in the directory.
|
// Detect checks if a Taskfile exists in the directory.
|
||||||
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *TaskfileBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *TaskfileBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
// Check for Taskfile.yml, Taskfile.yaml, or Taskfile
|
// Check for Taskfile.yml, Taskfile.yaml, or Taskfile
|
||||||
taskfiles := []string{
|
taskfiles := []string{
|
||||||
|
|
@ -40,7 +47,7 @@ func (b *TaskfileBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tf := range taskfiles {
|
for _, tf := range taskfiles {
|
||||||
if fs.IsFile(filepath.Join(dir, tf)) {
|
if fs.IsFile(ax.Join(dir, tf)) {
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -48,16 +55,18 @@ func (b *TaskfileBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build runs the Taskfile build task for each target platform.
|
// Build runs the Taskfile build task for each target platform.
|
||||||
|
//
|
||||||
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
func (b *TaskfileBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *TaskfileBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
// Validate task CLI is available
|
taskCommand, err := b.resolveTaskCli()
|
||||||
if err := b.validateTaskCli(); err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create output directory
|
// Create output directory
|
||||||
outputDir := cfg.OutputDir
|
outputDir := cfg.OutputDir
|
||||||
if outputDir == "" {
|
if outputDir == "" {
|
||||||
outputDir = filepath.Join(cfg.ProjectDir, "dist")
|
outputDir = ax.Join(cfg.ProjectDir, "dist")
|
||||||
}
|
}
|
||||||
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
if err := cfg.FS.EnsureDir(outputDir); err != nil {
|
||||||
return nil, coreerr.E("TaskfileBuilder.Build", "failed to create output directory", err)
|
return nil, coreerr.E("TaskfileBuilder.Build", "failed to create output directory", err)
|
||||||
|
|
@ -65,83 +74,86 @@ func (b *TaskfileBuilder) Build(ctx context.Context, cfg *build.Config, targets
|
||||||
|
|
||||||
var artifacts []build.Artifact
|
var artifacts []build.Artifact
|
||||||
|
|
||||||
// If no targets specified, just run the build task once
|
// If no targets are specified, build the host target so Taskfile builds
|
||||||
|
// still receive the standard GOOS/GOARCH surface.
|
||||||
if len(targets) == 0 {
|
if len(targets) == 0 {
|
||||||
if err := b.runTask(ctx, cfg, "", ""); err != nil {
|
targets = []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run build task for each target
|
||||||
|
for _, target := range targets {
|
||||||
|
if err := b.runTask(ctx, cfg, taskCommand, outputDir, target); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try to find artifacts in output directory
|
// Try to find artifacts for this target
|
||||||
found := b.findArtifacts(cfg.FS, outputDir)
|
found := b.findArtifactsForTarget(cfg.FS, outputDir, target)
|
||||||
artifacts = append(artifacts, found...)
|
artifacts = append(artifacts, found...)
|
||||||
} else {
|
|
||||||
// Run build task for each target
|
|
||||||
for _, target := range targets {
|
|
||||||
if err := b.runTask(ctx, cfg, target.OS, target.Arch); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to find artifacts for this target
|
|
||||||
found := b.findArtifactsForTarget(cfg.FS, outputDir, target)
|
|
||||||
artifacts = append(artifacts, found...)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return artifacts, nil
|
return artifacts, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// runTask executes the Taskfile build task.
|
// runTask executes the Taskfile build task.
|
||||||
func (b *TaskfileBuilder) runTask(ctx context.Context, cfg *build.Config, goos, goarch string) error {
|
func (b *TaskfileBuilder) runTask(ctx context.Context, cfg *build.Config, taskCommand string, outputDir string, target build.Target) error {
|
||||||
// Build task command
|
// Build task command
|
||||||
args := []string{"build"}
|
args := []string{"build"}
|
||||||
|
env := append([]string{}, cfg.Env...)
|
||||||
|
platformDir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
|
|
||||||
// Pass variables if targets are specified
|
// Pass variables if targets are specified
|
||||||
if goos != "" {
|
if target.OS != "" {
|
||||||
args = append(args, fmt.Sprintf("GOOS=%s", goos))
|
value := core.Sprintf("GOOS=%s", target.OS)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
}
|
}
|
||||||
if goarch != "" {
|
if target.Arch != "" {
|
||||||
args = append(args, fmt.Sprintf("GOARCH=%s", goarch))
|
value := core.Sprintf("GOARCH=%s", target.Arch)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
}
|
}
|
||||||
if cfg.OutputDir != "" {
|
if target.OS != "" {
|
||||||
args = append(args, fmt.Sprintf("OUTPUT_DIR=%s", cfg.OutputDir))
|
value := core.Sprintf("TARGET_OS=%s", target.OS)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
|
}
|
||||||
|
if target.Arch != "" {
|
||||||
|
value := core.Sprintf("TARGET_ARCH=%s", target.Arch)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
|
}
|
||||||
|
value := core.Sprintf("OUTPUT_DIR=%s", outputDir)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
|
if platformDir != "" {
|
||||||
|
value := core.Sprintf("TARGET_DIR=%s", platformDir)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
}
|
}
|
||||||
if cfg.Name != "" {
|
if cfg.Name != "" {
|
||||||
args = append(args, fmt.Sprintf("NAME=%s", cfg.Name))
|
value := core.Sprintf("NAME=%s", cfg.Name)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
}
|
}
|
||||||
if cfg.Version != "" {
|
if cfg.Version != "" {
|
||||||
args = append(args, fmt.Sprintf("VERSION=%s", cfg.Version))
|
value := core.Sprintf("VERSION=%s", cfg.Version)
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
}
|
}
|
||||||
|
value = "CGO_ENABLED=0"
|
||||||
|
if cfg.CGO {
|
||||||
|
value = "CGO_ENABLED=1"
|
||||||
|
}
|
||||||
|
args = append(args, value)
|
||||||
|
env = append(env, value)
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "task", args...)
|
if target.OS != "" && target.Arch != "" {
|
||||||
cmd.Dir = cfg.ProjectDir
|
core.Print(nil, "Running task build for %s/%s", target.OS, target.Arch)
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
// Set environment variables
|
|
||||||
cmd.Env = os.Environ()
|
|
||||||
if goos != "" {
|
|
||||||
cmd.Env = append(cmd.Env, fmt.Sprintf("GOOS=%s", goos))
|
|
||||||
}
|
|
||||||
if goarch != "" {
|
|
||||||
cmd.Env = append(cmd.Env, fmt.Sprintf("GOARCH=%s", goarch))
|
|
||||||
}
|
|
||||||
if cfg.OutputDir != "" {
|
|
||||||
cmd.Env = append(cmd.Env, fmt.Sprintf("OUTPUT_DIR=%s", cfg.OutputDir))
|
|
||||||
}
|
|
||||||
if cfg.Name != "" {
|
|
||||||
cmd.Env = append(cmd.Env, fmt.Sprintf("NAME=%s", cfg.Name))
|
|
||||||
}
|
|
||||||
if cfg.Version != "" {
|
|
||||||
cmd.Env = append(cmd.Env, fmt.Sprintf("VERSION=%s", cfg.Version))
|
|
||||||
}
|
|
||||||
|
|
||||||
if goos != "" && goarch != "" {
|
|
||||||
fmt.Printf("Running task build for %s/%s\n", goos, goarch)
|
|
||||||
} else {
|
} else {
|
||||||
fmt.Println("Running task build")
|
core.Print(nil, "Running task build")
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
if err := ax.ExecWithEnv(ctx, cfg.ProjectDir, env, taskCommand, args...); err != nil {
|
||||||
return coreerr.E("TaskfileBuilder.runTask", "task build failed", err)
|
return coreerr.E("TaskfileBuilder.runTask", "task build failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -164,12 +176,12 @@ func (b *TaskfileBuilder) findArtifacts(fs io.Medium, outputDir string) []build.
|
||||||
|
|
||||||
// Skip common non-artifact files
|
// Skip common non-artifact files
|
||||||
name := entry.Name()
|
name := entry.Name()
|
||||||
if strings.HasPrefix(name, ".") || name == "CHECKSUMS.txt" {
|
if core.HasPrefix(name, ".") || name == "CHECKSUMS.txt" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
artifacts = append(artifacts, build.Artifact{
|
artifacts = append(artifacts, build.Artifact{
|
||||||
Path: filepath.Join(outputDir, name),
|
Path: ax.Join(outputDir, name),
|
||||||
OS: "",
|
OS: "",
|
||||||
Arch: "",
|
Arch: "",
|
||||||
})
|
})
|
||||||
|
|
@ -183,15 +195,15 @@ func (b *TaskfileBuilder) findArtifactsForTarget(fs io.Medium, outputDir string,
|
||||||
var artifacts []build.Artifact
|
var artifacts []build.Artifact
|
||||||
|
|
||||||
// 1. Look for platform-specific subdirectory: output/os_arch/
|
// 1. Look for platform-specific subdirectory: output/os_arch/
|
||||||
platformSubdir := filepath.Join(outputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch))
|
platformSubdir := ax.Join(outputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
if fs.IsDir(platformSubdir) {
|
if fs.IsDir(platformSubdir) {
|
||||||
entries, _ := fs.List(platformSubdir)
|
entries, _ := fs.List(platformSubdir)
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
if entry.IsDir() {
|
if entry.IsDir() {
|
||||||
// Handle .app bundles on macOS
|
// Handle .app bundles on macOS
|
||||||
if target.OS == "darwin" && strings.HasSuffix(entry.Name(), ".app") {
|
if target.OS == "darwin" && core.HasSuffix(entry.Name(), ".app") {
|
||||||
artifacts = append(artifacts, build.Artifact{
|
artifacts = append(artifacts, build.Artifact{
|
||||||
Path: filepath.Join(platformSubdir, entry.Name()),
|
Path: ax.Join(platformSubdir, entry.Name()),
|
||||||
OS: target.OS,
|
OS: target.OS,
|
||||||
Arch: target.Arch,
|
Arch: target.Arch,
|
||||||
})
|
})
|
||||||
|
|
@ -199,11 +211,11 @@ func (b *TaskfileBuilder) findArtifactsForTarget(fs io.Medium, outputDir string,
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
// Skip hidden files
|
// Skip hidden files
|
||||||
if strings.HasPrefix(entry.Name(), ".") {
|
if core.HasPrefix(entry.Name(), ".") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
artifacts = append(artifacts, build.Artifact{
|
artifacts = append(artifacts, build.Artifact{
|
||||||
Path: filepath.Join(platformSubdir, entry.Name()),
|
Path: ax.Join(platformSubdir, entry.Name()),
|
||||||
OS: target.OS,
|
OS: target.OS,
|
||||||
Arch: target.Arch,
|
Arch: target.Arch,
|
||||||
})
|
})
|
||||||
|
|
@ -215,9 +227,9 @@ func (b *TaskfileBuilder) findArtifactsForTarget(fs io.Medium, outputDir string,
|
||||||
|
|
||||||
// 2. Look for files matching the target pattern in the root output dir
|
// 2. Look for files matching the target pattern in the root output dir
|
||||||
patterns := []string{
|
patterns := []string{
|
||||||
fmt.Sprintf("*-%s-%s*", target.OS, target.Arch),
|
core.Sprintf("*-%s-%s*", target.OS, target.Arch),
|
||||||
fmt.Sprintf("*_%s_%s*", target.OS, target.Arch),
|
core.Sprintf("*_%s_%s*", target.OS, target.Arch),
|
||||||
fmt.Sprintf("*-%s*", target.Arch),
|
core.Sprintf("*-%s*", target.Arch),
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, pattern := range patterns {
|
for _, pattern := range patterns {
|
||||||
|
|
@ -226,7 +238,7 @@ func (b *TaskfileBuilder) findArtifactsForTarget(fs io.Medium, outputDir string,
|
||||||
match := entry.Name()
|
match := entry.Name()
|
||||||
// Simple glob matching
|
// Simple glob matching
|
||||||
if b.matchPattern(match, pattern) {
|
if b.matchPattern(match, pattern) {
|
||||||
fullPath := filepath.Join(outputDir, match)
|
fullPath := ax.Join(outputDir, match)
|
||||||
if fs.IsDir(fullPath) {
|
if fs.IsDir(fullPath) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
@ -249,28 +261,23 @@ func (b *TaskfileBuilder) findArtifactsForTarget(fs io.Medium, outputDir string,
|
||||||
|
|
||||||
// matchPattern implements glob matching for Taskfile artifacts.
|
// matchPattern implements glob matching for Taskfile artifacts.
|
||||||
func (b *TaskfileBuilder) matchPattern(name, pattern string) bool {
|
func (b *TaskfileBuilder) matchPattern(name, pattern string) bool {
|
||||||
matched, _ := filepath.Match(pattern, name)
|
matched, _ := path.Match(pattern, name)
|
||||||
return matched
|
return matched
|
||||||
}
|
}
|
||||||
|
|
||||||
// validateTaskCli checks if the task CLI is available.
|
// resolveTaskCli returns the executable path for the task CLI.
|
||||||
func (b *TaskfileBuilder) validateTaskCli() error {
|
func (b *TaskfileBuilder) resolveTaskCli(paths ...string) (string, error) {
|
||||||
// Check PATH first
|
if len(paths) == 0 {
|
||||||
if _, err := exec.LookPath("task"); err == nil {
|
paths = []string{
|
||||||
return nil
|
"/usr/local/bin/task",
|
||||||
}
|
"/opt/homebrew/bin/task",
|
||||||
|
|
||||||
// Check common locations
|
|
||||||
paths := []string{
|
|
||||||
"/usr/local/bin/task",
|
|
||||||
"/opt/homebrew/bin/task",
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, p := range paths {
|
|
||||||
if io.Local.IsFile(p) {
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return coreerr.E("TaskfileBuilder.validateTaskCli", "task CLI not found. Install with: brew install go-task (macOS), go install github.com/go-task/task/v3/cmd/task@latest, or see https://taskfile.dev/installation/", nil)
|
command, err := ax.ResolveCommand("task", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("TaskfileBuilder.resolveTaskCli", "task CLI not found. Install with: brew install go-task (macOS), go install github.com/go-task/task/v3/cmd/task@latest, or see https://taskfile.dev/installation/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,30 @@
|
||||||
package builders
|
package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"runtime"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTaskfileBuilder_Name_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderName_Good(t *testing.T) {
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
assert.Equal(t, "taskfile", builder.Name())
|
assert.Equal(t, "taskfile", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
|
|
||||||
t.Run("detects Taskfile.yml", func(t *testing.T) {
|
t.Run("detects Taskfile.yml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "Taskfile.yml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "Taskfile.yml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -32,7 +35,7 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects Taskfile.yaml", func(t *testing.T) {
|
t.Run("detects Taskfile.yaml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "Taskfile.yaml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "Taskfile.yaml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -43,7 +46,7 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects Taskfile (no extension)", func(t *testing.T) {
|
t.Run("detects Taskfile (no extension)", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "Taskfile"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "Taskfile"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -54,7 +57,7 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects lowercase taskfile.yml", func(t *testing.T) {
|
t.Run("detects lowercase taskfile.yml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "taskfile.yml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "taskfile.yml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -65,7 +68,7 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects lowercase taskfile.yaml", func(t *testing.T) {
|
t.Run("detects lowercase taskfile.yaml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "taskfile.yaml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "taskfile.yaml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -85,7 +88,7 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for non-Taskfile project", func(t *testing.T) {
|
t.Run("returns false for non-Taskfile project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "Makefile"), []byte("all:\n\techo hello\n"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "Makefile"), []byte("all:\n\techo hello\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -96,9 +99,9 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("does not match Taskfile in subdirectory", func(t *testing.T) {
|
t.Run("does not match Taskfile in subdirectory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
subDir := filepath.Join(dir, "subdir")
|
subDir := ax.Join(dir, "subdir")
|
||||||
require.NoError(t, os.MkdirAll(subDir, 0755))
|
require.NoError(t, ax.MkdirAll(subDir, 0755))
|
||||||
err := os.WriteFile(filepath.Join(subDir, "Taskfile.yml"), []byte("version: '3'\n"), 0644)
|
err := ax.WriteFile(ax.Join(subDir, "Taskfile.yml"), []byte("version: '3'\n"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
@ -108,14 +111,14 @@ func TestTaskfileBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTaskfileBuilder_FindArtifacts_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderFindArtifacts_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
||||||
t.Run("finds files in output directory", func(t *testing.T) {
|
t.Run("finds files in output directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp"), []byte("binary"), 0755))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp.tar.gz"), []byte("archive"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp.tar.gz"), []byte("archive"), 0644))
|
||||||
|
|
||||||
artifacts := builder.findArtifacts(fs, dir)
|
artifacts := builder.findArtifacts(fs, dir)
|
||||||
assert.Len(t, artifacts, 2)
|
assert.Len(t, artifacts, 2)
|
||||||
|
|
@ -123,8 +126,8 @@ func TestTaskfileBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("skips hidden files", func(t *testing.T) {
|
t.Run("skips hidden files", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp"), []byte("binary"), 0755))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, ".hidden"), []byte("hidden"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, ".hidden"), []byte("hidden"), 0644))
|
||||||
|
|
||||||
artifacts := builder.findArtifacts(fs, dir)
|
artifacts := builder.findArtifacts(fs, dir)
|
||||||
assert.Len(t, artifacts, 1)
|
assert.Len(t, artifacts, 1)
|
||||||
|
|
@ -133,8 +136,8 @@ func TestTaskfileBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("skips CHECKSUMS.txt", func(t *testing.T) {
|
t.Run("skips CHECKSUMS.txt", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp"), []byte("binary"), 0755))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "CHECKSUMS.txt"), []byte("sha256"), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "CHECKSUMS.txt"), []byte("sha256"), 0644))
|
||||||
|
|
||||||
artifacts := builder.findArtifacts(fs, dir)
|
artifacts := builder.findArtifacts(fs, dir)
|
||||||
assert.Len(t, artifacts, 1)
|
assert.Len(t, artifacts, 1)
|
||||||
|
|
@ -143,8 +146,8 @@ func TestTaskfileBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("skips directories", func(t *testing.T) {
|
t.Run("skips directories", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp"), []byte("binary"), 0755))
|
||||||
require.NoError(t, os.MkdirAll(filepath.Join(dir, "subdir"), 0755))
|
require.NoError(t, ax.MkdirAll(ax.Join(dir, "subdir"), 0755))
|
||||||
|
|
||||||
artifacts := builder.findArtifacts(fs, dir)
|
artifacts := builder.findArtifacts(fs, dir)
|
||||||
assert.Len(t, artifacts, 1)
|
assert.Len(t, artifacts, 1)
|
||||||
|
|
@ -163,15 +166,15 @@ func TestTaskfileBuilder_FindArtifacts_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTaskfileBuilder_FindArtifactsForTarget_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderFindArtifactsForTarget_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
||||||
t.Run("finds artifacts in platform subdirectory", func(t *testing.T) {
|
t.Run("finds artifacts in platform subdirectory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
platformDir := filepath.Join(dir, "linux_amd64")
|
platformDir := ax.Join(dir, "linux_amd64")
|
||||||
require.NoError(t, os.MkdirAll(platformDir, 0755))
|
require.NoError(t, ax.MkdirAll(platformDir, 0755))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(platformDir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(platformDir, "myapp"), []byte("binary"), 0755))
|
||||||
|
|
||||||
target := build.Target{OS: "linux", Arch: "amd64"}
|
target := build.Target{OS: "linux", Arch: "amd64"}
|
||||||
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
||||||
|
|
@ -182,7 +185,7 @@ func TestTaskfileBuilder_FindArtifactsForTarget_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("finds artifacts by name pattern in root", func(t *testing.T) {
|
t.Run("finds artifacts by name pattern in root", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp-linux-amd64"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp-linux-amd64"), []byte("binary"), 0755))
|
||||||
|
|
||||||
target := build.Target{OS: "linux", Arch: "amd64"}
|
target := build.Target{OS: "linux", Arch: "amd64"}
|
||||||
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
||||||
|
|
@ -191,7 +194,7 @@ func TestTaskfileBuilder_FindArtifactsForTarget_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns empty when no matching artifacts", func(t *testing.T) {
|
t.Run("returns empty when no matching artifacts", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "myapp"), []byte("binary"), 0755))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "myapp"), []byte("binary"), 0755))
|
||||||
|
|
||||||
target := build.Target{OS: "linux", Arch: "arm64"}
|
target := build.Target{OS: "linux", Arch: "arm64"}
|
||||||
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
||||||
|
|
@ -200,9 +203,9 @@ func TestTaskfileBuilder_FindArtifactsForTarget_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("handles .app bundles on darwin", func(t *testing.T) {
|
t.Run("handles .app bundles on darwin", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
platformDir := filepath.Join(dir, "darwin_arm64")
|
platformDir := ax.Join(dir, "darwin_arm64")
|
||||||
appDir := filepath.Join(platformDir, "MyApp.app")
|
appDir := ax.Join(platformDir, "MyApp.app")
|
||||||
require.NoError(t, os.MkdirAll(appDir, 0755))
|
require.NoError(t, ax.MkdirAll(appDir, 0755))
|
||||||
|
|
||||||
target := build.Target{OS: "darwin", Arch: "arm64"}
|
target := build.Target{OS: "darwin", Arch: "arm64"}
|
||||||
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
artifacts := builder.findArtifactsForTarget(fs, dir, target)
|
||||||
|
|
@ -211,7 +214,7 @@ func TestTaskfileBuilder_FindArtifactsForTarget_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTaskfileBuilder_MatchPattern_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderMatchPattern_Good(t *testing.T) {
|
||||||
builder := NewTaskfileBuilder()
|
builder := NewTaskfileBuilder()
|
||||||
|
|
||||||
t.Run("matches simple glob", func(t *testing.T) {
|
t.Run("matches simple glob", func(t *testing.T) {
|
||||||
|
|
@ -227,8 +230,226 @@ func TestTaskfileBuilder_MatchPattern_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTaskfileBuilder_Interface_Good(t *testing.T) {
|
func TestTaskfile_TaskfileBuilderInterface_Good(t *testing.T) {
|
||||||
// Verify TaskfileBuilder implements Builder interface
|
// Verify TaskfileBuilder implements Builder interface
|
||||||
var _ build.Builder = (*TaskfileBuilder)(nil)
|
var _ build.Builder = (*TaskfileBuilder)(nil)
|
||||||
var _ build.Builder = NewTaskfileBuilder()
|
var _ build.Builder = NewTaskfileBuilder()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderResolveTaskCli_Good(t *testing.T) {
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "task")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := builder.resolveTaskCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderResolveTaskCli_Bad(t *testing.T) {
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := builder.resolveTaskCli(ax.Join(t.TempDir(), "missing-task"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "task CLI not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderRunTask_Good(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
taskPath := ax.Join(binDir, "task")
|
||||||
|
logPath := ax.Join(t.TempDir(), "task.env")
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
env | sort > "${TASK_BUILD_LOG_FILE}"
|
||||||
|
`
|
||||||
|
require.NoError(t, ax.WriteFile(taskPath, []byte(script), 0o755))
|
||||||
|
|
||||||
|
t.Setenv("TASK_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: t.TempDir(),
|
||||||
|
OutputDir: "/tmp/out",
|
||||||
|
Name: "sample",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.runTask(context.Background(), cfg, taskPath, cfg.OutputDir, build.Target{OS: "linux", Arch: "amd64"}))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Contains(t, string(content), "FOO=bar")
|
||||||
|
assert.Contains(t, string(content), "GOOS=linux")
|
||||||
|
assert.Contains(t, string(content), "GOARCH=amd64")
|
||||||
|
assert.Contains(t, string(content), "TARGET_OS=linux")
|
||||||
|
assert.Contains(t, string(content), "TARGET_ARCH=amd64")
|
||||||
|
assert.Contains(t, string(content), "OUTPUT_DIR=/tmp/out")
|
||||||
|
assert.Contains(t, string(content), "TARGET_DIR=/tmp/out/linux_amd64")
|
||||||
|
assert.Contains(t, string(content), "NAME=sample")
|
||||||
|
assert.Contains(t, string(content), "VERSION=v1.2.3")
|
||||||
|
assert.Contains(t, string(content), "CGO_ENABLED=0")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderBuild_DoesNotMutateOutputDir_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Taskfile.yml"), []byte("version: '3'\n"), 0o644))
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
taskPath := ax.Join(binDir, "task")
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
mkdir -p "${OUTPUT_DIR}/${GOOS}_${GOARCH}"
|
||||||
|
printf '%s\n' "${NAME:-taskfile}" > "${OUTPUT_DIR}/${GOOS}_${GOARCH}/${NAME:-taskfile}"
|
||||||
|
`
|
||||||
|
require.NoError(t, ax.WriteFile(taskPath, []byte(script), 0o755))
|
||||||
|
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
Name: "sample",
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: runtime.GOOS, Arch: runtime.GOARCH}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Empty(t, cfg.OutputDir)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "dist"), ax.Dir(ax.Dir(artifacts[0].Path)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderBuild_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Taskfile.yml"), []byte("version: '3'\n"), 0o644))
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
taskPath := ax.Join(binDir, "task")
|
||||||
|
logPath := ax.Join(t.TempDir(), "task.build.env")
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
mkdir -p "${OUTPUT_DIR}/${GOOS}_${GOARCH}"
|
||||||
|
printf '%s\n' "${NAME:-taskfile}" > "${OUTPUT_DIR}/${GOOS}_${GOARCH}/${NAME:-taskfile}"
|
||||||
|
env | sort > "${TASK_BUILD_LOG_FILE}"
|
||||||
|
`
|
||||||
|
require.NoError(t, ax.WriteFile(taskPath, []byte(script), 0o755))
|
||||||
|
|
||||||
|
t.Setenv("TASK_BUILD_LOG_FILE", logPath)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
Name: "sample",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, []build.Target{{OS: "linux", Arch: "amd64"}})
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "dist", "linux_amd64", "sample"), artifacts[0].Path)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, string(content), "FOO=bar")
|
||||||
|
assert.Contains(t, string(content), "OUTPUT_DIR="+ax.Join(projectDir, "dist"))
|
||||||
|
assert.Contains(t, string(content), "GOOS=linux")
|
||||||
|
assert.Contains(t, string(content), "GOARCH=amd64")
|
||||||
|
assert.Contains(t, string(content), "TARGET_OS=linux")
|
||||||
|
assert.Contains(t, string(content), "TARGET_ARCH=amd64")
|
||||||
|
assert.Contains(t, string(content), "TARGET_DIR="+ax.Join(projectDir, "dist", "linux_amd64"))
|
||||||
|
assert.Contains(t, string(content), "CGO_ENABLED=0")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderBuild_DefaultTarget_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Taskfile.yml"), []byte("version: '3'\n"), 0o644))
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
taskPath := ax.Join(binDir, "task")
|
||||||
|
logPath := ax.Join(t.TempDir(), "task.default.env")
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
mkdir -p "${OUTPUT_DIR}/${GOOS}_${GOARCH}"
|
||||||
|
printf '%s\n' "${GOOS}/${GOARCH}" > "${OUTPUT_DIR}/${GOOS}_${GOARCH}/artifact"
|
||||||
|
env | sort > "${TASK_BUILD_LOG_FILE}"
|
||||||
|
`
|
||||||
|
require.NoError(t, ax.WriteFile(taskPath, []byte(script), 0o755))
|
||||||
|
|
||||||
|
t.Setenv("TASK_BUILD_LOG_FILE", logPath)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
Name: "sample",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
Env: []string{"FOO=bar"},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "dist", runtime.GOOS+"_"+runtime.GOARCH, "artifact"), artifacts[0].Path)
|
||||||
|
assert.Equal(t, runtime.GOOS, artifacts[0].OS)
|
||||||
|
assert.Equal(t, runtime.GOARCH, artifacts[0].Arch)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Contains(t, string(content), "FOO=bar")
|
||||||
|
assert.Contains(t, string(content), "OUTPUT_DIR="+ax.Join(projectDir, "dist"))
|
||||||
|
assert.Contains(t, string(content), "GOOS="+runtime.GOOS)
|
||||||
|
assert.Contains(t, string(content), "GOARCH="+runtime.GOARCH)
|
||||||
|
assert.Contains(t, string(content), "TARGET_OS="+runtime.GOOS)
|
||||||
|
assert.Contains(t, string(content), "TARGET_ARCH="+runtime.GOARCH)
|
||||||
|
assert.Contains(t, string(content), "TARGET_DIR="+ax.Join(projectDir, "dist", runtime.GOOS+"_"+runtime.GOARCH))
|
||||||
|
assert.Contains(t, string(content), "CGO_ENABLED=0")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestTaskfile_TaskfileBuilderRunTask_CGOEnabled_Good(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
taskPath := ax.Join(binDir, "task")
|
||||||
|
logPath := ax.Join(t.TempDir(), "task.cgo.env")
|
||||||
|
|
||||||
|
script := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
env | sort > "${TASK_BUILD_LOG_FILE}"
|
||||||
|
`
|
||||||
|
require.NoError(t, ax.WriteFile(taskPath, []byte(script), 0o755))
|
||||||
|
|
||||||
|
t.Setenv("TASK_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewTaskfileBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: t.TempDir(),
|
||||||
|
OutputDir: "/tmp/out",
|
||||||
|
Name: "sample",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
CGO: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.runTask(context.Background(), cfg, taskPath, cfg.OutputDir, build.Target{OS: "linux", Arch: "amd64"}))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Contains(t, string(content), "CGO_ENABLED=1")
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,39 +3,44 @@ package builders
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// WailsBuilder implements the Builder interface for Wails v3 projects.
|
// WailsBuilder implements the Builder interface for Wails v3 projects.
|
||||||
|
//
|
||||||
|
// b := builders.NewWailsBuilder()
|
||||||
type WailsBuilder struct{}
|
type WailsBuilder struct{}
|
||||||
|
|
||||||
// NewWailsBuilder creates a new WailsBuilder instance.
|
// NewWailsBuilder creates a new WailsBuilder instance.
|
||||||
|
//
|
||||||
|
// b := builders.NewWailsBuilder()
|
||||||
func NewWailsBuilder() *WailsBuilder {
|
func NewWailsBuilder() *WailsBuilder {
|
||||||
return &WailsBuilder{}
|
return &WailsBuilder{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the builder's identifier.
|
// Name returns the builder's identifier.
|
||||||
|
//
|
||||||
|
// name := b.Name() // → "wails"
|
||||||
func (b *WailsBuilder) Name() string {
|
func (b *WailsBuilder) Name() string {
|
||||||
return "wails"
|
return "wails"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Detect checks if this builder can handle the project in the given directory.
|
// Detect checks if this builder can handle the project (checks for wails.json).
|
||||||
// Uses IsWailsProject from the build package which checks for wails.json.
|
//
|
||||||
|
// ok, err := b.Detect(io.Local, ".")
|
||||||
func (b *WailsBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
func (b *WailsBuilder) Detect(fs io.Medium, dir string) (bool, error) {
|
||||||
return build.IsWailsProject(fs, dir), nil
|
return build.IsWailsProject(fs, dir), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build compiles the Wails project for the specified targets.
|
// Build compiles the Wails project for the specified targets.
|
||||||
// It detects the Wails version and chooses the appropriate build strategy:
|
// Wails v3: delegates to Taskfile; Wails v2: uses 'wails build'.
|
||||||
// - Wails v3: Delegates to Taskfile (error if missing)
|
//
|
||||||
// - Wails v2: Uses 'wails build' command
|
// artifacts, err := b.Build(ctx, cfg, []build.Target{{OS: "darwin", Arch: "arm64"}})
|
||||||
func (b *WailsBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
func (b *WailsBuilder) Build(ctx context.Context, cfg *build.Config, targets []build.Target) ([]build.Artifact, error) {
|
||||||
if cfg == nil {
|
if cfg == nil {
|
||||||
return nil, coreerr.E("WailsBuilder.Build", "config is nil", nil)
|
return nil, coreerr.E("WailsBuilder.Build", "config is nil", nil)
|
||||||
|
|
@ -55,12 +60,16 @@ func (b *WailsBuilder) Build(ctx context.Context, cfg *build.Config, targets []b
|
||||||
return taskBuilder.Build(ctx, cfg, targets)
|
return taskBuilder.Build(ctx, cfg, targets)
|
||||||
}
|
}
|
||||||
// Fall back to Go builder — Wails v3 is just a Go project that needs CGO
|
// Fall back to Go builder — Wails v3 is just a Go project that needs CGO
|
||||||
cfg.CGO = true
|
v3Config := b.buildV3Config(cfg)
|
||||||
goBuilder := NewGoBuilder()
|
goBuilder := NewGoBuilder()
|
||||||
return goBuilder.Build(ctx, cfg, targets)
|
return goBuilder.Build(ctx, v3Config, targets)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Wails v2 strategy: Use 'wails build'
|
// Wails v2 strategy: Use 'wails build'
|
||||||
|
if err := b.PreBuild(ctx, cfg); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
// Ensure output directory exists
|
// Ensure output directory exists
|
||||||
if err := cfg.FS.EnsureDir(cfg.OutputDir); err != nil {
|
if err := cfg.FS.EnsureDir(cfg.OutputDir); err != nil {
|
||||||
return nil, coreerr.E("WailsBuilder.Build", "failed to create output directory", err)
|
return nil, coreerr.E("WailsBuilder.Build", "failed to create output directory", err)
|
||||||
|
|
@ -81,50 +90,221 @@ func (b *WailsBuilder) Build(ctx context.Context, cfg *build.Config, targets []b
|
||||||
return artifacts, nil
|
return artifacts, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// buildV3Config returns a copy of the build config with Wails v3 requirements applied.
|
||||||
|
func (b *WailsBuilder) buildV3Config(cfg *build.Config) *build.Config {
|
||||||
|
if cfg == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
v3Config := *cfg
|
||||||
|
v3Config.CGO = true
|
||||||
|
return &v3Config
|
||||||
|
}
|
||||||
|
|
||||||
|
// PreBuild runs the frontend build step before Wails compiles the desktop app.
|
||||||
|
//
|
||||||
|
// err := b.PreBuild(ctx, cfg) // runs `deno task build` or `npm run build`
|
||||||
|
func (b *WailsBuilder) PreBuild(ctx context.Context, cfg *build.Config) error {
|
||||||
|
if cfg == nil {
|
||||||
|
return coreerr.E("WailsBuilder.PreBuild", "config is nil", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
frontendDir, command, args, err := b.resolveFrontendBuild(cfg.FS, cfg.ProjectDir)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if command == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
output, err := ax.CombinedOutput(ctx, frontendDir, cfg.Env, command, args...)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("WailsBuilder.PreBuild", command+" build failed: "+output, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// isWailsV3 checks if the project uses Wails v3 by inspecting go.mod.
|
// isWailsV3 checks if the project uses Wails v3 by inspecting go.mod.
|
||||||
func (b *WailsBuilder) isWailsV3(fs io.Medium, dir string) bool {
|
func (b *WailsBuilder) isWailsV3(fs io.Medium, dir string) bool {
|
||||||
goModPath := filepath.Join(dir, "go.mod")
|
goModPath := ax.Join(dir, "go.mod")
|
||||||
content, err := fs.Read(goModPath)
|
content, err := fs.Read(goModPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
return strings.Contains(content, "github.com/wailsapp/wails/v3")
|
return core.Contains(content, "github.com/wailsapp/wails/v3")
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveFrontendBuild selects the frontend directory and build command.
|
||||||
|
//
|
||||||
|
// dir, command, args, err := b.resolveFrontendBuild(io.Local, ".")
|
||||||
|
func (b *WailsBuilder) resolveFrontendBuild(fs io.Medium, projectDir string) (string, string, []string, error) {
|
||||||
|
frontendDir := b.resolveFrontendDir(fs, projectDir)
|
||||||
|
if frontendDir == "" {
|
||||||
|
return "", "", nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if b.hasDenoConfig(fs, frontendDir) {
|
||||||
|
command, err := b.resolveDenoCli()
|
||||||
|
if err != nil {
|
||||||
|
return "", "", nil, err
|
||||||
|
}
|
||||||
|
return frontendDir, command, []string{"task", "build"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if fs.IsFile(ax.Join(frontendDir, "package.json")) {
|
||||||
|
packageManager := detectPackageManager(fs, frontendDir)
|
||||||
|
return b.resolvePackageManagerBuild(frontendDir, packageManager)
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", "", nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolvePackageManagerBuild returns the frontend build command for a detected package manager.
|
||||||
|
func (b *WailsBuilder) resolvePackageManagerBuild(frontendDir, packageManager string) (string, string, []string, error) {
|
||||||
|
switch packageManager {
|
||||||
|
case "bun":
|
||||||
|
command, err := b.resolveBunCli()
|
||||||
|
if err != nil {
|
||||||
|
return "", "", nil, err
|
||||||
|
}
|
||||||
|
return frontendDir, command, []string{"run", "build"}, nil
|
||||||
|
case "pnpm":
|
||||||
|
command, err := b.resolvePnpmCli()
|
||||||
|
if err != nil {
|
||||||
|
return "", "", nil, err
|
||||||
|
}
|
||||||
|
return frontendDir, command, []string{"run", "build"}, nil
|
||||||
|
case "yarn":
|
||||||
|
command, err := b.resolveYarnCli()
|
||||||
|
if err != nil {
|
||||||
|
return "", "", nil, err
|
||||||
|
}
|
||||||
|
return frontendDir, command, []string{"build"}, nil
|
||||||
|
default:
|
||||||
|
command, err := b.resolveNpmCli()
|
||||||
|
if err != nil {
|
||||||
|
return "", "", nil, err
|
||||||
|
}
|
||||||
|
return frontendDir, command, []string{"run", "build"}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveFrontendDir returns the directory that contains the frontend build manifest.
|
||||||
|
func (b *WailsBuilder) resolveFrontendDir(fs io.Medium, projectDir string) string {
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
if fs.IsDir(frontendDir) && (b.hasDenoConfig(fs, frontendDir) || fs.IsFile(ax.Join(frontendDir, "package.json"))) {
|
||||||
|
return frontendDir
|
||||||
|
}
|
||||||
|
|
||||||
|
if b.hasDenoConfig(fs, projectDir) || fs.IsFile(ax.Join(projectDir, "package.json")) {
|
||||||
|
return projectDir
|
||||||
|
}
|
||||||
|
|
||||||
|
if nestedFrontendDir := b.resolveSubtreeFrontendDir(fs, projectDir); nestedFrontendDir != "" {
|
||||||
|
return nestedFrontendDir
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasDenoConfig reports whether the frontend directory contains a Deno manifest.
|
||||||
|
func (b *WailsBuilder) hasDenoConfig(fs io.Medium, dir string) bool {
|
||||||
|
return fs.IsFile(ax.Join(dir, "deno.json")) || fs.IsFile(ax.Join(dir, "deno.jsonc"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveSubtreeFrontendDir finds a nested frontend manifest within the project tree.
|
||||||
|
// This supports monorepo layouts such as apps/web/package.json or apps/web/deno.json
|
||||||
|
// when frontend/ is absent.
|
||||||
|
func (b *WailsBuilder) resolveSubtreeFrontendDir(fs io.Medium, projectDir string) string {
|
||||||
|
return b.findFrontendDir(fs, projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// findFrontendDir walks nested directories until it finds a frontend manifest.
|
||||||
|
func (b *WailsBuilder) findFrontendDir(fs io.Medium, dir string) string {
|
||||||
|
entries, err := fs.List(dir)
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if !entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
name := entry.Name()
|
||||||
|
if name == "node_modules" || core.HasPrefix(name, ".") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
candidateDir := ax.Join(dir, name)
|
||||||
|
if b.hasDenoConfig(fs, candidateDir) || fs.IsFile(ax.Join(candidateDir, "package.json")) {
|
||||||
|
return candidateDir
|
||||||
|
}
|
||||||
|
|
||||||
|
if nested := b.findFrontendDir(fs, candidateDir); nested != "" {
|
||||||
|
return nested
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildV2Target compiles for a single target platform using wails (v2).
|
// buildV2Target compiles for a single target platform using wails (v2).
|
||||||
func (b *WailsBuilder) buildV2Target(ctx context.Context, cfg *build.Config, target build.Target) (build.Artifact, error) {
|
func (b *WailsBuilder) buildV2Target(ctx context.Context, cfg *build.Config, target build.Target) (build.Artifact, error) {
|
||||||
|
wailsCommand, err := b.resolveWailsCli()
|
||||||
|
if err != nil {
|
||||||
|
return build.Artifact{}, err
|
||||||
|
}
|
||||||
|
|
||||||
// Determine output binary name
|
// Determine output binary name
|
||||||
binaryName := cfg.Name
|
binaryName := cfg.Name
|
||||||
if binaryName == "" {
|
if binaryName == "" {
|
||||||
binaryName = filepath.Base(cfg.ProjectDir)
|
binaryName = ax.Base(cfg.ProjectDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build the wails build arguments
|
// Build the wails build arguments
|
||||||
args := []string{"build"}
|
args := []string{"build"}
|
||||||
|
|
||||||
|
if len(cfg.BuildTags) > 0 {
|
||||||
|
args = append(args, "-tags", core.Join(",", cfg.BuildTags...))
|
||||||
|
}
|
||||||
|
|
||||||
|
ldflags := append([]string{}, cfg.LDFlags...)
|
||||||
|
if cfg.Version != "" && !hasVersionLDFlag(ldflags) {
|
||||||
|
ldflags = append(ldflags, core.Sprintf("-X main.version=%s", cfg.Version))
|
||||||
|
}
|
||||||
|
if len(ldflags) > 0 {
|
||||||
|
args = append(args, "-ldflags", core.Join(" ", ldflags...))
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.NSIS {
|
||||||
|
args = append(args, "-nsis")
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.WebView2 != "" {
|
||||||
|
args = append(args, "-webview2", cfg.WebView2)
|
||||||
|
}
|
||||||
|
|
||||||
// Platform
|
// Platform
|
||||||
args = append(args, "-platform", fmt.Sprintf("%s/%s", target.OS, target.Arch))
|
args = append(args, "-platform", core.Sprintf("%s/%s", target.OS, target.Arch))
|
||||||
|
|
||||||
// Output (Wails v2 uses -o for the binary name, relative to build/bin usually, but we want to control it)
|
// Output (Wails v2 uses -o for the binary name, relative to build/bin usually, but we want to control it)
|
||||||
// Actually, Wails v2 is opinionated about output dir (build/bin).
|
// Actually, Wails v2 is opinionated about output dir (build/bin).
|
||||||
// We might need to copy artifacts after build if we want them in cfg.OutputDir.
|
// We might need to copy artifacts after build if we want them in cfg.OutputDir.
|
||||||
// For now, let's try to let Wails do its thing and find the artifact.
|
// For now, let's try to let Wails do its thing and find the artifact.
|
||||||
|
|
||||||
// Create the command
|
|
||||||
cmd := exec.CommandContext(ctx, "wails", args...)
|
|
||||||
cmd.Dir = cfg.ProjectDir
|
|
||||||
|
|
||||||
// Capture output for error messages
|
// Capture output for error messages
|
||||||
output, err := cmd.CombinedOutput()
|
output, err := ax.CombinedOutput(ctx, cfg.ProjectDir, cfg.Env, wailsCommand, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "wails build failed: "+string(output), err)
|
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "wails build failed: "+output, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Wails v2 typically outputs to build/bin
|
// Wails v2 typically outputs to build/bin
|
||||||
// We need to move/copy it to our desired output dir
|
// We need to move/copy it to our desired output dir
|
||||||
|
|
||||||
// Construct the source path where Wails v2 puts the binary
|
// Construct the source path where Wails v2 puts the binary
|
||||||
wailsOutputDir := filepath.Join(cfg.ProjectDir, "build", "bin")
|
wailsOutputDir := ax.Join(cfg.ProjectDir, "build", "bin")
|
||||||
|
|
||||||
// Find the artifact in Wails output dir
|
// Find the artifact in Wails output dir
|
||||||
sourcePath, err := b.findArtifact(cfg.FS, wailsOutputDir, binaryName, target)
|
sourcePath, err := b.findArtifact(cfg.FS, wailsOutputDir, binaryName, target)
|
||||||
|
|
@ -134,20 +314,16 @@ func (b *WailsBuilder) buildV2Target(ctx context.Context, cfg *build.Config, tar
|
||||||
|
|
||||||
// Move/Copy to our output dir
|
// Move/Copy to our output dir
|
||||||
// Create platform specific dir in our output
|
// Create platform specific dir in our output
|
||||||
platformDir := filepath.Join(cfg.OutputDir, fmt.Sprintf("%s_%s", target.OS, target.Arch))
|
platformDir := ax.Join(cfg.OutputDir, core.Sprintf("%s_%s", target.OS, target.Arch))
|
||||||
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
if err := cfg.FS.EnsureDir(platformDir); err != nil {
|
||||||
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "failed to create output dir", err)
|
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "failed to create output dir", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
destPath := filepath.Join(platformDir, filepath.Base(sourcePath))
|
destPath := ax.Join(platformDir, ax.Base(sourcePath))
|
||||||
|
|
||||||
// Simple copy using the medium
|
// Copy the selected artifact, preserving directory bundles such as .app packages.
|
||||||
content, err := cfg.FS.Read(sourcePath)
|
if err := copyBuildArtifact(cfg.FS, sourcePath, destPath); err != nil {
|
||||||
if err != nil {
|
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "failed to copy artifact "+sourcePath, err)
|
||||||
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "failed to read artifact "+sourcePath, err)
|
|
||||||
}
|
|
||||||
if err := cfg.FS.Write(destPath, content); err != nil {
|
|
||||||
return build.Artifact{}, coreerr.E("WailsBuilder.buildV2Target", "failed to write artifact "+destPath, err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return build.Artifact{
|
return build.Artifact{
|
||||||
|
|
@ -165,21 +341,21 @@ func (b *WailsBuilder) findArtifact(fs io.Medium, platformDir, binaryName string
|
||||||
case "windows":
|
case "windows":
|
||||||
// Look for NSIS installer first, then plain exe
|
// Look for NSIS installer first, then plain exe
|
||||||
candidates = []string{
|
candidates = []string{
|
||||||
filepath.Join(platformDir, binaryName+"-installer.exe"),
|
ax.Join(platformDir, binaryName+"-installer.exe"),
|
||||||
filepath.Join(platformDir, binaryName+".exe"),
|
ax.Join(platformDir, binaryName+".exe"),
|
||||||
filepath.Join(platformDir, binaryName+"-amd64-installer.exe"),
|
ax.Join(platformDir, binaryName+"-amd64-installer.exe"),
|
||||||
}
|
}
|
||||||
case "darwin":
|
case "darwin":
|
||||||
// Look for .dmg, then .app bundle, then plain binary
|
// Look for .dmg, then .app bundle, then plain binary
|
||||||
candidates = []string{
|
candidates = []string{
|
||||||
filepath.Join(platformDir, binaryName+".dmg"),
|
ax.Join(platformDir, binaryName+".dmg"),
|
||||||
filepath.Join(platformDir, binaryName+".app"),
|
ax.Join(platformDir, binaryName+".app"),
|
||||||
filepath.Join(platformDir, binaryName),
|
ax.Join(platformDir, binaryName),
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
// Linux and others: look for plain binary
|
// Linux and others: look for plain binary
|
||||||
candidates = []string{
|
candidates = []string{
|
||||||
filepath.Join(platformDir, binaryName),
|
ax.Join(platformDir, binaryName),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -199,11 +375,11 @@ func (b *WailsBuilder) findArtifact(fs io.Medium, platformDir, binaryName string
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
name := entry.Name()
|
name := entry.Name()
|
||||||
// Skip common non-artifact files
|
// Skip common non-artifact files
|
||||||
if strings.HasSuffix(name, ".go") || strings.HasSuffix(name, ".json") {
|
if core.HasSuffix(name, ".go") || core.HasSuffix(name, ".json") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
path := filepath.Join(platformDir, name)
|
path := ax.Join(platformDir, name)
|
||||||
info, err := entry.Info()
|
info, err := entry.Info()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
continue
|
continue
|
||||||
|
|
@ -211,7 +387,7 @@ func (b *WailsBuilder) findArtifact(fs io.Medium, platformDir, binaryName string
|
||||||
|
|
||||||
// On Unix, check if it's executable; on Windows, check for .exe
|
// On Unix, check if it's executable; on Windows, check for .exe
|
||||||
if target.OS == "windows" {
|
if target.OS == "windows" {
|
||||||
if strings.HasSuffix(name, ".exe") {
|
if core.HasSuffix(name, ".exe") {
|
||||||
return path, nil
|
return path, nil
|
||||||
}
|
}
|
||||||
} else if info.Mode()&0111 != 0 || entry.IsDir() {
|
} else if info.Mode()&0111 != 0 || entry.IsDir() {
|
||||||
|
|
@ -223,14 +399,167 @@ func (b *WailsBuilder) findArtifact(fs io.Medium, platformDir, binaryName string
|
||||||
return "", coreerr.E("WailsBuilder.findArtifact", "no artifact found in "+platformDir, nil)
|
return "", coreerr.E("WailsBuilder.findArtifact", "no artifact found in "+platformDir, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// copyBuildArtifact copies a file or directory artifact into the build output tree.
|
||||||
|
//
|
||||||
|
// err := copyBuildArtifact(io.Local, "/tmp/source.app", "/tmp/dist/source.app")
|
||||||
|
func copyBuildArtifact(fs io.Medium, sourcePath, destPath string) error {
|
||||||
|
if fs.IsDir(sourcePath) {
|
||||||
|
if err := fs.EnsureDir(destPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := fs.List(sourcePath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
childSource := ax.Join(sourcePath, entry.Name())
|
||||||
|
childDest := ax.Join(destPath, entry.Name())
|
||||||
|
if err := copyBuildArtifact(fs, childSource, childDest); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
info, err := fs.Stat(sourcePath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := fs.Read(sourcePath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := fs.WriteMode(destPath, content, info.Mode().Perm()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveWailsCli returns the executable path for the wails CLI.
|
||||||
|
func (b *WailsBuilder) resolveWailsCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/wails",
|
||||||
|
"/opt/homebrew/bin/wails",
|
||||||
|
}
|
||||||
|
|
||||||
|
if home := core.Env("HOME"); home != "" {
|
||||||
|
paths = append(paths, ax.Join(home, "go", "bin", "wails"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("wails", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolveWailsCli", "wails CLI not found. Install it with: go install github.com/wailsapp/wails/v2/cmd/wails@latest", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveDenoCli returns the executable path for the deno CLI.
|
||||||
|
func (b *WailsBuilder) resolveDenoCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/deno",
|
||||||
|
"/opt/homebrew/bin/deno",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("deno", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolveDenoCli", "deno CLI not found. Install it from https://deno.com/runtime", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveNpmCli returns the executable path for the npm CLI.
|
||||||
|
func (b *WailsBuilder) resolveNpmCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/npm",
|
||||||
|
"/opt/homebrew/bin/npm",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("npm", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolveNpmCli", "npm CLI not found. Install Node.js from https://nodejs.org/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveBunCli returns the executable path for the bun CLI.
|
||||||
|
func (b *WailsBuilder) resolveBunCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/bun",
|
||||||
|
"/opt/homebrew/bin/bun",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("bun", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolveBunCli", "bun CLI not found. Install it from https://bun.sh/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolvePnpmCli returns the executable path for the pnpm CLI.
|
||||||
|
func (b *WailsBuilder) resolvePnpmCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/pnpm",
|
||||||
|
"/opt/homebrew/bin/pnpm",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("pnpm", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolvePnpmCli", "pnpm CLI not found. Install it from https://pnpm.io/installation", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveYarnCli returns the executable path for the yarn CLI.
|
||||||
|
func (b *WailsBuilder) resolveYarnCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/yarn",
|
||||||
|
"/opt/homebrew/bin/yarn",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("yarn", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("WailsBuilder.resolveYarnCli", "yarn CLI not found. Install it from https://yarnpkg.com/getting-started/install", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
// detectPackageManager detects the frontend package manager based on lock files.
|
// detectPackageManager detects the frontend package manager based on lock files.
|
||||||
// Returns "bun", "pnpm", "yarn", or "npm" (default).
|
// Returns "bun", "pnpm", "yarn", or "npm" (default).
|
||||||
func detectPackageManager(fs io.Medium, dir string) string {
|
func detectPackageManager(fs io.Medium, dir string) string {
|
||||||
|
if declared := detectDeclaredPackageManager(fs, dir); declared != "" {
|
||||||
|
return declared
|
||||||
|
}
|
||||||
|
|
||||||
// Check in priority order: bun, pnpm, yarn, npm
|
// Check in priority order: bun, pnpm, yarn, npm
|
||||||
lockFiles := []struct {
|
lockFiles := []struct {
|
||||||
file string
|
file string
|
||||||
manager string
|
manager string
|
||||||
}{
|
}{
|
||||||
|
{"bun.lock", "bun"},
|
||||||
{"bun.lockb", "bun"},
|
{"bun.lockb", "bun"},
|
||||||
{"pnpm-lock.yaml", "pnpm"},
|
{"pnpm-lock.yaml", "pnpm"},
|
||||||
{"yarn.lock", "yarn"},
|
{"yarn.lock", "yarn"},
|
||||||
|
|
@ -238,7 +567,7 @@ func detectPackageManager(fs io.Medium, dir string) string {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, lf := range lockFiles {
|
for _, lf := range lockFiles {
|
||||||
if fs.IsFile(filepath.Join(dir, lf.file)) {
|
if fs.IsFile(ax.Join(dir, lf.file)) {
|
||||||
return lf.manager
|
return lf.manager
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,11 +3,11 @@ package builders
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
@ -24,7 +24,7 @@ func setupWailsTestProject(t *testing.T) string {
|
||||||
"name": "testapp",
|
"name": "testapp",
|
||||||
"outputfilename": "testapp"
|
"outputfilename": "testapp"
|
||||||
}`
|
}`
|
||||||
err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte(wailsJSON), 0644)
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte(wailsJSON), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create a minimal go.mod
|
// Create a minimal go.mod
|
||||||
|
|
@ -34,7 +34,7 @@ go 1.21
|
||||||
|
|
||||||
require github.com/wailsapp/wails/v3 v3.0.0
|
require github.com/wailsapp/wails/v3 v3.0.0
|
||||||
`
|
`
|
||||||
err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644)
|
err = ax.WriteFile(ax.Join(dir, "go.mod"), []byte(goMod), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create a minimal main.go
|
// Create a minimal main.go
|
||||||
|
|
@ -44,7 +44,7 @@ func main() {
|
||||||
println("hello wails")
|
println("hello wails")
|
||||||
}
|
}
|
||||||
`
|
`
|
||||||
err = os.WriteFile(filepath.Join(dir, "main.go"), []byte(mainGo), 0644)
|
err = ax.WriteFile(ax.Join(dir, "main.go"), []byte(mainGo), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create a minimal Taskfile.yml
|
// Create a minimal Taskfile.yml
|
||||||
|
|
@ -55,7 +55,7 @@ tasks:
|
||||||
- mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}
|
- mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}
|
||||||
- touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp
|
- touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp
|
||||||
`
|
`
|
||||||
err = os.WriteFile(filepath.Join(dir, "Taskfile.yml"), []byte(taskfile), 0644)
|
err = ax.WriteFile(ax.Join(dir, "Taskfile.yml"), []byte(taskfile), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
|
|
@ -67,7 +67,7 @@ func setupWailsV2TestProject(t *testing.T) string {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
// wails.json
|
// wails.json
|
||||||
err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte("{}"), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// go.mod with v2
|
// go.mod with v2
|
||||||
|
|
@ -75,19 +75,69 @@ func setupWailsV2TestProject(t *testing.T) string {
|
||||||
go 1.21
|
go 1.21
|
||||||
require github.com/wailsapp/wails/v2 v2.8.0
|
require github.com/wailsapp/wails/v2 v2.8.0
|
||||||
`
|
`
|
||||||
err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0644)
|
err = ax.WriteFile(ax.Join(dir, "go.mod"), []byte(goMod), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Build_Taskfile_Good(t *testing.T) {
|
func setupFakeWailsToolchain(t *testing.T, binDir string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
wailsScript := `#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
log_file="${WAILS_BUILD_LOG_FILE:-}"
|
||||||
|
if [ -n "$log_file" ]; then
|
||||||
|
printf '%s\n' "$@" > "$log_file"
|
||||||
|
fi
|
||||||
|
|
||||||
|
sequence_file="${BUILD_SEQUENCE_FILE:-}"
|
||||||
|
if [ -n "$sequence_file" ]; then
|
||||||
|
printf '%s\n' "wails" >> "$sequence_file"
|
||||||
|
printf '%s\n' "$@" >> "$sequence_file"
|
||||||
|
if [ -n "${CUSTOM_ENV:-}" ]; then
|
||||||
|
printf '%s\n' "CUSTOM_ENV=${CUSTOM_ENV}" >> "$sequence_file"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
output_dir="build/bin"
|
||||||
|
binary_name="testapp"
|
||||||
|
mkdir -p "$output_dir"
|
||||||
|
printf 'fake wails binary\n' > "$output_dir/$binary_name"
|
||||||
|
chmod +x "$output_dir/$binary_name"
|
||||||
|
`
|
||||||
|
|
||||||
|
err := ax.WriteFile(ax.Join(binDir, "wails"), []byte(wailsScript), 0o755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func setupFakeFrontendCommand(t *testing.T, binDir, name string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
script := strings.ReplaceAll(`#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
sequence_file="${BUILD_SEQUENCE_FILE:-}"
|
||||||
|
if [ -n "$sequence_file" ]; then
|
||||||
|
printf '%s\n' "__NAME__" >> "$sequence_file"
|
||||||
|
printf '%s\n' "$@" >> "$sequence_file"
|
||||||
|
if [ -n "${CUSTOM_ENV:-}" ]; then
|
||||||
|
printf '%s\n' "CUSTOM_ENV=${CUSTOM_ENV}" >> "$sequence_file"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
`, "__NAME__", name)
|
||||||
|
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(binDir, name), []byte(script), 0o755))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderBuildTaskfile_Good(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if task is available
|
// Check if task is available
|
||||||
if _, err := exec.LookPath("task"); err != nil {
|
if _, err := ax.LookPath("task"); err != nil {
|
||||||
t.Skip("task not installed, skipping test")
|
t.Skip("task not installed, skipping test")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -104,7 +154,7 @@ tasks:
|
||||||
- mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}
|
- mkdir -p {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}
|
||||||
- touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp
|
- touch {{.OUTPUT_DIR}}/{{.GOOS}}_{{.GOARCH}}/testapp
|
||||||
`
|
`
|
||||||
err := os.WriteFile(filepath.Join(projectDir, "Taskfile.yml"), []byte(taskfile), 0644)
|
err := ax.WriteFile(ax.Join(projectDir, "Taskfile.yml"), []byte(taskfile), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
@ -124,26 +174,74 @@ tasks:
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Name_Good(t *testing.T) {
|
func TestWails_WailsBuilderName_Good(t *testing.T) {
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
assert.Equal(t, "wails", builder.Name())
|
assert.Equal(t, "wails", builder.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Build_V2_Good(t *testing.T) {
|
func TestWails_WailsBuilderBuildV3Config_Good(t *testing.T) {
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
CGO: false,
|
||||||
|
Name: "testapp",
|
||||||
|
Flags: []string{"-trimpath"},
|
||||||
|
LDFlags: []string{
|
||||||
|
"-s",
|
||||||
|
"-w",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
v3Config := builder.buildV3Config(cfg)
|
||||||
|
|
||||||
|
require.NotNil(t, v3Config)
|
||||||
|
assert.False(t, cfg.CGO)
|
||||||
|
assert.True(t, v3Config.CGO)
|
||||||
|
assert.Equal(t, cfg.Name, v3Config.Name)
|
||||||
|
assert.Equal(t, cfg.Flags, v3Config.Flags)
|
||||||
|
assert.Equal(t, cfg.LDFlags, v3Config.LDFlags)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderResolveFrontendDir_Good(t *testing.T) {
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("finds nested package.json frontends", func(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
frontendDir := ax.Join(projectDir, "apps", "web")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
got := builder.resolveFrontendDir(fs, projectDir)
|
||||||
|
assert.Equal(t, frontendDir, got)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("finds nested deno.json frontends", func(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
frontendDir := ax.Join(projectDir, "packages", "site")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.json"), []byte("{}"), 0o644))
|
||||||
|
|
||||||
|
got := builder.resolveFrontendDir(fs, projectDir)
|
||||||
|
assert.Equal(t, frontendDir, got)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderBuildV2_Good(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := exec.LookPath("wails"); err != nil {
|
binDir := t.TempDir()
|
||||||
t.Skip("wails not installed, skipping integration test")
|
setupFakeWailsToolchain(t, binDir)
|
||||||
}
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
|
||||||
t.Run("builds v2 project", func(t *testing.T) {
|
t.Run("builds v2 project", func(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
projectDir := setupWailsV2TestProject(t)
|
projectDir := setupWailsV2TestProject(t)
|
||||||
outputDir := t.TempDir()
|
outputDir := t.TempDir()
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
|
||||||
cfg := &build.Config{
|
cfg := &build.Config{
|
||||||
FS: fs,
|
FS: fs,
|
||||||
ProjectDir: projectDir,
|
ProjectDir: projectDir,
|
||||||
|
|
@ -154,19 +252,395 @@ func TestWailsBuilder_Build_V2_Good(t *testing.T) {
|
||||||
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
}
|
}
|
||||||
|
|
||||||
// This will likely fail in a real run because we can't easily mock the full wails v2 build process
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
// (which needs a valid project with main.go etc).
|
require.NoError(t, err)
|
||||||
// But it validates we are trying to run the command.
|
require.Len(t, artifacts, 1)
|
||||||
// For now, we just verify it attempts the build - error is expected
|
assert.FileExists(t, artifacts[0].Path)
|
||||||
_, _ = builder.Build(context.Background(), cfg, targets)
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Detect_Good(t *testing.T) {
|
func TestWails_copyBuildArtifact_PreservesMode_Good(t *testing.T) {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
t.Skip("executable mode bits are not portable on Windows")
|
||||||
|
}
|
||||||
|
|
||||||
|
sourceDir := t.TempDir()
|
||||||
|
sourcePath := ax.Join(sourceDir, "testapp")
|
||||||
|
require.NoError(t, ax.WriteFile(sourcePath, []byte("fake wails binary\n"), 0o755))
|
||||||
|
|
||||||
|
destDir := t.TempDir()
|
||||||
|
destPath := ax.Join(destDir, "testapp")
|
||||||
|
|
||||||
|
require.NoError(t, copyBuildArtifact(io.Local, sourcePath, destPath))
|
||||||
|
|
||||||
|
info, err := ax.Stat(destPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotZero(t, info.Mode()&0o111)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderBuildV2Flags_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeWailsToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsV2TestProject(t)
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
logDir := t.TempDir()
|
||||||
|
logPath := ax.Join(logDir, "wails.log")
|
||||||
|
t.Setenv("WAILS_BUILD_LOG_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "testapp",
|
||||||
|
Version: "v1.2.3",
|
||||||
|
BuildTags: []string{"integration", "webkit2_41"},
|
||||||
|
LDFlags: []string{"-s", "-w"},
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "embed",
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.NotEmpty(t, args)
|
||||||
|
assert.Equal(t, "build", args[0])
|
||||||
|
assert.Contains(t, args, "-tags")
|
||||||
|
assert.Contains(t, args, "integration,webkit2_41")
|
||||||
|
assert.Contains(t, args, "-ldflags")
|
||||||
|
assert.Contains(t, args, "-s -w -X main.version=v1.2.3")
|
||||||
|
assert.Contains(t, args, "-nsis")
|
||||||
|
assert.Contains(t, args, "-webview2")
|
||||||
|
assert.Contains(t, args, "embed")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderPreBuild_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Run("uses deno when deno manifest exists", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "deno")
|
||||||
|
setupFakeFrontendCommand(t, binDir, "npm")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 3)
|
||||||
|
assert.Equal(t, "deno", lines[0])
|
||||||
|
assert.Equal(t, "task", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("falls back to npm when only package.json exists", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "deno")
|
||||||
|
setupFakeFrontendCommand(t, binDir, "npm")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 3)
|
||||||
|
assert.Equal(t, "npm", lines[0])
|
||||||
|
assert.Equal(t, "run", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("discovers nested package.json in a monorepo", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "npm")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "apps", "web")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 3)
|
||||||
|
assert.Equal(t, "npm", lines[0])
|
||||||
|
assert.Equal(t, "run", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses bun when bun.lockb exists", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "bun")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "bun.lockb"), []byte(""), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 3)
|
||||||
|
assert.Equal(t, "bun", lines[0])
|
||||||
|
assert.Equal(t, "run", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses pnpm when pnpm-lock.yaml exists", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "pnpm")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "pnpm-lock.yaml"), []byte(""), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 3)
|
||||||
|
assert.Equal(t, "pnpm", lines[0])
|
||||||
|
assert.Equal(t, "run", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses yarn when yarn.lock exists", func(t *testing.T) {
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "yarn")
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsTestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "yarn.lock"), []byte(""), 0o644))
|
||||||
|
|
||||||
|
logPath := ax.Join(t.TempDir(), "frontend.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", logPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, builder.PreBuild(context.Background(), cfg))
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(logPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Len(t, lines, 2)
|
||||||
|
assert.Equal(t, "yarn", lines[0])
|
||||||
|
assert.Equal(t, "build", lines[1])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderBuildV2PreBuild_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "deno")
|
||||||
|
setupFakeFrontendCommand(t, binDir, "npm")
|
||||||
|
setupFakeWailsToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsV2TestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
|
||||||
|
outputDir := t.TempDir()
|
||||||
|
sequencePath := ax.Join(t.TempDir(), "build-sequence.log")
|
||||||
|
wailsLogPath := ax.Join(t.TempDir(), "wails.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", sequencePath)
|
||||||
|
t.Setenv("WAILS_BUILD_LOG_FILE", wailsLogPath)
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: outputDir,
|
||||||
|
Name: "testapp",
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(sequencePath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.GreaterOrEqual(t, len(lines), 4)
|
||||||
|
assert.Equal(t, "deno", lines[0])
|
||||||
|
assert.Equal(t, "task", lines[1])
|
||||||
|
assert.Equal(t, "build", lines[2])
|
||||||
|
assert.Equal(t, "wails", lines[3])
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderPropagatesEnvToExternalCommands_Good(t *testing.T) {
|
||||||
|
if testing.Short() {
|
||||||
|
t.Skip("skipping integration test in short mode")
|
||||||
|
}
|
||||||
|
|
||||||
|
binDir := t.TempDir()
|
||||||
|
setupFakeFrontendCommand(t, binDir, "deno")
|
||||||
|
setupFakeWailsToolchain(t, binDir)
|
||||||
|
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
||||||
|
|
||||||
|
projectDir := setupWailsV2TestProject(t)
|
||||||
|
frontendDir := ax.Join(projectDir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.json"), []byte(`{}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte(`{}`), 0o644))
|
||||||
|
|
||||||
|
sequencePath := ax.Join(t.TempDir(), "build-sequence.log")
|
||||||
|
t.Setenv("BUILD_SEQUENCE_FILE", sequencePath)
|
||||||
|
t.Setenv("CUSTOM_ENV", "expected-value")
|
||||||
|
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
cfg := &build.Config{
|
||||||
|
FS: io.Local,
|
||||||
|
ProjectDir: projectDir,
|
||||||
|
OutputDir: t.TempDir(),
|
||||||
|
Name: "testapp",
|
||||||
|
Env: []string{"CUSTOM_ENV=expected-value"},
|
||||||
|
}
|
||||||
|
targets := []build.Target{
|
||||||
|
{OS: runtime.GOOS, Arch: runtime.GOARCH},
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts, err := builder.Build(context.Background(), cfg, targets)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Len(t, artifacts, 1)
|
||||||
|
|
||||||
|
content, err := ax.ReadFile(sequencePath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
||||||
|
require.Contains(t, lines, "CUSTOM_ENV=expected-value")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderResolveWailsCli_Good(t *testing.T) {
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "wails")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := builder.resolveWailsCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderResolveWailsCli_Bad(t *testing.T) {
|
||||||
|
builder := NewWailsBuilder()
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := builder.resolveWailsCli(ax.Join(t.TempDir(), "missing-wails"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "wails CLI not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderDetect_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("detects Wails project with wails.json", func(t *testing.T) {
|
t.Run("detects Wails project with wails.json", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte("{}"), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
@ -177,7 +651,7 @@ func TestWailsBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for Go-only project", func(t *testing.T) {
|
t.Run("returns false for Go-only project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module test"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("module test"), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
@ -188,7 +662,7 @@ func TestWailsBuilder_Detect_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("returns false for Node.js project", func(t *testing.T) {
|
t.Run("returns false for Node.js project", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "package.json"), []byte("{}"), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
@ -207,11 +681,29 @@ func TestWailsBuilder_Detect_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDetectPackageManager_Good(t *testing.T) {
|
func TestWails_DetectPackageManager_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
|
t.Run("detects declared packageManager value", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte(`{"packageManager":"yarn@4.5.1"}`), 0o644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "pnpm-lock.yaml"), []byte(""), 0o644))
|
||||||
|
|
||||||
|
result := detectPackageManager(fs, dir)
|
||||||
|
assert.Equal(t, "yarn", result)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("detects bun from bun.lockb", func(t *testing.T) {
|
t.Run("detects bun from bun.lockb", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "bun.lockb"), []byte(""), 0644)
|
err := ax.WriteFile(ax.Join(dir, "bun.lockb"), []byte(""), 0o644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
result := detectPackageManager(fs, dir)
|
||||||
|
assert.Equal(t, "bun", result)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects bun from bun.lock", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "bun.lock"), []byte(""), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
|
|
@ -220,7 +712,7 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects pnpm from pnpm-lock.yaml", func(t *testing.T) {
|
t.Run("detects pnpm from pnpm-lock.yaml", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "pnpm-lock.yaml"), []byte(""), 0644)
|
err := ax.WriteFile(ax.Join(dir, "pnpm-lock.yaml"), []byte(""), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
|
|
@ -229,7 +721,7 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects yarn from yarn.lock", func(t *testing.T) {
|
t.Run("detects yarn from yarn.lock", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644)
|
err := ax.WriteFile(ax.Join(dir, "yarn.lock"), []byte(""), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
|
|
@ -238,7 +730,7 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("detects npm from package-lock.json", func(t *testing.T) {
|
t.Run("detects npm from package-lock.json", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644)
|
err := ax.WriteFile(ax.Join(dir, "package-lock.json"), []byte(""), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
|
|
@ -255,9 +747,9 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
t.Run("prefers bun over other lock files", func(t *testing.T) {
|
t.Run("prefers bun over other lock files", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create multiple lock files
|
// Create multiple lock files
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "bun.lockb"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "bun.lockb"), []byte(""), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "yarn.lock"), []byte(""), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package-lock.json"), []byte(""), 0o644))
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
assert.Equal(t, "bun", result)
|
assert.Equal(t, "bun", result)
|
||||||
|
|
@ -266,9 +758,9 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
t.Run("prefers pnpm over yarn and npm", func(t *testing.T) {
|
t.Run("prefers pnpm over yarn and npm", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create multiple lock files (no bun)
|
// Create multiple lock files (no bun)
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "pnpm-lock.yaml"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "pnpm-lock.yaml"), []byte(""), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "yarn.lock"), []byte(""), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package-lock.json"), []byte(""), 0o644))
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
assert.Equal(t, "pnpm", result)
|
assert.Equal(t, "pnpm", result)
|
||||||
|
|
@ -277,15 +769,58 @@ func TestDetectPackageManager_Good(t *testing.T) {
|
||||||
t.Run("prefers yarn over npm", func(t *testing.T) {
|
t.Run("prefers yarn over npm", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create multiple lock files (no bun or pnpm)
|
// Create multiple lock files (no bun or pnpm)
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "yarn.lock"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "yarn.lock"), []byte(""), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package-lock.json"), []byte(""), 0644))
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package-lock.json"), []byte(""), 0o644))
|
||||||
|
|
||||||
result := detectPackageManager(fs, dir)
|
result := detectPackageManager(fs, dir)
|
||||||
assert.Equal(t, "yarn", result)
|
assert.Equal(t, "yarn", result)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("normalises package manager version pins", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte(`{"packageManager":"npm@10.8.2"}`), 0o644))
|
||||||
|
|
||||||
|
result := detectPackageManager(fs, dir)
|
||||||
|
assert.Equal(t, "npm", result)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Build_Bad(t *testing.T) {
|
func TestWails_CopyBuildArtifact_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("copies files", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
sourcePath := ax.Join(dir, "build", "bin", "testapp")
|
||||||
|
destPath := ax.Join(dir, "dist", "linux_amd64", "testapp")
|
||||||
|
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Dir(sourcePath), 0o755))
|
||||||
|
require.NoError(t, fs.Write(sourcePath, "binary-data"))
|
||||||
|
|
||||||
|
require.NoError(t, copyBuildArtifact(fs, sourcePath, destPath))
|
||||||
|
|
||||||
|
got, err := fs.Read(destPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "binary-data", got)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("copies app bundles recursively", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
sourcePath := ax.Join(dir, "build", "bin", "testapp.app")
|
||||||
|
binaryPath := ax.Join(sourcePath, "Contents", "MacOS", "testapp")
|
||||||
|
destPath := ax.Join(dir, "dist", "darwin_arm64", "testapp.app")
|
||||||
|
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Dir(binaryPath), 0o755))
|
||||||
|
require.NoError(t, fs.Write(binaryPath, "bundle-binary"))
|
||||||
|
|
||||||
|
require.NoError(t, copyBuildArtifact(fs, sourcePath, destPath))
|
||||||
|
|
||||||
|
got, err := fs.Read(ax.Join(destPath, "Contents", "MacOS", "testapp"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "bundle-binary", got)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWails_WailsBuilderBuild_Bad(t *testing.T) {
|
||||||
t.Run("returns error for nil config", func(t *testing.T) {
|
t.Run("returns error for nil config", func(t *testing.T) {
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
||||||
|
|
@ -313,13 +848,13 @@ func TestWailsBuilder_Build_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Build_Good(t *testing.T) {
|
func TestWails_WailsBuilderBuild_Good(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if wails3 is available in PATH
|
// Check if wails3 is available in PATH
|
||||||
if _, err := exec.LookPath("wails3"); err != nil {
|
if _, err := ax.LookPath("wails3"); err != nil {
|
||||||
t.Skip("wails3 not installed, skipping integration test")
|
t.Skip("wails3 not installed, skipping integration test")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -349,13 +884,13 @@ func TestWailsBuilder_Build_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Interface_Good(t *testing.T) {
|
func TestWails_WailsBuilderInterface_Good(t *testing.T) {
|
||||||
// Verify WailsBuilder implements Builder interface
|
// Verify WailsBuilder implements Builder interface
|
||||||
var _ build.Builder = (*WailsBuilder)(nil)
|
var _ build.Builder = (*WailsBuilder)(nil)
|
||||||
var _ build.Builder = NewWailsBuilder()
|
var _ build.Builder = NewWailsBuilder()
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWailsBuilder_Ugly(t *testing.T) {
|
func TestWails_WailsBuilder_Ugly(t *testing.T) {
|
||||||
t.Run("handles nonexistent frontend directory gracefully", func(t *testing.T) {
|
t.Run("handles nonexistent frontend directory gracefully", func(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
|
|
@ -363,7 +898,7 @@ func TestWailsBuilder_Ugly(t *testing.T) {
|
||||||
|
|
||||||
// Create a Wails project without a frontend directory
|
// Create a Wails project without a frontend directory
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
err := os.WriteFile(filepath.Join(dir, "wails.json"), []byte("{}"), 0644)
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte("{}"), 0o644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
builder := NewWailsBuilder()
|
builder := NewWailsBuilder()
|
||||||
|
|
|
||||||
5
pkg/build/builders/zip_deterministic.go
Normal file
5
pkg/build/builders/zip_deterministic.go
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
package builders
|
||||||
|
|
||||||
|
import "time"
|
||||||
|
|
||||||
|
var deterministicZipTime = time.Unix(0, 0).UTC()
|
||||||
261
pkg/build/cache.go
Normal file
261
pkg/build/cache.go
Normal file
|
|
@ -0,0 +1,261 @@
|
||||||
|
// Package build provides project type detection and cross-compilation for the Core build system.
|
||||||
|
// This file handles build cache configuration and key generation.
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CacheConfig holds build cache configuration loaded from .core/build.yaml.
|
||||||
|
//
|
||||||
|
// cfg := build.CacheConfig{
|
||||||
|
// Enabled: true,
|
||||||
|
// Directory: ".core/cache",
|
||||||
|
// Paths: []string{"~/.cache/go-build", "~/go/pkg/mod"},
|
||||||
|
// }
|
||||||
|
type CacheConfig struct {
|
||||||
|
// Enabled turns cache setup on for the build.
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
// Directory is where cache metadata is stored.
|
||||||
|
Directory string `yaml:"dir,omitempty"`
|
||||||
|
// KeyPrefix prefixes the generated cache key.
|
||||||
|
KeyPrefix string `yaml:"key_prefix,omitempty"`
|
||||||
|
// Paths are cache directories that should exist before the build starts.
|
||||||
|
Paths []string `yaml:"paths,omitempty"`
|
||||||
|
// RestoreKeys are fallback prefixes used when the exact cache key is not present.
|
||||||
|
RestoreKeys []string `yaml:"restore_keys,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalYAML accepts both the concise build config keys and the longer aliases.
|
||||||
|
//
|
||||||
|
// err := yaml.Unmarshal([]byte("dir: .core/cache"), &cfg)
|
||||||
|
func (c *CacheConfig) UnmarshalYAML(value *yaml.Node) error {
|
||||||
|
type rawCacheConfig struct {
|
||||||
|
Enabled bool `yaml:"enabled"`
|
||||||
|
Directory string `yaml:"directory"`
|
||||||
|
Dir string `yaml:"dir"`
|
||||||
|
KeyPrefix string `yaml:"key_prefix"`
|
||||||
|
Key string `yaml:"key"`
|
||||||
|
Paths []string `yaml:"paths"`
|
||||||
|
RestoreKeys []string `yaml:"restore_keys"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var raw rawCacheConfig
|
||||||
|
if err := value.Decode(&raw); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Enabled = raw.Enabled
|
||||||
|
c.Directory = firstNonEmpty(raw.Directory, raw.Dir)
|
||||||
|
c.KeyPrefix = firstNonEmpty(raw.KeyPrefix, raw.Key)
|
||||||
|
c.Paths = raw.Paths
|
||||||
|
c.RestoreKeys = raw.RestoreKeys
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetupCache normalises cache paths and ensures the cache directories exist.
|
||||||
|
//
|
||||||
|
// err := build.SetupCache(io.Local, ".", &build.CacheConfig{
|
||||||
|
// Enabled: true,
|
||||||
|
// Paths: []string{"~/.cache/go-build", "~/go/pkg/mod"},
|
||||||
|
// })
|
||||||
|
func SetupCache(fs io.Medium, dir string, cfg *CacheConfig) error {
|
||||||
|
if fs == nil || cfg == nil || !cfg.Enabled {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.Directory == "" {
|
||||||
|
cfg.Directory = ax.Join(dir, ConfigDir, "cache")
|
||||||
|
}
|
||||||
|
cfg.Directory = normaliseCachePath(dir, cfg.Directory)
|
||||||
|
|
||||||
|
if err := fs.EnsureDir(cfg.Directory); err != nil {
|
||||||
|
return coreerr.E("build.SetupCache", "failed to create cache directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
normalisedPaths := make([]string, 0, len(cfg.Paths))
|
||||||
|
for _, path := range cfg.Paths {
|
||||||
|
path = normaliseCachePath(dir, path)
|
||||||
|
if path == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := fs.EnsureDir(path); err != nil {
|
||||||
|
return coreerr.E("build.SetupCache", "failed to create cache path "+path, err)
|
||||||
|
}
|
||||||
|
normalisedPaths = append(normalisedPaths, path)
|
||||||
|
}
|
||||||
|
cfg.Paths = deduplicateStrings(normalisedPaths)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetupBuildCache prepares the cache configuration stored on a build config.
|
||||||
|
//
|
||||||
|
// err := build.SetupBuildCache(io.Local, ".", cfg)
|
||||||
|
func SetupBuildCache(fs io.Medium, dir string, cfg *BuildConfig) error {
|
||||||
|
if fs == nil || cfg == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return SetupCache(fs, dir, &cfg.Build.Cache)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheKey returns a deterministic cache key for the build configuration and target.
|
||||||
|
//
|
||||||
|
// key := build.CacheKey("core-build", build.Target{OS: "linux", Arch: "amd64"}, &build.CacheConfig{
|
||||||
|
// KeyPrefix: "main",
|
||||||
|
// })
|
||||||
|
func CacheKey(buildName string, target Target, cfg *CacheConfig) string {
|
||||||
|
if buildName == "" {
|
||||||
|
buildName = "build"
|
||||||
|
}
|
||||||
|
|
||||||
|
keyPrefix := buildName
|
||||||
|
if cfg != nil && cfg.KeyPrefix != "" {
|
||||||
|
keyPrefix = cfg.KeyPrefix
|
||||||
|
}
|
||||||
|
|
||||||
|
snapshot := cacheKeySnapshot(buildName, target, cfg)
|
||||||
|
sum := sha256.Sum256([]byte(snapshot))
|
||||||
|
suffix := hex.EncodeToString(sum[:])[:12]
|
||||||
|
|
||||||
|
return core.Join("-", keyPrefix, target.OS, target.Arch, suffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheEnvironment returns environment variables derived from the cache config.
|
||||||
|
//
|
||||||
|
// env := build.CacheEnvironment(&build.CacheConfig{Enabled: true, Paths: []string{"/tmp/go-build"}})
|
||||||
|
func CacheEnvironment(cfg *CacheConfig) []string {
|
||||||
|
if cfg == nil || !cfg.Enabled {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var env []string
|
||||||
|
|
||||||
|
for _, path := range cfg.Paths {
|
||||||
|
switch cacheEnvironmentName(path) {
|
||||||
|
case "GOCACHE":
|
||||||
|
env = appendIfMissing(env, "GOCACHE="+path)
|
||||||
|
case "GOMODCACHE":
|
||||||
|
env = appendIfMissing(env, "GOMODCACHE="+path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return deduplicateStrings(env)
|
||||||
|
}
|
||||||
|
|
||||||
|
func cacheKeySnapshot(buildName string, target Target, cfg *CacheConfig) string {
|
||||||
|
parts := []string{
|
||||||
|
"build",
|
||||||
|
buildName,
|
||||||
|
target.OS,
|
||||||
|
target.Arch,
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg == nil {
|
||||||
|
return core.Join("\n", parts...)
|
||||||
|
}
|
||||||
|
|
||||||
|
parts = append(parts,
|
||||||
|
strconv.FormatBool(cfg.Enabled),
|
||||||
|
cfg.Directory,
|
||||||
|
cfg.KeyPrefix,
|
||||||
|
)
|
||||||
|
|
||||||
|
paths := deduplicateStrings(append([]string(nil), cfg.Paths...))
|
||||||
|
sort.Strings(paths)
|
||||||
|
parts = append(parts, "paths:"+core.Join(",", paths...))
|
||||||
|
|
||||||
|
restoreKeys := deduplicateStrings(append([]string(nil), cfg.RestoreKeys...))
|
||||||
|
sort.Strings(restoreKeys)
|
||||||
|
parts = append(parts, "restore:"+core.Join(",", restoreKeys...))
|
||||||
|
|
||||||
|
return core.Join("\n", parts...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func cacheEnvironmentName(path string) string {
|
||||||
|
base := strings.ToLower(ax.Base(path))
|
||||||
|
|
||||||
|
switch base {
|
||||||
|
case "go-build", "gocache":
|
||||||
|
return "GOCACHE"
|
||||||
|
case "go-mod", "gomodcache":
|
||||||
|
return "GOMODCACHE"
|
||||||
|
default:
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendIfMissing(values []string, value string) []string {
|
||||||
|
for _, current := range values {
|
||||||
|
if current == value {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return append(values, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
func normaliseCachePath(baseDir, path string) string {
|
||||||
|
path = strings.TrimSpace(path)
|
||||||
|
if path == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(path, "~") {
|
||||||
|
home := core.Env("HOME")
|
||||||
|
if home != "" {
|
||||||
|
if path == "~" {
|
||||||
|
return ax.Clean(home)
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(path, "~/") {
|
||||||
|
return ax.Join(home, strings.TrimPrefix(path, "~/"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ax.IsAbs(path) {
|
||||||
|
return ax.Clean(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ax.Join(baseDir, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
func deduplicateStrings(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
seen := make(map[string]struct{}, len(values))
|
||||||
|
result := make([]string, 0, len(values))
|
||||||
|
for _, value := range values {
|
||||||
|
if value == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := seen[value]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
seen[value] = struct{}{}
|
||||||
|
result = append(result, value)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func firstNonEmpty(values ...string) string {
|
||||||
|
for _, value := range values {
|
||||||
|
if strings.TrimSpace(value) != "" {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
144
pkg/build/cache_test.go
Normal file
144
pkg/build/cache_test.go
Normal file
|
|
@ -0,0 +1,144 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCache_SetupCache_Good(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
cfg := &CacheConfig{
|
||||||
|
Enabled: true,
|
||||||
|
Paths: []string{
|
||||||
|
"cache/go-build",
|
||||||
|
"cache/go-mod",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := SetupCache(fs, "/workspace/project", cfg)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, "/workspace/project/.core/cache", cfg.Directory)
|
||||||
|
assert.Equal(t, []string{
|
||||||
|
"/workspace/project/cache/go-build",
|
||||||
|
"/workspace/project/cache/go-mod",
|
||||||
|
}, cfg.Paths)
|
||||||
|
|
||||||
|
assert.True(t, fs.Exists("/workspace/project/.core/cache"))
|
||||||
|
assert.True(t, fs.Exists("/workspace/project/cache/go-build"))
|
||||||
|
assert.True(t, fs.Exists("/workspace/project/cache/go-mod"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCache_SetupBuildCache_Good(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
Cache: CacheConfig{
|
||||||
|
Enabled: true,
|
||||||
|
Paths: []string{
|
||||||
|
"cache/go-build",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := SetupBuildCache(fs, "/workspace/project", cfg)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, "/workspace/project/.core/cache", cfg.Build.Cache.Directory)
|
||||||
|
assert.Equal(t, []string{"/workspace/project/cache/go-build"}, cfg.Build.Cache.Paths)
|
||||||
|
assert.True(t, fs.Exists("/workspace/project/.core/cache"))
|
||||||
|
assert.True(t, fs.Exists("/workspace/project/cache/go-build"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCache_SetupCache_Good_Disabled(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
cfg := &CacheConfig{
|
||||||
|
Enabled: false,
|
||||||
|
Paths: []string{"cache/go-build"},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := SetupCache(fs, "/workspace/project", cfg)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Empty(t, fs.Dirs)
|
||||||
|
assert.Empty(t, fs.Files)
|
||||||
|
assert.Empty(t, cfg.Directory)
|
||||||
|
assert.Equal(t, []string{"cache/go-build"}, cfg.Paths)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCache_SetupBuildCache_Good_Disabled(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
Cache: CacheConfig{
|
||||||
|
Enabled: false,
|
||||||
|
Paths: []string{"cache/go-build"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := SetupBuildCache(fs, "/workspace/project", cfg)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
assert.Empty(t, fs.Dirs)
|
||||||
|
assert.Empty(t, cfg.Build.Cache.Directory)
|
||||||
|
assert.Equal(t, []string{"cache/go-build"}, cfg.Build.Cache.Paths)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCache_CacheKey_Good(t *testing.T) {
|
||||||
|
first := CacheKey("core-build", Target{OS: "linux", Arch: "amd64"}, &CacheConfig{
|
||||||
|
KeyPrefix: "main",
|
||||||
|
Paths: []string{
|
||||||
|
"cache/go-build",
|
||||||
|
"cache/go-mod",
|
||||||
|
},
|
||||||
|
RestoreKeys: []string{
|
||||||
|
"main-linux",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
second := CacheKey("core-build", Target{OS: "linux", Arch: "amd64"}, &CacheConfig{
|
||||||
|
KeyPrefix: "main",
|
||||||
|
Paths: []string{
|
||||||
|
"cache/go-mod",
|
||||||
|
"cache/go-build",
|
||||||
|
},
|
||||||
|
RestoreKeys: []string{
|
||||||
|
"main-linux",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
third := CacheKey("core-build", Target{OS: "darwin", Arch: "arm64"}, &CacheConfig{
|
||||||
|
KeyPrefix: "main",
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, first, second)
|
||||||
|
assert.NotEqual(t, first, third)
|
||||||
|
assert.Contains(t, first, "main-linux-amd64-")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCache_CacheEnvironment_Good(t *testing.T) {
|
||||||
|
t.Run("maps cache directory and Go cache paths to env vars", func(t *testing.T) {
|
||||||
|
env := CacheEnvironment(&CacheConfig{
|
||||||
|
Enabled: true,
|
||||||
|
Paths: []string{
|
||||||
|
"/workspace/project/cache/go-build",
|
||||||
|
"/workspace/project/cache/go-mod",
|
||||||
|
"/workspace/project/cache/go-build",
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, []string{
|
||||||
|
"GOCACHE=/workspace/project/cache/go-build",
|
||||||
|
"GOMODCACHE=/workspace/project/cache/go-mod",
|
||||||
|
}, env)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("disabled cache returns no env vars", func(t *testing.T) {
|
||||||
|
assert.Nil(t, CacheEnvironment(&CacheConfig{Enabled: false}))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -4,18 +4,17 @@ package build
|
||||||
import (
|
import (
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"fmt"
|
|
||||||
"io"
|
"io"
|
||||||
"path/filepath"
|
"sort"
|
||||||
"slices"
|
|
||||||
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
io_interface "dappco.re/go/core/io"
|
io_interface "dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Checksum computes SHA256 for an artifact and returns the artifact with the Checksum field filled.
|
// Checksum computes SHA256 for an artifact and returns the artifact with the Checksum field filled.
|
||||||
|
//
|
||||||
|
// cs, err := build.Checksum(io.Local, artifact)
|
||||||
func Checksum(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
func Checksum(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
||||||
if artifact.Path == "" {
|
if artifact.Path == "" {
|
||||||
return Artifact{}, coreerr.E("build.Checksum", "artifact path is empty", nil)
|
return Artifact{}, coreerr.E("build.Checksum", "artifact path is empty", nil)
|
||||||
|
|
@ -46,6 +45,8 @@ func Checksum(fs io_interface.Medium, artifact Artifact) (Artifact, error) {
|
||||||
|
|
||||||
// ChecksumAll computes checksums for all artifacts.
|
// ChecksumAll computes checksums for all artifacts.
|
||||||
// Returns a slice of artifacts with their Checksum fields filled.
|
// Returns a slice of artifacts with their Checksum fields filled.
|
||||||
|
//
|
||||||
|
// checked, err := build.ChecksumAll(io.Local, artifacts)
|
||||||
func ChecksumAll(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
func ChecksumAll(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, error) {
|
||||||
if len(artifacts) == 0 {
|
if len(artifacts) == 0 {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
|
|
@ -70,6 +71,8 @@ func ChecksumAll(fs io_interface.Medium, artifacts []Artifact) ([]Artifact, erro
|
||||||
//
|
//
|
||||||
// The artifacts should have their Checksum fields filled (call ChecksumAll first).
|
// The artifacts should have their Checksum fields filled (call ChecksumAll first).
|
||||||
// Filenames are relative to the output directory (just the basename).
|
// Filenames are relative to the output directory (just the basename).
|
||||||
|
//
|
||||||
|
// err := build.WriteChecksumFile(io.Local, artifacts, "dist/CHECKSUMS.txt")
|
||||||
func WriteChecksumFile(fs io_interface.Medium, artifacts []Artifact, path string) error {
|
func WriteChecksumFile(fs io_interface.Medium, artifacts []Artifact, path string) error {
|
||||||
if len(artifacts) == 0 {
|
if len(artifacts) == 0 {
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -81,14 +84,14 @@ func WriteChecksumFile(fs io_interface.Medium, artifacts []Artifact, path string
|
||||||
if artifact.Checksum == "" {
|
if artifact.Checksum == "" {
|
||||||
return coreerr.E("build.WriteChecksumFile", "artifact "+artifact.Path+" has no checksum", nil)
|
return coreerr.E("build.WriteChecksumFile", "artifact "+artifact.Path+" has no checksum", nil)
|
||||||
}
|
}
|
||||||
filename := filepath.Base(artifact.Path)
|
filename := core.PathBase(artifact.Path)
|
||||||
lines = append(lines, fmt.Sprintf("%s %s", artifact.Checksum, filename))
|
lines = append(lines, core.Sprintf("%s %s", artifact.Checksum, filename))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sort lines for consistent output
|
// Sort lines for consistent output
|
||||||
slices.Sort(lines)
|
sort.Strings(lines)
|
||||||
|
|
||||||
content := strings.Join(lines, "\n") + "\n"
|
content := core.Concat(core.Join("\n", lines...), "\n")
|
||||||
|
|
||||||
// Write the file using the medium (which handles directory creation in Write)
|
// Write the file using the medium (which handles directory creation in Write)
|
||||||
if err := fs.Write(path, content); err != nil {
|
if err := fs.Write(path, content); err != nil {
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,10 @@
|
||||||
package build
|
package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
@ -16,14 +15,14 @@ func setupChecksumTestFile(t *testing.T, content string) string {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
path := filepath.Join(dir, "testfile")
|
path := ax.Join(dir, "testfile")
|
||||||
err := os.WriteFile(path, []byte(content), 0644)
|
err := ax.WriteFile(path, []byte(content), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
return path
|
return path
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChecksum_Good(t *testing.T) {
|
func TestChecksum_Checksum_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("computes SHA256 checksum", func(t *testing.T) {
|
t.Run("computes SHA256 checksum", func(t *testing.T) {
|
||||||
// Known SHA256 of "Hello, World!\n"
|
// Known SHA256 of "Hello, World!\n"
|
||||||
|
|
@ -99,7 +98,7 @@ func TestChecksum_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChecksum_Bad(t *testing.T) {
|
func TestChecksum_Checksum_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns error for empty path", func(t *testing.T) {
|
t.Run("returns error for empty path", func(t *testing.T) {
|
||||||
artifact := Artifact{
|
artifact := Artifact{
|
||||||
|
|
@ -128,7 +127,7 @@ func TestChecksum_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChecksumAll_Good(t *testing.T) {
|
func TestChecksum_ChecksumAll_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("checksums multiple artifacts", func(t *testing.T) {
|
t.Run("checksums multiple artifacts", func(t *testing.T) {
|
||||||
paths := []string{
|
paths := []string{
|
||||||
|
|
@ -168,7 +167,7 @@ func TestChecksumAll_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChecksumAll_Bad(t *testing.T) {
|
func TestChecksum_ChecksumAll_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns partial results on error", func(t *testing.T) {
|
t.Run("returns partial results on error", func(t *testing.T) {
|
||||||
path := setupChecksumTestFile(t, "valid content")
|
path := setupChecksumTestFile(t, "valid content")
|
||||||
|
|
@ -186,11 +185,11 @@ func TestChecksumAll_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWriteChecksumFile_Good(t *testing.T) {
|
func TestChecksum_WriteChecksumFile_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("writes checksum file with correct format", func(t *testing.T) {
|
t.Run("writes checksum file with correct format", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "CHECKSUMS.txt")
|
||||||
|
|
||||||
artifacts := []Artifact{
|
artifacts := []Artifact{
|
||||||
{Path: "/output/app_linux_amd64.tar.gz", Checksum: "abc123def456", OS: "linux", Arch: "amd64"},
|
{Path: "/output/app_linux_amd64.tar.gz", Checksum: "abc123def456", OS: "linux", Arch: "amd64"},
|
||||||
|
|
@ -201,10 +200,10 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Read and verify content
|
// Read and verify content
|
||||||
content, err := os.ReadFile(checksumPath)
|
content, err := ax.ReadFile(checksumPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
|
lines := core.Split(core.Trim(string(content)), "\n")
|
||||||
require.Len(t, lines, 2)
|
require.Len(t, lines, 2)
|
||||||
|
|
||||||
// Lines should be sorted alphabetically
|
// Lines should be sorted alphabetically
|
||||||
|
|
@ -214,7 +213,7 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("creates parent directories", func(t *testing.T) {
|
t.Run("creates parent directories", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "nested", "deep", "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "nested", "deep", "CHECKSUMS.txt")
|
||||||
|
|
||||||
artifacts := []Artifact{
|
artifacts := []Artifact{
|
||||||
{Path: "/output/app.tar.gz", Checksum: "abc123", OS: "linux", Arch: "amd64"},
|
{Path: "/output/app.tar.gz", Checksum: "abc123", OS: "linux", Arch: "amd64"},
|
||||||
|
|
@ -227,19 +226,18 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("does nothing for empty artifacts", func(t *testing.T) {
|
t.Run("does nothing for empty artifacts", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "CHECKSUMS.txt")
|
||||||
|
|
||||||
err := WriteChecksumFile(fs, []Artifact{}, checksumPath)
|
err := WriteChecksumFile(fs, []Artifact{}, checksumPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// File should not exist
|
// File should not exist
|
||||||
_, err = os.Stat(checksumPath)
|
assert.False(t, ax.Exists(checksumPath))
|
||||||
assert.True(t, os.IsNotExist(err))
|
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("does nothing for nil artifacts", func(t *testing.T) {
|
t.Run("does nothing for nil artifacts", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "CHECKSUMS.txt")
|
||||||
|
|
||||||
err := WriteChecksumFile(fs, nil, checksumPath)
|
err := WriteChecksumFile(fs, nil, checksumPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
@ -247,7 +245,7 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("uses only basename for filenames", func(t *testing.T) {
|
t.Run("uses only basename for filenames", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "CHECKSUMS.txt")
|
||||||
|
|
||||||
artifacts := []Artifact{
|
artifacts := []Artifact{
|
||||||
{Path: "/some/deep/nested/path/myapp_linux_amd64.tar.gz", Checksum: "checksum123", OS: "linux", Arch: "amd64"},
|
{Path: "/some/deep/nested/path/myapp_linux_amd64.tar.gz", Checksum: "checksum123", OS: "linux", Arch: "amd64"},
|
||||||
|
|
@ -256,7 +254,7 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
err := WriteChecksumFile(fs, artifacts, checksumPath)
|
err := WriteChecksumFile(fs, artifacts, checksumPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
content, err := os.ReadFile(checksumPath)
|
content, err := ax.ReadFile(checksumPath)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Should only contain the basename
|
// Should only contain the basename
|
||||||
|
|
@ -265,11 +263,11 @@ func TestWriteChecksumFile_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWriteChecksumFile_Bad(t *testing.T) {
|
func TestChecksum_WriteChecksumFile_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns error for artifact without checksum", func(t *testing.T) {
|
t.Run("returns error for artifact without checksum", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
checksumPath := filepath.Join(dir, "CHECKSUMS.txt")
|
checksumPath := ax.Join(dir, "CHECKSUMS.txt")
|
||||||
|
|
||||||
artifacts := []Artifact{
|
artifacts := []Artifact{
|
||||||
{Path: "/output/app.tar.gz", Checksum: "", OS: "linux", Arch: "amd64"}, // No checksum
|
{Path: "/output/app.tar.gz", Checksum: "", OS: "linux", Arch: "amd64"}, // No checksum
|
||||||
|
|
|
||||||
210
pkg/build/ci.go
Normal file
210
pkg/build/ci.go
Normal file
|
|
@ -0,0 +1,210 @@
|
||||||
|
// Package build provides project type detection and cross-compilation for the Core build system.
|
||||||
|
// This file handles CI environment detection and GitHub Actions output formatting.
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
io_interface "dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CIContext holds environment information detected from a GitHub Actions run.
|
||||||
|
//
|
||||||
|
// ci := build.DetectCI()
|
||||||
|
// if ci != nil {
|
||||||
|
// fmt.Println(ci.ShortSHA) // "abc1234"
|
||||||
|
// }
|
||||||
|
type CIContext struct {
|
||||||
|
// Ref is the full git ref (GITHUB_REF).
|
||||||
|
// ci.Ref // "refs/tags/v1.2.3"
|
||||||
|
Ref string
|
||||||
|
// SHA is the full commit hash (GITHUB_SHA).
|
||||||
|
// ci.SHA // "abc1234def5678..."
|
||||||
|
SHA string
|
||||||
|
// ShortSHA is the first 7 characters of SHA.
|
||||||
|
// ci.ShortSHA // "abc1234"
|
||||||
|
ShortSHA string
|
||||||
|
// Tag is the tag name when the ref is a tag ref.
|
||||||
|
// ci.Tag // "v1.2.3"
|
||||||
|
Tag string
|
||||||
|
// IsTag is true when the ref is a tag ref (refs/tags/...).
|
||||||
|
// ci.IsTag // true
|
||||||
|
IsTag bool
|
||||||
|
// Branch is the branch name when the ref is a branch ref.
|
||||||
|
// ci.Branch // "main"
|
||||||
|
Branch string
|
||||||
|
// Repo is the owner/repo string (GITHUB_REPOSITORY).
|
||||||
|
// ci.Repo // "dappcore/core"
|
||||||
|
Repo string
|
||||||
|
// Owner is the repository owner derived from Repo.
|
||||||
|
// ci.Owner // "dappcore"
|
||||||
|
Owner string
|
||||||
|
}
|
||||||
|
|
||||||
|
// artifactMeta is the structure written to artifact_meta.json.
|
||||||
|
type artifactMeta struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
OS string `json:"os"`
|
||||||
|
Arch string `json:"arch"`
|
||||||
|
Ref string `json:"ref,omitempty"`
|
||||||
|
SHA string `json:"sha,omitempty"`
|
||||||
|
Tag string `json:"tag,omitempty"`
|
||||||
|
Branch string `json:"branch,omitempty"`
|
||||||
|
IsTag bool `json:"is_tag"`
|
||||||
|
Repo string `json:"repo,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// FormatGitHubAnnotation formats a build message as a GitHub Actions annotation.
|
||||||
|
//
|
||||||
|
// s := build.FormatGitHubAnnotation("error", "main.go", 42, "undefined: foo")
|
||||||
|
// // "::error file=main.go,line=42::undefined: foo"
|
||||||
|
//
|
||||||
|
// s := build.FormatGitHubAnnotation("warning", "pkg/build/ci.go", 10, "unused import")
|
||||||
|
// // "::warning file=pkg/build/ci.go,line=10::unused import"
|
||||||
|
func FormatGitHubAnnotation(level, file string, line int, message string) string {
|
||||||
|
return core.Sprintf("::%s file=%s,line=%d::%s", level, file, line, message)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DetectCI reads GitHub Actions environment variables and returns a populated CIContext.
|
||||||
|
// Returns nil if GITHUB_ACTIONS is not set or GITHUB_SHA is empty, which indicates
|
||||||
|
// the process is not running inside GitHub Actions.
|
||||||
|
//
|
||||||
|
// ci := build.DetectCI()
|
||||||
|
// if ci == nil {
|
||||||
|
// // running locally, skip CI-specific output
|
||||||
|
// }
|
||||||
|
// if ci != nil && ci.IsTag {
|
||||||
|
// // upload release assets
|
||||||
|
// }
|
||||||
|
func DetectCI() *CIContext {
|
||||||
|
return detectGitHubContext(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DetectGitHubMetadata returns GitHub CI metadata when the standard environment
|
||||||
|
// variables are present, even if GITHUB_ACTIONS is unset.
|
||||||
|
//
|
||||||
|
// This is useful for metadata emission paths that only need the GitHub ref/SHA
|
||||||
|
// shape and should not be coupled to a specific runner environment.
|
||||||
|
func DetectGitHubMetadata() *CIContext {
|
||||||
|
return detectGitHubContext(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func detectGitHubContext(requireActions bool) *CIContext {
|
||||||
|
if requireActions && core.Env("GITHUB_ACTIONS") == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
sha := core.Env("GITHUB_SHA")
|
||||||
|
if sha == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
ref := core.Env("GITHUB_REF")
|
||||||
|
repo := core.Env("GITHUB_REPOSITORY")
|
||||||
|
|
||||||
|
ctx := &CIContext{
|
||||||
|
Ref: ref,
|
||||||
|
SHA: sha,
|
||||||
|
Repo: repo,
|
||||||
|
}
|
||||||
|
|
||||||
|
populateGitHubContext(ctx)
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
func populateGitHubContext(ctx *CIContext) {
|
||||||
|
if ctx == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ShortSHA is first 7 chars of SHA.
|
||||||
|
runes := []rune(ctx.SHA)
|
||||||
|
if len(runes) >= 7 {
|
||||||
|
ctx.ShortSHA = string(runes[:7])
|
||||||
|
} else {
|
||||||
|
ctx.ShortSHA = ctx.SHA
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive owner from "owner/repo" format.
|
||||||
|
if ctx.Repo != "" {
|
||||||
|
parts := core.SplitN(ctx.Repo, "/", 2)
|
||||||
|
if len(parts) == 2 {
|
||||||
|
ctx.Owner = parts[0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Classify ref as tag or branch.
|
||||||
|
const tagPrefix = "refs/tags/"
|
||||||
|
const branchPrefix = "refs/heads/"
|
||||||
|
|
||||||
|
if core.HasPrefix(ctx.Ref, tagPrefix) {
|
||||||
|
ctx.IsTag = true
|
||||||
|
ctx.Tag = core.TrimPrefix(ctx.Ref, tagPrefix)
|
||||||
|
} else if core.HasPrefix(ctx.Ref, branchPrefix) {
|
||||||
|
ctx.Branch = core.TrimPrefix(ctx.Ref, branchPrefix)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ArtifactName generates a canonical artifact filename from the build name, CI context, and target.
|
||||||
|
// Format: {name}_{OS}_{ARCH}_{TAG|SHORT_SHA}
|
||||||
|
// When ci is nil or has no tag or SHA, only the name and target are used.
|
||||||
|
//
|
||||||
|
// name := build.ArtifactName("core", ci, build.Target{OS: "linux", Arch: "amd64"})
|
||||||
|
// // "core_linux_amd64_v1.2.3" (when ci.IsTag)
|
||||||
|
// // "core_linux_amd64_abc1234" (when ci != nil, not a tag)
|
||||||
|
// // "core_linux_amd64" (when ci is nil)
|
||||||
|
func ArtifactName(buildName string, ci *CIContext, target Target) string {
|
||||||
|
base := core.Join("_", buildName, target.OS, target.Arch)
|
||||||
|
|
||||||
|
if ci == nil {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
var version string
|
||||||
|
if ci.IsTag && ci.Tag != "" {
|
||||||
|
version = ci.Tag
|
||||||
|
} else if ci.ShortSHA != "" {
|
||||||
|
version = ci.ShortSHA
|
||||||
|
}
|
||||||
|
|
||||||
|
if version == "" {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
return core.Concat(base, "_", version)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WriteArtifactMeta writes an artifact_meta.json file to path.
|
||||||
|
// The file contains the build name, target OS/arch, and CI metadata if available.
|
||||||
|
//
|
||||||
|
// err := build.WriteArtifactMeta(io.Local, "dist/artifact_meta.json", "core", build.Target{OS: "linux", Arch: "amd64"}, ci)
|
||||||
|
// // writes: {"name":"core","os":"linux","arch":"amd64","tag":"v1.2.3","is_tag":true,...}
|
||||||
|
func WriteArtifactMeta(fs io_interface.Medium, path string, buildName string, target Target, ci *CIContext) error {
|
||||||
|
meta := artifactMeta{
|
||||||
|
Name: buildName,
|
||||||
|
OS: target.OS,
|
||||||
|
Arch: target.Arch,
|
||||||
|
}
|
||||||
|
|
||||||
|
if ci != nil {
|
||||||
|
meta.Ref = ci.Ref
|
||||||
|
meta.SHA = ci.SHA
|
||||||
|
meta.Tag = ci.Tag
|
||||||
|
meta.Branch = ci.Branch
|
||||||
|
meta.IsTag = ci.IsTag
|
||||||
|
meta.Repo = ci.Repo
|
||||||
|
}
|
||||||
|
|
||||||
|
data, err := json.MarshalIndent(meta, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("build.WriteArtifactMeta", "failed to marshal artifact meta", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := fs.Write(path, string(data)); err != nil {
|
||||||
|
return coreerr.E("build.WriteArtifactMeta", "failed to write artifact meta", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
332
pkg/build/ci_test.go
Normal file
332
pkg/build/ci_test.go
Normal file
|
|
@ -0,0 +1,332 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// setenvCI sets the GitHub Actions environment variables for a test and cleans up afterwards.
|
||||||
|
func setenvCI(t *testing.T, sha, ref, repo string) {
|
||||||
|
t.Helper()
|
||||||
|
t.Setenv("GITHUB_ACTIONS", "true")
|
||||||
|
t.Setenv("GITHUB_SHA", sha)
|
||||||
|
t.Setenv("GITHUB_REF", ref)
|
||||||
|
t.Setenv("GITHUB_REPOSITORY", repo)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_FormatGitHubAnnotation_Good(t *testing.T) {
|
||||||
|
t.Run("formats error annotation correctly", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 42, "undefined: foo")
|
||||||
|
assert.Equal(t, "::error file=main.go,line=42::undefined: foo", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("formats warning annotation correctly", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("warning", "pkg/build/ci.go", 10, "unused import")
|
||||||
|
assert.Equal(t, "::warning file=pkg/build/ci.go,line=10::unused import", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("formats notice annotation correctly", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("notice", "cmd/main.go", 1, "build started")
|
||||||
|
assert.Equal(t, "::notice file=cmd/main.go,line=1::build started", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses correct line numbers", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "file.go", 99, "msg")
|
||||||
|
assert.Contains(t, s, "line=99")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_FormatGitHubAnnotation_Bad(t *testing.T) {
|
||||||
|
t.Run("empty file produces empty file field", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "", 1, "message")
|
||||||
|
assert.Equal(t, "::error file=,line=1::message", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty level still produces annotation format", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("", "main.go", 1, "message")
|
||||||
|
assert.Equal(t, ":: file=main.go,line=1::message", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty message produces empty message section", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 1, "")
|
||||||
|
assert.Equal(t, "::error file=main.go,line=1::", s)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("line zero is valid", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 0, "msg")
|
||||||
|
assert.Contains(t, s, "line=0")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_FormatGitHubAnnotation_Ugly(t *testing.T) {
|
||||||
|
t.Run("message with newline is included as-is", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 1, "line one\nline two")
|
||||||
|
assert.Contains(t, s, "line one\nline two")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("message with colons does not break format", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 1, "error: something::bad")
|
||||||
|
// The leading ::level file=... part should still be present
|
||||||
|
assert.Contains(t, s, "::error file=main.go,line=1::")
|
||||||
|
assert.Contains(t, s, "error: something::bad")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("file path with spaces is included as-is", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("warning", "my file.go", 5, "msg")
|
||||||
|
assert.Contains(t, s, "file=my file.go")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("unicode message is preserved", func(t *testing.T) {
|
||||||
|
s := FormatGitHubAnnotation("error", "main.go", 1, "résumé: 日本語")
|
||||||
|
assert.Contains(t, s, "résumé: 日本語")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_DetectCI_Good(t *testing.T) {
|
||||||
|
t.Run("detects tag ref", func(t *testing.T) {
|
||||||
|
setenvCI(t, "abc1234def5678901234567890123456789012345", "refs/tags/v1.2.3", "dappcore/core")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "refs/tags/v1.2.3", ci.Ref)
|
||||||
|
assert.Equal(t, "abc1234def5678901234567890123456789012345", ci.SHA)
|
||||||
|
assert.Equal(t, "abc1234", ci.ShortSHA)
|
||||||
|
assert.Equal(t, "v1.2.3", ci.Tag)
|
||||||
|
assert.True(t, ci.IsTag)
|
||||||
|
assert.Equal(t, "", ci.Branch)
|
||||||
|
assert.Equal(t, "dappcore/core", ci.Repo)
|
||||||
|
assert.Equal(t, "dappcore", ci.Owner)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects branch ref", func(t *testing.T) {
|
||||||
|
setenvCI(t, "deadbeef1234567890123456789012345678abcd", "refs/heads/main", "org/repo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "main", ci.Branch)
|
||||||
|
assert.False(t, ci.IsTag)
|
||||||
|
assert.Equal(t, "", ci.Tag)
|
||||||
|
assert.Equal(t, "deadbee", ci.ShortSHA)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("owner is derived from repo", func(t *testing.T) {
|
||||||
|
setenvCI(t, "aaaaaaaaaaaaaaaa", "refs/heads/dev", "myorg/myrepo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "myorg", ci.Owner)
|
||||||
|
assert.Equal(t, "myorg/myrepo", ci.Repo)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_DetectCI_Bad(t *testing.T) {
|
||||||
|
t.Run("returns nil when GITHUB_ACTIONS is not set", func(t *testing.T) {
|
||||||
|
t.Setenv("GITHUB_ACTIONS", "")
|
||||||
|
t.Setenv("GITHUB_SHA", "abc1234def5678901234567890123456789012345")
|
||||||
|
t.Setenv("GITHUB_REF", "refs/heads/main")
|
||||||
|
t.Setenv("GITHUB_REPOSITORY", "org/repo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
assert.Nil(t, ci)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns nil when GITHUB_SHA is not set", func(t *testing.T) {
|
||||||
|
t.Setenv("GITHUB_ACTIONS", "true")
|
||||||
|
t.Setenv("GITHUB_SHA", "")
|
||||||
|
t.Setenv("GITHUB_REF", "")
|
||||||
|
t.Setenv("GITHUB_REPOSITORY", "")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
assert.Nil(t, ci)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_DetectGitHubMetadata_Good(t *testing.T) {
|
||||||
|
t.Run("detects GitHub metadata without GITHUB_ACTIONS", func(t *testing.T) {
|
||||||
|
t.Setenv("GITHUB_ACTIONS", "")
|
||||||
|
t.Setenv("GITHUB_SHA", "abc1234def5678901234567890123456789012345")
|
||||||
|
t.Setenv("GITHUB_REF", "refs/heads/main")
|
||||||
|
t.Setenv("GITHUB_REPOSITORY", "org/repo")
|
||||||
|
|
||||||
|
ci := DetectGitHubMetadata()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "abc1234", ci.ShortSHA)
|
||||||
|
assert.Equal(t, "main", ci.Branch)
|
||||||
|
assert.Equal(t, "org/repo", ci.Repo)
|
||||||
|
assert.Equal(t, "org", ci.Owner)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_DetectCI_Ugly(t *testing.T) {
|
||||||
|
t.Run("SHA shorter than 7 chars still works", func(t *testing.T) {
|
||||||
|
setenvCI(t, "abc", "refs/heads/main", "org/repo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "abc", ci.ShortSHA)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("ref with unknown prefix leaves tag and branch empty", func(t *testing.T) {
|
||||||
|
setenvCI(t, "abc1234def5678", "refs/pull/42/merge", "org/repo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "", ci.Tag)
|
||||||
|
assert.Equal(t, "", ci.Branch)
|
||||||
|
assert.False(t, ci.IsTag)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("repo without slash leaves owner empty", func(t *testing.T) {
|
||||||
|
setenvCI(t, "abc1234def5678", "refs/heads/main", "noslashrepo")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "", ci.Owner)
|
||||||
|
assert.Equal(t, "noslashrepo", ci.Repo)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty repo is tolerated", func(t *testing.T) {
|
||||||
|
setenvCI(t, "abc1234def5678", "refs/heads/main", "")
|
||||||
|
|
||||||
|
ci := DetectCI()
|
||||||
|
require.NotNil(t, ci)
|
||||||
|
assert.Equal(t, "", ci.Owner)
|
||||||
|
assert.Equal(t, "", ci.Repo)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_ArtifactName_Good(t *testing.T) {
|
||||||
|
t.Run("uses tag when IsTag is true", func(t *testing.T) {
|
||||||
|
ci := &CIContext{
|
||||||
|
IsTag: true,
|
||||||
|
Tag: "v1.2.3",
|
||||||
|
ShortSHA: "abc1234",
|
||||||
|
}
|
||||||
|
name := ArtifactName("core", ci, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core_linux_amd64_v1.2.3", name)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("uses ShortSHA when not a tag", func(t *testing.T) {
|
||||||
|
ci := &CIContext{
|
||||||
|
IsTag: false,
|
||||||
|
ShortSHA: "abc1234",
|
||||||
|
}
|
||||||
|
name := ArtifactName("myapp", ci, Target{OS: "darwin", Arch: "arm64"})
|
||||||
|
assert.Equal(t, "myapp_darwin_arm64_abc1234", name)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("produces correct format for windows", func(t *testing.T) {
|
||||||
|
ci := &CIContext{IsTag: true, Tag: "v2.0.0", ShortSHA: "ff00ff0"}
|
||||||
|
name := ArtifactName("core", ci, Target{OS: "windows", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core_windows_amd64_v2.0.0", name)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_ArtifactName_Bad(t *testing.T) {
|
||||||
|
t.Run("nil ci returns name_os_arch only", func(t *testing.T) {
|
||||||
|
name := ArtifactName("core", nil, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core_linux_amd64", name)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("ci with no tag and no SHA returns name_os_arch only", func(t *testing.T) {
|
||||||
|
ci := &CIContext{IsTag: false, ShortSHA: "", Tag: ""}
|
||||||
|
name := ArtifactName("core", ci, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core_linux_amd64", name)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_ArtifactName_Ugly(t *testing.T) {
|
||||||
|
t.Run("empty build name produces leading underscore segments", func(t *testing.T) {
|
||||||
|
ci := &CIContext{IsTag: true, Tag: "v1.0.0", ShortSHA: "abc1234"}
|
||||||
|
name := ArtifactName("", ci, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
// Empty name results in "_linux_amd64_v1.0.0"
|
||||||
|
assert.Contains(t, name, "linux_amd64_v1.0.0")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("IsTag true but empty tag falls back to ShortSHA", func(t *testing.T) {
|
||||||
|
ci := &CIContext{IsTag: true, Tag: "", ShortSHA: "abc1234"}
|
||||||
|
name := ArtifactName("core", ci, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core_linux_amd64_abc1234", name)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("special chars in build name are preserved", func(t *testing.T) {
|
||||||
|
ci := &CIContext{IsTag: true, Tag: "v1.0.0"}
|
||||||
|
name := ArtifactName("core-build", ci, Target{OS: "linux", Arch: "amd64"})
|
||||||
|
assert.Equal(t, "core-build_linux_amd64_v1.0.0", name)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCi_WriteArtifactMeta_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("writes valid JSON with CI context", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := ax.Join(dir, "artifact_meta.json")
|
||||||
|
|
||||||
|
ci := &CIContext{
|
||||||
|
Ref: "refs/tags/v1.2.3",
|
||||||
|
SHA: "abc1234def5678",
|
||||||
|
ShortSHA: "abc1234",
|
||||||
|
Tag: "v1.2.3",
|
||||||
|
IsTag: true,
|
||||||
|
Repo: "dappcore/core",
|
||||||
|
Owner: "dappcore",
|
||||||
|
}
|
||||||
|
|
||||||
|
err := WriteArtifactMeta(fs, path, "core", Target{OS: "linux", Arch: "amd64"}, ci)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, readErr := ax.ReadFile(path)
|
||||||
|
require.NoError(t, readErr)
|
||||||
|
|
||||||
|
var meta map[string]any
|
||||||
|
require.NoError(t, json.Unmarshal(content, &meta))
|
||||||
|
|
||||||
|
assert.Equal(t, "core", meta["name"])
|
||||||
|
assert.Equal(t, "linux", meta["os"])
|
||||||
|
assert.Equal(t, "amd64", meta["arch"])
|
||||||
|
assert.Equal(t, "v1.2.3", meta["tag"])
|
||||||
|
assert.Equal(t, true, meta["is_tag"])
|
||||||
|
assert.Equal(t, "dappcore/core", meta["repo"])
|
||||||
|
assert.Equal(t, "refs/tags/v1.2.3", meta["ref"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("writes valid JSON without CI context", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := ax.Join(dir, "artifact_meta.json")
|
||||||
|
|
||||||
|
err := WriteArtifactMeta(fs, path, "myapp", Target{OS: "darwin", Arch: "arm64"}, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, readErr := ax.ReadFile(path)
|
||||||
|
require.NoError(t, readErr)
|
||||||
|
|
||||||
|
var meta map[string]any
|
||||||
|
require.NoError(t, json.Unmarshal(content, &meta))
|
||||||
|
|
||||||
|
assert.Equal(t, "myapp", meta["name"])
|
||||||
|
assert.Equal(t, "darwin", meta["os"])
|
||||||
|
assert.Equal(t, "arm64", meta["arch"])
|
||||||
|
assert.Equal(t, false, meta["is_tag"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("output is pretty-printed JSON", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
path := ax.Join(dir, "artifact_meta.json")
|
||||||
|
|
||||||
|
err := WriteArtifactMeta(fs, path, "core", Target{OS: "windows", Arch: "amd64"}, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
content, readErr := ax.ReadFile(path)
|
||||||
|
require.NoError(t, readErr)
|
||||||
|
|
||||||
|
// Pretty-printed JSON contains indentation
|
||||||
|
assert.Contains(t, string(content), "\n")
|
||||||
|
assert.Contains(t, string(content), " ")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -4,9 +4,9 @@ package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"iter"
|
"iter"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build/signing"
|
"dappco.re/go/core/build/pkg/build/signing"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -14,13 +14,19 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
// ConfigFileName is the name of the build configuration file.
|
// ConfigFileName is the name of the build configuration file.
|
||||||
|
//
|
||||||
|
// configPath := ax.Join(projectDir, build.ConfigDir, build.ConfigFileName)
|
||||||
const ConfigFileName = "build.yaml"
|
const ConfigFileName = "build.yaml"
|
||||||
|
|
||||||
// ConfigDir is the directory where build configuration is stored.
|
// ConfigDir is the directory where build configuration is stored.
|
||||||
|
//
|
||||||
|
// configPath := ax.Join(projectDir, build.ConfigDir, build.ConfigFileName)
|
||||||
const ConfigDir = ".core"
|
const ConfigDir = ".core"
|
||||||
|
|
||||||
// BuildConfig holds the complete build configuration loaded from .core/build.yaml.
|
// BuildConfig holds the complete build configuration loaded from .core/build.yaml.
|
||||||
// This is distinct from Config which holds runtime build parameters.
|
// This is distinct from Config which holds runtime build parameters.
|
||||||
|
//
|
||||||
|
// cfg, err := build.LoadConfig(io.Local, ".")
|
||||||
type BuildConfig struct {
|
type BuildConfig struct {
|
||||||
// Version is the config file format version.
|
// Version is the config file format version.
|
||||||
Version int `yaml:"version"`
|
Version int `yaml:"version"`
|
||||||
|
|
@ -35,6 +41,8 @@ type BuildConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Project holds project metadata.
|
// Project holds project metadata.
|
||||||
|
//
|
||||||
|
// cfg.Project.Binary = "core-build"
|
||||||
type Project struct {
|
type Project struct {
|
||||||
// Name is the project name.
|
// Name is the project name.
|
||||||
Name string `yaml:"name"`
|
Name string `yaml:"name"`
|
||||||
|
|
@ -47,21 +55,57 @@ type Project struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build holds build-time settings.
|
// Build holds build-time settings.
|
||||||
|
//
|
||||||
|
// cfg.Build.LDFlags = []string{"-s", "-w", "-X main.version=" + version}
|
||||||
type Build struct {
|
type Build struct {
|
||||||
// Type overrides project type auto-detection (e.g., "go", "wails", "docker").
|
// Type overrides project type auto-detection (e.g., "go", "wails", "docker").
|
||||||
Type string `yaml:"type"`
|
Type string `yaml:"type"`
|
||||||
// CGO enables CGO for the build.
|
// CGO enables CGO for the build.
|
||||||
CGO bool `yaml:"cgo"`
|
CGO bool `yaml:"cgo"`
|
||||||
|
// Obfuscate uses garble instead of go build for binary obfuscation.
|
||||||
|
Obfuscate bool `yaml:"obfuscate"`
|
||||||
|
// NSIS enables Windows NSIS installer generation (Wails projects only).
|
||||||
|
NSIS bool `yaml:"nsis"`
|
||||||
|
// WebView2 sets the WebView2 delivery method: download|embed|browser|error.
|
||||||
|
WebView2 string `yaml:"webview2,omitempty"`
|
||||||
// Flags are additional build flags (e.g., ["-trimpath"]).
|
// Flags are additional build flags (e.g., ["-trimpath"]).
|
||||||
Flags []string `yaml:"flags"`
|
Flags []string `yaml:"flags"`
|
||||||
// LDFlags are linker flags (e.g., ["-s", "-w"]).
|
// LDFlags are linker flags (e.g., ["-s", "-w"]).
|
||||||
LDFlags []string `yaml:"ldflags"`
|
LDFlags []string `yaml:"ldflags"`
|
||||||
|
// BuildTags are Go build tags passed through to `go build`.
|
||||||
|
BuildTags []string `yaml:"build_tags,omitempty"`
|
||||||
|
// ArchiveFormat selects the archive compression format for build outputs.
|
||||||
|
// Supported values are "gz", "xz", and "zip"; empty uses gzip.
|
||||||
|
ArchiveFormat string `yaml:"archive_format,omitempty"`
|
||||||
// Env are additional environment variables.
|
// Env are additional environment variables.
|
||||||
Env []string `yaml:"env"`
|
Env []string `yaml:"env"`
|
||||||
|
// Cache controls build cache setup.
|
||||||
|
Cache CacheConfig `yaml:"cache,omitempty"`
|
||||||
|
// Dockerfile is the path to the Dockerfile used by Docker builds.
|
||||||
|
Dockerfile string `yaml:"dockerfile,omitempty"`
|
||||||
|
// Registry is the container registry used for Docker image references.
|
||||||
|
Registry string `yaml:"registry,omitempty"`
|
||||||
|
// Image is the image name used for Docker builds.
|
||||||
|
Image string `yaml:"image,omitempty"`
|
||||||
|
// Tags are Docker image tags to apply.
|
||||||
|
Tags []string `yaml:"tags,omitempty"`
|
||||||
|
// BuildArgs are Docker build arguments.
|
||||||
|
BuildArgs map[string]string `yaml:"build_args,omitempty"`
|
||||||
|
// Push enables pushing Docker images after build.
|
||||||
|
Push bool `yaml:"push,omitempty"`
|
||||||
|
// Load loads a single-platform Docker image into the local daemon after build.
|
||||||
|
Load bool `yaml:"load,omitempty"`
|
||||||
|
// LinuxKitConfig is the path to the LinuxKit config file.
|
||||||
|
LinuxKitConfig string `yaml:"linuxkit_config,omitempty"`
|
||||||
|
// Formats is the list of LinuxKit output formats.
|
||||||
|
// Supported values include iso, raw, qcow2, vmdk, vhd, gcp, aws, docker, tar, and kernel+initrd.
|
||||||
|
Formats []string `yaml:"formats,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TargetConfig defines a build target in the config file.
|
// TargetConfig defines a build target in the config file.
|
||||||
// This is separate from Target to allow for additional config-specific fields.
|
// This is separate from Target to allow for additional config-specific fields.
|
||||||
|
//
|
||||||
|
// cfg.Targets = []build.TargetConfig{{OS: "linux", Arch: "amd64"}, {OS: "darwin", Arch: "arm64"}}
|
||||||
type TargetConfig struct {
|
type TargetConfig struct {
|
||||||
// OS is the target operating system (e.g., "linux", "darwin", "windows").
|
// OS is the target operating system (e.g., "linux", "darwin", "windows").
|
||||||
OS string `yaml:"os"`
|
OS string `yaml:"os"`
|
||||||
|
|
@ -72,30 +116,45 @@ type TargetConfig struct {
|
||||||
// LoadConfig loads build configuration from the .core/build.yaml file in the given directory.
|
// LoadConfig loads build configuration from the .core/build.yaml file in the given directory.
|
||||||
// If the config file does not exist, it returns DefaultConfig().
|
// If the config file does not exist, it returns DefaultConfig().
|
||||||
// Returns an error if the file exists but cannot be parsed.
|
// Returns an error if the file exists but cannot be parsed.
|
||||||
|
//
|
||||||
|
// cfg, err := build.LoadConfig(io.Local, ".")
|
||||||
func LoadConfig(fs io.Medium, dir string) (*BuildConfig, error) {
|
func LoadConfig(fs io.Medium, dir string) (*BuildConfig, error) {
|
||||||
configPath := filepath.Join(dir, ConfigDir, ConfigFileName)
|
return LoadConfigAtPath(fs, ax.Join(dir, ConfigDir, ConfigFileName))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LoadConfigAtPath loads build configuration from an explicit file path.
|
||||||
|
// If the file does not exist, it returns DefaultConfig().
|
||||||
|
// Returns an error if the file exists but cannot be parsed.
|
||||||
|
//
|
||||||
|
// cfg, err := build.LoadConfigAtPath(io.Local, "/tmp/project/build.yaml")
|
||||||
|
func LoadConfigAtPath(fs io.Medium, configPath string) (*BuildConfig, error) {
|
||||||
content, err := fs.Read(configPath)
|
content, err := fs.Read(configPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if os.IsNotExist(err) {
|
if !fs.Exists(configPath) {
|
||||||
return DefaultConfig(), nil
|
return DefaultConfig(), nil
|
||||||
}
|
}
|
||||||
return nil, coreerr.E("build.LoadConfig", "failed to read config file", err)
|
return nil, coreerr.E("build.LoadConfigAtPath", "failed to read config file", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var cfg BuildConfig
|
cfg := DefaultConfig()
|
||||||
data := []byte(content)
|
data := []byte(content)
|
||||||
if err := yaml.Unmarshal(data, &cfg); err != nil {
|
if err := yaml.Unmarshal(data, cfg); err != nil {
|
||||||
return nil, coreerr.E("build.LoadConfig", "failed to parse config file", err)
|
return nil, coreerr.E("build.LoadConfigAtPath", "failed to parse config file", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Apply defaults for any missing fields
|
// Apply defaults for any missing fields
|
||||||
applyDefaults(&cfg)
|
applyDefaults(cfg)
|
||||||
|
|
||||||
return &cfg, nil
|
// Expand environment variables after defaults so overrides can still be
|
||||||
|
// expressed declaratively in config files.
|
||||||
|
cfg.ExpandEnv()
|
||||||
|
|
||||||
|
return cfg, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// DefaultConfig returns sensible defaults for Go projects.
|
// DefaultConfig returns sensible defaults for Go projects.
|
||||||
|
//
|
||||||
|
// cfg := build.DefaultConfig()
|
||||||
func DefaultConfig() *BuildConfig {
|
func DefaultConfig() *BuildConfig {
|
||||||
return &BuildConfig{
|
return &BuildConfig{
|
||||||
Version: 1,
|
Version: 1,
|
||||||
|
|
@ -144,25 +203,169 @@ func applyDefaults(cfg *BuildConfig) {
|
||||||
cfg.Build.Env = defaults.Build.Env
|
cfg.Build.Env = defaults.Build.Env
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(cfg.Targets) == 0 {
|
if cfg.Targets == nil {
|
||||||
cfg.Targets = defaults.Targets
|
cfg.Targets = defaults.Targets
|
||||||
}
|
}
|
||||||
|
|
||||||
// Expand environment variables in sign config
|
}
|
||||||
|
|
||||||
|
// ExpandEnv expands environment variables across the build config.
|
||||||
|
//
|
||||||
|
// cfg.ExpandEnv() // expands $APP_NAME, $IMAGE_TAG, $GPG_KEY_ID, etc.
|
||||||
|
func (cfg *BuildConfig) ExpandEnv() {
|
||||||
|
if cfg == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg.Project.Name = expandEnv(cfg.Project.Name)
|
||||||
|
cfg.Project.Description = expandEnv(cfg.Project.Description)
|
||||||
|
cfg.Project.Main = expandEnv(cfg.Project.Main)
|
||||||
|
cfg.Project.Binary = expandEnv(cfg.Project.Binary)
|
||||||
|
|
||||||
|
cfg.Build.Type = expandEnv(cfg.Build.Type)
|
||||||
|
cfg.Build.WebView2 = expandEnv(cfg.Build.WebView2)
|
||||||
|
cfg.Build.ArchiveFormat = expandEnv(cfg.Build.ArchiveFormat)
|
||||||
|
cfg.Build.Dockerfile = expandEnv(cfg.Build.Dockerfile)
|
||||||
|
cfg.Build.Registry = expandEnv(cfg.Build.Registry)
|
||||||
|
cfg.Build.Image = expandEnv(cfg.Build.Image)
|
||||||
|
cfg.Build.LinuxKitConfig = expandEnv(cfg.Build.LinuxKitConfig)
|
||||||
|
|
||||||
|
cfg.Build.Flags = expandEnvSlice(cfg.Build.Flags)
|
||||||
|
cfg.Build.LDFlags = expandEnvSlice(cfg.Build.LDFlags)
|
||||||
|
cfg.Build.BuildTags = expandEnvSlice(cfg.Build.BuildTags)
|
||||||
|
cfg.Build.Env = expandEnvSlice(cfg.Build.Env)
|
||||||
|
cfg.Build.Tags = expandEnvSlice(cfg.Build.Tags)
|
||||||
|
cfg.Build.Formats = expandEnvSlice(cfg.Build.Formats)
|
||||||
|
|
||||||
|
cfg.Build.Cache.Directory = expandEnv(cfg.Build.Cache.Directory)
|
||||||
|
cfg.Build.Cache.KeyPrefix = expandEnv(cfg.Build.Cache.KeyPrefix)
|
||||||
|
cfg.Build.Cache.Paths = expandEnvSlice(cfg.Build.Cache.Paths)
|
||||||
|
cfg.Build.Cache.RestoreKeys = expandEnvSlice(cfg.Build.Cache.RestoreKeys)
|
||||||
|
|
||||||
|
cfg.Build.BuildArgs = expandEnvMap(cfg.Build.BuildArgs)
|
||||||
|
cfg.Targets = expandTargetConfigs(cfg.Targets)
|
||||||
|
|
||||||
cfg.Sign.ExpandEnv()
|
cfg.Sign.ExpandEnv()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func expandEnvSlice(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]string, len(values))
|
||||||
|
for i, value := range values {
|
||||||
|
result[i] = expandEnv(value)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func expandEnvMap(values map[string]string) map[string]string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make(map[string]string, len(values))
|
||||||
|
for key, value := range values {
|
||||||
|
result[key] = expandEnv(value)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func expandTargetConfigs(values []TargetConfig) []TargetConfig {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]TargetConfig, len(values))
|
||||||
|
for i, value := range values {
|
||||||
|
result[i] = TargetConfig{
|
||||||
|
OS: expandEnv(value.OS),
|
||||||
|
Arch: expandEnv(value.Arch),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// CloneStringMap returns a shallow copy of a string map.
|
||||||
|
//
|
||||||
|
// clone := build.CloneStringMap(map[string]string{"VERSION": "v1.2.3"})
|
||||||
|
func CloneStringMap(values map[string]string) map[string]string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make(map[string]string, len(values))
|
||||||
|
for key, value := range values {
|
||||||
|
result[key] = value
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// expandEnv expands $VAR or ${VAR} using the current process environment.
|
||||||
|
func expandEnv(s string) string {
|
||||||
|
if !core.Contains(s, "$") {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := core.NewBuilder()
|
||||||
|
for i := 0; i < len(s); {
|
||||||
|
if s[i] != '$' {
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if i+1 < len(s) && s[i+1] == '{' {
|
||||||
|
j := i + 2
|
||||||
|
for j < len(s) && s[j] != '}' {
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j < len(s) {
|
||||||
|
buf.WriteString(core.Env(s[i+2 : j]))
|
||||||
|
i = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
j := i + 1
|
||||||
|
for j < len(s) {
|
||||||
|
c := s[j]
|
||||||
|
if c != '_' && (c < '0' || c > '9') && (c < 'A' || c > 'Z') && (c < 'a' || c > 'z') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j > i+1 {
|
||||||
|
buf.WriteString(core.Env(s[i+1 : j]))
|
||||||
|
i = j
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
||||||
// ConfigPath returns the path to the build config file for a given directory.
|
// ConfigPath returns the path to the build config file for a given directory.
|
||||||
|
//
|
||||||
|
// path := build.ConfigPath("/home/user/my-project") // → "/home/user/my-project/.core/build.yaml"
|
||||||
func ConfigPath(dir string) string {
|
func ConfigPath(dir string) string {
|
||||||
return filepath.Join(dir, ConfigDir, ConfigFileName)
|
return ax.Join(dir, ConfigDir, ConfigFileName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ConfigExists checks if a build config file exists in the given directory.
|
// ConfigExists checks if a build config file exists in the given directory.
|
||||||
|
//
|
||||||
|
// if build.ConfigExists(io.Local, ".") { ... }
|
||||||
func ConfigExists(fs io.Medium, dir string) bool {
|
func ConfigExists(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, ConfigPath(dir))
|
return fileExists(fs, ConfigPath(dir))
|
||||||
}
|
}
|
||||||
|
|
||||||
// TargetsIter returns an iterator for the build targets.
|
// TargetsIter returns an iterator for the build targets.
|
||||||
|
//
|
||||||
|
// for t := range cfg.TargetsIter() { fmt.Println(t.OS, t.Arch) }
|
||||||
func (cfg *BuildConfig) TargetsIter() iter.Seq[TargetConfig] {
|
func (cfg *BuildConfig) TargetsIter() iter.Seq[TargetConfig] {
|
||||||
return func(yield func(TargetConfig) bool) {
|
return func(yield func(TargetConfig) bool) {
|
||||||
for _, t := range cfg.Targets {
|
for _, t := range cfg.Targets {
|
||||||
|
|
@ -174,6 +377,8 @@ func (cfg *BuildConfig) TargetsIter() iter.Seq[TargetConfig] {
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToTargets converts TargetConfig slice to Target slice for use with builders.
|
// ToTargets converts TargetConfig slice to Target slice for use with builders.
|
||||||
|
//
|
||||||
|
// targets := cfg.ToTargets()
|
||||||
func (cfg *BuildConfig) ToTargets() []Target {
|
func (cfg *BuildConfig) ToTargets() []Target {
|
||||||
targets := make([]Target, len(cfg.Targets))
|
targets := make([]Target, len(cfg.Targets))
|
||||||
for i, t := range cfg.Targets {
|
for i, t := range cfg.Targets {
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
package build
|
package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
@ -16,19 +16,19 @@ func setupConfigTestDir(t *testing.T, configContent string) string {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
if configContent != "" {
|
if configContent != "" {
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
err := os.MkdirAll(coreDir, 0755)
|
err := ax.MkdirAll(coreDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
configPath := filepath.Join(coreDir, ConfigFileName)
|
configPath := ax.Join(coreDir, ConfigFileName)
|
||||||
err = os.WriteFile(configPath, []byte(configContent), 0644)
|
err = ax.WriteFile(configPath, []byte(configContent), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadConfig_Good(t *testing.T) {
|
func TestConfig_LoadConfig_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("loads valid config", func(t *testing.T) {
|
t.Run("loads valid config", func(t *testing.T) {
|
||||||
content := `
|
content := `
|
||||||
|
|
@ -46,8 +46,13 @@ build:
|
||||||
ldflags:
|
ldflags:
|
||||||
- -s
|
- -s
|
||||||
- -w
|
- -w
|
||||||
|
build_tags:
|
||||||
|
- integration
|
||||||
|
- webkit2_41
|
||||||
|
archive_format: xz
|
||||||
env:
|
env:
|
||||||
- FOO=bar
|
- FOO=bar
|
||||||
|
load: true
|
||||||
targets:
|
targets:
|
||||||
- os: linux
|
- os: linux
|
||||||
arch: amd64
|
arch: amd64
|
||||||
|
|
@ -68,7 +73,10 @@ targets:
|
||||||
assert.True(t, cfg.Build.CGO)
|
assert.True(t, cfg.Build.CGO)
|
||||||
assert.Equal(t, []string{"-trimpath", "-race"}, cfg.Build.Flags)
|
assert.Equal(t, []string{"-trimpath", "-race"}, cfg.Build.Flags)
|
||||||
assert.Equal(t, []string{"-s", "-w"}, cfg.Build.LDFlags)
|
assert.Equal(t, []string{"-s", "-w"}, cfg.Build.LDFlags)
|
||||||
|
assert.Equal(t, []string{"integration", "webkit2_41"}, cfg.Build.BuildTags)
|
||||||
|
assert.Equal(t, "xz", cfg.Build.ArchiveFormat)
|
||||||
assert.Equal(t, []string{"FOO=bar"}, cfg.Build.Env)
|
assert.Equal(t, []string{"FOO=bar"}, cfg.Build.Env)
|
||||||
|
assert.True(t, cfg.Build.Load)
|
||||||
assert.Len(t, cfg.Targets, 2)
|
assert.Len(t, cfg.Targets, 2)
|
||||||
assert.Equal(t, "linux", cfg.Targets[0].OS)
|
assert.Equal(t, "linux", cfg.Targets[0].OS)
|
||||||
assert.Equal(t, "amd64", cfg.Targets[0].Arch)
|
assert.Equal(t, "amd64", cfg.Targets[0].Arch)
|
||||||
|
|
@ -76,6 +84,103 @@ targets:
|
||||||
assert.Equal(t, "arm64", cfg.Targets[1].Arch)
|
assert.Equal(t, "arm64", cfg.Targets[1].Arch)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("expands environment variables in target config", func(t *testing.T) {
|
||||||
|
t.Setenv("TARGET_OS", "linux")
|
||||||
|
t.Setenv("TARGET_ARCH", "arm64")
|
||||||
|
|
||||||
|
content := `
|
||||||
|
version: 1
|
||||||
|
targets:
|
||||||
|
- os: ${TARGET_OS}
|
||||||
|
arch: ${TARGET_ARCH}
|
||||||
|
`
|
||||||
|
dir := setupConfigTestDir(t, content)
|
||||||
|
|
||||||
|
cfg, err := LoadConfig(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
require.Len(t, cfg.Targets, 1)
|
||||||
|
assert.Equal(t, "linux", cfg.Targets[0].OS)
|
||||||
|
assert.Equal(t, "arm64", cfg.Targets[0].Arch)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("expands environment variables in build and signing config", func(t *testing.T) {
|
||||||
|
t.Setenv("APP_NAME", "demo-app")
|
||||||
|
t.Setenv("APP_ROOT", "./cmd/demo")
|
||||||
|
t.Setenv("APP_BINARY", "demo-bin")
|
||||||
|
t.Setenv("BUILD_TYPE", "wails")
|
||||||
|
t.Setenv("WEBVIEW2", "embed")
|
||||||
|
t.Setenv("ARCHIVE_FORMAT", "xz")
|
||||||
|
t.Setenv("APP_VERSION", "v1.2.3")
|
||||||
|
t.Setenv("APP_TAG", "integration")
|
||||||
|
t.Setenv("CACHE_DIR", ".core/cache/demo-app")
|
||||||
|
t.Setenv("DOCKERFILE", "Dockerfile.release")
|
||||||
|
t.Setenv("IMAGE_NAME", "owner/demo-app")
|
||||||
|
t.Setenv("GPG_KEY_ID", "ABCD1234")
|
||||||
|
|
||||||
|
content := `
|
||||||
|
version: 1
|
||||||
|
project:
|
||||||
|
name: ${APP_NAME}
|
||||||
|
main: ${APP_ROOT}
|
||||||
|
binary: ${APP_BINARY}
|
||||||
|
build:
|
||||||
|
type: ${BUILD_TYPE}
|
||||||
|
webview2: ${WEBVIEW2}
|
||||||
|
archive_format: ${ARCHIVE_FORMAT}
|
||||||
|
flags:
|
||||||
|
- -trimpath
|
||||||
|
- -X
|
||||||
|
- main.version=${APP_VERSION}
|
||||||
|
ldflags:
|
||||||
|
- -s
|
||||||
|
- -w
|
||||||
|
build_tags:
|
||||||
|
- ${APP_TAG}
|
||||||
|
env:
|
||||||
|
- VERSION=${APP_VERSION}
|
||||||
|
cache:
|
||||||
|
enabled: true
|
||||||
|
dir: ${CACHE_DIR}
|
||||||
|
paths:
|
||||||
|
- ${CACHE_DIR}/go-build
|
||||||
|
dockerfile: ${DOCKERFILE}
|
||||||
|
image: ${IMAGE_NAME}
|
||||||
|
tags:
|
||||||
|
- latest
|
||||||
|
- ${APP_VERSION}
|
||||||
|
build_args:
|
||||||
|
VERSION: ${APP_VERSION}
|
||||||
|
sign:
|
||||||
|
gpg:
|
||||||
|
key: ${GPG_KEY_ID}
|
||||||
|
`
|
||||||
|
dir := setupConfigTestDir(t, content)
|
||||||
|
|
||||||
|
cfg, err := LoadConfig(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, "demo-app", cfg.Project.Name)
|
||||||
|
assert.Equal(t, "./cmd/demo", cfg.Project.Main)
|
||||||
|
assert.Equal(t, "demo-bin", cfg.Project.Binary)
|
||||||
|
assert.Equal(t, "wails", cfg.Build.Type)
|
||||||
|
assert.Equal(t, "embed", cfg.Build.WebView2)
|
||||||
|
assert.Equal(t, "xz", cfg.Build.ArchiveFormat)
|
||||||
|
assert.Equal(t, []string{"-trimpath", "-X", "main.version=v1.2.3"}, cfg.Build.Flags)
|
||||||
|
assert.Equal(t, []string{"-s", "-w"}, cfg.Build.LDFlags)
|
||||||
|
assert.Equal(t, []string{"integration"}, cfg.Build.BuildTags)
|
||||||
|
assert.Equal(t, []string{"VERSION=v1.2.3"}, cfg.Build.Env)
|
||||||
|
assert.Equal(t, ".core/cache/demo-app", cfg.Build.Cache.Directory)
|
||||||
|
assert.Equal(t, []string{".core/cache/demo-app/go-build"}, cfg.Build.Cache.Paths)
|
||||||
|
assert.Equal(t, "Dockerfile.release", cfg.Build.Dockerfile)
|
||||||
|
assert.Equal(t, "owner/demo-app", cfg.Build.Image)
|
||||||
|
assert.Equal(t, []string{"latest", "v1.2.3"}, cfg.Build.Tags)
|
||||||
|
assert.Equal(t, map[string]string{"VERSION": "v1.2.3"}, cfg.Build.BuildArgs)
|
||||||
|
assert.Equal(t, "ABCD1234", cfg.Sign.GPG.Key)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("returns defaults when config file missing", func(t *testing.T) {
|
t.Run("returns defaults when config file missing", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
|
@ -89,6 +194,8 @@ targets:
|
||||||
assert.Equal(t, defaults.Build.CGO, cfg.Build.CGO)
|
assert.Equal(t, defaults.Build.CGO, cfg.Build.CGO)
|
||||||
assert.Equal(t, defaults.Build.Flags, cfg.Build.Flags)
|
assert.Equal(t, defaults.Build.Flags, cfg.Build.Flags)
|
||||||
assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags)
|
assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags)
|
||||||
|
assert.False(t, cfg.Build.Load)
|
||||||
|
assert.Empty(t, cfg.Build.BuildTags)
|
||||||
assert.Equal(t, defaults.Targets, cfg.Targets)
|
assert.Equal(t, defaults.Targets, cfg.Targets)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -114,6 +221,22 @@ project:
|
||||||
assert.Equal(t, defaults.Build.Flags, cfg.Build.Flags)
|
assert.Equal(t, defaults.Build.Flags, cfg.Build.Flags)
|
||||||
assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags)
|
assert.Equal(t, defaults.Build.LDFlags, cfg.Build.LDFlags)
|
||||||
assert.Equal(t, defaults.Targets, cfg.Targets)
|
assert.Equal(t, defaults.Targets, cfg.Targets)
|
||||||
|
assert.True(t, cfg.Sign.Enabled)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("preserves explicit signing disablement", func(t *testing.T) {
|
||||||
|
content := `
|
||||||
|
version: 1
|
||||||
|
sign:
|
||||||
|
enabled: false
|
||||||
|
`
|
||||||
|
dir := setupConfigTestDir(t, content)
|
||||||
|
|
||||||
|
cfg, err := LoadConfig(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.False(t, cfg.Sign.Enabled)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("preserves empty arrays when explicitly set", func(t *testing.T) {
|
t.Run("preserves empty arrays when explicitly set", func(t *testing.T) {
|
||||||
|
|
@ -124,6 +247,7 @@ project:
|
||||||
build:
|
build:
|
||||||
flags: []
|
flags: []
|
||||||
ldflags: []
|
ldflags: []
|
||||||
|
build_tags: []
|
||||||
targets:
|
targets:
|
||||||
- os: linux
|
- os: linux
|
||||||
arch: amd64
|
arch: amd64
|
||||||
|
|
@ -137,12 +261,48 @@ targets:
|
||||||
// Empty arrays are preserved (not replaced with defaults)
|
// Empty arrays are preserved (not replaced with defaults)
|
||||||
assert.Empty(t, cfg.Build.Flags)
|
assert.Empty(t, cfg.Build.Flags)
|
||||||
assert.Empty(t, cfg.Build.LDFlags)
|
assert.Empty(t, cfg.Build.LDFlags)
|
||||||
|
assert.Empty(t, cfg.Build.BuildTags)
|
||||||
// Targets explicitly set
|
// Targets explicitly set
|
||||||
assert.Len(t, cfg.Targets, 1)
|
assert.Len(t, cfg.Targets, 1)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadConfig_Bad(t *testing.T) {
|
func TestConfig_LoadConfigAtPath_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
|
||||||
|
t.Run("loads config from explicit file path", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
configPath := ax.Join(dir, "custom-build.yaml")
|
||||||
|
content := `
|
||||||
|
version: 3
|
||||||
|
project:
|
||||||
|
name: custom-app
|
||||||
|
binary: custom-app
|
||||||
|
build:
|
||||||
|
cgo: true
|
||||||
|
targets:
|
||||||
|
- os: linux
|
||||||
|
arch: amd64
|
||||||
|
`
|
||||||
|
err := ax.WriteFile(configPath, []byte(content), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
cfg, err := LoadConfigAtPath(fs, configPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, 3, cfg.Version)
|
||||||
|
assert.Equal(t, "custom-app", cfg.Project.Name)
|
||||||
|
assert.Equal(t, "custom-app", cfg.Project.Binary)
|
||||||
|
assert.True(t, cfg.Build.CGO)
|
||||||
|
assert.Empty(t, cfg.Build.BuildTags)
|
||||||
|
assert.Len(t, cfg.Targets, 1)
|
||||||
|
assert.Equal(t, "linux", cfg.Targets[0].OS)
|
||||||
|
assert.Equal(t, "amd64", cfg.Targets[0].Arch)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfig_LoadConfig_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns error for invalid YAML", func(t *testing.T) {
|
t.Run("returns error for invalid YAML", func(t *testing.T) {
|
||||||
content := `
|
content := `
|
||||||
|
|
@ -160,13 +320,13 @@ project:
|
||||||
|
|
||||||
t.Run("returns error for unreadable file", func(t *testing.T) {
|
t.Run("returns error for unreadable file", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
err := os.MkdirAll(coreDir, 0755)
|
err := ax.MkdirAll(coreDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create config as a directory instead of file
|
// Create config as a directory instead of file
|
||||||
configPath := filepath.Join(coreDir, ConfigFileName)
|
configPath := ax.Join(coreDir, ConfigFileName)
|
||||||
err = os.Mkdir(configPath, 0755)
|
err = ax.Mkdir(configPath, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
cfg, err := LoadConfig(fs, dir)
|
cfg, err := LoadConfig(fs, dir)
|
||||||
|
|
@ -176,7 +336,7 @@ project:
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDefaultConfig_Good(t *testing.T) {
|
func TestConfig_DefaultConfig_Good(t *testing.T) {
|
||||||
t.Run("returns sensible defaults", func(t *testing.T) {
|
t.Run("returns sensible defaults", func(t *testing.T) {
|
||||||
cfg := DefaultConfig()
|
cfg := DefaultConfig()
|
||||||
|
|
||||||
|
|
@ -212,14 +372,14 @@ func TestDefaultConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConfigPath_Good(t *testing.T) {
|
func TestConfig_ConfigPath_Good(t *testing.T) {
|
||||||
t.Run("returns correct path", func(t *testing.T) {
|
t.Run("returns correct path", func(t *testing.T) {
|
||||||
path := ConfigPath("/project/root")
|
path := ConfigPath("/project/root")
|
||||||
assert.Equal(t, "/project/root/.core/build.yaml", path)
|
assert.Equal(t, "/project/root/.core/build.yaml", path)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConfigExists_Good(t *testing.T) {
|
func TestConfig_ConfigExists_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns true when config exists", func(t *testing.T) {
|
t.Run("returns true when config exists", func(t *testing.T) {
|
||||||
dir := setupConfigTestDir(t, "version: 1")
|
dir := setupConfigTestDir(t, "version: 1")
|
||||||
|
|
@ -237,10 +397,10 @@ func TestConfigExists_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadConfig_Good_SignConfig(t *testing.T) {
|
func TestConfig_LoadConfigSignConfig_Good(t *testing.T) {
|
||||||
tmpDir := t.TempDir()
|
tmpDir := t.TempDir()
|
||||||
coreDir := filepath.Join(tmpDir, ".core")
|
coreDir := ax.Join(tmpDir, ".core")
|
||||||
_ = os.MkdirAll(coreDir, 0755)
|
_ = ax.MkdirAll(coreDir, 0755)
|
||||||
|
|
||||||
configContent := `version: 1
|
configContent := `version: 1
|
||||||
sign:
|
sign:
|
||||||
|
|
@ -251,7 +411,7 @@ sign:
|
||||||
identity: "Developer ID Application: Test"
|
identity: "Developer ID Application: Test"
|
||||||
notarize: true
|
notarize: true
|
||||||
`
|
`
|
||||||
_ = os.WriteFile(filepath.Join(coreDir, "build.yaml"), []byte(configContent), 0644)
|
_ = ax.WriteFile(ax.Join(coreDir, "build.yaml"), []byte(configContent), 0644)
|
||||||
|
|
||||||
cfg, err := LoadConfig(io.Local, tmpDir)
|
cfg, err := LoadConfig(io.Local, tmpDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -272,7 +432,7 @@ sign:
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildConfig_ToTargets_Good(t *testing.T) {
|
func TestConfig_BuildConfigToTargets_Good(t *testing.T) {
|
||||||
t.Run("converts TargetConfig to Target", func(t *testing.T) {
|
t.Run("converts TargetConfig to Target", func(t *testing.T) {
|
||||||
cfg := &BuildConfig{
|
cfg := &BuildConfig{
|
||||||
Targets: []TargetConfig{
|
Targets: []TargetConfig{
|
||||||
|
|
@ -301,9 +461,9 @@ func TestBuildConfig_ToTargets_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestLoadConfig_Testdata tests loading from the testdata fixture.
|
// TestLoadConfig_Testdata tests loading from the testdata fixture.
|
||||||
func TestLoadConfig_Testdata(t *testing.T) {
|
func TestConfig_LoadConfigTestdata_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
abs, err := filepath.Abs("testdata/config-project")
|
abs, err := ax.Abs("testdata/config-project")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
t.Run("loads config-project fixture", func(t *testing.T) {
|
t.Run("loads config-project fixture", func(t *testing.T) {
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,39 @@
|
||||||
package build
|
package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"path/filepath"
|
"dappco.re/go/core"
|
||||||
"slices"
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Marker files for project type detection.
|
// Marker files for project type detection.
|
||||||
const (
|
const (
|
||||||
markerGoMod = "go.mod"
|
markerGoMod = "go.mod"
|
||||||
markerWails = "wails.json"
|
markerGoWork = "go.work"
|
||||||
markerNodePackage = "package.json"
|
markerWails = "wails.json"
|
||||||
markerComposer = "composer.json"
|
markerNodePackage = "package.json"
|
||||||
|
markerComposer = "composer.json"
|
||||||
|
markerMkDocs = "mkdocs.yml"
|
||||||
|
markerMkDocsYAML = "mkdocs.yaml"
|
||||||
|
markerDocsMkDocs = "docs/mkdocs.yml"
|
||||||
|
markerDocsMkDocsYAML = "docs/mkdocs.yaml"
|
||||||
|
markerPyProject = "pyproject.toml"
|
||||||
|
markerRequirements = "requirements.txt"
|
||||||
|
markerCargo = "Cargo.toml"
|
||||||
|
markerDockerfile = "Dockerfile"
|
||||||
|
markerFrontendPackage = "frontend/package.json"
|
||||||
|
markerFrontendDenoJSON = "frontend/deno.json"
|
||||||
|
markerFrontendDenoJSONC = "frontend/deno.jsonc"
|
||||||
|
markerLinuxKitYAML = "linuxkit.yml"
|
||||||
|
markerLinuxKitYAMLAlt = "linuxkit.yaml"
|
||||||
|
markerTaskfileYML = "Taskfile.yml"
|
||||||
|
markerTaskfileYAML = "Taskfile.yaml"
|
||||||
|
markerTaskfileBare = "Taskfile"
|
||||||
|
markerTaskfileLowerYML = "taskfile.yml"
|
||||||
|
markerTaskfileLowerYAML = "taskfile.yaml"
|
||||||
|
markerLinuxKitNestedYML = ".core/linuxkit/*.yml"
|
||||||
|
markerLinuxKitNestedYAML = ".core/linuxkit/*.yaml"
|
||||||
)
|
)
|
||||||
|
|
||||||
// projectMarker maps a marker file to its project type.
|
// projectMarker maps a marker file to its project type.
|
||||||
|
|
@ -26,31 +47,60 @@ type projectMarker struct {
|
||||||
var markers = []projectMarker{
|
var markers = []projectMarker{
|
||||||
{markerWails, ProjectTypeWails},
|
{markerWails, ProjectTypeWails},
|
||||||
{markerGoMod, ProjectTypeGo},
|
{markerGoMod, ProjectTypeGo},
|
||||||
|
{markerGoWork, ProjectTypeGo},
|
||||||
{markerNodePackage, ProjectTypeNode},
|
{markerNodePackage, ProjectTypeNode},
|
||||||
{markerComposer, ProjectTypePHP},
|
{markerComposer, ProjectTypePHP},
|
||||||
|
{markerMkDocs, ProjectTypeDocs},
|
||||||
|
{markerMkDocsYAML, ProjectTypeDocs},
|
||||||
|
{markerDocsMkDocs, ProjectTypeDocs},
|
||||||
|
{markerDocsMkDocsYAML, ProjectTypeDocs},
|
||||||
|
{markerPyProject, ProjectTypePython},
|
||||||
|
{markerRequirements, ProjectTypePython},
|
||||||
|
{markerCargo, ProjectTypeRust},
|
||||||
}
|
}
|
||||||
|
|
||||||
// Discover detects project types in the given directory by checking for marker files.
|
// Discover detects project types in the given directory by checking for marker files.
|
||||||
// Returns a slice of detected project types, ordered by priority (most specific first).
|
// Returns a slice of detected project types, ordered by priority (most specific first).
|
||||||
// For example, a Wails project returns [wails, go] since it has both wails.json and go.mod.
|
// For example, a Wails project returns [wails, go] since it has both wails.json and go.mod.
|
||||||
|
//
|
||||||
|
// types, err := build.Discover(io.Local, "/home/user/my-project") // → [go]
|
||||||
func Discover(fs io.Medium, dir string) ([]ProjectType, error) {
|
func Discover(fs io.Medium, dir string) ([]ProjectType, error) {
|
||||||
var detected []ProjectType
|
var detected []ProjectType
|
||||||
|
|
||||||
for _, m := range markers {
|
for _, m := range markers {
|
||||||
path := filepath.Join(dir, m.file)
|
path := ax.Join(dir, m.file)
|
||||||
if fileExists(fs, path) {
|
if fileExists(fs, path) {
|
||||||
// Avoid duplicates (shouldn't happen with current markers, but defensive)
|
// Avoid duplicates (shouldn't happen with current markers, but defensive)
|
||||||
if !slices.Contains(detected, m.projectType) {
|
if !core.NewArray(detected...).Contains(m.projectType) {
|
||||||
detected = append(detected, m.projectType)
|
detected = append(detected, m.projectType)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
additionalTypes := []struct {
|
||||||
|
projectType ProjectType
|
||||||
|
detected bool
|
||||||
|
}{
|
||||||
|
{ProjectTypeNode, IsNodeProject(fs, dir) || HasSubtreeNpm(fs, dir)},
|
||||||
|
{ProjectTypeDocs, IsMkDocsProject(fs, dir)},
|
||||||
|
{ProjectTypeDocker, IsDockerProject(fs, dir)},
|
||||||
|
{ProjectTypeLinuxKit, IsLinuxKitProject(fs, dir)},
|
||||||
|
{ProjectTypeCPP, IsCPPProject(fs, dir)},
|
||||||
|
{ProjectTypeTaskfile, IsTaskfileProject(fs, dir)},
|
||||||
|
}
|
||||||
|
for _, candidate := range additionalTypes {
|
||||||
|
if candidate.detected && !core.NewArray(detected...).Contains(candidate.projectType) {
|
||||||
|
detected = append(detected, candidate.projectType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return detected, nil
|
return detected, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// PrimaryType returns the most specific project type detected in the directory.
|
// PrimaryType returns the most specific project type detected in the directory.
|
||||||
// Returns empty string if no project type is detected.
|
// Returns empty string if no project type is detected.
|
||||||
|
//
|
||||||
|
// pt, err := build.PrimaryType(io.Local, ".") // → "go"
|
||||||
func PrimaryType(fs io.Medium, dir string) (ProjectType, error) {
|
func PrimaryType(fs io.Medium, dir string) (ProjectType, error) {
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -62,33 +112,397 @@ func PrimaryType(fs io.Medium, dir string) (ProjectType, error) {
|
||||||
return types[0], nil
|
return types[0], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsGoProject checks if the directory contains a Go project (go.mod or wails.json).
|
// IsGoProject checks if the directory contains a Go project (go.mod, go.work, or wails.json).
|
||||||
|
//
|
||||||
|
// if build.IsGoProject(io.Local, ".") { ... }
|
||||||
func IsGoProject(fs io.Medium, dir string) bool {
|
func IsGoProject(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, filepath.Join(dir, markerGoMod)) ||
|
return fileExists(fs, ax.Join(dir, markerGoMod)) ||
|
||||||
fileExists(fs, filepath.Join(dir, markerWails))
|
fileExists(fs, ax.Join(dir, markerGoWork)) ||
|
||||||
|
fileExists(fs, ax.Join(dir, markerWails))
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsWailsProject checks if the directory contains a Wails project.
|
// IsWailsProject checks if the directory contains a Wails project.
|
||||||
|
//
|
||||||
|
// if build.IsWailsProject(io.Local, ".") { ... }
|
||||||
func IsWailsProject(fs io.Medium, dir string) bool {
|
func IsWailsProject(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, filepath.Join(dir, markerWails))
|
return fileExists(fs, ax.Join(dir, markerWails))
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsNodeProject checks if the directory contains a Node.js project.
|
// IsNodeProject checks if the directory contains a Node.js project.
|
||||||
|
//
|
||||||
|
// if build.IsNodeProject(io.Local, ".") { ... }
|
||||||
func IsNodeProject(fs io.Medium, dir string) bool {
|
func IsNodeProject(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, filepath.Join(dir, markerNodePackage))
|
return fileExists(fs, ax.Join(dir, markerNodePackage))
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsPHPProject checks if the directory contains a PHP project.
|
// IsPHPProject checks if the directory contains a PHP project.
|
||||||
|
//
|
||||||
|
// if build.IsPHPProject(io.Local, ".") { ... }
|
||||||
func IsPHPProject(fs io.Medium, dir string) bool {
|
func IsPHPProject(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, filepath.Join(dir, markerComposer))
|
return fileExists(fs, ax.Join(dir, markerComposer))
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsCPPProject checks if the directory contains a C++ project (CMakeLists.txt).
|
// IsCPPProject checks if the directory contains a C++ project (CMakeLists.txt).
|
||||||
|
//
|
||||||
|
// if build.IsCPPProject(io.Local, ".") { ... }
|
||||||
func IsCPPProject(fs io.Medium, dir string) bool {
|
func IsCPPProject(fs io.Medium, dir string) bool {
|
||||||
return fileExists(fs, filepath.Join(dir, "CMakeLists.txt"))
|
return fileExists(fs, ax.Join(dir, "CMakeLists.txt"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsMkDocsProject checks for MkDocs config at the project root or in docs/.
|
||||||
|
//
|
||||||
|
// ok := build.IsMkDocsProject(io.Local, ".")
|
||||||
|
func IsMkDocsProject(fs io.Medium, dir string) bool {
|
||||||
|
return ResolveMkDocsConfigPath(fs, dir) != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveMkDocsConfigPath returns the first MkDocs config path that exists.
|
||||||
|
//
|
||||||
|
// configPath := build.ResolveMkDocsConfigPath(io.Local, ".")
|
||||||
|
func ResolveMkDocsConfigPath(fs io.Medium, dir string) string {
|
||||||
|
for _, path := range []string{
|
||||||
|
ax.Join(dir, markerMkDocs),
|
||||||
|
ax.Join(dir, markerMkDocsYAML),
|
||||||
|
ax.Join(dir, "docs", "mkdocs.yml"),
|
||||||
|
ax.Join(dir, "docs", "mkdocs.yaml"),
|
||||||
|
} {
|
||||||
|
if fileExists(fs, path) {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasSubtreeNpm checks for package.json within depth 2 subdirectories.
|
||||||
|
// Ignores root package.json and node_modules directories.
|
||||||
|
// Returns true when a monorepo-style nested package.json is found.
|
||||||
|
//
|
||||||
|
// ok := build.HasSubtreeNpm(io.Local, ".") // true if apps/web/package.json exists
|
||||||
|
func HasSubtreeNpm(fs io.Medium, dir string) bool {
|
||||||
|
// Depth 1: list immediate subdirectories
|
||||||
|
entries, err := fs.List(dir)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if !entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
name := entry.Name()
|
||||||
|
if name == "node_modules" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
subdir := ax.Join(dir, name)
|
||||||
|
|
||||||
|
// Depth 1: check subdir/package.json
|
||||||
|
if fileExists(fs, ax.Join(subdir, markerNodePackage)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Depth 2: list subdirectories of subdir
|
||||||
|
subEntries, err := fs.List(subdir)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, subEntry := range subEntries {
|
||||||
|
if !subEntry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if subEntry.Name() == "node_modules" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
nested := ax.Join(subdir, subEntry.Name())
|
||||||
|
if fileExists(fs, ax.Join(nested, markerNodePackage)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsPythonProject checks for pyproject.toml or requirements.txt at the project root.
|
||||||
|
//
|
||||||
|
// ok := build.IsPythonProject(io.Local, ".")
|
||||||
|
func IsPythonProject(fs io.Medium, dir string) bool {
|
||||||
|
return fileExists(fs, ax.Join(dir, markerPyProject)) ||
|
||||||
|
fileExists(fs, ax.Join(dir, markerRequirements))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsRustProject checks for Cargo.toml at the project root.
|
||||||
|
//
|
||||||
|
// ok := build.IsRustProject(io.Local, ".")
|
||||||
|
func IsRustProject(fs io.Medium, dir string) bool {
|
||||||
|
return fileExists(fs, ax.Join(dir, markerCargo))
|
||||||
|
}
|
||||||
|
|
||||||
|
// DiscoveryResult holds the full project analysis from DiscoverFull().
//
// result, err := build.DiscoverFull(io.Local, ".")
// fmt.Println(result.PrimaryStack) // "wails"
type DiscoveryResult struct {
	// Types lists all detected project types in priority order.
	Types []ProjectType
	// PrimaryStack is the best stack suggestion based on detected types.
	// It is the string form of Types[0], or empty when nothing was detected.
	PrimaryStack string
	// HasFrontend is true when a root or frontend/ package.json/deno manifest is found,
	// or when a nested frontend tree is detected.
	HasFrontend bool
	// HasSubtreeNpm is true when a nested package.json exists within depth 2.
	HasSubtreeNpm bool
	// Markers records the presence of each raw marker file checked.
	Markers map[string]bool
	// Distro holds the detected Linux distribution version (e.g., "24.04").
	// Used by ComputeOptions to inject webkit2_41 tag on Ubuntu 24.04+.
	Distro string
}
|
||||||
|
|
||||||
|
// DiscoverFull returns a rich discovery result with all markers and metadata.
|
||||||
|
//
|
||||||
|
// result, err := build.DiscoverFull(io.Local, ".")
|
||||||
|
// if result.HasFrontend { ... }
|
||||||
|
func DiscoverFull(fs io.Medium, dir string) (*DiscoveryResult, error) {
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
result := &DiscoveryResult{
|
||||||
|
Types: types,
|
||||||
|
Markers: make(map[string]bool),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Record raw marker presence
|
||||||
|
allMarkers := []string{
|
||||||
|
markerGoMod, markerGoWork, markerWails, markerNodePackage, markerComposer,
|
||||||
|
markerMkDocs, markerMkDocsYAML, markerDocsMkDocs, markerDocsMkDocsYAML,
|
||||||
|
markerPyProject, markerRequirements, markerCargo,
|
||||||
|
"CMakeLists.txt", markerDockerfile, "Containerfile", "dockerfile", "containerfile",
|
||||||
|
markerFrontendPackage, markerFrontendDenoJSON, markerFrontendDenoJSONC,
|
||||||
|
markerLinuxKitYAML, markerLinuxKitYAMLAlt,
|
||||||
|
markerTaskfileYML, markerTaskfileYAML, markerTaskfileBare,
|
||||||
|
markerTaskfileLowerYML, markerTaskfileLowerYAML,
|
||||||
|
}
|
||||||
|
for _, m := range allMarkers {
|
||||||
|
result.Markers[m] = fileExists(fs, ax.Join(dir, m))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pattern-based marker: LinuxKit configs may live in .core/linuxkit/*.yml or *.yaml.
|
||||||
|
result.Markers[markerLinuxKitNestedYML] = hasYAMLInDir(fs, ax.Join(dir, ".core", "linuxkit"))
|
||||||
|
result.Markers[markerLinuxKitNestedYAML] = result.Markers[markerLinuxKitNestedYML]
|
||||||
|
|
||||||
|
// Subtree npm detection
|
||||||
|
result.HasSubtreeNpm = HasSubtreeNpm(fs, dir)
|
||||||
|
|
||||||
|
// Frontend detection: root manifests, frontend/ manifests, or nested frontend trees.
|
||||||
|
result.HasFrontend = hasFrontendManifest(fs, dir) ||
|
||||||
|
hasFrontendManifest(fs, ax.Join(dir, "frontend")) ||
|
||||||
|
hasSubtreeFrontendManifest(fs, dir) ||
|
||||||
|
result.HasSubtreeNpm
|
||||||
|
|
||||||
|
result.Types = types
|
||||||
|
|
||||||
|
// Linux distro detection: used for distro-sensitive build flags.
|
||||||
|
result.Distro = detectDistroVersion(fs)
|
||||||
|
|
||||||
|
// Primary stack: first detected type as string, or empty
|
||||||
|
if len(types) > 0 {
|
||||||
|
result.PrimaryStack = string(types[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasFrontendManifest reports whether a frontend directory contains a supported manifest.
|
||||||
|
func hasFrontendManifest(fs io.Medium, dir string) bool {
|
||||||
|
return fs.IsFile(ax.Join(dir, markerNodePackage)) ||
|
||||||
|
fs.IsFile(ax.Join(dir, "deno.json")) ||
|
||||||
|
fs.IsFile(ax.Join(dir, "deno.jsonc"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasSubtreeFrontendManifest checks for package.json or deno.json within depth 2 subdirectories.
func hasSubtreeFrontendManifest(fs io.Medium, dir string) bool {
	entries, err := fs.List(dir)
	if err != nil {
		// Unreadable root: report "not found" rather than failing.
		return false
	}

	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		name := entry.Name()
		// Skip dependency trees and hidden directories (e.g. .git, .core).
		if name == "node_modules" || strings.HasPrefix(name, ".") {
			continue
		}

		subdir := ax.Join(dir, name)
		// Depth 1: manifest directly inside the subdirectory.
		if hasFrontendManifest(fs, subdir) {
			return true
		}

		// Depth 2: scan one level deeper (e.g. apps/web/package.json).
		subEntries, err := fs.List(subdir)
		if err != nil {
			continue
		}
		for _, subEntry := range subEntries {
			if !subEntry.IsDir() {
				continue
			}
			if subEntry.Name() == "node_modules" || strings.HasPrefix(subEntry.Name(), ".") {
				continue
			}
			nested := ax.Join(subdir, subEntry.Name())
			if hasFrontendManifest(fs, nested) {
				return true
			}
		}
	}

	return false
}
|
||||||
|
|
||||||
// fileExists checks if a file exists and is not a directory.
// It delegates to the Medium so both local and virtual filesystems work.
func fileExists(fs io.Medium, path string) bool {
	return fs.IsFile(path)
}
|
||||||
|
|
||||||
|
// ResolveDockerfilePath returns the first Docker manifest path that exists.
|
||||||
|
//
|
||||||
|
// dockerfile := build.ResolveDockerfilePath(io.Local, ".")
|
||||||
|
func ResolveDockerfilePath(fs io.Medium, dir string) string {
|
||||||
|
for _, path := range []string{
|
||||||
|
ax.Join(dir, "Dockerfile"),
|
||||||
|
ax.Join(dir, "Containerfile"),
|
||||||
|
ax.Join(dir, "dockerfile"),
|
||||||
|
ax.Join(dir, "containerfile"),
|
||||||
|
} {
|
||||||
|
if fileExists(fs, path) {
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsDockerProject checks if the directory contains a Dockerfile or Containerfile.
|
||||||
|
//
|
||||||
|
// if build.IsDockerProject(io.Local, ".") { ... }
|
||||||
|
func IsDockerProject(fs io.Medium, dir string) bool {
|
||||||
|
return ResolveDockerfilePath(fs, dir) != ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsLinuxKitProject checks for linuxkit.yml or .core/linuxkit/*.yml.
|
||||||
|
//
|
||||||
|
// ok := build.IsLinuxKitProject(io.Local, ".")
|
||||||
|
func IsLinuxKitProject(fs io.Medium, dir string) bool {
|
||||||
|
if fileExists(fs, ax.Join(dir, markerLinuxKitYAML)) ||
|
||||||
|
fileExists(fs, ax.Join(dir, markerLinuxKitYAMLAlt)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return hasYAMLInDir(fs, ax.Join(dir, ".core", "linuxkit"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsTaskfileProject checks for supported Taskfile names in the project root.
|
||||||
|
//
|
||||||
|
// ok := build.IsTaskfileProject(io.Local, ".")
|
||||||
|
func IsTaskfileProject(fs io.Medium, dir string) bool {
|
||||||
|
for _, name := range []string{
|
||||||
|
markerTaskfileYML,
|
||||||
|
markerTaskfileYAML,
|
||||||
|
markerTaskfileBare,
|
||||||
|
markerTaskfileLowerYML,
|
||||||
|
markerTaskfileLowerYAML,
|
||||||
|
} {
|
||||||
|
if fileExists(fs, ax.Join(dir, name)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasYAMLInDir reports whether a directory contains at least one YAML file.
|
||||||
|
func hasYAMLInDir(fs io.Medium, dir string) bool {
|
||||||
|
if !fs.IsDir(dir) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := fs.List(dir)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
name := strings.ToLower(entry.Name())
|
||||||
|
if strings.HasSuffix(name, ".yml") || strings.HasSuffix(name, ".yaml") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// detectDistroVersion extracts the Ubuntu VERSION_ID from os-release data.
|
||||||
|
func detectDistroVersion(fs io.Medium) string {
|
||||||
|
if fs == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, path := range []string{"/etc/os-release", "/usr/lib/os-release"} {
|
||||||
|
content, err := fs.Read(path)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if distro := parseOSReleaseDistro(content); distro != "" {
|
||||||
|
return distro
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseOSReleaseDistro returns VERSION_ID for Ubuntu-style os-release content.
// Content whose ID is not ubuntu (and whose ID_LIKE does not list ubuntu)
// yields "", as does content with no VERSION_ID.
func parseOSReleaseDistro(content string) string {
	var (
		distroID string
		likeIDs  string
		verID    string
	)

	for _, raw := range strings.Split(content, "\n") {
		entry := strings.TrimSpace(raw)
		if entry == "" || strings.HasPrefix(entry, "#") {
			continue
		}

		key, value, found := strings.Cut(entry, "=")
		if !found {
			continue
		}

		// os-release values may be wrapped in single or double quotes.
		value = strings.Trim(strings.TrimSpace(value), `"'`)

		switch strings.TrimSpace(key) {
		case "ID":
			distroID = value
		case "ID_LIKE":
			likeIDs = value
		case "VERSION_ID":
			verID = value
		}
	}

	if verID == "" {
		return ""
	}

	// ID_LIKE is a space-separated list; pad with spaces for whole-word matching.
	if distroID == "ubuntu" || strings.Contains(" "+likeIDs+" ", " ubuntu ") {
		return verID
	}

	return ""
}
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
package build
|
package build
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
@ -15,14 +15,14 @@ func setupTestDir(t *testing.T, markers ...string) string {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
for _, m := range markers {
|
for _, m := range markers {
|
||||||
path := filepath.Join(dir, m)
|
path := ax.Join(dir, m)
|
||||||
err := os.WriteFile(path, []byte("{}"), 0644)
|
err := ax.WriteFile(path, []byte("{}"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDiscover_Good(t *testing.T) {
|
func TestDiscovery_Discover_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("detects Go project", func(t *testing.T) {
|
t.Run("detects Go project", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "go.mod")
|
dir := setupTestDir(t, "go.mod")
|
||||||
|
|
@ -31,6 +31,13 @@ func TestDiscover_Good(t *testing.T) {
|
||||||
assert.Equal(t, []ProjectType{ProjectTypeGo}, types)
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, types)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("detects Go workspace project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "go.work")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, types)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("detects Wails project with priority over Go", func(t *testing.T) {
|
t.Run("detects Wails project with priority over Go", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "wails.json", "go.mod")
|
dir := setupTestDir(t, "wails.json", "go.mod")
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
|
|
@ -45,6 +52,17 @@ func TestDiscover_Good(t *testing.T) {
|
||||||
assert.Equal(t, []ProjectType{ProjectTypeNode}, types)
|
assert.Equal(t, []ProjectType{ProjectTypeNode}, types)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("detects nested Node.js project", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nested := ax.Join(dir, "apps", "web")
|
||||||
|
require.NoError(t, ax.MkdirAll(nested, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeNode}, types)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("detects PHP project", func(t *testing.T) {
|
t.Run("detects PHP project", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "composer.json")
|
dir := setupTestDir(t, "composer.json")
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
|
|
@ -52,6 +70,116 @@ func TestDiscover_Good(t *testing.T) {
|
||||||
assert.Equal(t, []ProjectType{ProjectTypePHP}, types)
|
assert.Equal(t, []ProjectType{ProjectTypePHP}, types)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project with mkdocs.yaml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yaml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project in docs directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(dir, "docs"), 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "docs", "mkdocs.yml"), []byte("site_name: Demo\n"), 0644))
|
||||||
|
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project in docs directory with mkdocs.yaml", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(dir, "docs"), 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "docs", "mkdocs.yaml"), []byte("site_name: Demo\n"), 0644))
|
||||||
|
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Python project with pyproject.toml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "pyproject.toml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypePython}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Python project with requirements.txt", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "requirements.txt")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypePython}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Python only once with both markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "pyproject.toml", "requirements.txt")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypePython}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Rust project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Cargo.toml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeRust}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Docker project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Dockerfile")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocker}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Containerfile project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Containerfile")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocker}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects LinuxKit project", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(lkDir, "server.yml"), []byte("kernel:\n"), 0644))
|
||||||
|
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeLinuxKit}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects LinuxKit project from yaml config", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "linuxkit.yaml"), []byte("kernel:\n"), 0644))
|
||||||
|
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeLinuxKit}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects C++ project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "CMakeLists.txt")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeCPP}, types)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Taskfile project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Taskfile.yml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeTaskfile}, types)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("detects multiple project types", func(t *testing.T) {
|
t.Run("detects multiple project types", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "go.mod", "package.json")
|
dir := setupTestDir(t, "go.mod", "package.json")
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
|
|
@ -59,6 +187,13 @@ func TestDiscover_Good(t *testing.T) {
|
||||||
assert.Equal(t, []ProjectType{ProjectTypeGo, ProjectTypeNode}, types)
|
assert.Equal(t, []ProjectType{ProjectTypeGo, ProjectTypeNode}, types)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("preserves priority when core and fallback markers overlap", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "go.mod", "Dockerfile", "Taskfile.yml")
|
||||||
|
types, err := Discover(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo, ProjectTypeDocker, ProjectTypeTaskfile}, types)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("empty directory returns empty slice", func(t *testing.T) {
|
t.Run("empty directory returns empty slice", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
|
|
@ -67,18 +202,18 @@ func TestDiscover_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDiscover_Bad(t *testing.T) {
|
func TestDiscovery_Discover_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("non-existent directory returns empty slice", func(t *testing.T) {
|
t.Run("non-existent directory returns empty slice", func(t *testing.T) {
|
||||||
types, err := Discover(fs, "/non/existent/path")
|
types, err := Discover(fs, "/non/existent/path")
|
||||||
assert.NoError(t, err) // os.Stat fails silently in fileExists
|
assert.NoError(t, err) // ax.Stat fails silently in fileExists
|
||||||
assert.Empty(t, types)
|
assert.Empty(t, types)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("directory marker is ignored", func(t *testing.T) {
|
t.Run("directory marker is ignored", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
// Create go.mod as a directory instead of a file
|
// Create go.mod as a directory instead of a file
|
||||||
err := os.Mkdir(filepath.Join(dir, "go.mod"), 0755)
|
err := ax.Mkdir(ax.Join(dir, "go.mod"), 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
|
|
@ -87,7 +222,7 @@ func TestDiscover_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestPrimaryType_Good(t *testing.T) {
|
func TestDiscovery_PrimaryType_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns wails for wails project", func(t *testing.T) {
|
t.Run("returns wails for wails project", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "wails.json", "go.mod")
|
dir := setupTestDir(t, "wails.json", "go.mod")
|
||||||
|
|
@ -103,6 +238,17 @@ func TestPrimaryType_Good(t *testing.T) {
|
||||||
assert.Equal(t, ProjectTypeGo, primary)
|
assert.Equal(t, ProjectTypeGo, primary)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("returns node for nested package.json project", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nested := ax.Join(dir, "apps", "web")
|
||||||
|
require.NoError(t, ax.MkdirAll(nested, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
primary, err := PrimaryType(fs, dir)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, ProjectTypeNode, primary)
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("returns empty string for empty directory", func(t *testing.T) {
|
t.Run("returns empty string for empty directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
primary, err := PrimaryType(fs, dir)
|
primary, err := PrimaryType(fs, dir)
|
||||||
|
|
@ -111,13 +257,18 @@ func TestPrimaryType_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIsGoProject_Good(t *testing.T) {
|
func TestDiscovery_IsGoProject_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("true with go.mod", func(t *testing.T) {
|
t.Run("true with go.mod", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "go.mod")
|
dir := setupTestDir(t, "go.mod")
|
||||||
assert.True(t, IsGoProject(fs, dir))
|
assert.True(t, IsGoProject(fs, dir))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("true with go.work", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "go.work")
|
||||||
|
assert.True(t, IsGoProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
t.Run("true with wails.json", func(t *testing.T) {
|
t.Run("true with wails.json", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "wails.json")
|
dir := setupTestDir(t, "wails.json")
|
||||||
assert.True(t, IsGoProject(fs, dir))
|
assert.True(t, IsGoProject(fs, dir))
|
||||||
|
|
@ -129,7 +280,7 @@ func TestIsGoProject_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIsWailsProject_Good(t *testing.T) {
|
func TestDiscovery_IsWailsProject_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("true with wails.json", func(t *testing.T) {
|
t.Run("true with wails.json", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "wails.json")
|
dir := setupTestDir(t, "wails.json")
|
||||||
|
|
@ -142,7 +293,7 @@ func TestIsWailsProject_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIsNodeProject_Good(t *testing.T) {
|
func TestDiscovery_IsNodeProject_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("true with package.json", func(t *testing.T) {
|
t.Run("true with package.json", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "package.json")
|
dir := setupTestDir(t, "package.json")
|
||||||
|
|
@ -155,7 +306,7 @@ func TestIsNodeProject_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestIsPHPProject_Good(t *testing.T) {
|
func TestDiscovery_IsPHPProject_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("true with composer.json", func(t *testing.T) {
|
t.Run("true with composer.json", func(t *testing.T) {
|
||||||
dir := setupTestDir(t, "composer.json")
|
dir := setupTestDir(t, "composer.json")
|
||||||
|
|
@ -168,17 +319,17 @@ func TestIsPHPProject_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTarget_Good(t *testing.T) {
|
func TestDiscovery_Target_Good(t *testing.T) {
|
||||||
target := Target{OS: "linux", Arch: "amd64"}
|
target := Target{OS: "linux", Arch: "amd64"}
|
||||||
assert.Equal(t, "linux/amd64", target.String())
|
assert.Equal(t, "linux/amd64", target.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFileExists_Good(t *testing.T) {
|
func TestDiscovery_FileExists_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("returns true for existing file", func(t *testing.T) {
|
t.Run("returns true for existing file", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
path := filepath.Join(dir, "test.txt")
|
path := ax.Join(dir, "test.txt")
|
||||||
err := os.WriteFile(path, []byte("content"), 0644)
|
err := ax.WriteFile(path, []byte("content"), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.True(t, fileExists(fs, path))
|
assert.True(t, fileExists(fs, path))
|
||||||
})
|
})
|
||||||
|
|
@ -195,9 +346,9 @@ func TestFileExists_Good(t *testing.T) {
|
||||||
|
|
||||||
// TestDiscover_Testdata tests discovery using the testdata fixtures.
|
// TestDiscover_Testdata tests discovery using the testdata fixtures.
|
||||||
// These serve as integration tests with realistic project structures.
|
// These serve as integration tests with realistic project structures.
|
||||||
func TestDiscover_Testdata(t *testing.T) {
|
func TestDiscovery_DiscoverTestdata_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
testdataDir, err := filepath.Abs("testdata")
|
testdataDir, err := ax.Abs("testdata")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
|
|
@ -211,11 +362,14 @@ func TestDiscover_Testdata(t *testing.T) {
|
||||||
{"php-project", "php-project", []ProjectType{ProjectTypePHP}},
|
{"php-project", "php-project", []ProjectType{ProjectTypePHP}},
|
||||||
{"multi-project", "multi-project", []ProjectType{ProjectTypeGo, ProjectTypeNode}},
|
{"multi-project", "multi-project", []ProjectType{ProjectTypeGo, ProjectTypeNode}},
|
||||||
{"empty-project", "empty-project", []ProjectType{}},
|
{"empty-project", "empty-project", []ProjectType{}},
|
||||||
|
{"docs-project", "docs-project", []ProjectType{ProjectTypeDocs}},
|
||||||
|
{"python-project", "python-project", []ProjectType{ProjectTypePython}},
|
||||||
|
{"rust-project", "rust-project", []ProjectType{ProjectTypeRust}},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
dir := filepath.Join(testdataDir, tt.dir)
|
dir := ax.Join(testdataDir, tt.dir)
|
||||||
types, err := Discover(fs, dir)
|
types, err := Discover(fs, dir)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
if len(tt.expected) == 0 {
|
if len(tt.expected) == 0 {
|
||||||
|
|
@ -226,3 +380,500 @@ func TestDiscover_Testdata(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsMkDocsProject_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("true with mkdocs.yml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yml")
|
||||||
|
assert.True(t, IsMkDocsProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("true with mkdocs.yaml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yaml")
|
||||||
|
assert.True(t, IsMkDocsProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("false without mkdocs.yml", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
assert.False(t, IsMkDocsProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsMkDocsProject_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false for non-existent directory", func(t *testing.T) {
|
||||||
|
assert.False(t, IsMkDocsProject(fs, "/non/existent/path"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsMkDocsProject_Ugly(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false when mkdocs.yml is a directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.Mkdir(ax.Join(dir, "mkdocs.yml"), 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, IsMkDocsProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_HasSubtreeNpm_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("true with depth 1 nested package.json", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
subdir := ax.Join(dir, "packages", "web")
|
||||||
|
err := ax.MkdirAll(subdir, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(dir, "packages", "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("true with depth 2 nested package.json", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nested := ax.Join(dir, "apps", "web")
|
||||||
|
err := ax.MkdirAll(nested, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("false with only root package.json", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "package.json")
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("false with empty directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_HasSubtreeNpm_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false for non-existent directory", func(t *testing.T) {
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, "/non/existent/path"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("ignores node_modules at depth 1", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nmDir := ax.Join(dir, "node_modules", "some-pkg")
|
||||||
|
err := ax.MkdirAll(nmDir, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(nmDir, "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("ignores node_modules at depth 2", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
nmDir := ax.Join(dir, "apps", "node_modules", "some-pkg")
|
||||||
|
err := ax.MkdirAll(nmDir, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(nmDir, "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
// Also need the apps dir to be listable — it is since we created nmDir inside it
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_HasSubtreeNpm_Ugly(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false when nested package.json is beyond depth 2", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
deep := ax.Join(dir, "a", "b", "c")
|
||||||
|
err := ax.MkdirAll(deep, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(deep, "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, HasSubtreeNpm(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsPythonProject_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("true with pyproject.toml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "pyproject.toml")
|
||||||
|
assert.True(t, IsPythonProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("true with requirements.txt", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "requirements.txt")
|
||||||
|
assert.True(t, IsPythonProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("true with both markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "pyproject.toml", "requirements.txt")
|
||||||
|
assert.True(t, IsPythonProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("false without markers", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
assert.False(t, IsPythonProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsPythonProject_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false for non-existent directory", func(t *testing.T) {
|
||||||
|
assert.False(t, IsPythonProject(fs, "/non/existent/path"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsPythonProject_Ugly(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false when pyproject.toml is a directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.Mkdir(ax.Join(dir, "pyproject.toml"), 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, IsPythonProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsRustProject_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("true with Cargo.toml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Cargo.toml")
|
||||||
|
assert.True(t, IsRustProject(fs, dir))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("false without Cargo.toml", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
assert.False(t, IsRustProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsRustProject_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false for non-existent directory", func(t *testing.T) {
|
||||||
|
assert.False(t, IsRustProject(fs, "/non/existent/path"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_IsRustProject_Ugly(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("false when Cargo.toml is a directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.Mkdir(ax.Join(dir, "Cargo.toml"), 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.False(t, IsRustProject(fs, dir))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_DiscoverFull_Good(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("returns complete result for Go project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "go.mod")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, result.Types)
|
||||||
|
assert.Equal(t, "go", result.PrimaryStack)
|
||||||
|
assert.False(t, result.HasFrontend)
|
||||||
|
assert.False(t, result.HasSubtreeNpm)
|
||||||
|
assert.True(t, result.Markers["go.mod"])
|
||||||
|
assert.False(t, result.Markers["wails.json"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns complete result for Go workspace project", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "go.work")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, result.Types)
|
||||||
|
assert.Equal(t, "go", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["go.work"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns complete result for Wails project with frontend", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
// Create wails.json, go.mod, and frontend/package.json
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "wails.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(dir, "go.mod"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.MkdirAll(ax.Join(dir, "frontend"), 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(dir, "frontend", "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeWails, ProjectTypeGo, ProjectTypeNode}, result.Types)
|
||||||
|
assert.Equal(t, "wails", result.PrimaryStack)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
assert.True(t, result.Markers["wails.json"])
|
||||||
|
assert.True(t, result.Markers["go.mod"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects subtree npm as frontend", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
nested := ax.Join(dir, "apps", "web")
|
||||||
|
err = ax.MkdirAll(nested, 0755)
|
||||||
|
require.NoError(t, err)
|
||||||
|
err = ax.WriteFile(ax.Join(nested, "package.json"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo, ProjectTypeNode}, result.Types)
|
||||||
|
assert.True(t, result.HasSubtreeNpm)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects root package.json as frontend", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "package.json"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeNode}, result.Types)
|
||||||
|
assert.Equal(t, "node", result.PrimaryStack)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
assert.False(t, result.HasSubtreeNpm)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects frontend deno manifest at project root", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
frontendDir := ax.Join(dir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.json"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, result.Types)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
assert.False(t, result.HasSubtreeNpm)
|
||||||
|
assert.True(t, result.Markers["frontend/deno.json"])
|
||||||
|
assert.False(t, result.Markers["frontend/package.json"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects nested deno frontend manifests", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
err := ax.WriteFile(ax.Join(dir, "go.mod"), []byte("{}"), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
frontendDir := ax.Join(dir, "apps", "site")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.jsonc"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeGo}, result.Types)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
assert.False(t, result.HasSubtreeNpm)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("records frontend package manifest markers", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
frontendDir := ax.Join(dir, "frontend")
|
||||||
|
require.NoError(t, ax.MkdirAll(frontendDir, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "package.json"), []byte("{}"), 0644))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(frontendDir, "deno.jsonc"), []byte("{}"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.True(t, result.HasFrontend)
|
||||||
|
assert.True(t, result.Markers["frontend/package.json"])
|
||||||
|
assert.True(t, result.Markers["frontend/deno.jsonc"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty directory returns empty result", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, result.Types)
|
||||||
|
assert.Empty(t, result.PrimaryStack)
|
||||||
|
assert.False(t, result.HasFrontend)
|
||||||
|
assert.False(t, result.HasSubtreeNpm)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yml")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, result.Types)
|
||||||
|
assert.Equal(t, "docs", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["mkdocs.yml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project markers with mkdocs.yaml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "mkdocs.yaml")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, result.Types)
|
||||||
|
assert.Equal(t, "docs", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["mkdocs.yaml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects docs project markers in docs directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
require.NoError(t, ax.MkdirAll(ax.Join(dir, "docs"), 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(dir, "docs", "mkdocs.yaml"), []byte("site_name: Demo\n"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocs}, result.Types)
|
||||||
|
assert.Equal(t, "docs", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["docs/mkdocs.yaml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Rust project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Cargo.toml")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeRust}, result.Types)
|
||||||
|
assert.Equal(t, "rust", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["Cargo.toml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Python project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "pyproject.toml")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypePython}, result.Types)
|
||||||
|
assert.Equal(t, "python", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["pyproject.toml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Docker project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Dockerfile")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocker}, result.Types)
|
||||||
|
assert.Equal(t, "docker", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["Dockerfile"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("records alternate Docker manifest markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Containerfile", "dockerfile", "containerfile")
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeDocker}, result.Types)
|
||||||
|
assert.Equal(t, "docker", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["Containerfile"])
|
||||||
|
assert.True(t, result.Markers["dockerfile"])
|
||||||
|
assert.True(t, result.Markers["containerfile"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects LinuxKit project markers in .core/linuxkit", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
lkDir := ax.Join(dir, ".core", "linuxkit")
|
||||||
|
require.NoError(t, ax.MkdirAll(lkDir, 0755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(lkDir, "server.yml"), []byte("kernel:\n image: test"), 0644))
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeLinuxKit}, result.Types)
|
||||||
|
assert.Equal(t, "linuxkit", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers[".core/linuxkit/*.yml"])
|
||||||
|
assert.True(t, result.Markers[".core/linuxkit/*.yaml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects LinuxKit project markers in linuxkit.yaml", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "linuxkit.yaml")
|
||||||
|
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeLinuxKit}, result.Types)
|
||||||
|
assert.Equal(t, "linuxkit", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["linuxkit.yaml"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects C++ project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "CMakeLists.txt")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeCPP}, result.Types)
|
||||||
|
assert.Equal(t, "cpp", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["CMakeLists.txt"])
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("detects Taskfile project markers", func(t *testing.T) {
|
||||||
|
dir := setupTestDir(t, "Taskfile.yaml")
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, []ProjectType{ProjectTypeTaskfile}, result.Types)
|
||||||
|
assert.Equal(t, "taskfile", result.PrimaryStack)
|
||||||
|
assert.True(t, result.Markers["Taskfile.yaml"])
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_DiscoverFull_Bad(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("non-existent directory returns empty result", func(t *testing.T) {
|
||||||
|
result, err := DiscoverFull(fs, "/non/existent/path")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Empty(t, result.Types)
|
||||||
|
assert.Empty(t, result.PrimaryStack)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_DiscoverFull_Ugly(t *testing.T) {
|
||||||
|
fs := io.Local
|
||||||
|
t.Run("markers map is never nil even for empty directory", func(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
result, err := DiscoverFull(fs, dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotNil(t, result.Markers)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_ParseOSReleaseDistro_Good(t *testing.T) {
|
||||||
|
t.Run("returns ubuntu version id", func(t *testing.T) {
|
||||||
|
content := `
|
||||||
|
NAME="Ubuntu"
|
||||||
|
ID=ubuntu
|
||||||
|
VERSION_ID="24.04"
|
||||||
|
ID_LIKE=debian
|
||||||
|
`
|
||||||
|
assert.Equal(t, "24.04", parseOSReleaseDistro(content))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts ubuntu-style values without quotes", func(t *testing.T) {
|
||||||
|
content := `
|
||||||
|
ID=ubuntu
|
||||||
|
VERSION_ID=25.10
|
||||||
|
`
|
||||||
|
assert.Equal(t, "25.10", parseOSReleaseDistro(content))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_ParseOSReleaseDistro_Bad(t *testing.T) {
|
||||||
|
t.Run("returns empty for non-ubuntu distro", func(t *testing.T) {
|
||||||
|
content := `
|
||||||
|
ID=fedora
|
||||||
|
VERSION_ID=41
|
||||||
|
`
|
||||||
|
assert.Empty(t, parseOSReleaseDistro(content))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("returns empty when version missing", func(t *testing.T) {
|
||||||
|
content := `
|
||||||
|
ID=ubuntu
|
||||||
|
`
|
||||||
|
assert.Empty(t, parseOSReleaseDistro(content))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_DetectDistroVersion_Good(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
require.NoError(t, fs.Write("/etc/os-release", `
|
||||||
|
ID=ubuntu
|
||||||
|
VERSION_ID="24.04"
|
||||||
|
`))
|
||||||
|
|
||||||
|
assert.Equal(t, "24.04", detectDistroVersion(fs))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDiscovery_DetectDistroVersion_Bad(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
require.NoError(t, fs.Write("/etc/os-release", `
|
||||||
|
ID=fedora
|
||||||
|
VERSION_ID=41
|
||||||
|
`))
|
||||||
|
|
||||||
|
assert.Empty(t, detectDistroVersion(fs))
|
||||||
|
}
|
||||||
|
|
|
||||||
194
pkg/build/options.go
Normal file
194
pkg/build/options.go
Normal file
|
|
@ -0,0 +1,194 @@
|
||||||
|
// Package build provides project type detection and cross-compilation for the Core build system.
|
||||||
|
// This file handles build options computation from config + discovery.
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
)
|
||||||
|
|
||||||
|
// BuildOptions holds computed build flags from config + discovery.
|
||||||
|
//
|
||||||
|
// opts := build.ComputeOptions(cfg, discovery)
|
||||||
|
// fmt.Println(opts.String()) // "-tags webkit2_41"
|
||||||
|
type BuildOptions struct {
|
||||||
|
// Obfuscate uses garble instead of go build for obfuscation.
|
||||||
|
Obfuscate bool
|
||||||
|
// Tags holds de-duplicated Go build tags.
|
||||||
|
Tags []string
|
||||||
|
// NSIS enables Windows NSIS installer generation (Wails only).
|
||||||
|
NSIS bool
|
||||||
|
// WebView2 sets the WebView2 delivery method: download|embed|browser|error.
|
||||||
|
WebView2 string
|
||||||
|
// LDFlags holds linker flags merged from config.
|
||||||
|
LDFlags []string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ComputeOptions merges config + discovery into build flags.
|
||||||
|
// Handles distro-aware webkit tag injection (Ubuntu 24.04 → webkit2_41).
|
||||||
|
// Returns safe defaults when cfg or discovery is nil.
|
||||||
|
//
|
||||||
|
// opts := build.ComputeOptions(cfg, result)
|
||||||
|
// if opts.Obfuscate { /* use garble */ }
|
||||||
|
func ComputeOptions(cfg *BuildConfig, discovery *DiscoveryResult) *BuildOptions {
|
||||||
|
options := &BuildOptions{}
|
||||||
|
|
||||||
|
if cfg != nil {
|
||||||
|
options.Obfuscate = cfg.Build.Obfuscate
|
||||||
|
options.NSIS = cfg.Build.NSIS
|
||||||
|
options.WebView2 = cfg.Build.WebView2
|
||||||
|
options.LDFlags = append(options.LDFlags, cfg.Build.LDFlags...)
|
||||||
|
options.Tags = append(options.Tags, cfg.Build.BuildTags...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Inject webkit2_41 tag for Ubuntu 24.04+ when discovery provides distro info
|
||||||
|
if discovery != nil && discovery.Distro != "" {
|
||||||
|
options.Tags = InjectWebKitTag(options.Tags, discovery.Distro)
|
||||||
|
}
|
||||||
|
|
||||||
|
// De-duplicate tags
|
||||||
|
options.Tags = deduplicateTags(options.Tags)
|
||||||
|
|
||||||
|
return options
|
||||||
|
}
|
||||||
|
|
||||||
|
// ApplyOptions copies computed build options onto a runtime build config.
|
||||||
|
//
|
||||||
|
// build.ApplyOptions(cfg, build.ComputeOptions(config, discovery))
|
||||||
|
func ApplyOptions(cfg *Config, options *BuildOptions) {
|
||||||
|
if cfg == nil || options == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if options.Obfuscate {
|
||||||
|
cfg.Obfuscate = true
|
||||||
|
}
|
||||||
|
if options.NSIS {
|
||||||
|
cfg.NSIS = true
|
||||||
|
}
|
||||||
|
if options.WebView2 != "" {
|
||||||
|
cfg.WebView2 = options.WebView2
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(options.LDFlags) > 0 {
|
||||||
|
cfg.LDFlags = append([]string{}, options.LDFlags...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(options.Tags) > 0 {
|
||||||
|
cfg.BuildTags = deduplicateTags(append(cfg.BuildTags, options.Tags...))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// InjectWebKitTag adds webkit2_41 tag for Ubuntu 24.04+ if not already present.
|
||||||
|
// Called automatically by ComputeOptions when discovery detects Linux.
|
||||||
|
//
|
||||||
|
// tags := build.InjectWebKitTag(tags, "24.04") // ["webkit2_41"]
|
||||||
|
// tags := build.InjectWebKitTag(tags, "22.04") // unchanged
|
||||||
|
func InjectWebKitTag(tags []string, distro string) []string {
|
||||||
|
if distro == "" {
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the distro version is 24.04 or newer
|
||||||
|
if !isUbuntu2404OrNewer(distro) {
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if tag is already present
|
||||||
|
for _, tag := range tags {
|
||||||
|
if tag == "webkit2_41" {
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return append([]string{"webkit2_41"}, tags...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// String returns the options as a CLI flag string.
|
||||||
|
//
|
||||||
|
// s := opts.String() // "-tags webkit2_41 -ldflags '-s -w'"
|
||||||
|
func (o *BuildOptions) String() string {
|
||||||
|
if o == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
var parts []string
|
||||||
|
|
||||||
|
if o.Obfuscate {
|
||||||
|
parts = append(parts, "-obfuscated")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(o.Tags) > 0 {
|
||||||
|
parts = append(parts, "-tags "+core.Join(",", o.Tags...))
|
||||||
|
}
|
||||||
|
|
||||||
|
if o.NSIS {
|
||||||
|
parts = append(parts, "-nsis")
|
||||||
|
}
|
||||||
|
|
||||||
|
if o.WebView2 != "" {
|
||||||
|
parts = append(parts, "-webview2 "+o.WebView2)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(o.LDFlags) > 0 {
|
||||||
|
parts = append(parts, "-ldflags '"+core.Join(" ", o.LDFlags...)+"'")
|
||||||
|
}
|
||||||
|
|
||||||
|
return core.Join(" ", parts...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// isUbuntu2404OrNewer checks if the distro version string represents Ubuntu 24.04+.
|
||||||
|
// Compares major.minor version numerically.
|
||||||
|
//
|
||||||
|
// isUbuntu2404OrNewer("24.04") // true
|
||||||
|
// isUbuntu2404OrNewer("22.04") // false
|
||||||
|
// isUbuntu2404OrNewer("25.10") // true
|
||||||
|
func isUbuntu2404OrNewer(distro string) bool {
|
||||||
|
parts := core.Split(distro, ".")
|
||||||
|
if len(parts) != 2 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
major, err := strconv.Atoi(parts[0])
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
minor, err := strconv.Atoi(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// 24.04 or newer: major > 24, or major == 24 and minor >= 4
|
||||||
|
if major > 24 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if major == 24 && minor >= 4 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// deduplicateTags removes duplicate entries from a tag slice while preserving order.
|
||||||
|
//
|
||||||
|
// deduplicateTags([]string{"a", "b", "a"}) // ["a", "b"]
|
||||||
|
func deduplicateTags(tags []string) []string {
|
||||||
|
if len(tags) == 0 {
|
||||||
|
return tags
|
||||||
|
}
|
||||||
|
|
||||||
|
seen := make(map[string]bool, len(tags))
|
||||||
|
result := make([]string, 0, len(tags))
|
||||||
|
|
||||||
|
for _, tag := range tags {
|
||||||
|
if tag == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if !seen[tag] {
|
||||||
|
seen[tag] = true
|
||||||
|
result = append(result, tag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
358
pkg/build/options_test.go
Normal file
358
pkg/build/options_test.go
Normal file
|
|
@ -0,0 +1,358 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
|
// --- ComputeOptions ---
|
||||||
|
|
||||||
|
func TestOptions_ComputeOptions_Good(t *testing.T) {
|
||||||
|
t.Run("normal config produces correct options", func(t *testing.T) {
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
Obfuscate: true,
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "embed",
|
||||||
|
BuildTags: []string{"integration"},
|
||||||
|
LDFlags: []string{"-s", "-w"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
discovery := &DiscoveryResult{
|
||||||
|
Types: []ProjectType{ProjectTypeWails},
|
||||||
|
PrimaryStack: "wails",
|
||||||
|
Distro: "24.04",
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := ComputeOptions(cfg, discovery)
|
||||||
|
|
||||||
|
assert.NotNil(t, opts)
|
||||||
|
assert.True(t, opts.Obfuscate)
|
||||||
|
assert.True(t, opts.NSIS)
|
||||||
|
assert.Equal(t, "embed", opts.WebView2)
|
||||||
|
assert.Equal(t, []string{"-s", "-w"}, opts.LDFlags)
|
||||||
|
assert.Equal(t, []string{"webkit2_41", "integration"}, opts.Tags)
|
||||||
|
// webkit2_41 injected for 24.04
|
||||||
|
assert.Contains(t, opts.Tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("discovery with non-Ubuntu distro leaves tags empty", func(t *testing.T) {
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
LDFlags: []string{"-s"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
discovery := &DiscoveryResult{
|
||||||
|
Distro: "22.04",
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := ComputeOptions(cfg, discovery)
|
||||||
|
|
||||||
|
assert.NotNil(t, opts)
|
||||||
|
assert.Empty(t, opts.Tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("discovery with 25.10 distro injects webkit tag", func(t *testing.T) {
|
||||||
|
opts := ComputeOptions(&BuildConfig{}, &DiscoveryResult{Distro: "25.10"})
|
||||||
|
assert.Contains(t, opts.Tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_ComputeOptions_Bad(t *testing.T) {
|
||||||
|
t.Run("nil config returns safe defaults", func(t *testing.T) {
|
||||||
|
discovery := &DiscoveryResult{Distro: "24.04"}
|
||||||
|
|
||||||
|
opts := ComputeOptions(nil, discovery)
|
||||||
|
|
||||||
|
assert.NotNil(t, opts)
|
||||||
|
assert.False(t, opts.Obfuscate)
|
||||||
|
assert.False(t, opts.NSIS)
|
||||||
|
assert.Empty(t, opts.WebView2)
|
||||||
|
assert.Empty(t, opts.LDFlags)
|
||||||
|
// webkit2_41 still injected from discovery
|
||||||
|
assert.Contains(t, opts.Tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("nil discovery skips webkit injection", func(t *testing.T) {
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
Obfuscate: true,
|
||||||
|
BuildTags: []string{"existing"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := ComputeOptions(cfg, nil)
|
||||||
|
|
||||||
|
assert.NotNil(t, opts)
|
||||||
|
assert.True(t, opts.Obfuscate)
|
||||||
|
assert.Equal(t, []string{"existing"}, opts.Tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("both nil returns empty options", func(t *testing.T) {
|
||||||
|
opts := ComputeOptions(nil, nil)
|
||||||
|
|
||||||
|
assert.NotNil(t, opts)
|
||||||
|
assert.False(t, opts.Obfuscate)
|
||||||
|
assert.False(t, opts.NSIS)
|
||||||
|
assert.Empty(t, opts.Tags)
|
||||||
|
assert.Empty(t, opts.LDFlags)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_ComputeOptions_Ugly(t *testing.T) {
|
||||||
|
t.Run("duplicate tags from deduplication", func(t *testing.T) {
|
||||||
|
// Seed webkit2_41 before discovery also injects it
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
BuildTags: []string{"integration", "integration", "ui"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
discovery := &DiscoveryResult{Distro: "24.04"}
|
||||||
|
|
||||||
|
opts := ComputeOptions(cfg, discovery)
|
||||||
|
|
||||||
|
// Even though InjectWebKitTag is called once, deduplication must hold
|
||||||
|
count := 0
|
||||||
|
for _, tag := range opts.Tags {
|
||||||
|
if tag == "webkit2_41" {
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.Equal(t, 1, count, "webkit2_41 must appear exactly once")
|
||||||
|
assert.Equal(t, []string{"webkit2_41", "integration", "ui"}, opts.Tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty distro in discovery produces no webkit tag", func(t *testing.T) {
|
||||||
|
opts := ComputeOptions(&BuildConfig{}, &DiscoveryResult{Distro: ""})
|
||||||
|
assert.Empty(t, opts.Tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("all flags set simultaneously do not conflict", func(t *testing.T) {
|
||||||
|
cfg := &BuildConfig{
|
||||||
|
Build: Build{
|
||||||
|
Obfuscate: true,
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "download",
|
||||||
|
LDFlags: []string{"-s", "-w", "-X main.version=v1.0.0"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
discovery := &DiscoveryResult{Distro: "24.04"}
|
||||||
|
|
||||||
|
opts := ComputeOptions(cfg, discovery)
|
||||||
|
|
||||||
|
assert.True(t, opts.Obfuscate)
|
||||||
|
assert.True(t, opts.NSIS)
|
||||||
|
assert.Equal(t, "download", opts.WebView2)
|
||||||
|
assert.Equal(t, []string{"-s", "-w", "-X main.version=v1.0.0"}, opts.LDFlags)
|
||||||
|
assert.Contains(t, opts.Tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- InjectWebKitTag ---
|
||||||
|
|
||||||
|
func TestOptions_InjectWebKitTag_Good(t *testing.T) {
|
||||||
|
t.Run("24.04 adds webkit2_41", func(t *testing.T) {
|
||||||
|
// InjectWebKitTag(tags, "24.04") → ["webkit2_41"]
|
||||||
|
tags := InjectWebKitTag(nil, "24.04")
|
||||||
|
assert.Equal(t, []string{"webkit2_41"}, tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("24.10 adds webkit2_41", func(t *testing.T) {
|
||||||
|
tags := InjectWebKitTag([]string{}, "24.10")
|
||||||
|
assert.Contains(t, tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("25.04 adds webkit2_41", func(t *testing.T) {
|
||||||
|
tags := InjectWebKitTag(nil, "25.04")
|
||||||
|
assert.Contains(t, tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("existing tags are preserved before webkit2_41", func(t *testing.T) {
|
||||||
|
existing := []string{"foo", "bar"}
|
||||||
|
tags := InjectWebKitTag(existing, "24.04")
|
||||||
|
assert.Contains(t, tags, "webkit2_41")
|
||||||
|
assert.Contains(t, tags, "foo")
|
||||||
|
assert.Contains(t, tags, "bar")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_InjectWebKitTag_Bad(t *testing.T) {
|
||||||
|
t.Run("22.04 does not add tag", func(t *testing.T) {
|
||||||
|
// InjectWebKitTag(nil, "22.04") → unchanged (nil)
|
||||||
|
tags := InjectWebKitTag(nil, "22.04")
|
||||||
|
assert.Empty(t, tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("23.10 does not add tag", func(t *testing.T) {
|
||||||
|
tags := InjectWebKitTag([]string{"existing"}, "23.10")
|
||||||
|
assert.NotContains(t, tags, "webkit2_41")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_InjectWebKitTag_Ugly(t *testing.T) {
|
||||||
|
t.Run("tag already present — not duplicated", func(t *testing.T) {
|
||||||
|
// InjectWebKitTag(["webkit2_41"], "24.04") → ["webkit2_41"] (unchanged)
|
||||||
|
tags := InjectWebKitTag([]string{"webkit2_41"}, "24.04")
|
||||||
|
count := 0
|
||||||
|
for _, tag := range tags {
|
||||||
|
if tag == "webkit2_41" {
|
||||||
|
count++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.Equal(t, 1, count)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty distro returns tags unchanged", func(t *testing.T) {
|
||||||
|
input := []string{"foo"}
|
||||||
|
tags := InjectWebKitTag(input, "")
|
||||||
|
assert.Equal(t, input, tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("malformed version — no dot — returns tags unchanged", func(t *testing.T) {
|
||||||
|
// isUbuntu2404OrNewer("2404") → false (no dot)
|
||||||
|
tags := InjectWebKitTag(nil, "2404")
|
||||||
|
assert.Empty(t, tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("malformed version — non-numeric major — returns unchanged", func(t *testing.T) {
|
||||||
|
tags := InjectWebKitTag(nil, "ubuntu.04")
|
||||||
|
assert.Empty(t, tags)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("malformed version — non-numeric minor — returns unchanged", func(t *testing.T) {
|
||||||
|
tags := InjectWebKitTag(nil, "24.lts")
|
||||||
|
assert.Empty(t, tags)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- ApplyOptions ---
|
||||||
|
|
||||||
|
func TestOptions_ApplyOptions_Good(t *testing.T) {
|
||||||
|
t.Run("copies computed options onto runtime config", func(t *testing.T) {
|
||||||
|
cfg := &Config{
|
||||||
|
BuildTags: []string{"existing"},
|
||||||
|
LDFlags: []string{"-s"},
|
||||||
|
}
|
||||||
|
options := &BuildOptions{
|
||||||
|
Obfuscate: true,
|
||||||
|
Tags: []string{"webkit2_41", "integration"},
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "embed",
|
||||||
|
LDFlags: []string{"-trimpath", "-w"},
|
||||||
|
}
|
||||||
|
|
||||||
|
ApplyOptions(cfg, options)
|
||||||
|
|
||||||
|
assert.True(t, cfg.Obfuscate)
|
||||||
|
assert.True(t, cfg.NSIS)
|
||||||
|
assert.Equal(t, "embed", cfg.WebView2)
|
||||||
|
assert.Equal(t, []string{"-trimpath", "-w"}, cfg.LDFlags)
|
||||||
|
assert.Equal(t, []string{"existing", "webkit2_41", "integration"}, cfg.BuildTags)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_ApplyOptions_Bad(t *testing.T) {
|
||||||
|
t.Run("nil config is ignored", func(t *testing.T) {
|
||||||
|
assert.NotPanics(t, func() {
|
||||||
|
ApplyOptions(nil, &BuildOptions{Obfuscate: true})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("nil options are ignored", func(t *testing.T) {
|
||||||
|
cfg := &Config{BuildTags: []string{"existing"}}
|
||||||
|
|
||||||
|
assert.NotPanics(t, func() {
|
||||||
|
ApplyOptions(cfg, nil)
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, []string{"existing"}, cfg.BuildTags)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_ApplyOptions_Ugly(t *testing.T) {
|
||||||
|
t.Run("empty options leaves config unchanged", func(t *testing.T) {
|
||||||
|
cfg := &Config{
|
||||||
|
BuildTags: []string{"existing"},
|
||||||
|
LDFlags: []string{"-s"},
|
||||||
|
Obfuscate: true,
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "browser",
|
||||||
|
}
|
||||||
|
|
||||||
|
ApplyOptions(cfg, &BuildOptions{})
|
||||||
|
|
||||||
|
assert.True(t, cfg.Obfuscate)
|
||||||
|
assert.True(t, cfg.NSIS)
|
||||||
|
assert.Equal(t, "browser", cfg.WebView2)
|
||||||
|
assert.Equal(t, []string{"-s"}, cfg.LDFlags)
|
||||||
|
assert.Equal(t, []string{"existing"}, cfg.BuildTags)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- String ---
|
||||||
|
|
||||||
|
func TestOptions_String_Good(t *testing.T) {
|
||||||
|
t.Run("tags only produces correct string", func(t *testing.T) {
|
||||||
|
// opts.String() // "-tags webkit2_41"
|
||||||
|
opts := &BuildOptions{Tags: []string{"webkit2_41"}}
|
||||||
|
assert.Equal(t, "-tags webkit2_41", opts.String())
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("ldflags only produces correct string", func(t *testing.T) {
|
||||||
|
opts := &BuildOptions{LDFlags: []string{"-s", "-w"}}
|
||||||
|
assert.Equal(t, "-ldflags '-s -w'", opts.String())
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("tags and ldflags are space-separated", func(t *testing.T) {
|
||||||
|
opts := &BuildOptions{
|
||||||
|
Tags: []string{"webkit2_41"},
|
||||||
|
LDFlags: []string{"-s", "-w"},
|
||||||
|
}
|
||||||
|
s := opts.String()
|
||||||
|
assert.Contains(t, s, "-tags webkit2_41")
|
||||||
|
assert.Contains(t, s, "-ldflags '-s -w'")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("empty options returns empty string", func(t *testing.T) {
|
||||||
|
opts := &BuildOptions{}
|
||||||
|
assert.Equal(t, "", opts.String())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_String_Bad(t *testing.T) {
|
||||||
|
t.Run("nil receiver returns empty string", func(t *testing.T) {
|
||||||
|
// var opts *BuildOptions; opts.String() → ""
|
||||||
|
var opts *BuildOptions
|
||||||
|
assert.Equal(t, "", opts.String())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestOptions_String_Ugly(t *testing.T) {
|
||||||
|
t.Run("all fields set simultaneously", func(t *testing.T) {
|
||||||
|
// s := opts.String() // "-obfuscated -tags webkit2_41 -nsis -webview2 embed -ldflags '-s -w'"
|
||||||
|
opts := &BuildOptions{
|
||||||
|
Obfuscate: true,
|
||||||
|
Tags: []string{"webkit2_41"},
|
||||||
|
NSIS: true,
|
||||||
|
WebView2: "embed",
|
||||||
|
LDFlags: []string{"-s", "-w"},
|
||||||
|
}
|
||||||
|
s := opts.String()
|
||||||
|
assert.Contains(t, s, "-obfuscated")
|
||||||
|
assert.Contains(t, s, "-tags webkit2_41")
|
||||||
|
assert.Contains(t, s, "-nsis")
|
||||||
|
assert.Contains(t, s, "-webview2 embed")
|
||||||
|
assert.Contains(t, s, "-ldflags '-s -w'")
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("multiple tags joined with comma", func(t *testing.T) {
|
||||||
|
opts := &BuildOptions{Tags: []string{"webkit2_41", "integration"}}
|
||||||
|
assert.Equal(t, "-tags webkit2_41,integration", opts.String())
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("webview2 without other flags is isolated", func(t *testing.T) {
|
||||||
|
opts := &BuildOptions{WebView2: "browser"}
|
||||||
|
assert.Equal(t, "-webview2 browser", opts.String())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
@ -2,14 +2,16 @@ package signing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os/exec"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// MacOSSigner signs binaries using macOS codesign.
|
// MacOSSigner signs binaries using macOS codesign.
|
||||||
|
//
|
||||||
|
// s := signing.NewMacOSSigner(cfg.MacOS)
|
||||||
type MacOSSigner struct {
|
type MacOSSigner struct {
|
||||||
config MacOSConfig
|
config MacOSConfig
|
||||||
}
|
}
|
||||||
|
|
@ -18,16 +20,22 @@ type MacOSSigner struct {
|
||||||
var _ Signer = (*MacOSSigner)(nil)
|
var _ Signer = (*MacOSSigner)(nil)
|
||||||
|
|
||||||
// NewMacOSSigner creates a new macOS signer.
|
// NewMacOSSigner creates a new macOS signer.
|
||||||
|
//
|
||||||
|
// s := signing.NewMacOSSigner(cfg.MacOS)
|
||||||
func NewMacOSSigner(cfg MacOSConfig) *MacOSSigner {
|
func NewMacOSSigner(cfg MacOSConfig) *MacOSSigner {
|
||||||
return &MacOSSigner{config: cfg}
|
return &MacOSSigner{config: cfg}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns "codesign".
|
// Name returns "codesign".
|
||||||
|
//
|
||||||
|
// name := s.Name() // → "codesign"
|
||||||
func (s *MacOSSigner) Name() string {
|
func (s *MacOSSigner) Name() string {
|
||||||
return "codesign"
|
return "codesign"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Available checks if running on macOS with codesign and identity configured.
|
// Available checks if running on macOS with codesign and identity configured.
|
||||||
|
//
|
||||||
|
// ok := s.Available() // → true if on macOS with identity set
|
||||||
func (s *MacOSSigner) Available() bool {
|
func (s *MacOSSigner) Available() bool {
|
||||||
if runtime.GOOS != "darwin" {
|
if runtime.GOOS != "darwin" {
|
||||||
return false
|
return false
|
||||||
|
|
@ -35,11 +43,13 @@ func (s *MacOSSigner) Available() bool {
|
||||||
if s.config.Identity == "" {
|
if s.config.Identity == "" {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
_, err := exec.LookPath("codesign")
|
_, err := resolveCodesignCli()
|
||||||
return err == nil
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sign codesigns a binary with hardened runtime.
|
// Sign codesigns a binary with hardened runtime.
|
||||||
|
//
|
||||||
|
// err := s.Sign(ctx, io.Local, "dist/myapp")
|
||||||
func (s *MacOSSigner) Sign(ctx context.Context, fs io.Medium, binary string) error {
|
func (s *MacOSSigner) Sign(ctx context.Context, fs io.Medium, binary string) error {
|
||||||
if !s.Available() {
|
if !s.Available() {
|
||||||
if runtime.GOOS != "darwin" {
|
if runtime.GOOS != "darwin" {
|
||||||
|
|
@ -51,17 +61,20 @@ func (s *MacOSSigner) Sign(ctx context.Context, fs io.Medium, binary string) err
|
||||||
return coreerr.E("codesign.Sign", "codesign tool not found in PATH", nil)
|
return coreerr.E("codesign.Sign", "codesign tool not found in PATH", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "codesign",
|
codesignCommand, err := resolveCodesignCli()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("codesign.Sign", "codesign tool not found in PATH", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
output, err := ax.CombinedOutput(ctx, "", nil, codesignCommand,
|
||||||
"--sign", s.config.Identity,
|
"--sign", s.config.Identity,
|
||||||
"--timestamp",
|
"--timestamp",
|
||||||
"--options", "runtime", // Hardened runtime for notarization
|
"--options", "runtime", // Hardened runtime for notarization
|
||||||
"--force",
|
"--force",
|
||||||
binary,
|
binary,
|
||||||
)
|
)
|
||||||
|
|
||||||
output, err := cmd.CombinedOutput()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("codesign.Sign", string(output), err)
|
return coreerr.E("codesign.Sign", output, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -69,41 +82,103 @@ func (s *MacOSSigner) Sign(ctx context.Context, fs io.Medium, binary string) err
|
||||||
|
|
||||||
// Notarize submits binary to Apple for notarization and staples the ticket.
|
// Notarize submits binary to Apple for notarization and staples the ticket.
|
||||||
// This blocks until Apple responds (typically 1-5 minutes).
|
// This blocks until Apple responds (typically 1-5 minutes).
|
||||||
|
//
|
||||||
|
// err := s.Notarize(ctx, io.Local, "dist/myapp")
|
||||||
func (s *MacOSSigner) Notarize(ctx context.Context, fs io.Medium, binary string) error {
|
func (s *MacOSSigner) Notarize(ctx context.Context, fs io.Medium, binary string) error {
|
||||||
if s.config.AppleID == "" || s.config.TeamID == "" || s.config.AppPassword == "" {
|
if s.config.AppleID == "" || s.config.TeamID == "" || s.config.AppPassword == "" {
|
||||||
return coreerr.E("codesign.Notarize", "missing Apple credentials (apple_id, team_id, app_password)", nil)
|
return coreerr.E("codesign.Notarize", "missing Apple credentials (apple_id, team_id, app_password)", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
zipCommand, err := resolveZipCli()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("codesign.Notarize", "zip tool not found in PATH", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
xcrunCommand, err := resolveXcrunCli()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("codesign.Notarize", "xcrun tool not found in PATH", err)
|
||||||
|
}
|
||||||
|
|
||||||
// Create ZIP for submission
|
// Create ZIP for submission
|
||||||
zipPath := binary + ".zip"
|
zipPath := binary + ".zip"
|
||||||
zipCmd := exec.CommandContext(ctx, "zip", "-j", zipPath, binary)
|
if output, err := ax.CombinedOutput(ctx, "", nil, zipCommand, "-j", zipPath, binary); err != nil {
|
||||||
if output, err := zipCmd.CombinedOutput(); err != nil {
|
return coreerr.E("codesign.Notarize", "failed to create zip: "+output, err)
|
||||||
return coreerr.E("codesign.Notarize", "failed to create zip: "+string(output), err)
|
|
||||||
}
|
}
|
||||||
defer func() { _ = fs.Delete(zipPath) }()
|
defer func() { _ = fs.Delete(zipPath) }()
|
||||||
|
|
||||||
// Submit to Apple and wait
|
// Submit to Apple and wait
|
||||||
submitCmd := exec.CommandContext(ctx, "xcrun", "notarytool", "submit",
|
if output, err := ax.CombinedOutput(ctx, "", nil, xcrunCommand, "notarytool", "submit",
|
||||||
zipPath,
|
zipPath,
|
||||||
"--apple-id", s.config.AppleID,
|
"--apple-id", s.config.AppleID,
|
||||||
"--team-id", s.config.TeamID,
|
"--team-id", s.config.TeamID,
|
||||||
"--password", s.config.AppPassword,
|
"--password", s.config.AppPassword,
|
||||||
"--wait",
|
"--wait",
|
||||||
)
|
); err != nil {
|
||||||
if output, err := submitCmd.CombinedOutput(); err != nil {
|
return coreerr.E("codesign.Notarize", "notarization failed: "+output, err)
|
||||||
return coreerr.E("codesign.Notarize", "notarization failed: "+string(output), err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Staple the ticket
|
// Staple the ticket
|
||||||
stapleCmd := exec.CommandContext(ctx, "xcrun", "stapler", "staple", binary)
|
if output, err := ax.CombinedOutput(ctx, "", nil, xcrunCommand, "stapler", "staple", binary); err != nil {
|
||||||
if output, err := stapleCmd.CombinedOutput(); err != nil {
|
return coreerr.E("codesign.Notarize", "failed to staple: "+output, err)
|
||||||
return coreerr.E("codesign.Notarize", "failed to staple: "+string(output), err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// ShouldNotarize returns true if notarization is enabled.
|
// ShouldNotarize returns true if notarization is enabled.
|
||||||
|
//
|
||||||
|
// if s.ShouldNotarize() { ... }
|
||||||
func (s *MacOSSigner) ShouldNotarize() bool {
|
func (s *MacOSSigner) ShouldNotarize() bool {
|
||||||
return s.config.Notarize
|
return s.config.Notarize
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func resolveCodesignCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/bin/codesign",
|
||||||
|
"/usr/local/bin/codesign",
|
||||||
|
"/opt/homebrew/bin/codesign",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("codesign", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("codesign.resolveCodesignCli", "codesign tool not found. Install Xcode Command Line Tools on macOS.", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveZipCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/bin/zip",
|
||||||
|
"/usr/local/bin/zip",
|
||||||
|
"/opt/homebrew/bin/zip",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("zip", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("codesign.resolveZipCli", "zip tool not found. Install the zip utility for notarisation packaging.", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveXcrunCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/bin/xcrun",
|
||||||
|
"/usr/local/bin/xcrun",
|
||||||
|
"/opt/homebrew/bin/xcrun",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("xcrun", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("codesign.resolveXcrunCli", "xcrun tool not found. Install Xcode Command Line Tools on macOS.", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -5,16 +5,18 @@ import (
|
||||||
"runtime"
|
"runtime"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestMacOSSigner_Good_Name(t *testing.T) {
|
func TestCodesign_MacOSSignerName_Good(t *testing.T) {
|
||||||
s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"})
|
s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"})
|
||||||
assert.Equal(t, "codesign", s.Name())
|
assert.Equal(t, "codesign", s.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMacOSSigner_Good_Available(t *testing.T) {
|
func TestCodesign_MacOSSignerAvailable_Good(t *testing.T) {
|
||||||
s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"})
|
s := NewMacOSSigner(MacOSConfig{Identity: "Developer ID Application: Test"})
|
||||||
|
|
||||||
if runtime.GOOS == "darwin" {
|
if runtime.GOOS == "darwin" {
|
||||||
|
|
@ -25,12 +27,12 @@ func TestMacOSSigner_Good_Available(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMacOSSigner_Bad_NoIdentity(t *testing.T) {
|
func TestCodesign_MacOSSignerNoIdentity_Bad(t *testing.T) {
|
||||||
s := NewMacOSSigner(MacOSConfig{})
|
s := NewMacOSSigner(MacOSConfig{})
|
||||||
assert.False(t, s.Available())
|
assert.False(t, s.Available())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMacOSSigner_Sign_Bad(t *testing.T) {
|
func TestCodesign_MacOSSignerSign_Bad(t *testing.T) {
|
||||||
t.Run("fails when not available", func(t *testing.T) {
|
t.Run("fails when not available", func(t *testing.T) {
|
||||||
if runtime.GOOS == "darwin" {
|
if runtime.GOOS == "darwin" {
|
||||||
t.Skip("skipping on macOS")
|
t.Skip("skipping on macOS")
|
||||||
|
|
@ -43,7 +45,7 @@ func TestMacOSSigner_Sign_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMacOSSigner_Notarize_Bad(t *testing.T) {
|
func TestCodesign_MacOSSignerNotarize_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("fails with missing credentials", func(t *testing.T) {
|
t.Run("fails with missing credentials", func(t *testing.T) {
|
||||||
s := NewMacOSSigner(MacOSConfig{})
|
s := NewMacOSSigner(MacOSConfig{})
|
||||||
|
|
@ -53,10 +55,67 @@ func TestMacOSSigner_Notarize_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMacOSSigner_ShouldNotarize(t *testing.T) {
|
func TestCodesign_MacOSSignerShouldNotarize_Good(t *testing.T) {
|
||||||
s := NewMacOSSigner(MacOSConfig{Notarize: true})
|
s := NewMacOSSigner(MacOSConfig{Notarize: true})
|
||||||
assert.True(t, s.ShouldNotarize())
|
assert.True(t, s.ShouldNotarize())
|
||||||
|
|
||||||
s2 := NewMacOSSigner(MacOSConfig{Notarize: false})
|
s2 := NewMacOSSigner(MacOSConfig{Notarize: false})
|
||||||
assert.False(t, s2.ShouldNotarize())
|
assert.False(t, s2.ShouldNotarize())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveCodesignCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "codesign")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveCodesignCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveCodesignCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := resolveCodesignCli(ax.Join(t.TempDir(), "missing-codesign"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "codesign tool not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveZipCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "zip")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveZipCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveZipCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := resolveZipCli(ax.Join(t.TempDir(), "missing-zip"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "zip tool not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveXcrunCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "xcrun")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveXcrunCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCodesign_ResolveXcrunCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := resolveXcrunCli(ax.Join(t.TempDir(), "missing-xcrun"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "xcrun tool not found")
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -2,13 +2,15 @@ package signing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os/exec"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// GPGSigner signs files using GPG.
|
// GPGSigner signs files using GPG.
|
||||||
|
//
|
||||||
|
// s := signing.NewGPGSigner("ABCD1234")
|
||||||
type GPGSigner struct {
|
type GPGSigner struct {
|
||||||
KeyID string
|
KeyID string
|
||||||
}
|
}
|
||||||
|
|
@ -17,43 +19,71 @@ type GPGSigner struct {
|
||||||
var _ Signer = (*GPGSigner)(nil)
|
var _ Signer = (*GPGSigner)(nil)
|
||||||
|
|
||||||
// NewGPGSigner creates a new GPG signer.
|
// NewGPGSigner creates a new GPG signer.
|
||||||
|
//
|
||||||
|
// s := signing.NewGPGSigner("ABCD1234")
|
||||||
func NewGPGSigner(keyID string) *GPGSigner {
|
func NewGPGSigner(keyID string) *GPGSigner {
|
||||||
return &GPGSigner{KeyID: keyID}
|
return &GPGSigner{KeyID: keyID}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns "gpg".
|
// Name returns "gpg".
|
||||||
|
//
|
||||||
|
// name := s.Name() // → "gpg"
|
||||||
func (s *GPGSigner) Name() string {
|
func (s *GPGSigner) Name() string {
|
||||||
return "gpg"
|
return "gpg"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Available checks if gpg is installed and key is configured.
|
// Available checks if gpg is installed and key is configured.
|
||||||
|
//
|
||||||
|
// ok := s.Available() // → true if gpg is in PATH and key is set
|
||||||
func (s *GPGSigner) Available() bool {
|
func (s *GPGSigner) Available() bool {
|
||||||
if s.KeyID == "" {
|
if s.KeyID == "" {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
_, err := exec.LookPath("gpg")
|
_, err := resolveGpgCli()
|
||||||
return err == nil
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sign creates a detached ASCII-armored signature.
|
// Sign creates a detached ASCII-armored signature.
|
||||||
// For file.txt, creates file.txt.asc
|
// For file.txt, creates file.txt.asc
|
||||||
|
//
|
||||||
|
// err := s.Sign(ctx, io.Local, "dist/CHECKSUMS.txt") // creates CHECKSUMS.txt.asc
|
||||||
func (s *GPGSigner) Sign(ctx context.Context, fs io.Medium, file string) error {
|
func (s *GPGSigner) Sign(ctx context.Context, fs io.Medium, file string) error {
|
||||||
if !s.Available() {
|
if s.KeyID == "" {
|
||||||
return coreerr.E("gpg.Sign", "gpg not available or key not configured", nil)
|
return coreerr.E("gpg.Sign", "gpg not available or key not configured", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "gpg",
|
gpgCommand, err := resolveGpgCli()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("gpg.Sign", "gpg not available or key not configured", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
output, err := ax.CombinedOutput(ctx, "", nil, gpgCommand,
|
||||||
"--detach-sign",
|
"--detach-sign",
|
||||||
"--armor",
|
"--armor",
|
||||||
"--local-user", s.KeyID,
|
"--local-user", s.KeyID,
|
||||||
"--output", file+".asc",
|
"--output", file+".asc",
|
||||||
file,
|
file,
|
||||||
)
|
)
|
||||||
|
|
||||||
output, err := cmd.CombinedOutput()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("gpg.Sign", string(output), err)
|
return coreerr.E("gpg.Sign", output, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func resolveGpgCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/gpg",
|
||||||
|
"/opt/homebrew/bin/gpg",
|
||||||
|
"/usr/local/MacGPG2/bin/gpg",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("gpg", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("gpg.resolveGpgCli", "gpg CLI not found. Install it from https://gnupg.org/download/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,26 +4,28 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestGPGSigner_Good_Name(t *testing.T) {
|
func TestGPG_GPGSignerName_Good(t *testing.T) {
|
||||||
s := NewGPGSigner("ABCD1234")
|
s := NewGPGSigner("ABCD1234")
|
||||||
assert.Equal(t, "gpg", s.Name())
|
assert.Equal(t, "gpg", s.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGPGSigner_Good_Available(t *testing.T) {
|
func TestGPG_GPGSignerAvailable_Good(t *testing.T) {
|
||||||
s := NewGPGSigner("ABCD1234")
|
s := NewGPGSigner("ABCD1234")
|
||||||
_ = s.Available()
|
_ = s.Available()
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGPGSigner_Bad_NoKey(t *testing.T) {
|
func TestGPG_GPGSignerNoKey_Bad(t *testing.T) {
|
||||||
s := NewGPGSigner("")
|
s := NewGPGSigner("")
|
||||||
assert.False(t, s.Available())
|
assert.False(t, s.Available())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGPGSigner_Sign_Bad(t *testing.T) {
|
func TestGPG_GPGSignerSign_Bad(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
t.Run("fails when no key", func(t *testing.T) {
|
t.Run("fails when no key", func(t *testing.T) {
|
||||||
s := NewGPGSigner("")
|
s := NewGPGSigner("")
|
||||||
|
|
@ -32,3 +34,22 @@ func TestGPGSigner_Sign_Bad(t *testing.T) {
|
||||||
assert.Contains(t, err.Error(), "not available or key not configured")
|
assert.Contains(t, err.Error(), "not available or key not configured")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestGPG_ResolveGpgCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "gpg")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveGpgCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGPG_ResolveGpgCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := resolveGpgCli(ax.Join(t.TempDir(), "missing-gpg"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "gpg CLI not found")
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -2,53 +2,57 @@ package signing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Artifact represents a build output that can be signed.
|
// Artifact represents a build output that can be signed.
|
||||||
// This mirrors build.Artifact to avoid import cycles.
|
// This mirrors build.Artifact to avoid import cycles.
|
||||||
|
//
|
||||||
|
// a := signing.Artifact{Path: "dist/myapp", OS: "darwin", Arch: "arm64"}
|
||||||
type Artifact struct {
|
type Artifact struct {
|
||||||
Path string
|
Path string
|
||||||
OS string
|
OS string
|
||||||
Arch string
|
Arch string
|
||||||
}
|
}
|
||||||
|
|
||||||
// SignBinaries signs macOS binaries in the artifacts list.
|
// SignBinaries signs binaries for the current host OS in the artifacts list.
|
||||||
// Only signs darwin binaries when running on macOS with a configured identity.
|
// On macOS it signs darwin artifacts with codesign; on Windows it signs windows
|
||||||
|
// artifacts with signtool when the relevant credentials are configured.
|
||||||
|
//
|
||||||
|
// err := signing.SignBinaries(ctx, io.Local, cfg, artifacts)
|
||||||
func SignBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifacts []Artifact) error {
|
func SignBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifacts []Artifact) error {
|
||||||
if !cfg.Enabled {
|
if !cfg.Enabled {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Only sign on macOS
|
var signer Signer
|
||||||
if runtime.GOOS != "darwin" {
|
var targetOS string
|
||||||
|
|
||||||
|
switch runtime.GOOS {
|
||||||
|
case "darwin":
|
||||||
|
signer = NewMacOSSigner(cfg.MacOS)
|
||||||
|
targetOS = "darwin"
|
||||||
|
case "windows":
|
||||||
|
signer = NewWindowsSigner(cfg.Windows)
|
||||||
|
targetOS = "windows"
|
||||||
|
default:
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
signer := NewMacOSSigner(cfg.MacOS)
|
|
||||||
if !signer.Available() {
|
if !signer.Available() {
|
||||||
return nil // Silently skip if not configured
|
return nil // Silently skip if not configured
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, artifact := range artifacts {
|
return signArtifactsWithSigner(ctx, fs, signer, targetOS, artifacts)
|
||||||
if artifact.OS != "darwin" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
fmt.Printf(" Signing %s...\n", artifact.Path)
|
|
||||||
if err := signer.Sign(ctx, fs, artifact.Path); err != nil {
|
|
||||||
return coreerr.E("signing.SignBinaries", "failed to sign "+artifact.Path, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// NotarizeBinaries notarizes macOS binaries if enabled.
|
// NotarizeBinaries notarizes macOS binaries if enabled.
|
||||||
|
//
|
||||||
|
// err := signing.NotarizeBinaries(ctx, io.Local, cfg, artifacts)
|
||||||
func NotarizeBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifacts []Artifact) error {
|
func NotarizeBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifacts []Artifact) error {
|
||||||
if !cfg.Enabled || !cfg.MacOS.Notarize {
|
if !cfg.Enabled || !cfg.MacOS.Notarize {
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -68,7 +72,7 @@ func NotarizeBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifac
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf(" Notarizing %s (this may take a few minutes)...\n", artifact.Path)
|
core.Print(nil, " Notarizing %s (this may take a few minutes)...", artifact.Path)
|
||||||
if err := signer.Notarize(ctx, fs, artifact.Path); err != nil {
|
if err := signer.Notarize(ctx, fs, artifact.Path); err != nil {
|
||||||
return coreerr.E("signing.NotarizeBinaries", "failed to notarize "+artifact.Path, err)
|
return coreerr.E("signing.NotarizeBinaries", "failed to notarize "+artifact.Path, err)
|
||||||
}
|
}
|
||||||
|
|
@ -78,6 +82,8 @@ func NotarizeBinaries(ctx context.Context, fs io.Medium, cfg SignConfig, artifac
|
||||||
}
|
}
|
||||||
|
|
||||||
// SignChecksums signs the checksums file with GPG.
|
// SignChecksums signs the checksums file with GPG.
|
||||||
|
//
|
||||||
|
// err := signing.SignChecksums(ctx, io.Local, cfg, "dist/CHECKSUMS.txt")
|
||||||
func SignChecksums(ctx context.Context, fs io.Medium, cfg SignConfig, checksumFile string) error {
|
func SignChecksums(ctx context.Context, fs io.Medium, cfg SignConfig, checksumFile string) error {
|
||||||
if !cfg.Enabled {
|
if !cfg.Enabled {
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -88,10 +94,27 @@ func SignChecksums(ctx context.Context, fs io.Medium, cfg SignConfig, checksumFi
|
||||||
return nil // Silently skip if not configured
|
return nil // Silently skip if not configured
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf(" Signing %s with GPG...\n", checksumFile)
|
core.Print(nil, " Signing %s with GPG...", checksumFile)
|
||||||
if err := signer.Sign(ctx, fs, checksumFile); err != nil {
|
if err := signer.Sign(ctx, fs, checksumFile); err != nil {
|
||||||
return coreerr.E("signing.SignChecksums", "failed to sign checksums file "+checksumFile, err)
|
return coreerr.E("signing.SignChecksums", "failed to sign checksums file "+checksumFile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func signArtifactsWithSigner(ctx context.Context, fs io.Medium, signer Signer, targetOS string, artifacts []Artifact) error {
|
||||||
|
_ = fs
|
||||||
|
|
||||||
|
for _, artifact := range artifacts {
|
||||||
|
if artifact.OS != targetOS {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
core.Print(nil, " Signing %s...", artifact.Path)
|
||||||
|
if err := signer.Sign(ctx, fs, artifact.Path); err != nil {
|
||||||
|
return coreerr.E("signing.SignBinaries", "failed to sign "+artifact.Path, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,13 +3,15 @@ package signing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Signer defines the interface for code signing implementations.
|
// Signer defines the interface for code signing implementations.
|
||||||
|
//
|
||||||
|
// var s signing.Signer = signing.NewGPGSigner(keyID)
|
||||||
|
// err := s.Sign(ctx, io.Local, "dist/myapp")
|
||||||
type Signer interface {
|
type Signer interface {
|
||||||
// Name returns the signer's identifier.
|
// Name returns the signer's identifier.
|
||||||
Name() string
|
Name() string
|
||||||
|
|
@ -20,6 +22,8 @@ type Signer interface {
|
||||||
}
|
}
|
||||||
|
|
||||||
// SignConfig holds signing configuration from .core/build.yaml.
|
// SignConfig holds signing configuration from .core/build.yaml.
|
||||||
|
//
|
||||||
|
// cfg := signing.DefaultSignConfig()
|
||||||
type SignConfig struct {
|
type SignConfig struct {
|
||||||
Enabled bool `yaml:"enabled"`
|
Enabled bool `yaml:"enabled"`
|
||||||
GPG GPGConfig `yaml:"gpg,omitempty"`
|
GPG GPGConfig `yaml:"gpg,omitempty"`
|
||||||
|
|
@ -28,11 +32,15 @@ type SignConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// GPGConfig holds GPG signing configuration.
|
// GPGConfig holds GPG signing configuration.
|
||||||
|
//
|
||||||
|
// cfg := signing.GPGConfig{Key: "ABCD1234"}
|
||||||
type GPGConfig struct {
|
type GPGConfig struct {
|
||||||
Key string `yaml:"key"` // Key ID or fingerprint, supports $ENV
|
Key string `yaml:"key"` // Key ID or fingerprint, supports $ENV
|
||||||
}
|
}
|
||||||
|
|
||||||
// MacOSConfig holds macOS codesign configuration.
|
// MacOSConfig holds macOS codesign configuration.
|
||||||
|
//
|
||||||
|
// cfg := signing.MacOSConfig{Identity: "Developer ID Application: Acme Inc (TEAM123)"}
|
||||||
type MacOSConfig struct {
|
type MacOSConfig struct {
|
||||||
Identity string `yaml:"identity"` // Developer ID Application: ...
|
Identity string `yaml:"identity"` // Developer ID Application: ...
|
||||||
Notarize bool `yaml:"notarize"` // Submit to Apple for notarization
|
Notarize bool `yaml:"notarize"` // Submit to Apple for notarization
|
||||||
|
|
@ -41,29 +49,39 @@ type MacOSConfig struct {
|
||||||
AppPassword string `yaml:"app_password"` // App-specific password
|
AppPassword string `yaml:"app_password"` // App-specific password
|
||||||
}
|
}
|
||||||
|
|
||||||
// WindowsConfig holds Windows signtool configuration (placeholder).
|
// WindowsConfig holds Windows signtool configuration.
|
||||||
|
//
|
||||||
|
// cfg := signing.WindowsConfig{Certificate: "cert.pfx", Password: "secret"}
|
||||||
type WindowsConfig struct {
|
type WindowsConfig struct {
|
||||||
Certificate string `yaml:"certificate"` // Path to .pfx
|
Certificate string `yaml:"certificate"` // Path to .pfx
|
||||||
Password string `yaml:"password"` // Certificate password
|
Password string `yaml:"password"` // Certificate password
|
||||||
}
|
}
|
||||||
|
|
||||||
// DefaultSignConfig returns sensible defaults.
|
// DefaultSignConfig returns sensible defaults.
|
||||||
|
//
|
||||||
|
// cfg := signing.DefaultSignConfig()
|
||||||
func DefaultSignConfig() SignConfig {
|
func DefaultSignConfig() SignConfig {
|
||||||
return SignConfig{
|
return SignConfig{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
GPG: GPGConfig{
|
GPG: GPGConfig{
|
||||||
Key: os.Getenv("GPG_KEY_ID"),
|
Key: core.Env("GPG_KEY_ID"),
|
||||||
},
|
},
|
||||||
MacOS: MacOSConfig{
|
MacOS: MacOSConfig{
|
||||||
Identity: os.Getenv("CODESIGN_IDENTITY"),
|
Identity: core.Env("CODESIGN_IDENTITY"),
|
||||||
AppleID: os.Getenv("APPLE_ID"),
|
AppleID: core.Env("APPLE_ID"),
|
||||||
TeamID: os.Getenv("APPLE_TEAM_ID"),
|
TeamID: core.Env("APPLE_TEAM_ID"),
|
||||||
AppPassword: os.Getenv("APPLE_APP_PASSWORD"),
|
AppPassword: core.Env("APPLE_APP_PASSWORD"),
|
||||||
|
},
|
||||||
|
Windows: WindowsConfig{
|
||||||
|
Certificate: core.Env("SIGNTOOL_CERTIFICATE"),
|
||||||
|
Password: core.Env("SIGNTOOL_PASSWORD"),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ExpandEnv expands environment variables in config values.
|
// ExpandEnv expands environment variables in config values.
|
||||||
|
//
|
||||||
|
// cfg.ExpandEnv() // expands $GPG_KEY_ID, $CODESIGN_IDENTITY etc.
|
||||||
func (c *SignConfig) ExpandEnv() {
|
func (c *SignConfig) ExpandEnv() {
|
||||||
c.GPG.Key = expandEnv(c.GPG.Key)
|
c.GPG.Key = expandEnv(c.GPG.Key)
|
||||||
c.MacOS.Identity = expandEnv(c.MacOS.Identity)
|
c.MacOS.Identity = expandEnv(c.MacOS.Identity)
|
||||||
|
|
@ -76,8 +94,47 @@ func (c *SignConfig) ExpandEnv() {
|
||||||
|
|
||||||
// expandEnv expands $VAR or ${VAR} in a string.
|
// expandEnv expands $VAR or ${VAR} in a string.
|
||||||
func expandEnv(s string) string {
|
func expandEnv(s string) string {
|
||||||
if strings.HasPrefix(s, "$") {
|
if !core.Contains(s, "$") {
|
||||||
return os.ExpandEnv(s)
|
return s
|
||||||
}
|
}
|
||||||
return s
|
|
||||||
|
buf := core.NewBuilder()
|
||||||
|
for i := 0; i < len(s); {
|
||||||
|
if s[i] != '$' {
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if i+1 < len(s) && s[i+1] == '{' {
|
||||||
|
j := i + 2
|
||||||
|
for j < len(s) && s[j] != '}' {
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j < len(s) {
|
||||||
|
buf.WriteString(core.Env(s[i+2 : j]))
|
||||||
|
i = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
j := i + 1
|
||||||
|
for j < len(s) {
|
||||||
|
c := s[j]
|
||||||
|
if c != '_' && (c < '0' || c > '9') && (c < 'A' || c > 'Z') && (c < 'a' || c > 'z') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j > i+1 {
|
||||||
|
buf.WriteString(core.Env(s[i+1 : j]))
|
||||||
|
i = j
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String()
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -7,9 +7,10 @@ import (
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestSignBinaries_Good_SkipsNonDarwin(t *testing.T) {
|
func TestSigning_SignBinariesSkipsNonDarwin_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -31,7 +32,7 @@ func TestSignBinaries_Good_SkipsNonDarwin(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignBinaries_Good_DisabledConfig(t *testing.T) {
|
func TestSigning_SignBinariesDisabledConfig_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -48,7 +49,7 @@ func TestSignBinaries_Good_DisabledConfig(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignBinaries_Good_SkipsOnNonMacOS(t *testing.T) {
|
func TestSigning_SignBinariesSkipsOnNonMacOS_Good(t *testing.T) {
|
||||||
if runtime.GOOS == "darwin" {
|
if runtime.GOOS == "darwin" {
|
||||||
t.Skip("Skipping on macOS - this tests non-macOS behavior")
|
t.Skip("Skipping on macOS - this tests non-macOS behavior")
|
||||||
}
|
}
|
||||||
|
|
@ -72,7 +73,7 @@ func TestSignBinaries_Good_SkipsOnNonMacOS(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNotarizeBinaries_Good_DisabledConfig(t *testing.T) {
|
func TestSigning_NotarizeBinariesDisabledConfig_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -89,7 +90,7 @@ func TestNotarizeBinaries_Good_DisabledConfig(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNotarizeBinaries_Good_NotarizeDisabled(t *testing.T) {
|
func TestSigning_NotarizeBinariesNotarizeDisabled_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -109,7 +110,7 @@ func TestNotarizeBinaries_Good_NotarizeDisabled(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignChecksums_Good_SkipsNoKey(t *testing.T) {
|
func TestSigning_SignChecksumsSkipsNoKey_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -126,7 +127,7 @@ func TestSignChecksums_Good_SkipsNoKey(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignChecksums_Good_Disabled(t *testing.T) {
|
func TestSigning_SignChecksumsDisabled_Good(t *testing.T) {
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
|
|
@ -139,12 +140,12 @@ func TestSignChecksums_Good_Disabled(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDefaultSignConfig(t *testing.T) {
|
func TestSigning_DefaultSignConfig_Good(t *testing.T) {
|
||||||
cfg := DefaultSignConfig()
|
cfg := DefaultSignConfig()
|
||||||
assert.True(t, cfg.Enabled)
|
assert.True(t, cfg.Enabled)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignConfig_ExpandEnv(t *testing.T) {
|
func TestSigning_SignConfigExpandEnv_Good(t *testing.T) {
|
||||||
t.Setenv("TEST_KEY", "ABC")
|
t.Setenv("TEST_KEY", "ABC")
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
GPG: GPGConfig{Key: "$TEST_KEY"},
|
GPG: GPGConfig{Key: "$TEST_KEY"},
|
||||||
|
|
@ -153,20 +154,27 @@ func TestSignConfig_ExpandEnv(t *testing.T) {
|
||||||
assert.Equal(t, "ABC", cfg.GPG.Key)
|
assert.Equal(t, "ABC", cfg.GPG.Key)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWindowsSigner_Good(t *testing.T) {
|
func TestSigning_WindowsSigner_Good(t *testing.T) {
|
||||||
fs := io.Local
|
fs := io.Local
|
||||||
s := NewWindowsSigner(WindowsConfig{})
|
s := NewWindowsSigner(WindowsConfig{Certificate: "cert.pfx"})
|
||||||
assert.Equal(t, "signtool", s.Name())
|
assert.Equal(t, "signtool", s.Name())
|
||||||
assert.False(t, s.Available())
|
|
||||||
assert.NoError(t, s.Sign(context.Background(), fs, "test.exe"))
|
if runtime.GOOS != "windows" {
|
||||||
|
assert.False(t, s.Available())
|
||||||
|
assert.Error(t, s.Sign(context.Background(), fs, "test.exe"))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// On Windows, availability depends on the SDK toolchain being installed.
|
||||||
|
_ = s.Available()
|
||||||
}
|
}
|
||||||
|
|
||||||
// mockSigner is a test double that records calls to Sign.
|
// mockSigner is a test double that records calls to Sign.
|
||||||
type mockSigner struct {
|
type mockSigner struct {
|
||||||
name string
|
name string
|
||||||
available bool
|
available bool
|
||||||
signedPaths []string
|
signedPaths []string
|
||||||
signError error
|
signError error
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *mockSigner) Name() string {
|
func (m *mockSigner) Name() string {
|
||||||
|
|
@ -185,7 +193,7 @@ func (m *mockSigner) Sign(ctx context.Context, fs io.Medium, path string) error
|
||||||
// Verify mockSigner implements Signer
|
// Verify mockSigner implements Signer
|
||||||
var _ Signer = (*mockSigner)(nil)
|
var _ Signer = (*mockSigner)(nil)
|
||||||
|
|
||||||
func TestSignBinaries_Good_MockSigner(t *testing.T) {
|
func TestSigning_SignBinariesMockSigner_Good(t *testing.T) {
|
||||||
t.Run("signs only darwin artifacts", func(t *testing.T) {
|
t.Run("signs only darwin artifacts", func(t *testing.T) {
|
||||||
artifacts := []Artifact{
|
artifacts := []Artifact{
|
||||||
{Path: "/dist/linux_amd64/myapp", OS: "linux", Arch: "amd64"},
|
{Path: "/dist/linux_amd64/myapp", OS: "linux", Arch: "amd64"},
|
||||||
|
|
@ -230,7 +238,39 @@ func TestSignBinaries_Good_MockSigner(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSignChecksums_Good_MockSigner(t *testing.T) {
|
func TestSigning_signArtifactsWithSigner_Good(t *testing.T) {
|
||||||
|
signer := &mockSigner{name: "mock", available: true}
|
||||||
|
artifacts := []Artifact{
|
||||||
|
{Path: "/dist/linux_amd64/myapp", OS: "linux", Arch: "amd64"},
|
||||||
|
{Path: "/dist/windows_amd64/myapp.exe", OS: "windows", Arch: "amd64"},
|
||||||
|
{Path: "/dist/windows_arm64/myapp.exe", OS: "windows", Arch: "arm64"},
|
||||||
|
}
|
||||||
|
|
||||||
|
err := signArtifactsWithSigner(context.Background(), io.Local, signer, "windows", artifacts)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, []string{"/dist/windows_amd64/myapp.exe", "/dist/windows_arm64/myapp.exe"}, signer.signedPaths)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSigning_ResolveSigntoolCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := fallbackDir + "/signtool.exe"
|
||||||
|
require.NoError(t, io.Local.Write(fallbackPath, "#!/bin/sh\nexit 0\n"))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveSigntoolCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSigning_ResolveSigntoolCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
_, err := resolveSigntoolCli(t.TempDir() + "/missing-signtool.exe")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "signtool tool not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSigning_SignChecksumsMockSigner_Good(t *testing.T) {
|
||||||
t.Run("skips when GPG key is empty", func(t *testing.T) {
|
t.Run("skips when GPG key is empty", func(t *testing.T) {
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
|
|
@ -252,7 +292,7 @@ func TestSignChecksums_Good_MockSigner(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNotarizeBinaries_Good_MockSigner(t *testing.T) {
|
func TestSigning_NotarizeBinariesMockSigner_Good(t *testing.T) {
|
||||||
t.Run("skips when notarize is false", func(t *testing.T) {
|
t.Run("skips when notarize is false", func(t *testing.T) {
|
||||||
cfg := SignConfig{
|
cfg := SignConfig{
|
||||||
Enabled: true,
|
Enabled: true,
|
||||||
|
|
@ -292,7 +332,7 @@ func TestNotarizeBinaries_Good_MockSigner(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExpandEnv_Good(t *testing.T) {
|
func TestSigning_ExpandEnv_Good(t *testing.T) {
|
||||||
t.Run("expands all config fields", func(t *testing.T) {
|
t.Run("expands all config fields", func(t *testing.T) {
|
||||||
t.Setenv("TEST_GPG_KEY", "GPG123")
|
t.Setenv("TEST_GPG_KEY", "GPG123")
|
||||||
t.Setenv("TEST_IDENTITY", "Developer ID Application: Test")
|
t.Setenv("TEST_IDENTITY", "Developer ID Application: Test")
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,16 @@ package signing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"runtime"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// WindowsSigner signs binaries using Windows signtool (placeholder).
|
// WindowsSigner signs binaries using Windows signtool.
|
||||||
|
//
|
||||||
|
// s := signing.NewWindowsSigner(cfg.Windows)
|
||||||
type WindowsSigner struct {
|
type WindowsSigner struct {
|
||||||
config WindowsConfig
|
config WindowsConfig
|
||||||
}
|
}
|
||||||
|
|
@ -15,22 +20,88 @@ type WindowsSigner struct {
|
||||||
var _ Signer = (*WindowsSigner)(nil)
|
var _ Signer = (*WindowsSigner)(nil)
|
||||||
|
|
||||||
// NewWindowsSigner creates a new Windows signer.
|
// NewWindowsSigner creates a new Windows signer.
|
||||||
|
//
|
||||||
|
// s := signing.NewWindowsSigner(cfg.Windows)
|
||||||
func NewWindowsSigner(cfg WindowsConfig) *WindowsSigner {
|
func NewWindowsSigner(cfg WindowsConfig) *WindowsSigner {
|
||||||
return &WindowsSigner{config: cfg}
|
return &WindowsSigner{config: cfg}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns "signtool".
|
// Name returns "signtool".
|
||||||
|
//
|
||||||
|
// name := s.Name() // → "signtool"
|
||||||
func (s *WindowsSigner) Name() string {
|
func (s *WindowsSigner) Name() string {
|
||||||
return "signtool"
|
return "signtool"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Available returns false (not yet implemented).
|
// Available checks if running on Windows with signtool and certificate configured.
|
||||||
|
//
|
||||||
|
// ok := s.Available() // → true if on Windows with certificate configured
|
||||||
func (s *WindowsSigner) Available() bool {
|
func (s *WindowsSigner) Available() bool {
|
||||||
return false
|
if runtime.GOOS != "windows" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if s.config.Certificate == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
_, err := resolveSigntoolCli()
|
||||||
|
return err == nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sign is a placeholder that does nothing.
|
// Sign signs a binary using signtool and a PFX certificate.
|
||||||
|
//
|
||||||
|
// err := s.Sign(ctx, io.Local, "dist/myapp.exe")
|
||||||
func (s *WindowsSigner) Sign(ctx context.Context, fs io.Medium, binary string) error {
|
func (s *WindowsSigner) Sign(ctx context.Context, fs io.Medium, binary string) error {
|
||||||
// TODO: Implement Windows signing
|
_ = fs
|
||||||
|
|
||||||
|
if !s.Available() {
|
||||||
|
if runtime.GOOS != "windows" {
|
||||||
|
return coreerr.E("signtool.Sign", "signtool is only available on Windows", nil)
|
||||||
|
}
|
||||||
|
if s.config.Certificate == "" {
|
||||||
|
return coreerr.E("signtool.Sign", "signtool certificate not configured", nil)
|
||||||
|
}
|
||||||
|
return coreerr.E("signtool.Sign", "signtool tool not found in PATH", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
signtoolCommand, err := resolveSigntoolCli()
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("signtool.Sign", "signtool tool not found in PATH", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
args := []string{
|
||||||
|
"sign",
|
||||||
|
"/f", s.config.Certificate,
|
||||||
|
"/fd", "sha256",
|
||||||
|
"/tr", "http://timestamp.digicert.com",
|
||||||
|
"/td", "sha256",
|
||||||
|
}
|
||||||
|
if s.config.Password != "" {
|
||||||
|
args = append(args, "/p", s.config.Password)
|
||||||
|
}
|
||||||
|
args = append(args, binary)
|
||||||
|
|
||||||
|
output, err := ax.CombinedOutput(ctx, "", nil, signtoolCommand, args...)
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("signtool.Sign", output, err)
|
||||||
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func resolveSigntoolCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
`C:\\Program Files (x86)\\Windows Kits\\10\\bin\\x64\\signtool.exe`,
|
||||||
|
`C:\\Program Files (x86)\\Windows Kits\\10\\bin\\x86\\signtool.exe`,
|
||||||
|
`C:\\Program Files\\Windows Kits\\10\\bin\\x64\\signtool.exe`,
|
||||||
|
`C:\\Program Files\\Windows Kits\\10\\bin\\x86\\signtool.exe`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("signtool", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("signtool.resolveSigntoolCli", "signtool tool not found. Install the Windows SDK.", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
|
||||||
146
pkg/build/templates/release.yml
Normal file
146
pkg/build/templates/release.yml
Normal file
|
|
@ -0,0 +1,146 @@
|
||||||
|
name: Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
working-directory:
|
||||||
|
description: Directory that contains the Core project.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: .
|
||||||
|
core-version:
|
||||||
|
description: Core CLI version to install.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: latest
|
||||||
|
version:
|
||||||
|
description: Release version override.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
draft:
|
||||||
|
description: Mark the release as a draft.
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
prerelease:
|
||||||
|
description: Mark the release as a pre-release.
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
archive-format:
|
||||||
|
description: Archive compression format for release artefacts.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
working-directory:
|
||||||
|
description: Directory that contains the Core project.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: .
|
||||||
|
core-version:
|
||||||
|
description: Core CLI version to install.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: latest
|
||||||
|
version:
|
||||||
|
description: Release version override.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
draft:
|
||||||
|
description: Mark the release as a draft.
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
prerelease:
|
||||||
|
description: Mark the release as a pre-release.
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
archive-format:
|
||||||
|
description: Archive compression format for release artefacts.
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ""
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
name: Build ${{ matrix.target }}
|
||||||
|
runs-on: ${{ matrix.runner }}
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- target: linux/amd64
|
||||||
|
artifact_name: linux-amd64
|
||||||
|
runner: ubuntu-latest
|
||||||
|
- target: linux/arm64
|
||||||
|
artifact_name: linux-arm64
|
||||||
|
runner: ubuntu-latest
|
||||||
|
- target: darwin/amd64
|
||||||
|
artifact_name: darwin-amd64
|
||||||
|
runner: macos-13
|
||||||
|
- target: darwin/arm64
|
||||||
|
artifact_name: darwin-arm64
|
||||||
|
runner: macos-14
|
||||||
|
- target: windows/amd64
|
||||||
|
artifact_name: windows-amd64
|
||||||
|
runner: windows-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Install Core CLI
|
||||||
|
uses: dAppCore/build@v4
|
||||||
|
with:
|
||||||
|
command: build
|
||||||
|
working-directory: ${{ inputs.working-directory }}
|
||||||
|
core-version: ${{ inputs.core-version }}
|
||||||
|
|
||||||
|
- name: Build release artefacts
|
||||||
|
working-directory: ${{ inputs.working-directory }}
|
||||||
|
shell: bash
|
||||||
|
run: core build --targets "${{ matrix.target }}" --archive --checksum --archive-format "${{ inputs.archive-format }}"
|
||||||
|
|
||||||
|
- name: Upload artefacts
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: release-${{ matrix.artifact_name }}
|
||||||
|
path: ${{ inputs.working-directory }}/dist/**
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
|
release:
|
||||||
|
name: Publish release
|
||||||
|
needs: build
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Download build artefacts
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
pattern: release-*
|
||||||
|
path: ${{ inputs.working-directory }}/dist
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Install Core CLI
|
||||||
|
uses: dAppCore/build@v4
|
||||||
|
with:
|
||||||
|
command: ci
|
||||||
|
working-directory: ${{ inputs.working-directory }}
|
||||||
|
core-version: ${{ inputs.core-version }}
|
||||||
|
version: ${{ inputs.version }}
|
||||||
|
draft: ${{ inputs.draft }}
|
||||||
|
prerelease: ${{ inputs.prerelease }}
|
||||||
|
we-are-go-for-launch: true
|
||||||
1
pkg/build/testdata/docs-project/mkdocs.yml
vendored
Normal file
1
pkg/build/testdata/docs-project/mkdocs.yml
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{}
|
||||||
1
pkg/build/testdata/monorepo-project/apps/web/package.json
vendored
Normal file
1
pkg/build/testdata/monorepo-project/apps/web/package.json
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{}
|
||||||
1
pkg/build/testdata/python-project/pyproject.toml
vendored
Normal file
1
pkg/build/testdata/python-project/pyproject.toml
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{}
|
||||||
1
pkg/build/testdata/rust-project/Cargo.toml
vendored
Normal file
1
pkg/build/testdata/rust-project/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{}
|
||||||
526
pkg/build/workflow.go
Normal file
526
pkg/build/workflow.go
Normal file
|
|
@ -0,0 +1,526 @@
|
||||||
|
// Package build provides project type detection and cross-compilation for the Core build system.
|
||||||
|
// This file exposes the release workflow generator and its path-resolution helpers.
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"embed"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
io_interface "dappco.re/go/core/io"
|
||||||
|
coreerr "dappco.re/go/core/log"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:embed templates/release.yml
|
||||||
|
var releaseWorkflowTemplate embed.FS
|
||||||
|
|
||||||
|
// DefaultReleaseWorkflowPath is the conventional output path for the release workflow.
|
||||||
|
//
|
||||||
|
// path := build.DefaultReleaseWorkflowPath // ".github/workflows/release.yml"
|
||||||
|
const DefaultReleaseWorkflowPath = ".github/workflows/release.yml"
|
||||||
|
|
||||||
|
// DefaultReleaseWorkflowFileName is the workflow filename used when a directory-style
|
||||||
|
// output path is supplied.
|
||||||
|
const DefaultReleaseWorkflowFileName = "release.yml"
|
||||||
|
|
||||||
|
// WriteReleaseWorkflow writes the embedded release workflow template to outputPath.
|
||||||
|
//
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, "") // writes .github/workflows/release.yml
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, "ci") // writes ./ci/release.yml under the project root
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, "./ci") // writes ./ci/release.yml under the project root
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, ".github/workflows") // writes .github/workflows/release.yml
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, "ci/release.yml") // writes ./ci/release.yml under the project root
|
||||||
|
// build.WriteReleaseWorkflow(io.Local, "/tmp/repo/.github/workflows/release.yml") // writes the absolute path unchanged
|
||||||
|
func WriteReleaseWorkflow(filesystem io_interface.Medium, outputPath string) error {
|
||||||
|
if filesystem == nil {
|
||||||
|
return coreerr.E("build.WriteReleaseWorkflow", "filesystem medium is required", nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
outputPath = cleanWorkflowInput(outputPath)
|
||||||
|
if outputPath == "" {
|
||||||
|
outputPath = DefaultReleaseWorkflowPath
|
||||||
|
}
|
||||||
|
|
||||||
|
if isWorkflowDirectoryInput(outputPath) || filesystem.IsDir(outputPath) {
|
||||||
|
outputPath = ax.Join(outputPath, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := releaseWorkflowTemplate.ReadFile("templates/release.yml")
|
||||||
|
if err != nil {
|
||||||
|
return coreerr.E("build.WriteReleaseWorkflow", "failed to read embedded workflow template", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := filesystem.EnsureDir(ax.Dir(outputPath)); err != nil {
|
||||||
|
return coreerr.E("build.WriteReleaseWorkflow", "failed to create release workflow directory", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := filesystem.Write(outputPath, string(content)); err != nil {
|
||||||
|
return coreerr.E("build.WriteReleaseWorkflow", "failed to write release workflow", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReleaseWorkflowPath joins a project directory with the conventional workflow path.
|
||||||
|
//
|
||||||
|
// build.ReleaseWorkflowPath("/home/user/project") // /home/user/project/.github/workflows/release.yml
|
||||||
|
func ReleaseWorkflowPath(projectDir string) string {
|
||||||
|
return ax.Join(projectDir, DefaultReleaseWorkflowPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowOutputPathWithMedium resolves the workflow output path
|
||||||
|
// relative to the project directory and treats an existing directory as a
|
||||||
|
// workflow directory even when the caller omits a trailing slash.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathWithMedium(io.Local, "/tmp/project", "ci") // /tmp/project/ci/release.yml when /tmp/project/ci exists
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathWithMedium(io.Local, "/tmp/project", ".github/workflows") // /tmp/project/.github/workflows/release.yml
|
||||||
|
func ResolveReleaseWorkflowOutputPathWithMedium(filesystem io_interface.Medium, projectDir, outputPath string) string {
|
||||||
|
outputPath = cleanWorkflowInput(outputPath)
|
||||||
|
if outputPath == "" {
|
||||||
|
return ReleaseWorkflowPath(projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
resolved := ResolveReleaseWorkflowPath(projectDir, outputPath)
|
||||||
|
if filesystem != nil && filesystem.IsDir(resolved) {
|
||||||
|
return ax.Join(resolved, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolved
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowPath resolves the workflow output path relative to the
|
||||||
|
// project directory when the caller supplies a relative path.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", "") // /tmp/project/.github/workflows/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", "./ci") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", ".github/workflows") // /tmp/project/.github/workflows/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", "ci/release.yml") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", "ci") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowPath("/tmp/project", "/tmp/release.yml") // /tmp/release.yml
|
||||||
|
func ResolveReleaseWorkflowPath(projectDir, outputPath string) string {
|
||||||
|
outputPath = cleanWorkflowInput(outputPath)
|
||||||
|
if outputPath == "" {
|
||||||
|
return ReleaseWorkflowPath(projectDir)
|
||||||
|
}
|
||||||
|
if isWorkflowDirectoryPath(outputPath) || isWorkflowDirectoryInput(outputPath) {
|
||||||
|
if ax.IsAbs(outputPath) {
|
||||||
|
return ax.Join(outputPath, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
return ax.Join(projectDir, outputPath, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
if !ax.IsAbs(outputPath) {
|
||||||
|
return ax.Join(projectDir, outputPath)
|
||||||
|
}
|
||||||
|
return outputPath
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowInputPath resolves a workflow target from the CLI/API
|
||||||
|
// `path` field and its `output` alias.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowInputPath("/tmp/project", "", "") // /tmp/project/.github/workflows/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPath("/tmp/project", "./ci", "") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release.yml", "") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPath("/tmp/project", "", "ci/release.yml") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release.yml", "ci.yml") // error
|
||||||
|
func ResolveReleaseWorkflowInputPath(projectDir, pathInput, outputPathInput string) (string, error) {
|
||||||
|
return resolveReleaseWorkflowInputPathPair(
|
||||||
|
pathInput,
|
||||||
|
outputPathInput,
|
||||||
|
func(input string) string {
|
||||||
|
return resolveReleaseWorkflowInputPath(projectDir, input, nil)
|
||||||
|
},
|
||||||
|
"build.ResolveReleaseWorkflowInputPath",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowInputPathWithMedium resolves the workflow path and
|
||||||
|
// treats an existing directory as a directory even when the caller omits a
|
||||||
|
// trailing slash.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowInputPathWithMedium(io.Local, "/tmp/project", "ci", "") // /tmp/project/ci/release.yml when /tmp/project/ci exists
|
||||||
|
// build.ResolveReleaseWorkflowInputPathWithMedium(io.Local, "/tmp/project", "./ci", "") // /tmp/project/ci/release.yml
|
||||||
|
func ResolveReleaseWorkflowInputPathWithMedium(filesystem io_interface.Medium, projectDir, pathInput, outputPathInput string) (string, error) {
|
||||||
|
return resolveReleaseWorkflowInputPathPair(
|
||||||
|
pathInput,
|
||||||
|
outputPathInput,
|
||||||
|
func(input string) string {
|
||||||
|
return resolveReleaseWorkflowInputPath(projectDir, input, filesystem)
|
||||||
|
},
|
||||||
|
"build.ResolveReleaseWorkflowInputPathWithMedium",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowInputPathAliases resolves the workflow path across the
|
||||||
|
// public path aliases and treats an existing directory as a directory even
|
||||||
|
// when the caller omits a trailing slash.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowInputPathAliases(io.Local, "/tmp/project", "ci", "", "", "") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPathAliases(io.Local, "/tmp/project", "", "ci", "", "") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPathAliases(io.Local, "/tmp/project", "", "", "ci", "") // /tmp/project/ci/release.yml
|
||||||
|
// build.ResolveReleaseWorkflowInputPathAliases(io.Local, "/tmp/project", "", "", "", "ci") // /tmp/project/ci/release.yml
|
||||||
|
func ResolveReleaseWorkflowInputPathAliases(filesystem io_interface.Medium, projectDir, pathInput, workflowPathInput, workflowPathSnakeInput, workflowPathHyphenInput string) (string, error) {
|
||||||
|
return resolveReleaseWorkflowInputPathAliasSet(
|
||||||
|
filesystem,
|
||||||
|
projectDir,
|
||||||
|
"path",
|
||||||
|
pathInput,
|
||||||
|
workflowPathInput,
|
||||||
|
workflowPathSnakeInput,
|
||||||
|
workflowPathHyphenInput,
|
||||||
|
"build.ResolveReleaseWorkflowInputPathAliases",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowOutputPath("ci/release.yml", "", "") // "ci/release.yml"
|
||||||
|
// ResolveReleaseWorkflowOutputPath("", "ci/release.yml", "") // "ci/release.yml"
|
||||||
|
// ResolveReleaseWorkflowOutputPath("", "", "ci/release.yml") // "ci/release.yml"
|
||||||
|
// ResolveReleaseWorkflowOutputPath("ci/release.yml", "ops.yml", "") // error
|
||||||
|
func ResolveReleaseWorkflowOutputPath(outputPathInput, outputPathSnakeInput, legacyOutputInput string) (string, error) {
|
||||||
|
return ResolveReleaseWorkflowOutputPathAliases(
|
||||||
|
outputPathInput,
|
||||||
|
"",
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowOutputPathAliases resolves every public workflow output
|
||||||
|
// alias across the CLI, API, and UI layers.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliases("ci/release.yml", "", "", "", "", "", "", "", "") // "ci/release.yml"
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliases("", "ci/release.yml", "", "", "", "", "", "", "") // "ci/release.yml"
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "ci/release.yml", "", "", "", "") // "ci/release.yml"
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "", "ci/release.yml", "", "", "") // "ci/release.yml"
|
||||||
|
func ResolveReleaseWorkflowOutputPathAliases(
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput string,
|
||||||
|
) (string, error) {
|
||||||
|
return resolveReleaseWorkflowOutputAliasSet(
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
"build.ResolveReleaseWorkflowOutputPathAliases",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowOutputPathAliasesInProject resolves the workflow output
|
||||||
|
// aliases relative to a project directory before checking for conflicts.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliasesInProject("/tmp/project", "ci/release.yml", "", "", "", "", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliasesInProject("/tmp/project", "", "", "", "", "/tmp/project/ci/release.yml", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
func ResolveReleaseWorkflowOutputPathAliasesInProject(
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput string,
|
||||||
|
) (string, error) {
|
||||||
|
return ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(
|
||||||
|
nil,
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium resolves the
|
||||||
|
// workflow output aliases relative to a project directory and uses the
|
||||||
|
// provided filesystem medium to treat existing directories as workflow
|
||||||
|
// directories even when callers omit a trailing separator.
|
||||||
|
//
|
||||||
|
// build.ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(io.Local, "/tmp/project", "", "", "", "", "/tmp/project/ci", "", "", "") // "/tmp/project/ci/release.yml"
|
||||||
|
func ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(
|
||||||
|
filesystem io_interface.Medium,
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput string,
|
||||||
|
) (string, error) {
|
||||||
|
return resolveReleaseWorkflowOutputAliasSetInProject(
|
||||||
|
filesystem,
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
"build.ResolveReleaseWorkflowOutputPathAliasesInProject",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowInputPathPair resolves the workflow path from the path
|
||||||
|
// and output aliases, rejecting conflicting values and preferring explicit
|
||||||
|
// inputs over the default.
|
||||||
|
func resolveReleaseWorkflowInputPathPair(pathInput, outputPathInput string, resolve func(string) string, errorName string) (string, error) {
|
||||||
|
pathInput = cleanWorkflowInput(pathInput)
|
||||||
|
outputPathInput = cleanWorkflowInput(outputPathInput)
|
||||||
|
|
||||||
|
if pathInput != "" && outputPathInput != "" {
|
||||||
|
resolvedPath := resolve(pathInput)
|
||||||
|
resolvedOutput := resolve(outputPathInput)
|
||||||
|
if resolvedPath != resolvedOutput {
|
||||||
|
return "", coreerr.E(errorName, "path and output specify different locations", nil)
|
||||||
|
}
|
||||||
|
return resolvedPath, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if pathInput != "" {
|
||||||
|
return resolve(pathInput), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if outputPathInput != "" {
|
||||||
|
return resolve(outputPathInput), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolve(""), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowOutputAliasSet resolves a workflow output alias set by
|
||||||
|
// trimming whitespace, rejecting conflicts, and returning the first non-empty
|
||||||
|
// value when aliases agree.
|
||||||
|
func resolveReleaseWorkflowOutputAliasSet(
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
errorName string,
|
||||||
|
) (string, error) {
|
||||||
|
values := []string{
|
||||||
|
normalizeWorkflowOutputAlias(outputPathInput),
|
||||||
|
normalizeWorkflowOutputAlias(outputPathHyphenInput),
|
||||||
|
normalizeWorkflowOutputAlias(outputPathSnakeInput),
|
||||||
|
normalizeWorkflowOutputAlias(legacyOutputInput),
|
||||||
|
normalizeWorkflowOutputAlias(workflowOutputPathInput),
|
||||||
|
normalizeWorkflowOutputAlias(workflowOutputSnakeInput),
|
||||||
|
normalizeWorkflowOutputAlias(workflowOutputHyphenInput),
|
||||||
|
normalizeWorkflowOutputAlias(workflowOutputPathSnakeInput),
|
||||||
|
normalizeWorkflowOutputAlias(workflowOutputPathHyphenInput),
|
||||||
|
}
|
||||||
|
|
||||||
|
var resolved string
|
||||||
|
for _, value := range values {
|
||||||
|
if value == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if resolved == "" {
|
||||||
|
resolved = value
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if resolved != value {
|
||||||
|
return "", coreerr.E(errorName, "output aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolved, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowOutputAliasSetInProject resolves workflow output aliases
|
||||||
|
// against a project directory so relative and absolute paths can be compared.
|
||||||
|
func resolveReleaseWorkflowOutputAliasSetInProject(
|
||||||
|
filesystem io_interface.Medium,
|
||||||
|
projectDir,
|
||||||
|
outputPathInput,
|
||||||
|
outputPathHyphenInput,
|
||||||
|
outputPathSnakeInput,
|
||||||
|
legacyOutputInput,
|
||||||
|
workflowOutputPathInput,
|
||||||
|
workflowOutputSnakeInput,
|
||||||
|
workflowOutputHyphenInput,
|
||||||
|
workflowOutputPathSnakeInput,
|
||||||
|
workflowOutputPathHyphenInput,
|
||||||
|
errorName string,
|
||||||
|
) (string, error) {
|
||||||
|
values := []string{
|
||||||
|
cleanWorkflowInput(outputPathInput),
|
||||||
|
cleanWorkflowInput(outputPathHyphenInput),
|
||||||
|
cleanWorkflowInput(outputPathSnakeInput),
|
||||||
|
cleanWorkflowInput(legacyOutputInput),
|
||||||
|
cleanWorkflowInput(workflowOutputPathInput),
|
||||||
|
cleanWorkflowInput(workflowOutputSnakeInput),
|
||||||
|
cleanWorkflowInput(workflowOutputHyphenInput),
|
||||||
|
cleanWorkflowInput(workflowOutputPathSnakeInput),
|
||||||
|
cleanWorkflowInput(workflowOutputPathHyphenInput),
|
||||||
|
}
|
||||||
|
|
||||||
|
var resolved string
|
||||||
|
for _, value := range values {
|
||||||
|
if value == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
candidate := ResolveReleaseWorkflowOutputPathWithMedium(filesystem, projectDir, value)
|
||||||
|
if resolved == "" {
|
||||||
|
resolved = candidate
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if resolved != candidate {
|
||||||
|
return "", coreerr.E(errorName, "output aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolved, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizeWorkflowOutputAlias canonicalises a workflow output alias for comparison.
|
||||||
|
func normalizeWorkflowOutputAlias(path string) string {
|
||||||
|
path = cleanWorkflowInput(path)
|
||||||
|
if path == "" {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return ax.Clean(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowInputPath resolves one workflow input into a file path.
|
||||||
|
//
|
||||||
|
// resolveReleaseWorkflowInputPath("/tmp/project", "ci", io.Local) // /tmp/project/ci/release.yml
|
||||||
|
func resolveReleaseWorkflowInputPath(projectDir, input string, medium io_interface.Medium) string {
|
||||||
|
input = cleanWorkflowInput(input)
|
||||||
|
if input == "" {
|
||||||
|
return ReleaseWorkflowPath(projectDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
if isWorkflowDirectoryInput(input) {
|
||||||
|
if ax.IsAbs(input) {
|
||||||
|
return ax.Join(input, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
return ax.Join(projectDir, input, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
|
||||||
|
resolved := ResolveReleaseWorkflowPath(projectDir, input)
|
||||||
|
if medium != nil && medium.IsDir(resolved) {
|
||||||
|
return ax.Join(resolved, DefaultReleaseWorkflowFileName)
|
||||||
|
}
|
||||||
|
return resolved
|
||||||
|
}
|
||||||
|
|
||||||
|
// resolveReleaseWorkflowInputPathAliasSet resolves a workflow path from a set
|
||||||
|
// of aliases and rejects conflicting values.
|
||||||
|
func resolveReleaseWorkflowInputPathAliasSet(filesystem io_interface.Medium, projectDir, fieldLabel, primaryInput, secondaryInput, tertiaryInput, quaternaryInput, errorName string) (string, error) {
|
||||||
|
values := []string{
|
||||||
|
cleanWorkflowInput(primaryInput),
|
||||||
|
cleanWorkflowInput(secondaryInput),
|
||||||
|
cleanWorkflowInput(tertiaryInput),
|
||||||
|
cleanWorkflowInput(quaternaryInput),
|
||||||
|
}
|
||||||
|
|
||||||
|
var resolved string
|
||||||
|
for _, value := range values {
|
||||||
|
if value == "" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
candidate := resolveReleaseWorkflowInputPath(projectDir, value, filesystem)
|
||||||
|
if resolved == "" {
|
||||||
|
resolved = candidate
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if resolved != candidate {
|
||||||
|
return "", coreerr.E(errorName, fieldLabel+" aliases specify different locations", nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolved, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// isWorkflowDirectoryPath reports whether a workflow path is explicitly marked
|
||||||
|
// as a directory with a trailing separator.
|
||||||
|
func isWorkflowDirectoryPath(path string) bool {
|
||||||
|
path = cleanWorkflowInput(path)
|
||||||
|
if path == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if path == "." || path == "./" || path == ".\\" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
last := path[len(path)-1]
|
||||||
|
return last == '/' || last == '\\'
|
||||||
|
}
|
||||||
|
|
||||||
|
// isWorkflowDirectoryInput reports whether a workflow input should be treated
|
||||||
|
// as a directory target. This includes explicit directory paths and bare names
|
||||||
|
// without path separators or a file extension, plus current-directory-prefixed
|
||||||
|
// directory targets like "./ci" and the conventional ".github/workflows" path.
|
||||||
|
func isWorkflowDirectoryInput(path string) bool {
|
||||||
|
path = cleanWorkflowInput(path)
|
||||||
|
if isWorkflowDirectoryPath(path) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if path == "" || ax.Ext(path) != "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !strings.ContainsAny(path, "/\\") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if ax.Base(path) == "workflows" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(path, "./") || strings.HasPrefix(path, ".\\") {
|
||||||
|
trimmed := strings.TrimPrefix(strings.TrimPrefix(path, "./"), ".\\")
|
||||||
|
if trimmed == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if ax.Base(trimmed) == "workflows" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return !strings.ContainsAny(trimmed, "/\\")
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// cleanWorkflowInput strips leading and trailing whitespace from a
// caller-supplied workflow path so downstream checks see a canonical value.
func cleanWorkflowInput(path string) string {
	trimmed := strings.TrimSpace(path)
	return trimmed
}
|
||||||
523
pkg/build/workflow_test.go
Normal file
523
pkg/build/workflow_test.go
Normal file
|
|
@ -0,0 +1,523 @@
|
||||||
|
package build
|
||||||
|
|
||||||
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestWorkflow_WriteReleaseWorkflow_Good exercises the happy paths of
// WriteReleaseWorkflow: the default target, custom and directory-style
// targets, whitespace trimming, and real-filesystem directory creation.
func TestWorkflow_WriteReleaseWorkflow_Good(t *testing.T) {
	t.Run("writes the embedded template to the default path", func(t *testing.T) {
		fs := io.NewMockMedium()

		err := WriteReleaseWorkflow(fs, "")
		require.NoError(t, err)

		content, err := fs.Read(DefaultReleaseWorkflowPath)
		require.NoError(t, err)

		template, err := releaseWorkflowTemplate.ReadFile("templates/release.yml")
		require.NoError(t, err)

		// Written content must match the embedded template byte for byte,
		// and carry the markers the release pipeline depends on.
		assert.Equal(t, string(template), content)
		assert.Contains(t, content, "workflow_call:")
		assert.Contains(t, content, "workflow_dispatch:")
		assert.Contains(t, content, "core build --targets")
		assert.Contains(t, content, "--archive-format")
		assert.Contains(t, content, "actions/download-artifact@v4")
		assert.Contains(t, content, "command: ci")
		assert.Contains(t, content, "we-are-go-for-launch: true")
	})

	t.Run("writes to a custom path", func(t *testing.T) {
		fs := io.NewMockMedium()

		err := WriteReleaseWorkflow(fs, "custom/workflow.yml")
		require.NoError(t, err)

		content, err := fs.Read("custom/workflow.yml")
		require.NoError(t, err)
		assert.NotEmpty(t, content)
	})

	t.Run("trims surrounding whitespace from the output path", func(t *testing.T) {
		fs := io.NewMockMedium()

		err := WriteReleaseWorkflow(fs, " ci ")
		require.NoError(t, err)

		// " ci " is trimmed to "ci" and treated as a directory target.
		content, err := fs.Read("ci/release.yml")
		require.NoError(t, err)
		assert.NotEmpty(t, content)
	})

	t.Run("writes release.yml for a bare directory-style path", func(t *testing.T) {
		fs := io.NewMockMedium()

		err := WriteReleaseWorkflow(fs, "ci")
		require.NoError(t, err)

		content, err := fs.Read("ci/release.yml")
		require.NoError(t, err)
		assert.NotEmpty(t, content)
	})

	t.Run("writes release.yml inside an existing directory", func(t *testing.T) {
		// Real filesystem: an existing directory without a trailing slash
		// must still be detected as a directory target.
		projectDir := t.TempDir()
		outputDir := ax.Join(projectDir, "ci")
		require.NoError(t, ax.MkdirAll(outputDir, 0o755))

		err := WriteReleaseWorkflow(io.Local, outputDir)
		require.NoError(t, err)

		content, err := io.Local.Read(ax.Join(outputDir, DefaultReleaseWorkflowFileName))
		require.NoError(t, err)

		template, err := releaseWorkflowTemplate.ReadFile("templates/release.yml")
		require.NoError(t, err)

		assert.Equal(t, string(template), content)
	})

	t.Run("writes release.yml for directory-style output paths", func(t *testing.T) {
		fs := io.NewMockMedium()

		err := WriteReleaseWorkflow(fs, "ci/")
		require.NoError(t, err)

		content, err := fs.Read("ci/release.yml")
		require.NoError(t, err)
		assert.NotEmpty(t, content)
	})

	t.Run("creates parent directories on a real filesystem", func(t *testing.T) {
		projectDir := t.TempDir()
		path := ax.Join(projectDir, ".github", "workflows", "release.yml")

		err := WriteReleaseWorkflow(io.Local, path)
		require.NoError(t, err)

		content, err := io.Local.Read(path)
		require.NoError(t, err)

		template, err := releaseWorkflowTemplate.ReadFile("templates/release.yml")
		require.NoError(t, err)

		assert.Equal(t, string(template), content)
	})
}
|
||||||
|
|
||||||
|
func TestWorkflow_WriteReleaseWorkflow_Bad(t *testing.T) {
|
||||||
|
t.Run("rejects a nil filesystem medium", func(t *testing.T) {
|
||||||
|
err := WriteReleaseWorkflow(nil, "")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "filesystem medium is required")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ReleaseWorkflowPath_Good(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", ReleaseWorkflowPath("/tmp/project"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPathWithMedium_Good(t *testing.T) {
|
||||||
|
t.Run("treats an existing directory as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs["/tmp/project/ci"] = true
|
||||||
|
|
||||||
|
path := ResolveReleaseWorkflowOutputPathWithMedium(fs, "/tmp/project", "ci")
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("keeps explicit file paths unchanged", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path := ResolveReleaseWorkflowOutputPathWithMedium(fs, "/tmp/project", "ci/release.yml")
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowPath_Good(t *testing.T) {
|
||||||
|
t.Run("uses the conventional path when empty", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", ResolveReleaseWorkflowPath("/tmp/project", ""))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("joins relative paths to the project directory", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "ci/release.yml"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats bare relative directory names as directories", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "ci"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats current-directory-prefixed directory names as directories", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "./ci"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats the conventional workflows directory as a directory", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", ResolveReleaseWorkflowPath("/tmp/project", ".github/workflows"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats current-directory-prefixed workflows directories as directories", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "./.github/workflows"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("keeps nested extensionless paths as files", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release", ResolveReleaseWorkflowPath("/tmp/project", "ci/release"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats the current directory as a workflow directory", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "."))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats trailing-slash relative paths as directories", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "ci/"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("keeps absolute paths unchanged", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "/tmp/release.yml"))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats trailing-slash absolute paths as directories", func(t *testing.T) {
|
||||||
|
assert.Equal(t, "/tmp/workflows/release.yml", ResolveReleaseWorkflowPath("/tmp/project", "/tmp/workflows/"))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowInputPath_Good(t *testing.T) {
|
||||||
|
t.Run("uses the conventional path when both inputs are empty", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts path as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release.yml", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts bare directory-style path as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts current-directory-prefixed directory-style path as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "./ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the conventional workflows directory as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", ".github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts current-directory-prefixed workflows directories as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "./.github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("keeps nested extensionless paths as files", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the current directory as the primary input", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", ".", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts output as an alias for path", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("trims surrounding whitespace from inputs", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", " ci ", " ")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts matching path and output values", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release.yml", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts matching directory-style path and output values", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci/", "ci/")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowInputPath_Bad(t *testing.T) {
|
||||||
|
t.Run("rejects conflicting path and output values", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowInputPath("/tmp/project", "ci/release.yml", "ops/release.yml")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, path)
|
||||||
|
assert.Contains(t, err.Error(), "path and output specify different locations")
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowInputPathWithMedium_Good(t *testing.T) {
|
||||||
|
t.Run("treats an existing directory as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs["/tmp/project/ci"] = true
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats a bare directory-style path as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats a current-directory-prefixed directory-style path as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "./ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats the conventional workflows directory as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", ".github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats current-directory-prefixed workflows directories as workflow directories", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "./.github/workflows", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/.github/workflows/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("keeps a file path unchanged when the target is not a directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "ci/release.yml", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("normalizes matching directory aliases", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs["/tmp/project/ci"] = true
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", "ci", "ci/")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("trims surrounding whitespace before resolving", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs["/tmp/project/ci"] = true
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathWithMedium(fs, "/tmp/project", " ci ", " ")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowInputPathAliases_Good(t *testing.T) {
|
||||||
|
t.Run("accepts the preferred path input", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "ci", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the workflowPath alias", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "", "ci", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the workflow_path alias", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "", "", "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the workflow-path alias", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "", "", "", "ci")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("normalises matching aliases", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs["/tmp/project/ci"] = true
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "ci/", "./ci", "ci", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "/tmp/project/ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowInputPathAliases_Bad(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowInputPathAliases(fs, "/tmp/project", "ci/release.yml", "ops/release.yml", "", "")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, path)
|
||||||
|
assert.Contains(t, err.Error(), "path aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPath_Good(t *testing.T) {
|
||||||
|
t.Run("accepts the preferred output path", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the snake_case output path alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("", "ci/release.yml", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the hyphenated output path alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("", "ci/release.yml", "", "", "", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the legacy output alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("", "", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("trims surrounding whitespace from aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath(" ci/release.yml ", " ", " ")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts matching aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("ci/release.yml", "ci/release.yml", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("normalises equivalent path aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("ci/release.yml", "./ci/release.yml", "ci/release.yml")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPath_Bad(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPath("ci/release.yml", "ops/release.yml", "")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, path)
|
||||||
|
assert.Contains(t, err.Error(), "output aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPathAliases_Good(t *testing.T) {
|
||||||
|
t.Run("accepts workflowOutputPath aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "ci/release.yml", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the hyphenated workflowOutputPath alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "", "", "ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the workflow_output alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "", "ci/release.yml", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts the workflow-output alias", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("", "", "", "", "", "", "ci/release.yml", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("normalises matching workflow output aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("ci/release.yml", "", "", "./ci/release.yml", "ci/release.yml", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "ci/release.yml", path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPathAliasesInProject_Good(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
absolutePath := ax.Join(projectDir, "ci", "release.yml")
|
||||||
|
absoluteDirectory := ax.Join(projectDir, "ops")
|
||||||
|
require.NoError(t, ax.MkdirAll(absoluteDirectory, 0o755))
|
||||||
|
|
||||||
|
t.Run("accepts the preferred output path", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliasesInProject(projectDir, "ci/release.yml", "", "", "", "", "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, absolutePath, path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts an absolute workflow output alias equivalent to the project path", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliasesInProject(projectDir, "", "", "", "", absolutePath, "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, absolutePath, path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("accepts matching relative and absolute aliases", func(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliasesInProject(projectDir, "ci/release.yml", "", "", "", "", "", "", "", absolutePath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, absolutePath, path)
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("treats an existing absolute directory as a workflow directory", func(t *testing.T) {
|
||||||
|
fs := io.NewMockMedium()
|
||||||
|
fs.Dirs[absoluteDirectory] = true
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliasesInProjectWithMedium(fs, projectDir, "", "", "", "", absoluteDirectory, "", "", "", "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, ax.Join(absoluteDirectory, DefaultReleaseWorkflowFileName), path)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPathAliasesInProject_Bad(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliasesInProject(projectDir, "ci/release.yml", "", "", "", "", "", "", "", ax.Join(projectDir, "ops", "release.yml"))
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, path)
|
||||||
|
assert.Contains(t, err.Error(), "output aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestWorkflow_ResolveReleaseWorkflowOutputPathAliases_Bad(t *testing.T) {
|
||||||
|
path, err := ResolveReleaseWorkflowOutputPathAliases("ci/release.yml", "", "", "", "ops/release.yml", "", "", "", "")
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Empty(t, path)
|
||||||
|
assert.Contains(t, err.Error(), "output aliases specify different locations")
|
||||||
|
}
|
||||||
|
|
@ -4,18 +4,20 @@ package release
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
"bytes"
|
||||||
"fmt"
|
"context"
|
||||||
"os/exec"
|
|
||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
"sort"
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"golang.org/x/text/cases"
|
"golang.org/x/text/cases"
|
||||||
"golang.org/x/text/language"
|
"golang.org/x/text/language"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ConventionalCommit represents a parsed conventional commit.
|
// ConventionalCommit represents a parsed conventional commit.
|
||||||
|
//
|
||||||
|
// commit := release.ConventionalCommit{Type: "feat", Scope: "build", Description: "add linuxkit support"}
|
||||||
type ConventionalCommit struct {
|
type ConventionalCommit struct {
|
||||||
Type string // feat, fix, etc.
|
Type string // feat, fix, etc.
|
||||||
Scope string // optional scope in parentheses
|
Scope string // optional scope in parentheses
|
||||||
|
|
@ -61,15 +63,29 @@ var conventionalCommitRegex = regexp.MustCompile(`^(\w+)(?:\(([^)]+)\))?(!)?:\s*
|
||||||
// Generate generates a markdown changelog from git commits between two refs.
|
// Generate generates a markdown changelog from git commits between two refs.
|
||||||
// If fromRef is empty, it uses the previous tag or initial commit.
|
// If fromRef is empty, it uses the previous tag or initial commit.
|
||||||
// If toRef is empty, it uses HEAD.
|
// If toRef is empty, it uses HEAD.
|
||||||
|
//
|
||||||
|
// md, err := release.Generate(".", "v1.2.3", "HEAD")
|
||||||
func Generate(dir, fromRef, toRef string) (string, error) {
|
func Generate(dir, fromRef, toRef string) (string, error) {
|
||||||
|
return GenerateWithContext(context.Background(), dir, fromRef, toRef)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateWithContext generates a markdown changelog while honouring caller cancellation.
|
||||||
|
// If fromRef is empty, it uses the previous tag or initial commit.
|
||||||
|
// If toRef is empty, it uses HEAD.
|
||||||
|
//
|
||||||
|
// md, err := release.GenerateWithContext(ctx, ".", "v1.2.3", "HEAD")
|
||||||
|
func GenerateWithContext(ctx context.Context, dir, fromRef, toRef string) (string, error) {
|
||||||
if toRef == "" {
|
if toRef == "" {
|
||||||
toRef = "HEAD"
|
toRef = "HEAD"
|
||||||
}
|
}
|
||||||
|
|
||||||
// If fromRef is empty, try to find previous tag
|
// If fromRef is empty, try to find previous tag
|
||||||
if fromRef == "" {
|
if fromRef == "" {
|
||||||
prevTag, err := getPreviousTag(dir, toRef)
|
prevTag, err := getPreviousTagWithContext(ctx, dir, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return "", coreerr.E("changelog.Generate", "generation cancelled", ctx.Err())
|
||||||
|
}
|
||||||
// No previous tag, use initial commit
|
// No previous tag, use initial commit
|
||||||
fromRef = ""
|
fromRef = ""
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -78,7 +94,7 @@ func Generate(dir, fromRef, toRef string) (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get commits between refs
|
// Get commits between refs
|
||||||
commits, err := getCommits(dir, fromRef, toRef)
|
commits, err := getCommitsWithContext(ctx, dir, fromRef, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("changelog.Generate", "failed to get commits", err)
|
return "", coreerr.E("changelog.Generate", "failed to get commits", err)
|
||||||
}
|
}
|
||||||
|
|
@ -97,15 +113,27 @@ func Generate(dir, fromRef, toRef string) (string, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// GenerateWithConfig generates a changelog with filtering based on config.
|
// GenerateWithConfig generates a changelog with filtering based on config.
|
||||||
|
//
|
||||||
|
// md, err := release.GenerateWithConfig(".", "v1.2.3", "HEAD", &cfg.Changelog)
|
||||||
func GenerateWithConfig(dir, fromRef, toRef string, cfg *ChangelogConfig) (string, error) {
|
func GenerateWithConfig(dir, fromRef, toRef string, cfg *ChangelogConfig) (string, error) {
|
||||||
|
return GenerateWithConfigWithContext(context.Background(), dir, fromRef, toRef, cfg)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateWithConfigWithContext generates a filtered changelog while honouring caller cancellation.
|
||||||
|
//
|
||||||
|
// md, err := release.GenerateWithConfigWithContext(ctx, ".", "v1.2.3", "HEAD", &cfg.Changelog)
|
||||||
|
func GenerateWithConfigWithContext(ctx context.Context, dir, fromRef, toRef string, cfg *ChangelogConfig) (string, error) {
|
||||||
if toRef == "" {
|
if toRef == "" {
|
||||||
toRef = "HEAD"
|
toRef = "HEAD"
|
||||||
}
|
}
|
||||||
|
|
||||||
// If fromRef is empty, try to find previous tag
|
// If fromRef is empty, try to find previous tag
|
||||||
if fromRef == "" {
|
if fromRef == "" {
|
||||||
prevTag, err := getPreviousTag(dir, toRef)
|
prevTag, err := getPreviousTagWithContext(ctx, dir, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
if ctx.Err() != nil {
|
||||||
|
return "", coreerr.E("changelog.GenerateWithConfig", "generation cancelled", ctx.Err())
|
||||||
|
}
|
||||||
fromRef = ""
|
fromRef = ""
|
||||||
} else {
|
} else {
|
||||||
fromRef = prevTag
|
fromRef = prevTag
|
||||||
|
|
@ -113,7 +141,7 @@ func GenerateWithConfig(dir, fromRef, toRef string, cfg *ChangelogConfig) (strin
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get commits between refs
|
// Get commits between refs
|
||||||
commits, err := getCommits(dir, fromRef, toRef)
|
commits, err := getCommitsWithContext(ctx, dir, fromRef, toRef)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("changelog.GenerateWithConfig", "failed to get commits", err)
|
return "", coreerr.E("changelog.GenerateWithConfig", "failed to get commits", err)
|
||||||
}
|
}
|
||||||
|
|
@ -150,20 +178,15 @@ func GenerateWithConfig(dir, fromRef, toRef string, cfg *ChangelogConfig) (strin
|
||||||
return formatChangelog(parsedCommits, toRef), nil
|
return formatChangelog(parsedCommits, toRef), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// getPreviousTag returns the tag before the given ref.
|
func getPreviousTagWithContext(ctx context.Context, dir, ref string) (string, error) {
|
||||||
func getPreviousTag(dir, ref string) (string, error) {
|
output, err := ax.RunDir(ctx, dir, "git", "describe", "--tags", "--abbrev=0", ref+"^")
|
||||||
cmd := exec.Command("git", "describe", "--tags", "--abbrev=0", ref+"^")
|
|
||||||
cmd.Dir = dir
|
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
return strings.TrimSpace(string(output)), nil
|
return core.Trim(output), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// getCommits returns a slice of commit strings between two refs.
|
func getCommitsWithContext(ctx context.Context, dir, fromRef, toRef string) ([]string, error) {
|
||||||
// Format: "hash subject"
|
|
||||||
func getCommits(dir, fromRef, toRef string) ([]string, error) {
|
|
||||||
var args []string
|
var args []string
|
||||||
if fromRef == "" {
|
if fromRef == "" {
|
||||||
// All commits up to toRef
|
// All commits up to toRef
|
||||||
|
|
@ -173,15 +196,13 @@ func getCommits(dir, fromRef, toRef string) ([]string, error) {
|
||||||
args = []string{"log", "--oneline", "--no-merges", fromRef + ".." + toRef}
|
args = []string{"log", "--oneline", "--no-merges", fromRef + ".." + toRef}
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := exec.Command("git", args...)
|
output, err := ax.RunDir(ctx, dir, "git", args...)
|
||||||
cmd.Dir = dir
|
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
var commits []string
|
var commits []string
|
||||||
scanner := bufio.NewScanner(bytes.NewReader(output))
|
scanner := bufio.NewScanner(bytes.NewReader([]byte(output)))
|
||||||
for scanner.Scan() {
|
for scanner.Scan() {
|
||||||
line := scanner.Text()
|
line := scanner.Text()
|
||||||
if line != "" {
|
if line != "" {
|
||||||
|
|
@ -196,7 +217,7 @@ func getCommits(dir, fromRef, toRef string) ([]string, error) {
|
||||||
// Returns nil if the commit doesn't follow conventional commit format.
|
// Returns nil if the commit doesn't follow conventional commit format.
|
||||||
func parseConventionalCommit(commitLine string) *ConventionalCommit {
|
func parseConventionalCommit(commitLine string) *ConventionalCommit {
|
||||||
// Split hash and subject
|
// Split hash and subject
|
||||||
parts := strings.SplitN(commitLine, " ", 2)
|
parts := core.SplitN(commitLine, " ", 2)
|
||||||
if len(parts) != 2 {
|
if len(parts) != 2 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -211,7 +232,7 @@ func parseConventionalCommit(commitLine string) *ConventionalCommit {
|
||||||
}
|
}
|
||||||
|
|
||||||
return &ConventionalCommit{
|
return &ConventionalCommit{
|
||||||
Type: strings.ToLower(matches[1]),
|
Type: core.Lower(matches[1]),
|
||||||
Scope: matches[2],
|
Scope: matches[2],
|
||||||
Breaking: matches[3] == "!",
|
Breaking: matches[3] == "!",
|
||||||
Description: matches[4],
|
Description: matches[4],
|
||||||
|
|
@ -222,7 +243,7 @@ func parseConventionalCommit(commitLine string) *ConventionalCommit {
|
||||||
// formatChangelog formats parsed commits into markdown.
|
// formatChangelog formats parsed commits into markdown.
|
||||||
func formatChangelog(commits []ConventionalCommit, version string) string {
|
func formatChangelog(commits []ConventionalCommit, version string) string {
|
||||||
if len(commits) == 0 {
|
if len(commits) == 0 {
|
||||||
return fmt.Sprintf("## %s\n\nNo notable changes.", version)
|
return core.Sprintf("## %s\n\nNo notable changes.", version)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Group commits by type
|
// Group commits by type
|
||||||
|
|
@ -236,8 +257,8 @@ func formatChangelog(commits []ConventionalCommit, version string) string {
|
||||||
grouped[commit.Type] = append(grouped[commit.Type], commit)
|
grouped[commit.Type] = append(grouped[commit.Type], commit)
|
||||||
}
|
}
|
||||||
|
|
||||||
var buf strings.Builder
|
buf := core.NewBuilder()
|
||||||
buf.WriteString(fmt.Sprintf("## %s\n\n", version))
|
buf.WriteString(core.Sprintf("## %s\n\n", version))
|
||||||
|
|
||||||
// Breaking changes first
|
// Breaking changes first
|
||||||
if len(breaking) > 0 {
|
if len(breaking) > 0 {
|
||||||
|
|
@ -260,7 +281,7 @@ func formatChangelog(commits []ConventionalCommit, version string) string {
|
||||||
label = cases.Title(language.English).String(commitType)
|
label = cases.Title(language.English).String(commitType)
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.WriteString(fmt.Sprintf("### %s\n\n", label))
|
buf.WriteString(core.Sprintf("### %s\n\n", label))
|
||||||
for _, commit := range commits {
|
for _, commit := range commits {
|
||||||
buf.WriteString(formatCommitLine(commit))
|
buf.WriteString(formatCommitLine(commit))
|
||||||
}
|
}
|
||||||
|
|
@ -270,46 +291,57 @@ func formatChangelog(commits []ConventionalCommit, version string) string {
|
||||||
// Any remaining types not in the order list
|
// Any remaining types not in the order list
|
||||||
var remainingTypes []string
|
var remainingTypes []string
|
||||||
for commitType := range grouped {
|
for commitType := range grouped {
|
||||||
if !slices.Contains(commitTypeOrder, commitType) {
|
if !containsCommitType(commitTypeOrder, commitType) {
|
||||||
remainingTypes = append(remainingTypes, commitType)
|
remainingTypes = append(remainingTypes, commitType)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
slices.Sort(remainingTypes)
|
sort.Strings(remainingTypes)
|
||||||
|
|
||||||
for _, commitType := range remainingTypes {
|
for _, commitType := range remainingTypes {
|
||||||
commits := grouped[commitType]
|
commits := grouped[commitType]
|
||||||
label := cases.Title(language.English).String(commitType)
|
label := cases.Title(language.English).String(commitType)
|
||||||
buf.WriteString(fmt.Sprintf("### %s\n\n", label))
|
buf.WriteString(core.Sprintf("### %s\n\n", label))
|
||||||
for _, commit := range commits {
|
for _, commit := range commits {
|
||||||
buf.WriteString(formatCommitLine(commit))
|
buf.WriteString(formatCommitLine(commit))
|
||||||
}
|
}
|
||||||
buf.WriteString("\n")
|
buf.WriteString("\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
return strings.TrimSuffix(buf.String(), "\n")
|
return core.TrimSuffix(buf.String(), "\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
// formatCommitLine formats a single commit as a changelog line.
|
// formatCommitLine formats a single commit as a changelog line.
|
||||||
func formatCommitLine(commit ConventionalCommit) string {
|
func formatCommitLine(commit ConventionalCommit) string {
|
||||||
var buf strings.Builder
|
buf := core.NewBuilder()
|
||||||
buf.WriteString("- ")
|
buf.WriteString("- ")
|
||||||
|
|
||||||
if commit.Scope != "" {
|
if commit.Scope != "" {
|
||||||
buf.WriteString(fmt.Sprintf("**%s**: ", commit.Scope))
|
buf.WriteString(core.Sprintf("**%s**: ", commit.Scope))
|
||||||
}
|
}
|
||||||
|
|
||||||
buf.WriteString(commit.Description)
|
buf.WriteString(commit.Description)
|
||||||
buf.WriteString(fmt.Sprintf(" (%s)\n", commit.Hash))
|
buf.WriteString(core.Sprintf(" (%s)\n", commit.Hash))
|
||||||
|
|
||||||
return buf.String()
|
return buf.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
// ParseCommitType extracts the type from a conventional commit subject.
|
// ParseCommitType extracts the type from a conventional commit subject.
|
||||||
// Returns empty string if not a conventional commit.
|
// Returns empty string if not a conventional commit.
|
||||||
|
//
|
||||||
|
// t := release.ParseCommitType("feat(build): add linuxkit support") // → "feat"
|
||||||
func ParseCommitType(subject string) string {
|
func ParseCommitType(subject string) string {
|
||||||
matches := conventionalCommitRegex.FindStringSubmatch(subject)
|
matches := conventionalCommitRegex.FindStringSubmatch(subject)
|
||||||
if matches == nil {
|
if matches == nil {
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
return strings.ToLower(matches[1])
|
return core.Lower(matches[1])
|
||||||
|
}
|
||||||
|
|
||||||
|
func containsCommitType(types []string, target string) bool {
|
||||||
|
for _, item := range types {
|
||||||
|
if item == target {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,16 @@
|
||||||
package release
|
package release
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"context"
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseConventionalCommit_Good(t *testing.T) {
|
func TestChangelog_ParseConventionalCommit_Good(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
input string
|
input string
|
||||||
|
|
@ -108,7 +108,7 @@ func TestParseConventionalCommit_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseConventionalCommit_Bad(t *testing.T) {
|
func TestChangelog_ParseConventionalCommit_Bad(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
input string
|
input string
|
||||||
|
|
@ -143,7 +143,7 @@ func TestParseConventionalCommit_Bad(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFormatChangelog_Good(t *testing.T) {
|
func TestChangelog_FormatChangelog_Good(t *testing.T) {
|
||||||
t.Run("formats commits by type", func(t *testing.T) {
|
t.Run("formats commits by type", func(t *testing.T) {
|
||||||
commits := []ConventionalCommit{
|
commits := []ConventionalCommit{
|
||||||
{Type: "feat", Description: "add feature A", Hash: "abc1234"},
|
{Type: "feat", Description: "add feature A", Hash: "abc1234"},
|
||||||
|
|
@ -194,7 +194,7 @@ func TestFormatChangelog_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseCommitType_Good(t *testing.T) {
|
func TestChangelog_ParseCommitType_Good(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
expected string
|
expected string
|
||||||
|
|
@ -213,7 +213,7 @@ func TestParseCommitType_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseCommitType_Bad(t *testing.T) {
|
func TestChangelog_ParseCommitType_Bad(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
}{
|
}{
|
||||||
|
|
@ -230,7 +230,7 @@ func TestParseCommitType_Bad(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerateWithConfig_ConfigValues(t *testing.T) {
|
func TestChangelog_GenerateWithConfigConfigValues_Good(t *testing.T) {
|
||||||
t.Run("config filters are parsed correctly", func(t *testing.T) {
|
t.Run("config filters are parsed correctly", func(t *testing.T) {
|
||||||
cfg := &ChangelogConfig{
|
cfg := &ChangelogConfig{
|
||||||
Include: []string{"feat", "fix"},
|
Include: []string{"feat", "fix"},
|
||||||
|
|
@ -261,18 +261,11 @@ func setupChangelogGitRepo(t *testing.T) string {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
// Initialize git repo
|
// Initialize git repo
|
||||||
cmd := exec.Command("git", "init")
|
runGit(t, dir, "init")
|
||||||
cmd.Dir = dir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
// Configure git user for commits
|
// Configure git user for commits
|
||||||
cmd = exec.Command("git", "config", "user.email", "test@example.com")
|
runGit(t, dir, "config", "user.email", "test@example.com")
|
||||||
cmd.Dir = dir
|
runGit(t, dir, "config", "user.name", "Test User")
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
cmd = exec.Command("git", "config", "user.name", "Test User")
|
|
||||||
cmd.Dir = dir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
@ -282,30 +275,23 @@ func createChangelogCommit(t *testing.T, dir, message string) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
// Create or modify a file
|
// Create or modify a file
|
||||||
filePath := filepath.Join(dir, "changelog_test.txt")
|
filePath := ax.Join(dir, "changelog_test.txt")
|
||||||
content, _ := os.ReadFile(filePath)
|
content, _ := ax.ReadFile(filePath)
|
||||||
content = append(content, []byte(message+"\n")...)
|
content = append(content, []byte(message+"\n")...)
|
||||||
require.NoError(t, os.WriteFile(filePath, content, 0644))
|
require.NoError(t, ax.WriteFile(filePath, content, 0644))
|
||||||
|
|
||||||
// Stage and commit
|
// Stage and commit
|
||||||
cmd := exec.Command("git", "add", ".")
|
runGit(t, dir, "add", ".")
|
||||||
cmd.Dir = dir
|
runGit(t, dir, "commit", "-m", message)
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
cmd = exec.Command("git", "commit", "-m", message)
|
|
||||||
cmd.Dir = dir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// createChangelogTag creates a tag in the given directory.
|
// createChangelogTag creates a tag in the given directory.
|
||||||
func createChangelogTag(t *testing.T, dir, tag string) {
|
func createChangelogTag(t *testing.T, dir, tag string) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
cmd := exec.Command("git", "tag", tag)
|
runGit(t, dir, "tag", tag)
|
||||||
cmd.Dir = dir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerate_Good(t *testing.T) {
|
func TestChangelog_Generate_Good(t *testing.T) {
|
||||||
t.Run("generates changelog from commits", func(t *testing.T) {
|
t.Run("generates changelog from commits", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: add new feature")
|
createChangelogCommit(t, dir, "feat: add new feature")
|
||||||
|
|
@ -386,16 +372,28 @@ func TestGenerate_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerate_Bad(t *testing.T) {
|
func TestChangelog_Generate_Bad(t *testing.T) {
|
||||||
t.Run("returns error for non-git directory", func(t *testing.T) {
|
t.Run("returns error for non-git directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
_, err := Generate(dir, "", "HEAD")
|
_, err := Generate(dir, "", "HEAD")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("returns error when context is cancelled", func(t *testing.T) {
|
||||||
|
dir := setupChangelogGitRepo(t)
|
||||||
|
createChangelogCommit(t, dir, "feat: add new feature")
|
||||||
|
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
cancel()
|
||||||
|
|
||||||
|
_, err := GenerateWithContext(ctx, dir, "", "HEAD")
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.ErrorIs(t, err, context.Canceled)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerateWithConfig_Good(t *testing.T) {
|
func TestChangelog_GenerateWithConfig_Good(t *testing.T) {
|
||||||
t.Run("filters commits by include list", func(t *testing.T) {
|
t.Run("filters commits by include list", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: new feature")
|
createChangelogCommit(t, dir, "feat: new feature")
|
||||||
|
|
@ -452,14 +450,14 @@ func TestGenerateWithConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetCommits_Good(t *testing.T) {
|
func TestChangelog_GetCommits_Good(t *testing.T) {
|
||||||
t.Run("returns all commits when fromRef is empty", func(t *testing.T) {
|
t.Run("returns all commits when fromRef is empty", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: first")
|
createChangelogCommit(t, dir, "feat: first")
|
||||||
createChangelogCommit(t, dir, "feat: second")
|
createChangelogCommit(t, dir, "feat: second")
|
||||||
createChangelogCommit(t, dir, "feat: third")
|
createChangelogCommit(t, dir, "feat: third")
|
||||||
|
|
||||||
commits, err := getCommits(dir, "", "HEAD")
|
commits, err := getCommitsWithContext(context.Background(), dir, "", "HEAD")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
assert.Len(t, commits, 3)
|
assert.Len(t, commits, 3)
|
||||||
|
|
@ -472,7 +470,7 @@ func TestGetCommits_Good(t *testing.T) {
|
||||||
createChangelogCommit(t, dir, "feat: second")
|
createChangelogCommit(t, dir, "feat: second")
|
||||||
createChangelogCommit(t, dir, "feat: third")
|
createChangelogCommit(t, dir, "feat: third")
|
||||||
|
|
||||||
commits, err := getCommits(dir, "v1.0.0", "HEAD")
|
commits, err := getCommitsWithContext(context.Background(), dir, "v1.0.0", "HEAD")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
assert.Len(t, commits, 2)
|
assert.Len(t, commits, 2)
|
||||||
|
|
@ -484,7 +482,7 @@ func TestGetCommits_Good(t *testing.T) {
|
||||||
// Merge commits are excluded by --no-merges flag
|
// Merge commits are excluded by --no-merges flag
|
||||||
// We can verify by checking the count matches expected
|
// We can verify by checking the count matches expected
|
||||||
|
|
||||||
commits, err := getCommits(dir, "", "HEAD")
|
commits, err := getCommitsWithContext(context.Background(), dir, "", "HEAD")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
assert.Len(t, commits, 1)
|
assert.Len(t, commits, 1)
|
||||||
|
|
@ -496,31 +494,31 @@ func TestGetCommits_Good(t *testing.T) {
|
||||||
createChangelogCommit(t, dir, "feat: only commit")
|
createChangelogCommit(t, dir, "feat: only commit")
|
||||||
createChangelogTag(t, dir, "v1.0.0")
|
createChangelogTag(t, dir, "v1.0.0")
|
||||||
|
|
||||||
commits, err := getCommits(dir, "v1.0.0", "HEAD")
|
commits, err := getCommitsWithContext(context.Background(), dir, "v1.0.0", "HEAD")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
assert.Empty(t, commits)
|
assert.Empty(t, commits)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetCommits_Bad(t *testing.T) {
|
func TestChangelog_GetCommits_Bad(t *testing.T) {
|
||||||
t.Run("returns error for invalid ref", func(t *testing.T) {
|
t.Run("returns error for invalid ref", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: commit")
|
createChangelogCommit(t, dir, "feat: commit")
|
||||||
|
|
||||||
_, err := getCommits(dir, "nonexistent-tag", "HEAD")
|
_, err := getCommitsWithContext(context.Background(), dir, "nonexistent-tag", "HEAD")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("returns error for non-git directory", func(t *testing.T) {
|
t.Run("returns error for non-git directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
_, err := getCommits(dir, "", "HEAD")
|
_, err := getCommitsWithContext(context.Background(), dir, "", "HEAD")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetPreviousTag_Good(t *testing.T) {
|
func TestChangelog_GetPreviousTag_Good(t *testing.T) {
|
||||||
t.Run("returns previous tag", func(t *testing.T) {
|
t.Run("returns previous tag", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: first")
|
createChangelogCommit(t, dir, "feat: first")
|
||||||
|
|
@ -528,7 +526,7 @@ func TestGetPreviousTag_Good(t *testing.T) {
|
||||||
createChangelogCommit(t, dir, "feat: second")
|
createChangelogCommit(t, dir, "feat: second")
|
||||||
createChangelogTag(t, dir, "v1.1.0")
|
createChangelogTag(t, dir, "v1.1.0")
|
||||||
|
|
||||||
tag, err := getPreviousTag(dir, "v1.1.0")
|
tag, err := getPreviousTagWithContext(context.Background(), dir, "v1.1.0")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "v1.0.0", tag)
|
assert.Equal(t, "v1.0.0", tag)
|
||||||
})
|
})
|
||||||
|
|
@ -539,20 +537,20 @@ func TestGetPreviousTag_Good(t *testing.T) {
|
||||||
createChangelogTag(t, dir, "v1.0.0")
|
createChangelogTag(t, dir, "v1.0.0")
|
||||||
createChangelogCommit(t, dir, "feat: second")
|
createChangelogCommit(t, dir, "feat: second")
|
||||||
|
|
||||||
tag, err := getPreviousTag(dir, "HEAD")
|
tag, err := getPreviousTagWithContext(context.Background(), dir, "HEAD")
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "v1.0.0", tag)
|
assert.Equal(t, "v1.0.0", tag)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetPreviousTag_Bad(t *testing.T) {
|
func TestChangelog_GetPreviousTag_Bad(t *testing.T) {
|
||||||
t.Run("returns error when no previous tag exists", func(t *testing.T) {
|
t.Run("returns error when no previous tag exists", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: first")
|
createChangelogCommit(t, dir, "feat: first")
|
||||||
createChangelogTag(t, dir, "v1.0.0")
|
createChangelogTag(t, dir, "v1.0.0")
|
||||||
|
|
||||||
// v1.0.0^ has no tag before it
|
// v1.0.0^ has no tag before it
|
||||||
_, err := getPreviousTag(dir, "v1.0.0")
|
_, err := getPreviousTagWithContext(context.Background(), dir, "v1.0.0")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
@ -560,12 +558,12 @@ func TestGetPreviousTag_Bad(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: commit")
|
createChangelogCommit(t, dir, "feat: commit")
|
||||||
|
|
||||||
_, err := getPreviousTag(dir, "nonexistent")
|
_, err := getPreviousTagWithContext(context.Background(), dir, "nonexistent")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFormatCommitLine_Good(t *testing.T) {
|
func TestChangelog_FormatCommitLine_Good(t *testing.T) {
|
||||||
t.Run("formats commit without scope", func(t *testing.T) {
|
t.Run("formats commit without scope", func(t *testing.T) {
|
||||||
commit := ConventionalCommit{
|
commit := ConventionalCommit{
|
||||||
Type: "feat",
|
Type: "feat",
|
||||||
|
|
@ -590,7 +588,7 @@ func TestFormatCommitLine_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFormatChangelog_Ugly(t *testing.T) {
|
func TestChangelog_FormatChangelog_Ugly(t *testing.T) {
|
||||||
t.Run("handles custom commit type not in order", func(t *testing.T) {
|
t.Run("handles custom commit type not in order", func(t *testing.T) {
|
||||||
commits := []ConventionalCommit{
|
commits := []ConventionalCommit{
|
||||||
{Type: "custom", Description: "custom type", Hash: "abc1234"},
|
{Type: "custom", Description: "custom type", Hash: "abc1234"},
|
||||||
|
|
@ -616,7 +614,7 @@ func TestFormatChangelog_Ugly(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerateWithConfig_Bad(t *testing.T) {
|
func TestChangelog_GenerateWithConfig_Bad(t *testing.T) {
|
||||||
t.Run("returns error for non-git directory", func(t *testing.T) {
|
t.Run("returns error for non-git directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
cfg := &ChangelogConfig{
|
cfg := &ChangelogConfig{
|
||||||
|
|
@ -628,7 +626,7 @@ func TestGenerateWithConfig_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGenerateWithConfig_EdgeCases(t *testing.T) {
|
func TestChangelog_GenerateWithConfigEdgeCases_Ugly(t *testing.T) {
|
||||||
t.Run("uses HEAD when toRef is empty", func(t *testing.T) {
|
t.Run("uses HEAD when toRef is empty", func(t *testing.T) {
|
||||||
dir := setupChangelogGitRepo(t)
|
dir := setupChangelogGitRepo(t)
|
||||||
createChangelogCommit(t, dir, "feat: new feature")
|
createChangelogCommit(t, dir, "feat: new feature")
|
||||||
|
|
|
||||||
|
|
@ -3,21 +3,26 @@ package release
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"iter"
|
"iter"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ConfigFileName is the name of the release configuration file.
|
// ConfigFileName is the name of the release configuration file.
|
||||||
|
//
|
||||||
|
// configPath := ax.Join(projectDir, release.ConfigDir, release.ConfigFileName)
|
||||||
const ConfigFileName = "release.yaml"
|
const ConfigFileName = "release.yaml"
|
||||||
|
|
||||||
// ConfigDir is the directory where release configuration is stored.
|
// ConfigDir is the directory where release configuration is stored.
|
||||||
|
//
|
||||||
|
// configPath := ax.Join(projectDir, release.ConfigDir, release.ConfigFileName)
|
||||||
const ConfigDir = ".core"
|
const ConfigDir = ".core"
|
||||||
|
|
||||||
// Config holds the complete release configuration loaded from .core/release.yaml.
|
// Config holds the complete release configuration loaded from .core/release.yaml.
|
||||||
|
//
|
||||||
|
// cfg, err := release.LoadConfig(".")
|
||||||
type Config struct {
|
type Config struct {
|
||||||
// Version is the config file format version.
|
// Version is the config file format version.
|
||||||
Version int `yaml:"version"`
|
Version int `yaml:"version"`
|
||||||
|
|
@ -38,6 +43,8 @@ type Config struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// ProjectConfig holds project metadata for releases.
|
// ProjectConfig holds project metadata for releases.
|
||||||
|
//
|
||||||
|
// cfg.Project = release.ProjectConfig{Name: "core-build", Repository: "host-uk/core-build"}
|
||||||
type ProjectConfig struct {
|
type ProjectConfig struct {
|
||||||
// Name is the project name.
|
// Name is the project name.
|
||||||
Name string `yaml:"name"`
|
Name string `yaml:"name"`
|
||||||
|
|
@ -46,12 +53,19 @@ type ProjectConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// BuildConfig holds build settings for releases.
|
// BuildConfig holds build settings for releases.
|
||||||
|
//
|
||||||
|
// cfg.Build.Targets = []release.TargetConfig{{OS: "linux", Arch: "amd64"}}
|
||||||
type BuildConfig struct {
|
type BuildConfig struct {
|
||||||
// Targets defines the build targets.
|
// Targets defines the build targets.
|
||||||
Targets []TargetConfig `yaml:"targets"`
|
Targets []TargetConfig `yaml:"targets"`
|
||||||
|
// ArchiveFormat selects the archive compression format for build outputs.
|
||||||
|
// Supported values are "gz", "xz", and "zip"; empty uses gzip.
|
||||||
|
ArchiveFormat string `yaml:"archive_format,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TargetConfig defines a build target.
|
// TargetConfig defines a build target.
|
||||||
|
//
|
||||||
|
// t := release.TargetConfig{OS: "linux", Arch: "arm64"}
|
||||||
type TargetConfig struct {
|
type TargetConfig struct {
|
||||||
// OS is the target operating system (e.g., "linux", "darwin", "windows").
|
// OS is the target operating system (e.g., "linux", "darwin", "windows").
|
||||||
OS string `yaml:"os"`
|
OS string `yaml:"os"`
|
||||||
|
|
@ -60,6 +74,8 @@ type TargetConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// PublisherConfig holds configuration for a publisher.
|
// PublisherConfig holds configuration for a publisher.
|
||||||
|
//
|
||||||
|
// cfg.Publishers = []release.PublisherConfig{{Type: "github", Draft: false}}
|
||||||
type PublisherConfig struct {
|
type PublisherConfig struct {
|
||||||
// Type is the publisher type (e.g., "github", "linuxkit", "docker").
|
// Type is the publisher type (e.g., "github", "linuxkit", "docker").
|
||||||
Type string `yaml:"type"`
|
Type string `yaml:"type"`
|
||||||
|
|
@ -118,6 +134,8 @@ type PublisherConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// OfficialConfig holds configuration for generating files for official repo PRs.
|
// OfficialConfig holds configuration for generating files for official repo PRs.
|
||||||
|
//
|
||||||
|
// pub.Official = &release.OfficialConfig{Enabled: true, Output: "dist/homebrew"}
|
||||||
type OfficialConfig struct {
|
type OfficialConfig struct {
|
||||||
// Enabled determines whether to generate files for official repos.
|
// Enabled determines whether to generate files for official repos.
|
||||||
Enabled bool `yaml:"enabled"`
|
Enabled bool `yaml:"enabled"`
|
||||||
|
|
@ -126,6 +144,8 @@ type OfficialConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// SDKConfig holds SDK generation configuration.
|
// SDKConfig holds SDK generation configuration.
|
||||||
|
//
|
||||||
|
// cfg.SDK = &release.SDKConfig{Spec: "docs/openapi.yaml", Languages: []string{"typescript", "go"}}
|
||||||
type SDKConfig struct {
|
type SDKConfig struct {
|
||||||
// Spec is the path to the OpenAPI spec file.
|
// Spec is the path to the OpenAPI spec file.
|
||||||
Spec string `yaml:"spec,omitempty"`
|
Spec string `yaml:"spec,omitempty"`
|
||||||
|
|
@ -142,24 +162,32 @@ type SDKConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// SDKPackageConfig holds package naming configuration.
|
// SDKPackageConfig holds package naming configuration.
|
||||||
|
//
|
||||||
|
// cfg.SDK.Package = release.SDKPackageConfig{Name: "@host-uk/api-client", Version: "1.0.0"}
|
||||||
type SDKPackageConfig struct {
|
type SDKPackageConfig struct {
|
||||||
Name string `yaml:"name,omitempty"`
|
Name string `yaml:"name,omitempty"`
|
||||||
Version string `yaml:"version,omitempty"`
|
Version string `yaml:"version,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SDKDiffConfig holds diff configuration.
|
// SDKDiffConfig holds diff configuration.
|
||||||
|
//
|
||||||
|
// cfg.SDK.Diff = release.SDKDiffConfig{Enabled: true, FailOnBreaking: true}
|
||||||
type SDKDiffConfig struct {
|
type SDKDiffConfig struct {
|
||||||
Enabled bool `yaml:"enabled,omitempty"`
|
Enabled bool `yaml:"enabled,omitempty"`
|
||||||
FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"`
|
FailOnBreaking bool `yaml:"fail_on_breaking,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SDKPublishConfig holds monorepo publish configuration.
|
// SDKPublishConfig holds monorepo publish configuration.
|
||||||
|
//
|
||||||
|
// cfg.SDK.Publish = release.SDKPublishConfig{Repo: "host-uk/ts", Path: "packages/api-client"}
|
||||||
type SDKPublishConfig struct {
|
type SDKPublishConfig struct {
|
||||||
Repo string `yaml:"repo,omitempty"`
|
Repo string `yaml:"repo,omitempty"`
|
||||||
Path string `yaml:"path,omitempty"`
|
Path string `yaml:"path,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ChangelogConfig holds changelog generation settings.
|
// ChangelogConfig holds changelog generation settings.
|
||||||
|
//
|
||||||
|
// cfg.Changelog = release.ChangelogConfig{Include: []string{"feat", "fix"}, Exclude: []string{"chore"}}
|
||||||
type ChangelogConfig struct {
|
type ChangelogConfig struct {
|
||||||
// Include specifies commit types to include in the changelog.
|
// Include specifies commit types to include in the changelog.
|
||||||
Include []string `yaml:"include"`
|
Include []string `yaml:"include"`
|
||||||
|
|
@ -168,6 +196,8 @@ type ChangelogConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// PublishersIter returns an iterator for the publishers.
|
// PublishersIter returns an iterator for the publishers.
|
||||||
|
//
|
||||||
|
// for p := range cfg.PublishersIter() { fmt.Println(p.Type) }
|
||||||
func (c *Config) PublishersIter() iter.Seq[PublisherConfig] {
|
func (c *Config) PublishersIter() iter.Seq[PublisherConfig] {
|
||||||
return func(yield func(PublisherConfig) bool) {
|
return func(yield func(PublisherConfig) bool) {
|
||||||
for _, p := range c.Publishers {
|
for _, p := range c.Publishers {
|
||||||
|
|
@ -181,18 +211,20 @@ func (c *Config) PublishersIter() iter.Seq[PublisherConfig] {
|
||||||
// LoadConfig loads release configuration from the .core/release.yaml file in the given directory.
|
// LoadConfig loads release configuration from the .core/release.yaml file in the given directory.
|
||||||
// If the config file does not exist, it returns DefaultConfig().
|
// If the config file does not exist, it returns DefaultConfig().
|
||||||
// Returns an error if the file exists but cannot be parsed.
|
// Returns an error if the file exists but cannot be parsed.
|
||||||
|
//
|
||||||
|
// cfg, err := release.LoadConfig(".")
|
||||||
func LoadConfig(dir string) (*Config, error) {
|
func LoadConfig(dir string) (*Config, error) {
|
||||||
configPath := filepath.Join(dir, ConfigDir, ConfigFileName)
|
configPath := ax.Join(dir, ConfigDir, ConfigFileName)
|
||||||
|
|
||||||
// Convert to absolute path for io.Local
|
// Resolve path with AX-aware helpers.
|
||||||
absPath, err := filepath.Abs(configPath)
|
absPath, err := ax.Abs(configPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, coreerr.E("release.LoadConfig", "failed to resolve path", err)
|
return nil, coreerr.E("release.LoadConfig", "failed to resolve path", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
content, err := io.Local.Read(absPath)
|
content, err := ax.ReadFile(absPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if os.IsNotExist(err) {
|
if !ax.IsFile(absPath) {
|
||||||
cfg := DefaultConfig()
|
cfg := DefaultConfig()
|
||||||
cfg.projectDir = dir
|
cfg.projectDir = dir
|
||||||
return cfg, nil
|
return cfg, nil
|
||||||
|
|
@ -207,12 +239,15 @@ func LoadConfig(dir string) (*Config, error) {
|
||||||
|
|
||||||
// Apply defaults for any missing fields
|
// Apply defaults for any missing fields
|
||||||
applyDefaults(&cfg)
|
applyDefaults(&cfg)
|
||||||
|
cfg.ExpandEnv()
|
||||||
cfg.projectDir = dir
|
cfg.projectDir = dir
|
||||||
|
|
||||||
return &cfg, nil
|
return &cfg, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// DefaultConfig returns sensible defaults for release configuration.
|
// DefaultConfig returns sensible defaults for release configuration.
|
||||||
|
//
|
||||||
|
// cfg := release.DefaultConfig()
|
||||||
func DefaultConfig() *Config {
|
func DefaultConfig() *Config {
|
||||||
return &Config{
|
return &Config{
|
||||||
Version: 1,
|
Version: 1,
|
||||||
|
|
@ -242,6 +277,23 @@ func DefaultConfig() *Config {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ScaffoldConfig returns the config shape written by `core ci init`.
|
||||||
|
//
|
||||||
|
// cfg := release.ScaffoldConfig()
|
||||||
|
func ScaffoldConfig() *Config {
|
||||||
|
cfg := DefaultConfig()
|
||||||
|
cfg.SDK = &SDKConfig{
|
||||||
|
Spec: "api/openapi.yaml",
|
||||||
|
Languages: []string{"typescript", "python", "go", "php"},
|
||||||
|
Output: "sdk",
|
||||||
|
Diff: SDKDiffConfig{
|
||||||
|
Enabled: true,
|
||||||
|
FailOnBreaking: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return cfg
|
||||||
|
}
|
||||||
|
|
||||||
// applyDefaults fills in default values for any empty fields in the config.
|
// applyDefaults fills in default values for any empty fields in the config.
|
||||||
func applyDefaults(cfg *Config) {
|
func applyDefaults(cfg *Config) {
|
||||||
defaults := DefaultConfig()
|
defaults := DefaultConfig()
|
||||||
|
|
@ -264,54 +316,129 @@ func applyDefaults(cfg *Config) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ExpandEnv expands environment variables across the release config.
|
||||||
|
//
|
||||||
|
// cfg.ExpandEnv() // expands $REPO, $PACKAGE_NAME, $SDK_SPEC, etc.
|
||||||
|
func (c *Config) ExpandEnv() {
|
||||||
|
if c == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Project.Name = expandEnv(c.Project.Name)
|
||||||
|
c.Project.Repository = expandEnv(c.Project.Repository)
|
||||||
|
|
||||||
|
c.Build.ArchiveFormat = expandEnv(c.Build.ArchiveFormat)
|
||||||
|
c.Build.Targets = expandTargetConfigs(c.Build.Targets)
|
||||||
|
|
||||||
|
c.Publishers = expandPublisherConfigs(c.Publishers)
|
||||||
|
|
||||||
|
c.Changelog.Include = expandEnvSlice(c.Changelog.Include)
|
||||||
|
c.Changelog.Exclude = expandEnvSlice(c.Changelog.Exclude)
|
||||||
|
|
||||||
|
if c.SDK != nil {
|
||||||
|
c.SDK.Spec = expandEnv(c.SDK.Spec)
|
||||||
|
c.SDK.Output = expandEnv(c.SDK.Output)
|
||||||
|
c.SDK.Package.Name = expandEnv(c.SDK.Package.Name)
|
||||||
|
c.SDK.Package.Version = expandEnv(c.SDK.Package.Version)
|
||||||
|
c.SDK.Publish.Repo = expandEnv(c.SDK.Publish.Repo)
|
||||||
|
c.SDK.Publish.Path = expandEnv(c.SDK.Publish.Path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// SetProjectDir sets the project directory on the config.
|
// SetProjectDir sets the project directory on the config.
|
||||||
|
//
|
||||||
|
// cfg.SetProjectDir("/home/user/my-project")
|
||||||
func (c *Config) SetProjectDir(dir string) {
|
func (c *Config) SetProjectDir(dir string) {
|
||||||
c.projectDir = dir
|
c.projectDir = dir
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetVersion sets the version override on the config.
|
// SetVersion sets the version override on the config.
|
||||||
|
//
|
||||||
|
// cfg.SetVersion("v1.2.3")
|
||||||
func (c *Config) SetVersion(version string) {
|
func (c *Config) SetVersion(version string) {
|
||||||
c.version = version
|
c.version = version
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func expandPublisherConfigs(publishers []PublisherConfig) []PublisherConfig {
|
||||||
|
if len(publishers) == 0 {
|
||||||
|
return publishers
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]PublisherConfig, len(publishers))
|
||||||
|
copy(result, publishers)
|
||||||
|
|
||||||
|
for i := range result {
|
||||||
|
result[i].Type = expandEnv(result[i].Type)
|
||||||
|
result[i].Config = expandEnv(result[i].Config)
|
||||||
|
result[i].Formats = expandEnvSlice(result[i].Formats)
|
||||||
|
result[i].Platforms = expandEnvSlice(result[i].Platforms)
|
||||||
|
result[i].Registry = expandEnv(result[i].Registry)
|
||||||
|
result[i].Image = expandEnv(result[i].Image)
|
||||||
|
result[i].Dockerfile = expandEnv(result[i].Dockerfile)
|
||||||
|
result[i].Tags = expandEnvSlice(result[i].Tags)
|
||||||
|
result[i].BuildArgs = expandEnvMap(result[i].BuildArgs)
|
||||||
|
result[i].Package = expandEnv(result[i].Package)
|
||||||
|
result[i].Access = expandEnv(result[i].Access)
|
||||||
|
result[i].Tap = expandEnv(result[i].Tap)
|
||||||
|
result[i].Formula = expandEnv(result[i].Formula)
|
||||||
|
result[i].Bucket = expandEnv(result[i].Bucket)
|
||||||
|
result[i].Maintainer = expandEnv(result[i].Maintainer)
|
||||||
|
if result[i].Official != nil {
|
||||||
|
result[i].Official.Output = expandEnv(result[i].Official.Output)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
// ConfigPath returns the path to the release config file for a given directory.
|
// ConfigPath returns the path to the release config file for a given directory.
|
||||||
|
//
|
||||||
|
// path := release.ConfigPath("/home/user/my-project") // → "/home/user/my-project/.core/release.yaml"
|
||||||
func ConfigPath(dir string) string {
|
func ConfigPath(dir string) string {
|
||||||
return filepath.Join(dir, ConfigDir, ConfigFileName)
|
return ax.Join(dir, ConfigDir, ConfigFileName)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ConfigExists checks if a release config file exists in the given directory.
|
// ConfigExists checks if a release config file exists in the given directory.
|
||||||
|
//
|
||||||
|
// if release.ConfigExists(".") { ... }
|
||||||
func ConfigExists(dir string) bool {
|
func ConfigExists(dir string) bool {
|
||||||
configPath := ConfigPath(dir)
|
configPath := ConfigPath(dir)
|
||||||
absPath, err := filepath.Abs(configPath)
|
absPath, err := ax.Abs(configPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
return io.Local.IsFile(absPath)
|
return ax.IsFile(absPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetRepository returns the repository from the config.
|
// GetRepository returns the repository from the config.
|
||||||
|
//
|
||||||
|
// repo := cfg.GetRepository() // → "host-uk/core-build"
|
||||||
func (c *Config) GetRepository() string {
|
func (c *Config) GetRepository() string {
|
||||||
return c.Project.Repository
|
return c.Project.Repository
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetProjectName returns the project name from the config.
|
// GetProjectName returns the project name from the config.
|
||||||
|
//
|
||||||
|
// name := cfg.GetProjectName() // → "core-build"
|
||||||
func (c *Config) GetProjectName() string {
|
func (c *Config) GetProjectName() string {
|
||||||
return c.Project.Name
|
return c.Project.Name
|
||||||
}
|
}
|
||||||
|
|
||||||
// WriteConfig writes the config to the .core/release.yaml file.
|
// WriteConfig writes the config to the .core/release.yaml file.
|
||||||
|
//
|
||||||
|
// err := release.WriteConfig(cfg, ".")
|
||||||
func WriteConfig(cfg *Config, dir string) error {
|
func WriteConfig(cfg *Config, dir string) error {
|
||||||
configPath := ConfigPath(dir)
|
configPath := ConfigPath(dir)
|
||||||
|
|
||||||
// Convert to absolute path for io.Local
|
// Resolve path with AX-aware helpers.
|
||||||
absPath, err := filepath.Abs(configPath)
|
absPath, err := ax.Abs(configPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("release.WriteConfig", "failed to resolve path", err)
|
return coreerr.E("release.WriteConfig", "failed to resolve path", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure directory exists
|
// Ensure directory exists
|
||||||
configDir := filepath.Dir(absPath)
|
configDir := ax.Dir(absPath)
|
||||||
if err := io.Local.EnsureDir(configDir); err != nil {
|
if err := ax.MkdirAll(configDir, 0o755); err != nil {
|
||||||
return coreerr.E("release.WriteConfig", "failed to create directory", err)
|
return coreerr.E("release.WriteConfig", "failed to create directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -320,9 +447,95 @@ func WriteConfig(cfg *Config, dir string) error {
|
||||||
return coreerr.E("release.WriteConfig", "failed to marshal config", err)
|
return coreerr.E("release.WriteConfig", "failed to marshal config", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := io.Local.Write(absPath, string(data)); err != nil {
|
if err := ax.WriteString(absPath, string(data), 0o644); err != nil {
|
||||||
return coreerr.E("release.WriteConfig", "failed to write config file", err)
|
return coreerr.E("release.WriteConfig", "failed to write config file", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func expandEnvSlice(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]string, len(values))
|
||||||
|
for i, value := range values {
|
||||||
|
result[i] = expandEnv(value)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func expandEnvMap(values map[string]string) map[string]string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make(map[string]string, len(values))
|
||||||
|
for key, value := range values {
|
||||||
|
result[key] = expandEnv(value)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func expandTargetConfigs(values []TargetConfig) []TargetConfig {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return values
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]TargetConfig, len(values))
|
||||||
|
for i, value := range values {
|
||||||
|
result[i] = TargetConfig{
|
||||||
|
OS: expandEnv(value.OS),
|
||||||
|
Arch: expandEnv(value.Arch),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// expandEnv expands $VAR or ${VAR} using the current process environment.
|
||||||
|
func expandEnv(s string) string {
|
||||||
|
if !core.Contains(s, "$") {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := core.NewBuilder()
|
||||||
|
for i := 0; i < len(s); {
|
||||||
|
if s[i] != '$' {
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if i+1 < len(s) && s[i+1] == '{' {
|
||||||
|
j := i + 2
|
||||||
|
for j < len(s) && s[j] != '}' {
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j < len(s) {
|
||||||
|
buf.WriteString(core.Env(s[i+2 : j]))
|
||||||
|
i = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
j := i + 1
|
||||||
|
for j < len(s) {
|
||||||
|
c := s[j]
|
||||||
|
if c != '_' && (c < '0' || c > '9') && (c < 'A' || c > 'Z') && (c < 'a' || c > 'z') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
j++
|
||||||
|
}
|
||||||
|
if j > i+1 {
|
||||||
|
buf.WriteString(core.Env(s[i+1 : j]))
|
||||||
|
i = j
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
buf.WriteByte(s[i])
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,9 @@
|
||||||
package release
|
package release
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
@ -15,19 +14,19 @@ func setupConfigTestDir(t *testing.T, configContent string) string {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
if configContent != "" {
|
if configContent != "" {
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
err := os.MkdirAll(coreDir, 0755)
|
err := ax.MkdirAll(coreDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
configPath := filepath.Join(coreDir, ConfigFileName)
|
configPath := ax.Join(coreDir, ConfigFileName)
|
||||||
err = os.WriteFile(configPath, []byte(configContent), 0644)
|
err = ax.WriteFile(configPath, []byte(configContent), 0644)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return dir
|
return dir
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadConfig_Good(t *testing.T) {
|
func TestConfig_LoadConfig_Good(t *testing.T) {
|
||||||
t.Run("loads valid config", func(t *testing.T) {
|
t.Run("loads valid config", func(t *testing.T) {
|
||||||
content := `
|
content := `
|
||||||
version: 1
|
version: 1
|
||||||
|
|
@ -40,6 +39,7 @@ build:
|
||||||
arch: amd64
|
arch: amd64
|
||||||
- os: darwin
|
- os: darwin
|
||||||
arch: arm64
|
arch: arm64
|
||||||
|
archive_format: xz
|
||||||
publishers:
|
publishers:
|
||||||
- type: github
|
- type: github
|
||||||
prerelease: true
|
prerelease: true
|
||||||
|
|
@ -61,6 +61,7 @@ changelog:
|
||||||
assert.Equal(t, "myapp", cfg.Project.Name)
|
assert.Equal(t, "myapp", cfg.Project.Name)
|
||||||
assert.Equal(t, "owner/repo", cfg.Project.Repository)
|
assert.Equal(t, "owner/repo", cfg.Project.Repository)
|
||||||
assert.Len(t, cfg.Build.Targets, 2)
|
assert.Len(t, cfg.Build.Targets, 2)
|
||||||
|
assert.Equal(t, "xz", cfg.Build.ArchiveFormat)
|
||||||
assert.Equal(t, "linux", cfg.Build.Targets[0].OS)
|
assert.Equal(t, "linux", cfg.Build.Targets[0].OS)
|
||||||
assert.Equal(t, "amd64", cfg.Build.Targets[0].Arch)
|
assert.Equal(t, "amd64", cfg.Build.Targets[0].Arch)
|
||||||
assert.Equal(t, "darwin", cfg.Build.Targets[1].OS)
|
assert.Equal(t, "darwin", cfg.Build.Targets[1].OS)
|
||||||
|
|
@ -119,7 +120,51 @@ project:
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestLoadConfig_Bad(t *testing.T) {
|
func TestConfig_LoadConfig_ExpandEnv_Good(t *testing.T) {
|
||||||
|
t.Setenv("RELEASE_REPO", "owner/release-app")
|
||||||
|
t.Setenv("RELEASE_ARCHIVE", "xz")
|
||||||
|
t.Setenv("RELEASE_TARGET_OS", "darwin")
|
||||||
|
t.Setenv("RELEASE_TARGET_ARCH", "arm64")
|
||||||
|
t.Setenv("HOMEBREW_TAP", "owner/homebrew-tap")
|
||||||
|
t.Setenv("SDK_SPEC", "docs/openapi.yaml")
|
||||||
|
t.Setenv("SDK_OUTPUT", "generated/sdk")
|
||||||
|
|
||||||
|
content := `
|
||||||
|
version: 1
|
||||||
|
project:
|
||||||
|
name: release-app
|
||||||
|
repository: $RELEASE_REPO
|
||||||
|
build:
|
||||||
|
archive_format: $RELEASE_ARCHIVE
|
||||||
|
targets:
|
||||||
|
- os: $RELEASE_TARGET_OS
|
||||||
|
arch: $RELEASE_TARGET_ARCH
|
||||||
|
publishers:
|
||||||
|
- type: homebrew
|
||||||
|
tap: $HOMEBREW_TAP
|
||||||
|
sdk:
|
||||||
|
spec: $SDK_SPEC
|
||||||
|
output: $SDK_OUTPUT
|
||||||
|
`
|
||||||
|
dir := setupConfigTestDir(t, content)
|
||||||
|
|
||||||
|
cfg, err := LoadConfig(dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, cfg)
|
||||||
|
|
||||||
|
assert.Equal(t, "owner/release-app", cfg.Project.Repository)
|
||||||
|
assert.Equal(t, "xz", cfg.Build.ArchiveFormat)
|
||||||
|
require.Len(t, cfg.Build.Targets, 1)
|
||||||
|
assert.Equal(t, "darwin", cfg.Build.Targets[0].OS)
|
||||||
|
assert.Equal(t, "arm64", cfg.Build.Targets[0].Arch)
|
||||||
|
require.Len(t, cfg.Publishers, 1)
|
||||||
|
assert.Equal(t, "owner/homebrew-tap", cfg.Publishers[0].Tap)
|
||||||
|
require.NotNil(t, cfg.SDK)
|
||||||
|
assert.Equal(t, "docs/openapi.yaml", cfg.SDK.Spec)
|
||||||
|
assert.Equal(t, "generated/sdk", cfg.SDK.Output)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfig_LoadConfig_Bad(t *testing.T) {
|
||||||
t.Run("returns error for invalid YAML", func(t *testing.T) {
|
t.Run("returns error for invalid YAML", func(t *testing.T) {
|
||||||
content := `
|
content := `
|
||||||
version: 1
|
version: 1
|
||||||
|
|
@ -134,25 +179,26 @@ project:
|
||||||
assert.Contains(t, err.Error(), "failed to parse config file")
|
assert.Contains(t, err.Error(), "failed to parse config file")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("returns error for unreadable file", func(t *testing.T) {
|
t.Run("returns default config when config path is a directory", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
err := os.MkdirAll(coreDir, 0755)
|
err := ax.MkdirAll(coreDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Create config as a directory instead of file
|
// Create config as a directory instead of file
|
||||||
configPath := filepath.Join(coreDir, ConfigFileName)
|
configPath := ax.Join(coreDir, ConfigFileName)
|
||||||
err = os.Mkdir(configPath, 0755)
|
err = ax.Mkdir(configPath, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
cfg, err := LoadConfig(dir)
|
cfg, err := LoadConfig(dir)
|
||||||
assert.Error(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Nil(t, cfg)
|
require.NotNil(t, cfg)
|
||||||
assert.Contains(t, err.Error(), "failed to read config file")
|
assert.Equal(t, 1, cfg.Version)
|
||||||
|
assert.Equal(t, dir, cfg.projectDir)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDefaultConfig_Good(t *testing.T) {
|
func TestConfig_DefaultConfig_Good(t *testing.T) {
|
||||||
t.Run("returns sensible defaults", func(t *testing.T) {
|
t.Run("returns sensible defaults", func(t *testing.T) {
|
||||||
cfg := DefaultConfig()
|
cfg := DefaultConfig()
|
||||||
|
|
||||||
|
|
@ -194,14 +240,27 @@ func TestDefaultConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConfigPath_Good(t *testing.T) {
|
func TestConfig_ScaffoldConfig_Good(t *testing.T) {
|
||||||
|
t.Run("returns documented init scaffold", func(t *testing.T) {
|
||||||
|
cfg := ScaffoldConfig()
|
||||||
|
|
||||||
|
require.NotNil(t, cfg.SDK)
|
||||||
|
assert.Equal(t, "api/openapi.yaml", cfg.SDK.Spec)
|
||||||
|
assert.Equal(t, []string{"typescript", "python", "go", "php"}, cfg.SDK.Languages)
|
||||||
|
assert.Equal(t, "sdk", cfg.SDK.Output)
|
||||||
|
assert.True(t, cfg.SDK.Diff.Enabled)
|
||||||
|
assert.False(t, cfg.SDK.Diff.FailOnBreaking)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestConfig_ConfigPath_Good(t *testing.T) {
|
||||||
t.Run("returns correct path", func(t *testing.T) {
|
t.Run("returns correct path", func(t *testing.T) {
|
||||||
path := ConfigPath("/project/root")
|
path := ConfigPath("/project/root")
|
||||||
assert.Equal(t, "/project/root/.core/release.yaml", path)
|
assert.Equal(t, "/project/root/.core/release.yaml", path)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestConfigExists_Good(t *testing.T) {
|
func TestConfig_ConfigExists_Good(t *testing.T) {
|
||||||
t.Run("returns true when config exists", func(t *testing.T) {
|
t.Run("returns true when config exists", func(t *testing.T) {
|
||||||
dir := setupConfigTestDir(t, "version: 1")
|
dir := setupConfigTestDir(t, "version: 1")
|
||||||
assert.True(t, ConfigExists(dir))
|
assert.True(t, ConfigExists(dir))
|
||||||
|
|
@ -218,7 +277,7 @@ func TestConfigExists_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWriteConfig_Good(t *testing.T) {
|
func TestConfig_WriteConfig_Good(t *testing.T) {
|
||||||
t.Run("writes config to file", func(t *testing.T) {
|
t.Run("writes config to file", func(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
|
@ -247,8 +306,8 @@ func TestWriteConfig_Good(t *testing.T) {
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Check directory was created
|
// Check directory was created
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
info, err := os.Stat(coreDir)
|
info, err := ax.Stat(coreDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.True(t, info.IsDir())
|
assert.True(t, info.IsDir())
|
||||||
})
|
})
|
||||||
|
|
@ -302,22 +361,22 @@ func TestConfig_SetProjectDir_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWriteConfig_Bad(t *testing.T) {
|
func TestConfig_WriteConfig_Bad(t *testing.T) {
|
||||||
t.Run("returns error for unwritable directory", func(t *testing.T) {
|
t.Run("returns error for unwritable directory", func(t *testing.T) {
|
||||||
if os.Geteuid() == 0 {
|
if ax.Geteuid() == 0 {
|
||||||
t.Skip("root can write to any directory")
|
t.Skip("root can write to any directory")
|
||||||
}
|
}
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
// Create .core directory and make it unwritable
|
// Create .core directory and make it unwritable
|
||||||
coreDir := filepath.Join(dir, ConfigDir)
|
coreDir := ax.Join(dir, ConfigDir)
|
||||||
err := os.MkdirAll(coreDir, 0755)
|
err := ax.MkdirAll(coreDir, 0755)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
// Make directory read-only
|
// Make directory read-only
|
||||||
err = os.Chmod(coreDir, 0555)
|
err = ax.Chmod(coreDir, 0555)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer func() { _ = os.Chmod(coreDir, 0755) }()
|
defer func() { _ = ax.Chmod(coreDir, 0755) }()
|
||||||
|
|
||||||
cfg := DefaultConfig()
|
cfg := DefaultConfig()
|
||||||
err = WriteConfig(cfg, dir)
|
err = WriteConfig(cfg, dir)
|
||||||
|
|
@ -326,7 +385,7 @@ func TestWriteConfig_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("returns error when directory creation fails", func(t *testing.T) {
|
t.Run("returns error when directory creation fails", func(t *testing.T) {
|
||||||
if os.Geteuid() == 0 {
|
if ax.Geteuid() == 0 {
|
||||||
t.Skip("root can create directories anywhere")
|
t.Skip("root can create directories anywhere")
|
||||||
}
|
}
|
||||||
// Use a path that doesn't exist and can't be created
|
// Use a path that doesn't exist and can't be created
|
||||||
|
|
@ -336,7 +395,7 @@ func TestWriteConfig_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestApplyDefaults_Good(t *testing.T) {
|
func TestConfig_ApplyDefaults_Good(t *testing.T) {
|
||||||
t.Run("applies version default when zero", func(t *testing.T) {
|
t.Run("applies version default when zero", func(t *testing.T) {
|
||||||
cfg := &Config{Version: 0}
|
cfg := &Config{Version: 0}
|
||||||
applyDefaults(cfg)
|
applyDefaults(cfg)
|
||||||
|
|
|
||||||
|
|
@ -5,13 +5,10 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"embed"
|
"embed"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"text/template"
|
"text/template"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
coreio "dappco.re/go/core/io"
|
coreio "dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -21,6 +18,8 @@ import (
|
||||||
var aurTemplates embed.FS
|
var aurTemplates embed.FS
|
||||||
|
|
||||||
// AURConfig holds AUR-specific configuration.
|
// AURConfig holds AUR-specific configuration.
|
||||||
|
//
|
||||||
|
// cfg := publishers.AURConfig{Package: "core-build", Maintainer: "Jane Doe <jane@example.com>"}
|
||||||
type AURConfig struct {
|
type AURConfig struct {
|
||||||
// Package is the AUR package name.
|
// Package is the AUR package name.
|
||||||
Package string
|
Package string
|
||||||
|
|
@ -31,19 +30,27 @@ type AURConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// AURPublisher publishes releases to AUR.
|
// AURPublisher publishes releases to AUR.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewAURPublisher()
|
||||||
type AURPublisher struct{}
|
type AURPublisher struct{}
|
||||||
|
|
||||||
// NewAURPublisher creates a new AUR publisher.
|
// NewAURPublisher creates a new AUR publisher.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewAURPublisher()
|
||||||
func NewAURPublisher() *AURPublisher {
|
func NewAURPublisher() *AURPublisher {
|
||||||
return &AURPublisher{}
|
return &AURPublisher{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the publisher's identifier.
|
// Name returns the publisher's identifier.
|
||||||
|
//
|
||||||
|
// name := pub.Name() // → "aur"
|
||||||
func (p *AURPublisher) Name() string {
|
func (p *AURPublisher) Name() string {
|
||||||
return "aur"
|
return "aur"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Publish publishes the release to AUR.
|
// Publish publishes the release to AUR.
|
||||||
|
//
|
||||||
|
// err := pub.Publish(ctx, rel, pubCfg, relCfg, false)
|
||||||
func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
||||||
cfg := p.parseConfig(pubCfg, relCfg)
|
cfg := p.parseConfig(pubCfg, relCfg)
|
||||||
|
|
||||||
|
|
@ -56,7 +63,7 @@ func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg Pub
|
||||||
repo = relCfg.GetRepository()
|
repo = relCfg.GetRepository()
|
||||||
}
|
}
|
||||||
if repo == "" {
|
if repo == "" {
|
||||||
detectedRepo, err := detectRepository(release.ProjectDir)
|
detectedRepo, err := detectRepository(ctx, release.ProjectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("aur.Publish", "could not determine repository", err)
|
return coreerr.E("aur.Publish", "could not determine repository", err)
|
||||||
}
|
}
|
||||||
|
|
@ -68,7 +75,7 @@ func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg Pub
|
||||||
projectName = relCfg.GetProjectName()
|
projectName = relCfg.GetProjectName()
|
||||||
}
|
}
|
||||||
if projectName == "" {
|
if projectName == "" {
|
||||||
parts := strings.Split(repo, "/")
|
parts := core.Split(repo, "/")
|
||||||
projectName = parts[len(parts)-1]
|
projectName = parts[len(parts)-1]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -77,12 +84,12 @@ func (p *AURPublisher) Publish(ctx context.Context, release *Release, pubCfg Pub
|
||||||
packageName = projectName
|
packageName = projectName
|
||||||
}
|
}
|
||||||
|
|
||||||
version := strings.TrimPrefix(release.Version, "v")
|
version := core.TrimPrefix(release.Version, "v")
|
||||||
checksums := buildChecksumMap(release.Artifacts)
|
checksums := buildChecksumMap(release.Artifacts)
|
||||||
|
|
||||||
data := aurTemplateData{
|
data := aurTemplateData{
|
||||||
PackageName: packageName,
|
PackageName: packageName,
|
||||||
Description: fmt.Sprintf("%s CLI", projectName),
|
Description: core.Sprintf("%s CLI", projectName),
|
||||||
Repository: repo,
|
Repository: repo,
|
||||||
Version: version,
|
Version: version,
|
||||||
License: "MIT",
|
License: "MIT",
|
||||||
|
|
@ -134,38 +141,38 @@ func (p *AURPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *AURPublisher) dryRunPublish(m coreio.Medium, data aurTemplateData, cfg AURConfig) error {
|
func (p *AURPublisher) dryRunPublish(m coreio.Medium, data aurTemplateData, cfg AURConfig) error {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== DRY RUN: AUR Publish ===")
|
publisherPrintln("=== DRY RUN: AUR Publish ===")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Printf("Package: %s-bin\n", data.PackageName)
|
publisherPrint("Package: %s-bin", data.PackageName)
|
||||||
fmt.Printf("Version: %s\n", data.Version)
|
publisherPrint("Version: %s", data.Version)
|
||||||
fmt.Printf("Maintainer: %s\n", data.Maintainer)
|
publisherPrint("Maintainer: %s", data.Maintainer)
|
||||||
fmt.Printf("Repository: %s\n", data.Repository)
|
publisherPrint("Repository: %s", data.Repository)
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
pkgbuild, err := p.renderTemplate(m, "templates/aur/PKGBUILD.tmpl", data)
|
pkgbuild, err := p.renderTemplate(m, "templates/aur/PKGBUILD.tmpl", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("aur.dryRunPublish", "failed to render PKGBUILD template", err)
|
return coreerr.E("aur.dryRunPublish", "failed to render PKGBUILD template", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Generated PKGBUILD:")
|
publisherPrintln("Generated PKGBUILD:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(pkgbuild)
|
publisherPrintln(pkgbuild)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
srcinfo, err := p.renderTemplate(m, "templates/aur/.SRCINFO.tmpl", data)
|
srcinfo, err := p.renderTemplate(m, "templates/aur/.SRCINFO.tmpl", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("aur.dryRunPublish", "failed to render .SRCINFO template", err)
|
return coreerr.E("aur.dryRunPublish", "failed to render .SRCINFO template", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Generated .SRCINFO:")
|
publisherPrintln("Generated .SRCINFO:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(srcinfo)
|
publisherPrintln(srcinfo)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
fmt.Printf("Would push to AUR: ssh://aur@aur.archlinux.org/%s-bin.git\n", data.PackageName)
|
publisherPrint("Would push to AUR: ssh://aur@aur.archlinux.org/%s-bin.git", data.PackageName)
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== END DRY RUN ===")
|
publisherPrintln("=== END DRY RUN ===")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -185,25 +192,25 @@ func (p *AURPublisher) executePublish(ctx context.Context, projectDir string, da
|
||||||
if cfg.Official != nil && cfg.Official.Enabled {
|
if cfg.Official != nil && cfg.Official.Enabled {
|
||||||
output := cfg.Official.Output
|
output := cfg.Official.Output
|
||||||
if output == "" {
|
if output == "" {
|
||||||
output = filepath.Join(projectDir, "dist", "aur")
|
output = ax.Join(projectDir, "dist", "aur")
|
||||||
} else if !filepath.IsAbs(output) {
|
} else if !ax.IsAbs(output) {
|
||||||
output = filepath.Join(projectDir, output)
|
output = ax.Join(projectDir, output)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := release.FS.EnsureDir(output); err != nil {
|
if err := release.FS.EnsureDir(output); err != nil {
|
||||||
return coreerr.E("aur.Publish", "failed to create output directory", err)
|
return coreerr.E("aur.Publish", "failed to create output directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
pkgbuildPath := filepath.Join(output, "PKGBUILD")
|
pkgbuildPath := ax.Join(output, "PKGBUILD")
|
||||||
if err := release.FS.Write(pkgbuildPath, pkgbuild); err != nil {
|
if err := release.FS.Write(pkgbuildPath, pkgbuild); err != nil {
|
||||||
return coreerr.E("aur.Publish", "failed to write PKGBUILD", err)
|
return coreerr.E("aur.Publish", "failed to write PKGBUILD", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
srcinfoPath := filepath.Join(output, ".SRCINFO")
|
srcinfoPath := ax.Join(output, ".SRCINFO")
|
||||||
if err := release.FS.Write(srcinfoPath, srcinfo); err != nil {
|
if err := release.FS.Write(srcinfoPath, srcinfo); err != nil {
|
||||||
return coreerr.E("aur.Publish", "failed to write .SRCINFO", err)
|
return coreerr.E("aur.Publish", "failed to write .SRCINFO", err)
|
||||||
}
|
}
|
||||||
fmt.Printf("Wrote AUR files: %s\n", output)
|
publisherPrint("Wrote AUR files: %s", output)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Push to AUR if not in official-only mode
|
// Push to AUR if not in official-only mode
|
||||||
|
|
@ -217,62 +224,49 @@ func (p *AURPublisher) executePublish(ctx context.Context, projectDir string, da
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *AURPublisher) pushToAUR(ctx context.Context, data aurTemplateData, pkgbuild, srcinfo string) error {
|
func (p *AURPublisher) pushToAUR(ctx context.Context, data aurTemplateData, pkgbuild, srcinfo string) error {
|
||||||
aurURL := fmt.Sprintf("ssh://aur@aur.archlinux.org/%s-bin.git", data.PackageName)
|
aurURL := core.Sprintf("ssh://aur@aur.archlinux.org/%s-bin.git", data.PackageName)
|
||||||
|
|
||||||
tmpDir, err := os.MkdirTemp("", "aur-package-*")
|
tmpDir, err := ax.TempDir("aur-package-*")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("aur.pushToAUR", "failed to create temp directory", err)
|
return coreerr.E("aur.pushToAUR", "failed to create temp directory", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = coreio.Local.DeleteAll(tmpDir) }()
|
defer func() { _ = ax.RemoveAll(tmpDir) }()
|
||||||
|
|
||||||
// Clone existing AUR repo (or initialise new one)
|
// Clone existing AUR repo (or initialise new one)
|
||||||
fmt.Printf("Cloning AUR package %s-bin...\n", data.PackageName)
|
publisherPrint("Cloning AUR package %s-bin...", data.PackageName)
|
||||||
cmd := exec.CommandContext(ctx, "git", "clone", aurURL, tmpDir)
|
if err := ax.Exec(ctx, "git", "clone", aurURL, tmpDir); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
// If clone fails, init a new repo
|
// If clone fails, init a new repo
|
||||||
cmd = exec.CommandContext(ctx, "git", "init", tmpDir)
|
if err := ax.Exec(ctx, "git", "init", tmpDir); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("aur.pushToAUR", "failed to initialise repo", err)
|
return coreerr.E("aur.pushToAUR", "failed to initialise repo", err)
|
||||||
}
|
}
|
||||||
cmd = exec.CommandContext(ctx, "git", "-C", tmpDir, "remote", "add", "origin", aurURL)
|
if err := ax.Exec(ctx, "git", "-C", tmpDir, "remote", "add", "origin", aurURL); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("aur.pushToAUR", "failed to add remote", err)
|
return coreerr.E("aur.pushToAUR", "failed to add remote", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write files
|
// Write files
|
||||||
if err := coreio.Local.Write(filepath.Join(tmpDir, "PKGBUILD"), pkgbuild); err != nil {
|
if err := ax.WriteString(ax.Join(tmpDir, "PKGBUILD"), pkgbuild, 0o644); err != nil {
|
||||||
return coreerr.E("aur.pushToAUR", "failed to write PKGBUILD", err)
|
return coreerr.E("aur.pushToAUR", "failed to write PKGBUILD", err)
|
||||||
}
|
}
|
||||||
if err := coreio.Local.Write(filepath.Join(tmpDir, ".SRCINFO"), srcinfo); err != nil {
|
if err := ax.WriteString(ax.Join(tmpDir, ".SRCINFO"), srcinfo, 0o644); err != nil {
|
||||||
return coreerr.E("aur.pushToAUR", "failed to write .SRCINFO", err)
|
return coreerr.E("aur.pushToAUR", "failed to write .SRCINFO", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
commitMsg := fmt.Sprintf("Update to %s", data.Version)
|
commitMsg := core.Sprintf("Update to %s", data.Version)
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "add", ".")
|
if err := ax.ExecDir(ctx, tmpDir, "git", "add", "."); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("aur.pushToAUR", "git add failed", err)
|
return coreerr.E("aur.pushToAUR", "git add failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "commit", "-m", commitMsg)
|
if err := publisherRun(ctx, tmpDir, nil, "git", "commit", "-m", commitMsg); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("aur.pushToAUR", "git commit failed", err)
|
return coreerr.E("aur.pushToAUR", "git commit failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "push", "origin", "master")
|
if err := publisherRun(ctx, tmpDir, nil, "git", "push", "origin", "master"); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("aur.pushToAUR", "git push failed", err)
|
return coreerr.E("aur.pushToAUR", "git push failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("Published to AUR: https://aur.archlinux.org/packages/%s-bin\n", data.PackageName)
|
publisherPrint("Published to AUR: https://aur.archlinux.org/packages/%s-bin", data.PackageName)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -281,7 +275,7 @@ func (p *AURPublisher) renderTemplate(m coreio.Medium, name string, data aurTemp
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
// Try custom template from medium
|
// Try custom template from medium
|
||||||
customPath := filepath.Join(".core", name)
|
customPath := ax.Join(".core", name)
|
||||||
if m != nil && m.IsFile(customPath) {
|
if m != nil && m.IsFile(customPath) {
|
||||||
customContent, err := m.Read(customPath)
|
customContent, err := m.Read(customPath)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
|
@ -297,7 +291,7 @@ func (p *AURPublisher) renderTemplate(m coreio.Medium, name string, data aurTemp
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tmpl, err := template.New(filepath.Base(name)).Parse(string(content))
|
tmpl, err := template.New(ax.Base(name)).Parse(string(content))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("aur.renderTemplate", "failed to parse template "+name, err)
|
return "", coreerr.E("aur.renderTemplate", "failed to parse template "+name, err)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,7 @@
|
||||||
package publishers
|
package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
|
@ -11,14 +9,14 @@ import (
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestAURPublisher_Name_Good(t *testing.T) {
|
func TestAUR_AURPublisherName_Good(t *testing.T) {
|
||||||
t.Run("returns aur", func(t *testing.T) {
|
t.Run("returns aur", func(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
assert.Equal(t, "aur", p.Name())
|
assert.Equal(t, "aur", p.Name())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURPublisher_ParseConfig_Good(t *testing.T) {
|
func TestAUR_AURPublisherParseConfig_Good(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
|
|
||||||
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
||||||
|
|
@ -80,7 +78,7 @@ func TestAURPublisher_ParseConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURPublisher_RenderTemplate_Good(t *testing.T) {
|
func TestAUR_AURPublisherRenderTemplate_Good(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
|
|
||||||
t.Run("renders PKGBUILD template with data", func(t *testing.T) {
|
t.Run("renders PKGBUILD template with data", func(t *testing.T) {
|
||||||
|
|
@ -140,7 +138,7 @@ func TestAURPublisher_RenderTemplate_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURPublisher_RenderTemplate_Bad(t *testing.T) {
|
func TestAUR_AURPublisherRenderTemplate_Bad(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
|
|
||||||
t.Run("returns error for non-existent template", func(t *testing.T) {
|
t.Run("returns error for non-existent template", func(t *testing.T) {
|
||||||
|
|
@ -151,14 +149,10 @@ func TestAURPublisher_RenderTemplate_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURPublisher_DryRunPublish_Good(t *testing.T) {
|
func TestAUR_AURPublisherDryRunPublish_Good(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
|
|
||||||
t.Run("outputs expected dry run information", func(t *testing.T) {
|
t.Run("outputs expected dry run information", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
data := aurTemplateData{
|
data := aurTemplateData{
|
||||||
PackageName: "myapp",
|
PackageName: "myapp",
|
||||||
Version: "1.0.0",
|
Version: "1.0.0",
|
||||||
|
|
@ -171,15 +165,11 @@ func TestAURPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
Maintainer: "John Doe <john@example.com>",
|
Maintainer: "John Doe <john@example.com>",
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(io.Local, data, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(io.Local, data, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "DRY RUN: AUR Publish")
|
assert.Contains(t, output, "DRY RUN: AUR Publish")
|
||||||
assert.Contains(t, output, "Package: myapp-bin")
|
assert.Contains(t, output, "Package: myapp-bin")
|
||||||
|
|
@ -193,7 +183,7 @@ func TestAURPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURPublisher_Publish_Bad(t *testing.T) {
|
func TestAUR_AURPublisherPublish_Bad(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
|
|
||||||
t.Run("fails when maintainer not configured", func(t *testing.T) {
|
t.Run("fails when maintainer not configured", func(t *testing.T) {
|
||||||
|
|
@ -211,7 +201,7 @@ func TestAURPublisher_Publish_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestAURConfig_Defaults_Good(t *testing.T) {
|
func TestAUR_AURConfigDefaults_Good(t *testing.T) {
|
||||||
t.Run("has sensible defaults", func(t *testing.T) {
|
t.Run("has sensible defaults", func(t *testing.T) {
|
||||||
p := NewAURPublisher()
|
p := NewAURPublisher()
|
||||||
pubCfg := PublisherConfig{Type: "aur"}
|
pubCfg := PublisherConfig{Type: "aur"}
|
||||||
|
|
|
||||||
|
|
@ -5,13 +5,10 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"embed"
|
"embed"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"text/template"
|
"text/template"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/i18n"
|
"dappco.re/go/core/i18n"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
|
@ -22,6 +19,8 @@ import (
|
||||||
var chocolateyTemplates embed.FS
|
var chocolateyTemplates embed.FS
|
||||||
|
|
||||||
// ChocolateyConfig holds Chocolatey-specific configuration.
|
// ChocolateyConfig holds Chocolatey-specific configuration.
|
||||||
|
//
|
||||||
|
// cfg := publishers.ChocolateyConfig{Package: "core-build", Push: true}
|
||||||
type ChocolateyConfig struct {
|
type ChocolateyConfig struct {
|
||||||
// Package is the Chocolatey package name.
|
// Package is the Chocolatey package name.
|
||||||
Package string
|
Package string
|
||||||
|
|
@ -32,19 +31,27 @@ type ChocolateyConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// ChocolateyPublisher publishes releases to Chocolatey.
|
// ChocolateyPublisher publishes releases to Chocolatey.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewChocolateyPublisher()
|
||||||
type ChocolateyPublisher struct{}
|
type ChocolateyPublisher struct{}
|
||||||
|
|
||||||
// NewChocolateyPublisher creates a new Chocolatey publisher.
|
// NewChocolateyPublisher creates a new Chocolatey publisher.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewChocolateyPublisher()
|
||||||
func NewChocolateyPublisher() *ChocolateyPublisher {
|
func NewChocolateyPublisher() *ChocolateyPublisher {
|
||||||
return &ChocolateyPublisher{}
|
return &ChocolateyPublisher{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the publisher's identifier.
|
// Name returns the publisher's identifier.
|
||||||
|
//
|
||||||
|
// name := pub.Name() // → "chocolatey"
|
||||||
func (p *ChocolateyPublisher) Name() string {
|
func (p *ChocolateyPublisher) Name() string {
|
||||||
return "chocolatey"
|
return "chocolatey"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Publish publishes the release to Chocolatey.
|
// Publish publishes the release to Chocolatey.
|
||||||
|
//
|
||||||
|
// err := pub.Publish(ctx, rel, pubCfg, relCfg, false)
|
||||||
func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
||||||
cfg := p.parseConfig(pubCfg, relCfg)
|
cfg := p.parseConfig(pubCfg, relCfg)
|
||||||
|
|
||||||
|
|
@ -53,7 +60,7 @@ func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pub
|
||||||
repo = relCfg.GetRepository()
|
repo = relCfg.GetRepository()
|
||||||
}
|
}
|
||||||
if repo == "" {
|
if repo == "" {
|
||||||
detectedRepo, err := detectRepository(release.ProjectDir)
|
detectedRepo, err := detectRepository(ctx, release.ProjectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("chocolatey.Publish", "could not determine repository", err)
|
return coreerr.E("chocolatey.Publish", "could not determine repository", err)
|
||||||
}
|
}
|
||||||
|
|
@ -65,7 +72,7 @@ func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pub
|
||||||
projectName = relCfg.GetProjectName()
|
projectName = relCfg.GetProjectName()
|
||||||
}
|
}
|
||||||
if projectName == "" {
|
if projectName == "" {
|
||||||
parts := strings.Split(repo, "/")
|
parts := core.Split(repo, "/")
|
||||||
projectName = parts[len(parts)-1]
|
projectName = parts[len(parts)-1]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -74,22 +81,22 @@ func (p *ChocolateyPublisher) Publish(ctx context.Context, release *Release, pub
|
||||||
packageName = projectName
|
packageName = projectName
|
||||||
}
|
}
|
||||||
|
|
||||||
version := strings.TrimPrefix(release.Version, "v")
|
version := core.TrimPrefix(release.Version, "v")
|
||||||
checksums := buildChecksumMap(release.Artifacts)
|
checksums := buildChecksumMap(release.Artifacts)
|
||||||
|
|
||||||
// Extract authors from repository
|
// Extract authors from repository
|
||||||
authors := strings.Split(repo, "/")[0]
|
authors := core.Split(repo, "/")[0]
|
||||||
|
|
||||||
data := chocolateyTemplateData{
|
data := chocolateyTemplateData{
|
||||||
PackageName: packageName,
|
PackageName: packageName,
|
||||||
Title: fmt.Sprintf("%s CLI", i18n.Title(projectName)),
|
Title: core.Sprintf("%s CLI", i18n.Title(projectName)),
|
||||||
Description: fmt.Sprintf("%s CLI", projectName),
|
Description: core.Sprintf("%s CLI", projectName),
|
||||||
Repository: repo,
|
Repository: repo,
|
||||||
Version: version,
|
Version: version,
|
||||||
License: "MIT",
|
License: "MIT",
|
||||||
BinaryName: projectName,
|
BinaryName: projectName,
|
||||||
Authors: authors,
|
Authors: authors,
|
||||||
Tags: fmt.Sprintf("cli %s", projectName),
|
Tags: core.Sprintf("cli %s", projectName),
|
||||||
Checksums: checksums,
|
Checksums: checksums,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -140,42 +147,42 @@ func (p *ChocolateyPublisher) parseConfig(pubCfg PublisherConfig, relCfg Release
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *ChocolateyPublisher) dryRunPublish(m io.Medium, data chocolateyTemplateData, cfg ChocolateyConfig) error {
|
func (p *ChocolateyPublisher) dryRunPublish(m io.Medium, data chocolateyTemplateData, cfg ChocolateyConfig) error {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== DRY RUN: Chocolatey Publish ===")
|
publisherPrintln("=== DRY RUN: Chocolatey Publish ===")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Printf("Package: %s\n", data.PackageName)
|
publisherPrint("Package: %s", data.PackageName)
|
||||||
fmt.Printf("Version: %s\n", data.Version)
|
publisherPrint("Version: %s", data.Version)
|
||||||
fmt.Printf("Push: %t\n", cfg.Push)
|
publisherPrint("Push: %t", cfg.Push)
|
||||||
fmt.Printf("Repository: %s\n", data.Repository)
|
publisherPrint("Repository: %s", data.Repository)
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
nuspec, err := p.renderTemplate(m, "templates/chocolatey/package.nuspec.tmpl", data)
|
nuspec, err := p.renderTemplate(m, "templates/chocolatey/package.nuspec.tmpl", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("chocolatey.dryRunPublish", "failed to render nuspec", err)
|
return coreerr.E("chocolatey.dryRunPublish", "failed to render nuspec", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Generated package.nuspec:")
|
publisherPrintln("Generated package.nuspec:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(nuspec)
|
publisherPrintln(nuspec)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
install, err := p.renderTemplate(m, "templates/chocolatey/tools/chocolateyinstall.ps1.tmpl", data)
|
install, err := p.renderTemplate(m, "templates/chocolatey/tools/chocolateyinstall.ps1.tmpl", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("chocolatey.dryRunPublish", "failed to render install script", err)
|
return coreerr.E("chocolatey.dryRunPublish", "failed to render install script", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Generated chocolateyinstall.ps1:")
|
publisherPrintln("Generated chocolateyinstall.ps1:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(install)
|
publisherPrintln(install)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
if cfg.Push {
|
if cfg.Push {
|
||||||
fmt.Println("Would push to Chocolatey community repo")
|
publisherPrintln("Would push to Chocolatey community repo")
|
||||||
} else {
|
} else {
|
||||||
fmt.Println("Would generate package files only (push=false)")
|
publisherPrintln("Would generate package files only (push=false)")
|
||||||
}
|
}
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== END DRY RUN ===")
|
publisherPrintln("=== END DRY RUN ===")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -192,31 +199,31 @@ func (p *ChocolateyPublisher) executePublish(ctx context.Context, projectDir str
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create package directory
|
// Create package directory
|
||||||
output := filepath.Join(projectDir, "dist", "chocolatey")
|
output := ax.Join(projectDir, "dist", "chocolatey")
|
||||||
if cfg.Official != nil && cfg.Official.Enabled && cfg.Official.Output != "" {
|
if cfg.Official != nil && cfg.Official.Enabled && cfg.Official.Output != "" {
|
||||||
output = cfg.Official.Output
|
output = cfg.Official.Output
|
||||||
if !filepath.IsAbs(output) {
|
if !ax.IsAbs(output) {
|
||||||
output = filepath.Join(projectDir, output)
|
output = ax.Join(projectDir, output)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
toolsDir := filepath.Join(output, "tools")
|
toolsDir := ax.Join(output, "tools")
|
||||||
if err := release.FS.EnsureDir(toolsDir); err != nil {
|
if err := release.FS.EnsureDir(toolsDir); err != nil {
|
||||||
return coreerr.E("chocolatey.Publish", "failed to create output directory", err)
|
return coreerr.E("chocolatey.Publish", "failed to create output directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write files
|
// Write files
|
||||||
nuspecPath := filepath.Join(output, fmt.Sprintf("%s.nuspec", data.PackageName))
|
nuspecPath := ax.Join(output, core.Sprintf("%s.nuspec", data.PackageName))
|
||||||
if err := release.FS.Write(nuspecPath, nuspec); err != nil {
|
if err := release.FS.Write(nuspecPath, nuspec); err != nil {
|
||||||
return coreerr.E("chocolatey.Publish", "failed to write nuspec", err)
|
return coreerr.E("chocolatey.Publish", "failed to write nuspec", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
installPath := filepath.Join(toolsDir, "chocolateyinstall.ps1")
|
installPath := ax.Join(toolsDir, "chocolateyinstall.ps1")
|
||||||
if err := release.FS.Write(installPath, install); err != nil {
|
if err := release.FS.Write(installPath, install); err != nil {
|
||||||
return coreerr.E("chocolatey.Publish", "failed to write install script", err)
|
return coreerr.E("chocolatey.Publish", "failed to write install script", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("Wrote Chocolatey package files: %s\n", output)
|
publisherPrint("Wrote Chocolatey package files: %s", output)
|
||||||
|
|
||||||
// Push to Chocolatey if configured
|
// Push to Chocolatey if configured
|
||||||
if cfg.Push {
|
if cfg.Push {
|
||||||
|
|
@ -230,31 +237,24 @@ func (p *ChocolateyPublisher) executePublish(ctx context.Context, projectDir str
|
||||||
|
|
||||||
func (p *ChocolateyPublisher) pushToChocolatey(ctx context.Context, packageDir string, data chocolateyTemplateData) error {
|
func (p *ChocolateyPublisher) pushToChocolatey(ctx context.Context, packageDir string, data chocolateyTemplateData) error {
|
||||||
// Check for CHOCOLATEY_API_KEY
|
// Check for CHOCOLATEY_API_KEY
|
||||||
apiKey := os.Getenv("CHOCOLATEY_API_KEY")
|
apiKey := core.Env("CHOCOLATEY_API_KEY")
|
||||||
if apiKey == "" {
|
if apiKey == "" {
|
||||||
return coreerr.E("chocolatey.Publish", "CHOCOLATEY_API_KEY environment variable is required for push", nil)
|
return coreerr.E("chocolatey.Publish", "CHOCOLATEY_API_KEY environment variable is required for push", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Pack the package
|
// Pack the package
|
||||||
nupkgPath := filepath.Join(packageDir, fmt.Sprintf("%s.%s.nupkg", data.PackageName, data.Version))
|
nupkgPath := ax.Join(packageDir, core.Sprintf("%s.%s.nupkg", data.PackageName, data.Version))
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "choco", "pack", filepath.Join(packageDir, fmt.Sprintf("%s.nuspec", data.PackageName)), "-OutputDirectory", packageDir)
|
if err := publisherRun(ctx, "", nil, "choco", "pack", ax.Join(packageDir, core.Sprintf("%s.nuspec", data.PackageName)), "-OutputDirectory", packageDir); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("chocolatey.Publish", "choco pack failed", err)
|
return coreerr.E("chocolatey.Publish", "choco pack failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Push the package — pass API key via environment variable to avoid exposing it in process listings
|
// Push the package — pass API key via environment variable to avoid exposing it in process listings
|
||||||
cmd = exec.CommandContext(ctx, "choco", "push", nupkgPath, "--source", "https://push.chocolatey.org/")
|
if err := publisherRun(ctx, "", []string{"chocolateyApiKey=" + apiKey}, "choco", "push", nupkgPath, "--source", "https://push.chocolatey.org/"); err != nil {
|
||||||
cmd.Env = append(os.Environ(), "chocolateyApiKey="+apiKey)
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("chocolatey.Publish", "choco push failed", err)
|
return coreerr.E("chocolatey.Publish", "choco push failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("Published to Chocolatey: https://community.chocolatey.org/packages/%s\n", data.PackageName)
|
publisherPrint("Published to Chocolatey: https://community.chocolatey.org/packages/%s", data.PackageName)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -263,7 +263,7 @@ func (p *ChocolateyPublisher) renderTemplate(m io.Medium, name string, data choc
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
// Try custom template from medium
|
// Try custom template from medium
|
||||||
customPath := filepath.Join(".core", name)
|
customPath := ax.Join(".core", name)
|
||||||
if m != nil && m.IsFile(customPath) {
|
if m != nil && m.IsFile(customPath) {
|
||||||
customContent, err := m.Read(customPath)
|
customContent, err := m.Read(customPath)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
|
@ -279,7 +279,7 @@ func (p *ChocolateyPublisher) renderTemplate(m io.Medium, name string, data choc
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tmpl, err := template.New(filepath.Base(name)).Parse(string(content))
|
tmpl, err := template.New(ax.Base(name)).Parse(string(content))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("chocolatey.renderTemplate", "failed to parse template "+name, err)
|
return "", coreerr.E("chocolatey.renderTemplate", "failed to parse template "+name, err)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,25 +1,24 @@
|
||||||
package publishers
|
package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestChocolateyPublisher_Name_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherName_Good(t *testing.T) {
|
||||||
t.Run("returns chocolatey", func(t *testing.T) {
|
t.Run("returns chocolatey", func(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
assert.Equal(t, "chocolatey", p.Name())
|
assert.Equal(t, "chocolatey", p.Name())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyPublisher_ParseConfig_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherParseConfig_Good(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
|
|
||||||
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
||||||
|
|
@ -107,7 +106,7 @@ func TestChocolateyPublisher_ParseConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyPublisher_RenderTemplate_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherRenderTemplate_Good(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
|
|
||||||
t.Run("renders nuspec template with data", func(t *testing.T) {
|
t.Run("renders nuspec template with data", func(t *testing.T) {
|
||||||
|
|
@ -160,7 +159,7 @@ func TestChocolateyPublisher_RenderTemplate_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyPublisher_RenderTemplate_Bad(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherRenderTemplate_Bad(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
|
|
||||||
t.Run("returns error for non-existent template", func(t *testing.T) {
|
t.Run("returns error for non-existent template", func(t *testing.T) {
|
||||||
|
|
@ -171,14 +170,10 @@ func TestChocolateyPublisher_RenderTemplate_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherDryRunPublish_Good(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
|
|
||||||
t.Run("outputs expected dry run information", func(t *testing.T) {
|
t.Run("outputs expected dry run information", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
data := chocolateyTemplateData{
|
data := chocolateyTemplateData{
|
||||||
PackageName: "myapp",
|
PackageName: "myapp",
|
||||||
Version: "1.0.0",
|
Version: "1.0.0",
|
||||||
|
|
@ -192,15 +187,11 @@ func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
Push: false,
|
Push: false,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(io.Local, data, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(io.Local, data, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "DRY RUN: Chocolatey Publish")
|
assert.Contains(t, output, "DRY RUN: Chocolatey Publish")
|
||||||
assert.Contains(t, output, "Package: myapp")
|
assert.Contains(t, output, "Package: myapp")
|
||||||
|
|
@ -214,10 +205,6 @@ func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("shows push message when push is enabled", func(t *testing.T) {
|
t.Run("shows push message when push is enabled", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
data := chocolateyTemplateData{
|
data := chocolateyTemplateData{
|
||||||
PackageName: "myapp",
|
PackageName: "myapp",
|
||||||
Version: "1.0.0",
|
Version: "1.0.0",
|
||||||
|
|
@ -230,37 +217,25 @@ func TestChocolateyPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
Push: true,
|
Push: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(io.Local, data, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(io.Local, data, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "Push: true")
|
assert.Contains(t, output, "Push: true")
|
||||||
assert.Contains(t, output, "Would push to Chocolatey community repo")
|
assert.Contains(t, output, "Would push to Chocolatey community repo")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyPublisher_ExecutePublish_Bad(t *testing.T) {
|
func TestChocolatey_ChocolateyPublisherExecutePublish_Bad(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
|
|
||||||
t.Run("fails when CHOCOLATEY_API_KEY not set for push", func(t *testing.T) {
|
t.Run("fails when CHOCOLATEY_API_KEY not set for push", func(t *testing.T) {
|
||||||
// Ensure CHOCOLATEY_API_KEY is not set
|
t.Setenv("CHOCOLATEY_API_KEY", "")
|
||||||
oldKey := os.Getenv("CHOCOLATEY_API_KEY")
|
|
||||||
_ = os.Unsetenv("CHOCOLATEY_API_KEY")
|
|
||||||
defer func() {
|
|
||||||
if oldKey != "" {
|
|
||||||
_ = os.Setenv("CHOCOLATEY_API_KEY", oldKey)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
// Create a temp directory for the test
|
// Create a temp directory for the test
|
||||||
tmpDir, err := os.MkdirTemp("", "choco-test-*")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
require.True(t, ax.IsDir(tmpDir))
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
data := chocolateyTemplateData{
|
data := chocolateyTemplateData{
|
||||||
PackageName: "testpkg",
|
PackageName: "testpkg",
|
||||||
|
|
@ -272,13 +247,13 @@ func TestChocolateyPublisher_ExecutePublish_Bad(t *testing.T) {
|
||||||
Checksums: ChecksumMap{},
|
Checksums: ChecksumMap{},
|
||||||
}
|
}
|
||||||
|
|
||||||
err = p.pushToChocolatey(context.TODO(), tmpDir, data)
|
err := p.pushToChocolatey(context.TODO(), tmpDir, data)
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
assert.Contains(t, err.Error(), "CHOCOLATEY_API_KEY environment variable is required")
|
assert.Contains(t, err.Error(), "CHOCOLATEY_API_KEY environment variable is required")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyConfig_Defaults_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyConfigDefaults_Good(t *testing.T) {
|
||||||
t.Run("has sensible defaults", func(t *testing.T) {
|
t.Run("has sensible defaults", func(t *testing.T) {
|
||||||
p := NewChocolateyPublisher()
|
p := NewChocolateyPublisher()
|
||||||
pubCfg := PublisherConfig{Type: "chocolatey"}
|
pubCfg := PublisherConfig{Type: "chocolatey"}
|
||||||
|
|
@ -292,7 +267,7 @@ func TestChocolateyConfig_Defaults_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestChocolateyTemplateData_Good(t *testing.T) {
|
func TestChocolatey_ChocolateyTemplateData_Good(t *testing.T) {
|
||||||
t.Run("struct has all expected fields", func(t *testing.T) {
|
t.Run("struct has all expected fields", func(t *testing.T) {
|
||||||
data := chocolateyTemplateData{
|
data := chocolateyTemplateData{
|
||||||
PackageName: "myapp",
|
PackageName: "myapp",
|
||||||
|
|
|
||||||
|
|
@ -3,16 +3,17 @@ package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
|
"dappco.re/go/core/build/pkg/build"
|
||||||
|
"dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DockerConfig holds configuration for the Docker publisher.
|
// DockerConfig holds configuration for the Docker publisher.
|
||||||
|
//
|
||||||
|
// cfg := publishers.DockerConfig{Registry: "ghcr.io", Image: "host-uk/core-build", Platforms: []string{"linux/amd64", "linux/arm64"}}
|
||||||
type DockerConfig struct {
|
type DockerConfig struct {
|
||||||
// Registry is the container registry (default: ghcr.io).
|
// Registry is the container registry (default: ghcr.io).
|
||||||
Registry string `yaml:"registry"`
|
Registry string `yaml:"registry"`
|
||||||
|
|
@ -29,49 +30,63 @@ type DockerConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// DockerPublisher builds and publishes Docker images.
|
// DockerPublisher builds and publishes Docker images.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewDockerPublisher()
|
||||||
type DockerPublisher struct{}
|
type DockerPublisher struct{}
|
||||||
|
|
||||||
// NewDockerPublisher creates a new Docker publisher.
|
// NewDockerPublisher creates a new Docker publisher.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewDockerPublisher()
|
||||||
func NewDockerPublisher() *DockerPublisher {
|
func NewDockerPublisher() *DockerPublisher {
|
||||||
return &DockerPublisher{}
|
return &DockerPublisher{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the publisher's identifier.
|
// Name returns the publisher's identifier.
|
||||||
|
//
|
||||||
|
// name := pub.Name() // → "docker"
|
||||||
func (p *DockerPublisher) Name() string {
|
func (p *DockerPublisher) Name() string {
|
||||||
return "docker"
|
return "docker"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Publish builds and pushes Docker images.
|
// Publish builds and pushes Docker images.
|
||||||
|
//
|
||||||
|
// err := pub.Publish(ctx, rel, pubCfg, relCfg, false)
|
||||||
func (p *DockerPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
func (p *DockerPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
||||||
// Validate docker CLI is available
|
|
||||||
if err := validateDockerCli(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse Docker-specific config from publisher config
|
// Parse Docker-specific config from publisher config
|
||||||
dockerCfg := p.parseConfig(pubCfg, relCfg, release.ProjectDir)
|
dockerCfg := p.parseConfig(release.FS, pubCfg, relCfg, release.ProjectDir)
|
||||||
|
|
||||||
// Validate Dockerfile exists
|
// Validate Dockerfile exists
|
||||||
if !release.FS.Exists(dockerCfg.Dockerfile) {
|
if !release.FS.Exists(dockerCfg.Dockerfile) {
|
||||||
return coreerr.E("docker.Publish", "Dockerfile not found: "+dockerCfg.Dockerfile, nil)
|
return coreerr.E("docker.Publish", "Dockerfile not found: "+dockerCfg.Dockerfile, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Validate docker CLI is available after local config checks.
|
||||||
|
dockerCommand, err := resolveDockerCli()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
if dryRun {
|
if dryRun {
|
||||||
return p.dryRunPublish(release, dockerCfg)
|
return p.dryRunPublish(release, dockerCfg)
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.executePublish(ctx, release, dockerCfg)
|
return p.executePublish(ctx, release, dockerCfg, dockerCommand)
|
||||||
}
|
}
|
||||||
|
|
||||||
// parseConfig extracts Docker-specific configuration.
|
// parseConfig extracts Docker-specific configuration.
|
||||||
func (p *DockerPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConfig, projectDir string) DockerConfig {
|
func (p *DockerPublisher) parseConfig(fs io.Medium, pubCfg PublisherConfig, relCfg ReleaseConfig, projectDir string) DockerConfig {
|
||||||
cfg := DockerConfig{
|
cfg := DockerConfig{
|
||||||
Registry: "ghcr.io",
|
Registry: "ghcr.io",
|
||||||
Image: "",
|
Image: "",
|
||||||
Dockerfile: filepath.Join(projectDir, "Dockerfile"),
|
Platforms: []string{"linux/amd64", "linux/arm64"},
|
||||||
Platforms: []string{"linux/amd64", "linux/arm64"},
|
Tags: []string{"latest", "{{.Version}}"},
|
||||||
Tags: []string{"latest", "{{.Version}}"},
|
BuildArgs: make(map[string]string),
|
||||||
BuildArgs: make(map[string]string),
|
}
|
||||||
|
|
||||||
|
if dockerfile := build.ResolveDockerfilePath(fs, projectDir); dockerfile != "" {
|
||||||
|
cfg.Dockerfile = dockerfile
|
||||||
|
} else {
|
||||||
|
cfg.Dockerfile = ax.Join(projectDir, "Dockerfile")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try to get image from repository config
|
// Try to get image from repository config
|
||||||
|
|
@ -88,10 +103,10 @@ func (p *DockerPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConf
|
||||||
cfg.Image = image
|
cfg.Image = image
|
||||||
}
|
}
|
||||||
if dockerfile, ok := ext["dockerfile"].(string); ok && dockerfile != "" {
|
if dockerfile, ok := ext["dockerfile"].(string); ok && dockerfile != "" {
|
||||||
if filepath.IsAbs(dockerfile) {
|
if ax.IsAbs(dockerfile) {
|
||||||
cfg.Dockerfile = dockerfile
|
cfg.Dockerfile = dockerfile
|
||||||
} else {
|
} else {
|
||||||
cfg.Dockerfile = filepath.Join(projectDir, dockerfile)
|
cfg.Dockerfile = ax.Join(projectDir, dockerfile)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if platforms, ok := ext["platforms"].([]any); ok && len(platforms) > 0 {
|
if platforms, ok := ext["platforms"].([]any); ok && len(platforms) > 0 {
|
||||||
|
|
@ -124,47 +139,47 @@ func (p *DockerPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseConf
|
||||||
|
|
||||||
// dryRunPublish shows what would be done without actually building.
|
// dryRunPublish shows what would be done without actually building.
|
||||||
func (p *DockerPublisher) dryRunPublish(release *Release, cfg DockerConfig) error {
|
func (p *DockerPublisher) dryRunPublish(release *Release, cfg DockerConfig) error {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== DRY RUN: Docker Build & Push ===")
|
publisherPrintln("=== DRY RUN: Docker Build & Push ===")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Printf("Version: %s\n", release.Version)
|
publisherPrint("Version: %s", release.Version)
|
||||||
fmt.Printf("Registry: %s\n", cfg.Registry)
|
publisherPrint("Registry: %s", cfg.Registry)
|
||||||
fmt.Printf("Image: %s\n", cfg.Image)
|
publisherPrint("Image: %s", cfg.Image)
|
||||||
fmt.Printf("Dockerfile: %s\n", cfg.Dockerfile)
|
publisherPrint("Dockerfile: %s", cfg.Dockerfile)
|
||||||
fmt.Printf("Platforms: %s\n", strings.Join(cfg.Platforms, ", "))
|
publisherPrint("Platforms: %s", core.Join(", ", cfg.Platforms...))
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
// Resolve tags
|
// Resolve tags
|
||||||
tags := p.resolveTags(cfg.Tags, release.Version)
|
tags := p.resolveTags(cfg.Tags, release.Version)
|
||||||
fmt.Println("Tags to be applied:")
|
publisherPrintln("Tags to be applied:")
|
||||||
for _, tag := range tags {
|
for _, tag := range tags {
|
||||||
fullTag := p.buildFullTag(cfg.Registry, cfg.Image, tag)
|
fullTag := p.buildFullTag(cfg.Registry, cfg.Image, tag)
|
||||||
fmt.Printf(" - %s\n", fullTag)
|
publisherPrint(" - %s", fullTag)
|
||||||
}
|
}
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
fmt.Println("Would execute command:")
|
publisherPrintln("Would execute command:")
|
||||||
args := p.buildBuildxArgs(cfg, tags, release.Version)
|
args := p.buildBuildxArgs(cfg, tags, release.Version)
|
||||||
fmt.Printf(" docker %s\n", strings.Join(args, " "))
|
publisherPrint(" docker %s", core.Join(" ", args...))
|
||||||
|
|
||||||
if len(cfg.BuildArgs) > 0 {
|
if len(cfg.BuildArgs) > 0 {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("Build arguments:")
|
publisherPrintln("Build arguments:")
|
||||||
for k, v := range cfg.BuildArgs {
|
for k, v := range cfg.BuildArgs {
|
||||||
fmt.Printf(" %s=%s\n", k, v)
|
publisherPrint(" %s=%s", k, v)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== END DRY RUN ===")
|
publisherPrintln("=== END DRY RUN ===")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// executePublish builds and pushes Docker images.
|
// executePublish builds and pushes Docker images.
|
||||||
func (p *DockerPublisher) executePublish(ctx context.Context, release *Release, cfg DockerConfig) error {
|
func (p *DockerPublisher) executePublish(ctx context.Context, release *Release, cfg DockerConfig, dockerCommand string) error {
|
||||||
// Ensure buildx is available and builder is set up
|
// Ensure buildx is available and builder is set up
|
||||||
if err := p.ensureBuildx(ctx); err != nil {
|
if err := p.ensureBuildx(ctx, dockerCommand); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -174,13 +189,8 @@ func (p *DockerPublisher) executePublish(ctx context.Context, release *Release,
|
||||||
// Build the docker buildx command
|
// Build the docker buildx command
|
||||||
args := p.buildBuildxArgs(cfg, tags, release.Version)
|
args := p.buildBuildxArgs(cfg, tags, release.Version)
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, "docker", args...)
|
publisherPrint("Building and pushing Docker image: %s", cfg.Image)
|
||||||
cmd.Dir = release.ProjectDir
|
if err := publisherRun(ctx, release.ProjectDir, nil, dockerCommand, args...); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
fmt.Printf("Building and pushing Docker image: %s\n", cfg.Image)
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("docker.Publish", "buildx build failed", err)
|
return coreerr.E("docker.Publish", "buildx build failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -192,9 +202,9 @@ func (p *DockerPublisher) resolveTags(tags []string, version string) []string {
|
||||||
resolved := make([]string, 0, len(tags))
|
resolved := make([]string, 0, len(tags))
|
||||||
for _, tag := range tags {
|
for _, tag := range tags {
|
||||||
// Replace {{.Version}} with actual version
|
// Replace {{.Version}} with actual version
|
||||||
resolvedTag := strings.ReplaceAll(tag, "{{.Version}}", version)
|
resolvedTag := core.Replace(tag, "{{.Version}}", version)
|
||||||
// Also support simpler {{Version}} syntax
|
// Also support simpler {{Version}} syntax
|
||||||
resolvedTag = strings.ReplaceAll(resolvedTag, "{{Version}}", version)
|
resolvedTag = core.Replace(resolvedTag, "{{Version}}", version)
|
||||||
resolved = append(resolved, resolvedTag)
|
resolved = append(resolved, resolvedTag)
|
||||||
}
|
}
|
||||||
return resolved
|
return resolved
|
||||||
|
|
@ -203,9 +213,9 @@ func (p *DockerPublisher) resolveTags(tags []string, version string) []string {
|
||||||
// buildFullTag builds the full image tag including registry.
|
// buildFullTag builds the full image tag including registry.
|
||||||
func (p *DockerPublisher) buildFullTag(registry, image, tag string) string {
|
func (p *DockerPublisher) buildFullTag(registry, image, tag string) string {
|
||||||
if registry != "" {
|
if registry != "" {
|
||||||
return fmt.Sprintf("%s/%s:%s", registry, image, tag)
|
return core.Sprintf("%s/%s:%s", registry, image, tag)
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("%s:%s", image, tag)
|
return core.Sprintf("%s:%s", image, tag)
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildBuildxArgs builds the arguments for docker buildx build command.
|
// buildBuildxArgs builds the arguments for docker buildx build command.
|
||||||
|
|
@ -214,7 +224,7 @@ func (p *DockerPublisher) buildBuildxArgs(cfg DockerConfig, tags []string, versi
|
||||||
|
|
||||||
// Multi-platform support
|
// Multi-platform support
|
||||||
if len(cfg.Platforms) > 0 {
|
if len(cfg.Platforms) > 0 {
|
||||||
args = append(args, "--platform", strings.Join(cfg.Platforms, ","))
|
args = append(args, "--platform", core.Join(",", cfg.Platforms...))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add all tags
|
// Add all tags
|
||||||
|
|
@ -230,13 +240,13 @@ func (p *DockerPublisher) buildBuildxArgs(cfg DockerConfig, tags []string, versi
|
||||||
// Build arguments
|
// Build arguments
|
||||||
for k, v := range cfg.BuildArgs {
|
for k, v := range cfg.BuildArgs {
|
||||||
// Expand version in build args
|
// Expand version in build args
|
||||||
expandedValue := strings.ReplaceAll(v, "{{.Version}}", version)
|
expandedValue := core.Replace(v, "{{.Version}}", version)
|
||||||
expandedValue = strings.ReplaceAll(expandedValue, "{{Version}}", version)
|
expandedValue = core.Replace(expandedValue, "{{Version}}", version)
|
||||||
args = append(args, "--build-arg", fmt.Sprintf("%s=%s", k, expandedValue))
|
args = append(args, "--build-arg", core.Sprintf("%s=%s", k, expandedValue))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Always add VERSION build arg
|
// Always add VERSION build arg
|
||||||
args = append(args, "--build-arg", fmt.Sprintf("VERSION=%s", version))
|
args = append(args, "--build-arg", core.Sprintf("VERSION=%s", version))
|
||||||
|
|
||||||
// Push the image
|
// Push the image
|
||||||
args = append(args, "--push")
|
args = append(args, "--push")
|
||||||
|
|
@ -248,21 +258,16 @@ func (p *DockerPublisher) buildBuildxArgs(cfg DockerConfig, tags []string, versi
|
||||||
}
|
}
|
||||||
|
|
||||||
// ensureBuildx ensures docker buildx is available and has a builder.
|
// ensureBuildx ensures docker buildx is available and has a builder.
|
||||||
func (p *DockerPublisher) ensureBuildx(ctx context.Context) error {
|
func (p *DockerPublisher) ensureBuildx(ctx context.Context, dockerCommand string) error {
|
||||||
// Check if buildx is available
|
// Check if buildx is available
|
||||||
cmd := exec.CommandContext(ctx, "docker", "buildx", "version")
|
if err := ax.Exec(ctx, dockerCommand, "buildx", "version"); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("docker.ensureBuildx", "buildx is not available. Install it from https://docs.docker.com/buildx/working-with-buildx/", nil)
|
return coreerr.E("docker.ensureBuildx", "buildx is not available. Install it from https://docs.docker.com/buildx/working-with-buildx/", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if we have a builder, create one if not
|
// Check if we have a builder, create one if not
|
||||||
cmd = exec.CommandContext(ctx, "docker", "buildx", "inspect", "--bootstrap")
|
if err := ax.Exec(ctx, dockerCommand, "buildx", "inspect", "--bootstrap"); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
// Try to create a builder
|
// Try to create a builder
|
||||||
cmd = exec.CommandContext(ctx, "docker", "buildx", "create", "--use", "--bootstrap")
|
if err := publisherRun(ctx, "", nil, dockerCommand, "buildx", "create", "--use", "--bootstrap"); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("docker.ensureBuildx", "failed to create buildx builder", err)
|
return coreerr.E("docker.ensureBuildx", "failed to create buildx builder", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -270,11 +275,28 @@ func (p *DockerPublisher) ensureBuildx(ctx context.Context) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// resolveDockerCli returns the executable path for the docker CLI.
|
||||||
|
func resolveDockerCli(paths ...string) (string, error) {
|
||||||
|
if len(paths) == 0 {
|
||||||
|
paths = []string{
|
||||||
|
"/usr/local/bin/docker",
|
||||||
|
"/opt/homebrew/bin/docker",
|
||||||
|
"/Applications/Docker.app/Contents/Resources/bin/docker",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
command, err := ax.ResolveCommand("docker", paths...)
|
||||||
|
if err != nil {
|
||||||
|
return "", coreerr.E("docker.resolveDockerCli", "docker CLI not found. Install it from https://docs.docker.com/get-docker/", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
// validateDockerCli checks if the docker CLI is available.
|
// validateDockerCli checks if the docker CLI is available.
|
||||||
func validateDockerCli() error {
|
func validateDockerCli() error {
|
||||||
cmd := exec.Command("docker", "--version")
|
if _, err := resolveDockerCli(); err != nil {
|
||||||
if err := cmd.Run(); err != nil {
|
return coreerr.E("docker.validateDockerCli", "docker CLI not found. Install it from https://docs.docker.com/get-docker/", err)
|
||||||
return coreerr.E("docker.validateDockerCli", "docker CLI not found. Install it from https://docs.docker.com/get-docker/", nil)
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,31 +1,29 @@
|
||||||
package publishers
|
package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestDockerPublisher_Name_Good(t *testing.T) {
|
func TestDocker_DockerPublisherName_Good(t *testing.T) {
|
||||||
t.Run("returns docker", func(t *testing.T) {
|
t.Run("returns docker", func(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
assert.Equal(t, "docker", p.Name())
|
assert.Equal(t, "docker", p.Name())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_ParseConfig_Good(t *testing.T) {
|
func TestDocker_DockerPublisherParseConfig_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
t.Run("uses defaults when no extended config", func(t *testing.T) {
|
||||||
pubCfg := PublisherConfig{Type: "docker"}
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "ghcr.io", cfg.Registry)
|
assert.Equal(t, "ghcr.io", cfg.Registry)
|
||||||
assert.Equal(t, "owner/repo", cfg.Image)
|
assert.Equal(t, "owner/repo", cfg.Image)
|
||||||
|
|
@ -49,7 +47,7 @@ func TestDockerPublisher_ParseConfig_Good(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "docker.io", cfg.Registry)
|
assert.Equal(t, "docker.io", cfg.Registry)
|
||||||
assert.Equal(t, "myorg/myimage", cfg.Image)
|
assert.Equal(t, "myorg/myimage", cfg.Image)
|
||||||
|
|
@ -67,13 +65,24 @@ func TestDockerPublisher_ParseConfig_Good(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "/absolute/path/Dockerfile", cfg.Dockerfile)
|
assert.Equal(t, "/absolute/path/Dockerfile", cfg.Dockerfile)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("detects Containerfile when Dockerfile is absent", func(t *testing.T) {
|
||||||
|
projectDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(projectDir, "Containerfile"), []byte("FROM alpine\n"), 0o644))
|
||||||
|
|
||||||
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, projectDir)
|
||||||
|
|
||||||
|
assert.Equal(t, ax.Join(projectDir, "Containerfile"), cfg.Dockerfile)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_ResolveTags_Good(t *testing.T) {
|
func TestDocker_DockerPublisherResolveTags_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("resolves version template", func(t *testing.T) {
|
t.Run("resolves version template", func(t *testing.T) {
|
||||||
|
|
@ -95,7 +104,7 @@ func TestDockerPublisher_ResolveTags_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_BuildFullTag_Good(t *testing.T) {
|
func TestDocker_DockerPublisherBuildFullTag_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
|
|
@ -136,7 +145,7 @@ func TestDockerPublisher_BuildFullTag_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_BuildBuildxArgs_Good(t *testing.T) {
|
func TestDocker_DockerPublisherBuildBuildxArgs_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("builds basic args", func(t *testing.T) {
|
t.Run("builds basic args", func(t *testing.T) {
|
||||||
|
|
@ -228,7 +237,7 @@ func TestDockerPublisher_BuildBuildxArgs_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_Publish_Bad(t *testing.T) {
|
func TestDocker_DockerPublisherPublish_Bad(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
@ -255,13 +264,13 @@ func TestDockerPublisher_Publish_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerConfig_Defaults_Good(t *testing.T) {
|
func TestDocker_DockerConfigDefaults_Good(t *testing.T) {
|
||||||
t.Run("has sensible defaults", func(t *testing.T) {
|
t.Run("has sensible defaults", func(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
pubCfg := PublisherConfig{Type: "docker"}
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
// Verify defaults
|
// Verify defaults
|
||||||
assert.Equal(t, "ghcr.io", cfg.Registry)
|
assert.Equal(t, "ghcr.io", cfg.Registry)
|
||||||
|
|
@ -273,14 +282,10 @@ func TestDockerConfig_Defaults_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
func TestDocker_DockerPublisherDryRunPublish_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("outputs expected dry run information", func(t *testing.T) {
|
t.Run("outputs expected dry run information", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
ProjectDir: "/project",
|
ProjectDir: "/project",
|
||||||
|
|
@ -295,15 +300,11 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
BuildArgs: make(map[string]string),
|
BuildArgs: make(map[string]string),
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
||||||
assert.Contains(t, output, "Version: v1.0.0")
|
assert.Contains(t, output, "Version: v1.0.0")
|
||||||
|
|
@ -320,10 +321,6 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("shows build args when present", func(t *testing.T) {
|
t.Run("shows build args when present", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
ProjectDir: "/project",
|
ProjectDir: "/project",
|
||||||
|
|
@ -341,15 +338,11 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "Build arguments:")
|
assert.Contains(t, output, "Build arguments:")
|
||||||
assert.Contains(t, output, "GO_VERSION=1.21")
|
assert.Contains(t, output, "GO_VERSION=1.21")
|
||||||
|
|
@ -357,10 +350,6 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("handles single platform", func(t *testing.T) {
|
t.Run("handles single platform", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v2.0.0",
|
Version: "v2.0.0",
|
||||||
ProjectDir: "/project",
|
ProjectDir: "/project",
|
||||||
|
|
@ -375,22 +364,18 @@ func TestDockerPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
BuildArgs: make(map[string]string),
|
BuildArgs: make(map[string]string),
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "Platforms: linux/amd64")
|
assert.Contains(t, output, "Platforms: linux/amd64")
|
||||||
assert.Contains(t, output, "ghcr.io/owner/repo:stable")
|
assert.Contains(t, output, "ghcr.io/owner/repo:stable")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
func TestDocker_DockerPublisherParseConfigEdgeCases_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("handles nil release config", func(t *testing.T) {
|
t.Run("handles nil release config", func(t *testing.T) {
|
||||||
|
|
@ -401,7 +386,7 @@ func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
cfg := p.parseConfig(pubCfg, nil, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, nil, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "custom/image", cfg.Image)
|
assert.Equal(t, "custom/image", cfg.Image)
|
||||||
assert.Equal(t, "ghcr.io", cfg.Registry)
|
assert.Equal(t, "ghcr.io", cfg.Registry)
|
||||||
|
|
@ -416,7 +401,7 @@ func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: ""}
|
relCfg := &mockReleaseConfig{repository: ""}
|
||||||
|
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "fallback/image", cfg.Image)
|
assert.Equal(t, "fallback/image", cfg.Image)
|
||||||
})
|
})
|
||||||
|
|
@ -430,7 +415,7 @@ func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "original/repo"}
|
relCfg := &mockReleaseConfig{repository: "original/repo"}
|
||||||
|
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "override/image", cfg.Image)
|
assert.Equal(t, "override/image", cfg.Image)
|
||||||
})
|
})
|
||||||
|
|
@ -447,7 +432,7 @@ func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
cfg := p.parseConfig(pubCfg, relCfg, "/project")
|
cfg := p.parseConfig(io.Local, pubCfg, relCfg, "/project")
|
||||||
|
|
||||||
assert.Equal(t, "value", cfg.BuildArgs["STRING_ARG"])
|
assert.Equal(t, "value", cfg.BuildArgs["STRING_ARG"])
|
||||||
_, exists := cfg.BuildArgs["INT_ARG"]
|
_, exists := cfg.BuildArgs["INT_ARG"]
|
||||||
|
|
@ -455,7 +440,7 @@ func TestDockerPublisher_ParseConfig_EdgeCases_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_ResolveTags_EdgeCases_Good(t *testing.T) {
|
func TestDocker_DockerPublisherResolveTagsEdgeCases_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("handles empty tags", func(t *testing.T) {
|
t.Run("handles empty tags", func(t *testing.T) {
|
||||||
|
|
@ -474,7 +459,7 @@ func TestDockerPublisher_ResolveTags_EdgeCases_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_BuildBuildxArgs_EdgeCases_Good(t *testing.T) {
|
func TestDocker_DockerPublisherBuildBuildxArgsEdgeCases_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("handles empty platforms", func(t *testing.T) {
|
t.Run("handles empty platforms", func(t *testing.T) {
|
||||||
|
|
@ -563,7 +548,7 @@ func TestDockerPublisher_BuildBuildxArgs_EdgeCases_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) {
|
func TestDocker_DockerPublisherPublishDryRun_Good(t *testing.T) {
|
||||||
// Skip if docker CLI is not available - dry run still validates docker is installed
|
// Skip if docker CLI is not available - dry run still validates docker is installed
|
||||||
if err := validateDockerCli(); err != nil {
|
if err := validateDockerCli(); err != nil {
|
||||||
t.Skip("skipping test: docker CLI not available")
|
t.Skip("skipping test: docker CLI not available")
|
||||||
|
|
@ -573,17 +558,8 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) {
|
||||||
|
|
||||||
t.Run("dry run succeeds with valid Dockerfile", func(t *testing.T) {
|
t.Run("dry run succeeds with valid Dockerfile", func(t *testing.T) {
|
||||||
// Create temp directory with Dockerfile
|
// Create temp directory with Dockerfile
|
||||||
tmpDir, err := os.MkdirTemp("", "docker-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "Dockerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
dockerfilePath := filepath.Join(tmpDir, "Dockerfile")
|
|
||||||
err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
|
|
@ -593,35 +569,20 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) {
|
||||||
pubCfg := PublisherConfig{Type: "docker"}
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("dry run uses custom dockerfile path", func(t *testing.T) {
|
t.Run("dry run uses custom dockerfile path", func(t *testing.T) {
|
||||||
// Create temp directory with custom Dockerfile
|
// Create temp directory with custom Dockerfile
|
||||||
tmpDir, err := os.MkdirTemp("", "docker-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
customDir := ax.Join(tmpDir, "docker")
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
require.NoError(t, ax.MkdirAll(customDir, 0o755))
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(customDir, "Dockerfile.prod"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
customDir := filepath.Join(tmpDir, "docker")
|
|
||||||
err = os.MkdirAll(customDir, 0755)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
dockerfilePath := filepath.Join(customDir, "Dockerfile.prod")
|
|
||||||
err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
|
|
@ -636,20 +597,16 @@ func TestDockerPublisher_Publish_DryRun_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "Dockerfile.prod")
|
assert.Contains(t, output, "Dockerfile.prod")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) {
|
func TestDocker_DockerPublisherPublishValidation_Bad(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("fails when Dockerfile not found with docker installed", func(t *testing.T) {
|
t.Run("fails when Dockerfile not found with docker installed", func(t *testing.T) {
|
||||||
|
|
@ -675,9 +632,12 @@ func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) {
|
||||||
t.Skip("skipping test: docker CLI is available")
|
t.Skip("skipping test: docker CLI is available")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "Dockerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
ProjectDir: "/tmp",
|
ProjectDir: tmpDir,
|
||||||
FS: io.Local,
|
FS: io.Local,
|
||||||
}
|
}
|
||||||
pubCfg := PublisherConfig{Type: "docker"}
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
|
|
@ -689,7 +649,7 @@ func TestDockerPublisher_Publish_Validation_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateDockerCli_Good(t *testing.T) {
|
func TestDocker_ValidateDockerCli_Good(t *testing.T) {
|
||||||
t.Run("returns nil when docker is installed", func(t *testing.T) {
|
t.Run("returns nil when docker is installed", func(t *testing.T) {
|
||||||
err := validateDockerCli()
|
err := validateDockerCli()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
@ -700,7 +660,25 @@ func TestValidateDockerCli_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) {
|
func TestDocker_ResolveDockerCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "docker")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveDockerCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_ResolveDockerCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
_, err := resolveDockerCli(ax.Join(t.TempDir(), "missing-docker"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "docker CLI not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocker_DockerPublisherPublishWithCLI_Good(t *testing.T) {
|
||||||
// These tests run only when docker CLI is available
|
// These tests run only when docker CLI is available
|
||||||
if err := validateDockerCli(); err != nil {
|
if err := validateDockerCli(); err != nil {
|
||||||
t.Skip("skipping test: docker CLI not available")
|
t.Skip("skipping test: docker CLI not available")
|
||||||
|
|
@ -709,17 +687,8 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) {
|
||||||
p := NewDockerPublisher()
|
p := NewDockerPublisher()
|
||||||
|
|
||||||
t.Run("dry run succeeds with all config options", func(t *testing.T) {
|
t.Run("dry run succeeds with all config options", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "docker-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "Dockerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
dockerfilePath := filepath.Join(tmpDir, "Dockerfile")
|
|
||||||
err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
|
|
@ -738,32 +707,19 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
assert.Contains(t, output, "DRY RUN: Docker Build & Push")
|
||||||
assert.Contains(t, output, "docker.io")
|
assert.Contains(t, output, "docker.io")
|
||||||
assert.Contains(t, output, "myorg/myapp")
|
assert.Contains(t, output, "myorg/myapp")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("dry run with nil relCfg uses extended image", func(t *testing.T) {
|
t.Run("dry run with nil relCfg uses extended image", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "docker-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
require.NoError(t, ax.WriteFile(ax.Join(tmpDir, "Dockerfile"), []byte("FROM alpine:latest\n"), 0o644))
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
dockerfilePath := filepath.Join(tmpDir, "Dockerfile")
|
|
||||||
err = os.WriteFile(dockerfilePath, []byte("FROM alpine:latest\n"), 0644)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
|
|
@ -777,22 +733,16 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
err = p.Publish(context.TODO(), release, pubCfg, nil, true) // nil relCfg
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.Publish(context.TODO(), release, pubCfg, nil, true)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "standalone/image")
|
assert.Contains(t, output, "standalone/image")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("fails with non-existent Dockerfile in non-dry-run", func(t *testing.T) {
|
t.Run("fails with non-existent Dockerfile in non-dry-run", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "docker-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
// Don't create a Dockerfile
|
// Don't create a Dockerfile
|
||||||
release := &Release{
|
release := &Release{
|
||||||
|
|
@ -803,7 +753,7 @@ func TestDockerPublisher_Publish_WithCLI_Good(t *testing.T) {
|
||||||
pubCfg := PublisherConfig{Type: "docker"}
|
pubCfg := PublisherConfig{Type: "docker"}
|
||||||
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
relCfg := &mockReleaseConfig{repository: "owner/repo"}
|
||||||
|
|
||||||
err = p.Publish(context.TODO(), release, pubCfg, relCfg, false)
|
err := p.Publish(context.TODO(), release, pubCfg, relCfg, false)
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
assert.Contains(t, err.Error(), "Dockerfile not found")
|
assert.Contains(t, err.Error(), "Dockerfile not found")
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -3,30 +3,34 @@ package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// GitHubPublisher publishes releases to GitHub using the gh CLI.
|
// GitHubPublisher publishes releases to GitHub using the gh CLI.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewGitHubPublisher()
|
||||||
type GitHubPublisher struct{}
|
type GitHubPublisher struct{}
|
||||||
|
|
||||||
// NewGitHubPublisher creates a new GitHub publisher.
|
// NewGitHubPublisher creates a new GitHub publisher.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewGitHubPublisher()
|
||||||
func NewGitHubPublisher() *GitHubPublisher {
|
func NewGitHubPublisher() *GitHubPublisher {
|
||||||
return &GitHubPublisher{}
|
return &GitHubPublisher{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the publisher's identifier.
|
// Name returns the publisher's identifier.
|
||||||
|
//
|
||||||
|
// name := pub.Name() // → "github"
|
||||||
func (p *GitHubPublisher) Name() string {
|
func (p *GitHubPublisher) Name() string {
|
||||||
return "github"
|
return "github"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Publish publishes the release to GitHub.
|
// Publish publishes the release to GitHub using the gh CLI.
|
||||||
// Uses the gh CLI for creating releases and uploading assets.
|
//
|
||||||
|
// err := pub.Publish(ctx, rel, pubCfg, relCfg, false) // dryRun=true to preview
|
||||||
func (p *GitHubPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
func (p *GitHubPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
||||||
// Determine repository
|
// Determine repository
|
||||||
repo := ""
|
repo := ""
|
||||||
|
|
@ -35,7 +39,7 @@ func (p *GitHubPublisher) Publish(ctx context.Context, release *Release, pubCfg
|
||||||
}
|
}
|
||||||
if repo == "" {
|
if repo == "" {
|
||||||
// Try to detect from git remote
|
// Try to detect from git remote
|
||||||
detectedRepo, err := detectRepository(release.ProjectDir)
|
detectedRepo, err := detectRepository(ctx, release.ProjectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("github.Publish", "could not determine repository", err)
|
return coreerr.E("github.Publish", "could not determine repository", err)
|
||||||
}
|
}
|
||||||
|
|
@ -46,50 +50,55 @@ func (p *GitHubPublisher) Publish(ctx context.Context, release *Release, pubCfg
|
||||||
return p.dryRunPublish(release, pubCfg, repo)
|
return p.dryRunPublish(release, pubCfg, repo)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate gh CLI is available and authenticated for actual publish
|
ghCommand, err := resolveGhCli()
|
||||||
if err := validateGhCli(); err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
return p.executePublish(ctx, release, pubCfg, repo)
|
// Validate gh CLI is available and authenticated for actual publish
|
||||||
|
if err := validateGhAuth(ctx, ghCommand); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return p.executePublish(ctx, release, pubCfg, repo, ghCommand)
|
||||||
}
|
}
|
||||||
|
|
||||||
// dryRunPublish shows what would be done without actually publishing.
|
// dryRunPublish shows what would be done without actually publishing.
|
||||||
func (p *GitHubPublisher) dryRunPublish(release *Release, pubCfg PublisherConfig, repo string) error {
|
func (p *GitHubPublisher) dryRunPublish(release *Release, pubCfg PublisherConfig, repo string) error {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== DRY RUN: GitHub Release ===")
|
publisherPrintln("=== DRY RUN: GitHub Release ===")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Printf("Repository: %s\n", repo)
|
publisherPrint("Repository: %s", repo)
|
||||||
fmt.Printf("Version: %s\n", release.Version)
|
publisherPrint("Version: %s", release.Version)
|
||||||
fmt.Printf("Draft: %t\n", pubCfg.Draft)
|
publisherPrint("Draft: %t", pubCfg.Draft)
|
||||||
fmt.Printf("Prerelease: %t\n", pubCfg.Prerelease)
|
publisherPrint("Prerelease: %t", pubCfg.Prerelease)
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
fmt.Println("Would create release with command:")
|
publisherPrintln("Would create release with command:")
|
||||||
args := p.buildCreateArgs(release, pubCfg, repo)
|
args := p.buildCreateArgs(release, pubCfg, repo)
|
||||||
fmt.Printf(" gh %s\n", strings.Join(args, " "))
|
publisherPrint(" gh %s", core.Join(" ", args...))
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
if len(release.Artifacts) > 0 {
|
if len(release.Artifacts) > 0 {
|
||||||
fmt.Println("Would upload artifacts:")
|
publisherPrintln("Would upload artifacts:")
|
||||||
for _, artifact := range release.Artifacts {
|
for _, artifact := range release.Artifacts {
|
||||||
fmt.Printf(" - %s\n", filepath.Base(artifact.Path))
|
publisherPrint(" - %s", ax.Base(artifact.Path))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("Changelog:")
|
publisherPrintln("Changelog:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(release.Changelog)
|
publisherPrintln(release.Changelog)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== END DRY RUN ===")
|
publisherPrintln("=== END DRY RUN ===")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// executePublish actually creates the release and uploads artifacts.
|
// executePublish actually creates the release and uploads artifacts.
|
||||||
func (p *GitHubPublisher) executePublish(ctx context.Context, release *Release, pubCfg PublisherConfig, repo string) error {
|
func (p *GitHubPublisher) executePublish(ctx context.Context, release *Release, pubCfg PublisherConfig, repo, ghCommand string) error {
|
||||||
// Build the release create command
|
// Build the release create command
|
||||||
args := p.buildCreateArgs(release, pubCfg, repo)
|
args := p.buildCreateArgs(release, pubCfg, repo)
|
||||||
|
|
||||||
|
|
@ -99,12 +108,7 @@ func (p *GitHubPublisher) executePublish(ctx context.Context, release *Release,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute gh release create
|
// Execute gh release create
|
||||||
cmd := exec.CommandContext(ctx, "gh", args...)
|
if err := publisherRun(ctx, release.ProjectDir, nil, ghCommand, args...); err != nil {
|
||||||
cmd.Dir = release.ProjectDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("github.Publish", "gh release create failed", err)
|
return coreerr.E("github.Publish", "gh release create failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -143,22 +147,39 @@ func (p *GitHubPublisher) buildCreateArgs(release *Release, pubCfg PublisherConf
|
||||||
return args
|
return args
|
||||||
}
|
}
|
||||||
|
|
||||||
// validateGhCli checks if the gh CLI is available and authenticated.
|
func resolveGhCli(paths ...string) (string, error) {
|
||||||
func validateGhCli() error {
|
if len(paths) == 0 {
|
||||||
// Check if gh is installed
|
paths = []string{
|
||||||
cmd := exec.Command("gh", "--version")
|
"/usr/local/bin/gh",
|
||||||
if err := cmd.Run(); err != nil {
|
"/opt/homebrew/bin/gh",
|
||||||
return coreerr.E("github.validateGhCli", "gh CLI not found. Install it from https://cli.github.com", err)
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if authenticated
|
command, err := ax.ResolveCommand("gh", paths...)
|
||||||
cmd = exec.Command("gh", "auth", "status")
|
if err != nil {
|
||||||
output, err := cmd.CombinedOutput()
|
return "", coreerr.E("github.resolveGhCli", "gh CLI not found. Install it from https://cli.github.com", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return command, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateGhCli checks if the gh CLI is available and authenticated.
|
||||||
|
func validateGhCli(ctx context.Context) error {
|
||||||
|
ghCommand, err := resolveGhCli()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return validateGhAuth(ctx, ghCommand)
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateGhAuth(ctx context.Context, ghCommand string) error {
|
||||||
|
output, err := ax.CombinedOutput(ctx, "", nil, ghCommand, "auth", "status")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("github.validateGhCli", "not authenticated with gh CLI. Run 'gh auth login' first", err)
|
return coreerr.E("github.validateGhCli", "not authenticated with gh CLI. Run 'gh auth login' first", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !strings.Contains(string(output), "Logged in") {
|
if !core.Contains(output, "Logged in") {
|
||||||
return coreerr.E("github.validateGhCli", "not authenticated with gh CLI. Run 'gh auth login' first", nil)
|
return coreerr.E("github.validateGhCli", "not authenticated with gh CLI. Run 'gh auth login' first", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -166,16 +187,13 @@ func validateGhCli() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// detectRepository detects the GitHub repository from git remote.
|
// detectRepository detects the GitHub repository from git remote.
|
||||||
func detectRepository(dir string) (string, error) {
|
func detectRepository(ctx context.Context, dir string) (string, error) {
|
||||||
cmd := exec.Command("git", "remote", "get-url", "origin")
|
output, err := ax.RunDir(ctx, dir, "git", "remote", "get-url", "origin")
|
||||||
cmd.Dir = dir
|
|
||||||
output, err := cmd.Output()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("github.detectRepository", "failed to get git remote", err)
|
return "", coreerr.E("github.detectRepository", "failed to get git remote", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
url := strings.TrimSpace(string(output))
|
return parseGitHubRepo(core.Trim(output))
|
||||||
return parseGitHubRepo(url)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// parseGitHubRepo extracts owner/repo from a GitHub URL.
|
// parseGitHubRepo extracts owner/repo from a GitHub URL.
|
||||||
|
|
@ -185,16 +203,16 @@ func detectRepository(dir string) (string, error) {
|
||||||
// - https://github.com/owner/repo
|
// - https://github.com/owner/repo
|
||||||
func parseGitHubRepo(url string) (string, error) {
|
func parseGitHubRepo(url string) (string, error) {
|
||||||
// SSH format
|
// SSH format
|
||||||
if strings.HasPrefix(url, "git@github.com:") {
|
if core.HasPrefix(url, "git@github.com:") {
|
||||||
repo := strings.TrimPrefix(url, "git@github.com:")
|
repo := core.TrimPrefix(url, "git@github.com:")
|
||||||
repo = strings.TrimSuffix(repo, ".git")
|
repo = core.TrimSuffix(repo, ".git")
|
||||||
return repo, nil
|
return repo, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// HTTPS format
|
// HTTPS format
|
||||||
if strings.HasPrefix(url, "https://github.com/") {
|
if core.HasPrefix(url, "https://github.com/") {
|
||||||
repo := strings.TrimPrefix(url, "https://github.com/")
|
repo := core.TrimPrefix(url, "https://github.com/")
|
||||||
repo = strings.TrimSuffix(repo, ".git")
|
repo = core.TrimSuffix(repo, ".git")
|
||||||
return repo, nil
|
return repo, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -203,12 +221,15 @@ func parseGitHubRepo(url string) (string, error) {
|
||||||
|
|
||||||
// UploadArtifact uploads a single artifact to an existing release.
|
// UploadArtifact uploads a single artifact to an existing release.
|
||||||
// This can be used to add artifacts to a release after creation.
|
// This can be used to add artifacts to a release after creation.
|
||||||
|
//
|
||||||
|
// err := publishers.UploadArtifact(ctx, "host-uk/core-build", "v1.2.3", "dist/core-build_v1.2.3_linux_amd64.tar.gz")
|
||||||
func UploadArtifact(ctx context.Context, repo, version, artifactPath string) error {
|
func UploadArtifact(ctx context.Context, repo, version, artifactPath string) error {
|
||||||
cmd := exec.CommandContext(ctx, "gh", "release", "upload", version, artifactPath, "--repo", repo)
|
ghCommand, err := resolveGhCli()
|
||||||
cmd.Stdout = os.Stdout
|
if err != nil {
|
||||||
cmd.Stderr = os.Stderr
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
if err := publisherRun(ctx, "", nil, ghCommand, "release", "upload", version, artifactPath, "--repo", repo); err != nil {
|
||||||
return coreerr.E("github.UploadArtifact", "failed to upload "+artifactPath, err)
|
return coreerr.E("github.UploadArtifact", "failed to upload "+artifactPath, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -216,12 +237,15 @@ func UploadArtifact(ctx context.Context, repo, version, artifactPath string) err
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteRelease deletes a release by tag name.
|
// DeleteRelease deletes a release by tag name.
|
||||||
|
//
|
||||||
|
// err := publishers.DeleteRelease(ctx, "host-uk/core-build", "v1.2.3")
|
||||||
func DeleteRelease(ctx context.Context, repo, version string) error {
|
func DeleteRelease(ctx context.Context, repo, version string) error {
|
||||||
cmd := exec.CommandContext(ctx, "gh", "release", "delete", version, "--repo", repo, "--yes")
|
ghCommand, err := resolveGhCli()
|
||||||
cmd.Stdout = os.Stdout
|
if err != nil {
|
||||||
cmd.Stderr = os.Stderr
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
if err := cmd.Run(); err != nil {
|
if err := publisherRun(ctx, "", nil, ghCommand, "release", "delete", version, "--repo", repo, "--yes"); err != nil {
|
||||||
return coreerr.E("github.DeleteRelease", "failed to delete "+version, err)
|
return coreerr.E("github.DeleteRelease", "failed to delete "+version, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -229,7 +253,13 @@ func DeleteRelease(ctx context.Context, repo, version string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// ReleaseExists checks if a release exists for the given version.
|
// ReleaseExists checks if a release exists for the given version.
|
||||||
|
//
|
||||||
|
// exists := publishers.ReleaseExists(ctx, "host-uk/core-build", "v1.2.3")
|
||||||
func ReleaseExists(ctx context.Context, repo, version string) bool {
|
func ReleaseExists(ctx context.Context, repo, version string) bool {
|
||||||
cmd := exec.CommandContext(ctx, "gh", "release", "view", version, "--repo", repo)
|
ghCommand, err := resolveGhCli()
|
||||||
return cmd.Run() == nil
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return ax.Exec(ctx, ghCommand, "release", "view", version, "--repo", repo) == nil
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,20 +1,18 @@
|
||||||
package publishers
|
package publishers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
"dappco.re/go/core/io"
|
"dappco.re/go/core/io"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseGitHubRepo_Good(t *testing.T) {
|
func TestGitHub_ParseGitHubRepo_Good(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
input string
|
input string
|
||||||
|
|
@ -51,7 +49,7 @@ func TestParseGitHubRepo_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseGitHubRepo_Bad(t *testing.T) {
|
func TestGitHub_ParseGitHubRepo_Bad(t *testing.T) {
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
input string
|
input string
|
||||||
|
|
@ -82,14 +80,14 @@ func TestParseGitHubRepo_Bad(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGitHubPublisher_Name_Good(t *testing.T) {
|
func TestGitHub_GitHubPublisherName_Good(t *testing.T) {
|
||||||
t.Run("returns github", func(t *testing.T) {
|
t.Run("returns github", func(t *testing.T) {
|
||||||
p := NewGitHubPublisher()
|
p := NewGitHubPublisher()
|
||||||
assert.Equal(t, "github", p.Name())
|
assert.Equal(t, "github", p.Name())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNewRelease_Good(t *testing.T) {
|
func TestGitHub_NewRelease_Good(t *testing.T) {
|
||||||
t.Run("creates release struct", func(t *testing.T) {
|
t.Run("creates release struct", func(t *testing.T) {
|
||||||
r := NewRelease("v1.0.0", nil, "changelog", "/project", io.Local)
|
r := NewRelease("v1.0.0", nil, "changelog", "/project", io.Local)
|
||||||
assert.Equal(t, "v1.0.0", r.Version)
|
assert.Equal(t, "v1.0.0", r.Version)
|
||||||
|
|
@ -99,7 +97,7 @@ func TestNewRelease_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNewPublisherConfig_Good(t *testing.T) {
|
func TestGitHub_NewPublisherConfig_Good(t *testing.T) {
|
||||||
t.Run("creates config struct", func(t *testing.T) {
|
t.Run("creates config struct", func(t *testing.T) {
|
||||||
cfg := NewPublisherConfig("github", true, false, nil)
|
cfg := NewPublisherConfig("github", true, false, nil)
|
||||||
assert.Equal(t, "github", cfg.Type)
|
assert.Equal(t, "github", cfg.Type)
|
||||||
|
|
@ -116,7 +114,7 @@ func TestNewPublisherConfig_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildCreateArgs_Good(t *testing.T) {
|
func TestGitHub_BuildCreateArgs_Good(t *testing.T) {
|
||||||
p := NewGitHubPublisher()
|
p := NewGitHubPublisher()
|
||||||
|
|
||||||
t.Run("basic args", func(t *testing.T) {
|
t.Run("basic args", func(t *testing.T) {
|
||||||
|
|
@ -221,14 +219,10 @@ func TestBuildCreateArgs_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
func TestGitHub_GitHubPublisherDryRunPublish_Good(t *testing.T) {
|
||||||
p := NewGitHubPublisher()
|
p := NewGitHubPublisher()
|
||||||
|
|
||||||
t.Run("outputs expected dry run information", func(t *testing.T) {
|
t.Run("outputs expected dry run information", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
Changelog: "## Changes\n\n- Feature A\n- Bug fix B",
|
Changelog: "## Changes\n\n- Feature A\n- Bug fix B",
|
||||||
|
|
@ -241,15 +235,11 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
Prerelease: false,
|
Prerelease: false,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg, "owner/repo")
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg, "owner/repo")
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "DRY RUN: GitHub Release")
|
assert.Contains(t, output, "DRY RUN: GitHub Release")
|
||||||
assert.Contains(t, output, "Repository: owner/repo")
|
assert.Contains(t, output, "Repository: owner/repo")
|
||||||
|
|
@ -264,10 +254,6 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("shows artifacts when present", func(t *testing.T) {
|
t.Run("shows artifacts when present", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
Changelog: "Changes",
|
Changelog: "Changes",
|
||||||
|
|
@ -280,15 +266,11 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
}
|
}
|
||||||
cfg := PublisherConfig{Type: "github"}
|
cfg := PublisherConfig{Type: "github"}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg, "owner/repo")
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg, "owner/repo")
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "Would upload artifacts:")
|
assert.Contains(t, output, "Would upload artifacts:")
|
||||||
assert.Contains(t, output, "myapp-darwin-amd64.tar.gz")
|
assert.Contains(t, output, "myapp-darwin-amd64.tar.gz")
|
||||||
|
|
@ -296,10 +278,6 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("shows draft and prerelease flags", func(t *testing.T) {
|
t.Run("shows draft and prerelease flags", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0-beta",
|
Version: "v1.0.0-beta",
|
||||||
Changelog: "Beta release",
|
Changelog: "Beta release",
|
||||||
|
|
@ -312,15 +290,11 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
Prerelease: true,
|
Prerelease: true,
|
||||||
}
|
}
|
||||||
|
|
||||||
err := p.dryRunPublish(release, cfg, "owner/repo")
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.dryRunPublish(release, cfg, "owner/repo")
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
|
|
||||||
assert.Contains(t, output, "Draft: true")
|
assert.Contains(t, output, "Draft: true")
|
||||||
assert.Contains(t, output, "Prerelease: true")
|
assert.Contains(t, output, "Prerelease: true")
|
||||||
|
|
@ -329,14 +303,10 @@ func TestGitHubPublisher_DryRunPublish_Good(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGitHubPublisher_Publish_Good(t *testing.T) {
|
func TestGitHub_GitHubPublisherPublish_Good(t *testing.T) {
|
||||||
p := NewGitHubPublisher()
|
p := NewGitHubPublisher()
|
||||||
|
|
||||||
t.Run("dry run uses repository from config", func(t *testing.T) {
|
t.Run("dry run uses repository from config", func(t *testing.T) {
|
||||||
oldStdout := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
Changelog: "Changes",
|
Changelog: "Changes",
|
||||||
|
|
@ -347,20 +317,16 @@ func TestGitHubPublisher_Publish_Good(t *testing.T) {
|
||||||
relCfg := &mockReleaseConfig{repository: "custom/repo"}
|
relCfg := &mockReleaseConfig{repository: "custom/repo"}
|
||||||
|
|
||||||
// Dry run should succeed without needing gh CLI
|
// Dry run should succeed without needing gh CLI
|
||||||
err := p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
var err error
|
||||||
|
output := capturePublisherOutput(t, func() {
|
||||||
_ = w.Close()
|
err = p.Publish(context.TODO(), release, pubCfg, relCfg, true)
|
||||||
var buf bytes.Buffer
|
})
|
||||||
_, _ = buf.ReadFrom(r)
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
output := buf.String()
|
|
||||||
assert.Contains(t, output, "Repository: custom/repo")
|
assert.Contains(t, output, "Repository: custom/repo")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGitHubPublisher_Publish_Bad(t *testing.T) {
|
func TestGitHub_GitHubPublisherPublish_Bad(t *testing.T) {
|
||||||
if testing.Short() {
|
if testing.Short() {
|
||||||
t.Skip("skipping integration test in short mode")
|
t.Skip("skipping integration test in short mode")
|
||||||
}
|
}
|
||||||
|
|
@ -387,9 +353,7 @@ func TestGitHubPublisher_Publish_Bad(t *testing.T) {
|
||||||
|
|
||||||
t.Run("fails when repository cannot be detected", func(t *testing.T) {
|
t.Run("fails when repository cannot be detected", func(t *testing.T) {
|
||||||
// Create a temp directory that is NOT a git repo
|
// Create a temp directory that is NOT a git repo
|
||||||
tmpDir, err := os.MkdirTemp("", "github-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
release := &Release{
|
release := &Release{
|
||||||
Version: "v1.0.0",
|
Version: "v1.0.0",
|
||||||
|
|
@ -400,7 +364,7 @@ func TestGitHubPublisher_Publish_Bad(t *testing.T) {
|
||||||
pubCfg := PublisherConfig{Type: "github"}
|
pubCfg := PublisherConfig{Type: "github"}
|
||||||
relCfg := &mockReleaseConfig{repository: ""} // Empty repository
|
relCfg := &mockReleaseConfig{repository: ""} // Empty repository
|
||||||
|
|
||||||
err = p.Publish(context.Background(), release, pubCfg, relCfg, true)
|
err := p.Publish(context.Background(), release, pubCfg, relCfg, true)
|
||||||
|
|
||||||
// Should fail because detectRepository will fail on non-git dir
|
// Should fail because detectRepository will fail on non-git dir
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
|
|
@ -408,103 +372,124 @@ func TestGitHubPublisher_Publish_Bad(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDetectRepository_Good(t *testing.T) {
|
func TestGitHub_DetectRepository_Good(t *testing.T) {
|
||||||
t.Run("detects repository from git remote", func(t *testing.T) {
|
t.Run("detects repository from git remote", func(t *testing.T) {
|
||||||
// Create a temp git repo
|
// Create a temp git repo
|
||||||
tmpDir, err := os.MkdirTemp("", "git-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
// Initialize git repo and set remote
|
// Initialize git repo and set remote
|
||||||
cmd := exec.Command("git", "init")
|
runPublisherCommand(t, tmpDir, "git", "init")
|
||||||
cmd.Dir = tmpDir
|
runPublisherCommand(t, tmpDir, "git", "remote", "add", "origin", "git@github.com:test-owner/test-repo.git")
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
cmd = exec.Command("git", "remote", "add", "origin", "git@github.com:test-owner/test-repo.git")
|
repo, err := detectRepository(context.Background(), tmpDir)
|
||||||
cmd.Dir = tmpDir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
repo, err := detectRepository(tmpDir)
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "test-owner/test-repo", repo)
|
assert.Equal(t, "test-owner/test-repo", repo)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("detects repository from HTTPS remote", func(t *testing.T) {
|
t.Run("detects repository from HTTPS remote", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "git-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
runPublisherCommand(t, tmpDir, "git", "init")
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
runPublisherCommand(t, tmpDir, "git", "remote", "add", "origin", "https://github.com/another-owner/another-repo.git")
|
||||||
|
|
||||||
cmd := exec.Command("git", "init")
|
repo, err := detectRepository(context.Background(), tmpDir)
|
||||||
cmd.Dir = tmpDir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
cmd = exec.Command("git", "remote", "add", "origin", "https://github.com/another-owner/another-repo.git")
|
|
||||||
cmd.Dir = tmpDir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
repo, err := detectRepository(tmpDir)
|
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "another-owner/another-repo", repo)
|
assert.Equal(t, "another-owner/another-repo", repo)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestDetectRepository_Bad(t *testing.T) {
|
func TestGitHub_DetectRepository_Bad(t *testing.T) {
|
||||||
t.Run("fails when not a git repository", func(t *testing.T) {
|
t.Run("fails when not a git repository", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "no-git-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
|
||||||
|
|
||||||
_, err = detectRepository(tmpDir)
|
_, err := detectRepository(context.Background(), tmpDir)
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
assert.Contains(t, err.Error(), "failed to get git remote")
|
assert.Contains(t, err.Error(), "failed to get git remote")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("fails when directory does not exist", func(t *testing.T) {
|
t.Run("fails when directory does not exist", func(t *testing.T) {
|
||||||
_, err := detectRepository("/nonexistent/directory/that/does/not/exist")
|
_, err := detectRepository(context.Background(), "/nonexistent/directory/that/does/not/exist")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("fails when remote is not GitHub", func(t *testing.T) {
|
t.Run("fails when remote is not GitHub", func(t *testing.T) {
|
||||||
tmpDir, err := os.MkdirTemp("", "git-test")
|
tmpDir := t.TempDir()
|
||||||
require.NoError(t, err)
|
runPublisherCommand(t, tmpDir, "git", "init")
|
||||||
defer func() { _ = os.RemoveAll(tmpDir) }()
|
runPublisherCommand(t, tmpDir, "git", "remote", "add", "origin", "git@gitlab.com:owner/repo.git")
|
||||||
|
|
||||||
cmd := exec.Command("git", "init")
|
_, err := detectRepository(context.Background(), tmpDir)
|
||||||
cmd.Dir = tmpDir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
cmd = exec.Command("git", "remote", "add", "origin", "git@gitlab.com:owner/repo.git")
|
|
||||||
cmd.Dir = tmpDir
|
|
||||||
require.NoError(t, cmd.Run())
|
|
||||||
|
|
||||||
_, err = detectRepository(tmpDir)
|
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
assert.Contains(t, err.Error(), "not a GitHub URL")
|
assert.Contains(t, err.Error(), "not a GitHub URL")
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("respects cancelled context", func(t *testing.T) {
|
||||||
|
commandDir := t.TempDir()
|
||||||
|
commandPath := ax.Join(commandDir, "git")
|
||||||
|
require.NoError(t, ax.WriteFile(commandPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", commandDir)
|
||||||
|
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
cancel()
|
||||||
|
|
||||||
|
_, err := detectRepository(ctx, t.TempDir())
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "context canceled")
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestValidateGhCli_Bad(t *testing.T) {
|
func TestGitHub_ValidateGhCli_Bad(t *testing.T) {
|
||||||
// This test verifies the error messages from validateGhCli
|
// This test verifies the error messages from validateGhCli
|
||||||
// We can't easily mock exec.Command, but we can at least
|
// We can't easily mock exec.Command, but we can at least
|
||||||
// verify the function exists and returns expected error types
|
// verify the function exists and returns expected error types
|
||||||
t.Run("returns error when gh not installed", func(t *testing.T) {
|
t.Run("returns error when gh not installed", func(t *testing.T) {
|
||||||
// We can't force gh to not be installed, but we can verify
|
// We can't force gh to not be installed, but we can verify
|
||||||
// the function signature works correctly
|
// the function signature works correctly
|
||||||
err := validateGhCli()
|
err := validateGhCli(context.Background())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Either gh is not installed or not authenticated
|
// Either gh is not installed or not authenticated
|
||||||
assert.True(t,
|
assert.True(t,
|
||||||
strings.Contains(err.Error(), "gh CLI not found") ||
|
core.Contains(err.Error(), "gh CLI not found") ||
|
||||||
strings.Contains(err.Error(), "not authenticated"),
|
core.Contains(err.Error(), "not authenticated"),
|
||||||
"unexpected error: %s", err.Error())
|
"unexpected error: %s", err.Error())
|
||||||
}
|
}
|
||||||
// If err is nil, gh is installed and authenticated - that's OK too
|
// If err is nil, gh is installed and authenticated - that's OK too
|
||||||
})
|
})
|
||||||
|
|
||||||
|
t.Run("respects cancelled context during auth check", func(t *testing.T) {
|
||||||
|
commandDir := t.TempDir()
|
||||||
|
commandPath := ax.Join(commandDir, "gh")
|
||||||
|
require.NoError(t, ax.WriteFile(commandPath, []byte("#!/bin/sh\necho 'Logged in'\n"), 0o755))
|
||||||
|
t.Setenv("PATH", commandDir)
|
||||||
|
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
cancel()
|
||||||
|
|
||||||
|
err := validateGhCli(ctx)
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "context canceled")
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGitHubPublisher_ExecutePublish_Good(t *testing.T) {
|
func TestGitHub_ResolveGhCli_Good(t *testing.T) {
|
||||||
|
fallbackDir := t.TempDir()
|
||||||
|
fallbackPath := ax.Join(fallbackDir, "gh")
|
||||||
|
require.NoError(t, ax.WriteFile(fallbackPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
|
||||||
|
command, err := resolveGhCli(fallbackPath)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, fallbackPath, command)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGitHub_ResolveGhCli_Bad(t *testing.T) {
|
||||||
|
t.Setenv("PATH", "")
|
||||||
|
_, err := resolveGhCli(ax.Join(t.TempDir(), "missing-gh"))
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Contains(t, err.Error(), "gh CLI not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestGitHub_GitHubPublisherExecutePublish_Good(t *testing.T) {
|
||||||
// These tests run only when gh CLI is available and authenticated
|
// These tests run only when gh CLI is available and authenticated
|
||||||
if err := validateGhCli(); err != nil {
|
if err := validateGhCli(context.Background()); err != nil {
|
||||||
t.Skip("skipping test: gh CLI not available or not authenticated")
|
t.Skip("skipping test: gh CLI not available or not authenticated")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -530,14 +515,17 @@ func TestGitHubPublisher_ExecutePublish_Good(t *testing.T) {
|
||||||
|
|
||||||
// This will fail because the artifact doesn't exist, but it proves
|
// This will fail because the artifact doesn't exist, but it proves
|
||||||
// the code path runs
|
// the code path runs
|
||||||
err := p.executePublish(context.Background(), release, cfg, "test-owner/test-repo-nonexistent")
|
command, err := resolveGhCli()
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
err = p.executePublish(context.Background(), release, cfg, "test-owner/test-repo-nonexistent", command)
|
||||||
assert.Error(t, err) // Expected to fail
|
assert.Error(t, err) // Expected to fail
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestReleaseExists_Good(t *testing.T) {
|
func TestGitHub_ReleaseExists_Good(t *testing.T) {
|
||||||
// These tests run only when gh CLI is available
|
// These tests run only when gh CLI is available
|
||||||
if err := validateGhCli(); err != nil {
|
if err := validateGhCli(context.Background()); err != nil {
|
||||||
t.Skip("skipping test: gh CLI not available or not authenticated")
|
t.Skip("skipping test: gh CLI not available or not authenticated")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,13 +5,10 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"embed"
|
"embed"
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"text/template"
|
"text/template"
|
||||||
|
|
||||||
|
"dappco.re/go/core"
|
||||||
|
"dappco.re/go/core/build/internal/ax"
|
||||||
"dappco.re/go/core/build/pkg/build"
|
"dappco.re/go/core/build/pkg/build"
|
||||||
coreio "dappco.re/go/core/io"
|
coreio "dappco.re/go/core/io"
|
||||||
coreerr "dappco.re/go/core/log"
|
coreerr "dappco.re/go/core/log"
|
||||||
|
|
@ -21,6 +18,8 @@ import (
|
||||||
var homebrewTemplates embed.FS
|
var homebrewTemplates embed.FS
|
||||||
|
|
||||||
// HomebrewConfig holds Homebrew-specific configuration.
|
// HomebrewConfig holds Homebrew-specific configuration.
|
||||||
|
//
|
||||||
|
// cfg := publishers.HomebrewConfig{Tap: "host-uk/homebrew-tap", Formula: "core-build"}
|
||||||
type HomebrewConfig struct {
|
type HomebrewConfig struct {
|
||||||
// Tap is the Homebrew tap repository (e.g., "host-uk/homebrew-tap").
|
// Tap is the Homebrew tap repository (e.g., "host-uk/homebrew-tap").
|
||||||
Tap string
|
Tap string
|
||||||
|
|
@ -31,6 +30,8 @@ type HomebrewConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// OfficialConfig holds configuration for generating files for official repo PRs.
|
// OfficialConfig holds configuration for generating files for official repo PRs.
|
||||||
|
//
|
||||||
|
// cfg.Official = &publishers.OfficialConfig{Enabled: true, Output: "dist/homebrew"}
|
||||||
type OfficialConfig struct {
|
type OfficialConfig struct {
|
||||||
// Enabled determines whether to generate files for official repos.
|
// Enabled determines whether to generate files for official repos.
|
||||||
Enabled bool
|
Enabled bool
|
||||||
|
|
@ -39,19 +40,27 @@ type OfficialConfig struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// HomebrewPublisher publishes releases to Homebrew.
|
// HomebrewPublisher publishes releases to Homebrew.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewHomebrewPublisher()
|
||||||
type HomebrewPublisher struct{}
|
type HomebrewPublisher struct{}
|
||||||
|
|
||||||
// NewHomebrewPublisher creates a new Homebrew publisher.
|
// NewHomebrewPublisher creates a new Homebrew publisher.
|
||||||
|
//
|
||||||
|
// pub := publishers.NewHomebrewPublisher()
|
||||||
func NewHomebrewPublisher() *HomebrewPublisher {
|
func NewHomebrewPublisher() *HomebrewPublisher {
|
||||||
return &HomebrewPublisher{}
|
return &HomebrewPublisher{}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Name returns the publisher's identifier.
|
// Name returns the publisher's identifier.
|
||||||
|
//
|
||||||
|
// name := pub.Name() // → "homebrew"
|
||||||
func (p *HomebrewPublisher) Name() string {
|
func (p *HomebrewPublisher) Name() string {
|
||||||
return "homebrew"
|
return "homebrew"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Publish publishes the release to Homebrew.
|
// Publish publishes the release to Homebrew.
|
||||||
|
//
|
||||||
|
// err := pub.Publish(ctx, rel, pubCfg, relCfg, false)
|
||||||
func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCfg PublisherConfig, relCfg ReleaseConfig, dryRun bool) error {
|
||||||
// Parse config
|
// Parse config
|
||||||
cfg := p.parseConfig(pubCfg, relCfg)
|
cfg := p.parseConfig(pubCfg, relCfg)
|
||||||
|
|
@ -67,7 +76,7 @@ func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCf
|
||||||
repo = relCfg.GetRepository()
|
repo = relCfg.GetRepository()
|
||||||
}
|
}
|
||||||
if repo == "" {
|
if repo == "" {
|
||||||
detectedRepo, err := detectRepository(release.ProjectDir)
|
detectedRepo, err := detectRepository(ctx, release.ProjectDir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("homebrew.Publish", "could not determine repository", err)
|
return coreerr.E("homebrew.Publish", "could not determine repository", err)
|
||||||
}
|
}
|
||||||
|
|
@ -79,7 +88,7 @@ func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCf
|
||||||
projectName = relCfg.GetProjectName()
|
projectName = relCfg.GetProjectName()
|
||||||
}
|
}
|
||||||
if projectName == "" {
|
if projectName == "" {
|
||||||
parts := strings.Split(repo, "/")
|
parts := core.Split(repo, "/")
|
||||||
projectName = parts[len(parts)-1]
|
projectName = parts[len(parts)-1]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -89,7 +98,7 @@ func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCf
|
||||||
}
|
}
|
||||||
|
|
||||||
// Strip leading 'v' from version
|
// Strip leading 'v' from version
|
||||||
version := strings.TrimPrefix(release.Version, "v")
|
version := core.TrimPrefix(release.Version, "v")
|
||||||
|
|
||||||
// Build checksums map from artifacts
|
// Build checksums map from artifacts
|
||||||
checksums := buildChecksumMap(release.Artifacts)
|
checksums := buildChecksumMap(release.Artifacts)
|
||||||
|
|
@ -97,7 +106,7 @@ func (p *HomebrewPublisher) Publish(ctx context.Context, release *Release, pubCf
|
||||||
// Template data
|
// Template data
|
||||||
data := homebrewTemplateData{
|
data := homebrewTemplateData{
|
||||||
FormulaClass: toFormulaClass(formulaName),
|
FormulaClass: toFormulaClass(formulaName),
|
||||||
Description: fmt.Sprintf("%s CLI", projectName),
|
Description: core.Sprintf("%s CLI", projectName),
|
||||||
Repository: repo,
|
Repository: repo,
|
||||||
Version: version,
|
Version: version,
|
||||||
License: "MIT",
|
License: "MIT",
|
||||||
|
|
@ -124,6 +133,8 @@ type homebrewTemplateData struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// ChecksumMap holds checksums for different platform/arch combinations.
|
// ChecksumMap holds checksums for different platform/arch combinations.
|
||||||
|
//
|
||||||
|
// data.Checksums.LinuxAMD64 = "abc123..."
|
||||||
type ChecksumMap struct {
|
type ChecksumMap struct {
|
||||||
DarwinAmd64 string
|
DarwinAmd64 string
|
||||||
DarwinArm64 string
|
DarwinArm64 string
|
||||||
|
|
@ -163,38 +174,38 @@ func (p *HomebrewPublisher) parseConfig(pubCfg PublisherConfig, relCfg ReleaseCo
|
||||||
|
|
||||||
// dryRunPublish shows what would be done.
|
// dryRunPublish shows what would be done.
|
||||||
func (p *HomebrewPublisher) dryRunPublish(m coreio.Medium, data homebrewTemplateData, cfg HomebrewConfig) error {
|
func (p *HomebrewPublisher) dryRunPublish(m coreio.Medium, data homebrewTemplateData, cfg HomebrewConfig) error {
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== DRY RUN: Homebrew Publish ===")
|
publisherPrintln("=== DRY RUN: Homebrew Publish ===")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Printf("Formula: %s\n", data.FormulaClass)
|
publisherPrint("Formula: %s", data.FormulaClass)
|
||||||
fmt.Printf("Version: %s\n", data.Version)
|
publisherPrint("Version: %s", data.Version)
|
||||||
fmt.Printf("Tap: %s\n", cfg.Tap)
|
publisherPrint("Tap: %s", cfg.Tap)
|
||||||
fmt.Printf("Repository: %s\n", data.Repository)
|
publisherPrint("Repository: %s", data.Repository)
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
// Generate and show formula
|
// Generate and show formula
|
||||||
formula, err := p.renderTemplate(m, "templates/homebrew/formula.rb.tmpl", data)
|
formula, err := p.renderTemplate(m, "templates/homebrew/formula.rb.tmpl", data)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("homebrew.dryRunPublish", "failed to render template", err)
|
return coreerr.E("homebrew.dryRunPublish", "failed to render template", err)
|
||||||
}
|
}
|
||||||
fmt.Println("Generated formula.rb:")
|
publisherPrintln("Generated formula.rb:")
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println(formula)
|
publisherPrintln(formula)
|
||||||
fmt.Println("---")
|
publisherPrintln("---")
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
|
|
||||||
if cfg.Tap != "" {
|
if cfg.Tap != "" {
|
||||||
fmt.Printf("Would commit to tap: %s\n", cfg.Tap)
|
publisherPrint("Would commit to tap: %s", cfg.Tap)
|
||||||
}
|
}
|
||||||
if cfg.Official != nil && cfg.Official.Enabled {
|
if cfg.Official != nil && cfg.Official.Enabled {
|
||||||
output := cfg.Official.Output
|
output := cfg.Official.Output
|
||||||
if output == "" {
|
if output == "" {
|
||||||
output = "dist/homebrew"
|
output = "dist/homebrew"
|
||||||
}
|
}
|
||||||
fmt.Printf("Would write files for official PR to: %s\n", output)
|
publisherPrint("Would write files for official PR to: %s", output)
|
||||||
}
|
}
|
||||||
fmt.Println()
|
publisherPrintln()
|
||||||
fmt.Println("=== END DRY RUN ===")
|
publisherPrintln("=== END DRY RUN ===")
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -211,20 +222,20 @@ func (p *HomebrewPublisher) executePublish(ctx context.Context, projectDir strin
|
||||||
if cfg.Official != nil && cfg.Official.Enabled {
|
if cfg.Official != nil && cfg.Official.Enabled {
|
||||||
output := cfg.Official.Output
|
output := cfg.Official.Output
|
||||||
if output == "" {
|
if output == "" {
|
||||||
output = filepath.Join(projectDir, "dist", "homebrew")
|
output = ax.Join(projectDir, "dist", "homebrew")
|
||||||
} else if !filepath.IsAbs(output) {
|
} else if !ax.IsAbs(output) {
|
||||||
output = filepath.Join(projectDir, output)
|
output = ax.Join(projectDir, output)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := release.FS.EnsureDir(output); err != nil {
|
if err := release.FS.EnsureDir(output); err != nil {
|
||||||
return coreerr.E("homebrew.Publish", "failed to create output directory", err)
|
return coreerr.E("homebrew.Publish", "failed to create output directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
formulaPath := filepath.Join(output, fmt.Sprintf("%s.rb", strings.ToLower(data.FormulaClass)))
|
formulaPath := ax.Join(output, core.Sprintf("%s.rb", core.Lower(data.FormulaClass)))
|
||||||
if err := release.FS.Write(formulaPath, formula); err != nil {
|
if err := release.FS.Write(formulaPath, formula); err != nil {
|
||||||
return coreerr.E("homebrew.Publish", "failed to write formula", err)
|
return coreerr.E("homebrew.Publish", "failed to write formula", err)
|
||||||
}
|
}
|
||||||
fmt.Printf("Wrote Homebrew formula for official PR: %s\n", formulaPath)
|
publisherPrint("Wrote Homebrew formula for official PR: %s", formulaPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
// If tap is configured, commit to it
|
// If tap is configured, commit to it
|
||||||
|
|
@ -240,59 +251,46 @@ func (p *HomebrewPublisher) executePublish(ctx context.Context, projectDir strin
|
||||||
// commitToTap commits the formula to the tap repository.
|
// commitToTap commits the formula to the tap repository.
|
||||||
func (p *HomebrewPublisher) commitToTap(ctx context.Context, tap string, data homebrewTemplateData, formula string) error {
|
func (p *HomebrewPublisher) commitToTap(ctx context.Context, tap string, data homebrewTemplateData, formula string) error {
|
||||||
// Clone tap repo to temp directory
|
// Clone tap repo to temp directory
|
||||||
tmpDir, err := os.MkdirTemp("", "homebrew-tap-*")
|
tmpDir, err := ax.TempDir("homebrew-tap-*")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("homebrew.commitToTap", "failed to create temp directory", err)
|
return coreerr.E("homebrew.commitToTap", "failed to create temp directory", err)
|
||||||
}
|
}
|
||||||
defer func() { _ = coreio.Local.DeleteAll(tmpDir) }()
|
defer func() { _ = ax.RemoveAll(tmpDir) }()
|
||||||
|
|
||||||
// Clone the tap
|
// Clone the tap
|
||||||
fmt.Printf("Cloning tap %s...\n", tap)
|
publisherPrint("Cloning tap %s...", tap)
|
||||||
cmd := exec.CommandContext(ctx, "gh", "repo", "clone", tap, tmpDir, "--", "--depth=1")
|
if err := publisherRun(ctx, "", nil, "gh", "repo", "clone", tap, tmpDir, "--", "--depth=1"); err != nil {
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("homebrew.commitToTap", "failed to clone tap", err)
|
return coreerr.E("homebrew.commitToTap", "failed to clone tap", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure Formula directory exists
|
// Ensure Formula directory exists
|
||||||
formulaDir := filepath.Join(tmpDir, "Formula")
|
formulaDir := ax.Join(tmpDir, "Formula")
|
||||||
if err := coreio.Local.EnsureDir(formulaDir); err != nil {
|
if err := ax.MkdirAll(formulaDir, 0o755); err != nil {
|
||||||
return coreerr.E("homebrew.commitToTap", "failed to create Formula directory", err)
|
return coreerr.E("homebrew.commitToTap", "failed to create Formula directory", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write formula
|
// Write formula
|
||||||
formulaPath := filepath.Join(formulaDir, fmt.Sprintf("%s.rb", strings.ToLower(data.FormulaClass)))
|
formulaPath := ax.Join(formulaDir, core.Sprintf("%s.rb", core.Lower(data.FormulaClass)))
|
||||||
if err := coreio.Local.Write(formulaPath, formula); err != nil {
|
if err := ax.WriteString(formulaPath, formula, 0o644); err != nil {
|
||||||
return coreerr.E("homebrew.commitToTap", "failed to write formula", err)
|
return coreerr.E("homebrew.commitToTap", "failed to write formula", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Git add, commit, push
|
// Git add, commit, push
|
||||||
commitMsg := fmt.Sprintf("Update %s to %s", data.FormulaClass, data.Version)
|
commitMsg := core.Sprintf("Update %s to %s", data.FormulaClass, data.Version)
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "add", ".")
|
if err := ax.ExecDir(ctx, tmpDir, "git", "add", "."); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("homebrew.commitToTap", "git add failed", err)
|
return coreerr.E("homebrew.commitToTap", "git add failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "commit", "-m", commitMsg)
|
if err := publisherRun(ctx, tmpDir, nil, "git", "commit", "-m", commitMsg); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("homebrew.commitToTap", "git commit failed", err)
|
return coreerr.E("homebrew.commitToTap", "git commit failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd = exec.CommandContext(ctx, "git", "push")
|
if err := publisherRun(ctx, tmpDir, nil, "git", "push"); err != nil {
|
||||||
cmd.Dir = tmpDir
|
|
||||||
cmd.Stdout = os.Stdout
|
|
||||||
cmd.Stderr = os.Stderr
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return coreerr.E("homebrew.commitToTap", "git push failed", err)
|
return coreerr.E("homebrew.commitToTap", "git push failed", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Printf("Updated Homebrew tap: %s\n", tap)
|
publisherPrint("Updated Homebrew tap: %s", tap)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -302,7 +300,7 @@ func (p *HomebrewPublisher) renderTemplate(m coreio.Medium, name string, data ho
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
// Try custom template from medium
|
// Try custom template from medium
|
||||||
customPath := filepath.Join(".core", name)
|
customPath := ax.Join(".core", name)
|
||||||
if m != nil && m.IsFile(customPath) {
|
if m != nil && m.IsFile(customPath) {
|
||||||
customContent, err := m.Read(customPath)
|
customContent, err := m.Read(customPath)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
|
@ -318,7 +316,7 @@ func (p *HomebrewPublisher) renderTemplate(m coreio.Medium, name string, data ho
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
tmpl, err := template.New(filepath.Base(name)).Parse(string(content))
|
tmpl, err := template.New(ax.Base(name)).Parse(string(content))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", coreerr.E("homebrew.renderTemplate", "failed to parse template "+name, err)
|
return "", coreerr.E("homebrew.renderTemplate", "failed to parse template "+name, err)
|
||||||
}
|
}
|
||||||
|
|
@ -334,13 +332,13 @@ func (p *HomebrewPublisher) renderTemplate(m coreio.Medium, name string, data ho
|
||||||
// toFormulaClass converts a package name to a Ruby class name.
|
// toFormulaClass converts a package name to a Ruby class name.
|
||||||
func toFormulaClass(name string) string {
|
func toFormulaClass(name string) string {
|
||||||
// Convert kebab-case to PascalCase
|
// Convert kebab-case to PascalCase
|
||||||
parts := strings.Split(name, "-")
|
parts := core.Split(name, "-")
|
||||||
for i, part := range parts {
|
for i, part := range parts {
|
||||||
if len(part) > 0 {
|
if len(part) > 0 {
|
||||||
parts[i] = strings.ToUpper(part[:1]) + part[1:]
|
parts[i] = core.Upper(part[:1]) + part[1:]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return strings.Join(parts, "")
|
return core.Join("", parts...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildChecksumMap extracts checksums from artifacts into a structured map.
|
// buildChecksumMap extracts checksums from artifacts into a structured map.
|
||||||
|
|
@ -349,21 +347,21 @@ func buildChecksumMap(artifacts []build.Artifact) ChecksumMap {
|
||||||
|
|
||||||
for _, a := range artifacts {
|
for _, a := range artifacts {
|
||||||
// Parse artifact name to determine platform
|
// Parse artifact name to determine platform
|
||||||
name := filepath.Base(a.Path)
|
name := ax.Base(a.Path)
|
||||||
checksum := a.Checksum
|
checksum := a.Checksum
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case strings.Contains(name, "darwin-amd64"):
|
case core.Contains(name, "darwin-amd64"):
|
||||||
checksums.DarwinAmd64 = checksum
|
checksums.DarwinAmd64 = checksum
|
||||||
case strings.Contains(name, "darwin-arm64"):
|
case core.Contains(name, "darwin-arm64"):
|
||||||
checksums.DarwinArm64 = checksum
|
checksums.DarwinArm64 = checksum
|
||||||
case strings.Contains(name, "linux-amd64"):
|
case core.Contains(name, "linux-amd64"):
|
||||||
checksums.LinuxAmd64 = checksum
|
checksums.LinuxAmd64 = checksum
|
||||||
case strings.Contains(name, "linux-arm64"):
|
case core.Contains(name, "linux-arm64"):
|
||||||
checksums.LinuxArm64 = checksum
|
checksums.LinuxArm64 = checksum
|
||||||
case strings.Contains(name, "windows-amd64"):
|
case core.Contains(name, "windows-amd64"):
|
||||||
checksums.WindowsAmd64 = checksum
|
checksums.WindowsAmd64 = checksum
|
||||||
case strings.Contains(name, "windows-arm64"):
|
case core.Contains(name, "windows-arm64"):
|
||||||
checksums.WindowsArm64 = checksum
|
checksums.WindowsArm64 = checksum
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue