From e0cc9526ea7f11a7730dd3dca0297b664bd3f43c Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 18:14:59 +0000 Subject: [PATCH 01/10] chore(io): Migrate internal/cmd/php to Medium abstraction (#338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Completes issue #112 by migrating all direct os.* filesystem calls in internal/cmd/php to use the io.Medium abstraction via getMedium(). Changes: - packages.go: os.ReadFile/WriteFile → getMedium().Read/Write - container.go: os.WriteFile/Remove/MkdirAll/Stat → getMedium().Write/Delete/EnsureDir/IsFile - services.go: os.MkdirAll/OpenFile/Open → getMedium().EnsureDir/Create/Open - dockerfile.go: os.ReadFile/Stat → getMedium().Read/IsFile - ssl.go: os.MkdirAll/Stat → getMedium().EnsureDir/IsFile - cmd_ci.go: os.WriteFile → getMedium().Write - cmd.go: os.Stat → getMedium().IsDir - coolify.go: os.Open → getMedium().Read - testing.go: os.Stat → getMedium().IsFile - cmd_qa_runner.go: os.Stat → getMedium().IsFile - detect.go: os.Stat/ReadFile → getMedium().Exists/Read - quality.go: os.Stat/ReadFile → getMedium().Exists/IsFile/Read All production files now use the consistent getMedium() pattern for testability. Test files retain direct os.* calls as they manage test fixtures directly. Closes #112 Co-authored-by: Claude Co-authored-by: Claude Opus 4.5 --- internal/cmd/php/cmd.go | 19 +++++- internal/cmd/php/cmd_ci.go | 2 +- internal/cmd/php/cmd_qa_runner.go | 21 ++++--- internal/cmd/php/container.go | 18 +++--- internal/cmd/php/coolify.go | 16 ++--- internal/cmd/php/detect.go | 64 ++++++++++---------- internal/cmd/php/dockerfile.go | 13 +++-- internal/cmd/php/packages.go | 15 +++-- internal/cmd/php/quality.go | 97 +++++++++++++++++-------------- internal/cmd/php/services.go | 24 ++++++-- internal/cmd/php/ssl.go | 8 ++- internal/cmd/php/testing.go | 10 ++-- 12 files changed, 179 insertions(+), 128 deletions(-) diff --git a/internal/cmd/php/cmd.go b/internal/cmd/php/cmd.go index 80091ea9..0bbfc6f3 100644 --- a/internal/cmd/php/cmd.go +++ b/internal/cmd/php/cmd.go @@ -7,9 +7,26 @@ import ( "github.com/host-uk/core/internal/cmd/workspace" "github.com/host-uk/core/pkg/cli" "github.com/host-uk/core/pkg/i18n" + "github.com/host-uk/core/pkg/io" "github.com/spf13/cobra" ) +// DefaultMedium is the default filesystem medium used by the php package. +// It defaults to io.Local (unsandboxed filesystem access). +// Use SetMedium to change this for testing or sandboxed operation. +var DefaultMedium io.Medium = io.Local + +// SetMedium sets the default medium for filesystem operations. +// This is primarily useful for testing with mock mediums. +func SetMedium(m io.Medium) { + DefaultMedium = m +} + +// getMedium returns the default medium for filesystem operations. 
+func getMedium() io.Medium { + return DefaultMedium +} + func init() { cli.RegisterCommands(AddPHPCommands) } @@ -89,7 +106,7 @@ func AddPHPCommands(root *cobra.Command) { targetDir := filepath.Join(pkgDir, config.Active) // Check if target directory exists - if _, err := os.Stat(targetDir); err != nil { + if !getMedium().IsDir(targetDir) { cli.Warnf("Active package directory not found: %s", targetDir) return nil } diff --git a/internal/cmd/php/cmd_ci.go b/internal/cmd/php/cmd_ci.go index 40b23fe2..8c9c6195 100644 --- a/internal/cmd/php/cmd_ci.go +++ b/internal/cmd/php/cmd_ci.go @@ -515,7 +515,7 @@ func generateSARIF(ctx context.Context, dir, checkName, outputFile string) error return fmt.Errorf("invalid SARIF output: %w", err) } - return os.WriteFile(outputFile, output, 0644) + return getMedium().Write(outputFile, string(output)) } // uploadSARIFToGitHub uploads a SARIF file to GitHub Security tab diff --git a/internal/cmd/php/cmd_qa_runner.go b/internal/cmd/php/cmd_qa_runner.go index c61ea466..69c8a6e4 100644 --- a/internal/cmd/php/cmd_qa_runner.go +++ b/internal/cmd/php/cmd_qa_runner.go @@ -2,7 +2,6 @@ package php import ( "context" - "os" "path/filepath" "strings" "sync" @@ -77,6 +76,7 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { } case "fmt": + m := getMedium() formatter, found := DetectFormatter(r.dir) if !found { return nil @@ -84,7 +84,7 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { if formatter == FormatterPint { vendorBin := filepath.Join(r.dir, "vendor", "bin", "pint") cmd := "pint" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmd = vendorBin } args := []string{} @@ -102,13 +102,14 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { return nil case "stan": + m := getMedium() _, found := DetectAnalyser(r.dir) if !found { return nil } vendorBin := filepath.Join(r.dir, "vendor", "bin", "phpstan") cmd := "phpstan" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmd = vendorBin } return &process.RunSpec{ @@ -120,13 +121,14 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { } case "psalm": + m := getMedium() _, found := DetectPsalm(r.dir) if !found { return nil } vendorBin := filepath.Join(r.dir, "vendor", "bin", "psalm") cmd := "psalm" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmd = vendorBin } args := []string{"--no-progress"} @@ -142,14 +144,15 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { } case "test": + m := getMedium() // Check for Pest first, fall back to PHPUnit pestBin := filepath.Join(r.dir, "vendor", "bin", "pest") phpunitBin := filepath.Join(r.dir, "vendor", "bin", "phpunit") var cmd string - if _, err := os.Stat(pestBin); err == nil { + if m.IsFile(pestBin) { cmd = pestBin - } else if _, err := os.Stat(phpunitBin); err == nil { + } else if m.IsFile(phpunitBin) { cmd = phpunitBin } else { return nil @@ -170,12 +173,13 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { } case "rector": + m := getMedium() if !DetectRector(r.dir) { return nil } vendorBin := filepath.Join(r.dir, "vendor", "bin", "rector") cmd := "rector" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmd = vendorBin } args := []string{"process"} @@ -192,12 +196,13 @@ func (r *QARunner) buildSpec(check string) *process.RunSpec { } case "infection": + m := getMedium() if !DetectInfection(r.dir) { return nil } vendorBin := filepath.Join(r.dir, "vendor", "bin", "infection") cmd := "infection" - if 
_, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmd = vendorBin } return &process.RunSpec{ diff --git a/internal/cmd/php/container.go b/internal/cmd/php/container.go index 9b8f630a..8fe16e05 100644 --- a/internal/cmd/php/container.go +++ b/internal/cmd/php/container.go @@ -128,11 +128,12 @@ func BuildDocker(ctx context.Context, opts DockerBuildOptions) error { } // Write to temporary file + m := getMedium() tempDockerfile = filepath.Join(opts.ProjectDir, "Dockerfile.core-generated") - if err := os.WriteFile(tempDockerfile, []byte(content), 0644); err != nil { + if err := m.Write(tempDockerfile, content); err != nil { return cli.WrapVerb(err, "write", "Dockerfile") } - defer func() { _ = os.Remove(tempDockerfile) }() + defer func() { _ = m.Delete(tempDockerfile) }() dockerfilePath = tempDockerfile } @@ -198,8 +199,9 @@ func BuildLinuxKit(ctx context.Context, opts LinuxKitBuildOptions) error { } // Ensure output directory exists + m := getMedium() outputDir := filepath.Dir(opts.OutputPath) - if err := os.MkdirAll(outputDir, 0755); err != nil { + if err := m.EnsureDir(outputDir); err != nil { return cli.WrapVerb(err, "create", "output directory") } @@ -230,10 +232,10 @@ func BuildLinuxKit(ctx context.Context, opts LinuxKitBuildOptions) error { // Write template to temp file tempYAML := filepath.Join(opts.ProjectDir, ".core-linuxkit.yml") - if err := os.WriteFile(tempYAML, []byte(content), 0644); err != nil { + if err := m.Write(tempYAML, content); err != nil { return cli.WrapVerb(err, "write", "template") } - defer func() { _ = os.Remove(tempYAML) }() + defer func() { _ = m.Delete(tempYAML) }() // Build LinuxKit image args := []string{ @@ -345,8 +347,7 @@ func Shell(ctx context.Context, containerID string) error { // IsPHPProject checks if the given directory is a PHP project. func IsPHPProject(dir string) bool { composerPath := filepath.Join(dir, "composer.json") - _, err := os.Stat(composerPath) - return err == nil + return getMedium().IsFile(composerPath) } // commonLinuxKitPaths defines default search locations for linuxkit. @@ -362,8 +363,9 @@ func lookupLinuxKit() (string, error) { return path, nil } + m := getMedium() for _, p := range commonLinuxKitPaths { - if _, err := os.Stat(p); err == nil { + if m.IsFile(p) { return p, nil } } diff --git a/internal/cmd/php/coolify.go b/internal/cmd/php/coolify.go index 76aa4ca9..017fa260 100644 --- a/internal/cmd/php/coolify.go +++ b/internal/cmd/php/coolify.go @@ -75,6 +75,7 @@ func LoadCoolifyConfig(dir string) (*CoolifyConfig, error) { // LoadCoolifyConfigFromFile loads Coolify configuration from a specific .env file. 
func LoadCoolifyConfigFromFile(path string) (*CoolifyConfig, error) { + m := getMedium() config := &CoolifyConfig{} // First try environment variables @@ -84,23 +85,18 @@ func LoadCoolifyConfigFromFile(path string) (*CoolifyConfig, error) { config.StagingAppID = os.Getenv("COOLIFY_STAGING_APP_ID") // Then try .env file - file, err := os.Open(path) - if err != nil { - if os.IsNotExist(err) { - // No .env file, just use env vars - return validateCoolifyConfig(config) - } - return nil, cli.WrapVerb(err, "open", ".env file") + if !m.Exists(path) { + // No .env file, just use env vars + return validateCoolifyConfig(config) } - defer func() { _ = file.Close() }() - content, err := io.ReadAll(file) + content, err := m.Read(path) if err != nil { return nil, cli.WrapVerb(err, "read", ".env file") } // Parse .env file - lines := strings.Split(string(content), "\n") + lines := strings.Split(content, "\n") for _, line := range lines { line = strings.TrimSpace(line) if line == "" || strings.HasPrefix(line, "#") { diff --git a/internal/cmd/php/detect.go b/internal/cmd/php/detect.go index 7a977090..c13da9d7 100644 --- a/internal/cmd/php/detect.go +++ b/internal/cmd/php/detect.go @@ -1,9 +1,7 @@ package php import ( - "bufio" "encoding/json" - "os" "path/filepath" "strings" ) @@ -28,15 +26,17 @@ const ( // IsLaravelProject checks if the given directory is a Laravel project. // It looks for the presence of artisan file and laravel in composer.json. func IsLaravelProject(dir string) bool { + m := getMedium() + // Check for artisan file artisanPath := filepath.Join(dir, "artisan") - if _, err := os.Stat(artisanPath); os.IsNotExist(err) { + if !m.Exists(artisanPath) { return false } // Check composer.json for laravel/framework composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) + data, err := m.Read(composerPath) if err != nil { return false } @@ -46,7 +46,7 @@ func IsLaravelProject(dir string) bool { RequireDev map[string]string `json:"require-dev"` } - if err := json.Unmarshal(data, &composer); err != nil { + if err := json.Unmarshal([]byte(data), &composer); err != nil { return false } @@ -66,9 +66,11 @@ func IsLaravelProject(dir string) bool { // IsFrankenPHPProject checks if the project is configured for FrankenPHP. // It looks for laravel/octane with frankenphp driver. 
func IsFrankenPHPProject(dir string) bool { + m := getMedium() + // Check composer.json for laravel/octane composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) + data, err := m.Read(composerPath) if err != nil { return false } @@ -77,7 +79,7 @@ func IsFrankenPHPProject(dir string) bool { Require map[string]string `json:"require"` } - if err := json.Unmarshal(data, &composer); err != nil { + if err := json.Unmarshal([]byte(data), &composer); err != nil { return false } @@ -87,18 +89,18 @@ func IsFrankenPHPProject(dir string) bool { // Check octane config for frankenphp configPath := filepath.Join(dir, "config", "octane.php") - if _, err := os.Stat(configPath); os.IsNotExist(err) { + if !m.Exists(configPath) { // If no config exists but octane is installed, assume frankenphp return true } - configData, err := os.ReadFile(configPath) + configData, err := m.Read(configPath) if err != nil { return true // Assume frankenphp if we can't read config } // Look for frankenphp in the config - return strings.Contains(string(configData), "frankenphp") + return strings.Contains(configData, "frankenphp") } // DetectServices detects which services are needed based on project files. @@ -135,6 +137,7 @@ func DetectServices(dir string) []DetectedService { // hasVite checks if the project uses Vite. func hasVite(dir string) bool { + m := getMedium() viteConfigs := []string{ "vite.config.js", "vite.config.ts", @@ -143,7 +146,7 @@ func hasVite(dir string) bool { } for _, config := range viteConfigs { - if _, err := os.Stat(filepath.Join(dir, config)); err == nil { + if m.Exists(filepath.Join(dir, config)) { return true } } @@ -154,29 +157,27 @@ func hasVite(dir string) bool { // hasHorizon checks if Laravel Horizon is configured. func hasHorizon(dir string) bool { horizonConfig := filepath.Join(dir, "config", "horizon.php") - _, err := os.Stat(horizonConfig) - return err == nil + return getMedium().Exists(horizonConfig) } // hasReverb checks if Laravel Reverb is configured. func hasReverb(dir string) bool { reverbConfig := filepath.Join(dir, "config", "reverb.php") - _, err := os.Stat(reverbConfig) - return err == nil + return getMedium().Exists(reverbConfig) } // needsRedis checks if the project uses Redis based on .env configuration. func needsRedis(dir string) bool { + m := getMedium() envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) + content, err := m.Read(envPath) if err != nil { return false } - defer func() { _ = file.Close() }() - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) + lines := strings.Split(content, "\n") + for _, line := range lines { + line = strings.TrimSpace(line) if strings.HasPrefix(line, "#") { continue } @@ -207,6 +208,7 @@ func needsRedis(dir string) bool { // DetectPackageManager detects which package manager is used in the project. // Returns "npm", "pnpm", "yarn", or "bun". func DetectPackageManager(dir string) string { + m := getMedium() // Check for lock files in order of preference lockFiles := []struct { file string @@ -219,7 +221,7 @@ func DetectPackageManager(dir string) string { } for _, lf := range lockFiles { - if _, err := os.Stat(filepath.Join(dir, lf.file)); err == nil { + if m.Exists(filepath.Join(dir, lf.file)) { return lf.manager } } @@ -230,16 +232,16 @@ func DetectPackageManager(dir string) string { // GetLaravelAppName extracts the application name from Laravel's .env file. 
func GetLaravelAppName(dir string) string { + m := getMedium() envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) + content, err := m.Read(envPath) if err != nil { return "" } - defer func() { _ = file.Close() }() - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) + lines := strings.Split(content, "\n") + for _, line := range lines { + line = strings.TrimSpace(line) if strings.HasPrefix(line, "APP_NAME=") { value := strings.TrimPrefix(line, "APP_NAME=") // Remove quotes if present @@ -253,16 +255,16 @@ func GetLaravelAppName(dir string) string { // GetLaravelAppURL extracts the application URL from Laravel's .env file. func GetLaravelAppURL(dir string) string { + m := getMedium() envPath := filepath.Join(dir, ".env") - file, err := os.Open(envPath) + content, err := m.Read(envPath) if err != nil { return "" } - defer func() { _ = file.Close() }() - scanner := bufio.NewScanner(file) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) + lines := strings.Split(content, "\n") + for _, line := range lines { + line = strings.TrimSpace(line) if strings.HasPrefix(line, "APP_URL=") { value := strings.TrimPrefix(line, "APP_URL=") // Remove quotes if present diff --git a/internal/cmd/php/dockerfile.go b/internal/cmd/php/dockerfile.go index 43a3b6cf..4081a166 100644 --- a/internal/cmd/php/dockerfile.go +++ b/internal/cmd/php/dockerfile.go @@ -2,7 +2,6 @@ package php import ( "encoding/json" - "os" "path/filepath" "sort" "strings" @@ -50,6 +49,7 @@ func GenerateDockerfile(dir string) (string, error) { // DetectDockerfileConfig detects configuration from project files. func DetectDockerfileConfig(dir string) (*DockerfileConfig, error) { + m := getMedium() config := &DockerfileConfig{ PHPVersion: "8.3", BaseImage: "dunglas/frankenphp", @@ -58,13 +58,13 @@ func DetectDockerfileConfig(dir string) (*DockerfileConfig, error) { // Read composer.json composerPath := filepath.Join(dir, "composer.json") - composerData, err := os.ReadFile(composerPath) + composerContent, err := m.Read(composerPath) if err != nil { return nil, cli.WrapVerb(err, "read", "composer.json") } var composer ComposerJSON - if err := json.Unmarshal(composerData, &composer); err != nil { + if err := json.Unmarshal([]byte(composerContent), &composer); err != nil { return nil, cli.WrapVerb(err, "parse", "composer.json") } @@ -318,13 +318,14 @@ func extractPHPVersion(constraint string) string { // hasNodeAssets checks if the project has frontend assets. func hasNodeAssets(dir string) bool { + m := getMedium() packageJSON := filepath.Join(dir, "package.json") - if _, err := os.Stat(packageJSON); err != nil { + if !m.IsFile(packageJSON) { return false } // Check for build script in package.json - data, err := os.ReadFile(packageJSON) + content, err := m.Read(packageJSON) if err != nil { return false } @@ -333,7 +334,7 @@ func hasNodeAssets(dir string) bool { Scripts map[string]string `json:"scripts"` } - if err := json.Unmarshal(data, &pkg); err != nil { + if err := json.Unmarshal([]byte(content), &pkg); err != nil { return false } diff --git a/internal/cmd/php/packages.go b/internal/cmd/php/packages.go index ba3501ff..ce686053 100644 --- a/internal/cmd/php/packages.go +++ b/internal/cmd/php/packages.go @@ -25,14 +25,15 @@ type composerRepository struct { // readComposerJSON reads and parses composer.json from the given directory. 
func readComposerJSON(dir string) (map[string]json.RawMessage, error) { + m := getMedium() composerPath := filepath.Join(dir, "composer.json") - data, err := os.ReadFile(composerPath) + content, err := m.Read(composerPath) if err != nil { return nil, cli.WrapVerb(err, "read", "composer.json") } var raw map[string]json.RawMessage - if err := json.Unmarshal(data, &raw); err != nil { + if err := json.Unmarshal([]byte(content), &raw); err != nil { return nil, cli.WrapVerb(err, "parse", "composer.json") } @@ -41,6 +42,7 @@ func readComposerJSON(dir string) (map[string]json.RawMessage, error) { // writeComposerJSON writes the composer.json to the given directory. func writeComposerJSON(dir string, raw map[string]json.RawMessage) error { + m := getMedium() composerPath := filepath.Join(dir, "composer.json") data, err := json.MarshalIndent(raw, "", " ") @@ -49,9 +51,9 @@ func writeComposerJSON(dir string, raw map[string]json.RawMessage) error { } // Add trailing newline - data = append(data, '\n') + content := string(data) + "\n" - if err := os.WriteFile(composerPath, data, 0644); err != nil { + if err := m.Write(composerPath, content); err != nil { return cli.WrapVerb(err, "write", "composer.json") } @@ -91,8 +93,9 @@ func setRepositories(raw map[string]json.RawMessage, repos []composerRepository) // getPackageInfo reads package name and version from a composer.json in the given path. func getPackageInfo(packagePath string) (name, version string, err error) { + m := getMedium() composerPath := filepath.Join(packagePath, "composer.json") - data, err := os.ReadFile(composerPath) + content, err := m.Read(composerPath) if err != nil { return "", "", cli.WrapVerb(err, "read", "package composer.json") } @@ -102,7 +105,7 @@ func getPackageInfo(packagePath string) (name, version string, err error) { Version string `json:"version"` } - if err := json.Unmarshal(data, &pkg); err != nil { + if err := json.Unmarshal([]byte(content), &pkg); err != nil { return "", "", cli.WrapVerb(err, "parse", "package composer.json") } diff --git a/internal/cmd/php/quality.go b/internal/cmd/php/quality.go index 8f9109f2..1e398636 100644 --- a/internal/cmd/php/quality.go +++ b/internal/cmd/php/quality.go @@ -3,7 +3,7 @@ package php import ( "context" "encoding/json" - "io" + goio "io" "os" "os/exec" "path/filepath" @@ -31,7 +31,7 @@ type FormatOptions struct { Paths []string // Output is the writer for output (defaults to os.Stdout). - Output io.Writer + Output goio.Writer } // AnalyseOptions configures PHP static analysis. @@ -55,7 +55,7 @@ type AnalyseOptions struct { SARIF bool // Output is the writer for output (defaults to os.Stdout). - Output io.Writer + Output goio.Writer } // FormatterType represents the detected formatter. @@ -80,15 +80,17 @@ const ( // DetectFormatter detects which formatter is available in the project. func DetectFormatter(dir string) (FormatterType, bool) { + m := getMedium() + // Check for Pint config pintConfig := filepath.Join(dir, "pint.json") - if _, err := os.Stat(pintConfig); err == nil { + if m.Exists(pintConfig) { return FormatterPint, true } // Check for vendor binary pintBin := filepath.Join(dir, "vendor", "bin", "pint") - if _, err := os.Stat(pintBin); err == nil { + if m.Exists(pintBin) { return FormatterPint, true } @@ -97,34 +99,27 @@ func DetectFormatter(dir string) (FormatterType, bool) { // DetectAnalyser detects which static analyser is available in the project. 
func DetectAnalyser(dir string) (AnalyserType, bool) { + m := getMedium() + // Check for PHPStan config phpstanConfig := filepath.Join(dir, "phpstan.neon") phpstanDistConfig := filepath.Join(dir, "phpstan.neon.dist") - hasConfig := false - if _, err := os.Stat(phpstanConfig); err == nil { - hasConfig = true - } - if _, err := os.Stat(phpstanDistConfig); err == nil { - hasConfig = true - } + hasConfig := m.Exists(phpstanConfig) || m.Exists(phpstanDistConfig) // Check for vendor binary phpstanBin := filepath.Join(dir, "vendor", "bin", "phpstan") - hasBin := false - if _, err := os.Stat(phpstanBin); err == nil { - hasBin = true - } + hasBin := m.Exists(phpstanBin) if hasConfig || hasBin { // Check if it's Larastan (Laravel-specific PHPStan) larastanPath := filepath.Join(dir, "vendor", "larastan", "larastan") - if _, err := os.Stat(larastanPath); err == nil { + if m.Exists(larastanPath) { return AnalyserLarastan, true } // Also check nunomaduro/larastan larastanPath2 := filepath.Join(dir, "vendor", "nunomaduro", "larastan") - if _, err := os.Stat(larastanPath2); err == nil { + if m.Exists(larastanPath2) { return AnalyserLarastan, true } return AnalyserPHPStan, true @@ -207,10 +202,12 @@ func Analyse(ctx context.Context, opts AnalyseOptions) error { // buildPintCommand builds the command for running Laravel Pint. func buildPintCommand(opts FormatOptions) (string, []string) { + m := getMedium() + // Check for vendor binary first vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "pint") cmdName := "pint" - if _, err := os.Stat(vendorBin); err == nil { + if m.Exists(vendorBin) { cmdName = vendorBin } @@ -236,10 +233,12 @@ func buildPintCommand(opts FormatOptions) (string, []string) { // buildPHPStanCommand builds the command for running PHPStan. func buildPHPStanCommand(opts AnalyseOptions) (string, []string) { + m := getMedium() + // Check for vendor binary first vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "phpstan") cmdName := "phpstan" - if _, err := os.Stat(vendorBin); err == nil { + if m.Exists(vendorBin) { cmdName = vendorBin } @@ -279,7 +278,7 @@ type PsalmOptions struct { ShowInfo bool // Show info-level issues JSON bool // Output in JSON format SARIF bool // Output in SARIF format for GitHub Security tab - Output io.Writer + Output goio.Writer } // PsalmType represents the detected Psalm configuration. @@ -293,21 +292,17 @@ const ( // DetectPsalm checks if Psalm is available in the project. 
func DetectPsalm(dir string) (PsalmType, bool) { + m := getMedium() + // Check for psalm.xml config psalmConfig := filepath.Join(dir, "psalm.xml") psalmDistConfig := filepath.Join(dir, "psalm.xml.dist") - hasConfig := false - if _, err := os.Stat(psalmConfig); err == nil { - hasConfig = true - } - if _, err := os.Stat(psalmDistConfig); err == nil { - hasConfig = true - } + hasConfig := m.Exists(psalmConfig) || m.Exists(psalmDistConfig) // Check for vendor binary psalmBin := filepath.Join(dir, "vendor", "bin", "psalm") - if _, err := os.Stat(psalmBin); err == nil { + if m.Exists(psalmBin) { return PsalmStandard, true } @@ -332,10 +327,12 @@ func RunPsalm(ctx context.Context, opts PsalmOptions) error { opts.Output = os.Stdout } + m := getMedium() + // Build command vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "psalm") cmdName := "psalm" - if _, err := os.Stat(vendorBin); err == nil { + if m.Exists(vendorBin) { cmdName = vendorBin } @@ -381,7 +378,7 @@ type AuditOptions struct { Dir string JSON bool // Output in JSON format Fix bool // Auto-fix vulnerabilities (npm only) - Output io.Writer + Output goio.Writer } // AuditResult holds the results of a security audit. @@ -422,7 +419,7 @@ func RunAudit(ctx context.Context, opts AuditOptions) ([]AuditResult, error) { results = append(results, composerResult) // Run npm audit if package.json exists - if _, err := os.Stat(filepath.Join(opts.Dir, "package.json")); err == nil { + if getMedium().Exists(filepath.Join(opts.Dir, "package.json")) { npmResult := runNpmAudit(ctx, opts) results = append(results, npmResult) } @@ -533,20 +530,22 @@ type RectorOptions struct { Fix bool // Apply changes (default is dry-run) Diff bool // Show detailed diff ClearCache bool // Clear cache before running - Output io.Writer + Output goio.Writer } // DetectRector checks if Rector is available in the project. func DetectRector(dir string) bool { + m := getMedium() + // Check for rector.php config rectorConfig := filepath.Join(dir, "rector.php") - if _, err := os.Stat(rectorConfig); err == nil { + if m.Exists(rectorConfig) { return true } // Check for vendor binary rectorBin := filepath.Join(dir, "vendor", "bin", "rector") - if _, err := os.Stat(rectorBin); err == nil { + if m.Exists(rectorBin) { return true } @@ -567,10 +566,12 @@ func RunRector(ctx context.Context, opts RectorOptions) error { opts.Output = os.Stdout } + m := getMedium() + // Build command vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "rector") cmdName := "rector" - if _, err := os.Stat(vendorBin); err == nil { + if m.Exists(vendorBin) { cmdName = vendorBin } @@ -608,22 +609,24 @@ type InfectionOptions struct { Threads int // Number of parallel threads Filter string // Filter files by pattern OnlyCovered bool // Only mutate covered code - Output io.Writer + Output goio.Writer } // DetectInfection checks if Infection is available in the project. 
func DetectInfection(dir string) bool { + m := getMedium() + // Check for infection config files configs := []string{"infection.json", "infection.json5", "infection.json.dist"} for _, config := range configs { - if _, err := os.Stat(filepath.Join(dir, config)); err == nil { + if m.Exists(filepath.Join(dir, config)) { return true } } // Check for vendor binary infectionBin := filepath.Join(dir, "vendor", "bin", "infection") - if _, err := os.Stat(infectionBin); err == nil { + if m.Exists(infectionBin) { return true } @@ -644,10 +647,12 @@ func RunInfection(ctx context.Context, opts InfectionOptions) error { opts.Output = os.Stdout } + m := getMedium() + // Build command vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "infection") cmdName := "infection" - if _, err := os.Stat(vendorBin); err == nil { + if m.Exists(vendorBin) { cmdName = vendorBin } @@ -780,7 +785,7 @@ type SecurityOptions struct { JSON bool // Output in JSON format SARIF bool // Output in SARIF format URL string // URL to check HTTP headers (optional) - Output io.Writer + Output goio.Writer } // SecurityResult holds the results of security scanning. @@ -873,13 +878,14 @@ func RunSecurityChecks(ctx context.Context, opts SecurityOptions) (*SecurityResu func runEnvSecurityChecks(dir string) []SecurityCheck { var checks []SecurityCheck + m := getMedium() envPath := filepath.Join(dir, ".env") - envContent, err := os.ReadFile(envPath) + envContent, err := m.Read(envPath) if err != nil { return checks } - envLines := strings.Split(string(envContent), "\n") + envLines := strings.Split(envContent, "\n") envMap := make(map[string]string) for _, line := range envLines { line = strings.TrimSpace(line) @@ -948,12 +954,13 @@ func runEnvSecurityChecks(dir string) []SecurityCheck { func runFilesystemSecurityChecks(dir string) []SecurityCheck { var checks []SecurityCheck + m := getMedium() // Check .env not in public publicEnvPaths := []string{"public/.env", "public_html/.env"} for _, path := range publicEnvPaths { fullPath := filepath.Join(dir, path) - if _, err := os.Stat(fullPath); err == nil { + if m.Exists(fullPath) { checks = append(checks, SecurityCheck{ ID: "env_not_public", Name: ".env Not Publicly Accessible", @@ -970,7 +977,7 @@ func runFilesystemSecurityChecks(dir string) []SecurityCheck { publicGitPaths := []string{"public/.git", "public_html/.git"} for _, path := range publicGitPaths { fullPath := filepath.Join(dir, path) - if _, err := os.Stat(fullPath); err == nil { + if m.Exists(fullPath) { checks = append(checks, SecurityCheck{ ID: "git_not_public", Name: ".git Not Publicly Accessible", diff --git a/internal/cmd/php/services.go b/internal/cmd/php/services.go index 81b8594c..583dc1f7 100644 --- a/internal/cmd/php/services.go +++ b/internal/cmd/php/services.go @@ -78,17 +78,24 @@ func (s *baseService) Logs(follow bool) (io.ReadCloser, error) { return nil, cli.Err("no log file available for %s", s.name) } - file, err := os.Open(s.logPath) + m := getMedium() + file, err := m.Open(s.logPath) if err != nil { return nil, cli.WrapVerb(err, "open", "log file") } if !follow { - return file, nil + return file.(io.ReadCloser), nil } // For follow mode, return a tailing reader - return newTailReader(file), nil + // Type assert to get the underlying *os.File for tailing + osFile, ok := file.(*os.File) + if !ok { + file.Close() + return nil, cli.Err("log file is not a regular file") + } + return newTailReader(osFile), nil } func (s *baseService) startProcess(ctx context.Context, cmdName string, args []string, env []string) error { 
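The same substitution pattern runs through every file in this patch: filesystem access goes through getMedium(), so a test can install a fake medium via SetMedium and exercise the command logic without touching the real disk. A representative before/after, shown only as an illustrative sketch (configPath is a hypothetical variable):

// Before: direct call, hard to intercept in tests.
data, err := os.ReadFile(configPath)          // data is []byte
// After: routed through the io.Medium abstraction.
content, err := getMedium().Read(configPath)  // content is a string
// Callers that unmarshal JSON now convert explicitly, as in the diffs above:
// json.Unmarshal([]byte(content), &target)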
@@ -100,16 +107,23 @@ func (s *baseService) startProcess(ctx context.Context, cmdName string, args []s } // Create log file + m := getMedium() logDir := filepath.Join(s.dir, ".core", "logs") - if err := os.MkdirAll(logDir, 0755); err != nil { + if err := m.EnsureDir(logDir); err != nil { return cli.WrapVerb(err, "create", "log directory") } s.logPath = filepath.Join(logDir, cli.Sprintf("%s.log", strings.ToLower(s.name))) - logFile, err := os.OpenFile(s.logPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) + logWriter, err := m.Create(s.logPath) if err != nil { return cli.WrapVerb(err, "create", "log file") } + // Type assert to get the underlying *os.File for use with exec.Cmd + logFile, ok := logWriter.(*os.File) + if !ok { + logWriter.Close() + return cli.Err("log file is not a regular file") + } s.logFile = logFile // Create command diff --git a/internal/cmd/php/ssl.go b/internal/cmd/php/ssl.go index c81e7627..f3cd2d2f 100644 --- a/internal/cmd/php/ssl.go +++ b/internal/cmd/php/ssl.go @@ -22,6 +22,7 @@ type SSLOptions struct { // GetSSLDir returns the SSL directory, creating it if necessary. func GetSSLDir(opts SSLOptions) (string, error) { + m := getMedium() dir := opts.Dir if dir == "" { home, err := os.UserHomeDir() @@ -31,7 +32,7 @@ func GetSSLDir(opts SSLOptions) (string, error) { dir = filepath.Join(home, DefaultSSLDir) } - if err := os.MkdirAll(dir, 0755); err != nil { + if err := m.EnsureDir(dir); err != nil { return "", cli.WrapVerb(err, "create", "SSL directory") } @@ -53,16 +54,17 @@ func CertPaths(domain string, opts SSLOptions) (certFile, keyFile string, err er // CertsExist checks if SSL certificates exist for the given domain. func CertsExist(domain string, opts SSLOptions) bool { + m := getMedium() certFile, keyFile, err := CertPaths(domain, opts) if err != nil { return false } - if _, err := os.Stat(certFile); os.IsNotExist(err) { + if !m.IsFile(certFile) { return false } - if _, err := os.Stat(keyFile); os.IsNotExist(err) { + if !m.IsFile(keyFile) { return false } diff --git a/internal/cmd/php/testing.go b/internal/cmd/php/testing.go index 7a5ebbb4..520aff2b 100644 --- a/internal/cmd/php/testing.go +++ b/internal/cmd/php/testing.go @@ -53,7 +53,7 @@ const ( func DetectTestRunner(dir string) TestRunner { // Check for Pest pestFile := filepath.Join(dir, "tests", "Pest.php") - if _, err := os.Stat(pestFile); err == nil { + if getMedium().IsFile(pestFile) { return TestRunnerPest } @@ -108,10 +108,11 @@ func RunParallel(ctx context.Context, opts TestOptions) error { // buildPestCommand builds the command for running Pest tests. func buildPestCommand(opts TestOptions) (string, []string) { + m := getMedium() // Check for vendor binary first vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "pest") cmdName := "pest" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmdName = vendorBin } @@ -149,10 +150,11 @@ func buildPestCommand(opts TestOptions) (string, []string) { // buildPHPUnitCommand builds the command for running PHPUnit tests. 
func buildPHPUnitCommand(opts TestOptions) (string, []string) { + m := getMedium() // Check for vendor binary first vendorBin := filepath.Join(opts.Dir, "vendor", "bin", "phpunit") cmdName := "phpunit" - if _, err := os.Stat(vendorBin); err == nil { + if m.IsFile(vendorBin) { cmdName = vendorBin } @@ -165,7 +167,7 @@ func buildPHPUnitCommand(opts TestOptions) (string, []string) { if opts.Parallel { // PHPUnit uses paratest for parallel execution paratestBin := filepath.Join(opts.Dir, "vendor", "bin", "paratest") - if _, err := os.Stat(paratestBin); err == nil { + if m.IsFile(paratestBin) { cmdName = paratestBin } } From dd25cff835070eb41b3a6fa108fa25041bb5d31d Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:30:23 +0000 Subject: [PATCH 02/10] feat(io): add Node in-memory filesystem (port from Borg DataNode) (#343) (#352) Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- pkg/io/node/node.go | 382 +++++++++++++++++++++++++++ pkg/io/node/node_test.go | 543 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 925 insertions(+) create mode 100644 pkg/io/node/node.go create mode 100644 pkg/io/node/node_test.go diff --git a/pkg/io/node/node.go b/pkg/io/node/node.go new file mode 100644 index 00000000..a213cb28 --- /dev/null +++ b/pkg/io/node/node.go @@ -0,0 +1,382 @@ +package node + +import ( + "archive/tar" + "bytes" + "io" + "io/fs" + "os" + "path" + "sort" + "strings" + "time" +) + +// Node is an in-memory filesystem that implements fs.FS, fs.StatFS, +// and fs.ReadFileFS. It stores files as byte slices keyed by their +// path, with directories being implicit based on path prefixes. +// +// Ported from github.com/Snider/Borg/pkg/datanode. +type Node struct { + files map[string]*nodeFile +} + +// Compile-time interface checks. +var ( + _ fs.FS = (*Node)(nil) + _ fs.StatFS = (*Node)(nil) + _ fs.ReadFileFS = (*Node)(nil) +) + +// New creates a new, empty Node. +func New() *Node { + return &Node{files: make(map[string]*nodeFile)} +} + +// FromTar creates a new Node from a tarball. +func FromTar(tarball []byte) (*Node, error) { + n := New() + tarReader := tar.NewReader(bytes.NewReader(tarball)) + + for { + header, err := tarReader.Next() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + if header.Typeflag == tar.TypeReg { + data, err := io.ReadAll(tarReader) + if err != nil { + return nil, err + } + n.AddData(header.Name, data) + } + } + + return n, nil +} + +// ToTar serializes the Node to a tarball. +func (n *Node) ToTar() ([]byte, error) { + buf := new(bytes.Buffer) + tw := tar.NewWriter(buf) + + for _, file := range n.files { + hdr := &tar.Header{ + Name: file.name, + Mode: 0600, + Size: int64(len(file.content)), + ModTime: file.modTime, + } + if err := tw.WriteHeader(hdr); err != nil { + return nil, err + } + if _, err := tw.Write(file.content); err != nil { + return nil, err + } + } + + if err := tw.Close(); err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// AddData adds a file to the Node. If a file with the same name +// already exists it is overwritten. Directory entries (names ending +// in "/") and empty names are silently ignored. +func (n *Node) AddData(name string, content []byte) { + name = strings.TrimPrefix(name, "/") + if name == "" { + return + } + // Directories are implicit, so we don't store them. + // A name ending in "/" is treated as a directory. 
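+	// For example, AddData("dir/", nil) is a no-op, while AddData("/a/b.txt", data)
+	// stores the file under the key "a/b.txt" because the leading slash is trimmed above.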
+ if strings.HasSuffix(name, "/") { + return + } + n.files[name] = &nodeFile{ + name: name, + content: content, + modTime: time.Now(), + } +} + +// Open opens a file from the Node, satisfying the fs.FS interface. +func (n *Node) Open(name string) (fs.File, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + return &nodeFileReader{file: file}, nil + } + // Check if it's a directory. + prefix := name + "/" + if name == "." || name == "" { + prefix = "" + } + for p := range n.files { + if strings.HasPrefix(p, prefix) { + return &dirFile{path: name, modTime: time.Now()}, nil + } + } + return nil, fs.ErrNotExist +} + +// Stat returns the FileInfo for the named file, satisfying the +// fs.StatFS interface. +func (n *Node) Stat(name string) (fs.FileInfo, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + return file.Stat() + } + // Check if it's a directory. + prefix := name + "/" + if name == "." || name == "" { + prefix = "" + } + for p := range n.files { + if strings.HasPrefix(p, prefix) { + return &dirInfo{name: path.Base(name), modTime: time.Now()}, nil + } + } + return nil, fs.ErrNotExist +} + +// ReadFile reads the named file and returns its contents, satisfying +// the fs.ReadFileFS interface. +func (n *Node) ReadFile(name string) ([]byte, error) { + name = strings.TrimPrefix(name, "/") + if file, ok := n.files[name]; ok { + // Return a copy so callers cannot mutate the internal state. + out := make([]byte, len(file.content)) + copy(out, file.content) + return out, nil + } + return nil, fs.ErrNotExist +} + +// ReadDir reads and returns all directory entries for the named directory. +func (n *Node) ReadDir(name string) ([]fs.DirEntry, error) { + name = strings.TrimPrefix(name, "/") + if name == "." { + name = "" + } + + // Disallow reading a file as a directory. + if info, err := n.Stat(name); err == nil && !info.IsDir() { + return nil, &fs.PathError{Op: "readdir", Path: name, Err: fs.ErrInvalid} + } + + entries := []fs.DirEntry{} + seen := make(map[string]bool) + + prefix := "" + if name != "" { + prefix = name + "/" + } + + for p := range n.files { + if !strings.HasPrefix(p, prefix) { + continue + } + + relPath := strings.TrimPrefix(p, prefix) + firstComponent := strings.Split(relPath, "/")[0] + + if seen[firstComponent] { + continue + } + seen[firstComponent] = true + + if strings.Contains(relPath, "/") { + // It's a directory. + dir := &dirInfo{name: firstComponent, modTime: time.Now()} + entries = append(entries, fs.FileInfoToDirEntry(dir)) + } else { + // It's a file. + file := n.files[p] + info, _ := file.Stat() + entries = append(entries, fs.FileInfoToDirEntry(info)) + } + } + + // Sort for stable order. + sort.Slice(entries, func(i, j int) bool { + return entries[i].Name() < entries[j].Name() + }) + + return entries, nil +} + +// Exists returns true if the file or directory exists in the Node. +func (n *Node) Exists(name string, opts ...ExistsOptions) (bool, error) { + info, err := n.Stat(name) + if err != nil { + if err == fs.ErrNotExist || os.IsNotExist(err) { + return false, nil + } + return false, err + } + if len(opts) > 0 { + if opts[0].WantType == fs.ModeDir && !info.IsDir() { + return false, nil + } + if opts[0].WantType != fs.ModeDir && info.IsDir() { + return false, nil + } + } + return true, nil +} + +// ExistsOptions allows customizing the Exists check. +type ExistsOptions struct { + WantType fs.FileMode +} + +// WalkOptions allows customizing the Walk behavior. 
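+// MaxDepth limits how deep Walk descends (0 means unlimited), Filter skips an
+// entry when it returns false (pruning the whole subtree for directories), and
+// SkipErrors drops walk errors instead of passing them to the callback.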
+type WalkOptions struct { + MaxDepth int + Filter func(path string, d fs.DirEntry) bool + SkipErrors bool +} + +// Walk recursively descends the file tree rooted at root, calling fn +// for each file or directory in the tree. +func (n *Node) Walk(root string, fn fs.WalkDirFunc, opts ...WalkOptions) error { + var maxDepth int + var filter func(string, fs.DirEntry) bool + var skipErrors bool + if len(opts) > 0 { + maxDepth = opts[0].MaxDepth + filter = opts[0].Filter + skipErrors = opts[0].SkipErrors + } + + return fs.WalkDir(n, root, func(p string, de fs.DirEntry, err error) error { + if err != nil { + if skipErrors { + return nil + } + return fn(p, de, err) + } + if filter != nil && !filter(p, de) { + if de.IsDir() { + return fs.SkipDir + } + return nil + } + + // Process the entry first. + if err := fn(p, de, nil); err != nil { + return err + } + + if maxDepth > 0 { + // Calculate depth relative to root. + cleanedPath := strings.TrimPrefix(p, root) + cleanedPath = strings.TrimPrefix(cleanedPath, "/") + + currentDepth := 0 + if p != root { + if cleanedPath == "" { + currentDepth = 0 + } else { + currentDepth = strings.Count(cleanedPath, "/") + 1 + } + } + + if de.IsDir() && currentDepth >= maxDepth { + return fs.SkipDir + } + } + return nil + }) +} + +// CopyFile copies a file from the Node to the local filesystem. +func (n *Node) CopyFile(sourcePath string, target string, perm os.FileMode) error { + sourceFile, err := n.Open(sourcePath) + if err != nil { + return err + } + defer sourceFile.Close() + + targetFile, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, perm) + if err != nil { + return err + } + defer targetFile.Close() + + _, err = io.Copy(targetFile, sourceFile) + return err +} + +// --------------------------------------------------------------------------- +// Internal types +// --------------------------------------------------------------------------- + +// nodeFile represents a file stored in the Node. +type nodeFile struct { + name string + content []byte + modTime time.Time +} + +func (f *nodeFile) Stat() (fs.FileInfo, error) { return &nodeFileInfo{file: f}, nil } +func (f *nodeFile) Read([]byte) (int, error) { return 0, io.EOF } +func (f *nodeFile) Close() error { return nil } + +// nodeFileInfo implements fs.FileInfo for a nodeFile. +type nodeFileInfo struct{ file *nodeFile } + +func (i *nodeFileInfo) Name() string { return path.Base(i.file.name) } +func (i *nodeFileInfo) Size() int64 { return int64(len(i.file.content)) } +func (i *nodeFileInfo) Mode() fs.FileMode { return 0444 } +func (i *nodeFileInfo) ModTime() time.Time { return i.file.modTime } +func (i *nodeFileInfo) IsDir() bool { return false } +func (i *nodeFileInfo) Sys() interface{} { return nil } + +// nodeFileReader implements fs.File for reading a nodeFile. +type nodeFileReader struct { + file *nodeFile + reader *bytes.Reader +} + +func (r *nodeFileReader) Stat() (fs.FileInfo, error) { return r.file.Stat() } +func (r *nodeFileReader) Read(p []byte) (int, error) { + if r.reader == nil { + r.reader = bytes.NewReader(r.file.content) + } + return r.reader.Read(p) +} +func (r *nodeFileReader) Close() error { return nil } + +// dirInfo implements fs.FileInfo for an implicit directory. 
+type dirInfo struct { + name string + modTime time.Time +} + +func (d *dirInfo) Name() string { return d.name } +func (d *dirInfo) Size() int64 { return 0 } +func (d *dirInfo) Mode() fs.FileMode { return fs.ModeDir | 0555 } +func (d *dirInfo) ModTime() time.Time { return d.modTime } +func (d *dirInfo) IsDir() bool { return true } +func (d *dirInfo) Sys() interface{} { return nil } + +// dirFile implements fs.File for a directory. +type dirFile struct { + path string + modTime time.Time +} + +func (d *dirFile) Stat() (fs.FileInfo, error) { + return &dirInfo{name: path.Base(d.path), modTime: d.modTime}, nil +} +func (d *dirFile) Read([]byte) (int, error) { + return 0, &fs.PathError{Op: "read", Path: d.path, Err: fs.ErrInvalid} +} +func (d *dirFile) Close() error { return nil } diff --git a/pkg/io/node/node_test.go b/pkg/io/node/node_test.go new file mode 100644 index 00000000..5ef1afab --- /dev/null +++ b/pkg/io/node/node_test.go @@ -0,0 +1,543 @@ +package node + +import ( + "archive/tar" + "bytes" + "errors" + "io" + "io/fs" + "os" + "path/filepath" + "sort" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// --------------------------------------------------------------------------- +// New +// --------------------------------------------------------------------------- + +func TestNew_Good(t *testing.T) { + n := New() + require.NotNil(t, n, "New() must not return nil") + assert.NotNil(t, n.files, "New() must initialize the files map") +} + +// --------------------------------------------------------------------------- +// AddData +// --------------------------------------------------------------------------- + +func TestAddData_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + file, ok := n.files["foo.txt"] + require.True(t, ok, "file foo.txt should be present") + assert.Equal(t, []byte("foo"), file.content) + + info, err := file.Stat() + require.NoError(t, err) + assert.Equal(t, "foo.txt", info.Name()) +} + +func TestAddData_Bad(t *testing.T) { + n := New() + + // Empty name is silently ignored. + n.AddData("", []byte("data")) + assert.Empty(t, n.files, "empty name must not be stored") + + // Directory entry (trailing slash) is silently ignored. 
+ n.AddData("dir/", nil) + assert.Empty(t, n.files, "directory entry must not be stored") +} + +func TestAddData_Ugly(t *testing.T) { + t.Run("Overwrite", func(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("foo.txt", []byte("bar")) + + file := n.files["foo.txt"] + assert.Equal(t, []byte("bar"), file.content, "second AddData should overwrite") + }) + + t.Run("LeadingSlash", func(t *testing.T) { + n := New() + n.AddData("/hello.txt", []byte("hi")) + _, ok := n.files["hello.txt"] + assert.True(t, ok, "leading slash should be trimmed") + }) +} + +// --------------------------------------------------------------------------- +// Open +// --------------------------------------------------------------------------- + +func TestOpen_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + file, err := n.Open("foo.txt") + require.NoError(t, err) + defer file.Close() + + buf := make([]byte, 10) + nr, err := file.Read(buf) + require.True(t, nr > 0 || err == io.EOF) + assert.Equal(t, "foo", string(buf[:nr])) +} + +func TestOpen_Bad(t *testing.T) { + n := New() + _, err := n.Open("nonexistent.txt") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestOpen_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + + // Opening a directory should succeed. + file, err := n.Open("bar") + require.NoError(t, err) + defer file.Close() + + // Reading from a directory should fail. + _, err = file.Read(make([]byte, 1)) + require.Error(t, err) + + var pathErr *fs.PathError + require.True(t, errors.As(err, &pathErr)) + assert.Equal(t, fs.ErrInvalid, pathErr.Err) +} + +// --------------------------------------------------------------------------- +// Stat +// --------------------------------------------------------------------------- + +func TestStat_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + // File stat. + info, err := n.Stat("bar/baz.txt") + require.NoError(t, err) + assert.Equal(t, "baz.txt", info.Name()) + assert.Equal(t, int64(3), info.Size()) + assert.False(t, info.IsDir()) + + // Directory stat. + dirInfo, err := n.Stat("bar") + require.NoError(t, err) + assert.True(t, dirInfo.IsDir()) + assert.Equal(t, "bar", dirInfo.Name()) +} + +func TestStat_Bad(t *testing.T) { + n := New() + _, err := n.Stat("nonexistent") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestStat_Ugly(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + // Root directory. + info, err := n.Stat(".") + require.NoError(t, err) + assert.True(t, info.IsDir()) + assert.Equal(t, ".", info.Name()) +} + +// --------------------------------------------------------------------------- +// ReadFile +// --------------------------------------------------------------------------- + +func TestReadFile_Good(t *testing.T) { + n := New() + n.AddData("hello.txt", []byte("hello world")) + + data, err := n.ReadFile("hello.txt") + require.NoError(t, err) + assert.Equal(t, []byte("hello world"), data) +} + +func TestReadFile_Bad(t *testing.T) { + n := New() + _, err := n.ReadFile("missing.txt") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestReadFile_Ugly(t *testing.T) { + n := New() + n.AddData("data.bin", []byte("original")) + + // Returned slice must be a copy — mutating it must not affect internal state. 
+ data, err := n.ReadFile("data.bin") + require.NoError(t, err) + data[0] = 'X' + + data2, err := n.ReadFile("data.bin") + require.NoError(t, err) + assert.Equal(t, []byte("original"), data2, "ReadFile must return an independent copy") +} + +// --------------------------------------------------------------------------- +// ReadDir +// --------------------------------------------------------------------------- + +func TestReadDir_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("bar/qux.txt", []byte("qux")) + + // Root. + entries, err := n.ReadDir(".") + require.NoError(t, err) + assert.Equal(t, []string{"bar", "foo.txt"}, sortedNames(entries)) + + // Subdirectory. + barEntries, err := n.ReadDir("bar") + require.NoError(t, err) + assert.Equal(t, []string{"baz.txt", "qux.txt"}, sortedNames(barEntries)) +} + +func TestReadDir_Bad(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + // Reading a file as a directory should fail. + _, err := n.ReadDir("foo.txt") + require.Error(t, err) + var pathErr *fs.PathError + require.True(t, errors.As(err, &pathErr)) + assert.Equal(t, fs.ErrInvalid, pathErr.Err) +} + +func TestReadDir_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("empty_dir/", nil) // Ignored by AddData. + + entries, err := n.ReadDir(".") + require.NoError(t, err) + assert.Equal(t, []string{"bar"}, sortedNames(entries)) +} + +// --------------------------------------------------------------------------- +// Exists +// --------------------------------------------------------------------------- + +func TestExists_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + exists, err := n.Exists("foo.txt") + require.NoError(t, err) + assert.True(t, exists) + + exists, err = n.Exists("bar") + require.NoError(t, err) + assert.True(t, exists) +} + +func TestExists_Bad(t *testing.T) { + n := New() + exists, err := n.Exists("nonexistent") + require.NoError(t, err) + assert.False(t, exists) +} + +func TestExists_Ugly(t *testing.T) { + n := New() + n.AddData("dummy.txt", []byte("dummy")) + + exists, err := n.Exists(".") + require.NoError(t, err) + assert.True(t, exists, "root '.' 
must exist") + + exists, err = n.Exists("") + require.NoError(t, err) + assert.True(t, exists, "empty path (root) must exist") +} + +// --------------------------------------------------------------------------- +// Walk +// --------------------------------------------------------------------------- + +func TestWalk_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + n.AddData("bar/qux.txt", []byte("qux")) + + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "bar", "bar/baz.txt", "bar/qux.txt", "foo.txt"}, paths) +} + +func TestWalk_Bad(t *testing.T) { + n := New() + + var called bool + err := n.Walk("nonexistent", func(p string, d fs.DirEntry, err error) error { + called = true + assert.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) + return err + }) + assert.True(t, called, "walk function must be called for nonexistent root") + assert.ErrorIs(t, err, fs.ErrNotExist) +} + +func TestWalk_Ugly(t *testing.T) { + n := New() + n.AddData("a/b.txt", []byte("b")) + n.AddData("a/c.txt", []byte("c")) + + // Stop walk early with a custom error. + walkErr := errors.New("stop walking") + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + if p == "a/b.txt" { + return walkErr + } + paths = append(paths, p) + return nil + }) + + assert.Equal(t, walkErr, err, "Walk must propagate the callback error") +} + +func TestWalk_Options(t *testing.T) { + n := New() + n.AddData("root.txt", []byte("root")) + n.AddData("a/a1.txt", []byte("a1")) + n.AddData("a/b/b1.txt", []byte("b1")) + n.AddData("c/c1.txt", []byte("c1")) + + t.Run("MaxDepth", func(t *testing.T) { + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }, WalkOptions{MaxDepth: 1}) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "a", "c", "root.txt"}, paths) + }) + + t.Run("Filter", func(t *testing.T) { + var paths []string + err := n.Walk(".", func(p string, d fs.DirEntry, err error) error { + paths = append(paths, p) + return nil + }, WalkOptions{Filter: func(p string, d fs.DirEntry) bool { + return !strings.HasPrefix(p, "a") + }}) + require.NoError(t, err) + + sort.Strings(paths) + assert.Equal(t, []string{".", "c", "c/c1.txt", "root.txt"}, paths) + }) + + t.Run("SkipErrors", func(t *testing.T) { + var called bool + err := n.Walk("nonexistent", func(p string, d fs.DirEntry, err error) error { + called = true + return err + }, WalkOptions{SkipErrors: true}) + + assert.NoError(t, err, "SkipErrors should suppress the error") + assert.False(t, called, "callback should not be called when error is skipped") + }) +} + +// --------------------------------------------------------------------------- +// CopyFile +// --------------------------------------------------------------------------- + +func TestCopyFile_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + + tmpfile := filepath.Join(t.TempDir(), "test.txt") + err := n.CopyFile("foo.txt", tmpfile, 0644) + require.NoError(t, err) + + content, err := os.ReadFile(tmpfile) + require.NoError(t, err) + assert.Equal(t, "foo", string(content)) +} + +func TestCopyFile_Bad(t *testing.T) { + n := New() + tmpfile := filepath.Join(t.TempDir(), "test.txt") + + // Source does not exist. 
+ err := n.CopyFile("nonexistent.txt", tmpfile, 0644) + assert.Error(t, err) + + // Destination not writable. + n.AddData("foo.txt", []byte("foo")) + err = n.CopyFile("foo.txt", "/nonexistent_dir/test.txt", 0644) + assert.Error(t, err) +} + +func TestCopyFile_Ugly(t *testing.T) { + n := New() + n.AddData("bar/baz.txt", []byte("baz")) + tmpfile := filepath.Join(t.TempDir(), "test.txt") + + // Attempting to copy a directory should fail. + err := n.CopyFile("bar", tmpfile, 0644) + assert.Error(t, err) +} + +// --------------------------------------------------------------------------- +// ToTar / FromTar +// --------------------------------------------------------------------------- + +func TestToTar_Good(t *testing.T) { + n := New() + n.AddData("foo.txt", []byte("foo")) + n.AddData("bar/baz.txt", []byte("baz")) + + tarball, err := n.ToTar() + require.NoError(t, err) + require.NotEmpty(t, tarball) + + // Verify tar content. + tr := tar.NewReader(bytes.NewReader(tarball)) + files := make(map[string]string) + for { + header, err := tr.Next() + if err == io.EOF { + break + } + require.NoError(t, err) + content, err := io.ReadAll(tr) + require.NoError(t, err) + files[header.Name] = string(content) + } + + assert.Equal(t, "foo", files["foo.txt"]) + assert.Equal(t, "baz", files["bar/baz.txt"]) +} + +func TestFromTar_Good(t *testing.T) { + buf := new(bytes.Buffer) + tw := tar.NewWriter(buf) + + for _, f := range []struct{ Name, Body string }{ + {"foo.txt", "foo"}, + {"bar/baz.txt", "baz"}, + } { + hdr := &tar.Header{ + Name: f.Name, + Mode: 0600, + Size: int64(len(f.Body)), + Typeflag: tar.TypeReg, + } + require.NoError(t, tw.WriteHeader(hdr)) + _, err := tw.Write([]byte(f.Body)) + require.NoError(t, err) + } + require.NoError(t, tw.Close()) + + n, err := FromTar(buf.Bytes()) + require.NoError(t, err) + + exists, _ := n.Exists("foo.txt") + assert.True(t, exists, "foo.txt should exist") + + exists, _ = n.Exists("bar/baz.txt") + assert.True(t, exists, "bar/baz.txt should exist") +} + +func TestFromTar_Bad(t *testing.T) { + // Truncated data that cannot be a valid tar. + truncated := make([]byte, 100) + _, err := FromTar(truncated) + assert.Error(t, err, "truncated data should produce an error") +} + +func TestTarRoundTrip_Good(t *testing.T) { + n1 := New() + n1.AddData("a.txt", []byte("alpha")) + n1.AddData("b/c.txt", []byte("charlie")) + + tarball, err := n1.ToTar() + require.NoError(t, err) + + n2, err := FromTar(tarball) + require.NoError(t, err) + + // Verify n2 matches n1. 
+ data, err := n2.ReadFile("a.txt") + require.NoError(t, err) + assert.Equal(t, []byte("alpha"), data) + + data, err = n2.ReadFile("b/c.txt") + require.NoError(t, err) + assert.Equal(t, []byte("charlie"), data) +} + +// --------------------------------------------------------------------------- +// fs.FS interface compliance +// --------------------------------------------------------------------------- + +func TestFSInterface_Good(t *testing.T) { + n := New() + n.AddData("hello.txt", []byte("world")) + + // fs.FS + var fsys fs.FS = n + file, err := fsys.Open("hello.txt") + require.NoError(t, err) + defer file.Close() + + // fs.StatFS + var statFS fs.StatFS = n + info, err := statFS.Stat("hello.txt") + require.NoError(t, err) + assert.Equal(t, "hello.txt", info.Name()) + assert.Equal(t, int64(5), info.Size()) + + // fs.ReadFileFS + var readFS fs.ReadFileFS = n + data, err := readFS.ReadFile("hello.txt") + require.NoError(t, err) + assert.Equal(t, []byte("world"), data) +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +func sortedNames(entries []fs.DirEntry) []string { + var names []string + for _, e := range entries { + names = append(names, e.Name()) + } + sort.Strings(names) + return names +} From c122e89f40d9d64a34ba2bb6c94178ba252f5389 Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:30:26 +0000 Subject: [PATCH 03/10] feat(io): add Sigil composable transform framework (port from Enchantrix) (#345) (#353) Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- pkg/io/sigil/sigil.go | 70 ++++++ pkg/io/sigil/sigil_test.go | 422 +++++++++++++++++++++++++++++++++++++ pkg/io/sigil/sigils.go | 273 ++++++++++++++++++++++++ 3 files changed, 765 insertions(+) create mode 100644 pkg/io/sigil/sigil.go create mode 100644 pkg/io/sigil/sigil_test.go create mode 100644 pkg/io/sigil/sigils.go diff --git a/pkg/io/sigil/sigil.go b/pkg/io/sigil/sigil.go new file mode 100644 index 00000000..69feed84 --- /dev/null +++ b/pkg/io/sigil/sigil.go @@ -0,0 +1,70 @@ +// Package sigil provides the Sigil composable transform framework for reversible +// and irreversible data transformations. +// +// Sigils are the core abstraction -- each sigil implements a specific transformation +// (encoding, compression, hashing) with a uniform interface. Sigils can be chained +// together to create transformation pipelines via Transmute and Untransmute. +// +// Example usage: +// +// hexSigil, _ := sigil.NewSigil("hex") +// base64Sigil, _ := sigil.NewSigil("base64") +// encoded, _ := sigil.Transmute(data, []sigil.Sigil{hexSigil, base64Sigil}) +// decoded, _ := sigil.Untransmute(encoded, []sigil.Sigil{hexSigil, base64Sigil}) +package sigil + +// Sigil defines the interface for a composable data transformer. +// +// A Sigil represents a single transformation unit that can be applied to byte data. +// Sigils may be reversible (encoding, compression) or irreversible (hashing). +// +// For reversible sigils: Out(In(x)) == x for all valid x +// For irreversible sigils: Out returns the input unchanged +// For symmetric sigils: In(x) == Out(x) +// +// Implementations must handle nil input by returning nil without error, +// and empty input by returning an empty slice without error. +type Sigil interface { + // In applies the forward transformation to the data. + // For encoding sigils, this encodes the data. + // For compression sigils, this compresses the data. 
+ // For hash sigils, this computes the digest. + In(data []byte) ([]byte, error) + + // Out applies the reverse transformation to the data. + // For reversible sigils, this recovers the original data. + // For irreversible sigils (e.g., hashing), this returns the input unchanged. + Out(data []byte) ([]byte, error) +} + +// Transmute applies a series of sigils to data in forward sequence. +// +// Each sigil's In method is called in order, with the output of one sigil +// becoming the input of the next. If any sigil returns an error, Transmute +// stops immediately and returns nil with that error. +func Transmute(data []byte, sigils []Sigil) ([]byte, error) { + var err error + for _, s := range sigils { + data, err = s.In(data) + if err != nil { + return nil, err + } + } + return data, nil +} + +// Untransmute applies a series of sigils to data in reverse sequence. +// +// Each sigil's Out method is called in reverse order, unwinding a previous +// Transmute operation. If any sigil returns an error, Untransmute stops +// immediately and returns nil with that error. +func Untransmute(data []byte, sigils []Sigil) ([]byte, error) { + var err error + for i := len(sigils) - 1; i >= 0; i-- { + data, err = sigils[i].Out(data) + if err != nil { + return nil, err + } + } + return data, nil +} diff --git a/pkg/io/sigil/sigil_test.go b/pkg/io/sigil/sigil_test.go new file mode 100644 index 00000000..17aa2efa --- /dev/null +++ b/pkg/io/sigil/sigil_test.go @@ -0,0 +1,422 @@ +package sigil + +import ( + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "encoding/hex" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// --------------------------------------------------------------------------- +// ReverseSigil +// --------------------------------------------------------------------------- + +func TestReverseSigil_Good(t *testing.T) { + s := &ReverseSigil{} + + out, err := s.In([]byte("hello")) + require.NoError(t, err) + assert.Equal(t, []byte("olleh"), out) + + // Symmetric: Out does the same thing. + restored, err := s.Out(out) + require.NoError(t, err) + assert.Equal(t, []byte("hello"), restored) +} + +func TestReverseSigil_Bad(t *testing.T) { + s := &ReverseSigil{} + + // Empty input returns empty. + out, err := s.In([]byte{}) + require.NoError(t, err) + assert.Equal(t, []byte{}, out) +} + +func TestReverseSigil_Ugly(t *testing.T) { + s := &ReverseSigil{} + + // Nil input returns nil. + out, err := s.In(nil) + require.NoError(t, err) + assert.Nil(t, out) + + out, err = s.Out(nil) + require.NoError(t, err) + assert.Nil(t, out) +} + +// --------------------------------------------------------------------------- +// HexSigil +// --------------------------------------------------------------------------- + +func TestHexSigil_Good(t *testing.T) { + s := &HexSigil{} + data := []byte("hello world") + + encoded, err := s.In(data) + require.NoError(t, err) + assert.Equal(t, []byte(hex.EncodeToString(data)), encoded) + + decoded, err := s.Out(encoded) + require.NoError(t, err) + assert.Equal(t, data, decoded) +} + +func TestHexSigil_Bad(t *testing.T) { + s := &HexSigil{} + + // Invalid hex input. + _, err := s.Out([]byte("zzzz")) + assert.Error(t, err) + + // Empty input. 
+ out, err := s.In([]byte{}) + require.NoError(t, err) + assert.Equal(t, []byte{}, out) +} + +func TestHexSigil_Ugly(t *testing.T) { + s := &HexSigil{} + + out, err := s.In(nil) + require.NoError(t, err) + assert.Nil(t, out) + + out, err = s.Out(nil) + require.NoError(t, err) + assert.Nil(t, out) +} + +// --------------------------------------------------------------------------- +// Base64Sigil +// --------------------------------------------------------------------------- + +func TestBase64Sigil_Good(t *testing.T) { + s := &Base64Sigil{} + data := []byte("composable transforms") + + encoded, err := s.In(data) + require.NoError(t, err) + assert.Equal(t, []byte(base64.StdEncoding.EncodeToString(data)), encoded) + + decoded, err := s.Out(encoded) + require.NoError(t, err) + assert.Equal(t, data, decoded) +} + +func TestBase64Sigil_Bad(t *testing.T) { + s := &Base64Sigil{} + + // Invalid base64 (wrong padding). + _, err := s.Out([]byte("!!!")) + assert.Error(t, err) + + // Empty input. + out, err := s.In([]byte{}) + require.NoError(t, err) + assert.Equal(t, []byte{}, out) +} + +func TestBase64Sigil_Ugly(t *testing.T) { + s := &Base64Sigil{} + + out, err := s.In(nil) + require.NoError(t, err) + assert.Nil(t, out) + + out, err = s.Out(nil) + require.NoError(t, err) + assert.Nil(t, out) +} + +// --------------------------------------------------------------------------- +// GzipSigil +// --------------------------------------------------------------------------- + +func TestGzipSigil_Good(t *testing.T) { + s := &GzipSigil{} + data := []byte("the quick brown fox jumps over the lazy dog") + + compressed, err := s.In(data) + require.NoError(t, err) + assert.NotEqual(t, data, compressed) + + decompressed, err := s.Out(compressed) + require.NoError(t, err) + assert.Equal(t, data, decompressed) +} + +func TestGzipSigil_Bad(t *testing.T) { + s := &GzipSigil{} + + // Invalid gzip data. + _, err := s.Out([]byte("not gzip")) + assert.Error(t, err) + + // Empty input compresses to a valid gzip stream. + compressed, err := s.In([]byte{}) + require.NoError(t, err) + assert.NotEmpty(t, compressed) // gzip header is always present + + decompressed, err := s.Out(compressed) + require.NoError(t, err) + assert.Equal(t, []byte{}, decompressed) +} + +func TestGzipSigil_Ugly(t *testing.T) { + s := &GzipSigil{} + + out, err := s.In(nil) + require.NoError(t, err) + assert.Nil(t, out) + + out, err = s.Out(nil) + require.NoError(t, err) + assert.Nil(t, out) +} + +// --------------------------------------------------------------------------- +// JSONSigil +// --------------------------------------------------------------------------- + +func TestJSONSigil_Good(t *testing.T) { + s := &JSONSigil{Indent: false} + data := []byte(`{ "key" : "value" }`) + + compacted, err := s.In(data) + require.NoError(t, err) + assert.Equal(t, []byte(`{"key":"value"}`), compacted) + + // Out is passthrough. + passthrough, err := s.Out(compacted) + require.NoError(t, err) + assert.Equal(t, compacted, passthrough) +} + +func TestJSONSigil_Good_Indent(t *testing.T) { + s := &JSONSigil{Indent: true} + data := []byte(`{"key":"value"}`) + + indented, err := s.In(data) + require.NoError(t, err) + assert.Contains(t, string(indented), "\n") + assert.Contains(t, string(indented), " ") +} + +func TestJSONSigil_Bad(t *testing.T) { + s := &JSONSigil{Indent: false} + + // Invalid JSON. 
+ _, err := s.In([]byte("not json")) + assert.Error(t, err) +} + +func TestJSONSigil_Ugly(t *testing.T) { + s := &JSONSigil{Indent: false} + + // json.Compact on nil/empty will produce an error (invalid JSON). + _, err := s.In(nil) + assert.Error(t, err) + + // Out with nil is passthrough. + out, err := s.Out(nil) + require.NoError(t, err) + assert.Nil(t, out) +} + +// --------------------------------------------------------------------------- +// HashSigil +// --------------------------------------------------------------------------- + +func TestHashSigil_Good(t *testing.T) { + data := []byte("hash me") + + tests := []struct { + name string + sigilName string + size int + }{ + {"md5", "md5", md5.Size}, + {"sha1", "sha1", sha1.Size}, + {"sha256", "sha256", sha256.Size}, + {"sha512", "sha512", sha512.Size}, + {"sha224", "sha224", sha256.Size224}, + {"sha384", "sha384", sha512.Size384}, + {"sha512-224", "sha512-224", 28}, + {"sha512-256", "sha512-256", 32}, + {"sha3-224", "sha3-224", 28}, + {"sha3-256", "sha3-256", 32}, + {"sha3-384", "sha3-384", 48}, + {"sha3-512", "sha3-512", 64}, + {"ripemd160", "ripemd160", 20}, + {"blake2s-256", "blake2s-256", 32}, + {"blake2b-256", "blake2b-256", 32}, + {"blake2b-384", "blake2b-384", 48}, + {"blake2b-512", "blake2b-512", 64}, + {"md4", "md4", 16}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s, err := NewSigil(tt.sigilName) + require.NoError(t, err) + + hashed, err := s.In(data) + require.NoError(t, err) + assert.Len(t, hashed, tt.size) + + // Out is passthrough. + passthrough, err := s.Out(hashed) + require.NoError(t, err) + assert.Equal(t, hashed, passthrough) + }) + } +} + +func TestHashSigil_Bad(t *testing.T) { + // Unsupported hash constant. + s := &HashSigil{Hash: 0} + _, err := s.In([]byte("data")) + assert.Error(t, err) + assert.Contains(t, err.Error(), "not available") +} + +func TestHashSigil_Ugly(t *testing.T) { + // Hashing empty data should still produce a valid digest. 
+ s, err := NewSigil("sha256") + require.NoError(t, err) + + hashed, err := s.In([]byte{}) + require.NoError(t, err) + assert.Len(t, hashed, sha256.Size) +} + +// --------------------------------------------------------------------------- +// NewSigil factory +// --------------------------------------------------------------------------- + +func TestNewSigil_Good(t *testing.T) { + names := []string{ + "reverse", "hex", "base64", "gzip", "json", "json-indent", + "md4", "md5", "sha1", "sha224", "sha256", "sha384", "sha512", + "ripemd160", + "sha3-224", "sha3-256", "sha3-384", "sha3-512", + "sha512-224", "sha512-256", + "blake2s-256", "blake2b-256", "blake2b-384", "blake2b-512", + } + + for _, name := range names { + t.Run(name, func(t *testing.T) { + s, err := NewSigil(name) + require.NoError(t, err) + assert.NotNil(t, s) + }) + } +} + +func TestNewSigil_Bad(t *testing.T) { + _, err := NewSigil("nonexistent") + assert.Error(t, err) + assert.Contains(t, err.Error(), "unknown sigil name") +} + +func TestNewSigil_Ugly(t *testing.T) { + _, err := NewSigil("") + assert.Error(t, err) +} + +// --------------------------------------------------------------------------- +// Transmute / Untransmute +// --------------------------------------------------------------------------- + +func TestTransmute_Good(t *testing.T) { + data := []byte("round trip") + + hexSigil, err := NewSigil("hex") + require.NoError(t, err) + base64Sigil, err := NewSigil("base64") + require.NoError(t, err) + + chain := []Sigil{hexSigil, base64Sigil} + + encoded, err := Transmute(data, chain) + require.NoError(t, err) + assert.NotEqual(t, data, encoded) + + decoded, err := Untransmute(encoded, chain) + require.NoError(t, err) + assert.Equal(t, data, decoded) +} + +func TestTransmute_Good_MultiSigil(t *testing.T) { + data := []byte("multi sigil pipeline test data") + + reverseSigil, err := NewSigil("reverse") + require.NoError(t, err) + hexSigil, err := NewSigil("hex") + require.NoError(t, err) + base64Sigil, err := NewSigil("base64") + require.NoError(t, err) + + chain := []Sigil{reverseSigil, hexSigil, base64Sigil} + + encoded, err := Transmute(data, chain) + require.NoError(t, err) + + decoded, err := Untransmute(encoded, chain) + require.NoError(t, err) + assert.Equal(t, data, decoded) +} + +func TestTransmute_Good_GzipRoundTrip(t *testing.T) { + data := []byte("compress then encode then decode then decompress") + + gzipSigil, err := NewSigil("gzip") + require.NoError(t, err) + hexSigil, err := NewSigil("hex") + require.NoError(t, err) + + chain := []Sigil{gzipSigil, hexSigil} + + encoded, err := Transmute(data, chain) + require.NoError(t, err) + + decoded, err := Untransmute(encoded, chain) + require.NoError(t, err) + assert.Equal(t, data, decoded) +} + +func TestTransmute_Bad(t *testing.T) { + // Transmute with a sigil that will fail: hex decode on non-hex input. + hexSigil := &HexSigil{} + + // Calling Out (decode) with invalid input via manual chain. + _, err := Untransmute([]byte("not-hex!!"), []Sigil{hexSigil}) + assert.Error(t, err) +} + +func TestTransmute_Ugly(t *testing.T) { + // Empty sigil chain is a no-op. + data := []byte("unchanged") + + result, err := Transmute(data, nil) + require.NoError(t, err) + assert.Equal(t, data, result) + + result, err = Untransmute(data, nil) + require.NoError(t, err) + assert.Equal(t, data, result) + + // Nil data through a chain. 
+ hexSigil, _ := NewSigil("hex") + result, err = Transmute(nil, []Sigil{hexSigil}) + require.NoError(t, err) + assert.Nil(t, result) +} diff --git a/pkg/io/sigil/sigils.go b/pkg/io/sigil/sigils.go new file mode 100644 index 00000000..3afc2072 --- /dev/null +++ b/pkg/io/sigil/sigils.go @@ -0,0 +1,273 @@ +package sigil + +import ( + "bytes" + "compress/gzip" + "crypto" + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "io" + + "golang.org/x/crypto/blake2b" + "golang.org/x/crypto/blake2s" + "golang.org/x/crypto/md4" + "golang.org/x/crypto/ripemd160" + "golang.org/x/crypto/sha3" +) + +// ReverseSigil is a symmetric Sigil that reverses the bytes of the payload. +// Both In and Out perform the same reversal operation. +type ReverseSigil struct{} + +// In reverses the bytes of the data. +func (s *ReverseSigil) In(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + reversed := make([]byte, len(data)) + for i, j := 0, len(data)-1; i < len(data); i, j = i+1, j-1 { + reversed[i] = data[j] + } + return reversed, nil +} + +// Out reverses the bytes of the data (symmetric with In). +func (s *ReverseSigil) Out(data []byte) ([]byte, error) { + return s.In(data) +} + +// HexSigil is a Sigil that encodes/decodes data to/from hexadecimal. +// In encodes the data, Out decodes it. +type HexSigil struct{} + +// In encodes the data to hexadecimal. +func (s *HexSigil) In(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + dst := make([]byte, hex.EncodedLen(len(data))) + hex.Encode(dst, data) + return dst, nil +} + +// Out decodes the data from hexadecimal. +func (s *HexSigil) Out(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + dst := make([]byte, hex.DecodedLen(len(data))) + _, err := hex.Decode(dst, data) + return dst, err +} + +// Base64Sigil is a Sigil that encodes/decodes data to/from standard base64. +// In encodes the data, Out decodes it. +type Base64Sigil struct{} + +// In encodes the data to base64. +func (s *Base64Sigil) In(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + dst := make([]byte, base64.StdEncoding.EncodedLen(len(data))) + base64.StdEncoding.Encode(dst, data) + return dst, nil +} + +// Out decodes the data from base64. +func (s *Base64Sigil) Out(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + dst := make([]byte, base64.StdEncoding.DecodedLen(len(data))) + n, err := base64.StdEncoding.Decode(dst, data) + return dst[:n], err +} + +// GzipSigil is a Sigil that compresses/decompresses data using gzip. +// In compresses the data, Out decompresses it. +type GzipSigil struct{} + +// In compresses the data using gzip. +func (s *GzipSigil) In(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + var b bytes.Buffer + gz := gzip.NewWriter(&b) + if _, err := gz.Write(data); err != nil { + return nil, err + } + if err := gz.Close(); err != nil { + return nil, err + } + return b.Bytes(), nil +} + +// Out decompresses the data using gzip. +func (s *GzipSigil) Out(data []byte) ([]byte, error) { + if data == nil { + return nil, nil + } + r, err := gzip.NewReader(bytes.NewReader(data)) + if err != nil { + return nil, err + } + defer r.Close() + return io.ReadAll(r) +} + +// JSONSigil is a Sigil that compacts or indents JSON data. +// Out is a passthrough (no-op). +type JSONSigil struct { + Indent bool +} + +// In compacts or indents the JSON data depending on the Indent field. 
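+// Invalid JSON is rejected with the error returned by encoding/json.
+//
+// A minimal usage sketch (the exact indent string is whatever json.Indent
+// is invoked with below):
+//
+//	compact := &JSONSigil{Indent: false}
+//	out, _ := compact.In([]byte(`{ "key" : "value" }`)) // -> {"key":"value"}
+//
+//	pretty := &JSONSigil{Indent: true}
+//	out, _ = pretty.In(out) // -> multi-line, indented JSON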
+func (s *JSONSigil) In(data []byte) ([]byte, error) { + if s.Indent { + var out bytes.Buffer + err := json.Indent(&out, data, "", " ") + return out.Bytes(), err + } + var out bytes.Buffer + err := json.Compact(&out, data) + return out.Bytes(), err +} + +// Out is a passthrough for JSONSigil. The primary use is formatting. +func (s *JSONSigil) Out(data []byte) ([]byte, error) { + return data, nil +} + +// HashSigil is a Sigil that hashes data using a specified algorithm. +// In computes the hash digest, Out is a passthrough. +type HashSigil struct { + Hash crypto.Hash +} + +// NewHashSigil creates a new HashSigil for the given hash algorithm. +func NewHashSigil(h crypto.Hash) *HashSigil { + return &HashSigil{Hash: h} +} + +// In hashes the data using the configured algorithm. +func (s *HashSigil) In(data []byte) ([]byte, error) { + var h io.Writer + switch s.Hash { + case crypto.MD4: + h = md4.New() + case crypto.MD5: + h = md5.New() + case crypto.SHA1: + h = sha1.New() + case crypto.SHA224: + h = sha256.New224() + case crypto.SHA256: + h = sha256.New() + case crypto.SHA384: + h = sha512.New384() + case crypto.SHA512: + h = sha512.New() + case crypto.RIPEMD160: + h = ripemd160.New() + case crypto.SHA3_224: + h = sha3.New224() + case crypto.SHA3_256: + h = sha3.New256() + case crypto.SHA3_384: + h = sha3.New384() + case crypto.SHA3_512: + h = sha3.New512() + case crypto.SHA512_224: + h = sha512.New512_224() + case crypto.SHA512_256: + h = sha512.New512_256() + case crypto.BLAKE2s_256: + h, _ = blake2s.New256(nil) + case crypto.BLAKE2b_256: + h, _ = blake2b.New256(nil) + case crypto.BLAKE2b_384: + h, _ = blake2b.New384(nil) + case crypto.BLAKE2b_512: + h, _ = blake2b.New512(nil) + default: + return nil, errors.New("sigil: hash algorithm not available") + } + + h.Write(data) + return h.(interface{ Sum([]byte) []byte }).Sum(nil), nil +} + +// Out is a passthrough for HashSigil. Hashing is irreversible. +func (s *HashSigil) Out(data []byte) ([]byte, error) { + return data, nil +} + +// NewSigil is a factory function that returns a Sigil based on a string name. +// It is the primary way to create Sigil instances. +// +// Supported names: reverse, hex, base64, gzip, json, json-indent, +// md4, md5, sha1, sha224, sha256, sha384, sha512, ripemd160, +// sha3-224, sha3-256, sha3-384, sha3-512, sha512-224, sha512-256, +// blake2s-256, blake2b-256, blake2b-384, blake2b-512. 
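+//
+// Sketch of typical use (payload stands for any []byte; unknown names return an error):
+//
+//	gz, _ := NewSigil("gzip")
+//	b64, _ := NewSigil("base64")
+//	packed, _ := Transmute(payload, []Sigil{gz, b64})  // gzip, then base64
+//	orig, _ := Untransmute(packed, []Sigil{gz, b64})   // base64-decode, then gunzip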
+func NewSigil(name string) (Sigil, error) { + switch name { + case "reverse": + return &ReverseSigil{}, nil + case "hex": + return &HexSigil{}, nil + case "base64": + return &Base64Sigil{}, nil + case "gzip": + return &GzipSigil{}, nil + case "json": + return &JSONSigil{Indent: false}, nil + case "json-indent": + return &JSONSigil{Indent: true}, nil + case "md4": + return NewHashSigil(crypto.MD4), nil + case "md5": + return NewHashSigil(crypto.MD5), nil + case "sha1": + return NewHashSigil(crypto.SHA1), nil + case "sha224": + return NewHashSigil(crypto.SHA224), nil + case "sha256": + return NewHashSigil(crypto.SHA256), nil + case "sha384": + return NewHashSigil(crypto.SHA384), nil + case "sha512": + return NewHashSigil(crypto.SHA512), nil + case "ripemd160": + return NewHashSigil(crypto.RIPEMD160), nil + case "sha3-224": + return NewHashSigil(crypto.SHA3_224), nil + case "sha3-256": + return NewHashSigil(crypto.SHA3_256), nil + case "sha3-384": + return NewHashSigil(crypto.SHA3_384), nil + case "sha3-512": + return NewHashSigil(crypto.SHA3_512), nil + case "sha512-224": + return NewHashSigil(crypto.SHA512_224), nil + case "sha512-256": + return NewHashSigil(crypto.SHA512_256), nil + case "blake2s-256": + return NewHashSigil(crypto.BLAKE2s_256), nil + case "blake2b-256": + return NewHashSigil(crypto.BLAKE2b_256), nil + case "blake2b-384": + return NewHashSigil(crypto.BLAKE2b_384), nil + case "blake2b-512": + return NewHashSigil(crypto.BLAKE2b_512), nil + default: + return nil, errors.New("sigil: unknown sigil name: " + name) + } +} From dfd7c3ab2de9e478c748d4e9cff0edb04dc3fcc6 Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:30:28 +0000 Subject: [PATCH 04/10] feat(crypt): add LTHN, ChaCha20, RSA, PGP primitives (port from Enchantrix) (#346) (#354) Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- pkg/crypt/chachapoly/chachapoly.go | 60 +++++++ pkg/crypt/chachapoly/chachapoly_test.go | 93 ++++++++++ pkg/crypt/lthn/lthn.go | 94 ++++++++++ pkg/crypt/lthn/lthn_test.go | 99 ++++++++++ pkg/crypt/pgp/pgp.go | 230 ++++++++++++++++++++++++ pkg/crypt/pgp/pgp_test.go | 164 +++++++++++++++++ pkg/crypt/rsa/rsa.go | 101 +++++++++++ pkg/crypt/rsa/rsa_test.go | 89 +++++++++ 8 files changed, 930 insertions(+) create mode 100644 pkg/crypt/chachapoly/chachapoly.go create mode 100644 pkg/crypt/chachapoly/chachapoly_test.go create mode 100644 pkg/crypt/lthn/lthn.go create mode 100644 pkg/crypt/lthn/lthn_test.go create mode 100644 pkg/crypt/pgp/pgp.go create mode 100644 pkg/crypt/pgp/pgp_test.go create mode 100644 pkg/crypt/rsa/rsa.go create mode 100644 pkg/crypt/rsa/rsa_test.go diff --git a/pkg/crypt/chachapoly/chachapoly.go b/pkg/crypt/chachapoly/chachapoly.go new file mode 100644 index 00000000..a3a8d766 --- /dev/null +++ b/pkg/crypt/chachapoly/chachapoly.go @@ -0,0 +1,60 @@ +// Package chachapoly provides XChaCha20-Poly1305 authenticated encryption. +// +// Encrypt prepends a random nonce to the ciphertext; Decrypt extracts it. +// The key must be 32 bytes (256 bits). +// +// Ported from Enchantrix (github.com/Snider/Enchantrix/pkg/crypt/std/chachapoly). +package chachapoly + +import ( + "crypto/rand" + "fmt" + "io" + + "golang.org/x/crypto/chacha20poly1305" +) + +// Encrypt encrypts plaintext using XChaCha20-Poly1305. +// The key must be exactly 32 bytes. A random 24-byte nonce is generated +// and prepended to the returned ciphertext. 
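+//
+// The output layout is nonce || sealed data || Poly1305 tag, so
+// len(ciphertext) == 24 + len(plaintext) + 16.
+//
+// Usage sketch (the key would normally come from a KDF or crypto/rand):
+//
+//	key := make([]byte, chacha20poly1305.KeySize) // 32 bytes
+//	ct, _ := Encrypt([]byte("hello"), key)
+//	pt, _ := Decrypt(ct, key) // pt == []byte("hello")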
+func Encrypt(plaintext, key []byte) ([]byte, error) { + aead, err := chacha20poly1305.NewX(key) + if err != nil { + return nil, fmt.Errorf("chachapoly: failed to create AEAD: %w", err) + } + + nonce := make([]byte, aead.NonceSize(), aead.NonceSize()+len(plaintext)+aead.Overhead()) + if _, err := io.ReadFull(rand.Reader, nonce); err != nil { + return nil, fmt.Errorf("chachapoly: failed to generate nonce: %w", err) + } + + return aead.Seal(nonce, nonce, plaintext, nil), nil +} + +// Decrypt decrypts ciphertext produced by Encrypt using XChaCha20-Poly1305. +// The key must be exactly 32 bytes. The nonce is extracted from the first +// 24 bytes of the ciphertext. +func Decrypt(ciphertext, key []byte) ([]byte, error) { + aead, err := chacha20poly1305.NewX(key) + if err != nil { + return nil, fmt.Errorf("chachapoly: failed to create AEAD: %w", err) + } + + minLen := aead.NonceSize() + aead.Overhead() + if len(ciphertext) < minLen { + return nil, fmt.Errorf("chachapoly: ciphertext too short: got %d bytes, need at least %d bytes", len(ciphertext), minLen) + } + + nonce, ciphertext := ciphertext[:aead.NonceSize()], ciphertext[aead.NonceSize():] + + decrypted, err := aead.Open(nil, nonce, ciphertext, nil) + if err != nil { + return nil, fmt.Errorf("chachapoly: decryption failed: %w", err) + } + + if len(decrypted) == 0 { + return []byte{}, nil + } + + return decrypted, nil +} diff --git a/pkg/crypt/chachapoly/chachapoly_test.go b/pkg/crypt/chachapoly/chachapoly_test.go new file mode 100644 index 00000000..5d3650b4 --- /dev/null +++ b/pkg/crypt/chachapoly/chachapoly_test.go @@ -0,0 +1,93 @@ +package chachapoly + +import ( + "crypto/rand" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func generateKey(t *testing.T) []byte { + t.Helper() + key := make([]byte, 32) + _, err := rand.Read(key) + require.NoError(t, err) + return key +} + +func TestEncryptDecrypt_Good(t *testing.T) { + key := generateKey(t) + plaintext := []byte("hello, XChaCha20-Poly1305!") + + ciphertext, err := Encrypt(plaintext, key) + require.NoError(t, err) + assert.NotEqual(t, plaintext, ciphertext) + // Ciphertext should be longer than plaintext (nonce + overhead) + assert.Greater(t, len(ciphertext), len(plaintext)) + + decrypted, err := Decrypt(ciphertext, key) + require.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestEncryptDecrypt_Bad(t *testing.T) { + key1 := generateKey(t) + key2 := generateKey(t) + plaintext := []byte("secret data") + + ciphertext, err := Encrypt(plaintext, key1) + require.NoError(t, err) + + // Decrypting with a different key should fail + _, err = Decrypt(ciphertext, key2) + assert.Error(t, err) +} + +func TestEncryptDecrypt_Ugly(t *testing.T) { + // Invalid key length should fail + shortKey := []byte("too-short") + _, err := Encrypt([]byte("data"), shortKey) + assert.Error(t, err) + + _, err = Decrypt([]byte("data"), shortKey) + assert.Error(t, err) + + // Ciphertext too short should fail + key := generateKey(t) + _, err = Decrypt([]byte("short"), key) + assert.Error(t, err) +} + +func TestEncryptDecryptEmpty_Good(t *testing.T) { + key := generateKey(t) + plaintext := []byte{} + + ciphertext, err := Encrypt(plaintext, key) + require.NoError(t, err) + + decrypted, err := Decrypt(ciphertext, key) + require.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestEncryptNonDeterministic_Good(t *testing.T) { + key := generateKey(t) + plaintext := []byte("same input") + + ct1, err := Encrypt(plaintext, key) + 
require.NoError(t, err) + + ct2, err := Encrypt(plaintext, key) + require.NoError(t, err) + + // Different nonces mean different ciphertexts + assert.NotEqual(t, ct1, ct2, "each encryption should produce unique ciphertext due to random nonce") + + // Both should decrypt to the same plaintext + d1, err := Decrypt(ct1, key) + require.NoError(t, err) + d2, err := Decrypt(ct2, key) + require.NoError(t, err) + assert.Equal(t, d1, d2) +} diff --git a/pkg/crypt/lthn/lthn.go b/pkg/crypt/lthn/lthn.go new file mode 100644 index 00000000..559aa0ee --- /dev/null +++ b/pkg/crypt/lthn/lthn.go @@ -0,0 +1,94 @@ +// Package lthn implements the LTHN quasi-salted hash algorithm. +// +// LTHN produces deterministic, verifiable hashes without requiring separate salt +// storage. The salt is derived from the input itself through: +// 1. Reversing the input string +// 2. Applying "leet speak" style character substitutions +// +// The final hash is: SHA256(input || derived_salt) +// +// This is suitable for content identifiers, cache keys, and deduplication. +// NOT suitable for password hashing - use bcrypt, Argon2, or scrypt instead. +// +// Ported from Enchantrix (github.com/Snider/Enchantrix/pkg/crypt/std/lthn). +// +// Example: +// +// hash := lthn.Hash("hello") +// valid := lthn.Verify("hello", hash) // true +package lthn + +import ( + "crypto/sha256" + "encoding/hex" +) + +// keyMap defines the character substitutions for quasi-salt derivation. +// These are inspired by "leet speak" conventions for letter-number substitution. +// The mapping is bidirectional for most characters but NOT fully symmetric. +var keyMap = map[rune]rune{ + 'o': '0', // letter O -> zero + 'l': '1', // letter L -> one + 'e': '3', // letter E -> three + 'a': '4', // letter A -> four + 's': 'z', // letter S -> Z + 't': '7', // letter T -> seven + '0': 'o', // zero -> letter O + '1': 'l', // one -> letter L + '3': 'e', // three -> letter E + '4': 'a', // four -> letter A + '7': 't', // seven -> letter T +} + +// SetKeyMap replaces the default character substitution map. +// Use this to customize the quasi-salt derivation for specific applications. +// Changes affect all subsequent Hash and Verify calls. +func SetKeyMap(newKeyMap map[rune]rune) { + keyMap = newKeyMap +} + +// GetKeyMap returns the current character substitution map. +func GetKeyMap() map[rune]rune { + return keyMap +} + +// Hash computes the LTHN hash of the input string. +// +// The algorithm: +// 1. Derive a quasi-salt by reversing the input and applying character substitutions +// 2. Concatenate: input + salt +// 3. Compute SHA-256 of the concatenated string +// 4. Return the hex-encoded digest (64 characters, lowercase) +// +// The same input always produces the same hash, enabling verification +// without storing a separate salt value. +func Hash(input string) string { + salt := createSalt(input) + hash := sha256.Sum256([]byte(input + salt)) + return hex.EncodeToString(hash[:]) +} + +// Verify checks if an input string produces the given hash. +// Returns true if Hash(input) equals the provided hash value. +func Verify(input string, hash string) bool { + return Hash(input) == hash +} + +// createSalt derives a quasi-salt by reversing the input and applying substitutions. 
+// For example: "hello" -> reversed "olleh" -> substituted "0113h"
+func createSalt(input string) string {
+	if input == "" {
+		return ""
+	}
+	runes := []rune(input)
+	salt := make([]rune, len(runes))
+	for i := 0; i < len(runes); i++ {
+		char := runes[len(runes)-1-i]
+		if replacement, ok := keyMap[char]; ok {
+			salt[i] = replacement
+		} else {
+			salt[i] = char
+		}
+	}
+	return string(salt)
+}
diff --git a/pkg/crypt/lthn/lthn_test.go b/pkg/crypt/lthn/lthn_test.go
new file mode 100644
index 00000000..88b10513
--- /dev/null
+++ b/pkg/crypt/lthn/lthn_test.go
@@ -0,0 +1,99 @@
+package lthn
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestHash_Good(t *testing.T) {
+	hash := Hash("hello")
+	assert.Len(t, hash, 64, "SHA-256 hex digest should be 64 characters")
+	assert.NotEmpty(t, hash)
+
+	// Same input should always produce the same hash (deterministic)
+	hash2 := Hash("hello")
+	assert.Equal(t, hash, hash2, "same input must produce the same hash")
+}
+
+func TestHash_Bad(t *testing.T) {
+	// Different inputs should produce different hashes
+	hash1 := Hash("hello")
+	hash2 := Hash("world")
+	assert.NotEqual(t, hash1, hash2, "different inputs must produce different hashes")
+}
+
+func TestHash_Ugly(t *testing.T) {
+	// Empty string should still produce a valid hash
+	hash := Hash("")
+	assert.Len(t, hash, 64)
+	assert.NotEmpty(t, hash)
+}
+
+func TestVerify_Good(t *testing.T) {
+	input := "test-data-123"
+	hash := Hash(input)
+	assert.True(t, Verify(input, hash), "Verify must return true for matching input")
+}
+
+func TestVerify_Bad(t *testing.T) {
+	input := "test-data-123"
+	hash := Hash(input)
+	assert.False(t, Verify("wrong-input", hash), "Verify must return false for non-matching input")
+	assert.False(t, Verify(input, "0000000000000000000000000000000000000000000000000000000000000000"),
+		"Verify must return false for wrong hash")
+}
+
+func TestVerify_Ugly(t *testing.T) {
+	// Empty input round-trip
+	hash := Hash("")
+	assert.True(t, Verify("", hash))
+}
+
+func TestSetKeyMap_Good(t *testing.T) {
+	// Save original map
+	original := GetKeyMap()
+
+	// Set a custom key map
+	custom := map[rune]rune{
+		'a': 'b',
+		'b': 'a',
+	}
+	SetKeyMap(custom)
+
+	// Hash should use new key map
+	hash1 := Hash("abc")
+
+	// Restore original and hash again
+	SetKeyMap(original)
+	hash2 := Hash("abc")
+
+	assert.NotEqual(t, hash1, hash2, "different key maps should produce different hashes")
+}
+
+func TestGetKeyMap_Good(t *testing.T) {
+	km := GetKeyMap()
+	require.NotNil(t, km)
+	assert.Equal(t, '0', km['o'])
+	assert.Equal(t, '1', km['l'])
+	assert.Equal(t, '3', km['e'])
+	assert.Equal(t, '4', km['a'])
+	assert.Equal(t, 'z', km['s'])
+	assert.Equal(t, '7', km['t'])
+}
+
+func TestCreateSalt_Good(t *testing.T) {
+	// "hello" reversed is "olleh"; with substitutions o->0, l->1,
+	// l->1, e->3, h->h the derived salt is "0113h".
+	// Verify indirectly by checking that the hash is deterministic.
+	hash1 := Hash("hello")
+	hash2 := Hash("hello")
+	assert.Equal(t, hash1, hash2, "salt derivation must be deterministic")
+}
+
+func TestCreateSalt_Ugly(t *testing.T) {
+	// Unicode input should not panic
+	hash := Hash("\U0001f600\U0001f601\U0001f602")
+	assert.Len(t, hash, 64)
+}
diff --git a/pkg/crypt/pgp/pgp.go b/pkg/crypt/pgp/pgp.go
new file mode 100644
index 00000000..d5c93b97
--- /dev/null
+++ b/pkg/crypt/pgp/pgp.go
@@ -0,0 +1,230 @@
+// Package pgp provides OpenPGP key generation, encryption, decryption,
+// signing, and verification using the ProtonMail go-crypto library.
+//
+// Ported from Enchantrix (github.com/Snider/Enchantrix/pkg/crypt/std/pgp).
+package pgp
+
+import (
+	"bytes"
+	"fmt"
+	"io"
+
+	"github.com/ProtonMail/go-crypto/openpgp"
+	"github.com/ProtonMail/go-crypto/openpgp/armor"
+	"github.com/ProtonMail/go-crypto/openpgp/packet"
+)
+
+// KeyPair holds armored PGP public and private keys.
+type KeyPair struct {
+	PublicKey  string
+	PrivateKey string
+}
+
+// CreateKeyPair generates a new PGP key pair for the given identity.
+// If password is non-empty, the private key is encrypted with it.
+// Returns a KeyPair with armored public and private keys.
+func CreateKeyPair(name, email, password string) (*KeyPair, error) {
+	entity, err := openpgp.NewEntity(name, "", email, nil)
+	if err != nil {
+		return nil, fmt.Errorf("pgp: failed to create entity: %w", err)
+	}
+
+	// Sign all the identities
+	for _, id := range entity.Identities {
+		_ = id.SelfSignature.SignUserId(id.UserId.Id, entity.PrimaryKey, entity.PrivateKey, nil)
+	}
+
+	// Encrypt private key with password if provided
+	if password != "" {
+		err = entity.PrivateKey.Encrypt([]byte(password))
+		if err != nil {
+			return nil, fmt.Errorf("pgp: failed to encrypt private key: %w", err)
+		}
+		for _, subkey := range entity.Subkeys {
+			err = subkey.PrivateKey.Encrypt([]byte(password))
+			if err != nil {
+				return nil, fmt.Errorf("pgp: failed to encrypt subkey: %w", err)
+			}
+		}
+	}
+
+	// Serialize public key
+	pubKeyBuf := new(bytes.Buffer)
+	pubKeyWriter, err := armor.Encode(pubKeyBuf, openpgp.PublicKeyType, nil)
+	if err != nil {
+		return nil, fmt.Errorf("pgp: failed to create armored public key writer: %w", err)
+	}
+	if err := entity.Serialize(pubKeyWriter); err != nil {
+		pubKeyWriter.Close()
+		return nil, fmt.Errorf("pgp: failed to serialize public key: %w", err)
+	}
+	pubKeyWriter.Close()
+
+	// Serialize private key
+	privKeyBuf := new(bytes.Buffer)
+	privKeyWriter, err := armor.Encode(privKeyBuf, openpgp.PrivateKeyType, nil)
+	if err != nil {
+		return nil, fmt.Errorf("pgp: failed to create armored private key writer: %w", err)
+	}
+	if password != "" {
+		// Manual serialization to avoid re-signing encrypted keys
+		if err := serializeEncryptedEntity(privKeyWriter, entity); err != nil {
+			privKeyWriter.Close()
+			return nil, fmt.Errorf("pgp: failed to serialize private key: %w", err)
+		}
+	} else {
+		if err := entity.SerializePrivate(privKeyWriter, nil); err != nil {
+			privKeyWriter.Close()
+			return nil, fmt.Errorf("pgp: failed to serialize private key: %w", err)
+		}
+	}
+	privKeyWriter.Close()
+
+	return &KeyPair{
+		PublicKey:  pubKeyBuf.String(),
+		PrivateKey: privKeyBuf.String(),
+	}, nil
+}
+
+// serializeEncryptedEntity manually serializes an entity with encrypted private keys
+// to avoid the panic from re-signing encrypted keys.
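+// Packets are written in transferable-secret-key order: the primary private
+// key, each identity with its self-signature, then each subkey with its
+// binding signature.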
+func serializeEncryptedEntity(w io.Writer, e *openpgp.Entity) error { + if err := e.PrivateKey.Serialize(w); err != nil { + return err + } + for _, ident := range e.Identities { + if err := ident.UserId.Serialize(w); err != nil { + return err + } + if err := ident.SelfSignature.Serialize(w); err != nil { + return err + } + } + for _, subkey := range e.Subkeys { + if err := subkey.PrivateKey.Serialize(w); err != nil { + return err + } + if err := subkey.Sig.Serialize(w); err != nil { + return err + } + } + return nil +} + +// Encrypt encrypts data for the recipient identified by their armored public key. +// Returns the encrypted data as armored PGP output. +func Encrypt(data []byte, publicKeyArmor string) ([]byte, error) { + keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader([]byte(publicKeyArmor))) + if err != nil { + return nil, fmt.Errorf("pgp: failed to read public key ring: %w", err) + } + + buf := new(bytes.Buffer) + armoredWriter, err := armor.Encode(buf, "PGP MESSAGE", nil) + if err != nil { + return nil, fmt.Errorf("pgp: failed to create armor encoder: %w", err) + } + + w, err := openpgp.Encrypt(armoredWriter, keyring, nil, nil, nil) + if err != nil { + armoredWriter.Close() + return nil, fmt.Errorf("pgp: failed to create encryption writer: %w", err) + } + + if _, err := w.Write(data); err != nil { + w.Close() + armoredWriter.Close() + return nil, fmt.Errorf("pgp: failed to write data: %w", err) + } + w.Close() + armoredWriter.Close() + + return buf.Bytes(), nil +} + +// Decrypt decrypts armored PGP data using the given armored private key. +// If the private key is encrypted, the password is used to decrypt it first. +func Decrypt(data []byte, privateKeyArmor, password string) ([]byte, error) { + keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader([]byte(privateKeyArmor))) + if err != nil { + return nil, fmt.Errorf("pgp: failed to read private key ring: %w", err) + } + + // Decrypt the private key if it is encrypted + for _, entity := range keyring { + if entity.PrivateKey != nil && entity.PrivateKey.Encrypted { + if err := entity.PrivateKey.Decrypt([]byte(password)); err != nil { + return nil, fmt.Errorf("pgp: failed to decrypt private key: %w", err) + } + } + for _, subkey := range entity.Subkeys { + if subkey.PrivateKey != nil && subkey.PrivateKey.Encrypted { + _ = subkey.PrivateKey.Decrypt([]byte(password)) + } + } + } + + // Decode armored message + block, err := armor.Decode(bytes.NewReader(data)) + if err != nil { + return nil, fmt.Errorf("pgp: failed to decode armored message: %w", err) + } + + md, err := openpgp.ReadMessage(block.Body, keyring, nil, nil) + if err != nil { + return nil, fmt.Errorf("pgp: failed to read message: %w", err) + } + + plaintext, err := io.ReadAll(md.UnverifiedBody) + if err != nil { + return nil, fmt.Errorf("pgp: failed to read plaintext: %w", err) + } + + return plaintext, nil +} + +// Sign creates an armored detached signature for the given data using +// the armored private key. If the key is encrypted, the password is used +// to decrypt it first. 
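+//
+// Typical flow (sketch; the keys are the armored values from CreateKeyPair,
+// and payload stands for any []byte to be signed):
+//
+//	kp, _ := CreateKeyPair("Release Bot", "bot@example.com", "")
+//	sig, _ := Sign(payload, kp.PrivateKey, "")
+//	err := Verify(payload, sig, kp.PublicKey) // nil when the signature checks out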
+func Sign(data []byte, privateKeyArmor, password string) ([]byte, error) { + keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader([]byte(privateKeyArmor))) + if err != nil { + return nil, fmt.Errorf("pgp: failed to read private key ring: %w", err) + } + + signer := keyring[0] + if signer.PrivateKey == nil { + return nil, fmt.Errorf("pgp: private key not found in keyring") + } + + if signer.PrivateKey.Encrypted { + if err := signer.PrivateKey.Decrypt([]byte(password)); err != nil { + return nil, fmt.Errorf("pgp: failed to decrypt private key: %w", err) + } + } + + buf := new(bytes.Buffer) + config := &packet.Config{} + err = openpgp.ArmoredDetachSign(buf, signer, bytes.NewReader(data), config) + if err != nil { + return nil, fmt.Errorf("pgp: failed to sign message: %w", err) + } + + return buf.Bytes(), nil +} + +// Verify verifies an armored detached signature against the given data +// and armored public key. Returns nil if the signature is valid. +func Verify(data, signature []byte, publicKeyArmor string) error { + keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader([]byte(publicKeyArmor))) + if err != nil { + return fmt.Errorf("pgp: failed to read public key ring: %w", err) + } + + _, err = openpgp.CheckArmoredDetachedSignature(keyring, bytes.NewReader(data), bytes.NewReader(signature), nil) + if err != nil { + return fmt.Errorf("pgp: signature verification failed: %w", err) + } + + return nil +} diff --git a/pkg/crypt/pgp/pgp_test.go b/pkg/crypt/pgp/pgp_test.go new file mode 100644 index 00000000..4f7edd92 --- /dev/null +++ b/pkg/crypt/pgp/pgp_test.go @@ -0,0 +1,164 @@ +package pgp + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCreateKeyPair_Good(t *testing.T) { + kp, err := CreateKeyPair("Test User", "test@example.com", "") + require.NoError(t, err) + require.NotNil(t, kp) + assert.Contains(t, kp.PublicKey, "-----BEGIN PGP PUBLIC KEY BLOCK-----") + assert.Contains(t, kp.PrivateKey, "-----BEGIN PGP PRIVATE KEY BLOCK-----") +} + +func TestCreateKeyPair_Bad(t *testing.T) { + // Empty name still works (openpgp allows it), but test with password + kp, err := CreateKeyPair("Secure User", "secure@example.com", "strong-password") + require.NoError(t, err) + require.NotNil(t, kp) + assert.Contains(t, kp.PublicKey, "-----BEGIN PGP PUBLIC KEY BLOCK-----") + assert.Contains(t, kp.PrivateKey, "-----BEGIN PGP PRIVATE KEY BLOCK-----") +} + +func TestCreateKeyPair_Ugly(t *testing.T) { + // Minimal identity + kp, err := CreateKeyPair("", "", "") + require.NoError(t, err) + require.NotNil(t, kp) +} + +func TestEncryptDecrypt_Good(t *testing.T) { + kp, err := CreateKeyPair("Test User", "test@example.com", "") + require.NoError(t, err) + + plaintext := []byte("hello, OpenPGP!") + ciphertext, err := Encrypt(plaintext, kp.PublicKey) + require.NoError(t, err) + assert.NotEmpty(t, ciphertext) + assert.Contains(t, string(ciphertext), "-----BEGIN PGP MESSAGE-----") + + decrypted, err := Decrypt(ciphertext, kp.PrivateKey, "") + require.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestEncryptDecrypt_Bad(t *testing.T) { + kp1, err := CreateKeyPair("User One", "one@example.com", "") + require.NoError(t, err) + kp2, err := CreateKeyPair("User Two", "two@example.com", "") + require.NoError(t, err) + + plaintext := []byte("secret data") + ciphertext, err := Encrypt(plaintext, kp1.PublicKey) + require.NoError(t, err) + + // Decrypting with wrong key should fail + _, err = Decrypt(ciphertext, kp2.PrivateKey, "") 
+ assert.Error(t, err) +} + +func TestEncryptDecrypt_Ugly(t *testing.T) { + // Invalid public key for encryption + _, err := Encrypt([]byte("data"), "not-a-pgp-key") + assert.Error(t, err) + + // Invalid private key for decryption + _, err = Decrypt([]byte("data"), "not-a-pgp-key", "") + assert.Error(t, err) +} + +func TestEncryptDecryptWithPassword_Good(t *testing.T) { + password := "my-secret-passphrase" + kp, err := CreateKeyPair("Secure User", "secure@example.com", password) + require.NoError(t, err) + + plaintext := []byte("encrypted with password-protected key") + ciphertext, err := Encrypt(plaintext, kp.PublicKey) + require.NoError(t, err) + + decrypted, err := Decrypt(ciphertext, kp.PrivateKey, password) + require.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestSignVerify_Good(t *testing.T) { + kp, err := CreateKeyPair("Signer", "signer@example.com", "") + require.NoError(t, err) + + data := []byte("message to sign") + signature, err := Sign(data, kp.PrivateKey, "") + require.NoError(t, err) + assert.NotEmpty(t, signature) + assert.Contains(t, string(signature), "-----BEGIN PGP SIGNATURE-----") + + err = Verify(data, signature, kp.PublicKey) + assert.NoError(t, err) +} + +func TestSignVerify_Bad(t *testing.T) { + kp, err := CreateKeyPair("Signer", "signer@example.com", "") + require.NoError(t, err) + + data := []byte("original message") + signature, err := Sign(data, kp.PrivateKey, "") + require.NoError(t, err) + + // Verify with tampered data should fail + err = Verify([]byte("tampered message"), signature, kp.PublicKey) + assert.Error(t, err) +} + +func TestSignVerify_Ugly(t *testing.T) { + // Invalid key for signing + _, err := Sign([]byte("data"), "not-a-key", "") + assert.Error(t, err) + + // Invalid key for verification + kp, err := CreateKeyPair("Signer", "signer@example.com", "") + require.NoError(t, err) + + data := []byte("message") + sig, err := Sign(data, kp.PrivateKey, "") + require.NoError(t, err) + + err = Verify(data, sig, "not-a-key") + assert.Error(t, err) +} + +func TestSignVerifyWithPassword_Good(t *testing.T) { + password := "signing-password" + kp, err := CreateKeyPair("Signer", "signer@example.com", password) + require.NoError(t, err) + + data := []byte("signed with password-protected key") + signature, err := Sign(data, kp.PrivateKey, password) + require.NoError(t, err) + + err = Verify(data, signature, kp.PublicKey) + assert.NoError(t, err) +} + +func TestFullRoundTrip_Good(t *testing.T) { + // Generate keys, encrypt, decrypt, sign, and verify - full round trip + kp, err := CreateKeyPair("Full Test", "full@example.com", "") + require.NoError(t, err) + + original := []byte("full round-trip test data") + + // Encrypt then decrypt + ciphertext, err := Encrypt(original, kp.PublicKey) + require.NoError(t, err) + decrypted, err := Decrypt(ciphertext, kp.PrivateKey, "") + require.NoError(t, err) + assert.Equal(t, original, decrypted) + + // Sign then verify + signature, err := Sign(original, kp.PrivateKey, "") + require.NoError(t, err) + err = Verify(original, signature, kp.PublicKey) + assert.NoError(t, err) +} diff --git a/pkg/crypt/rsa/rsa.go b/pkg/crypt/rsa/rsa.go new file mode 100644 index 00000000..1fd17451 --- /dev/null +++ b/pkg/crypt/rsa/rsa.go @@ -0,0 +1,101 @@ +// Package rsa provides RSA key generation, encryption, and decryption +// using OAEP with SHA-256. +// +// Ported from Enchantrix (github.com/Snider/Enchantrix/pkg/crypt/std/rsa). 
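+//
+// Usage sketch. RSA-OAEP with SHA-256 can encrypt at most
+// modulusBytes - 2*32 - 2 bytes per message (190 bytes with a 2048-bit key),
+// so it is typically used to wrap a short secret such as a symmetric key
+// (sessionKey below stands for any such short secret):
+//
+//	kp, _ := GenerateKeyPair(2048)
+//	ct, _ := Encrypt(sessionKey, kp.PublicKey)
+//	pt, _ := Decrypt(ct, kp.PrivateKey) // pt == sessionKey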
+package rsa + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/pem" + "fmt" +) + +// KeyPair holds PEM-encoded RSA public and private keys. +type KeyPair struct { + PublicKey string + PrivateKey string +} + +// GenerateKeyPair creates a new RSA key pair of the given bit size. +// The minimum accepted key size is 2048 bits. +// Returns a KeyPair with PEM-encoded public and private keys. +func GenerateKeyPair(bits int) (*KeyPair, error) { + if bits < 2048 { + return nil, fmt.Errorf("rsa: key size too small: %d (minimum 2048)", bits) + } + + privKey, err := rsa.GenerateKey(rand.Reader, bits) + if err != nil { + return nil, fmt.Errorf("rsa: failed to generate private key: %w", err) + } + + privKeyBytes := x509.MarshalPKCS1PrivateKey(privKey) + privKeyPEM := pem.EncodeToMemory(&pem.Block{ + Type: "RSA PRIVATE KEY", + Bytes: privKeyBytes, + }) + + pubKeyBytes, err := x509.MarshalPKIXPublicKey(&privKey.PublicKey) + if err != nil { + return nil, fmt.Errorf("rsa: failed to marshal public key: %w", err) + } + pubKeyPEM := pem.EncodeToMemory(&pem.Block{ + Type: "PUBLIC KEY", + Bytes: pubKeyBytes, + }) + + return &KeyPair{ + PublicKey: string(pubKeyPEM), + PrivateKey: string(privKeyPEM), + }, nil +} + +// Encrypt encrypts data with the given PEM-encoded public key using RSA-OAEP +// with SHA-256. +func Encrypt(data []byte, publicKeyPEM string) ([]byte, error) { + block, _ := pem.Decode([]byte(publicKeyPEM)) + if block == nil { + return nil, fmt.Errorf("rsa: failed to decode public key PEM") + } + + pub, err := x509.ParsePKIXPublicKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("rsa: failed to parse public key: %w", err) + } + + rsaPub, ok := pub.(*rsa.PublicKey) + if !ok { + return nil, fmt.Errorf("rsa: not an RSA public key") + } + + ciphertext, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, rsaPub, data, nil) + if err != nil { + return nil, fmt.Errorf("rsa: failed to encrypt data: %w", err) + } + + return ciphertext, nil +} + +// Decrypt decrypts data with the given PEM-encoded private key using RSA-OAEP +// with SHA-256. 
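+// The key must be a PKCS#1 ("RSA PRIVATE KEY") PEM block such as the one
+// produced by GenerateKeyPair; PKCS#8 ("PRIVATE KEY") blocks are not parsed here.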
+func Decrypt(data []byte, privateKeyPEM string) ([]byte, error) { + block, _ := pem.Decode([]byte(privateKeyPEM)) + if block == nil { + return nil, fmt.Errorf("rsa: failed to decode private key PEM") + } + + priv, err := x509.ParsePKCS1PrivateKey(block.Bytes) + if err != nil { + return nil, fmt.Errorf("rsa: failed to parse private key: %w", err) + } + + plaintext, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, data, nil) + if err != nil { + return nil, fmt.Errorf("rsa: failed to decrypt data: %w", err) + } + + return plaintext, nil +} diff --git a/pkg/crypt/rsa/rsa_test.go b/pkg/crypt/rsa/rsa_test.go new file mode 100644 index 00000000..52b14f5b --- /dev/null +++ b/pkg/crypt/rsa/rsa_test.go @@ -0,0 +1,89 @@ +package rsa + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGenerateKeyPair_Good(t *testing.T) { + kp, err := GenerateKeyPair(2048) + require.NoError(t, err) + require.NotNil(t, kp) + assert.Contains(t, kp.PublicKey, "-----BEGIN PUBLIC KEY-----") + assert.Contains(t, kp.PrivateKey, "-----BEGIN RSA PRIVATE KEY-----") +} + +func TestGenerateKeyPair_Bad(t *testing.T) { + // Key size too small + _, err := GenerateKeyPair(1024) + assert.Error(t, err) + assert.Contains(t, err.Error(), "key size too small") +} + +func TestGenerateKeyPair_Ugly(t *testing.T) { + // Zero bits + _, err := GenerateKeyPair(0) + assert.Error(t, err) +} + +func TestEncryptDecrypt_Good(t *testing.T) { + kp, err := GenerateKeyPair(2048) + require.NoError(t, err) + + plaintext := []byte("hello, RSA-OAEP with SHA-256!") + ciphertext, err := Encrypt(plaintext, kp.PublicKey) + require.NoError(t, err) + assert.NotEqual(t, plaintext, ciphertext) + + decrypted, err := Decrypt(ciphertext, kp.PrivateKey) + require.NoError(t, err) + assert.Equal(t, plaintext, decrypted) +} + +func TestEncryptDecrypt_Bad(t *testing.T) { + kp1, err := GenerateKeyPair(2048) + require.NoError(t, err) + kp2, err := GenerateKeyPair(2048) + require.NoError(t, err) + + plaintext := []byte("secret data") + ciphertext, err := Encrypt(plaintext, kp1.PublicKey) + require.NoError(t, err) + + // Decrypting with wrong private key should fail + _, err = Decrypt(ciphertext, kp2.PrivateKey) + assert.Error(t, err) +} + +func TestEncryptDecrypt_Ugly(t *testing.T) { + // Invalid PEM for encryption + _, err := Encrypt([]byte("data"), "not-a-pem-key") + assert.Error(t, err) + + // Invalid PEM for decryption + _, err = Decrypt([]byte("data"), "not-a-pem-key") + assert.Error(t, err) +} + +func TestEncryptDecryptRoundTrip_Good(t *testing.T) { + kp, err := GenerateKeyPair(2048) + require.NoError(t, err) + + messages := []string{ + "", + "a", + "short message", + "a slightly longer message with some special chars: !@#$%^&*()", + } + + for _, msg := range messages { + ciphertext, err := Encrypt([]byte(msg), kp.PublicKey) + require.NoError(t, err) + + decrypted, err := Decrypt(ciphertext, kp.PrivateKey) + require.NoError(t, err) + assert.Equal(t, msg, string(decrypted), "round-trip failed for: %q", msg) + } +} From 1f0c67cae93f258713fc1c00fbfcb51fb30128d5 Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:45:45 +0000 Subject: [PATCH 05/10] feat(io): add S3 and SQLite Medium backends (#347) (#355) Implement two new storage backends for the io.Medium interface: - pkg/io/s3: S3-backed Medium using AWS SDK v2 with interface-based mocking for tests. Supports prefix-based namespacing via WithPrefix option. All 18 Medium methods implemented with proper S3 semantics (e.g. 
EnsureDir is no-op, IsDir checks prefix existence). - pkg/io/sqlite: SQLite-backed Medium using modernc.org/sqlite (pure Go, no CGo). Uses a single table schema with path, content, mode, is_dir, and mtime columns. Supports custom table names via WithTable option. All tests use :memory: databases. Both packages include comprehensive test suites following the _Good/_Bad/_Ugly naming convention with 87 tests total (36 S3, 51 SQLite). Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- go.mod | 18 + go.sum | 36 ++ pkg/io/s3/s3.go | 625 ++++++++++++++++++++++++++++++++ pkg/io/s3/s3_test.go | 646 +++++++++++++++++++++++++++++++++ pkg/io/sqlite/sqlite.go | 669 +++++++++++++++++++++++++++++++++++ pkg/io/sqlite/sqlite_test.go | 653 ++++++++++++++++++++++++++++++++++ 6 files changed, 2647 insertions(+) create mode 100644 pkg/io/s3/s3.go create mode 100644 pkg/io/s3/s3_test.go create mode 100644 pkg/io/sqlite/sqlite.go create mode 100644 pkg/io/sqlite/sqlite_test.go diff --git a/go.mod b/go.mod index ea9b957e..df985d42 100644 --- a/go.mod +++ b/go.mod @@ -37,6 +37,17 @@ require ( github.com/ProtonMail/go-crypto v1.3.0 // indirect github.com/TwiN/go-color v1.4.1 // indirect github.com/adrg/xdg v0.5.3 // indirect + github.com/aws/aws-sdk-go-v2 v1.41.1 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0 // indirect + github.com/aws/smithy-go v1.24.0 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bep/debounce v1.2.1 // indirect github.com/brianvoe/gofakeit/v6 v6.28.0 // indirect @@ -46,6 +57,7 @@ require ( github.com/cyphar/filepath-securejoin v0.6.1 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/davidmz/go-pageant v1.0.2 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect github.com/ebitengine/purego v0.9.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/fatih/color v1.18.0 // indirect @@ -78,6 +90,7 @@ require ( github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/ncruces/go-strftime v1.0.0 // indirect github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect @@ -85,6 +98,7 @@ require ( github.com/pjbgf/sha1cd v0.5.0 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/sagikazarmark/locafero v0.11.0 // indirect github.com/samber/lo v1.52.0 // indirect @@ -118,4 +132,8 @@ require ( google.golang.org/grpc v1.76.0 // indirect google.golang.org/protobuf v1.36.10 // indirect 
gopkg.in/warnings.v0 v0.1.2 // indirect + modernc.org/libc v1.67.6 // indirect + modernc.org/mathutil v1.7.1 // indirect + modernc.org/memory v1.11.0 // indirect + modernc.org/sqlite v1.44.3 // indirect ) diff --git a/go.sum b/go.sum index 58a940c4..47c905e2 100644 --- a/go.sum +++ b/go.sum @@ -24,6 +24,28 @@ github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFI github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/aws/aws-sdk-go-v2 v1.41.1 h1:ABlyEARCDLN034NhxlRUSZr4l71mh+T5KAeGh6cerhU= +github.com/aws/aws-sdk-go-v2 v1.41.1/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 h1:489krEF9xIGkOaaX3CE/Be2uWjiXrkCH6gUX+bZA/BU= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4/go.mod h1:IOAPF6oT9KCsceNTvvYMNHy0+kMF8akOjeDvPENWxp4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17 h1:xOLELNKGp2vsiteLsvLPwxC+mYmO6OZ8PYgiuPJzF8U= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.17/go.mod h1:5M5CI3D12dNOtH3/mk6minaRwI2/37ifCURZISxA/IQ= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17 h1:WWLqlh79iO48yLkj1v3ISRNiv+3KdQoZ6JWyfcsyQik= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.17/go.mod h1:EhG22vHRrvF8oXSTYStZhJc1aUgKtnJe+aOiFEV90cM= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 h1:JqcdRG//czea7Ppjb+g/n4o8i/R50aTBHkA7vu0lK+k= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17/go.mod h1:CO+WeGmIdj/MlPel2KwID9Gt7CNq4M65HUfBW97liM0= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 h1:0ryTNEdJbzUCEWkVXEXoqlXV72J5keC1GvILMOuD00E= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4/go.mod h1:HQ4qwNZh32C3CBeO6iJLQlgtMzqeG17ziAA/3KDJFow= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 h1:Z5EiPIzXKewUQK0QTMkutjiaPVeVYXX7KIqhXu/0fXs= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8/go.mod h1:FsTpJtvC4U1fyDXk7c71XoDv3HlRm8V3NiYLeYLh5YE= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17 h1:RuNSMoozM8oXlgLG/n6WLaFGoea7/CddrCfIiSA+xdY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.17/go.mod h1:F2xxQ9TZz5gDWsclCtPQscGpP0VUOc8RqgFM3vDENmU= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 h1:bGeHBsGZx0Dvu/eJC0Lh9adJa3M1xREcndxLNZlve2U= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17/go.mod h1:dcW24lbU0CzHusTE8LLHhRLI42ejmINN8Lcr22bwh/g= +github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0 h1:oeu8VPlOre74lBA/PMhxa5vewaMIMmILM+RraSyB8KA= +github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0/go.mod h1:5jggDlZ2CLQhwJBiZJb4vfk4f0GxWdEDruWKEJ1xOdo= +github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= +github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY= @@ -45,6 +67,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0= github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A= github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= @@ -160,6 +184,8 @@ github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzo github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w= +github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= github.com/oasdiff/oasdiff v1.11.9 h1:M/pIY4K1MWnML0DkAdUQU/CnJdNDr2z2hpD0lpKSccM= github.com/oasdiff/oasdiff v1.11.9/go.mod h1:4qorAPsG2EE/lXEs+FGzAJcYHXS3G7XghfqkCFPKzNQ= github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= @@ -185,6 +211,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRI github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/qdrant/go-client v1.16.2 h1:UUMJJfvXTByhwhH1DwWdbkhZ2cTdvSqVkXSIfBrVWSg= github.com/qdrant/go-client v1.16.2/go.mod h1:I+EL3h4HRoRTeHtbfOd/4kDXwCukZfkd41j/9wryGkw= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= @@ -336,3 +364,11 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +modernc.org/libc v1.67.6 h1:eVOQvpModVLKOdT+LvBPjdQqfrZq+pC39BygcT+E7OI= +modernc.org/libc v1.67.6/go.mod h1:JAhxUVlolfYDErnwiqaLvUqc8nfb2r6S6slAgZOnaiE= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/sqlite v1.44.3 h1:+39JvV/HWMcYslAwRxHb8067w+2zowvFOUrOWIy9PjY= +modernc.org/sqlite v1.44.3/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA= diff --git a/pkg/io/s3/s3.go b/pkg/io/s3/s3.go new file mode 100644 index 00000000..1c7bb949 --- /dev/null +++ 
b/pkg/io/s3/s3.go @@ -0,0 +1,625 @@ +// Package s3 provides an S3-backed implementation of the io.Medium interface. +package s3 + +import ( + "bytes" + "context" + "fmt" + goio "io" + "io/fs" + "os" + "path" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/aws-sdk-go-v2/service/s3/types" + + coreerr "github.com/host-uk/core/pkg/framework/core" +) + +// s3API is the subset of the S3 client API used by this package. +// This allows for interface-based mocking in tests. +type s3API interface { + GetObject(ctx context.Context, params *s3.GetObjectInput, optFns ...func(*s3.Options)) (*s3.GetObjectOutput, error) + PutObject(ctx context.Context, params *s3.PutObjectInput, optFns ...func(*s3.Options)) (*s3.PutObjectOutput, error) + DeleteObject(ctx context.Context, params *s3.DeleteObjectInput, optFns ...func(*s3.Options)) (*s3.DeleteObjectOutput, error) + DeleteObjects(ctx context.Context, params *s3.DeleteObjectsInput, optFns ...func(*s3.Options)) (*s3.DeleteObjectsOutput, error) + HeadObject(ctx context.Context, params *s3.HeadObjectInput, optFns ...func(*s3.Options)) (*s3.HeadObjectOutput, error) + ListObjectsV2(ctx context.Context, params *s3.ListObjectsV2Input, optFns ...func(*s3.Options)) (*s3.ListObjectsV2Output, error) + CopyObject(ctx context.Context, params *s3.CopyObjectInput, optFns ...func(*s3.Options)) (*s3.CopyObjectOutput, error) +} + +// Medium is an S3-backed storage backend implementing the io.Medium interface. +type Medium struct { + client s3API + bucket string + prefix string +} + +// Option configures a Medium. +type Option func(*Medium) + +// WithPrefix sets an optional key prefix for all operations. +func WithPrefix(prefix string) Option { + return func(m *Medium) { + // Ensure prefix ends with "/" if non-empty + if prefix != "" && !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + m.prefix = prefix + } +} + +// WithClient sets the S3 client for dependency injection. +func WithClient(client *s3.Client) Option { + return func(m *Medium) { + m.client = client + } +} + +// withAPI sets the s3API interface directly (for testing with mocks). +func withAPI(api s3API) Option { + return func(m *Medium) { + m.client = api + } +} + +// New creates a new S3 Medium for the given bucket. +func New(bucket string, opts ...Option) (*Medium, error) { + if bucket == "" { + return nil, coreerr.E("s3.New", "bucket name is required", nil) + } + m := &Medium{bucket: bucket} + for _, opt := range opts { + opt(m) + } + if m.client == nil { + return nil, coreerr.E("s3.New", "S3 client is required (use WithClient option)", nil) + } + return m, nil +} + +// key returns the full S3 object key for a given path. +func (m *Medium) key(p string) string { + // Clean the path using a leading "/" to sandbox traversal attempts, + // then strip the "/" prefix. This ensures ".." can't escape. + clean := path.Clean("/" + p) + if clean == "/" { + clean = "" + } + clean = strings.TrimPrefix(clean, "/") + + if m.prefix == "" { + return clean + } + if clean == "" { + return m.prefix + } + return m.prefix + clean +} + +// Read retrieves the content of a file as a string. 
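+// A minimal usage sketch (illustrative only; assumes an *s3.Client has been
+// constructed elsewhere, e.g. via s3.NewFromConfig):
+//
+//	m, err := New("my-bucket", WithClient(client), WithPrefix("data"))
+//	if err != nil {
+//		return err
+//	}
+//	_ = m.Write("config/app.yml", "key: value") // stored as data/config/app.yml
+//	content, err := m.Read("config/app.yml")    // read back as a string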
+func (m *Medium) Read(p string) (string, error) { + key := m.key(p) + if key == "" { + return "", coreerr.E("s3.Read", "path is required", os.ErrInvalid) + } + + out, err := m.client.GetObject(context.Background(), &s3.GetObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err != nil { + return "", coreerr.E("s3.Read", "failed to get object: "+key, err) + } + defer out.Body.Close() + + data, err := goio.ReadAll(out.Body) + if err != nil { + return "", coreerr.E("s3.Read", "failed to read body: "+key, err) + } + return string(data), nil +} + +// Write saves the given content to a file, overwriting it if it exists. +func (m *Medium) Write(p, content string) error { + key := m.key(p) + if key == "" { + return coreerr.E("s3.Write", "path is required", os.ErrInvalid) + } + + _, err := m.client.PutObject(context.Background(), &s3.PutObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + Body: strings.NewReader(content), + }) + if err != nil { + return coreerr.E("s3.Write", "failed to put object: "+key, err) + } + return nil +} + +// EnsureDir is a no-op for S3 (S3 has no real directories). +func (m *Medium) EnsureDir(_ string) error { + return nil +} + +// IsFile checks if a path exists and is a regular file (not a "directory" prefix). +func (m *Medium) IsFile(p string) bool { + key := m.key(p) + if key == "" { + return false + } + // A "file" in S3 is an object whose key does not end with "/" + if strings.HasSuffix(key, "/") { + return false + } + _, err := m.client.HeadObject(context.Background(), &s3.HeadObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + return err == nil +} + +// FileGet is a convenience function that reads a file from the medium. +func (m *Medium) FileGet(p string) (string, error) { + return m.Read(p) +} + +// FileSet is a convenience function that writes a file to the medium. +func (m *Medium) FileSet(p, content string) error { + return m.Write(p, content) +} + +// Delete removes a single object. +func (m *Medium) Delete(p string) error { + key := m.key(p) + if key == "" { + return coreerr.E("s3.Delete", "path is required", os.ErrInvalid) + } + + _, err := m.client.DeleteObject(context.Background(), &s3.DeleteObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err != nil { + return coreerr.E("s3.Delete", "failed to delete object: "+key, err) + } + return nil +} + +// DeleteAll removes all objects under the given prefix. 
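+// The exact key is deleted first (errors ignored, since it may not exist),
+// then every object under "key/" is listed page by page and removed with
+// batched DeleteObjects calls until the listing is no longer truncated.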
+func (m *Medium) DeleteAll(p string) error { + key := m.key(p) + if key == "" { + return coreerr.E("s3.DeleteAll", "path is required", os.ErrInvalid) + } + + // First, try deleting the exact key + _, _ = m.client.DeleteObject(context.Background(), &s3.DeleteObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + + // Then delete all objects under the prefix + prefix := key + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + + paginator := true + var continuationToken *string + + for paginator { + listOut, err := m.client.ListObjectsV2(context.Background(), &s3.ListObjectsV2Input{ + Bucket: aws.String(m.bucket), + Prefix: aws.String(prefix), + ContinuationToken: continuationToken, + }) + if err != nil { + return coreerr.E("s3.DeleteAll", "failed to list objects: "+prefix, err) + } + + if len(listOut.Contents) == 0 { + break + } + + objects := make([]types.ObjectIdentifier, len(listOut.Contents)) + for i, obj := range listOut.Contents { + objects[i] = types.ObjectIdentifier{Key: obj.Key} + } + + _, err = m.client.DeleteObjects(context.Background(), &s3.DeleteObjectsInput{ + Bucket: aws.String(m.bucket), + Delete: &types.Delete{Objects: objects, Quiet: aws.Bool(true)}, + }) + if err != nil { + return coreerr.E("s3.DeleteAll", "failed to delete objects", err) + } + + if listOut.IsTruncated != nil && *listOut.IsTruncated { + continuationToken = listOut.NextContinuationToken + } else { + paginator = false + } + } + + return nil +} + +// Rename moves an object by copying then deleting the original. +func (m *Medium) Rename(oldPath, newPath string) error { + oldKey := m.key(oldPath) + newKey := m.key(newPath) + if oldKey == "" || newKey == "" { + return coreerr.E("s3.Rename", "both old and new paths are required", os.ErrInvalid) + } + + copySource := m.bucket + "/" + oldKey + + _, err := m.client.CopyObject(context.Background(), &s3.CopyObjectInput{ + Bucket: aws.String(m.bucket), + CopySource: aws.String(copySource), + Key: aws.String(newKey), + }) + if err != nil { + return coreerr.E("s3.Rename", "failed to copy object: "+oldKey+" -> "+newKey, err) + } + + _, err = m.client.DeleteObject(context.Background(), &s3.DeleteObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(oldKey), + }) + if err != nil { + return coreerr.E("s3.Rename", "failed to delete source object: "+oldKey, err) + } + + return nil +} + +// List returns directory entries for the given path using ListObjectsV2 with delimiter. 
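+// With Delimiter set to "/", S3 returns only immediate children: CommonPrefixes
+// become directory entries and Contents become file entries. For example, with
+// objects "dir/a.txt" and "dir/sub/b.txt", List("dir") yields "a.txt" (file)
+// and "sub" (directory). Only the first page of results is consumed here.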
+func (m *Medium) List(p string) ([]fs.DirEntry, error) { + prefix := m.key(p) + if prefix != "" && !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + + var entries []fs.DirEntry + + listOut, err := m.client.ListObjectsV2(context.Background(), &s3.ListObjectsV2Input{ + Bucket: aws.String(m.bucket), + Prefix: aws.String(prefix), + Delimiter: aws.String("/"), + }) + if err != nil { + return nil, coreerr.E("s3.List", "failed to list objects: "+prefix, err) + } + + // Common prefixes are "directories" + for _, cp := range listOut.CommonPrefixes { + if cp.Prefix == nil { + continue + } + name := strings.TrimPrefix(*cp.Prefix, prefix) + name = strings.TrimSuffix(name, "/") + if name == "" { + continue + } + entries = append(entries, &dirEntry{ + name: name, + isDir: true, + mode: fs.ModeDir | 0755, + info: &fileInfo{ + name: name, + isDir: true, + mode: fs.ModeDir | 0755, + }, + }) + } + + // Contents are "files" (excluding the prefix itself) + for _, obj := range listOut.Contents { + if obj.Key == nil { + continue + } + name := strings.TrimPrefix(*obj.Key, prefix) + if name == "" || strings.Contains(name, "/") { + continue + } + var size int64 + if obj.Size != nil { + size = *obj.Size + } + var modTime time.Time + if obj.LastModified != nil { + modTime = *obj.LastModified + } + entries = append(entries, &dirEntry{ + name: name, + isDir: false, + mode: 0644, + info: &fileInfo{ + name: name, + size: size, + mode: 0644, + modTime: modTime, + }, + }) + } + + return entries, nil +} + +// Stat returns file information for the given path using HeadObject. +func (m *Medium) Stat(p string) (fs.FileInfo, error) { + key := m.key(p) + if key == "" { + return nil, coreerr.E("s3.Stat", "path is required", os.ErrInvalid) + } + + out, err := m.client.HeadObject(context.Background(), &s3.HeadObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err != nil { + return nil, coreerr.E("s3.Stat", "failed to head object: "+key, err) + } + + var size int64 + if out.ContentLength != nil { + size = *out.ContentLength + } + var modTime time.Time + if out.LastModified != nil { + modTime = *out.LastModified + } + + name := path.Base(key) + return &fileInfo{ + name: name, + size: size, + mode: 0644, + modTime: modTime, + }, nil +} + +// Open opens the named file for reading. +func (m *Medium) Open(p string) (fs.File, error) { + key := m.key(p) + if key == "" { + return nil, coreerr.E("s3.Open", "path is required", os.ErrInvalid) + } + + out, err := m.client.GetObject(context.Background(), &s3.GetObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err != nil { + return nil, coreerr.E("s3.Open", "failed to get object: "+key, err) + } + + data, err := goio.ReadAll(out.Body) + out.Body.Close() + if err != nil { + return nil, coreerr.E("s3.Open", "failed to read body: "+key, err) + } + + var size int64 + if out.ContentLength != nil { + size = *out.ContentLength + } + var modTime time.Time + if out.LastModified != nil { + modTime = *out.LastModified + } + + return &s3File{ + name: path.Base(key), + content: data, + size: size, + modTime: modTime, + }, nil +} + +// Create creates or truncates the named file. Returns a writer that +// uploads the content on Close. +func (m *Medium) Create(p string) (goio.WriteCloser, error) { + key := m.key(p) + if key == "" { + return nil, coreerr.E("s3.Create", "path is required", os.ErrInvalid) + } + return &s3WriteCloser{ + medium: m, + key: key, + }, nil +} + +// Append opens the named file for appending. 
It downloads the existing +// content (if any) and re-uploads the combined content on Close. +func (m *Medium) Append(p string) (goio.WriteCloser, error) { + key := m.key(p) + if key == "" { + return nil, coreerr.E("s3.Append", "path is required", os.ErrInvalid) + } + + var existing []byte + out, err := m.client.GetObject(context.Background(), &s3.GetObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err == nil { + existing, _ = goio.ReadAll(out.Body) + out.Body.Close() + } + + return &s3WriteCloser{ + medium: m, + key: key, + data: existing, + }, nil +} + +// ReadStream returns a reader for the file content. +func (m *Medium) ReadStream(p string) (goio.ReadCloser, error) { + key := m.key(p) + if key == "" { + return nil, coreerr.E("s3.ReadStream", "path is required", os.ErrInvalid) + } + + out, err := m.client.GetObject(context.Background(), &s3.GetObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err != nil { + return nil, coreerr.E("s3.ReadStream", "failed to get object: "+key, err) + } + return out.Body, nil +} + +// WriteStream returns a writer for the file content. Content is uploaded on Close. +func (m *Medium) WriteStream(p string) (goio.WriteCloser, error) { + return m.Create(p) +} + +// Exists checks if a path exists (file or directory prefix). +func (m *Medium) Exists(p string) bool { + key := m.key(p) + if key == "" { + return false + } + + // Check as an exact object + _, err := m.client.HeadObject(context.Background(), &s3.HeadObjectInput{ + Bucket: aws.String(m.bucket), + Key: aws.String(key), + }) + if err == nil { + return true + } + + // Check as a "directory" prefix + prefix := key + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + listOut, err := m.client.ListObjectsV2(context.Background(), &s3.ListObjectsV2Input{ + Bucket: aws.String(m.bucket), + Prefix: aws.String(prefix), + MaxKeys: aws.Int32(1), + }) + if err != nil { + return false + } + return len(listOut.Contents) > 0 || len(listOut.CommonPrefixes) > 0 +} + +// IsDir checks if a path exists and is a directory (has objects under it as a prefix). +func (m *Medium) IsDir(p string) bool { + key := m.key(p) + if key == "" { + return false + } + + prefix := key + if !strings.HasSuffix(prefix, "/") { + prefix += "/" + } + + listOut, err := m.client.ListObjectsV2(context.Background(), &s3.ListObjectsV2Input{ + Bucket: aws.String(m.bucket), + Prefix: aws.String(prefix), + MaxKeys: aws.Int32(1), + }) + if err != nil { + return false + } + return len(listOut.Contents) > 0 || len(listOut.CommonPrefixes) > 0 +} + +// --- Internal types --- + +// fileInfo implements fs.FileInfo for S3 objects. +type fileInfo struct { + name string + size int64 + mode fs.FileMode + modTime time.Time + isDir bool +} + +func (fi *fileInfo) Name() string { return fi.name } +func (fi *fileInfo) Size() int64 { return fi.size } +func (fi *fileInfo) Mode() fs.FileMode { return fi.mode } +func (fi *fileInfo) ModTime() time.Time { return fi.modTime } +func (fi *fileInfo) IsDir() bool { return fi.isDir } +func (fi *fileInfo) Sys() any { return nil } + +// dirEntry implements fs.DirEntry for S3 listings. +type dirEntry struct { + name string + isDir bool + mode fs.FileMode + info fs.FileInfo +} + +func (de *dirEntry) Name() string { return de.name } +func (de *dirEntry) IsDir() bool { return de.isDir } +func (de *dirEntry) Type() fs.FileMode { return de.mode.Type() } +func (de *dirEntry) Info() (fs.FileInfo, error) { return de.info, nil } + +// s3File implements fs.File for S3 objects. 
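+// The object body is fully buffered into the content slice when the file is
+// opened, so Read serves bytes from memory and Close is a no-op.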
+type s3File struct { + name string + content []byte + offset int64 + size int64 + modTime time.Time +} + +func (f *s3File) Stat() (fs.FileInfo, error) { + return &fileInfo{ + name: f.name, + size: int64(len(f.content)), + mode: 0644, + modTime: f.modTime, + }, nil +} + +func (f *s3File) Read(b []byte) (int, error) { + if f.offset >= int64(len(f.content)) { + return 0, goio.EOF + } + n := copy(b, f.content[f.offset:]) + f.offset += int64(n) + return n, nil +} + +func (f *s3File) Close() error { + return nil +} + +// s3WriteCloser buffers writes and uploads to S3 on Close. +type s3WriteCloser struct { + medium *Medium + key string + data []byte +} + +func (w *s3WriteCloser) Write(p []byte) (int, error) { + w.data = append(w.data, p...) + return len(p), nil +} + +func (w *s3WriteCloser) Close() error { + _, err := w.medium.client.PutObject(context.Background(), &s3.PutObjectInput{ + Bucket: aws.String(w.medium.bucket), + Key: aws.String(w.key), + Body: bytes.NewReader(w.data), + }) + if err != nil { + return fmt.Errorf("s3: failed to upload on close: %w", err) + } + return nil +} diff --git a/pkg/io/s3/s3_test.go b/pkg/io/s3/s3_test.go new file mode 100644 index 00000000..1f226e76 --- /dev/null +++ b/pkg/io/s3/s3_test.go @@ -0,0 +1,646 @@ +package s3 + +import ( + "bytes" + "context" + "fmt" + goio "io" + "io/fs" + "sort" + "strings" + "sync" + "testing" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/aws-sdk-go-v2/service/s3/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// mockS3 is an in-memory mock implementing the s3API interface. +type mockS3 struct { + mu sync.RWMutex + objects map[string][]byte + mtimes map[string]time.Time +} + +func newMockS3() *mockS3 { + return &mockS3{ + objects: make(map[string][]byte), + mtimes: make(map[string]time.Time), + } +} + +func (m *mockS3) GetObject(_ context.Context, params *s3.GetObjectInput, _ ...func(*s3.Options)) (*s3.GetObjectOutput, error) { + m.mu.RLock() + defer m.mu.RUnlock() + + key := aws.ToString(params.Key) + data, ok := m.objects[key] + if !ok { + return nil, fmt.Errorf("NoSuchKey: key %q not found", key) + } + mtime := m.mtimes[key] + return &s3.GetObjectOutput{ + Body: goio.NopCloser(bytes.NewReader(data)), + ContentLength: aws.Int64(int64(len(data))), + LastModified: &mtime, + }, nil +} + +func (m *mockS3) PutObject(_ context.Context, params *s3.PutObjectInput, _ ...func(*s3.Options)) (*s3.PutObjectOutput, error) { + m.mu.Lock() + defer m.mu.Unlock() + + key := aws.ToString(params.Key) + data, err := goio.ReadAll(params.Body) + if err != nil { + return nil, err + } + m.objects[key] = data + m.mtimes[key] = time.Now() + return &s3.PutObjectOutput{}, nil +} + +func (m *mockS3) DeleteObject(_ context.Context, params *s3.DeleteObjectInput, _ ...func(*s3.Options)) (*s3.DeleteObjectOutput, error) { + m.mu.Lock() + defer m.mu.Unlock() + + key := aws.ToString(params.Key) + delete(m.objects, key) + delete(m.mtimes, key) + return &s3.DeleteObjectOutput{}, nil +} + +func (m *mockS3) DeleteObjects(_ context.Context, params *s3.DeleteObjectsInput, _ ...func(*s3.Options)) (*s3.DeleteObjectsOutput, error) { + m.mu.Lock() + defer m.mu.Unlock() + + for _, obj := range params.Delete.Objects { + key := aws.ToString(obj.Key) + delete(m.objects, key) + delete(m.mtimes, key) + } + return &s3.DeleteObjectsOutput{}, nil +} + +func (m *mockS3) HeadObject(_ context.Context, params *s3.HeadObjectInput, _ ...func(*s3.Options)) (*s3.HeadObjectOutput, 
error) { + m.mu.RLock() + defer m.mu.RUnlock() + + key := aws.ToString(params.Key) + data, ok := m.objects[key] + if !ok { + return nil, fmt.Errorf("NotFound: key %q not found", key) + } + mtime := m.mtimes[key] + return &s3.HeadObjectOutput{ + ContentLength: aws.Int64(int64(len(data))), + LastModified: &mtime, + }, nil +} + +func (m *mockS3) ListObjectsV2(_ context.Context, params *s3.ListObjectsV2Input, _ ...func(*s3.Options)) (*s3.ListObjectsV2Output, error) { + m.mu.RLock() + defer m.mu.RUnlock() + + prefix := aws.ToString(params.Prefix) + delimiter := aws.ToString(params.Delimiter) + maxKeys := int32(1000) + if params.MaxKeys != nil { + maxKeys = *params.MaxKeys + } + + // Collect all matching keys sorted + var allKeys []string + for k := range m.objects { + if strings.HasPrefix(k, prefix) { + allKeys = append(allKeys, k) + } + } + sort.Strings(allKeys) + + var contents []types.Object + commonPrefixes := make(map[string]bool) + + for _, k := range allKeys { + rest := strings.TrimPrefix(k, prefix) + + if delimiter != "" { + if idx := strings.Index(rest, delimiter); idx >= 0 { + // This key has a delimiter after the prefix -> common prefix + cp := prefix + rest[:idx+len(delimiter)] + commonPrefixes[cp] = true + continue + } + } + + if int32(len(contents)) >= maxKeys { + break + } + + data := m.objects[k] + mtime := m.mtimes[k] + contents = append(contents, types.Object{ + Key: aws.String(k), + Size: aws.Int64(int64(len(data))), + LastModified: &mtime, + }) + } + + var cpSlice []types.CommonPrefix + // Sort common prefixes for deterministic output + var cpKeys []string + for cp := range commonPrefixes { + cpKeys = append(cpKeys, cp) + } + sort.Strings(cpKeys) + for _, cp := range cpKeys { + cpSlice = append(cpSlice, types.CommonPrefix{Prefix: aws.String(cp)}) + } + + return &s3.ListObjectsV2Output{ + Contents: contents, + CommonPrefixes: cpSlice, + IsTruncated: aws.Bool(false), + }, nil +} + +func (m *mockS3) CopyObject(_ context.Context, params *s3.CopyObjectInput, _ ...func(*s3.Options)) (*s3.CopyObjectOutput, error) { + m.mu.Lock() + defer m.mu.Unlock() + + // CopySource is "bucket/key" + source := aws.ToString(params.CopySource) + parts := strings.SplitN(source, "/", 2) + if len(parts) != 2 { + return nil, fmt.Errorf("invalid CopySource: %s", source) + } + srcKey := parts[1] + + data, ok := m.objects[srcKey] + if !ok { + return nil, fmt.Errorf("NoSuchKey: source key %q not found", srcKey) + } + + destKey := aws.ToString(params.Key) + m.objects[destKey] = append([]byte{}, data...) 
+ m.mtimes[destKey] = time.Now() + + return &s3.CopyObjectOutput{}, nil +} + +// --- Helper --- + +func newTestMedium(t *testing.T) (*Medium, *mockS3) { + t.Helper() + mock := newMockS3() + m, err := New("test-bucket", withAPI(mock)) + require.NoError(t, err) + return m, mock +} + +// --- Tests --- + +func TestNew_Good(t *testing.T) { + mock := newMockS3() + m, err := New("my-bucket", withAPI(mock)) + require.NoError(t, err) + assert.Equal(t, "my-bucket", m.bucket) + assert.Equal(t, "", m.prefix) +} + +func TestNew_Bad_NoBucket(t *testing.T) { + _, err := New("") + assert.Error(t, err) + assert.Contains(t, err.Error(), "bucket name is required") +} + +func TestNew_Bad_NoClient(t *testing.T) { + _, err := New("bucket") + assert.Error(t, err) + assert.Contains(t, err.Error(), "S3 client is required") +} + +func TestWithPrefix_Good(t *testing.T) { + mock := newMockS3() + m, err := New("bucket", withAPI(mock), WithPrefix("data/")) + require.NoError(t, err) + assert.Equal(t, "data/", m.prefix) + + // Prefix without trailing slash gets one added + m2, err := New("bucket", withAPI(mock), WithPrefix("data")) + require.NoError(t, err) + assert.Equal(t, "data/", m2.prefix) +} + +func TestReadWrite_Good(t *testing.T) { + m, _ := newTestMedium(t) + + err := m.Write("hello.txt", "world") + require.NoError(t, err) + + content, err := m.Read("hello.txt") + require.NoError(t, err) + assert.Equal(t, "world", content) +} + +func TestReadWrite_Bad_NotFound(t *testing.T) { + m, _ := newTestMedium(t) + + _, err := m.Read("nonexistent.txt") + assert.Error(t, err) +} + +func TestReadWrite_Bad_EmptyPath(t *testing.T) { + m, _ := newTestMedium(t) + + _, err := m.Read("") + assert.Error(t, err) + + err = m.Write("", "content") + assert.Error(t, err) +} + +func TestReadWrite_Good_WithPrefix(t *testing.T) { + mock := newMockS3() + m, err := New("bucket", withAPI(mock), WithPrefix("pfx")) + require.NoError(t, err) + + err = m.Write("file.txt", "data") + require.NoError(t, err) + + // Verify the key has the prefix + _, ok := mock.objects["pfx/file.txt"] + assert.True(t, ok, "object should be stored with prefix") + + content, err := m.Read("file.txt") + require.NoError(t, err) + assert.Equal(t, "data", content) +} + +func TestEnsureDir_Good(t *testing.T) { + m, _ := newTestMedium(t) + // EnsureDir is a no-op for S3 + err := m.EnsureDir("any/path") + assert.NoError(t, err) +} + +func TestIsFile_Good(t *testing.T) { + m, _ := newTestMedium(t) + + err := m.Write("file.txt", "content") + require.NoError(t, err) + + assert.True(t, m.IsFile("file.txt")) + assert.False(t, m.IsFile("nonexistent.txt")) + assert.False(t, m.IsFile("")) +} + +func TestFileGetFileSet_Good(t *testing.T) { + m, _ := newTestMedium(t) + + err := m.FileSet("key.txt", "value") + require.NoError(t, err) + + val, err := m.FileGet("key.txt") + require.NoError(t, err) + assert.Equal(t, "value", val) +} + +func TestDelete_Good(t *testing.T) { + m, _ := newTestMedium(t) + + err := m.Write("to-delete.txt", "content") + require.NoError(t, err) + assert.True(t, m.Exists("to-delete.txt")) + + err = m.Delete("to-delete.txt") + require.NoError(t, err) + assert.False(t, m.IsFile("to-delete.txt")) +} + +func TestDelete_Bad_EmptyPath(t *testing.T) { + m, _ := newTestMedium(t) + err := m.Delete("") + assert.Error(t, err) +} + +func TestDeleteAll_Good(t *testing.T) { + m, _ := newTestMedium(t) + + // Create nested structure + require.NoError(t, m.Write("dir/file1.txt", "a")) + require.NoError(t, m.Write("dir/sub/file2.txt", "b")) + require.NoError(t, m.Write("other.txt", 
"c")) + + err := m.DeleteAll("dir") + require.NoError(t, err) + + assert.False(t, m.IsFile("dir/file1.txt")) + assert.False(t, m.IsFile("dir/sub/file2.txt")) + assert.True(t, m.IsFile("other.txt")) +} + +func TestDeleteAll_Bad_EmptyPath(t *testing.T) { + m, _ := newTestMedium(t) + err := m.DeleteAll("") + assert.Error(t, err) +} + +func TestRename_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("old.txt", "content")) + assert.True(t, m.IsFile("old.txt")) + + err := m.Rename("old.txt", "new.txt") + require.NoError(t, err) + + assert.False(t, m.IsFile("old.txt")) + assert.True(t, m.IsFile("new.txt")) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "content", content) +} + +func TestRename_Bad_EmptyPath(t *testing.T) { + m, _ := newTestMedium(t) + err := m.Rename("", "new.txt") + assert.Error(t, err) + + err = m.Rename("old.txt", "") + assert.Error(t, err) +} + +func TestRename_Bad_SourceNotFound(t *testing.T) { + m, _ := newTestMedium(t) + err := m.Rename("nonexistent.txt", "new.txt") + assert.Error(t, err) +} + +func TestList_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("dir/file1.txt", "a")) + require.NoError(t, m.Write("dir/file2.txt", "b")) + require.NoError(t, m.Write("dir/sub/file3.txt", "c")) + + entries, err := m.List("dir") + require.NoError(t, err) + + names := make(map[string]bool) + for _, e := range entries { + names[e.Name()] = true + } + + assert.True(t, names["file1.txt"], "should list file1.txt") + assert.True(t, names["file2.txt"], "should list file2.txt") + assert.True(t, names["sub"], "should list sub directory") + assert.Len(t, entries, 3) + + // Check that sub is a directory + for _, e := range entries { + if e.Name() == "sub" { + assert.True(t, e.IsDir()) + info, err := e.Info() + require.NoError(t, err) + assert.True(t, info.IsDir()) + } + } +} + +func TestList_Good_Root(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("root.txt", "content")) + require.NoError(t, m.Write("dir/nested.txt", "nested")) + + entries, err := m.List("") + require.NoError(t, err) + + names := make(map[string]bool) + for _, e := range entries { + names[e.Name()] = true + } + + assert.True(t, names["root.txt"]) + assert.True(t, names["dir"]) +} + +func TestStat_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "hello world")) + + info, err := m.Stat("file.txt") + require.NoError(t, err) + assert.Equal(t, "file.txt", info.Name()) + assert.Equal(t, int64(11), info.Size()) + assert.False(t, info.IsDir()) +} + +func TestStat_Bad_NotFound(t *testing.T) { + m, _ := newTestMedium(t) + + _, err := m.Stat("nonexistent.txt") + assert.Error(t, err) +} + +func TestStat_Bad_EmptyPath(t *testing.T) { + m, _ := newTestMedium(t) + _, err := m.Stat("") + assert.Error(t, err) +} + +func TestOpen_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "open me")) + + f, err := m.Open("file.txt") + require.NoError(t, err) + defer f.Close() + + data, err := goio.ReadAll(f.(goio.Reader)) + require.NoError(t, err) + assert.Equal(t, "open me", string(data)) + + stat, err := f.Stat() + require.NoError(t, err) + assert.Equal(t, "file.txt", stat.Name()) +} + +func TestOpen_Bad_NotFound(t *testing.T) { + m, _ := newTestMedium(t) + + _, err := m.Open("nonexistent.txt") + assert.Error(t, err) +} + +func TestCreate_Good(t *testing.T) { + m, _ := newTestMedium(t) + + w, err := m.Create("new.txt") + require.NoError(t, err) + + n, 
err := w.Write([]byte("created")) + require.NoError(t, err) + assert.Equal(t, 7, n) + + err = w.Close() + require.NoError(t, err) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "created", content) +} + +func TestAppend_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("append.txt", "hello")) + + w, err := m.Append("append.txt") + require.NoError(t, err) + + _, err = w.Write([]byte(" world")) + require.NoError(t, err) + err = w.Close() + require.NoError(t, err) + + content, err := m.Read("append.txt") + require.NoError(t, err) + assert.Equal(t, "hello world", content) +} + +func TestAppend_Good_NewFile(t *testing.T) { + m, _ := newTestMedium(t) + + w, err := m.Append("new.txt") + require.NoError(t, err) + + _, err = w.Write([]byte("fresh")) + require.NoError(t, err) + err = w.Close() + require.NoError(t, err) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "fresh", content) +} + +func TestReadStream_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("stream.txt", "streaming content")) + + reader, err := m.ReadStream("stream.txt") + require.NoError(t, err) + defer reader.Close() + + data, err := goio.ReadAll(reader) + require.NoError(t, err) + assert.Equal(t, "streaming content", string(data)) +} + +func TestReadStream_Bad_NotFound(t *testing.T) { + m, _ := newTestMedium(t) + _, err := m.ReadStream("nonexistent.txt") + assert.Error(t, err) +} + +func TestWriteStream_Good(t *testing.T) { + m, _ := newTestMedium(t) + + writer, err := m.WriteStream("output.txt") + require.NoError(t, err) + + _, err = goio.Copy(writer, strings.NewReader("piped data")) + require.NoError(t, err) + err = writer.Close() + require.NoError(t, err) + + content, err := m.Read("output.txt") + require.NoError(t, err) + assert.Equal(t, "piped data", content) +} + +func TestExists_Good(t *testing.T) { + m, _ := newTestMedium(t) + + assert.False(t, m.Exists("nonexistent.txt")) + + require.NoError(t, m.Write("file.txt", "content")) + assert.True(t, m.Exists("file.txt")) +} + +func TestExists_Good_DirectoryPrefix(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("dir/file.txt", "content")) + // "dir" should exist as a directory prefix + assert.True(t, m.Exists("dir")) +} + +func TestIsDir_Good(t *testing.T) { + m, _ := newTestMedium(t) + + require.NoError(t, m.Write("dir/file.txt", "content")) + + assert.True(t, m.IsDir("dir")) + assert.False(t, m.IsDir("dir/file.txt")) + assert.False(t, m.IsDir("nonexistent")) + assert.False(t, m.IsDir("")) +} + +func TestKey_Good(t *testing.T) { + mock := newMockS3() + + // No prefix + m, _ := New("bucket", withAPI(mock)) + assert.Equal(t, "file.txt", m.key("file.txt")) + assert.Equal(t, "dir/file.txt", m.key("dir/file.txt")) + assert.Equal(t, "", m.key("")) + assert.Equal(t, "file.txt", m.key("/file.txt")) + assert.Equal(t, "file.txt", m.key("../file.txt")) + + // With prefix + m2, _ := New("bucket", withAPI(mock), WithPrefix("pfx")) + assert.Equal(t, "pfx/file.txt", m2.key("file.txt")) + assert.Equal(t, "pfx/dir/file.txt", m2.key("dir/file.txt")) + assert.Equal(t, "pfx/", m2.key("")) +} + +// Ugly: verify the Medium interface is satisfied at compile time. +func TestInterfaceCompliance_Ugly(t *testing.T) { + mock := newMockS3() + m, err := New("bucket", withAPI(mock)) + require.NoError(t, err) + + // Verify all methods exist by calling them in a way that + // proves compile-time satisfaction of the interface. 
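+	// The anonymous interface below mirrors the Medium method set; the
+	// assignment fails to compile if any method is missing or has the
+	// wrong signature.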
+ var _ interface { + Read(string) (string, error) + Write(string, string) error + EnsureDir(string) error + IsFile(string) bool + FileGet(string) (string, error) + FileSet(string, string) error + Delete(string) error + DeleteAll(string) error + Rename(string, string) error + List(string) ([]fs.DirEntry, error) + Stat(string) (fs.FileInfo, error) + Open(string) (fs.File, error) + Create(string) (goio.WriteCloser, error) + Append(string) (goio.WriteCloser, error) + ReadStream(string) (goio.ReadCloser, error) + WriteStream(string) (goio.WriteCloser, error) + Exists(string) bool + IsDir(string) bool + } = m +} diff --git a/pkg/io/sqlite/sqlite.go b/pkg/io/sqlite/sqlite.go new file mode 100644 index 00000000..734a7492 --- /dev/null +++ b/pkg/io/sqlite/sqlite.go @@ -0,0 +1,669 @@ +// Package sqlite provides a SQLite-backed implementation of the io.Medium interface. +package sqlite + +import ( + "bytes" + "database/sql" + goio "io" + "io/fs" + "os" + "path" + "strings" + "time" + + coreerr "github.com/host-uk/core/pkg/framework/core" + + _ "modernc.org/sqlite" // Pure Go SQLite driver +) + +// Medium is a SQLite-backed storage backend implementing the io.Medium interface. +type Medium struct { + db *sql.DB + table string +} + +// Option configures a Medium. +type Option func(*Medium) + +// WithTable sets the table name (default: "files"). +func WithTable(table string) Option { + return func(m *Medium) { + m.table = table + } +} + +// New creates a new SQLite Medium at the given database path. +// Use ":memory:" for an in-memory database. +func New(dbPath string, opts ...Option) (*Medium, error) { + if dbPath == "" { + return nil, coreerr.E("sqlite.New", "database path is required", nil) + } + + m := &Medium{table: "files"} + for _, opt := range opts { + opt(m) + } + + db, err := sql.Open("sqlite", dbPath) + if err != nil { + return nil, coreerr.E("sqlite.New", "failed to open database", err) + } + + // Enable WAL mode for better concurrency + if _, err := db.Exec("PRAGMA journal_mode=WAL"); err != nil { + db.Close() + return nil, coreerr.E("sqlite.New", "failed to set WAL mode", err) + } + + // Create the schema + createSQL := `CREATE TABLE IF NOT EXISTS ` + m.table + ` ( + path TEXT PRIMARY KEY, + content BLOB NOT NULL, + mode INTEGER DEFAULT 420, + is_dir BOOLEAN DEFAULT FALSE, + mtime DATETIME DEFAULT CURRENT_TIMESTAMP + )` + if _, err := db.Exec(createSQL); err != nil { + db.Close() + return nil, coreerr.E("sqlite.New", "failed to create table", err) + } + + m.db = db + return m, nil +} + +// Close closes the underlying database connection. +func (m *Medium) Close() error { + if m.db != nil { + return m.db.Close() + } + return nil +} + +// cleanPath normalizes a path for consistent storage. +// Uses a leading "/" before Clean to sandbox traversal attempts. +func cleanPath(p string) string { + clean := path.Clean("/" + p) + if clean == "/" { + return "" + } + return strings.TrimPrefix(clean, "/") +} + +// Read retrieves the content of a file as a string. 
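+// A minimal usage sketch (illustrative paths only; WithTable shown with its
+// default value):
+//
+//	m, err := New(":memory:", WithTable("files"))
+//	if err != nil {
+//		return err
+//	}
+//	defer m.Close()
+//	_ = m.Write("notes/todo.txt", "ship it")
+//	content, err := m.Read("notes/todo.txt") // "ship it"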
+func (m *Medium) Read(p string) (string, error) { + key := cleanPath(p) + if key == "" { + return "", coreerr.E("sqlite.Read", "path is required", os.ErrInvalid) + } + + var content []byte + var isDir bool + err := m.db.QueryRow( + `SELECT content, is_dir FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&content, &isDir) + if err == sql.ErrNoRows { + return "", coreerr.E("sqlite.Read", "file not found: "+key, os.ErrNotExist) + } + if err != nil { + return "", coreerr.E("sqlite.Read", "query failed: "+key, err) + } + if isDir { + return "", coreerr.E("sqlite.Read", "path is a directory: "+key, os.ErrInvalid) + } + return string(content), nil +} + +// Write saves the given content to a file, overwriting it if it exists. +func (m *Medium) Write(p, content string) error { + key := cleanPath(p) + if key == "" { + return coreerr.E("sqlite.Write", "path is required", os.ErrInvalid) + } + + _, err := m.db.Exec( + `INSERT INTO `+m.table+` (path, content, mode, is_dir, mtime) VALUES (?, ?, 420, FALSE, ?) + ON CONFLICT(path) DO UPDATE SET content = excluded.content, is_dir = FALSE, mtime = excluded.mtime`, + key, []byte(content), time.Now().UTC(), + ) + if err != nil { + return coreerr.E("sqlite.Write", "insert failed: "+key, err) + } + return nil +} + +// EnsureDir makes sure a directory exists, creating it if necessary. +func (m *Medium) EnsureDir(p string) error { + key := cleanPath(p) + if key == "" { + // Root always "exists" + return nil + } + + _, err := m.db.Exec( + `INSERT INTO `+m.table+` (path, content, mode, is_dir, mtime) VALUES (?, '', 493, TRUE, ?) + ON CONFLICT(path) DO NOTHING`, + key, time.Now().UTC(), + ) + if err != nil { + return coreerr.E("sqlite.EnsureDir", "insert failed: "+key, err) + } + return nil +} + +// IsFile checks if a path exists and is a regular file. +func (m *Medium) IsFile(p string) bool { + key := cleanPath(p) + if key == "" { + return false + } + + var isDir bool + err := m.db.QueryRow( + `SELECT is_dir FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&isDir) + if err != nil { + return false + } + return !isDir +} + +// FileGet is a convenience function that reads a file from the medium. +func (m *Medium) FileGet(p string) (string, error) { + return m.Read(p) +} + +// FileSet is a convenience function that writes a file to the medium. +func (m *Medium) FileSet(p, content string) error { + return m.Write(p, content) +} + +// Delete removes a file or empty directory. +func (m *Medium) Delete(p string) error { + key := cleanPath(p) + if key == "" { + return coreerr.E("sqlite.Delete", "path is required", os.ErrInvalid) + } + + // Check if it's a directory with children + var isDir bool + err := m.db.QueryRow( + `SELECT is_dir FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&isDir) + if err == sql.ErrNoRows { + return coreerr.E("sqlite.Delete", "path not found: "+key, os.ErrNotExist) + } + if err != nil { + return coreerr.E("sqlite.Delete", "query failed: "+key, err) + } + + if isDir { + // Check for children + prefix := key + "/" + var count int + err := m.db.QueryRow( + `SELECT COUNT(*) FROM `+m.table+` WHERE path LIKE ? 
AND path != ?`, prefix+"%", key, + ).Scan(&count) + if err != nil { + return coreerr.E("sqlite.Delete", "count failed: "+key, err) + } + if count > 0 { + return coreerr.E("sqlite.Delete", "directory not empty: "+key, os.ErrExist) + } + } + + res, err := m.db.Exec(`DELETE FROM `+m.table+` WHERE path = ?`, key) + if err != nil { + return coreerr.E("sqlite.Delete", "delete failed: "+key, err) + } + n, _ := res.RowsAffected() + if n == 0 { + return coreerr.E("sqlite.Delete", "path not found: "+key, os.ErrNotExist) + } + return nil +} + +// DeleteAll removes a file or directory and all its contents recursively. +func (m *Medium) DeleteAll(p string) error { + key := cleanPath(p) + if key == "" { + return coreerr.E("sqlite.DeleteAll", "path is required", os.ErrInvalid) + } + + prefix := key + "/" + + // Delete the exact path and all children + res, err := m.db.Exec( + `DELETE FROM `+m.table+` WHERE path = ? OR path LIKE ?`, + key, prefix+"%", + ) + if err != nil { + return coreerr.E("sqlite.DeleteAll", "delete failed: "+key, err) + } + n, _ := res.RowsAffected() + if n == 0 { + return coreerr.E("sqlite.DeleteAll", "path not found: "+key, os.ErrNotExist) + } + return nil +} + +// Rename moves a file or directory from oldPath to newPath. +func (m *Medium) Rename(oldPath, newPath string) error { + oldKey := cleanPath(oldPath) + newKey := cleanPath(newPath) + if oldKey == "" || newKey == "" { + return coreerr.E("sqlite.Rename", "both old and new paths are required", os.ErrInvalid) + } + + tx, err := m.db.Begin() + if err != nil { + return coreerr.E("sqlite.Rename", "begin tx failed", err) + } + defer tx.Rollback() + + // Check if source exists + var content []byte + var mode int + var isDir bool + var mtime time.Time + err = tx.QueryRow( + `SELECT content, mode, is_dir, mtime FROM `+m.table+` WHERE path = ?`, oldKey, + ).Scan(&content, &mode, &isDir, &mtime) + if err == sql.ErrNoRows { + return coreerr.E("sqlite.Rename", "source not found: "+oldKey, os.ErrNotExist) + } + if err != nil { + return coreerr.E("sqlite.Rename", "query failed: "+oldKey, err) + } + + // Insert or replace at new path + _, err = tx.Exec( + `INSERT INTO `+m.table+` (path, content, mode, is_dir, mtime) VALUES (?, ?, ?, ?, ?) 
+ ON CONFLICT(path) DO UPDATE SET content = excluded.content, mode = excluded.mode, is_dir = excluded.is_dir, mtime = excluded.mtime`, + newKey, content, mode, isDir, mtime, + ) + if err != nil { + return coreerr.E("sqlite.Rename", "insert at new path failed: "+newKey, err) + } + + // Delete old path + _, err = tx.Exec(`DELETE FROM `+m.table+` WHERE path = ?`, oldKey) + if err != nil { + return coreerr.E("sqlite.Rename", "delete old path failed: "+oldKey, err) + } + + // If it's a directory, move all children + if isDir { + oldPrefix := oldKey + "/" + newPrefix := newKey + "/" + + rows, err := tx.Query( + `SELECT path, content, mode, is_dir, mtime FROM `+m.table+` WHERE path LIKE ?`, + oldPrefix+"%", + ) + if err != nil { + return coreerr.E("sqlite.Rename", "query children failed", err) + } + + type child struct { + path string + content []byte + mode int + isDir bool + mtime time.Time + } + var children []child + for rows.Next() { + var c child + if err := rows.Scan(&c.path, &c.content, &c.mode, &c.isDir, &c.mtime); err != nil { + rows.Close() + return coreerr.E("sqlite.Rename", "scan child failed", err) + } + children = append(children, c) + } + rows.Close() + + for _, c := range children { + newChildPath := newPrefix + strings.TrimPrefix(c.path, oldPrefix) + _, err = tx.Exec( + `INSERT INTO `+m.table+` (path, content, mode, is_dir, mtime) VALUES (?, ?, ?, ?, ?) + ON CONFLICT(path) DO UPDATE SET content = excluded.content, mode = excluded.mode, is_dir = excluded.is_dir, mtime = excluded.mtime`, + newChildPath, c.content, c.mode, c.isDir, c.mtime, + ) + if err != nil { + return coreerr.E("sqlite.Rename", "insert child failed", err) + } + } + + // Delete old children + _, err = tx.Exec(`DELETE FROM `+m.table+` WHERE path LIKE ?`, oldPrefix+"%") + if err != nil { + return coreerr.E("sqlite.Rename", "delete old children failed", err) + } + } + + return tx.Commit() +} + +// List returns the directory entries for the given path. +func (m *Medium) List(p string) ([]fs.DirEntry, error) { + prefix := cleanPath(p) + if prefix != "" { + prefix += "/" + } + + // Query all paths under the prefix + rows, err := m.db.Query( + `SELECT path, content, mode, is_dir, mtime FROM `+m.table+` WHERE path LIKE ? 
OR path LIKE ?`, + prefix+"%", prefix+"%", + ) + if err != nil { + return nil, coreerr.E("sqlite.List", "query failed", err) + } + defer rows.Close() + + seen := make(map[string]bool) + var entries []fs.DirEntry + + for rows.Next() { + var rowPath string + var content []byte + var mode int + var isDir bool + var mtime time.Time + if err := rows.Scan(&rowPath, &content, &mode, &isDir, &mtime); err != nil { + return nil, coreerr.E("sqlite.List", "scan failed", err) + } + + rest := strings.TrimPrefix(rowPath, prefix) + if rest == "" { + continue + } + + // Check if this is a direct child or nested + if idx := strings.Index(rest, "/"); idx >= 0 { + // Nested - register as a directory + dirName := rest[:idx] + if !seen[dirName] { + seen[dirName] = true + entries = append(entries, &dirEntry{ + name: dirName, + isDir: true, + mode: fs.ModeDir | 0755, + info: &fileInfo{ + name: dirName, + isDir: true, + mode: fs.ModeDir | 0755, + }, + }) + } + } else { + // Direct child + if !seen[rest] { + seen[rest] = true + entries = append(entries, &dirEntry{ + name: rest, + isDir: isDir, + mode: fs.FileMode(mode), + info: &fileInfo{ + name: rest, + size: int64(len(content)), + mode: fs.FileMode(mode), + modTime: mtime, + isDir: isDir, + }, + }) + } + } + } + + return entries, rows.Err() +} + +// Stat returns file information for the given path. +func (m *Medium) Stat(p string) (fs.FileInfo, error) { + key := cleanPath(p) + if key == "" { + return nil, coreerr.E("sqlite.Stat", "path is required", os.ErrInvalid) + } + + var content []byte + var mode int + var isDir bool + var mtime time.Time + err := m.db.QueryRow( + `SELECT content, mode, is_dir, mtime FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&content, &mode, &isDir, &mtime) + if err == sql.ErrNoRows { + return nil, coreerr.E("sqlite.Stat", "path not found: "+key, os.ErrNotExist) + } + if err != nil { + return nil, coreerr.E("sqlite.Stat", "query failed: "+key, err) + } + + name := path.Base(key) + return &fileInfo{ + name: name, + size: int64(len(content)), + mode: fs.FileMode(mode), + modTime: mtime, + isDir: isDir, + }, nil +} + +// Open opens the named file for reading. +func (m *Medium) Open(p string) (fs.File, error) { + key := cleanPath(p) + if key == "" { + return nil, coreerr.E("sqlite.Open", "path is required", os.ErrInvalid) + } + + var content []byte + var mode int + var isDir bool + var mtime time.Time + err := m.db.QueryRow( + `SELECT content, mode, is_dir, mtime FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&content, &mode, &isDir, &mtime) + if err == sql.ErrNoRows { + return nil, coreerr.E("sqlite.Open", "file not found: "+key, os.ErrNotExist) + } + if err != nil { + return nil, coreerr.E("sqlite.Open", "query failed: "+key, err) + } + if isDir { + return nil, coreerr.E("sqlite.Open", "path is a directory: "+key, os.ErrInvalid) + } + + return &sqliteFile{ + name: path.Base(key), + content: content, + mode: fs.FileMode(mode), + modTime: mtime, + }, nil +} + +// Create creates or truncates the named file. +func (m *Medium) Create(p string) (goio.WriteCloser, error) { + key := cleanPath(p) + if key == "" { + return nil, coreerr.E("sqlite.Create", "path is required", os.ErrInvalid) + } + return &sqliteWriteCloser{ + medium: m, + path: key, + }, nil +} + +// Append opens the named file for appending, creating it if it doesn't exist. 
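+// The existing row content (if any) is loaded into the writer's buffer up
+// front; the combined bytes are stored back in a single upsert when the
+// returned writer is closed.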
+func (m *Medium) Append(p string) (goio.WriteCloser, error) { + key := cleanPath(p) + if key == "" { + return nil, coreerr.E("sqlite.Append", "path is required", os.ErrInvalid) + } + + var existing []byte + err := m.db.QueryRow( + `SELECT content FROM `+m.table+` WHERE path = ? AND is_dir = FALSE`, key, + ).Scan(&existing) + if err != nil && err != sql.ErrNoRows { + return nil, coreerr.E("sqlite.Append", "query failed: "+key, err) + } + + return &sqliteWriteCloser{ + medium: m, + path: key, + data: existing, + }, nil +} + +// ReadStream returns a reader for the file content. +func (m *Medium) ReadStream(p string) (goio.ReadCloser, error) { + key := cleanPath(p) + if key == "" { + return nil, coreerr.E("sqlite.ReadStream", "path is required", os.ErrInvalid) + } + + var content []byte + var isDir bool + err := m.db.QueryRow( + `SELECT content, is_dir FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&content, &isDir) + if err == sql.ErrNoRows { + return nil, coreerr.E("sqlite.ReadStream", "file not found: "+key, os.ErrNotExist) + } + if err != nil { + return nil, coreerr.E("sqlite.ReadStream", "query failed: "+key, err) + } + if isDir { + return nil, coreerr.E("sqlite.ReadStream", "path is a directory: "+key, os.ErrInvalid) + } + + return goio.NopCloser(bytes.NewReader(content)), nil +} + +// WriteStream returns a writer for the file content. Content is stored on Close. +func (m *Medium) WriteStream(p string) (goio.WriteCloser, error) { + return m.Create(p) +} + +// Exists checks if a path exists (file or directory). +func (m *Medium) Exists(p string) bool { + key := cleanPath(p) + if key == "" { + // Root always exists + return true + } + + var count int + err := m.db.QueryRow( + `SELECT COUNT(*) FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&count) + if err != nil { + return false + } + return count > 0 +} + +// IsDir checks if a path exists and is a directory. +func (m *Medium) IsDir(p string) bool { + key := cleanPath(p) + if key == "" { + return false + } + + var isDir bool + err := m.db.QueryRow( + `SELECT is_dir FROM `+m.table+` WHERE path = ?`, key, + ).Scan(&isDir) + if err != nil { + return false + } + return isDir +} + +// --- Internal types --- + +// fileInfo implements fs.FileInfo for SQLite entries. +type fileInfo struct { + name string + size int64 + mode fs.FileMode + modTime time.Time + isDir bool +} + +func (fi *fileInfo) Name() string { return fi.name } +func (fi *fileInfo) Size() int64 { return fi.size } +func (fi *fileInfo) Mode() fs.FileMode { return fi.mode } +func (fi *fileInfo) ModTime() time.Time { return fi.modTime } +func (fi *fileInfo) IsDir() bool { return fi.isDir } +func (fi *fileInfo) Sys() any { return nil } + +// dirEntry implements fs.DirEntry for SQLite listings. +type dirEntry struct { + name string + isDir bool + mode fs.FileMode + info fs.FileInfo +} + +func (de *dirEntry) Name() string { return de.name } +func (de *dirEntry) IsDir() bool { return de.isDir } +func (de *dirEntry) Type() fs.FileMode { return de.mode.Type() } +func (de *dirEntry) Info() (fs.FileInfo, error) { return de.info, nil } + +// sqliteFile implements fs.File for SQLite entries. 
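+// The row's BLOB content is held in memory, so Read serves bytes from the
+// slice and Close is a no-op.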
+type sqliteFile struct { + name string + content []byte + offset int64 + mode fs.FileMode + modTime time.Time +} + +func (f *sqliteFile) Stat() (fs.FileInfo, error) { + return &fileInfo{ + name: f.name, + size: int64(len(f.content)), + mode: f.mode, + modTime: f.modTime, + }, nil +} + +func (f *sqliteFile) Read(b []byte) (int, error) { + if f.offset >= int64(len(f.content)) { + return 0, goio.EOF + } + n := copy(b, f.content[f.offset:]) + f.offset += int64(n) + return n, nil +} + +func (f *sqliteFile) Close() error { + return nil +} + +// sqliteWriteCloser buffers writes and stores to SQLite on Close. +type sqliteWriteCloser struct { + medium *Medium + path string + data []byte +} + +func (w *sqliteWriteCloser) Write(p []byte) (int, error) { + w.data = append(w.data, p...) + return len(p), nil +} + +func (w *sqliteWriteCloser) Close() error { + _, err := w.medium.db.Exec( + `INSERT INTO `+w.medium.table+` (path, content, mode, is_dir, mtime) VALUES (?, ?, 420, FALSE, ?) + ON CONFLICT(path) DO UPDATE SET content = excluded.content, is_dir = FALSE, mtime = excluded.mtime`, + w.path, w.data, time.Now().UTC(), + ) + if err != nil { + return coreerr.E("sqlite.WriteCloser.Close", "store failed: "+w.path, err) + } + return nil +} diff --git a/pkg/io/sqlite/sqlite_test.go b/pkg/io/sqlite/sqlite_test.go new file mode 100644 index 00000000..97d6304c --- /dev/null +++ b/pkg/io/sqlite/sqlite_test.go @@ -0,0 +1,653 @@ +package sqlite + +import ( + goio "io" + "io/fs" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func newTestMedium(t *testing.T) *Medium { + t.Helper() + m, err := New(":memory:") + require.NoError(t, err) + t.Cleanup(func() { m.Close() }) + return m +} + +// --- Constructor Tests --- + +func TestNew_Good(t *testing.T) { + m, err := New(":memory:") + require.NoError(t, err) + defer m.Close() + assert.Equal(t, "files", m.table) +} + +func TestNew_Good_WithTable(t *testing.T) { + m, err := New(":memory:", WithTable("custom")) + require.NoError(t, err) + defer m.Close() + assert.Equal(t, "custom", m.table) +} + +func TestNew_Bad_EmptyPath(t *testing.T) { + _, err := New("") + assert.Error(t, err) + assert.Contains(t, err.Error(), "database path is required") +} + +// --- Read/Write Tests --- + +func TestReadWrite_Good(t *testing.T) { + m := newTestMedium(t) + + err := m.Write("hello.txt", "world") + require.NoError(t, err) + + content, err := m.Read("hello.txt") + require.NoError(t, err) + assert.Equal(t, "world", content) +} + +func TestReadWrite_Good_Overwrite(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "first")) + require.NoError(t, m.Write("file.txt", "second")) + + content, err := m.Read("file.txt") + require.NoError(t, err) + assert.Equal(t, "second", content) +} + +func TestReadWrite_Good_NestedPath(t *testing.T) { + m := newTestMedium(t) + + err := m.Write("a/b/c.txt", "nested") + require.NoError(t, err) + + content, err := m.Read("a/b/c.txt") + require.NoError(t, err) + assert.Equal(t, "nested", content) +} + +func TestRead_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Read("nonexistent.txt") + assert.Error(t, err) +} + +func TestRead_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Read("") + assert.Error(t, err) +} + +func TestWrite_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + err := m.Write("", "content") + assert.Error(t, err) +} + +func TestRead_Bad_IsDirectory(t *testing.T) { + m := newTestMedium(t) + + 
require.NoError(t, m.EnsureDir("mydir")) + _, err := m.Read("mydir") + assert.Error(t, err) +} + +// --- EnsureDir Tests --- + +func TestEnsureDir_Good(t *testing.T) { + m := newTestMedium(t) + + err := m.EnsureDir("mydir") + require.NoError(t, err) + assert.True(t, m.IsDir("mydir")) +} + +func TestEnsureDir_Good_EmptyPath(t *testing.T) { + m := newTestMedium(t) + // Root always exists, no-op + err := m.EnsureDir("") + assert.NoError(t, err) +} + +func TestEnsureDir_Good_Idempotent(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("mydir")) + require.NoError(t, m.EnsureDir("mydir")) + assert.True(t, m.IsDir("mydir")) +} + +// --- IsFile Tests --- + +func TestIsFile_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "content")) + require.NoError(t, m.EnsureDir("mydir")) + + assert.True(t, m.IsFile("file.txt")) + assert.False(t, m.IsFile("mydir")) + assert.False(t, m.IsFile("nonexistent")) + assert.False(t, m.IsFile("")) +} + +// --- FileGet/FileSet Tests --- + +func TestFileGetFileSet_Good(t *testing.T) { + m := newTestMedium(t) + + err := m.FileSet("key.txt", "value") + require.NoError(t, err) + + val, err := m.FileGet("key.txt") + require.NoError(t, err) + assert.Equal(t, "value", val) +} + +// --- Delete Tests --- + +func TestDelete_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("to-delete.txt", "content")) + assert.True(t, m.Exists("to-delete.txt")) + + err := m.Delete("to-delete.txt") + require.NoError(t, err) + assert.False(t, m.Exists("to-delete.txt")) +} + +func TestDelete_Good_EmptyDir(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("emptydir")) + assert.True(t, m.IsDir("emptydir")) + + err := m.Delete("emptydir") + require.NoError(t, err) + assert.False(t, m.IsDir("emptydir")) +} + +func TestDelete_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + err := m.Delete("nonexistent") + assert.Error(t, err) +} + +func TestDelete_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + err := m.Delete("") + assert.Error(t, err) +} + +func TestDelete_Bad_NotEmpty(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("mydir")) + require.NoError(t, m.Write("mydir/file.txt", "content")) + + err := m.Delete("mydir") + assert.Error(t, err) +} + +// --- DeleteAll Tests --- + +func TestDeleteAll_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("dir/file1.txt", "a")) + require.NoError(t, m.Write("dir/sub/file2.txt", "b")) + require.NoError(t, m.Write("other.txt", "c")) + + err := m.DeleteAll("dir") + require.NoError(t, err) + + assert.False(t, m.Exists("dir/file1.txt")) + assert.False(t, m.Exists("dir/sub/file2.txt")) + assert.True(t, m.Exists("other.txt")) +} + +func TestDeleteAll_Good_SingleFile(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "content")) + + err := m.DeleteAll("file.txt") + require.NoError(t, err) + assert.False(t, m.Exists("file.txt")) +} + +func TestDeleteAll_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + err := m.DeleteAll("nonexistent") + assert.Error(t, err) +} + +func TestDeleteAll_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + err := m.DeleteAll("") + assert.Error(t, err) +} + +// --- Rename Tests --- + +func TestRename_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("old.txt", "content")) + + err := m.Rename("old.txt", "new.txt") + require.NoError(t, err) + + assert.False(t, m.Exists("old.txt")) + assert.True(t, 
m.IsFile("new.txt")) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "content", content) +} + +func TestRename_Good_Directory(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("olddir")) + require.NoError(t, m.Write("olddir/file.txt", "content")) + + err := m.Rename("olddir", "newdir") + require.NoError(t, err) + + assert.False(t, m.Exists("olddir")) + assert.False(t, m.Exists("olddir/file.txt")) + assert.True(t, m.IsDir("newdir")) + assert.True(t, m.IsFile("newdir/file.txt")) + + content, err := m.Read("newdir/file.txt") + require.NoError(t, err) + assert.Equal(t, "content", content) +} + +func TestRename_Bad_SourceNotFound(t *testing.T) { + m := newTestMedium(t) + + err := m.Rename("nonexistent", "new") + assert.Error(t, err) +} + +func TestRename_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + err := m.Rename("", "new") + assert.Error(t, err) + + err = m.Rename("old", "") + assert.Error(t, err) +} + +// --- List Tests --- + +func TestList_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("dir/file1.txt", "a")) + require.NoError(t, m.Write("dir/file2.txt", "b")) + require.NoError(t, m.Write("dir/sub/file3.txt", "c")) + + entries, err := m.List("dir") + require.NoError(t, err) + + names := make(map[string]bool) + for _, e := range entries { + names[e.Name()] = true + } + + assert.True(t, names["file1.txt"]) + assert.True(t, names["file2.txt"]) + assert.True(t, names["sub"]) + assert.Len(t, entries, 3) +} + +func TestList_Good_Root(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("root.txt", "content")) + require.NoError(t, m.Write("dir/nested.txt", "nested")) + + entries, err := m.List("") + require.NoError(t, err) + + names := make(map[string]bool) + for _, e := range entries { + names[e.Name()] = true + } + + assert.True(t, names["root.txt"]) + assert.True(t, names["dir"]) +} + +func TestList_Good_DirectoryEntry(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("dir/sub/file.txt", "content")) + + entries, err := m.List("dir") + require.NoError(t, err) + + require.Len(t, entries, 1) + assert.Equal(t, "sub", entries[0].Name()) + assert.True(t, entries[0].IsDir()) + + info, err := entries[0].Info() + require.NoError(t, err) + assert.True(t, info.IsDir()) +} + +// --- Stat Tests --- + +func TestStat_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "hello world")) + + info, err := m.Stat("file.txt") + require.NoError(t, err) + assert.Equal(t, "file.txt", info.Name()) + assert.Equal(t, int64(11), info.Size()) + assert.False(t, info.IsDir()) +} + +func TestStat_Good_Directory(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("mydir")) + + info, err := m.Stat("mydir") + require.NoError(t, err) + assert.Equal(t, "mydir", info.Name()) + assert.True(t, info.IsDir()) +} + +func TestStat_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Stat("nonexistent") + assert.Error(t, err) +} + +func TestStat_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Stat("") + assert.Error(t, err) +} + +// --- Open Tests --- + +func TestOpen_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "open me")) + + f, err := m.Open("file.txt") + require.NoError(t, err) + defer f.Close() + + data, err := goio.ReadAll(f.(goio.Reader)) + require.NoError(t, err) + assert.Equal(t, "open me", string(data)) + + stat, err := f.Stat() + require.NoError(t, err) + 
assert.Equal(t, "file.txt", stat.Name()) +} + +func TestOpen_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Open("nonexistent.txt") + assert.Error(t, err) +} + +func TestOpen_Bad_IsDirectory(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("mydir")) + _, err := m.Open("mydir") + assert.Error(t, err) +} + +// --- Create Tests --- + +func TestCreate_Good(t *testing.T) { + m := newTestMedium(t) + + w, err := m.Create("new.txt") + require.NoError(t, err) + + n, err := w.Write([]byte("created")) + require.NoError(t, err) + assert.Equal(t, 7, n) + + err = w.Close() + require.NoError(t, err) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "created", content) +} + +func TestCreate_Good_Overwrite(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "old content")) + + w, err := m.Create("file.txt") + require.NoError(t, err) + _, err = w.Write([]byte("new")) + require.NoError(t, err) + require.NoError(t, w.Close()) + + content, err := m.Read("file.txt") + require.NoError(t, err) + assert.Equal(t, "new", content) +} + +func TestCreate_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Create("") + assert.Error(t, err) +} + +// --- Append Tests --- + +func TestAppend_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("append.txt", "hello")) + + w, err := m.Append("append.txt") + require.NoError(t, err) + + _, err = w.Write([]byte(" world")) + require.NoError(t, err) + require.NoError(t, w.Close()) + + content, err := m.Read("append.txt") + require.NoError(t, err) + assert.Equal(t, "hello world", content) +} + +func TestAppend_Good_NewFile(t *testing.T) { + m := newTestMedium(t) + + w, err := m.Append("new.txt") + require.NoError(t, err) + + _, err = w.Write([]byte("fresh")) + require.NoError(t, err) + require.NoError(t, w.Close()) + + content, err := m.Read("new.txt") + require.NoError(t, err) + assert.Equal(t, "fresh", content) +} + +func TestAppend_Bad_EmptyPath(t *testing.T) { + m := newTestMedium(t) + + _, err := m.Append("") + assert.Error(t, err) +} + +// --- ReadStream Tests --- + +func TestReadStream_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("stream.txt", "streaming content")) + + reader, err := m.ReadStream("stream.txt") + require.NoError(t, err) + defer reader.Close() + + data, err := goio.ReadAll(reader) + require.NoError(t, err) + assert.Equal(t, "streaming content", string(data)) +} + +func TestReadStream_Bad_NotFound(t *testing.T) { + m := newTestMedium(t) + + _, err := m.ReadStream("nonexistent.txt") + assert.Error(t, err) +} + +func TestReadStream_Bad_IsDirectory(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.EnsureDir("mydir")) + _, err := m.ReadStream("mydir") + assert.Error(t, err) +} + +// --- WriteStream Tests --- + +func TestWriteStream_Good(t *testing.T) { + m := newTestMedium(t) + + writer, err := m.WriteStream("output.txt") + require.NoError(t, err) + + _, err = goio.Copy(writer, strings.NewReader("piped data")) + require.NoError(t, err) + require.NoError(t, writer.Close()) + + content, err := m.Read("output.txt") + require.NoError(t, err) + assert.Equal(t, "piped data", content) +} + +// --- Exists Tests --- + +func TestExists_Good(t *testing.T) { + m := newTestMedium(t) + + assert.False(t, m.Exists("nonexistent")) + + require.NoError(t, m.Write("file.txt", "content")) + assert.True(t, m.Exists("file.txt")) + + require.NoError(t, m.EnsureDir("mydir")) + assert.True(t, 
m.Exists("mydir")) +} + +func TestExists_Good_EmptyPath(t *testing.T) { + m := newTestMedium(t) + // Root always exists + assert.True(t, m.Exists("")) +} + +// --- IsDir Tests --- + +func TestIsDir_Good(t *testing.T) { + m := newTestMedium(t) + + require.NoError(t, m.Write("file.txt", "content")) + require.NoError(t, m.EnsureDir("mydir")) + + assert.True(t, m.IsDir("mydir")) + assert.False(t, m.IsDir("file.txt")) + assert.False(t, m.IsDir("nonexistent")) + assert.False(t, m.IsDir("")) +} + +// --- cleanPath Tests --- + +func TestCleanPath_Good(t *testing.T) { + assert.Equal(t, "file.txt", cleanPath("file.txt")) + assert.Equal(t, "dir/file.txt", cleanPath("dir/file.txt")) + assert.Equal(t, "file.txt", cleanPath("/file.txt")) + assert.Equal(t, "file.txt", cleanPath("../file.txt")) + assert.Equal(t, "file.txt", cleanPath("dir/../file.txt")) + assert.Equal(t, "", cleanPath("")) + assert.Equal(t, "", cleanPath(".")) + assert.Equal(t, "", cleanPath("/")) +} + +// --- Interface Compliance --- + +func TestInterfaceCompliance_Ugly(t *testing.T) { + m := newTestMedium(t) + + // Verify all methods exist by asserting the interface shape. + var _ interface { + Read(string) (string, error) + Write(string, string) error + EnsureDir(string) error + IsFile(string) bool + FileGet(string) (string, error) + FileSet(string, string) error + Delete(string) error + DeleteAll(string) error + Rename(string, string) error + List(string) ([]fs.DirEntry, error) + Stat(string) (fs.FileInfo, error) + Open(string) (fs.File, error) + Create(string) (goio.WriteCloser, error) + Append(string) (goio.WriteCloser, error) + ReadStream(string) (goio.ReadCloser, error) + WriteStream(string) (goio.WriteCloser, error) + Exists(string) bool + IsDir(string) bool + } = m +} + +// --- Custom Table --- + +func TestCustomTable_Good(t *testing.T) { + m, err := New(":memory:", WithTable("my_files")) + require.NoError(t, err) + defer m.Close() + + require.NoError(t, m.Write("file.txt", "content")) + + content, err := m.Read("file.txt") + require.NoError(t, err) + assert.Equal(t, "content", content) +} From 4e10c7f38e3212f2337fb0f190014c162260b517 Mon Sep 17 00:00:00 2001 From: Vi Date: Thu, 5 Feb 2026 20:45:55 +0000 Subject: [PATCH 06/10] feat(auth): add PGP challenge-response auth with air-gapped support (#348) (#356) Co-authored-by: Claude Co-authored-by: Claude Opus 4.6 --- pkg/auth/auth.go | 455 +++++++++++++++++++++++++++++++++ pkg/auth/auth_test.go | 581 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1036 insertions(+) create mode 100644 pkg/auth/auth.go create mode 100644 pkg/auth/auth_test.go diff --git a/pkg/auth/auth.go b/pkg/auth/auth.go new file mode 100644 index 00000000..55a0eb00 --- /dev/null +++ b/pkg/auth/auth.go @@ -0,0 +1,455 @@ +// Package auth implements OpenPGP challenge-response authentication with +// support for both online (HTTP) and air-gapped (file-based) transport. +// +// Ported from dAppServer's mod-auth/lethean.service.ts. +// +// Authentication Flow (Online): +// +// 1. Client sends public key to server +// 2. Server generates a random nonce, encrypts it with client's public key +// 3. Client decrypts the nonce and signs it with their private key +// 4. Server verifies the signature, creates a session token +// +// Authentication Flow (Air-Gapped / Courier): +// +// Same crypto but challenge/response are exchanged via files on a Medium. 
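+//
+// A minimal sketch of the courier exchange, mirroring the air-gapped test in
+// this package. Here a is an *Authenticator and userID identifies a
+// registered user; the "transfer/..." paths are illustrative, not fixed by
+// the API:
+//
+//	_ = a.WriteChallengeFile(userID, "transfer/challenge.json")
+//	// the challenge file travels offline; the client decrypts the nonce,
+//	// signs it, and returns the armored signature as a response file
+//	session, err := a.ReadResponseFile(userID, "transfer/response.sig")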
+// +// Storage Layout (via Medium): +// +// users/ +// {userID}.pub PGP public key (armored) +// {userID}.key PGP private key (armored, password-encrypted) +// {userID}.rev Revocation certificate (placeholder) +// {userID}.json User metadata (encrypted with user's public key) +// {userID}.lthn LTHN password hash +package auth + +import ( + "crypto/rand" + "encoding/hex" + "encoding/json" + "fmt" + "sync" + "time" + + coreerr "github.com/host-uk/core/pkg/framework/core" + + "github.com/host-uk/core/pkg/crypt/lthn" + "github.com/host-uk/core/pkg/crypt/pgp" + "github.com/host-uk/core/pkg/io" +) + +// Default durations for challenge and session lifetimes. +const ( + DefaultChallengeTTL = 5 * time.Minute + DefaultSessionTTL = 24 * time.Hour + nonceBytes = 32 +) + +// protectedUsers lists usernames that cannot be deleted. +// The "server" user holds the server keypair; deleting it would +// permanently destroy all joining data and require a full rebuild. +var protectedUsers = map[string]bool{ + "server": true, +} + +// User represents a registered user with PGP credentials. +type User struct { + PublicKey string `json:"public_key"` + KeyID string `json:"key_id"` + Fingerprint string `json:"fingerprint"` + PasswordHash string `json:"password_hash"` // LTHN hash + Created time.Time `json:"created"` + LastLogin time.Time `json:"last_login"` +} + +// Challenge is a PGP-encrypted nonce sent to a client during authentication. +type Challenge struct { + Nonce []byte `json:"nonce"` + Encrypted string `json:"encrypted"` // PGP-encrypted nonce (armored) + ExpiresAt time.Time `json:"expires_at"` +} + +// Session represents an authenticated session. +type Session struct { + Token string `json:"token"` + UserID string `json:"user_id"` + ExpiresAt time.Time `json:"expires_at"` +} + +// Option configures an Authenticator. +type Option func(*Authenticator) + +// WithChallengeTTL sets the lifetime of a challenge before it expires. +func WithChallengeTTL(d time.Duration) Option { + return func(a *Authenticator) { + a.challengeTTL = d + } +} + +// WithSessionTTL sets the lifetime of a session before it expires. +func WithSessionTTL(d time.Duration) Option { + return func(a *Authenticator) { + a.sessionTTL = d + } +} + +// Authenticator manages PGP-based challenge-response authentication. +// All user data and keys are persisted through an io.Medium, which may +// be backed by disk, memory (MockMedium), or any other storage backend. +type Authenticator struct { + medium io.Medium + sessions map[string]*Session + challenges map[string]*Challenge // userID -> pending challenge + mu sync.RWMutex + challengeTTL time.Duration + sessionTTL time.Duration +} + +// New creates an Authenticator that persists user data via the given Medium. +func New(m io.Medium, opts ...Option) *Authenticator { + a := &Authenticator{ + medium: m, + sessions: make(map[string]*Session), + challenges: make(map[string]*Challenge), + challengeTTL: DefaultChallengeTTL, + sessionTTL: DefaultSessionTTL, + } + for _, opt := range opts { + opt(a) + } + return a +} + +// userPath returns the storage path for a user artifact. +func userPath(userID, ext string) string { + return "users/" + userID + ext +} + +// Register creates a new user account. It hashes the username with LTHN to +// produce a userID, generates a PGP keypair (protected by the given password), +// and persists the public key, private key, revocation placeholder, password +// hash, and encrypted metadata via the Medium. 
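+//
+// Illustrative call (the userID used throughout this package is the LTHN
+// hash of the username, as the implementation below shows):
+//
+//	user, err := a.Register("alice", "hunter2")
+//	// on success, user.KeyID == lthn.Hash("alice") and the artifacts are
+//	// stored under users/<KeyID>.*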
+func (a *Authenticator) Register(username, password string) (*User, error) { + const op = "auth.Register" + + userID := lthn.Hash(username) + + // Check if user already exists + if a.medium.IsFile(userPath(userID, ".pub")) { + return nil, coreerr.E(op, "user already exists", nil) + } + + // Ensure users directory exists + if err := a.medium.EnsureDir("users"); err != nil { + return nil, coreerr.E(op, "failed to create users directory", err) + } + + // Generate PGP keypair + kp, err := pgp.CreateKeyPair(userID, userID+"@auth.local", password) + if err != nil { + return nil, coreerr.E(op, "failed to create PGP keypair", err) + } + + // Store public key + if err := a.medium.Write(userPath(userID, ".pub"), kp.PublicKey); err != nil { + return nil, coreerr.E(op, "failed to write public key", err) + } + + // Store private key (already encrypted by PGP if password is non-empty) + if err := a.medium.Write(userPath(userID, ".key"), kp.PrivateKey); err != nil { + return nil, coreerr.E(op, "failed to write private key", err) + } + + // Store revocation certificate placeholder + if err := a.medium.Write(userPath(userID, ".rev"), "REVOCATION_PLACEHOLDER"); err != nil { + return nil, coreerr.E(op, "failed to write revocation certificate", err) + } + + // Store LTHN password hash + passwordHash := lthn.Hash(password) + if err := a.medium.Write(userPath(userID, ".lthn"), passwordHash); err != nil { + return nil, coreerr.E(op, "failed to write password hash", err) + } + + // Build user metadata + now := time.Now() + user := &User{ + PublicKey: kp.PublicKey, + KeyID: userID, + Fingerprint: lthn.Hash(kp.PublicKey), + PasswordHash: passwordHash, + Created: now, + LastLogin: time.Time{}, + } + + // Encrypt metadata with the user's public key and store + metaJSON, err := json.Marshal(user) + if err != nil { + return nil, coreerr.E(op, "failed to marshal user metadata", err) + } + + encMeta, err := pgp.Encrypt(metaJSON, kp.PublicKey) + if err != nil { + return nil, coreerr.E(op, "failed to encrypt user metadata", err) + } + + if err := a.medium.Write(userPath(userID, ".json"), string(encMeta)); err != nil { + return nil, coreerr.E(op, "failed to write user metadata", err) + } + + return user, nil +} + +// CreateChallenge generates a cryptographic challenge for the given user. +// A random nonce is created and encrypted with the user's PGP public key. +// The client must decrypt the nonce and sign it to prove key ownership. +func (a *Authenticator) CreateChallenge(userID string) (*Challenge, error) { + const op = "auth.CreateChallenge" + + // Read user's public key + pubKey, err := a.medium.Read(userPath(userID, ".pub")) + if err != nil { + return nil, coreerr.E(op, "user not found", err) + } + + // Generate random nonce + nonce := make([]byte, nonceBytes) + if _, err := rand.Read(nonce); err != nil { + return nil, coreerr.E(op, "failed to generate nonce", err) + } + + // Encrypt nonce with user's public key + encrypted, err := pgp.Encrypt(nonce, pubKey) + if err != nil { + return nil, coreerr.E(op, "failed to encrypt nonce", err) + } + + challenge := &Challenge{ + Nonce: nonce, + Encrypted: string(encrypted), + ExpiresAt: time.Now().Add(a.challengeTTL), + } + + a.mu.Lock() + a.challenges[userID] = challenge + a.mu.Unlock() + + return challenge, nil +} + +// ValidateResponse verifies a signed nonce from the client. The client must +// have decrypted the challenge nonce and signed it with their private key. +// On success, a new session is created and returned. 
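+//
+// Client side, as exercised in the tests (a real client holds its own
+// private key; the password here protects that key):
+//
+//	nonce, _ := pgp.Decrypt([]byte(challenge.Encrypted), privKey, password)
+//	sig, _ := pgp.Sign(nonce, privKey, password)
+//	session, err := a.ValidateResponse(userID, sig)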
+func (a *Authenticator) ValidateResponse(userID string, signedNonce []byte) (*Session, error) { + const op = "auth.ValidateResponse" + + a.mu.Lock() + challenge, exists := a.challenges[userID] + if exists { + delete(a.challenges, userID) + } + a.mu.Unlock() + + if !exists { + return nil, coreerr.E(op, "no pending challenge for user", nil) + } + + // Check challenge expiry + if time.Now().After(challenge.ExpiresAt) { + return nil, coreerr.E(op, "challenge expired", nil) + } + + // Read user's public key + pubKey, err := a.medium.Read(userPath(userID, ".pub")) + if err != nil { + return nil, coreerr.E(op, "user not found", err) + } + + // Verify signature over the original nonce + if err := pgp.Verify(challenge.Nonce, signedNonce, pubKey); err != nil { + return nil, coreerr.E(op, "signature verification failed", err) + } + + return a.createSession(userID) +} + +// ValidateSession checks whether a token maps to a valid, non-expired session. +func (a *Authenticator) ValidateSession(token string) (*Session, error) { + const op = "auth.ValidateSession" + + a.mu.RLock() + session, exists := a.sessions[token] + a.mu.RUnlock() + + if !exists { + return nil, coreerr.E(op, "session not found", nil) + } + + if time.Now().After(session.ExpiresAt) { + a.mu.Lock() + delete(a.sessions, token) + a.mu.Unlock() + return nil, coreerr.E(op, "session expired", nil) + } + + return session, nil +} + +// RefreshSession extends the expiry of an existing valid session. +func (a *Authenticator) RefreshSession(token string) (*Session, error) { + const op = "auth.RefreshSession" + + a.mu.Lock() + defer a.mu.Unlock() + + session, exists := a.sessions[token] + if !exists { + return nil, coreerr.E(op, "session not found", nil) + } + + if time.Now().After(session.ExpiresAt) { + delete(a.sessions, token) + return nil, coreerr.E(op, "session expired", nil) + } + + session.ExpiresAt = time.Now().Add(a.sessionTTL) + return session, nil +} + +// RevokeSession removes a session, invalidating the token immediately. +func (a *Authenticator) RevokeSession(token string) error { + const op = "auth.RevokeSession" + + a.mu.Lock() + defer a.mu.Unlock() + + if _, exists := a.sessions[token]; !exists { + return coreerr.E(op, "session not found", nil) + } + + delete(a.sessions, token) + return nil +} + +// DeleteUser removes a user and all associated keys from storage. +// The "server" user is protected and cannot be deleted (mirroring the +// original TypeScript implementation's safeguard). +func (a *Authenticator) DeleteUser(userID string) error { + const op = "auth.DeleteUser" + + // Protect special users + if protectedUsers[userID] { + return coreerr.E(op, "cannot delete protected user", nil) + } + + // Check user exists + if !a.medium.IsFile(userPath(userID, ".pub")) { + return coreerr.E(op, "user not found", nil) + } + + // Remove all artifacts + extensions := []string{".pub", ".key", ".rev", ".json", ".lthn"} + for _, ext := range extensions { + p := userPath(userID, ext) + if a.medium.IsFile(p) { + if err := a.medium.Delete(p); err != nil { + return coreerr.E(op, "failed to delete "+ext, err) + } + } + } + + // Revoke any active sessions for this user + a.mu.Lock() + for token, session := range a.sessions { + if session.UserID == userID { + delete(a.sessions, token) + } + } + a.mu.Unlock() + + return nil +} + +// Login performs password-based authentication as a convenience method. +// It verifies the password against the stored LTHN hash and, on success, +// creates a new session. This bypasses the PGP challenge-response flow. 
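+//
+// Illustrative use (no challenge round-trip; the userID is the LTHN hash of
+// the username, matching Register):
+//
+//	session, err := a.Login(lthn.Hash("alice"), "hunter2")
+//	if err == nil {
+//		_, err = a.ValidateSession(session.Token)
+//	}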
+func (a *Authenticator) Login(userID, password string) (*Session, error) { + const op = "auth.Login" + + // Read stored password hash + storedHash, err := a.medium.Read(userPath(userID, ".lthn")) + if err != nil { + return nil, coreerr.E(op, "user not found", err) + } + + // Verify password + if !lthn.Verify(password, storedHash) { + return nil, coreerr.E(op, "invalid password", nil) + } + + return a.createSession(userID) +} + +// WriteChallengeFile writes an encrypted challenge to a file for air-gapped +// (courier) transport. The challenge is created and then its encrypted nonce +// is written to the specified path on the Medium. +func (a *Authenticator) WriteChallengeFile(userID, path string) error { + const op = "auth.WriteChallengeFile" + + challenge, err := a.CreateChallenge(userID) + if err != nil { + return coreerr.E(op, "failed to create challenge", err) + } + + data, err := json.Marshal(challenge) + if err != nil { + return coreerr.E(op, "failed to marshal challenge", err) + } + + if err := a.medium.Write(path, string(data)); err != nil { + return coreerr.E(op, "failed to write challenge file", err) + } + + return nil +} + +// ReadResponseFile reads a signed response from a file and validates it, +// completing the air-gapped authentication flow. The file must contain the +// raw PGP signature bytes (armored). +func (a *Authenticator) ReadResponseFile(userID, path string) (*Session, error) { + const op = "auth.ReadResponseFile" + + content, err := a.medium.Read(path) + if err != nil { + return nil, coreerr.E(op, "failed to read response file", err) + } + + session, err := a.ValidateResponse(userID, []byte(content)) + if err != nil { + return nil, coreerr.E(op, "failed to validate response", err) + } + + return session, nil +} + +// createSession generates a cryptographically random session token and +// stores the session in the in-memory session map. +func (a *Authenticator) createSession(userID string) (*Session, error) { + tokenBytes := make([]byte, 32) + if _, err := rand.Read(tokenBytes); err != nil { + return nil, fmt.Errorf("auth: failed to generate session token: %w", err) + } + + session := &Session{ + Token: hex.EncodeToString(tokenBytes), + UserID: userID, + ExpiresAt: time.Now().Add(a.sessionTTL), + } + + a.mu.Lock() + a.sessions[session.Token] = session + a.mu.Unlock() + + return session, nil +} diff --git a/pkg/auth/auth_test.go b/pkg/auth/auth_test.go new file mode 100644 index 00000000..5e5d0a21 --- /dev/null +++ b/pkg/auth/auth_test.go @@ -0,0 +1,581 @@ +package auth + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/host-uk/core/pkg/crypt/lthn" + "github.com/host-uk/core/pkg/crypt/pgp" + "github.com/host-uk/core/pkg/io" +) + +// helper creates a fresh Authenticator backed by MockMedium. +func newTestAuth(opts ...Option) (*Authenticator, *io.MockMedium) { + m := io.NewMockMedium() + a := New(m, opts...) 
+ return a, m +} + +// --- Register --- + +func TestRegister_Good(t *testing.T) { + a, m := newTestAuth() + + user, err := a.Register("alice", "hunter2") + require.NoError(t, err) + require.NotNil(t, user) + + userID := lthn.Hash("alice") + + // Verify public key is stored + assert.True(t, m.IsFile(userPath(userID, ".pub"))) + assert.True(t, m.IsFile(userPath(userID, ".key"))) + assert.True(t, m.IsFile(userPath(userID, ".rev"))) + assert.True(t, m.IsFile(userPath(userID, ".json"))) + assert.True(t, m.IsFile(userPath(userID, ".lthn"))) + + // Verify user fields + assert.NotEmpty(t, user.PublicKey) + assert.Equal(t, userID, user.KeyID) + assert.NotEmpty(t, user.Fingerprint) + assert.Equal(t, lthn.Hash("hunter2"), user.PasswordHash) + assert.False(t, user.Created.IsZero()) +} + +func TestRegister_Bad(t *testing.T) { + a, _ := newTestAuth() + + // Register first time succeeds + _, err := a.Register("bob", "pass1") + require.NoError(t, err) + + // Duplicate registration should fail + _, err = a.Register("bob", "pass2") + assert.Error(t, err) + assert.Contains(t, err.Error(), "user already exists") +} + +func TestRegister_Ugly(t *testing.T) { + a, _ := newTestAuth() + + // Empty username/password should still work (PGP allows it) + user, err := a.Register("", "") + require.NoError(t, err) + require.NotNil(t, user) +} + +// --- CreateChallenge --- + +func TestCreateChallenge_Good(t *testing.T) { + a, _ := newTestAuth() + + user, err := a.Register("charlie", "pass") + require.NoError(t, err) + + challenge, err := a.CreateChallenge(user.KeyID) + require.NoError(t, err) + require.NotNil(t, challenge) + + assert.Len(t, challenge.Nonce, nonceBytes) + assert.NotEmpty(t, challenge.Encrypted) + assert.True(t, challenge.ExpiresAt.After(time.Now())) +} + +func TestCreateChallenge_Bad(t *testing.T) { + a, _ := newTestAuth() + + // Challenge for non-existent user + _, err := a.CreateChallenge("nonexistent-user-id") + assert.Error(t, err) + assert.Contains(t, err.Error(), "user not found") +} + +func TestCreateChallenge_Ugly(t *testing.T) { + a, _ := newTestAuth() + + // Empty userID + _, err := a.CreateChallenge("") + assert.Error(t, err) +} + +// --- ValidateResponse (full challenge-response flow) --- + +func TestValidateResponse_Good(t *testing.T) { + a, m := newTestAuth() + + // Register user + _, err := a.Register("dave", "password123") + require.NoError(t, err) + + userID := lthn.Hash("dave") + + // Create challenge + challenge, err := a.CreateChallenge(userID) + require.NoError(t, err) + + // Client-side: decrypt nonce, then sign it + privKey, err := m.Read(userPath(userID, ".key")) + require.NoError(t, err) + + decryptedNonce, err := pgp.Decrypt([]byte(challenge.Encrypted), privKey, "password123") + require.NoError(t, err) + assert.Equal(t, challenge.Nonce, decryptedNonce) + + signedNonce, err := pgp.Sign(decryptedNonce, privKey, "password123") + require.NoError(t, err) + + // Validate response + session, err := a.ValidateResponse(userID, signedNonce) + require.NoError(t, err) + require.NotNil(t, session) + + assert.NotEmpty(t, session.Token) + assert.Equal(t, userID, session.UserID) + assert.True(t, session.ExpiresAt.After(time.Now())) +} + +func TestValidateResponse_Bad(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("eve", "pass") + require.NoError(t, err) + userID := lthn.Hash("eve") + + // No pending challenge + _, err = a.ValidateResponse(userID, []byte("fake-signature")) + assert.Error(t, err) + assert.Contains(t, err.Error(), "no pending challenge") +} + +func 
TestValidateResponse_Ugly(t *testing.T) { + a, m := newTestAuth(WithChallengeTTL(1 * time.Millisecond)) + + _, err := a.Register("frank", "pass") + require.NoError(t, err) + userID := lthn.Hash("frank") + + // Create challenge and let it expire + challenge, err := a.CreateChallenge(userID) + require.NoError(t, err) + + time.Sleep(5 * time.Millisecond) + + // Sign with valid key but expired challenge + privKey, err := m.Read(userPath(userID, ".key")) + require.NoError(t, err) + + signedNonce, err := pgp.Sign(challenge.Nonce, privKey, "pass") + require.NoError(t, err) + + _, err = a.ValidateResponse(userID, signedNonce) + assert.Error(t, err) + assert.Contains(t, err.Error(), "challenge expired") +} + +// --- ValidateSession --- + +func TestValidateSession_Good(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("grace", "pass") + require.NoError(t, err) + userID := lthn.Hash("grace") + + session, err := a.Login(userID, "pass") + require.NoError(t, err) + + validated, err := a.ValidateSession(session.Token) + require.NoError(t, err) + assert.Equal(t, session.Token, validated.Token) + assert.Equal(t, userID, validated.UserID) +} + +func TestValidateSession_Bad(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.ValidateSession("nonexistent-token") + assert.Error(t, err) + assert.Contains(t, err.Error(), "session not found") +} + +func TestValidateSession_Ugly(t *testing.T) { + a, _ := newTestAuth(WithSessionTTL(1 * time.Millisecond)) + + _, err := a.Register("heidi", "pass") + require.NoError(t, err) + userID := lthn.Hash("heidi") + + session, err := a.Login(userID, "pass") + require.NoError(t, err) + + time.Sleep(5 * time.Millisecond) + + _, err = a.ValidateSession(session.Token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "session expired") +} + +// --- RefreshSession --- + +func TestRefreshSession_Good(t *testing.T) { + a, _ := newTestAuth(WithSessionTTL(1 * time.Hour)) + + _, err := a.Register("ivan", "pass") + require.NoError(t, err) + userID := lthn.Hash("ivan") + + session, err := a.Login(userID, "pass") + require.NoError(t, err) + + originalExpiry := session.ExpiresAt + + // Small delay to ensure time moves forward + time.Sleep(2 * time.Millisecond) + + refreshed, err := a.RefreshSession(session.Token) + require.NoError(t, err) + assert.True(t, refreshed.ExpiresAt.After(originalExpiry)) +} + +func TestRefreshSession_Bad(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.RefreshSession("nonexistent-token") + assert.Error(t, err) + assert.Contains(t, err.Error(), "session not found") +} + +func TestRefreshSession_Ugly(t *testing.T) { + a, _ := newTestAuth(WithSessionTTL(1 * time.Millisecond)) + + _, err := a.Register("judy", "pass") + require.NoError(t, err) + userID := lthn.Hash("judy") + + session, err := a.Login(userID, "pass") + require.NoError(t, err) + + time.Sleep(5 * time.Millisecond) + + _, err = a.RefreshSession(session.Token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "session expired") +} + +// --- RevokeSession --- + +func TestRevokeSession_Good(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("karl", "pass") + require.NoError(t, err) + userID := lthn.Hash("karl") + + session, err := a.Login(userID, "pass") + require.NoError(t, err) + + err = a.RevokeSession(session.Token) + require.NoError(t, err) + + // Token should no longer be valid + _, err = a.ValidateSession(session.Token) + assert.Error(t, err) +} + +func TestRevokeSession_Bad(t *testing.T) { + a, _ := newTestAuth() + + err := 
a.RevokeSession("nonexistent-token") + assert.Error(t, err) + assert.Contains(t, err.Error(), "session not found") +} + +func TestRevokeSession_Ugly(t *testing.T) { + a, _ := newTestAuth() + + // Revoke empty token + err := a.RevokeSession("") + assert.Error(t, err) +} + +// --- DeleteUser --- + +func TestDeleteUser_Good(t *testing.T) { + a, m := newTestAuth() + + _, err := a.Register("larry", "pass") + require.NoError(t, err) + userID := lthn.Hash("larry") + + // Also create a session that should be cleaned up + _, err = a.Login(userID, "pass") + require.NoError(t, err) + + err = a.DeleteUser(userID) + require.NoError(t, err) + + // All files should be gone + assert.False(t, m.IsFile(userPath(userID, ".pub"))) + assert.False(t, m.IsFile(userPath(userID, ".key"))) + assert.False(t, m.IsFile(userPath(userID, ".rev"))) + assert.False(t, m.IsFile(userPath(userID, ".json"))) + assert.False(t, m.IsFile(userPath(userID, ".lthn"))) + + // Session should be gone + a.mu.RLock() + sessionCount := 0 + for _, s := range a.sessions { + if s.UserID == userID { + sessionCount++ + } + } + a.mu.RUnlock() + assert.Equal(t, 0, sessionCount) +} + +func TestDeleteUser_Bad(t *testing.T) { + a, _ := newTestAuth() + + // Protected user "server" cannot be deleted + err := a.DeleteUser("server") + assert.Error(t, err) + assert.Contains(t, err.Error(), "cannot delete protected user") +} + +func TestDeleteUser_Ugly(t *testing.T) { + a, _ := newTestAuth() + + // Non-existent user + err := a.DeleteUser("nonexistent-user-id") + assert.Error(t, err) + assert.Contains(t, err.Error(), "user not found") +} + +// --- Login --- + +func TestLogin_Good(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("mallory", "secret") + require.NoError(t, err) + userID := lthn.Hash("mallory") + + session, err := a.Login(userID, "secret") + require.NoError(t, err) + require.NotNil(t, session) + + assert.NotEmpty(t, session.Token) + assert.Equal(t, userID, session.UserID) + assert.True(t, session.ExpiresAt.After(time.Now())) +} + +func TestLogin_Bad(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("nancy", "correct-password") + require.NoError(t, err) + userID := lthn.Hash("nancy") + + // Wrong password + _, err = a.Login(userID, "wrong-password") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid password") +} + +func TestLogin_Ugly(t *testing.T) { + a, _ := newTestAuth() + + // Login for non-existent user + _, err := a.Login("nonexistent-user-id", "pass") + assert.Error(t, err) + assert.Contains(t, err.Error(), "user not found") +} + +// --- WriteChallengeFile / ReadResponseFile (Air-Gapped) --- + +func TestAirGappedFlow_Good(t *testing.T) { + a, m := newTestAuth() + + _, err := a.Register("oscar", "airgap-pass") + require.NoError(t, err) + userID := lthn.Hash("oscar") + + // Write challenge to file + challengePath := "transfer/challenge.json" + err = a.WriteChallengeFile(userID, challengePath) + require.NoError(t, err) + assert.True(t, m.IsFile(challengePath)) + + // Read challenge file to get the encrypted nonce (simulating courier) + challengeData, err := m.Read(challengePath) + require.NoError(t, err) + + var challenge Challenge + err = json.Unmarshal([]byte(challengeData), &challenge) + require.NoError(t, err) + + // Client-side: decrypt nonce and sign it + privKey, err := m.Read(userPath(userID, ".key")) + require.NoError(t, err) + + decryptedNonce, err := pgp.Decrypt([]byte(challenge.Encrypted), privKey, "airgap-pass") + require.NoError(t, err) + + signedNonce, err := 
pgp.Sign(decryptedNonce, privKey, "airgap-pass") + require.NoError(t, err) + + // Write signed response to file + responsePath := "transfer/response.sig" + err = m.Write(responsePath, string(signedNonce)) + require.NoError(t, err) + + // Server reads response file + session, err := a.ReadResponseFile(userID, responsePath) + require.NoError(t, err) + require.NotNil(t, session) + + assert.NotEmpty(t, session.Token) + assert.Equal(t, userID, session.UserID) +} + +func TestWriteChallengeFile_Bad(t *testing.T) { + a, _ := newTestAuth() + + // Challenge for non-existent user + err := a.WriteChallengeFile("nonexistent-user", "challenge.json") + assert.Error(t, err) +} + +func TestReadResponseFile_Bad(t *testing.T) { + a, _ := newTestAuth() + + // Response file does not exist + _, err := a.ReadResponseFile("some-user", "nonexistent-file.sig") + assert.Error(t, err) +} + +func TestReadResponseFile_Ugly(t *testing.T) { + a, m := newTestAuth() + + _, err := a.Register("peggy", "pass") + require.NoError(t, err) + userID := lthn.Hash("peggy") + + // Create a challenge + _, err = a.CreateChallenge(userID) + require.NoError(t, err) + + // Write garbage to response file + responsePath := "transfer/bad-response.sig" + err = m.Write(responsePath, "not-a-valid-signature") + require.NoError(t, err) + + _, err = a.ReadResponseFile(userID, responsePath) + assert.Error(t, err) +} + +// --- Options --- + +func TestWithChallengeTTL_Good(t *testing.T) { + ttl := 30 * time.Second + a, _ := newTestAuth(WithChallengeTTL(ttl)) + assert.Equal(t, ttl, a.challengeTTL) +} + +func TestWithSessionTTL_Good(t *testing.T) { + ttl := 2 * time.Hour + a, _ := newTestAuth(WithSessionTTL(ttl)) + assert.Equal(t, ttl, a.sessionTTL) +} + +// --- Full Round-Trip (Online Flow) --- + +func TestFullRoundTrip_Good(t *testing.T) { + a, m := newTestAuth() + + // 1. Register + user, err := a.Register("quinn", "roundtrip-pass") + require.NoError(t, err) + require.NotNil(t, user) + + userID := lthn.Hash("quinn") + + // 2. Create challenge + challenge, err := a.CreateChallenge(userID) + require.NoError(t, err) + + // 3. Client decrypts + signs + privKey, err := m.Read(userPath(userID, ".key")) + require.NoError(t, err) + + nonce, err := pgp.Decrypt([]byte(challenge.Encrypted), privKey, "roundtrip-pass") + require.NoError(t, err) + + sig, err := pgp.Sign(nonce, privKey, "roundtrip-pass") + require.NoError(t, err) + + // 4. Server validates, issues session + session, err := a.ValidateResponse(userID, sig) + require.NoError(t, err) + require.NotNil(t, session) + + // 5. Validate session + validated, err := a.ValidateSession(session.Token) + require.NoError(t, err) + assert.Equal(t, session.Token, validated.Token) + + // 6. Refresh session + refreshed, err := a.RefreshSession(session.Token) + require.NoError(t, err) + assert.Equal(t, session.Token, refreshed.Token) + + // 7. Revoke session + err = a.RevokeSession(session.Token) + require.NoError(t, err) + + // 8. 
Session should be invalid now + _, err = a.ValidateSession(session.Token) + assert.Error(t, err) +} + +// --- Concurrent Access --- + +func TestConcurrentSessions_Good(t *testing.T) { + a, _ := newTestAuth() + + _, err := a.Register("ruth", "pass") + require.NoError(t, err) + userID := lthn.Hash("ruth") + + // Create multiple sessions concurrently + const n = 10 + sessions := make(chan *Session, n) + errs := make(chan error, n) + + for i := 0; i < n; i++ { + go func() { + s, err := a.Login(userID, "pass") + if err != nil { + errs <- err + return + } + sessions <- s + }() + } + + for i := 0; i < n; i++ { + select { + case s := <-sessions: + require.NotNil(t, s) + // Validate each session + _, err := a.ValidateSession(s.Token) + assert.NoError(t, err) + case err := <-errs: + t.Fatalf("concurrent login failed: %v", err) + } + } +} From e5c82dab5e22f9e764cb787d2444a28759a07163 Mon Sep 17 00:00:00 2001 From: Snider Date: Fri, 6 Feb 2026 22:50:18 +0000 Subject: [PATCH 07/10] feat(core-app): FrankenPHP + Wails v3 native desktop app Single 53MB binary embedding PHP 8.4 ZTS runtime, Laravel 12, Livewire 4, and Octane worker mode inside a Wails v3 native desktop window. Co-Authored-By: Claude Opus 4.6 --- Taskfile.yml | 61 + cmd/core-app/Taskfile.yml | 37 + cmd/core-app/app_service.go | 48 + cmd/core-app/embed.go | 52 + cmd/core-app/env.go | 166 + cmd/core-app/go.mod | 67 + cmd/core-app/go.sum | 185 + cmd/core-app/handler.go | 137 + cmd/core-app/icons/appicon.png | Bin 0 -> 76 bytes cmd/core-app/icons/icons.go | 24 + cmd/core-app/icons/tray-dark.png | Bin 0 -> 76 bytes cmd/core-app/icons/tray-light.png | Bin 0 -> 76 bytes cmd/core-app/icons/tray-template.png | Bin 0 -> 76 bytes cmd/core-app/laravel/.env.example | 13 + cmd/core-app/laravel/.gitignore | 5 + cmd/core-app/laravel/app/Livewire/Counter.php | 27 + cmd/core-app/laravel/artisan | 21 + cmd/core-app/laravel/bootstrap/app.php | 19 + cmd/core-app/laravel/composer.json | 29 + cmd/core-app/laravel/composer.lock | 6149 +++++++++++++++++ cmd/core-app/laravel/config/app.php | 19 + cmd/core-app/laravel/config/cache.php | 21 + cmd/core-app/laravel/config/database.php | 25 + cmd/core-app/laravel/config/logging.php | 25 + cmd/core-app/laravel/config/session.php | 22 + cmd/core-app/laravel/config/view.php | 10 + cmd/core-app/laravel/public/index.php | 19 + .../views/components/layout.blade.php | 107 + .../views/livewire/counter.blade.php | 8 + .../laravel/resources/views/welcome.blade.php | 40 + cmd/core-app/laravel/routes/web.php | 9 + cmd/core-app/main.go | 102 + cmd/core-app/native_bridge.go | 96 + go.work | 11 + 34 files changed, 7554 insertions(+) create mode 100644 cmd/core-app/Taskfile.yml create mode 100644 cmd/core-app/app_service.go create mode 100644 cmd/core-app/embed.go create mode 100644 cmd/core-app/env.go create mode 100644 cmd/core-app/go.mod create mode 100644 cmd/core-app/go.sum create mode 100644 cmd/core-app/handler.go create mode 100644 cmd/core-app/icons/appicon.png create mode 100644 cmd/core-app/icons/icons.go create mode 100644 cmd/core-app/icons/tray-dark.png create mode 100644 cmd/core-app/icons/tray-light.png create mode 100644 cmd/core-app/icons/tray-template.png create mode 100644 cmd/core-app/laravel/.env.example create mode 100644 cmd/core-app/laravel/.gitignore create mode 100644 cmd/core-app/laravel/app/Livewire/Counter.php create mode 100644 cmd/core-app/laravel/artisan create mode 100644 cmd/core-app/laravel/bootstrap/app.php create mode 100644 cmd/core-app/laravel/composer.json create mode 100644 
cmd/core-app/laravel/composer.lock create mode 100644 cmd/core-app/laravel/config/app.php create mode 100644 cmd/core-app/laravel/config/cache.php create mode 100644 cmd/core-app/laravel/config/database.php create mode 100644 cmd/core-app/laravel/config/logging.php create mode 100644 cmd/core-app/laravel/config/session.php create mode 100644 cmd/core-app/laravel/config/view.php create mode 100644 cmd/core-app/laravel/public/index.php create mode 100644 cmd/core-app/laravel/resources/views/components/layout.blade.php create mode 100644 cmd/core-app/laravel/resources/views/livewire/counter.blade.php create mode 100644 cmd/core-app/laravel/resources/views/welcome.blade.php create mode 100644 cmd/core-app/laravel/routes/web.php create mode 100644 cmd/core-app/main.go create mode 100644 cmd/core-app/native_bridge.go create mode 100644 go.work diff --git a/Taskfile.yml b/Taskfile.yml index 1e267461..02384851 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -140,6 +140,67 @@ tasks: cmds: - go run ./internal/tools/i18n-validate ./... + # --- Core IDE (Wails v3) --- + ide:dev: + desc: "Run Core IDE in Wails dev mode" + dir: cmd/core-ide + cmds: + - cd frontend && npm install && npm run build + - wails3 dev + + ide:build: + desc: "Build Core IDE production binary" + dir: cmd/core-ide + cmds: + - cd frontend && npm install && npm run build + - wails3 build + + ide:frontend: + desc: "Build Core IDE frontend only" + dir: cmd/core-ide/frontend + cmds: + - npm install + - npm run build + + # --- Core App (FrankenPHP + Wails v3) --- + app:setup: + desc: "Install PHP-ZTS build dependency for Core App" + cmds: + - brew tap shivammathur/php 2>/dev/null || true + - brew install shivammathur/php/php@8.4-zts + + app:composer: + desc: "Install Laravel dependencies for Core App" + dir: cmd/core-app/laravel + cmds: + - composer install --no-dev --optimize-autoloader --no-interaction + + app:build: + desc: "Build Core App (FrankenPHP + Laravel desktop binary)" + dir: cmd/core-app + env: + CGO_ENABLED: "1" + CGO_CFLAGS: + sh: /opt/homebrew/opt/php@8.4-zts/bin/php-config --includes + CGO_LDFLAGS: + sh: "echo -L/opt/homebrew/opt/php@8.4-zts/lib $(/opt/homebrew/opt/php@8.4-zts/bin/php-config --ldflags) $(/opt/homebrew/opt/php@8.4-zts/bin/php-config --libs)" + cmds: + - go build -tags nowatcher -o ../../bin/core-app . + + app:dev: + desc: "Build and run Core App" + dir: cmd/core-app + env: + CGO_ENABLED: "1" + CGO_CFLAGS: + sh: /opt/homebrew/opt/php@8.4-zts/bin/php-config --includes + CGO_LDFLAGS: + sh: "echo -L/opt/homebrew/opt/php@8.4-zts/lib $(/opt/homebrew/opt/php@8.4-zts/bin/php-config --ldflags) $(/opt/homebrew/opt/php@8.4-zts/bin/php-config --libs)" + DYLD_LIBRARY_PATH: "/opt/homebrew/opt/php@8.4-zts/lib" + cmds: + - go build -tags nowatcher -o ../../bin/core-app . 
+ - ../../bin/core-app + # --- Multi-repo (when in workspace) --- dev:health: desc: "Check health of all repos" diff --git a/cmd/core-app/Taskfile.yml b/cmd/core-app/Taskfile.yml new file mode 100644 index 00000000..5f3fc0d4 --- /dev/null +++ b/cmd/core-app/Taskfile.yml @@ -0,0 +1,37 @@ +version: '3' + +vars: + PHP_CONFIG: /opt/homebrew/opt/php@8.4-zts/bin/php-config + CGO_CFLAGS: + sh: "{{.PHP_CONFIG}} --includes" + CGO_LDFLAGS: + sh: "echo -L/opt/homebrew/opt/php@8.4-zts/lib $({{.PHP_CONFIG}} --ldflags) $({{.PHP_CONFIG}} --libs)" + +tasks: + setup: + desc: "Install PHP-ZTS build dependency" + cmds: + - brew tap shivammathur/php 2>/dev/null || true + - brew install shivammathur/php/php@8.4-zts + + build: + desc: "Build core-app binary" + env: + CGO_ENABLED: "1" + CGO_CFLAGS: "{{.CGO_CFLAGS}}" + CGO_LDFLAGS: "{{.CGO_LDFLAGS}}" + cmds: + - go build -tags nowatcher -o ../../bin/core-app . + + dev: + desc: "Build and run core-app" + deps: [build] + env: + DYLD_LIBRARY_PATH: "/opt/homebrew/opt/php@8.4-zts/lib" + cmds: + - ../../bin/core-app + + clean: + desc: "Remove build artifacts" + cmds: + - rm -f ../../bin/core-app diff --git a/cmd/core-app/app_service.go b/cmd/core-app/app_service.go new file mode 100644 index 00000000..30226c8f --- /dev/null +++ b/cmd/core-app/app_service.go @@ -0,0 +1,48 @@ +package main + +import ( + "github.com/wailsapp/wails/v3/pkg/application" +) + +// AppService provides native desktop capabilities to the Wails frontend. +// These methods are callable via window.go.main.AppService.{Method}() +// from any JavaScript/webview context. +type AppService struct { + app *application.App + env *AppEnvironment +} + +func NewAppService(env *AppEnvironment) *AppService { + return &AppService{env: env} +} + +// ServiceStartup is called by Wails when the application starts. +func (s *AppService) ServiceStartup(app *application.App) { + s.app = app +} + +// GetVersion returns the application version. +func (s *AppService) GetVersion() string { + return "0.1.0" +} + +// GetDataDir returns the persistent data directory path. +func (s *AppService) GetDataDir() string { + return s.env.DataDir +} + +// GetDatabasePath returns the SQLite database file path. +func (s *AppService) GetDatabasePath() string { + return s.env.DatabasePath +} + +// ShowWindow shows and focuses the main application window. +func (s *AppService) ShowWindow(name string) { + if s.app == nil { + return + } + if w, ok := s.app.Window.Get(name); ok { + w.Show() + w.Focus() + } +} diff --git a/cmd/core-app/embed.go b/cmd/core-app/embed.go new file mode 100644 index 00000000..97d57f4c --- /dev/null +++ b/cmd/core-app/embed.go @@ -0,0 +1,52 @@ +package main + +import ( + "embed" + "fmt" + "io/fs" + "os" + "path/filepath" +) + +//go:embed all:laravel +var laravelFiles embed.FS + +// extractLaravel copies the embedded Laravel app to a temporary directory. +// FrankenPHP needs real filesystem paths — it cannot serve from embed.FS. +// Returns the path to the extracted Laravel root. 
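+//
+// Expected use at startup; this sketch assumes the caller is responsible for
+// removing the temporary copy when the app exits:
+//
+//	root, err := extractLaravel()
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	defer os.RemoveAll(root)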
+func extractLaravel() (string, error) { + tmpDir, err := os.MkdirTemp("", "core-app-laravel-*") + if err != nil { + return "", fmt.Errorf("create temp dir: %w", err) + } + + err = fs.WalkDir(laravelFiles, "laravel", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + relPath, err := filepath.Rel("laravel", path) + if err != nil { + return err + } + targetPath := filepath.Join(tmpDir, relPath) + + if d.IsDir() { + return os.MkdirAll(targetPath, 0o755) + } + + data, err := laravelFiles.ReadFile(path) + if err != nil { + return fmt.Errorf("read embedded %s: %w", path, err) + } + + return os.WriteFile(targetPath, data, 0o644) + }) + + if err != nil { + os.RemoveAll(tmpDir) + return "", fmt.Errorf("extract Laravel: %w", err) + } + + return tmpDir, nil +} diff --git a/cmd/core-app/env.go b/cmd/core-app/env.go new file mode 100644 index 00000000..5fbde0bf --- /dev/null +++ b/cmd/core-app/env.go @@ -0,0 +1,166 @@ +package main + +import ( + "crypto/rand" + "encoding/base64" + "fmt" + "log" + "os" + "path/filepath" + "runtime" +) + +// AppEnvironment holds the resolved paths for the running application. +type AppEnvironment struct { + // DataDir is the persistent data directory (survives app updates). + DataDir string + // LaravelRoot is the extracted Laravel app in the temp directory. + LaravelRoot string + // DatabasePath is the full path to the SQLite database file. + DatabasePath string +} + +// PrepareEnvironment creates data directories, generates .env, and symlinks +// storage so Laravel can write to persistent locations. +func PrepareEnvironment(laravelRoot string) (*AppEnvironment, error) { + dataDir, err := resolveDataDir() + if err != nil { + return nil, fmt.Errorf("resolve data dir: %w", err) + } + + env := &AppEnvironment{ + DataDir: dataDir, + LaravelRoot: laravelRoot, + DatabasePath: filepath.Join(dataDir, "core-app.sqlite"), + } + + // Create persistent directories + dirs := []string{ + dataDir, + filepath.Join(dataDir, "storage", "app"), + filepath.Join(dataDir, "storage", "framework", "cache", "data"), + filepath.Join(dataDir, "storage", "framework", "sessions"), + filepath.Join(dataDir, "storage", "framework", "views"), + filepath.Join(dataDir, "storage", "logs"), + } + for _, dir := range dirs { + if err := os.MkdirAll(dir, 0o755); err != nil { + return nil, fmt.Errorf("create dir %s: %w", dir, err) + } + } + + // Create empty SQLite database if it doesn't exist + if _, err := os.Stat(env.DatabasePath); os.IsNotExist(err) { + if err := os.WriteFile(env.DatabasePath, nil, 0o644); err != nil { + return nil, fmt.Errorf("create database: %w", err) + } + log.Printf("Created new database: %s", env.DatabasePath) + } + + // Replace the extracted storage/ with a symlink to the persistent one + extractedStorage := filepath.Join(laravelRoot, "storage") + os.RemoveAll(extractedStorage) + persistentStorage := filepath.Join(dataDir, "storage") + if err := os.Symlink(persistentStorage, extractedStorage); err != nil { + return nil, fmt.Errorf("symlink storage: %w", err) + } + + // Generate .env file with resolved paths + if err := writeEnvFile(laravelRoot, env); err != nil { + return nil, fmt.Errorf("write .env: %w", err) + } + + return env, nil +} + +// resolveDataDir returns the OS-appropriate persistent data directory. 
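+//
+// Per the switch below this resolves to:
+//
+//	darwin: ~/Library/Application Support/core-app
+//	linux:  $XDG_DATA_HOME/core-app, else ~/.local/share/core-app
+//	other:  ~/.core-app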
+func resolveDataDir() (string, error) { + var base string + switch runtime.GOOS { + case "darwin": + home, err := os.UserHomeDir() + if err != nil { + return "", err + } + base = filepath.Join(home, "Library", "Application Support", "core-app") + case "linux": + if xdg := os.Getenv("XDG_DATA_HOME"); xdg != "" { + base = filepath.Join(xdg, "core-app") + } else { + home, err := os.UserHomeDir() + if err != nil { + return "", err + } + base = filepath.Join(home, ".local", "share", "core-app") + } + default: + home, err := os.UserHomeDir() + if err != nil { + return "", err + } + base = filepath.Join(home, ".core-app") + } + return base, nil +} + +// writeEnvFile generates the Laravel .env with resolved runtime paths. +func writeEnvFile(laravelRoot string, env *AppEnvironment) error { + appKey, err := loadOrGenerateAppKey(env.DataDir) + if err != nil { + return fmt.Errorf("app key: %w", err) + } + + content := fmt.Sprintf(`APP_NAME="Core App" +APP_ENV=production +APP_KEY=%s +APP_DEBUG=false +APP_URL=http://localhost + +DB_CONNECTION=sqlite +DB_DATABASE="%s" + +CACHE_STORE=file +SESSION_DRIVER=file +LOG_CHANNEL=single +LOG_LEVEL=warning +`, appKey, env.DatabasePath) + + return os.WriteFile(filepath.Join(laravelRoot, ".env"), []byte(content), 0o644) +} + +// loadOrGenerateAppKey loads an existing APP_KEY from the data dir, +// or generates a new one and persists it. +func loadOrGenerateAppKey(dataDir string) (string, error) { + keyFile := filepath.Join(dataDir, ".app-key") + + data, err := os.ReadFile(keyFile) + if err == nil && len(data) > 0 { + return string(data), nil + } + + // Generate a new 32-byte key + key := make([]byte, 32) + if _, err := rand.Read(key); err != nil { + return "", fmt.Errorf("generate key: %w", err) + } + appKey := "base64:" + base64.StdEncoding.EncodeToString(key) + + if err := os.WriteFile(keyFile, []byte(appKey), 0o600); err != nil { + return "", fmt.Errorf("save key: %w", err) + } + + log.Printf("Generated new APP_KEY (saved to %s)", keyFile) + return appKey, nil +} + +// appendEnv appends a key=value pair to the Laravel .env file. 
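+//
+// Illustrative call (the key and value are placeholders, not settings this
+// patch defines):
+//
+//	_ = appendEnv(env.LaravelRoot, "OCTANE_SERVER", "frankenphp")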
+func appendEnv(laravelRoot, key, value string) error { + envFile := filepath.Join(laravelRoot, ".env") + f, err := os.OpenFile(envFile, os.O_APPEND|os.O_WRONLY, 0o644) + if err != nil { + return err + } + defer f.Close() + _, err = fmt.Fprintf(f, "%s=\"%s\"\n", key, value) + return err +} diff --git a/cmd/core-app/go.mod b/cmd/core-app/go.mod new file mode 100644 index 00000000..31efbdcb --- /dev/null +++ b/cmd/core-app/go.mod @@ -0,0 +1,67 @@ +module github.com/host-uk/core/cmd/core-app + +go 1.25.5 + +require ( + github.com/dunglas/frankenphp v1.5.0 + github.com/wailsapp/wails/v3 v3.0.0-alpha.64 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect + github.com/adrg/xdg v0.5.3 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bep/debounce v1.2.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/coder/websocket v1.8.14 // indirect + github.com/cyphar/filepath-securejoin v0.6.1 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dolthub/maphash v0.1.0 // indirect + github.com/ebitengine/purego v0.9.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/gammazero/deque v1.0.0 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.7.0 // indirect + github.com/go-git/go-git/v5 v5.16.4 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/godbus/dbus/v5 v5.2.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 // indirect + github.com/kevinburke/ssh_config v1.4.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/leaanthony/go-ansi-parser v1.6.1 // indirect + github.com/leaanthony/u v1.1.1 // indirect + github.com/lmittmann/tint v1.1.2 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/maypok86/otter v1.2.4 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/pjbgf/sha1cd v0.5.0 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_golang v1.21.1 // indirect + github.com/prometheus/client_model v0.6.1 // indirect + github.com/prometheus/common v0.63.0 // indirect + github.com/prometheus/procfs v0.16.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/samber/lo v1.52.0 // indirect + github.com/sergi/go-diff v1.4.0 // indirect + github.com/skeema/knownhosts v1.3.2 // indirect + github.com/wailsapp/go-webview2 v1.0.23 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + go.uber.org/multierr v1.11.0 // indirect + go.uber.org/zap v1.27.0 // indirect + golang.org/x/crypto v0.47.0 // indirect + golang.org/x/net v0.49.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect + google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect +) + +replace github.com/host-uk/core => ../.. 
diff --git a/cmd/core-app/go.sum b/cmd/core-app/go.sum new file mode 100644 index 00000000..5c59f369 --- /dev/null +++ b/cmd/core-app/go.sum @@ -0,0 +1,185 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78= +github.com/adrg/xdg v0.5.3/go.mod h1:nlTsY+NNiCBGCK2tpm09vRqfVzrc2fLmXGpBLF0zlTQ= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY= +github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/coder/websocket v1.8.14 h1:9L0p0iKiNOibykf283eHkKUHHrpG7f65OE3BhhO7v9g= +github.com/coder/websocket v1.8.14/go.mod h1:NX3SzP+inril6yawo5CQXx8+fk145lPDC6pumgx0mVg= +github.com/cyphar/filepath-securejoin v0.6.1 h1:5CeZ1jPXEiYt3+Z6zqprSAgSWiggmpVyciv8syjIpVE= +github.com/cyphar/filepath-securejoin v0.6.1/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dolthub/maphash v0.1.0 h1:bsQ7JsF4FkkWyrP3oCnFJgrCUAFbFf3kOl4L/QxPDyQ= +github.com/dolthub/maphash v0.1.0/go.mod h1:gkg4Ch4CdCDu5h6PMriVLawB7koZ+5ijb9puGMV50a4= +github.com/dunglas/frankenphp v1.5.0 h1:mrkJNe2gxlqYijGSpYIVbbRYxjYw2bmgAeDFqwREEk4= +github.com/dunglas/frankenphp v1.5.0/go.mod h1:tU9EirkVR0EuIr69IT1XBjSE6YfQY88tZlgkAvLPdOw= +github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A= +github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= 
+github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/gammazero/deque v1.0.0 h1:LTmimT8H7bXkkCy6gZX7zNLtkbz4NdS2z8LZuor3j34= +github.com/gammazero/deque v1.0.0/go.mod h1:iflpYvtGfM3U8S8j+sZEKIak3SAKYpA5/SQewgfXDKo= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.7.0 h1:83lBUJhGWhYp0ngzCMSgllhUSuoHP1iEWYjsPl9nwqM= +github.com/go-git/go-billy/v5 v5.7.0/go.mod h1:/1IUejTKH8xipsAcdfcSAlUlo2J7lkYV8GTKxAT/L3E= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= +github.com/go-git/go-git/v5 v5.16.4 h1:7ajIEZHZJULcyJebDLo99bGgS0jRrOxzZG4uCk2Yb2Y= +github.com/go-git/go-git/v5 v5.16.4/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= +github.com/go-json-experiment/json v0.0.0-20251027170946-4849db3c2f7e h1:Lf/gRkoycfOBPa42vU2bbgPurFong6zXeFtPoxholzU= +github.com/go-json-experiment/json v0.0.0-20251027170946-4849db3c2f7e/go.mod h1:uNVvRXArCGbZ508SxYYTC5v1JWoz2voff5pm25jU1Ok= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/godbus/dbus/v5 v5.2.2 h1:TUR3TgtSVDmjiXOgAAyaZbYmIeP3DPkld3jgKGV8mXQ= +github.com/godbus/dbus/v5 v5.2.2/go.mod h1:3AAv2+hPq5rdnr5txxxRwiGjPXamgoIHgz9FPBfOp3c= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 h1:njuLRcjAuMKr7kI3D85AXWkw6/+v9PwtV6M6o11sWHQ= +github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs= +github.com/kevinburke/ssh_config v1.4.0 h1:6xxtP5bZ2E4NF5tuQulISpTO2z8XbtH8cg1PWkxoFkQ= +github.com/kevinburke/ssh_config v1.4.0/go.mod h1:q2RIzfka+BXARoNexmF9gkxEX7DmvbW9P4hIVx2Kg4M= +github.com/klauspost/compress v1.18.3 h1:9PJRvfbmTabkOX8moIpXPbMMbYN60bWImDDU7L+/6zw= +github.com/klauspost/compress v1.18.3/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod 
h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leaanthony/go-ansi-parser v1.6.1 h1:xd8bzARK3dErqkPFtoF9F3/HgN8UQk0ed1YDKpEz01A= +github.com/leaanthony/go-ansi-parser v1.6.1/go.mod h1:+vva/2y4alzVmmIEpk9QDhA7vLC5zKDTRwfZGOp3IWU= +github.com/leaanthony/u v1.1.1 h1:TUFjwDGlNX+WuwVEzDqQwC2lOv0P4uhTQw7CMFdiK7M= +github.com/leaanthony/u v1.1.1/go.mod h1:9+o6hejoRljvZ3BzdYlVL0JYCwtnAsVuN9pVTQcaRfI= +github.com/lmittmann/tint v1.1.2 h1:2CQzrL6rslrsyjqLDwD11bZ5OpLBPU+g3G/r5LSfS8w= +github.com/lmittmann/tint v1.1.2/go.mod h1:HIS3gSy7qNwGCj+5oRjAutErFBl4BzdQP6cJZ0NfMwE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ= +github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/maypok86/otter v1.2.4 h1:HhW1Pq6VdJkmWwcZZq19BlEQkHtI8xgsQzBVXJU0nfc= +github.com/maypok86/otter v1.2.4/go.mod h1:mKLfoI7v1HOmQMwFgX4QkRk23mX6ge3RDvjdHOWG4R4= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= +github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= +github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0= +github.com/pjbgf/sha1cd v0.5.0/go.mod h1:lhpGlyHLpQZoxMv8HcgXvZEhcGs0PG/vsZnEJ7H0iCM= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk= +github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= +github.com/prometheus/client_model v0.6.1 
h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= +github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= +github.com/prometheus/procfs v0.16.0 h1:xh6oHhKwnOJKMYiYBDWmkHqQPyiY40sny36Cmx2bbsM= +github.com/prometheus/procfs v0.16.0/go.mod h1:8veyXUu3nGP7oaCxhX6yeaM5u4stL2FeMXnCqhDthZg= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw= +github.com/samber/lo v1.52.0/go.mod h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg= +github.com/skeema/knownhosts v1.3.2/go.mod h1:bEg3iQAuw+jyiw+484wwFJoKSLwcfd7fqRy+N0QTiow= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/wailsapp/go-webview2 v1.0.23 h1:jmv8qhz1lHibCc79bMM/a/FqOnnzOGEisLav+a0b9P0= +github.com/wailsapp/go-webview2 v1.0.23/go.mod h1:qJmWAmAmaniuKGZPWwne+uor3AHMB5PFhqiK0Bbj8kc= +github.com/wailsapp/wails/v3 v3.0.0-alpha.64 h1:xAhLFVfdbg7XdZQ5mMQmBv2BglWu8hMqe50Z+3UJvBs= +github.com/wailsapp/wails/v3 v3.0.0-alpha.64/go.mod h1:zvgNL/mlFcX8aRGu6KOz9AHrMmTBD+4hJRQIONqF/Yw= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= +google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/cmd/core-app/handler.go b/cmd/core-app/handler.go new file mode 100644 index 00000000..0ad3d78a --- /dev/null +++ b/cmd/core-app/handler.go @@ -0,0 +1,137 @@ +package main + +import ( + "fmt" + "log" + "net/http" + "os" + "path/filepath" + "strings" + + "github.com/dunglas/frankenphp" +) + +// PHPHandler implements http.Handler by delegating to FrankenPHP. +// It resolves URLs to files (like Caddy's try_files) before passing +// requests to the PHP runtime. 
+type PHPHandler struct { + docRoot string + laravelRoot string +} + +// NewPHPHandler extracts the embedded Laravel app, prepares the environment, +// initialises FrankenPHP with worker mode, and returns the handler. +func NewPHPHandler() (*PHPHandler, *AppEnvironment, func(), error) { + // Extract embedded Laravel to temp directory + laravelRoot, err := extractLaravel() + if err != nil { + return nil, nil, nil, fmt.Errorf("extract Laravel: %w", err) + } + + // Prepare persistent environment + env, err := PrepareEnvironment(laravelRoot) + if err != nil { + os.RemoveAll(laravelRoot) + return nil, nil, nil, fmt.Errorf("prepare environment: %w", err) + } + + docRoot := filepath.Join(laravelRoot, "public") + + log.Printf("Laravel root: %s", laravelRoot) + log.Printf("Document root: %s", docRoot) + log.Printf("Data directory: %s", env.DataDir) + log.Printf("Database: %s", env.DatabasePath) + + // Try Octane worker mode first, fall back to standard mode. + // Worker mode keeps Laravel booted in memory — sub-ms response times. + workerScript := filepath.Join(laravelRoot, "vendor", "laravel", "octane", "bin", "frankenphp-worker.php") + workerEnv := map[string]string{ + "APP_BASE_PATH": laravelRoot, + "FRANKENPHP_WORKER": "1", + } + + workerMode := false + if _, err := os.Stat(workerScript); err == nil { + if err := frankenphp.Init( + frankenphp.WithNumThreads(4), + frankenphp.WithWorkers("laravel", workerScript, 2, workerEnv, nil), + frankenphp.WithPhpIni(map[string]string{ + "display_errors": "Off", + "opcache.enable": "1", + }), + ); err != nil { + log.Printf("Worker mode init failed (%v), falling back to standard mode", err) + } else { + workerMode = true + } + } + + if !workerMode { + if err := frankenphp.Init( + frankenphp.WithNumThreads(4), + frankenphp.WithPhpIni(map[string]string{ + "display_errors": "Off", + "opcache.enable": "1", + }), + ); err != nil { + os.RemoveAll(laravelRoot) + return nil, nil, nil, fmt.Errorf("init FrankenPHP: %w", err) + } + } + + if workerMode { + log.Println("FrankenPHP initialised (Octane worker mode, 2 workers)") + } else { + log.Println("FrankenPHP initialised (standard mode, 4 threads)") + } + + cleanup := func() { + frankenphp.Shutdown() + os.RemoveAll(laravelRoot) + } + + handler := &PHPHandler{ + docRoot: docRoot, + laravelRoot: laravelRoot, + } + + return handler, env, cleanup, nil +} + +func (h *PHPHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + urlPath := r.URL.Path + filePath := filepath.Join(h.docRoot, filepath.Clean(urlPath)) + + info, err := os.Stat(filePath) + if err == nil && info.IsDir() { + // Directory → try index.php inside it + urlPath = strings.TrimRight(urlPath, "/") + "/index.php" + } else if err != nil && !strings.HasSuffix(urlPath, ".php") { + // File not found and not a .php request → front controller + urlPath = "/index.php" + } + + // Serve static assets directly (CSS, JS, images) + if !strings.HasSuffix(urlPath, ".php") { + staticPath := filepath.Join(h.docRoot, filepath.Clean(urlPath)) + if info, err := os.Stat(staticPath); err == nil && !info.IsDir() { + http.ServeFile(w, r, staticPath) + return + } + } + + // Route to FrankenPHP + r.URL.Path = urlPath + + req, err := frankenphp.NewRequestWithContext(r, + frankenphp.WithRequestDocumentRoot(h.docRoot, false), + ) + if err != nil { + http.Error(w, fmt.Sprintf("FrankenPHP request error: %v", err), http.StatusInternalServerError) + return + } + + if err := frankenphp.ServeHTTP(w, req); err != nil { + http.Error(w, fmt.Sprintf("FrankenPHP serve error: %v", err), 
http.StatusInternalServerError) + } +} diff --git a/cmd/core-app/icons/appicon.png b/cmd/core-app/icons/appicon.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-app/icons/icons.go b/cmd/core-app/icons/icons.go new file mode 100644 index 00000000..d1305e19 --- /dev/null +++ b/cmd/core-app/icons/icons.go @@ -0,0 +1,24 @@ +// Package icons provides embedded icon assets for the Core App. +package icons + +import _ "embed" + +// TrayTemplate is the template icon for macOS systray (22x22 PNG, black on transparent). +// +//go:embed tray-template.png +var TrayTemplate []byte + +// TrayLight is the light mode icon for Windows/Linux systray. +// +//go:embed tray-light.png +var TrayLight []byte + +// TrayDark is the dark mode icon for Windows/Linux systray. +// +//go:embed tray-dark.png +var TrayDark []byte + +// AppIcon is the main application icon. +// +//go:embed appicon.png +var AppIcon []byte diff --git a/cmd/core-app/icons/tray-dark.png b/cmd/core-app/icons/tray-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-app/icons/tray-light.png b/cmd/core-app/icons/tray-light.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-app/icons/tray-template.png b/cmd/core-app/icons/tray-template.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-app/laravel/.env.example b/cmd/core-app/laravel/.env.example new file mode 100644 index 00000000..99fd7612 --- /dev/null +++ b/cmd/core-app/laravel/.env.example @@ -0,0 +1,13 @@ +APP_NAME="Core App" +APP_ENV=production +APP_KEY= +APP_DEBUG=false +APP_URL=http://localhost + +DB_CONNECTION=sqlite +DB_DATABASE=/tmp/core-app/database.sqlite + +CACHE_STORE=file +SESSION_DRIVER=file +LOG_CHANNEL=single +LOG_LEVEL=warning diff --git a/cmd/core-app/laravel/.gitignore b/cmd/core-app/laravel/.gitignore new file mode 100644 index 00000000..aeec7040 --- /dev/null +++ b/cmd/core-app/laravel/.gitignore @@ -0,0 +1,5 @@ +/vendor/ +/node_modules/ +/.env +/bootstrap/cache/*.php +/storage/*.key diff --git a/cmd/core-app/laravel/app/Livewire/Counter.php b/cmd/core-app/laravel/app/Livewire/Counter.php new file mode 100644 index 00000000..71f5890f --- /dev/null +++ b/cmd/core-app/laravel/app/Livewire/Counter.php @@ -0,0 +1,27 @@ +<?php + +declare(strict_types=1); + +namespace App\Livewire; + +use Livewire\Component; + +class Counter extends Component +{ + public int $count = 0; + + public function increment(): void + { + $this->count++; + } + + public function decrement(): void + { + $this->count--; + } + + public function render() + { + return view('livewire.counter'); + } +} diff --git a/cmd/core-app/laravel/artisan b/cmd/core-app/laravel/artisan new file mode 100644 index 00000000..20cd96fd --- /dev/null +++ b/cmd/core-app/laravel/artisan @@ -0,0 +1,21 @@ +#!/usr/bin/env php +<?php + +declare(strict_types=1); + +define('LARAVEL_START', microtime(true)); + +require __DIR__.'/vendor/autoload.php'; + +$app = require_once __DIR__.'/bootstrap/app.php'; + +$kernel = $app->make(Illuminate\Contracts\Console\Kernel::class); + +$status =
$kernel->handle( + $input = new Symfony\Component\Console\Input\ArgvInput, + new Symfony\Component\Console\Output\ConsoleOutput +); + +$kernel->terminate($input, $status); + +exit($status); diff --git a/cmd/core-app/laravel/bootstrap/app.php b/cmd/core-app/laravel/bootstrap/app.php new file mode 100644 index 00000000..66615824 --- /dev/null +++ b/cmd/core-app/laravel/bootstrap/app.php @@ -0,0 +1,19 @@ +<?php + +declare(strict_types=1); + +use Illuminate\Foundation\Application; +use Illuminate\Foundation\Configuration\Exceptions; +use Illuminate\Foundation\Configuration\Middleware; + +return Application::configure(basePath: dirname(__DIR__)) + ->withRouting( + web: __DIR__.'/../routes/web.php', + ) + ->withMiddleware(function (Middleware $middleware) { + // + }) + ->withExceptions(function (Exceptions $exceptions) { + // + }) + ->create(); diff --git a/cmd/core-app/laravel/composer.json b/cmd/core-app/laravel/composer.json new file mode 100644 index 00000000..03e625d4 --- /dev/null +++ b/cmd/core-app/laravel/composer.json @@ -0,0 +1,29 @@ +{ + "name": "host-uk/core-app", + "description": "Embedded Laravel application for Core App desktop", + "license": "EUPL-1.2", + "type": "project", + "require": { + "php": "^8.4", + "laravel/framework": "^12.0", + "laravel/octane": "^2.0", + "livewire/livewire": "^4.0" + }, + "autoload": { + "psr-4": { + "App\\": "app/" + } + }, + "config": { + "optimize-autoloader": true, + "preferred-install": "dist", + "sort-packages": true + }, + "minimum-stability": "stable", + "prefer-stable": true, + "scripts": { + "post-autoload-dump": [ + "@php artisan package:discover --ansi" + ] + } +} diff --git a/cmd/core-app/laravel/composer.lock b/cmd/core-app/laravel/composer.lock new file mode 100644 index 00000000..fe0f78d4 --- /dev/null +++ b/cmd/core-app/laravel/composer.lock @@ -0,0 +1,6149 @@ +{ + "_readme": [ + "This file locks the dependencies of your project to a known state", + "Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies", + "This file is @generated automatically" + ], + "content-hash": "42a832df69a90e5b95166df60047b7c3", + "packages": [ + { + "name": "brick/math", + "version": "0.14.6", + "source": { + "type": "git", + "url": "https://github.com/brick/math.git", + "reference": "32498d5e1897e7642c0b961ace2df6d7dc9a3bc3" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/brick/math/zipball/32498d5e1897e7642c0b961ace2df6d7dc9a3bc3", + "reference": "32498d5e1897e7642c0b961ace2df6d7dc9a3bc3", + "shasum": "" + }, + "require": { + "php": "^8.2" + }, + "require-dev": { + "php-coveralls/php-coveralls": "^2.2", + "phpstan/phpstan": "2.1.22", + "phpunit/phpunit": "^11.5" + }, + "type": "library", + "autoload": { + "psr-4": { + "Brick\\Math\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "description": "Arbitrary-precision arithmetic library", + "keywords": [ + "Arbitrary-precision", + "BigInteger", + "BigRational", + "arithmetic", + "bigdecimal", + "bignum", + "bignumber", + "brick", + "decimal", + "integer", + "math", + "mathematics", + "rational" + ], + "support": { + "issues": "https://github.com/brick/math/issues", + "source": "https://github.com/brick/math/tree/0.14.6" + }, + "funding": [ + { + "url": "https://github.com/BenMorel", + "type": "github" + } + ], + "time": "2026-02-05T07:59:58+00:00" + }, + { + "name": "carbonphp/carbon-doctrine-types", + "version": "3.2.0", + "source": { + "type": "git", + "url": "https://github.com/CarbonPHP/carbon-doctrine-types.git", + "reference": "18ba5ddfec8976260ead6e866180bd5d2f71aa1d" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/CarbonPHP/carbon-doctrine-types/zipball/18ba5ddfec8976260ead6e866180bd5d2f71aa1d", +
"reference": "18ba5ddfec8976260ead6e866180bd5d2f71aa1d", + "shasum": "" + }, + "require": { + "php": "^8.1" + }, + "conflict": { + "doctrine/dbal": "<4.0.0 || >=5.0.0" + }, + "require-dev": { + "doctrine/dbal": "^4.0.0", + "nesbot/carbon": "^2.71.0 || ^3.0.0", + "phpunit/phpunit": "^10.3" + }, + "type": "library", + "autoload": { + "psr-4": { + "Carbon\\Doctrine\\": "src/Carbon/Doctrine/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "KyleKatarn", + "email": "kylekatarnls@gmail.com" + } + ], + "description": "Types to use Carbon in Doctrine", + "keywords": [ + "carbon", + "date", + "datetime", + "doctrine", + "time" + ], + "support": { + "issues": "https://github.com/CarbonPHP/carbon-doctrine-types/issues", + "source": "https://github.com/CarbonPHP/carbon-doctrine-types/tree/3.2.0" + }, + "funding": [ + { + "url": "https://github.com/kylekatarnls", + "type": "github" + }, + { + "url": "https://opencollective.com/Carbon", + "type": "open_collective" + }, + { + "url": "https://tidelift.com/funding/github/packagist/nesbot/carbon", + "type": "tidelift" + } + ], + "time": "2024-02-09T16:56:22+00:00" + }, + { + "name": "dflydev/dot-access-data", + "version": "v3.0.3", + "source": { + "type": "git", + "url": "https://github.com/dflydev/dflydev-dot-access-data.git", + "reference": "a23a2bf4f31d3518f3ecb38660c95715dfead60f" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/dflydev/dflydev-dot-access-data/zipball/a23a2bf4f31d3518f3ecb38660c95715dfead60f", + "reference": "a23a2bf4f31d3518f3ecb38660c95715dfead60f", + "shasum": "" + }, + "require": { + "php": "^7.1 || ^8.0" + }, + "require-dev": { + "phpstan/phpstan": "^0.12.42", + "phpunit/phpunit": "^7.5 || ^8.5 || ^9.3", + "scrutinizer/ocular": "1.6.0", + "squizlabs/php_codesniffer": "^3.5", + "vimeo/psalm": "^4.0.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-main": "3.x-dev" + } + }, + "autoload": { + "psr-4": { + "Dflydev\\DotAccessData\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Dragonfly Development Inc.", + "email": "info@dflydev.com", + "homepage": "http://dflydev.com" + }, + { + "name": "Beau Simensen", + "email": "beau@dflydev.com", + "homepage": "http://beausimensen.com" + }, + { + "name": "Carlos Frutos", + "email": "carlos@kiwing.it", + "homepage": "https://github.com/cfrutos" + }, + { + "name": "Colin O'Dell", + "email": "colinodell@gmail.com", + "homepage": "https://www.colinodell.com" + } + ], + "description": "Given a deep data structure, access data by dot notation.", + "homepage": "https://github.com/dflydev/dflydev-dot-access-data", + "keywords": [ + "access", + "data", + "dot", + "notation" + ], + "support": { + "issues": "https://github.com/dflydev/dflydev-dot-access-data/issues", + "source": "https://github.com/dflydev/dflydev-dot-access-data/tree/v3.0.3" + }, + "time": "2024-07-08T12:26:09+00:00" + }, + { + "name": "doctrine/inflector", + "version": "2.1.0", + "source": { + "type": "git", + "url": "https://github.com/doctrine/inflector.git", + "reference": "6d6c96277ea252fc1304627204c3d5e6e15faa3b" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/doctrine/inflector/zipball/6d6c96277ea252fc1304627204c3d5e6e15faa3b", + "reference": "6d6c96277ea252fc1304627204c3d5e6e15faa3b", + "shasum": "" + }, + "require": { + "php": "^7.2 || ^8.0" + }, + "require-dev": { + "doctrine/coding-standard": "^12.0 
|| ^13.0", + "phpstan/phpstan": "^1.12 || ^2.0", + "phpstan/phpstan-phpunit": "^1.4 || ^2.0", + "phpstan/phpstan-strict-rules": "^1.6 || ^2.0", + "phpunit/phpunit": "^8.5 || ^12.2" + }, + "type": "library", + "autoload": { + "psr-4": { + "Doctrine\\Inflector\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Guilherme Blanco", + "email": "guilhermeblanco@gmail.com" + }, + { + "name": "Roman Borschel", + "email": "roman@code-factory.org" + }, + { + "name": "Benjamin Eberlei", + "email": "kontakt@beberlei.de" + }, + { + "name": "Jonathan Wage", + "email": "jonwage@gmail.com" + }, + { + "name": "Johannes Schmitt", + "email": "schmittjoh@gmail.com" + } + ], + "description": "PHP Doctrine Inflector is a small library that can perform string manipulations with regard to upper/lowercase and singular/plural forms of words.", + "homepage": "https://www.doctrine-project.org/projects/inflector.html", + "keywords": [ + "inflection", + "inflector", + "lowercase", + "manipulation", + "php", + "plural", + "singular", + "strings", + "uppercase", + "words" + ], + "support": { + "issues": "https://github.com/doctrine/inflector/issues", + "source": "https://github.com/doctrine/inflector/tree/2.1.0" + }, + "funding": [ + { + "url": "https://www.doctrine-project.org/sponsorship.html", + "type": "custom" + }, + { + "url": "https://www.patreon.com/phpdoctrine", + "type": "patreon" + }, + { + "url": "https://tidelift.com/funding/github/packagist/doctrine%2Finflector", + "type": "tidelift" + } + ], + "time": "2025-08-10T19:31:58+00:00" + }, + { + "name": "doctrine/lexer", + "version": "3.0.1", + "source": { + "type": "git", + "url": "https://github.com/doctrine/lexer.git", + "reference": "31ad66abc0fc9e1a1f2d9bc6a42668d2fbbcd6dd" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/doctrine/lexer/zipball/31ad66abc0fc9e1a1f2d9bc6a42668d2fbbcd6dd", + "reference": "31ad66abc0fc9e1a1f2d9bc6a42668d2fbbcd6dd", + "shasum": "" + }, + "require": { + "php": "^8.1" + }, + "require-dev": { + "doctrine/coding-standard": "^12", + "phpstan/phpstan": "^1.10", + "phpunit/phpunit": "^10.5", + "psalm/plugin-phpunit": "^0.18.3", + "vimeo/psalm": "^5.21" + }, + "type": "library", + "autoload": { + "psr-4": { + "Doctrine\\Common\\Lexer\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Guilherme Blanco", + "email": "guilhermeblanco@gmail.com" + }, + { + "name": "Roman Borschel", + "email": "roman@code-factory.org" + }, + { + "name": "Johannes Schmitt", + "email": "schmittjoh@gmail.com" + } + ], + "description": "PHP Doctrine Lexer parser library that can be used in Top-Down, Recursive Descent Parsers.", + "homepage": "https://www.doctrine-project.org/projects/lexer.html", + "keywords": [ + "annotations", + "docblock", + "lexer", + "parser", + "php" + ], + "support": { + "issues": "https://github.com/doctrine/lexer/issues", + "source": "https://github.com/doctrine/lexer/tree/3.0.1" + }, + "funding": [ + { + "url": "https://www.doctrine-project.org/sponsorship.html", + "type": "custom" + }, + { + "url": "https://www.patreon.com/phpdoctrine", + "type": "patreon" + }, + { + "url": "https://tidelift.com/funding/github/packagist/doctrine%2Flexer", + "type": "tidelift" + } + ], + "time": "2024-02-05T11:56:58+00:00" + }, + { + "name": "dragonmantank/cron-expression", + "version": "v3.6.0", + "source": { + "type": "git", + "url": 
"https://github.com/dragonmantank/cron-expression.git", + "reference": "d61a8a9604ec1f8c3d150d09db6ce98b32675013" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/dragonmantank/cron-expression/zipball/d61a8a9604ec1f8c3d150d09db6ce98b32675013", + "reference": "d61a8a9604ec1f8c3d150d09db6ce98b32675013", + "shasum": "" + }, + "require": { + "php": "^8.2|^8.3|^8.4|^8.5" + }, + "replace": { + "mtdowling/cron-expression": "^1.0" + }, + "require-dev": { + "phpstan/extension-installer": "^1.4.3", + "phpstan/phpstan": "^1.12.32|^2.1.31", + "phpunit/phpunit": "^8.5.48|^9.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "3.x-dev" + } + }, + "autoload": { + "psr-4": { + "Cron\\": "src/Cron/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Chris Tankersley", + "email": "chris@ctankersley.com", + "homepage": "https://github.com/dragonmantank" + } + ], + "description": "CRON for PHP: Calculate the next or previous run date and determine if a CRON expression is due", + "keywords": [ + "cron", + "schedule" + ], + "support": { + "issues": "https://github.com/dragonmantank/cron-expression/issues", + "source": "https://github.com/dragonmantank/cron-expression/tree/v3.6.0" + }, + "funding": [ + { + "url": "https://github.com/dragonmantank", + "type": "github" + } + ], + "time": "2025-10-31T18:51:33+00:00" + }, + { + "name": "egulias/email-validator", + "version": "4.0.4", + "source": { + "type": "git", + "url": "https://github.com/egulias/EmailValidator.git", + "reference": "d42c8731f0624ad6bdc8d3e5e9a4524f68801cfa" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/egulias/EmailValidator/zipball/d42c8731f0624ad6bdc8d3e5e9a4524f68801cfa", + "reference": "d42c8731f0624ad6bdc8d3e5e9a4524f68801cfa", + "shasum": "" + }, + "require": { + "doctrine/lexer": "^2.0 || ^3.0", + "php": ">=8.1", + "symfony/polyfill-intl-idn": "^1.26" + }, + "require-dev": { + "phpunit/phpunit": "^10.2", + "vimeo/psalm": "^5.12" + }, + "suggest": { + "ext-intl": "PHP Internationalization Libraries are required to use the SpoofChecking validation" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "4.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Egulias\\EmailValidator\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Eduardo Gulias Davis" + } + ], + "description": "A library for validating emails against several RFCs", + "homepage": "https://github.com/egulias/EmailValidator", + "keywords": [ + "email", + "emailvalidation", + "emailvalidator", + "validation", + "validator" + ], + "support": { + "issues": "https://github.com/egulias/EmailValidator/issues", + "source": "https://github.com/egulias/EmailValidator/tree/4.0.4" + }, + "funding": [ + { + "url": "https://github.com/egulias", + "type": "github" + } + ], + "time": "2025-03-06T22:45:56+00:00" + }, + { + "name": "fruitcake/php-cors", + "version": "v1.4.0", + "source": { + "type": "git", + "url": "https://github.com/fruitcake/php-cors.git", + "reference": "38aaa6c3fd4c157ffe2a4d10aa8b9b16ba8de379" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/fruitcake/php-cors/zipball/38aaa6c3fd4c157ffe2a4d10aa8b9b16ba8de379", + "reference": "38aaa6c3fd4c157ffe2a4d10aa8b9b16ba8de379", + "shasum": "" + }, + "require": { + "php": "^8.1", + "symfony/http-foundation": "^5.4|^6.4|^7.3|^8" + }, + "require-dev": { + 
"phpstan/phpstan": "^2", + "phpunit/phpunit": "^9", + "squizlabs/php_codesniffer": "^4" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.3-dev" + } + }, + "autoload": { + "psr-4": { + "Fruitcake\\Cors\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fruitcake", + "homepage": "https://fruitcake.nl" + }, + { + "name": "Barryvdh", + "email": "barryvdh@gmail.com" + } + ], + "description": "Cross-origin resource sharing library for the Symfony HttpFoundation", + "homepage": "https://github.com/fruitcake/php-cors", + "keywords": [ + "cors", + "laravel", + "symfony" + ], + "support": { + "issues": "https://github.com/fruitcake/php-cors/issues", + "source": "https://github.com/fruitcake/php-cors/tree/v1.4.0" + }, + "funding": [ + { + "url": "https://fruitcake.nl", + "type": "custom" + }, + { + "url": "https://github.com/barryvdh", + "type": "github" + } + ], + "time": "2025-12-03T09:33:47+00:00" + }, + { + "name": "graham-campbell/result-type", + "version": "v1.1.4", + "source": { + "type": "git", + "url": "https://github.com/GrahamCampbell/Result-Type.git", + "reference": "e01f4a821471308ba86aa202fed6698b6b695e3b" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/GrahamCampbell/Result-Type/zipball/e01f4a821471308ba86aa202fed6698b6b695e3b", + "reference": "e01f4a821471308ba86aa202fed6698b6b695e3b", + "shasum": "" + }, + "require": { + "php": "^7.2.5 || ^8.0", + "phpoption/phpoption": "^1.9.5" + }, + "require-dev": { + "phpunit/phpunit": "^8.5.41 || ^9.6.22 || ^10.5.45 || ^11.5.7" + }, + "type": "library", + "autoload": { + "psr-4": { + "GrahamCampbell\\ResultType\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + } + ], + "description": "An Implementation Of The Result Type", + "keywords": [ + "Graham Campbell", + "GrahamCampbell", + "Result Type", + "Result-Type", + "result" + ], + "support": { + "issues": "https://github.com/GrahamCampbell/Result-Type/issues", + "source": "https://github.com/GrahamCampbell/Result-Type/tree/v1.1.4" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/graham-campbell/result-type", + "type": "tidelift" + } + ], + "time": "2025-12-27T19:43:20+00:00" + }, + { + "name": "guzzlehttp/guzzle", + "version": "7.10.0", + "source": { + "type": "git", + "url": "https://github.com/guzzle/guzzle.git", + "reference": "b51ac707cfa420b7bfd4e4d5e510ba8008e822b4" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/guzzle/guzzle/zipball/b51ac707cfa420b7bfd4e4d5e510ba8008e822b4", + "reference": "b51ac707cfa420b7bfd4e4d5e510ba8008e822b4", + "shasum": "" + }, + "require": { + "ext-json": "*", + "guzzlehttp/promises": "^2.3", + "guzzlehttp/psr7": "^2.8", + "php": "^7.2.5 || ^8.0", + "psr/http-client": "^1.0", + "symfony/deprecation-contracts": "^2.2 || ^3.0" + }, + "provide": { + "psr/http-client-implementation": "1.0" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "ext-curl": "*", + "guzzle/client-integration-tests": "3.0.2", + "php-http/message-factory": "^1.1", + "phpunit/phpunit": "^8.5.39 || ^9.6.20", + "psr/log": "^1.1 || ^2.0 || ^3.0" + }, + "suggest": { + "ext-curl": "Required for CURL handler support", + 
"ext-intl": "Required for Internationalized Domain Name (IDN) support", + "psr/log": "Required for using the Log middleware" + }, + "type": "library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + } + }, + "autoload": { + "files": [ + "src/functions_include.php" + ], + "psr-4": { + "GuzzleHttp\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Michael Dowling", + "email": "mtdowling@gmail.com", + "homepage": "https://github.com/mtdowling" + }, + { + "name": "Jeremy Lindblom", + "email": "jeremeamia@gmail.com", + "homepage": "https://github.com/jeremeamia" + }, + { + "name": "George Mponos", + "email": "gmponos@gmail.com", + "homepage": "https://github.com/gmponos" + }, + { + "name": "Tobias Nyholm", + "email": "tobias.nyholm@gmail.com", + "homepage": "https://github.com/Nyholm" + }, + { + "name": "Márk Sági-Kazár", + "email": "mark.sagikazar@gmail.com", + "homepage": "https://github.com/sagikazarmark" + }, + { + "name": "Tobias Schultze", + "email": "webmaster@tubo-world.de", + "homepage": "https://github.com/Tobion" + } + ], + "description": "Guzzle is a PHP HTTP client library", + "keywords": [ + "client", + "curl", + "framework", + "http", + "http client", + "psr-18", + "psr-7", + "rest", + "web service" + ], + "support": { + "issues": "https://github.com/guzzle/guzzle/issues", + "source": "https://github.com/guzzle/guzzle/tree/7.10.0" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://github.com/Nyholm", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/guzzlehttp/guzzle", + "type": "tidelift" + } + ], + "time": "2025-08-23T22:36:01+00:00" + }, + { + "name": "guzzlehttp/promises", + "version": "2.3.0", + "source": { + "type": "git", + "url": "https://github.com/guzzle/promises.git", + "reference": "481557b130ef3790cf82b713667b43030dc9c957" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/guzzle/promises/zipball/481557b130ef3790cf82b713667b43030dc9c957", + "reference": "481557b130ef3790cf82b713667b43030dc9c957", + "shasum": "" + }, + "require": { + "php": "^7.2.5 || ^8.0" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "phpunit/phpunit": "^8.5.44 || ^9.6.25" + }, + "type": "library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + } + }, + "autoload": { + "psr-4": { + "GuzzleHttp\\Promise\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Michael Dowling", + "email": "mtdowling@gmail.com", + "homepage": "https://github.com/mtdowling" + }, + { + "name": "Tobias Nyholm", + "email": "tobias.nyholm@gmail.com", + "homepage": "https://github.com/Nyholm" + }, + { + "name": "Tobias Schultze", + "email": "webmaster@tubo-world.de", + "homepage": "https://github.com/Tobion" + } + ], + "description": "Guzzle promises library", + "keywords": [ + "promise" + ], + "support": { + "issues": "https://github.com/guzzle/promises/issues", + "source": "https://github.com/guzzle/promises/tree/2.3.0" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" 
+ }, + { + "url": "https://github.com/Nyholm", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/guzzlehttp/promises", + "type": "tidelift" + } + ], + "time": "2025-08-22T14:34:08+00:00" + }, + { + "name": "guzzlehttp/psr7", + "version": "2.8.0", + "source": { + "type": "git", + "url": "https://github.com/guzzle/psr7.git", + "reference": "21dc724a0583619cd1652f673303492272778051" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/guzzle/psr7/zipball/21dc724a0583619cd1652f673303492272778051", + "reference": "21dc724a0583619cd1652f673303492272778051", + "shasum": "" + }, + "require": { + "php": "^7.2.5 || ^8.0", + "psr/http-factory": "^1.0", + "psr/http-message": "^1.1 || ^2.0", + "ralouphie/getallheaders": "^3.0" + }, + "provide": { + "psr/http-factory-implementation": "1.0", + "psr/http-message-implementation": "1.0" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "http-interop/http-factory-tests": "0.9.0", + "phpunit/phpunit": "^8.5.44 || ^9.6.25" + }, + "suggest": { + "laminas/laminas-httphandlerrunner": "Emit PSR-7 responses" + }, + "type": "library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + } + }, + "autoload": { + "psr-4": { + "GuzzleHttp\\Psr7\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Michael Dowling", + "email": "mtdowling@gmail.com", + "homepage": "https://github.com/mtdowling" + }, + { + "name": "George Mponos", + "email": "gmponos@gmail.com", + "homepage": "https://github.com/gmponos" + }, + { + "name": "Tobias Nyholm", + "email": "tobias.nyholm@gmail.com", + "homepage": "https://github.com/Nyholm" + }, + { + "name": "Márk Sági-Kazár", + "email": "mark.sagikazar@gmail.com", + "homepage": "https://github.com/sagikazarmark" + }, + { + "name": "Tobias Schultze", + "email": "webmaster@tubo-world.de", + "homepage": "https://github.com/Tobion" + }, + { + "name": "Márk Sági-Kazár", + "email": "mark.sagikazar@gmail.com", + "homepage": "https://sagikazarmark.hu" + } + ], + "description": "PSR-7 message implementation that also provides common utility methods", + "keywords": [ + "http", + "message", + "psr-7", + "request", + "response", + "stream", + "uri", + "url" + ], + "support": { + "issues": "https://github.com/guzzle/psr7/issues", + "source": "https://github.com/guzzle/psr7/tree/2.8.0" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://github.com/Nyholm", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/guzzlehttp/psr7", + "type": "tidelift" + } + ], + "time": "2025-08-23T21:21:41+00:00" + }, + { + "name": "guzzlehttp/uri-template", + "version": "v1.0.5", + "source": { + "type": "git", + "url": "https://github.com/guzzle/uri-template.git", + "reference": "4f4bbd4e7172148801e76e3decc1e559bdee34e1" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/guzzle/uri-template/zipball/4f4bbd4e7172148801e76e3decc1e559bdee34e1", + "reference": "4f4bbd4e7172148801e76e3decc1e559bdee34e1", + "shasum": "" + }, + "require": { + "php": "^7.2.5 || ^8.0", + "symfony/polyfill-php80": "^1.24" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "phpunit/phpunit": "^8.5.44 || ^9.6.25", + "uri-template/tests": "1.0.0" + }, + "type": 
"library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + } + }, + "autoload": { + "psr-4": { + "GuzzleHttp\\UriTemplate\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Michael Dowling", + "email": "mtdowling@gmail.com", + "homepage": "https://github.com/mtdowling" + }, + { + "name": "George Mponos", + "email": "gmponos@gmail.com", + "homepage": "https://github.com/gmponos" + }, + { + "name": "Tobias Nyholm", + "email": "tobias.nyholm@gmail.com", + "homepage": "https://github.com/Nyholm" + } + ], + "description": "A polyfill class for uri_template of PHP", + "keywords": [ + "guzzlehttp", + "uri-template" + ], + "support": { + "issues": "https://github.com/guzzle/uri-template/issues", + "source": "https://github.com/guzzle/uri-template/tree/v1.0.5" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://github.com/Nyholm", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/guzzlehttp/uri-template", + "type": "tidelift" + } + ], + "time": "2025-08-22T14:27:06+00:00" + }, + { + "name": "laminas/laminas-diactoros", + "version": "3.8.0", + "source": { + "type": "git", + "url": "https://github.com/laminas/laminas-diactoros.git", + "reference": "60c182916b2749480895601649563970f3f12ec4" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/laminas/laminas-diactoros/zipball/60c182916b2749480895601649563970f3f12ec4", + "reference": "60c182916b2749480895601649563970f3f12ec4", + "shasum": "" + }, + "require": { + "php": "~8.2.0 || ~8.3.0 || ~8.4.0 || ~8.5.0", + "psr/http-factory": "^1.1", + "psr/http-message": "^1.1 || ^2.0" + }, + "conflict": { + "amphp/amp": "<2.6.4" + }, + "provide": { + "psr/http-factory-implementation": "^1.0", + "psr/http-message-implementation": "^1.1 || ^2.0" + }, + "require-dev": { + "ext-curl": "*", + "ext-dom": "*", + "ext-gd": "*", + "ext-libxml": "*", + "http-interop/http-factory-tests": "^2.2.0", + "laminas/laminas-coding-standard": "~3.1.0", + "php-http/psr7-integration-tests": "^1.4.0", + "phpunit/phpunit": "^10.5.36", + "psalm/plugin-phpunit": "^0.19.5", + "vimeo/psalm": "^6.13" + }, + "type": "library", + "extra": { + "laminas": { + "module": "Laminas\\Diactoros", + "config-provider": "Laminas\\Diactoros\\ConfigProvider" + } + }, + "autoload": { + "files": [ + "src/functions/create_uploaded_file.php", + "src/functions/marshal_headers_from_sapi.php", + "src/functions/marshal_method_from_sapi.php", + "src/functions/marshal_protocol_version_from_sapi.php", + "src/functions/normalize_server.php", + "src/functions/normalize_uploaded_files.php", + "src/functions/parse_cookie_header.php" + ], + "psr-4": { + "Laminas\\Diactoros\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause" + ], + "description": "PSR HTTP Message implementations", + "homepage": "https://laminas.dev", + "keywords": [ + "http", + "laminas", + "psr", + "psr-17", + "psr-7" + ], + "support": { + "chat": "https://laminas.dev/chat", + "docs": "https://docs.laminas.dev/laminas-diactoros/", + "forum": "https://discourse.laminas.dev", + "issues": "https://github.com/laminas/laminas-diactoros/issues", + "rss": "https://github.com/laminas/laminas-diactoros/releases.atom", + "source": 
"https://github.com/laminas/laminas-diactoros" + }, + "funding": [ + { + "url": "https://funding.communitybridge.org/projects/laminas-project", + "type": "community_bridge" + } + ], + "time": "2025-10-12T15:31:36+00:00" + }, + { + "name": "laravel/framework", + "version": "v12.50.0", + "source": { + "type": "git", + "url": "https://github.com/laravel/framework.git", + "reference": "174ffed91d794a35a541a5eb7c3785a02a34aaba" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/laravel/framework/zipball/174ffed91d794a35a541a5eb7c3785a02a34aaba", + "reference": "174ffed91d794a35a541a5eb7c3785a02a34aaba", + "shasum": "" + }, + "require": { + "brick/math": "^0.11|^0.12|^0.13|^0.14", + "composer-runtime-api": "^2.2", + "doctrine/inflector": "^2.0.5", + "dragonmantank/cron-expression": "^3.4", + "egulias/email-validator": "^3.2.1|^4.0", + "ext-ctype": "*", + "ext-filter": "*", + "ext-hash": "*", + "ext-mbstring": "*", + "ext-openssl": "*", + "ext-session": "*", + "ext-tokenizer": "*", + "fruitcake/php-cors": "^1.3", + "guzzlehttp/guzzle": "^7.8.2", + "guzzlehttp/uri-template": "^1.0", + "laravel/prompts": "^0.3.0", + "laravel/serializable-closure": "^1.3|^2.0", + "league/commonmark": "^2.7", + "league/flysystem": "^3.25.1", + "league/flysystem-local": "^3.25.1", + "league/uri": "^7.5.1", + "monolog/monolog": "^3.0", + "nesbot/carbon": "^3.8.4", + "nunomaduro/termwind": "^2.0", + "php": "^8.2", + "psr/container": "^1.1.1|^2.0.1", + "psr/log": "^1.0|^2.0|^3.0", + "psr/simple-cache": "^1.0|^2.0|^3.0", + "ramsey/uuid": "^4.7", + "symfony/console": "^7.2.0", + "symfony/error-handler": "^7.2.0", + "symfony/finder": "^7.2.0", + "symfony/http-foundation": "^7.2.0", + "symfony/http-kernel": "^7.2.0", + "symfony/mailer": "^7.2.0", + "symfony/mime": "^7.2.0", + "symfony/polyfill-php83": "^1.33", + "symfony/polyfill-php84": "^1.33", + "symfony/polyfill-php85": "^1.33", + "symfony/process": "^7.2.0", + "symfony/routing": "^7.2.0", + "symfony/uid": "^7.2.0", + "symfony/var-dumper": "^7.2.0", + "tijsverkoyen/css-to-inline-styles": "^2.2.5", + "vlucas/phpdotenv": "^5.6.1", + "voku/portable-ascii": "^2.0.2" + }, + "conflict": { + "tightenco/collect": "<5.5.33" + }, + "provide": { + "psr/container-implementation": "1.1|2.0", + "psr/log-implementation": "1.0|2.0|3.0", + "psr/simple-cache-implementation": "1.0|2.0|3.0" + }, + "replace": { + "illuminate/auth": "self.version", + "illuminate/broadcasting": "self.version", + "illuminate/bus": "self.version", + "illuminate/cache": "self.version", + "illuminate/collections": "self.version", + "illuminate/concurrency": "self.version", + "illuminate/conditionable": "self.version", + "illuminate/config": "self.version", + "illuminate/console": "self.version", + "illuminate/container": "self.version", + "illuminate/contracts": "self.version", + "illuminate/cookie": "self.version", + "illuminate/database": "self.version", + "illuminate/encryption": "self.version", + "illuminate/events": "self.version", + "illuminate/filesystem": "self.version", + "illuminate/hashing": "self.version", + "illuminate/http": "self.version", + "illuminate/json-schema": "self.version", + "illuminate/log": "self.version", + "illuminate/macroable": "self.version", + "illuminate/mail": "self.version", + "illuminate/notifications": "self.version", + "illuminate/pagination": "self.version", + "illuminate/pipeline": "self.version", + "illuminate/process": "self.version", + "illuminate/queue": "self.version", + "illuminate/redis": "self.version", + "illuminate/reflection": 
"self.version", + "illuminate/routing": "self.version", + "illuminate/session": "self.version", + "illuminate/support": "self.version", + "illuminate/testing": "self.version", + "illuminate/translation": "self.version", + "illuminate/validation": "self.version", + "illuminate/view": "self.version", + "spatie/once": "*" + }, + "require-dev": { + "ably/ably-php": "^1.0", + "aws/aws-sdk-php": "^3.322.9", + "ext-gmp": "*", + "fakerphp/faker": "^1.24", + "guzzlehttp/promises": "^2.0.3", + "guzzlehttp/psr7": "^2.4", + "laravel/pint": "^1.18", + "league/flysystem-aws-s3-v3": "^3.25.1", + "league/flysystem-ftp": "^3.25.1", + "league/flysystem-path-prefixing": "^3.25.1", + "league/flysystem-read-only": "^3.25.1", + "league/flysystem-sftp-v3": "^3.25.1", + "mockery/mockery": "^1.6.10", + "opis/json-schema": "^2.4.1", + "orchestra/testbench-core": "^10.9.0", + "pda/pheanstalk": "^5.0.6|^7.0.0", + "php-http/discovery": "^1.15", + "phpstan/phpstan": "^2.0", + "phpunit/phpunit": "^10.5.35|^11.5.3|^12.0.1", + "predis/predis": "^2.3|^3.0", + "resend/resend-php": "^0.10.0|^1.0", + "symfony/cache": "^7.2.0", + "symfony/http-client": "^7.2.0", + "symfony/psr-http-message-bridge": "^7.2.0", + "symfony/translation": "^7.2.0" + }, + "suggest": { + "ably/ably-php": "Required to use the Ably broadcast driver (^1.0).", + "aws/aws-sdk-php": "Required to use the SQS queue driver, DynamoDb failed job storage, and SES mail driver (^3.322.9).", + "brianium/paratest": "Required to run tests in parallel (^7.0|^8.0).", + "ext-apcu": "Required to use the APC cache driver.", + "ext-fileinfo": "Required to use the Filesystem class.", + "ext-ftp": "Required to use the Flysystem FTP driver.", + "ext-gd": "Required to use Illuminate\\Http\\Testing\\FileFactory::image().", + "ext-memcached": "Required to use the memcache cache driver.", + "ext-pcntl": "Required to use all features of the queue worker and console signal trapping.", + "ext-pdo": "Required to use all database features.", + "ext-posix": "Required to use all features of the queue worker.", + "ext-redis": "Required to use the Redis cache and queue drivers (^4.0|^5.0|^6.0).", + "fakerphp/faker": "Required to generate fake data using the fake() helper (^1.23).", + "filp/whoops": "Required for friendly error pages in development (^2.14.3).", + "laravel/tinker": "Required to use the tinker console command (^2.0).", + "league/flysystem-aws-s3-v3": "Required to use the Flysystem S3 driver (^3.25.1).", + "league/flysystem-ftp": "Required to use the Flysystem FTP driver (^3.25.1).", + "league/flysystem-path-prefixing": "Required to use the scoped driver (^3.25.1).", + "league/flysystem-read-only": "Required to use read-only disks (^3.25.1)", + "league/flysystem-sftp-v3": "Required to use the Flysystem SFTP driver (^3.25.1).", + "mockery/mockery": "Required to use mocking (^1.6).", + "pda/pheanstalk": "Required to use the beanstalk queue driver (^5.0).", + "php-http/discovery": "Required to use PSR-7 bridging features (^1.15).", + "phpunit/phpunit": "Required to use assertions and run tests (^10.5.35|^11.5.3|^12.0.1).", + "predis/predis": "Required to use the predis connector (^2.3|^3.0).", + "psr/http-message": "Required to allow Storage::put to accept a StreamInterface (^1.0).", + "pusher/pusher-php-server": "Required to use the Pusher broadcast driver (^6.0|^7.0).", + "resend/resend-php": "Required to enable support for the Resend mail transport (^0.10.0|^1.0).", + "symfony/cache": "Required to PSR-6 cache bridge (^7.2).", + "symfony/filesystem": "Required to enable support 
for relative symbolic links (^7.2).", + "symfony/http-client": "Required to enable support for the Symfony API mail transports (^7.2).", + "symfony/mailgun-mailer": "Required to enable support for the Mailgun mail transport (^7.2).", + "symfony/postmark-mailer": "Required to enable support for the Postmark mail transport (^7.2).", + "symfony/psr-http-message-bridge": "Required to use PSR-7 bridging features (^7.2)." + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "12.x-dev" + } + }, + "autoload": { + "files": [ + "src/Illuminate/Collections/functions.php", + "src/Illuminate/Collections/helpers.php", + "src/Illuminate/Events/functions.php", + "src/Illuminate/Filesystem/functions.php", + "src/Illuminate/Foundation/helpers.php", + "src/Illuminate/Log/functions.php", + "src/Illuminate/Reflection/helpers.php", + "src/Illuminate/Support/functions.php", + "src/Illuminate/Support/helpers.php" + ], + "psr-4": { + "Illuminate\\": "src/Illuminate/", + "Illuminate\\Support\\": [ + "src/Illuminate/Macroable/", + "src/Illuminate/Collections/", + "src/Illuminate/Conditionable/", + "src/Illuminate/Reflection/" + ] + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Taylor Otwell", + "email": "taylor@laravel.com" + } + ], + "description": "The Laravel Framework.", + "homepage": "https://laravel.com", + "keywords": [ + "framework", + "laravel" + ], + "support": { + "issues": "https://github.com/laravel/framework/issues", + "source": "https://github.com/laravel/framework" + }, + "time": "2026-02-04T18:34:13+00:00" + }, + { + "name": "laravel/octane", + "version": "v2.13.5", + "source": { + "type": "git", + "url": "https://github.com/laravel/octane.git", + "reference": "c343716659c280a7613a0c10d3241215512355ee" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/laravel/octane/zipball/c343716659c280a7613a0c10d3241215512355ee", + "reference": "c343716659c280a7613a0c10d3241215512355ee", + "shasum": "" + }, + "require": { + "laminas/laminas-diactoros": "^3.0", + "laravel/framework": "^10.10.1|^11.0|^12.0", + "laravel/prompts": "^0.1.24|^0.2.0|^0.3.0", + "laravel/serializable-closure": "^1.3|^2.0", + "nesbot/carbon": "^2.66.0|^3.0", + "php": "^8.1.0", + "symfony/console": "^6.0|^7.0", + "symfony/psr-http-message-bridge": "^2.2.0|^6.4|^7.0" + }, + "conflict": { + "spiral/roadrunner": "<2023.1.0", + "spiral/roadrunner-cli": "<2.6.0", + "spiral/roadrunner-http": "<3.3.0" + }, + "require-dev": { + "guzzlehttp/guzzle": "^7.6.1", + "inertiajs/inertia-laravel": "^1.3.2|^2.0", + "laravel/scout": "^10.2.1", + "laravel/socialite": "^5.6.1", + "livewire/livewire": "^2.12.3|^3.0", + "mockery/mockery": "^1.5.1", + "nunomaduro/collision": "^6.4.0|^7.5.2|^8.0", + "orchestra/testbench": "^8.21|^9.0|^10.0", + "phpstan/phpstan": "^2.1.7", + "phpunit/phpunit": "^10.4|^11.5", + "spiral/roadrunner-cli": "^2.6.0", + "spiral/roadrunner-http": "^3.3.0" + }, + "bin": [ + "bin/roadrunner-worker", + "bin/swoole-server" + ], + "type": "library", + "extra": { + "laravel": { + "aliases": { + "Octane": "Laravel\\Octane\\Facades\\Octane" + }, + "providers": [ + "Laravel\\Octane\\OctaneServiceProvider" + ] + }, + "branch-alias": { + "dev-master": "2.x-dev" + } + }, + "autoload": { + "psr-4": { + "Laravel\\Octane\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Taylor Otwell", + "email": "taylor@laravel.com" + } + ], + "description": 
"Supercharge your Laravel application's performance.", + "keywords": [ + "frankenphp", + "laravel", + "octane", + "roadrunner", + "swoole" + ], + "support": { + "issues": "https://github.com/laravel/octane/issues", + "source": "https://github.com/laravel/octane" + }, + "time": "2026-01-22T17:24:46+00:00" + }, + { + "name": "laravel/prompts", + "version": "v0.3.12", + "source": { + "type": "git", + "url": "https://github.com/laravel/prompts.git", + "reference": "4861ded9003b7f8a158176a0b7666f74ee761be8" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/laravel/prompts/zipball/4861ded9003b7f8a158176a0b7666f74ee761be8", + "reference": "4861ded9003b7f8a158176a0b7666f74ee761be8", + "shasum": "" + }, + "require": { + "composer-runtime-api": "^2.2", + "ext-mbstring": "*", + "php": "^8.1", + "symfony/console": "^6.2|^7.0|^8.0" + }, + "conflict": { + "illuminate/console": ">=10.17.0 <10.25.0", + "laravel/framework": ">=10.17.0 <10.25.0" + }, + "require-dev": { + "illuminate/collections": "^10.0|^11.0|^12.0|^13.0", + "mockery/mockery": "^1.5", + "pestphp/pest": "^2.3|^3.4|^4.0", + "phpstan/phpstan": "^1.12.28", + "phpstan/phpstan-mockery": "^1.1.3" + }, + "suggest": { + "ext-pcntl": "Required for the spinner to be animated." + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-main": "0.3.x-dev" + } + }, + "autoload": { + "files": [ + "src/helpers.php" + ], + "psr-4": { + "Laravel\\Prompts\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "description": "Add beautiful and user-friendly forms to your command-line applications.", + "support": { + "issues": "https://github.com/laravel/prompts/issues", + "source": "https://github.com/laravel/prompts/tree/v0.3.12" + }, + "time": "2026-02-03T06:57:26+00:00" + }, + { + "name": "laravel/serializable-closure", + "version": "v2.0.9", + "source": { + "type": "git", + "url": "https://github.com/laravel/serializable-closure.git", + "reference": "8f631589ab07b7b52fead814965f5a800459cb3e" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/laravel/serializable-closure/zipball/8f631589ab07b7b52fead814965f5a800459cb3e", + "reference": "8f631589ab07b7b52fead814965f5a800459cb3e", + "shasum": "" + }, + "require": { + "php": "^8.1" + }, + "require-dev": { + "illuminate/support": "^10.0|^11.0|^12.0|^13.0", + "nesbot/carbon": "^2.67|^3.0", + "pestphp/pest": "^2.36|^3.0|^4.0", + "phpstan/phpstan": "^2.0", + "symfony/var-dumper": "^6.2.0|^7.0.0|^8.0.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "2.x-dev" + } + }, + "autoload": { + "psr-4": { + "Laravel\\SerializableClosure\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Taylor Otwell", + "email": "taylor@laravel.com" + }, + { + "name": "Nuno Maduro", + "email": "nuno@laravel.com" + } + ], + "description": "Laravel Serializable Closure provides an easy and secure way to serialize closures in PHP.", + "keywords": [ + "closure", + "laravel", + "serializable" + ], + "support": { + "issues": "https://github.com/laravel/serializable-closure/issues", + "source": "https://github.com/laravel/serializable-closure" + }, + "time": "2026-02-03T06:55:34+00:00" + }, + { + "name": "league/commonmark", + "version": "2.8.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/commonmark.git", + "reference": "4efa10c1e56488e658d10adf7b7b7dcd19940bfb" + }, + "dist": { + "type": "zip", + 
"url": "https://api.github.com/repos/thephpleague/commonmark/zipball/4efa10c1e56488e658d10adf7b7b7dcd19940bfb", + "reference": "4efa10c1e56488e658d10adf7b7b7dcd19940bfb", + "shasum": "" + }, + "require": { + "ext-mbstring": "*", + "league/config": "^1.1.1", + "php": "^7.4 || ^8.0", + "psr/event-dispatcher": "^1.0", + "symfony/deprecation-contracts": "^2.1 || ^3.0", + "symfony/polyfill-php80": "^1.16" + }, + "require-dev": { + "cebe/markdown": "^1.0", + "commonmark/cmark": "0.31.1", + "commonmark/commonmark.js": "0.31.1", + "composer/package-versions-deprecated": "^1.8", + "embed/embed": "^4.4", + "erusev/parsedown": "^1.0", + "ext-json": "*", + "github/gfm": "0.29.0", + "michelf/php-markdown": "^1.4 || ^2.0", + "nyholm/psr7": "^1.5", + "phpstan/phpstan": "^1.8.2", + "phpunit/phpunit": "^9.5.21 || ^10.5.9 || ^11.0.0", + "scrutinizer/ocular": "^1.8.1", + "symfony/finder": "^5.3 | ^6.0 | ^7.0", + "symfony/process": "^5.4 | ^6.0 | ^7.0", + "symfony/yaml": "^2.3 | ^3.0 | ^4.0 | ^5.0 | ^6.0 | ^7.0", + "unleashedtech/php-coding-standard": "^3.1.1", + "vimeo/psalm": "^4.24.0 || ^5.0.0 || ^6.0.0" + }, + "suggest": { + "symfony/yaml": "v2.3+ required if using the Front Matter extension" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-main": "2.9-dev" + } + }, + "autoload": { + "psr-4": { + "League\\CommonMark\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause" + ], + "authors": [ + { + "name": "Colin O'Dell", + "email": "colinodell@gmail.com", + "homepage": "https://www.colinodell.com", + "role": "Lead Developer" + } + ], + "description": "Highly-extensible PHP Markdown parser which fully supports the CommonMark spec and GitHub-Flavored Markdown (GFM)", + "homepage": "https://commonmark.thephpleague.com", + "keywords": [ + "commonmark", + "flavored", + "gfm", + "github", + "github-flavored", + "markdown", + "md", + "parser" + ], + "support": { + "docs": "https://commonmark.thephpleague.com/", + "forum": "https://github.com/thephpleague/commonmark/discussions", + "issues": "https://github.com/thephpleague/commonmark/issues", + "rss": "https://github.com/thephpleague/commonmark/releases.atom", + "source": "https://github.com/thephpleague/commonmark" + }, + "funding": [ + { + "url": "https://www.colinodell.com/sponsor", + "type": "custom" + }, + { + "url": "https://www.paypal.me/colinpodell/10.00", + "type": "custom" + }, + { + "url": "https://github.com/colinodell", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/league/commonmark", + "type": "tidelift" + } + ], + "time": "2025-11-26T21:48:24+00:00" + }, + { + "name": "league/config", + "version": "v1.2.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/config.git", + "reference": "754b3604fb2984c71f4af4a9cbe7b57f346ec1f3" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/config/zipball/754b3604fb2984c71f4af4a9cbe7b57f346ec1f3", + "reference": "754b3604fb2984c71f4af4a9cbe7b57f346ec1f3", + "shasum": "" + }, + "require": { + "dflydev/dot-access-data": "^3.0.1", + "nette/schema": "^1.2", + "php": "^7.4 || ^8.0" + }, + "require-dev": { + "phpstan/phpstan": "^1.8.2", + "phpunit/phpunit": "^9.5.5", + "scrutinizer/ocular": "^1.8.1", + "unleashedtech/php-coding-standard": "^3.1", + "vimeo/psalm": "^4.7.3" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-main": "1.2-dev" + } + }, + "autoload": { + "psr-4": { + "League\\Config\\": "src" + } + }, + "notification-url": 
"https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause" + ], + "authors": [ + { + "name": "Colin O'Dell", + "email": "colinodell@gmail.com", + "homepage": "https://www.colinodell.com", + "role": "Lead Developer" + } + ], + "description": "Define configuration arrays with strict schemas and access values with dot notation", + "homepage": "https://config.thephpleague.com", + "keywords": [ + "array", + "config", + "configuration", + "dot", + "dot-access", + "nested", + "schema" + ], + "support": { + "docs": "https://config.thephpleague.com/", + "issues": "https://github.com/thephpleague/config/issues", + "rss": "https://github.com/thephpleague/config/releases.atom", + "source": "https://github.com/thephpleague/config" + }, + "funding": [ + { + "url": "https://www.colinodell.com/sponsor", + "type": "custom" + }, + { + "url": "https://www.paypal.me/colinpodell/10.00", + "type": "custom" + }, + { + "url": "https://github.com/colinodell", + "type": "github" + } + ], + "time": "2022-12-11T20:36:23+00:00" + }, + { + "name": "league/flysystem", + "version": "3.31.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/flysystem.git", + "reference": "1717e0b3642b0df65ecb0cc89cdd99fa840672ff" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/flysystem/zipball/1717e0b3642b0df65ecb0cc89cdd99fa840672ff", + "reference": "1717e0b3642b0df65ecb0cc89cdd99fa840672ff", + "shasum": "" + }, + "require": { + "league/flysystem-local": "^3.0.0", + "league/mime-type-detection": "^1.0.0", + "php": "^8.0.2" + }, + "conflict": { + "async-aws/core": "<1.19.0", + "async-aws/s3": "<1.14.0", + "aws/aws-sdk-php": "3.209.31 || 3.210.0", + "guzzlehttp/guzzle": "<7.0", + "guzzlehttp/ringphp": "<1.1.1", + "phpseclib/phpseclib": "3.0.15", + "symfony/http-client": "<5.2" + }, + "require-dev": { + "async-aws/s3": "^1.5 || ^2.0", + "async-aws/simple-s3": "^1.1 || ^2.0", + "aws/aws-sdk-php": "^3.295.10", + "composer/semver": "^3.0", + "ext-fileinfo": "*", + "ext-ftp": "*", + "ext-mongodb": "^1.3|^2", + "ext-zip": "*", + "friendsofphp/php-cs-fixer": "^3.5", + "google/cloud-storage": "^1.23", + "guzzlehttp/psr7": "^2.6", + "microsoft/azure-storage-blob": "^1.1", + "mongodb/mongodb": "^1.2|^2", + "phpseclib/phpseclib": "^3.0.36", + "phpstan/phpstan": "^1.10", + "phpunit/phpunit": "^9.5.11|^10.0", + "sabre/dav": "^4.6.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "League\\Flysystem\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Frank de Jonge", + "email": "info@frankdejonge.nl" + } + ], + "description": "File storage abstraction for PHP", + "keywords": [ + "WebDAV", + "aws", + "cloud", + "file", + "files", + "filesystem", + "filesystems", + "ftp", + "s3", + "sftp", + "storage" + ], + "support": { + "issues": "https://github.com/thephpleague/flysystem/issues", + "source": "https://github.com/thephpleague/flysystem/tree/3.31.0" + }, + "time": "2026-01-23T15:38:47+00:00" + }, + { + "name": "league/flysystem-local", + "version": "3.31.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/flysystem-local.git", + "reference": "2f669db18a4c20c755c2bb7d3a7b0b2340488079" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/flysystem-local/zipball/2f669db18a4c20c755c2bb7d3a7b0b2340488079", + "reference": "2f669db18a4c20c755c2bb7d3a7b0b2340488079", + "shasum": "" + }, + "require": { + "ext-fileinfo": "*", + 
"league/flysystem": "^3.0.0", + "league/mime-type-detection": "^1.0.0", + "php": "^8.0.2" + }, + "type": "library", + "autoload": { + "psr-4": { + "League\\Flysystem\\Local\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Frank de Jonge", + "email": "info@frankdejonge.nl" + } + ], + "description": "Local filesystem adapter for Flysystem.", + "keywords": [ + "Flysystem", + "file", + "files", + "filesystem", + "local" + ], + "support": { + "source": "https://github.com/thephpleague/flysystem-local/tree/3.31.0" + }, + "time": "2026-01-23T15:30:45+00:00" + }, + { + "name": "league/mime-type-detection", + "version": "1.16.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/mime-type-detection.git", + "reference": "2d6702ff215bf922936ccc1ad31007edc76451b9" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/mime-type-detection/zipball/2d6702ff215bf922936ccc1ad31007edc76451b9", + "reference": "2d6702ff215bf922936ccc1ad31007edc76451b9", + "shasum": "" + }, + "require": { + "ext-fileinfo": "*", + "php": "^7.4 || ^8.0" + }, + "require-dev": { + "friendsofphp/php-cs-fixer": "^3.2", + "phpstan/phpstan": "^0.12.68", + "phpunit/phpunit": "^8.5.8 || ^9.3 || ^10.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "League\\MimeTypeDetection\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Frank de Jonge", + "email": "info@frankdejonge.nl" + } + ], + "description": "Mime-type detection for Flysystem", + "support": { + "issues": "https://github.com/thephpleague/mime-type-detection/issues", + "source": "https://github.com/thephpleague/mime-type-detection/tree/1.16.0" + }, + "funding": [ + { + "url": "https://github.com/frankdejonge", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/league/flysystem", + "type": "tidelift" + } + ], + "time": "2024-09-21T08:32:55+00:00" + }, + { + "name": "league/uri", + "version": "7.8.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/uri.git", + "reference": "4436c6ec8d458e4244448b069cc572d088230b76" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/uri/zipball/4436c6ec8d458e4244448b069cc572d088230b76", + "reference": "4436c6ec8d458e4244448b069cc572d088230b76", + "shasum": "" + }, + "require": { + "league/uri-interfaces": "^7.8", + "php": "^8.1", + "psr/http-factory": "^1" + }, + "conflict": { + "league/uri-schemes": "^1.0" + }, + "suggest": { + "ext-bcmath": "to improve IPV4 host parsing", + "ext-dom": "to convert the URI into an HTML anchor tag", + "ext-fileinfo": "to create Data URI from file contennts", + "ext-gmp": "to improve IPV4 host parsing", + "ext-intl": "to handle IDN host with the best performance", + "ext-uri": "to use the PHP native URI class", + "jeremykendall/php-domain-parser": "to further parse the URI host and resolve its Public Suffix and Top Level Domain", + "league/uri-components": "to provide additional tools to manipulate URI objects components", + "league/uri-polyfill": "to backport the PHP URI extension for older versions of PHP", + "php-64bit": "to improve IPV4 host parsing", + "rowbot/url": "to handle URLs using the WHATWG URL Living Standard specification", + "symfony/polyfill-intl-idn": "to handle IDN host via the Symfony polyfill if ext-intl is not present" + }, + "type": "library", + "extra": { + "branch-alias": { + 
"dev-master": "7.x-dev" + } + }, + "autoload": { + "psr-4": { + "League\\Uri\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ignace Nyamagana Butera", + "email": "nyamsprod@gmail.com", + "homepage": "https://nyamsprod.com" + } + ], + "description": "URI manipulation library", + "homepage": "https://uri.thephpleague.com", + "keywords": [ + "URN", + "data-uri", + "file-uri", + "ftp", + "hostname", + "http", + "https", + "middleware", + "parse_str", + "parse_url", + "psr-7", + "query-string", + "querystring", + "rfc2141", + "rfc3986", + "rfc3987", + "rfc6570", + "rfc8141", + "uri", + "uri-template", + "url", + "ws" + ], + "support": { + "docs": "https://uri.thephpleague.com", + "forum": "https://thephpleague.slack.com", + "issues": "https://github.com/thephpleague/uri-src/issues", + "source": "https://github.com/thephpleague/uri/tree/7.8.0" + }, + "funding": [ + { + "url": "https://github.com/sponsors/nyamsprod", + "type": "github" + } + ], + "time": "2026-01-14T17:24:56+00:00" + }, + { + "name": "league/uri-interfaces", + "version": "7.8.0", + "source": { + "type": "git", + "url": "https://github.com/thephpleague/uri-interfaces.git", + "reference": "c5c5cd056110fc8afaba29fa6b72a43ced42acd4" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/thephpleague/uri-interfaces/zipball/c5c5cd056110fc8afaba29fa6b72a43ced42acd4", + "reference": "c5c5cd056110fc8afaba29fa6b72a43ced42acd4", + "shasum": "" + }, + "require": { + "ext-filter": "*", + "php": "^8.1", + "psr/http-message": "^1.1 || ^2.0" + }, + "suggest": { + "ext-bcmath": "to improve IPV4 host parsing", + "ext-gmp": "to improve IPV4 host parsing", + "ext-intl": "to handle IDN host with the best performance", + "php-64bit": "to improve IPV4 host parsing", + "rowbot/url": "to handle URLs using the WHATWG URL Living Standard specification", + "symfony/polyfill-intl-idn": "to handle IDN host via the Symfony polyfill if ext-intl is not present" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "7.x-dev" + } + }, + "autoload": { + "psr-4": { + "League\\Uri\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ignace Nyamagana Butera", + "email": "nyamsprod@gmail.com", + "homepage": "https://nyamsprod.com" + } + ], + "description": "Common tools for parsing and resolving RFC3987/RFC3986 URI", + "homepage": "https://uri.thephpleague.com", + "keywords": [ + "data-uri", + "file-uri", + "ftp", + "hostname", + "http", + "https", + "parse_str", + "parse_url", + "psr-7", + "query-string", + "querystring", + "rfc3986", + "rfc3987", + "rfc6570", + "uri", + "url", + "ws" + ], + "support": { + "docs": "https://uri.thephpleague.com", + "forum": "https://thephpleague.slack.com", + "issues": "https://github.com/thephpleague/uri-src/issues", + "source": "https://github.com/thephpleague/uri-interfaces/tree/7.8.0" + }, + "funding": [ + { + "url": "https://github.com/sponsors/nyamsprod", + "type": "github" + } + ], + "time": "2026-01-15T06:54:53+00:00" + }, + { + "name": "livewire/livewire", + "version": "v4.1.3", + "source": { + "type": "git", + "url": "https://github.com/livewire/livewire.git", + "reference": "69c871cb15fb95f10cda5acd1ee7e63cd3c494c8" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/livewire/livewire/zipball/69c871cb15fb95f10cda5acd1ee7e63cd3c494c8", + "reference": "69c871cb15fb95f10cda5acd1ee7e63cd3c494c8", + "shasum": 
"" + }, + "require": { + "illuminate/database": "^10.0|^11.0|^12.0", + "illuminate/routing": "^10.0|^11.0|^12.0", + "illuminate/support": "^10.0|^11.0|^12.0", + "illuminate/validation": "^10.0|^11.0|^12.0", + "laravel/prompts": "^0.1.24|^0.2|^0.3", + "league/mime-type-detection": "^1.9", + "php": "^8.1", + "symfony/console": "^6.0|^7.0", + "symfony/http-kernel": "^6.2|^7.0" + }, + "require-dev": { + "calebporzio/sushi": "^2.1", + "laravel/framework": "^10.15.0|^11.0|^12.0", + "mockery/mockery": "^1.3.1", + "orchestra/testbench": "^8.21.0|^9.0|^10.0", + "orchestra/testbench-dusk": "^8.24|^9.1|^10.0", + "phpunit/phpunit": "^10.4|^11.5", + "psy/psysh": "^0.11.22|^0.12" + }, + "type": "library", + "extra": { + "laravel": { + "aliases": { + "Livewire": "Livewire\\Livewire" + }, + "providers": [ + "Livewire\\LivewireServiceProvider" + ] + } + }, + "autoload": { + "files": [ + "src/helpers.php" + ], + "psr-4": { + "Livewire\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Caleb Porzio", + "email": "calebporzio@gmail.com" + } + ], + "description": "A front-end framework for Laravel.", + "support": { + "issues": "https://github.com/livewire/livewire/issues", + "source": "https://github.com/livewire/livewire/tree/v4.1.3" + }, + "funding": [ + { + "url": "https://github.com/livewire", + "type": "github" + } + ], + "time": "2026-02-06T12:19:55+00:00" + }, + { + "name": "monolog/monolog", + "version": "3.10.0", + "source": { + "type": "git", + "url": "https://github.com/Seldaek/monolog.git", + "reference": "b321dd6749f0bf7189444158a3ce785cc16d69b0" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/Seldaek/monolog/zipball/b321dd6749f0bf7189444158a3ce785cc16d69b0", + "reference": "b321dd6749f0bf7189444158a3ce785cc16d69b0", + "shasum": "" + }, + "require": { + "php": ">=8.1", + "psr/log": "^2.0 || ^3.0" + }, + "provide": { + "psr/log-implementation": "3.0.0" + }, + "require-dev": { + "aws/aws-sdk-php": "^3.0", + "doctrine/couchdb": "~1.0@dev", + "elasticsearch/elasticsearch": "^7 || ^8", + "ext-json": "*", + "graylog2/gelf-php": "^1.4.2 || ^2.0", + "guzzlehttp/guzzle": "^7.4.5", + "guzzlehttp/psr7": "^2.2", + "mongodb/mongodb": "^1.8 || ^2.0", + "php-amqplib/php-amqplib": "~2.4 || ^3", + "php-console/php-console": "^3.1.8", + "phpstan/phpstan": "^2", + "phpstan/phpstan-deprecation-rules": "^2", + "phpstan/phpstan-strict-rules": "^2", + "phpunit/phpunit": "^10.5.17 || ^11.0.7", + "predis/predis": "^1.1 || ^2", + "rollbar/rollbar": "^4.0", + "ruflin/elastica": "^7 || ^8", + "symfony/mailer": "^5.4 || ^6", + "symfony/mime": "^5.4 || ^6" + }, + "suggest": { + "aws/aws-sdk-php": "Allow sending log messages to AWS services like DynamoDB", + "doctrine/couchdb": "Allow sending log messages to a CouchDB server", + "elasticsearch/elasticsearch": "Allow sending log messages to an Elasticsearch server via official client", + "ext-amqp": "Allow sending log messages to an AMQP server (1.0+ required)", + "ext-curl": "Required to send log messages using the IFTTTHandler, the LogglyHandler, the SendGridHandler, the SlackWebhookHandler or the TelegramBotHandler", + "ext-mbstring": "Allow to work properly with unicode symbols", + "ext-mongodb": "Allow sending log messages to a MongoDB server (via driver)", + "ext-openssl": "Required to send log messages using SSL", + "ext-sockets": "Allow sending log messages to a Syslog server (via UDP driver)", + "graylog2/gelf-php": "Allow sending log messages to a GrayLog2 
server", + "mongodb/mongodb": "Allow sending log messages to a MongoDB server (via library)", + "php-amqplib/php-amqplib": "Allow sending log messages to an AMQP server using php-amqplib", + "rollbar/rollbar": "Allow sending log messages to Rollbar", + "ruflin/elastica": "Allow sending log messages to an Elastic Search server" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-main": "3.x-dev" + } + }, + "autoload": { + "psr-4": { + "Monolog\\": "src/Monolog" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Jordi Boggiano", + "email": "j.boggiano@seld.be", + "homepage": "https://seld.be" + } + ], + "description": "Sends your logs to files, sockets, inboxes, databases and various web services", + "homepage": "https://github.com/Seldaek/monolog", + "keywords": [ + "log", + "logging", + "psr-3" + ], + "support": { + "issues": "https://github.com/Seldaek/monolog/issues", + "source": "https://github.com/Seldaek/monolog/tree/3.10.0" + }, + "funding": [ + { + "url": "https://github.com/Seldaek", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/monolog/monolog", + "type": "tidelift" + } + ], + "time": "2026-01-02T08:56:05+00:00" + }, + { + "name": "nesbot/carbon", + "version": "3.11.1", + "source": { + "type": "git", + "url": "https://github.com/CarbonPHP/carbon.git", + "reference": "f438fcc98f92babee98381d399c65336f3a3827f" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/CarbonPHP/carbon/zipball/f438fcc98f92babee98381d399c65336f3a3827f", + "reference": "f438fcc98f92babee98381d399c65336f3a3827f", + "shasum": "" + }, + "require": { + "carbonphp/carbon-doctrine-types": "<100.0", + "ext-json": "*", + "php": "^8.1", + "psr/clock": "^1.0", + "symfony/clock": "^6.3.12 || ^7.0 || ^8.0", + "symfony/polyfill-mbstring": "^1.0", + "symfony/translation": "^4.4.18 || ^5.2.1 || ^6.0 || ^7.0 || ^8.0" + }, + "provide": { + "psr/clock-implementation": "1.0" + }, + "require-dev": { + "doctrine/dbal": "^3.6.3 || ^4.0", + "doctrine/orm": "^2.15.2 || ^3.0", + "friendsofphp/php-cs-fixer": "^v3.87.1", + "kylekatarnls/multi-tester": "^2.5.3", + "phpmd/phpmd": "^2.15.0", + "phpstan/extension-installer": "^1.4.3", + "phpstan/phpstan": "^2.1.22", + "phpunit/phpunit": "^10.5.53", + "squizlabs/php_codesniffer": "^3.13.4 || ^4.0.0" + }, + "bin": [ + "bin/carbon" + ], + "type": "library", + "extra": { + "laravel": { + "providers": [ + "Carbon\\Laravel\\ServiceProvider" + ] + }, + "phpstan": { + "includes": [ + "extension.neon" + ] + }, + "branch-alias": { + "dev-2.x": "2.x-dev", + "dev-master": "3.x-dev" + } + }, + "autoload": { + "psr-4": { + "Carbon\\": "src/Carbon/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Brian Nesbitt", + "email": "brian@nesbot.com", + "homepage": "https://markido.com" + }, + { + "name": "kylekatarnls", + "homepage": "https://github.com/kylekatarnls" + } + ], + "description": "An API extension for DateTime that supports 281 different languages.", + "homepage": "https://carbonphp.github.io/carbon/", + "keywords": [ + "date", + "datetime", + "time" + ], + "support": { + "docs": "https://carbonphp.github.io/carbon/guide/getting-started/introduction.html", + "issues": "https://github.com/CarbonPHP/carbon/issues", + "source": "https://github.com/CarbonPHP/carbon" + }, + "funding": [ + { + "url": "https://github.com/sponsors/kylekatarnls", + "type": "github" + }, + { + "url": 
"https://opencollective.com/Carbon#sponsor", + "type": "opencollective" + }, + { + "url": "https://tidelift.com/subscription/pkg/packagist-nesbot-carbon?utm_source=packagist-nesbot-carbon&utm_medium=referral&utm_campaign=readme", + "type": "tidelift" + } + ], + "time": "2026-01-29T09:26:29+00:00" + }, + { + "name": "nette/schema", + "version": "v1.3.3", + "source": { + "type": "git", + "url": "https://github.com/nette/schema.git", + "reference": "2befc2f42d7c715fd9d95efc31b1081e5d765004" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/nette/schema/zipball/2befc2f42d7c715fd9d95efc31b1081e5d765004", + "reference": "2befc2f42d7c715fd9d95efc31b1081e5d765004", + "shasum": "" + }, + "require": { + "nette/utils": "^4.0", + "php": "8.1 - 8.5" + }, + "require-dev": { + "nette/tester": "^2.5.2", + "phpstan/phpstan-nette": "^2.0@stable", + "tracy/tracy": "^2.8" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.3-dev" + } + }, + "autoload": { + "psr-4": { + "Nette\\": "src" + }, + "classmap": [ + "src/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause", + "GPL-2.0-only", + "GPL-3.0-only" + ], + "authors": [ + { + "name": "David Grudl", + "homepage": "https://davidgrudl.com" + }, + { + "name": "Nette Community", + "homepage": "https://nette.org/contributors" + } + ], + "description": "📐 Nette Schema: validating data structures against a given Schema.", + "homepage": "https://nette.org", + "keywords": [ + "config", + "nette" + ], + "support": { + "issues": "https://github.com/nette/schema/issues", + "source": "https://github.com/nette/schema/tree/v1.3.3" + }, + "time": "2025-10-30T22:57:59+00:00" + }, + { + "name": "nette/utils", + "version": "v4.1.2", + "source": { + "type": "git", + "url": "https://github.com/nette/utils.git", + "reference": "f76b5dc3d6c6d3043c8d937df2698515b99cbaf5" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/nette/utils/zipball/f76b5dc3d6c6d3043c8d937df2698515b99cbaf5", + "reference": "f76b5dc3d6c6d3043c8d937df2698515b99cbaf5", + "shasum": "" + }, + "require": { + "php": "8.2 - 8.5" + }, + "conflict": { + "nette/finder": "<3", + "nette/schema": "<1.2.2" + }, + "require-dev": { + "jetbrains/phpstorm-attributes": "^1.2", + "nette/tester": "^2.5", + "phpstan/phpstan": "^2.0@stable", + "tracy/tracy": "^2.9" + }, + "suggest": { + "ext-gd": "to use Image", + "ext-iconv": "to use Strings::webalize(), toAscii(), chr() and reverse()", + "ext-intl": "to use Strings::webalize(), toAscii(), normalize() and compare()", + "ext-json": "to use Nette\\Utils\\Json", + "ext-mbstring": "to use Strings::lower() etc...", + "ext-tokenizer": "to use Nette\\Utils\\Reflection::getUseStatements()" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "4.1-dev" + } + }, + "autoload": { + "psr-4": { + "Nette\\": "src" + }, + "classmap": [ + "src/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause", + "GPL-2.0-only", + "GPL-3.0-only" + ], + "authors": [ + { + "name": "David Grudl", + "homepage": "https://davidgrudl.com" + }, + { + "name": "Nette Community", + "homepage": "https://nette.org/contributors" + } + ], + "description": "🛠 Nette Utils: lightweight utilities for string & array manipulation, image handling, safe JSON encoding/decoding, validation, slug or strong password generating etc.", + "homepage": "https://nette.org", + "keywords": [ + "array", + "core", + "datetime", + "images", + "json", + "nette", + 
"paginator", + "password", + "slugify", + "string", + "unicode", + "utf-8", + "utility", + "validation" + ], + "support": { + "issues": "https://github.com/nette/utils/issues", + "source": "https://github.com/nette/utils/tree/v4.1.2" + }, + "time": "2026-02-03T17:21:09+00:00" + }, + { + "name": "nunomaduro/termwind", + "version": "v2.3.3", + "source": { + "type": "git", + "url": "https://github.com/nunomaduro/termwind.git", + "reference": "6fb2a640ff502caace8e05fd7be3b503a7e1c017" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/nunomaduro/termwind/zipball/6fb2a640ff502caace8e05fd7be3b503a7e1c017", + "reference": "6fb2a640ff502caace8e05fd7be3b503a7e1c017", + "shasum": "" + }, + "require": { + "ext-mbstring": "*", + "php": "^8.2", + "symfony/console": "^7.3.6" + }, + "require-dev": { + "illuminate/console": "^11.46.1", + "laravel/pint": "^1.25.1", + "mockery/mockery": "^1.6.12", + "pestphp/pest": "^2.36.0 || ^3.8.4 || ^4.1.3", + "phpstan/phpstan": "^1.12.32", + "phpstan/phpstan-strict-rules": "^1.6.2", + "symfony/var-dumper": "^7.3.5", + "thecodingmachine/phpstan-strict-rules": "^1.0.0" + }, + "type": "library", + "extra": { + "laravel": { + "providers": [ + "Termwind\\Laravel\\TermwindServiceProvider" + ] + }, + "branch-alias": { + "dev-2.x": "2.x-dev" + } + }, + "autoload": { + "files": [ + "src/Functions.php" + ], + "psr-4": { + "Termwind\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nuno Maduro", + "email": "enunomaduro@gmail.com" + } + ], + "description": "Its like Tailwind CSS, but for the console.", + "keywords": [ + "cli", + "console", + "css", + "package", + "php", + "style" + ], + "support": { + "issues": "https://github.com/nunomaduro/termwind/issues", + "source": "https://github.com/nunomaduro/termwind/tree/v2.3.3" + }, + "funding": [ + { + "url": "https://www.paypal.com/paypalme/enunomaduro", + "type": "custom" + }, + { + "url": "https://github.com/nunomaduro", + "type": "github" + }, + { + "url": "https://github.com/xiCO2k", + "type": "github" + } + ], + "time": "2025-11-20T02:34:59+00:00" + }, + { + "name": "phpoption/phpoption", + "version": "1.9.5", + "source": { + "type": "git", + "url": "https://github.com/schmittjoh/php-option.git", + "reference": "75365b91986c2405cf5e1e012c5595cd487a98be" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/schmittjoh/php-option/zipball/75365b91986c2405cf5e1e012c5595cd487a98be", + "reference": "75365b91986c2405cf5e1e012c5595cd487a98be", + "shasum": "" + }, + "require": { + "php": "^7.2.5 || ^8.0" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "phpunit/phpunit": "^8.5.44 || ^9.6.25 || ^10.5.53 || ^11.5.34" + }, + "type": "library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + }, + "branch-alias": { + "dev-master": "1.9-dev" + } + }, + "autoload": { + "psr-4": { + "PhpOption\\": "src/PhpOption/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "Apache-2.0" + ], + "authors": [ + { + "name": "Johannes M. 
Schmitt", + "email": "schmittjoh@gmail.com", + "homepage": "https://github.com/schmittjoh" + }, + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + } + ], + "description": "Option Type for PHP", + "keywords": [ + "language", + "option", + "php", + "type" + ], + "support": { + "issues": "https://github.com/schmittjoh/php-option/issues", + "source": "https://github.com/schmittjoh/php-option/tree/1.9.5" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/phpoption/phpoption", + "type": "tidelift" + } + ], + "time": "2025-12-27T19:41:33+00:00" + }, + { + "name": "psr/clock", + "version": "1.0.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/clock.git", + "reference": "e41a24703d4560fd0acb709162f73b8adfc3aa0d" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/clock/zipball/e41a24703d4560fd0acb709162f73b8adfc3aa0d", + "reference": "e41a24703d4560fd0acb709162f73b8adfc3aa0d", + "shasum": "" + }, + "require": { + "php": "^7.0 || ^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Psr\\Clock\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common interface for reading the clock.", + "homepage": "https://github.com/php-fig/clock", + "keywords": [ + "clock", + "now", + "psr", + "psr-20", + "time" + ], + "support": { + "issues": "https://github.com/php-fig/clock/issues", + "source": "https://github.com/php-fig/clock/tree/1.0.0" + }, + "time": "2022-11-25T14:36:26+00:00" + }, + { + "name": "psr/container", + "version": "2.0.2", + "source": { + "type": "git", + "url": "https://github.com/php-fig/container.git", + "reference": "c71ecc56dfe541dbd90c5360474fbc405f8d5963" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/container/zipball/c71ecc56dfe541dbd90c5360474fbc405f8d5963", + "reference": "c71ecc56dfe541dbd90c5360474fbc405f8d5963", + "shasum": "" + }, + "require": { + "php": ">=7.4.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "2.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\Container\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common Container Interface (PHP FIG PSR-11)", + "homepage": "https://github.com/php-fig/container", + "keywords": [ + "PSR-11", + "container", + "container-interface", + "container-interop", + "psr" + ], + "support": { + "issues": "https://github.com/php-fig/container/issues", + "source": "https://github.com/php-fig/container/tree/2.0.2" + }, + "time": "2021-11-05T16:47:00+00:00" + }, + { + "name": "psr/event-dispatcher", + "version": "1.0.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/event-dispatcher.git", + "reference": "dbefd12671e8a14ec7f180cab83036ed26714bb0" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/event-dispatcher/zipball/dbefd12671e8a14ec7f180cab83036ed26714bb0", + "reference": "dbefd12671e8a14ec7f180cab83036ed26714bb0", + "shasum": "" + }, + "require": { + "php": ">=7.2.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + 
"autoload": { + "psr-4": { + "Psr\\EventDispatcher\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "http://www.php-fig.org/" + } + ], + "description": "Standard interfaces for event handling.", + "keywords": [ + "events", + "psr", + "psr-14" + ], + "support": { + "issues": "https://github.com/php-fig/event-dispatcher/issues", + "source": "https://github.com/php-fig/event-dispatcher/tree/1.0.0" + }, + "time": "2019-01-08T18:20:26+00:00" + }, + { + "name": "psr/http-client", + "version": "1.0.3", + "source": { + "type": "git", + "url": "https://github.com/php-fig/http-client.git", + "reference": "bb5906edc1c324c9a05aa0873d40117941e5fa90" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/http-client/zipball/bb5906edc1c324c9a05aa0873d40117941e5fa90", + "reference": "bb5906edc1c324c9a05aa0873d40117941e5fa90", + "shasum": "" + }, + "require": { + "php": "^7.0 || ^8.0", + "psr/http-message": "^1.0 || ^2.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\Http\\Client\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common interface for HTTP clients", + "homepage": "https://github.com/php-fig/http-client", + "keywords": [ + "http", + "http-client", + "psr", + "psr-18" + ], + "support": { + "source": "https://github.com/php-fig/http-client" + }, + "time": "2023-09-23T14:17:50+00:00" + }, + { + "name": "psr/http-factory", + "version": "1.1.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/http-factory.git", + "reference": "2b4765fddfe3b508ac62f829e852b1501d3f6e8a" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/http-factory/zipball/2b4765fddfe3b508ac62f829e852b1501d3f6e8a", + "reference": "2b4765fddfe3b508ac62f829e852b1501d3f6e8a", + "shasum": "" + }, + "require": { + "php": ">=7.1", + "psr/http-message": "^1.0 || ^2.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "1.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\Http\\Message\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "PSR-17: Common interfaces for PSR-7 HTTP message factories", + "keywords": [ + "factory", + "http", + "message", + "psr", + "psr-17", + "psr-7", + "request", + "response" + ], + "support": { + "source": "https://github.com/php-fig/http-factory" + }, + "time": "2024-04-15T12:06:14+00:00" + }, + { + "name": "psr/http-message", + "version": "2.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/http-message.git", + "reference": "402d35bcb92c70c026d1a6a9883f06b2ead23d71" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/http-message/zipball/402d35bcb92c70c026d1a6a9883f06b2ead23d71", + "reference": "402d35bcb92c70c026d1a6a9883f06b2ead23d71", + "shasum": "" + }, + "require": { + "php": "^7.2 || ^8.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "2.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\Http\\Message\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + 
], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common interface for HTTP messages", + "homepage": "https://github.com/php-fig/http-message", + "keywords": [ + "http", + "http-message", + "psr", + "psr-7", + "request", + "response" + ], + "support": { + "source": "https://github.com/php-fig/http-message/tree/2.0" + }, + "time": "2023-04-04T09:54:51+00:00" + }, + { + "name": "psr/log", + "version": "3.0.2", + "source": { + "type": "git", + "url": "https://github.com/php-fig/log.git", + "reference": "f16e1d5863e37f8d8c2a01719f5b34baa2b714d3" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/log/zipball/f16e1d5863e37f8d8c2a01719f5b34baa2b714d3", + "reference": "f16e1d5863e37f8d8c2a01719f5b34baa2b714d3", + "shasum": "" + }, + "require": { + "php": ">=8.0.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "3.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\Log\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common interface for logging libraries", + "homepage": "https://github.com/php-fig/log", + "keywords": [ + "log", + "psr", + "psr-3" + ], + "support": { + "source": "https://github.com/php-fig/log/tree/3.0.2" + }, + "time": "2024-09-11T13:17:53+00:00" + }, + { + "name": "psr/simple-cache", + "version": "3.0.0", + "source": { + "type": "git", + "url": "https://github.com/php-fig/simple-cache.git", + "reference": "764e0b3939f5ca87cb904f570ef9be2d78a07865" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/php-fig/simple-cache/zipball/764e0b3939f5ca87cb904f570ef9be2d78a07865", + "reference": "764e0b3939f5ca87cb904f570ef9be2d78a07865", + "shasum": "" + }, + "require": { + "php": ">=8.0.0" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "3.0.x-dev" + } + }, + "autoload": { + "psr-4": { + "Psr\\SimpleCache\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "PHP-FIG", + "homepage": "https://www.php-fig.org/" + } + ], + "description": "Common interfaces for simple caching", + "keywords": [ + "cache", + "caching", + "psr", + "psr-16", + "simple-cache" + ], + "support": { + "source": "https://github.com/php-fig/simple-cache/tree/3.0.0" + }, + "time": "2021-10-29T13:26:27+00:00" + }, + { + "name": "ralouphie/getallheaders", + "version": "3.0.3", + "source": { + "type": "git", + "url": "https://github.com/ralouphie/getallheaders.git", + "reference": "120b605dfeb996808c31b6477290a714d356e822" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ralouphie/getallheaders/zipball/120b605dfeb996808c31b6477290a714d356e822", + "reference": "120b605dfeb996808c31b6477290a714d356e822", + "shasum": "" + }, + "require": { + "php": ">=5.6" + }, + "require-dev": { + "php-coveralls/php-coveralls": "^2.1", + "phpunit/phpunit": "^5 || ^6.5" + }, + "type": "library", + "autoload": { + "files": [ + "src/getallheaders.php" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ralph Khattar", + "email": "ralph.khattar@gmail.com" + } + ], + "description": "A polyfill for getallheaders.", + "support": { + "issues": "https://github.com/ralouphie/getallheaders/issues", + "source": 
"https://github.com/ralouphie/getallheaders/tree/develop" + }, + "time": "2019-03-08T08:55:37+00:00" + }, + { + "name": "ramsey/collection", + "version": "2.1.1", + "source": { + "type": "git", + "url": "https://github.com/ramsey/collection.git", + "reference": "344572933ad0181accbf4ba763e85a0306a8c5e2" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ramsey/collection/zipball/344572933ad0181accbf4ba763e85a0306a8c5e2", + "reference": "344572933ad0181accbf4ba763e85a0306a8c5e2", + "shasum": "" + }, + "require": { + "php": "^8.1" + }, + "require-dev": { + "captainhook/plugin-composer": "^5.3", + "ergebnis/composer-normalize": "^2.45", + "fakerphp/faker": "^1.24", + "hamcrest/hamcrest-php": "^2.0", + "jangregor/phpstan-prophecy": "^2.1", + "mockery/mockery": "^1.6", + "php-parallel-lint/php-console-highlighter": "^1.0", + "php-parallel-lint/php-parallel-lint": "^1.4", + "phpspec/prophecy-phpunit": "^2.3", + "phpstan/extension-installer": "^1.4", + "phpstan/phpstan": "^2.1", + "phpstan/phpstan-mockery": "^2.0", + "phpstan/phpstan-phpunit": "^2.0", + "phpunit/phpunit": "^10.5", + "ramsey/coding-standard": "^2.3", + "ramsey/conventional-commits": "^1.6", + "roave/security-advisories": "dev-latest" + }, + "type": "library", + "extra": { + "captainhook": { + "force-install": true + }, + "ramsey/conventional-commits": { + "configFile": "conventional-commits.json" + } + }, + "autoload": { + "psr-4": { + "Ramsey\\Collection\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ben Ramsey", + "email": "ben@benramsey.com", + "homepage": "https://benramsey.com" + } + ], + "description": "A PHP library for representing and manipulating collections.", + "keywords": [ + "array", + "collection", + "hash", + "map", + "queue", + "set" + ], + "support": { + "issues": "https://github.com/ramsey/collection/issues", + "source": "https://github.com/ramsey/collection/tree/2.1.1" + }, + "time": "2025-03-22T05:38:12+00:00" + }, + { + "name": "ramsey/uuid", + "version": "4.9.2", + "source": { + "type": "git", + "url": "https://github.com/ramsey/uuid.git", + "reference": "8429c78ca35a09f27565311b98101e2826affde0" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/ramsey/uuid/zipball/8429c78ca35a09f27565311b98101e2826affde0", + "reference": "8429c78ca35a09f27565311b98101e2826affde0", + "shasum": "" + }, + "require": { + "brick/math": "^0.8.16 || ^0.9 || ^0.10 || ^0.11 || ^0.12 || ^0.13 || ^0.14", + "php": "^8.0", + "ramsey/collection": "^1.2 || ^2.0" + }, + "replace": { + "rhumsaa/uuid": "self.version" + }, + "require-dev": { + "captainhook/captainhook": "^5.25", + "captainhook/plugin-composer": "^5.3", + "dealerdirect/phpcodesniffer-composer-installer": "^1.0", + "ergebnis/composer-normalize": "^2.47", + "mockery/mockery": "^1.6", + "paragonie/random-lib": "^2", + "php-mock/php-mock": "^2.6", + "php-mock/php-mock-mockery": "^1.5", + "php-parallel-lint/php-parallel-lint": "^1.4.0", + "phpbench/phpbench": "^1.2.14", + "phpstan/extension-installer": "^1.4", + "phpstan/phpstan": "^2.1", + "phpstan/phpstan-mockery": "^2.0", + "phpstan/phpstan-phpunit": "^2.0", + "phpunit/phpunit": "^9.6", + "slevomat/coding-standard": "^8.18", + "squizlabs/php_codesniffer": "^3.13" + }, + "suggest": { + "ext-bcmath": "Enables faster math with arbitrary-precision integers using BCMath.", + "ext-gmp": "Enables faster math with arbitrary-precision integers using GMP.", + "ext-uuid": "Enables the use of 
PeclUuidTimeGenerator and PeclUuidRandomGenerator.", + "paragonie/random-lib": "Provides RandomLib for use with the RandomLibAdapter", + "ramsey/uuid-doctrine": "Allows the use of Ramsey\\Uuid\\Uuid as Doctrine field type." + }, + "type": "library", + "extra": { + "captainhook": { + "force-install": true + } + }, + "autoload": { + "files": [ + "src/functions.php" + ], + "psr-4": { + "Ramsey\\Uuid\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "description": "A PHP library for generating and working with universally unique identifiers (UUIDs).", + "keywords": [ + "guid", + "identifier", + "uuid" + ], + "support": { + "issues": "https://github.com/ramsey/uuid/issues", + "source": "https://github.com/ramsey/uuid/tree/4.9.2" + }, + "time": "2025-12-14T04:43:48+00:00" + }, + { + "name": "symfony/clock", + "version": "v8.0.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/clock.git", + "reference": "832119f9b8dbc6c8e6f65f30c5969eca1e88764f" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/clock/zipball/832119f9b8dbc6c8e6f65f30c5969eca1e88764f", + "reference": "832119f9b8dbc6c8e6f65f30c5969eca1e88764f", + "shasum": "" + }, + "require": { + "php": ">=8.4", + "psr/clock": "^1.0" + }, + "provide": { + "psr/clock-implementation": "1.0" + }, + "type": "library", + "autoload": { + "files": [ + "Resources/now.php" + ], + "psr-4": { + "Symfony\\Component\\Clock\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Decouples applications from the system clock", + "homepage": "https://symfony.com", + "keywords": [ + "clock", + "psr20", + "time" + ], + "support": { + "source": "https://github.com/symfony/clock/tree/v8.0.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-11-12T15:46:48+00:00" + }, + { + "name": "symfony/console", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/console.git", + "reference": "41e38717ac1dd7a46b6bda7d6a82af2d98a78894" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/console/zipball/41e38717ac1dd7a46b6bda7d6a82af2d98a78894", + "reference": "41e38717ac1dd7a46b6bda7d6a82af2d98a78894", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "symfony/deprecation-contracts": "^2.5|^3", + "symfony/polyfill-mbstring": "~1.0", + "symfony/service-contracts": "^2.5|^3", + "symfony/string": "^7.2|^8.0" + }, + "conflict": { + "symfony/dependency-injection": "<6.4", + "symfony/dotenv": "<6.4", + "symfony/event-dispatcher": "<6.4", + "symfony/lock": "<6.4", + "symfony/process": "<6.4" + }, + "provide": { + "psr/log-implementation": "1.0|2.0|3.0" + }, + "require-dev": { + "psr/log": "^1|^2|^3", + "symfony/config": "^6.4|^7.0|^8.0", + "symfony/dependency-injection": "^6.4|^7.0|^8.0", + "symfony/event-dispatcher": "^6.4|^7.0|^8.0", + "symfony/http-foundation": "^6.4|^7.0|^8.0", + "symfony/http-kernel": "^6.4|^7.0|^8.0", + 
"symfony/lock": "^6.4|^7.0|^8.0", + "symfony/messenger": "^6.4|^7.0|^8.0", + "symfony/process": "^6.4|^7.0|^8.0", + "symfony/stopwatch": "^6.4|^7.0|^8.0", + "symfony/var-dumper": "^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Console\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Eases the creation of beautiful and testable command line interfaces", + "homepage": "https://symfony.com", + "keywords": [ + "cli", + "command-line", + "console", + "terminal" + ], + "support": { + "source": "https://github.com/symfony/console/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-13T11:36:38+00:00" + }, + { + "name": "symfony/css-selector", + "version": "v8.0.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/css-selector.git", + "reference": "6225bd458c53ecdee056214cb4a2ffaf58bd592b" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/css-selector/zipball/6225bd458c53ecdee056214cb4a2ffaf58bd592b", + "reference": "6225bd458c53ecdee056214cb4a2ffaf58bd592b", + "shasum": "" + }, + "require": { + "php": ">=8.4" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\CssSelector\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Jean-François Simon", + "email": "jeanfrancois.simon@sensiolabs.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Converts CSS selectors to XPath expressions", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/css-selector/tree/v8.0.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-10-30T14:17:19+00:00" + }, + { + "name": "symfony/deprecation-contracts", + "version": "v3.6.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/deprecation-contracts.git", + "reference": "63afe740e99a13ba87ec199bb07bbdee937a5b62" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/deprecation-contracts/zipball/63afe740e99a13ba87ec199bb07bbdee937a5b62", + "reference": "63afe740e99a13ba87ec199bb07bbdee937a5b62", + "shasum": "" + }, + "require": { + "php": ">=8.1" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/contracts", + "name": "symfony/contracts" + }, + "branch-alias": { + "dev-main": "3.6-dev" + } + }, + "autoload": { + "files": [ + "function.php" + ] + }, + "notification-url": 
"https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "A generic function and convention to trigger deprecation notices", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/deprecation-contracts/tree/v3.6.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-25T14:21:43+00:00" + }, + { + "name": "symfony/error-handler", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/error-handler.git", + "reference": "8da531f364ddfee53e36092a7eebbbd0b775f6b8" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/error-handler/zipball/8da531f364ddfee53e36092a7eebbbd0b775f6b8", + "reference": "8da531f364ddfee53e36092a7eebbbd0b775f6b8", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "psr/log": "^1|^2|^3", + "symfony/polyfill-php85": "^1.32", + "symfony/var-dumper": "^6.4|^7.0|^8.0" + }, + "conflict": { + "symfony/deprecation-contracts": "<2.5", + "symfony/http-kernel": "<6.4" + }, + "require-dev": { + "symfony/console": "^6.4|^7.0|^8.0", + "symfony/deprecation-contracts": "^2.5|^3", + "symfony/http-kernel": "^6.4|^7.0|^8.0", + "symfony/serializer": "^6.4|^7.0|^8.0", + "symfony/webpack-encore-bundle": "^1.0|^2.0" + }, + "bin": [ + "Resources/bin/patch-type-declarations" + ], + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\ErrorHandler\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides tools to manage errors and ease debugging PHP code", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/error-handler/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-20T16:42:42+00:00" + }, + { + "name": "symfony/event-dispatcher", + "version": "v8.0.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/event-dispatcher.git", + "reference": "99301401da182b6cfaa4700dbe9987bb75474b47" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/event-dispatcher/zipball/99301401da182b6cfaa4700dbe9987bb75474b47", + "reference": "99301401da182b6cfaa4700dbe9987bb75474b47", + "shasum": "" + }, + "require": { + "php": ">=8.4", + "symfony/event-dispatcher-contracts": "^2.5|^3" + }, + "conflict": { + "symfony/security-http": "<7.4", + "symfony/service-contracts": "<2.5" + }, + "provide": { + "psr/event-dispatcher-implementation": "1.0", + "symfony/event-dispatcher-implementation": "2.0|3.0" + }, + "require-dev": { + "psr/log": "^1|^2|^3", + "symfony/config": "^7.4|^8.0", + 
"symfony/dependency-injection": "^7.4|^8.0", + "symfony/error-handler": "^7.4|^8.0", + "symfony/expression-language": "^7.4|^8.0", + "symfony/framework-bundle": "^7.4|^8.0", + "symfony/http-foundation": "^7.4|^8.0", + "symfony/service-contracts": "^2.5|^3", + "symfony/stopwatch": "^7.4|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\EventDispatcher\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides tools that allow your application components to communicate with each other by dispatching events and listening to them", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/event-dispatcher/tree/v8.0.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-05T11:45:55+00:00" + }, + { + "name": "symfony/event-dispatcher-contracts", + "version": "v3.6.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/event-dispatcher-contracts.git", + "reference": "59eb412e93815df44f05f342958efa9f46b1e586" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/event-dispatcher-contracts/zipball/59eb412e93815df44f05f342958efa9f46b1e586", + "reference": "59eb412e93815df44f05f342958efa9f46b1e586", + "shasum": "" + }, + "require": { + "php": ">=8.1", + "psr/event-dispatcher": "^1" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/contracts", + "name": "symfony/contracts" + }, + "branch-alias": { + "dev-main": "3.6-dev" + } + }, + "autoload": { + "psr-4": { + "Symfony\\Contracts\\EventDispatcher\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Generic abstractions related to dispatching event", + "homepage": "https://symfony.com", + "keywords": [ + "abstractions", + "contracts", + "decoupling", + "interfaces", + "interoperability", + "standards" + ], + "support": { + "source": "https://github.com/symfony/event-dispatcher-contracts/tree/v3.6.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-25T14:21:43+00:00" + }, + { + "name": "symfony/finder", + "version": "v7.4.5", + "source": { + "type": "git", + "url": "https://github.com/symfony/finder.git", + "reference": "ad4daa7c38668dcb031e63bc99ea9bd42196a2cb" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/finder/zipball/ad4daa7c38668dcb031e63bc99ea9bd42196a2cb", + "reference": "ad4daa7c38668dcb031e63bc99ea9bd42196a2cb", + "shasum": "" + }, + "require": { + "php": ">=8.2" + }, + "require-dev": { + "symfony/filesystem": 
"^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Finder\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Finds files and directories via an intuitive fluent interface", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/finder/tree/v7.4.5" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-26T15:07:59+00:00" + }, + { + "name": "symfony/http-foundation", + "version": "v7.4.5", + "source": { + "type": "git", + "url": "https://github.com/symfony/http-foundation.git", + "reference": "446d0db2b1f21575f1284b74533e425096abdfb6" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/http-foundation/zipball/446d0db2b1f21575f1284b74533e425096abdfb6", + "reference": "446d0db2b1f21575f1284b74533e425096abdfb6", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "symfony/deprecation-contracts": "^2.5|^3", + "symfony/polyfill-mbstring": "^1.1" + }, + "conflict": { + "doctrine/dbal": "<3.6", + "symfony/cache": "<6.4.12|>=7.0,<7.1.5" + }, + "require-dev": { + "doctrine/dbal": "^3.6|^4", + "predis/predis": "^1.1|^2.0", + "symfony/cache": "^6.4.12|^7.1.5|^8.0", + "symfony/clock": "^6.4|^7.0|^8.0", + "symfony/dependency-injection": "^6.4|^7.0|^8.0", + "symfony/expression-language": "^6.4|^7.0|^8.0", + "symfony/http-kernel": "^6.4|^7.0|^8.0", + "symfony/mime": "^6.4|^7.0|^8.0", + "symfony/rate-limiter": "^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\HttpFoundation\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Defines an object-oriented layer for the HTTP specification", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/http-foundation/tree/v7.4.5" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-27T16:16:02+00:00" + }, + { + "name": "symfony/http-kernel", + "version": "v7.4.5", + "source": { + "type": "git", + "url": "https://github.com/symfony/http-kernel.git", + "reference": "229eda477017f92bd2ce7615d06222ec0c19e82a" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/http-kernel/zipball/229eda477017f92bd2ce7615d06222ec0c19e82a", + "reference": "229eda477017f92bd2ce7615d06222ec0c19e82a", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "psr/log": "^1|^2|^3", + 
"symfony/deprecation-contracts": "^2.5|^3", + "symfony/error-handler": "^6.4|^7.0|^8.0", + "symfony/event-dispatcher": "^7.3|^8.0", + "symfony/http-foundation": "^7.4|^8.0", + "symfony/polyfill-ctype": "^1.8" + }, + "conflict": { + "symfony/browser-kit": "<6.4", + "symfony/cache": "<6.4", + "symfony/config": "<6.4", + "symfony/console": "<6.4", + "symfony/dependency-injection": "<6.4", + "symfony/doctrine-bridge": "<6.4", + "symfony/flex": "<2.10", + "symfony/form": "<6.4", + "symfony/http-client": "<6.4", + "symfony/http-client-contracts": "<2.5", + "symfony/mailer": "<6.4", + "symfony/messenger": "<6.4", + "symfony/translation": "<6.4", + "symfony/translation-contracts": "<2.5", + "symfony/twig-bridge": "<6.4", + "symfony/validator": "<6.4", + "symfony/var-dumper": "<6.4", + "twig/twig": "<3.12" + }, + "provide": { + "psr/log-implementation": "1.0|2.0|3.0" + }, + "require-dev": { + "psr/cache": "^1.0|^2.0|^3.0", + "symfony/browser-kit": "^6.4|^7.0|^8.0", + "symfony/clock": "^6.4|^7.0|^8.0", + "symfony/config": "^6.4|^7.0|^8.0", + "symfony/console": "^6.4|^7.0|^8.0", + "symfony/css-selector": "^6.4|^7.0|^8.0", + "symfony/dependency-injection": "^6.4|^7.0|^8.0", + "symfony/dom-crawler": "^6.4|^7.0|^8.0", + "symfony/expression-language": "^6.4|^7.0|^8.0", + "symfony/finder": "^6.4|^7.0|^8.0", + "symfony/http-client-contracts": "^2.5|^3", + "symfony/process": "^6.4|^7.0|^8.0", + "symfony/property-access": "^7.1|^8.0", + "symfony/routing": "^6.4|^7.0|^8.0", + "symfony/serializer": "^7.1|^8.0", + "symfony/stopwatch": "^6.4|^7.0|^8.0", + "symfony/translation": "^6.4|^7.0|^8.0", + "symfony/translation-contracts": "^2.5|^3", + "symfony/uid": "^6.4|^7.0|^8.0", + "symfony/validator": "^6.4|^7.0|^8.0", + "symfony/var-dumper": "^6.4|^7.0|^8.0", + "symfony/var-exporter": "^6.4|^7.0|^8.0", + "twig/twig": "^3.12" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\HttpKernel\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides a structured process for converting a Request into a Response", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/http-kernel/tree/v7.4.5" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-28T10:33:42+00:00" + }, + { + "name": "symfony/mailer", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/mailer.git", + "reference": "7b750074c40c694ceb34cb926d6dffee231c5cd6" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/mailer/zipball/7b750074c40c694ceb34cb926d6dffee231c5cd6", + "reference": "7b750074c40c694ceb34cb926d6dffee231c5cd6", + "shasum": "" + }, + "require": { + "egulias/email-validator": "^2.1.10|^3|^4", + "php": ">=8.2", + "psr/event-dispatcher": "^1", + "psr/log": "^1|^2|^3", + "symfony/event-dispatcher": "^6.4|^7.0|^8.0", + "symfony/mime": "^7.2|^8.0", + "symfony/service-contracts": "^2.5|^3" + }, + "conflict": { + 
"symfony/http-client-contracts": "<2.5", + "symfony/http-kernel": "<6.4", + "symfony/messenger": "<6.4", + "symfony/mime": "<6.4", + "symfony/twig-bridge": "<6.4" + }, + "require-dev": { + "symfony/console": "^6.4|^7.0|^8.0", + "symfony/http-client": "^6.4|^7.0|^8.0", + "symfony/messenger": "^6.4|^7.0|^8.0", + "symfony/twig-bridge": "^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Mailer\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Helps sending emails", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/mailer/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-08T08:25:11+00:00" + }, + { + "name": "symfony/mime", + "version": "v7.4.5", + "source": { + "type": "git", + "url": "https://github.com/symfony/mime.git", + "reference": "b18c7e6e9eee1e19958138df10412f3c4c316148" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/mime/zipball/b18c7e6e9eee1e19958138df10412f3c4c316148", + "reference": "b18c7e6e9eee1e19958138df10412f3c4c316148", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "symfony/deprecation-contracts": "^2.5|^3", + "symfony/polyfill-intl-idn": "^1.10", + "symfony/polyfill-mbstring": "^1.0" + }, + "conflict": { + "egulias/email-validator": "~3.0.0", + "phpdocumentor/reflection-docblock": "<5.2|>=6", + "phpdocumentor/type-resolver": "<1.5.1", + "symfony/mailer": "<6.4", + "symfony/serializer": "<6.4.3|>7.0,<7.0.3" + }, + "require-dev": { + "egulias/email-validator": "^2.1.10|^3.1|^4", + "league/html-to-markdown": "^5.0", + "phpdocumentor/reflection-docblock": "^5.2", + "symfony/dependency-injection": "^6.4|^7.0|^8.0", + "symfony/process": "^6.4|^7.0|^8.0", + "symfony/property-access": "^6.4|^7.0|^8.0", + "symfony/property-info": "^6.4|^7.0|^8.0", + "symfony/serializer": "^6.4.3|^7.0.3|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Mime\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Allows manipulating MIME messages", + "homepage": "https://symfony.com", + "keywords": [ + "mime", + "mime-type" + ], + "support": { + "source": "https://github.com/symfony/mime/tree/v7.4.5" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-27T08:59:58+00:00" + }, + { + "name": "symfony/polyfill-ctype", + "version": "v1.33.0", + "source": { + 
"type": "git", + "url": "https://github.com/symfony/polyfill-ctype.git", + "reference": "a3cc8b044a6ea513310cbd48ef7333b384945638" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-ctype/zipball/a3cc8b044a6ea513310cbd48ef7333b384945638", + "reference": "a3cc8b044a6ea513310cbd48ef7333b384945638", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "provide": { + "ext-ctype": "*" + }, + "suggest": { + "ext-ctype": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Ctype\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Gert de Pagter", + "email": "BackEndTea@gmail.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for ctype functions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "ctype", + "polyfill", + "portable" + ], + "support": { + "source": "https://github.com/symfony/polyfill-ctype/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-09T11:45:10+00:00" + }, + { + "name": "symfony/polyfill-intl-grapheme", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-intl-grapheme.git", + "reference": "380872130d3a5dd3ace2f4010d95125fde5d5c70" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-intl-grapheme/zipball/380872130d3a5dd3ace2f4010d95125fde5d5c70", + "reference": "380872130d3a5dd3ace2f4010d95125fde5d5c70", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "suggest": { + "ext-intl": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Intl\\Grapheme\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for intl's grapheme_* functions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "grapheme", + "intl", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-intl-grapheme/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-06-27T09:58:17+00:00" + }, + { + "name": "symfony/polyfill-intl-idn", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-intl-idn.git", + "reference": 
"9614ac4d8061dc257ecc64cba1b140873dce8ad3" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-intl-idn/zipball/9614ac4d8061dc257ecc64cba1b140873dce8ad3", + "reference": "9614ac4d8061dc257ecc64cba1b140873dce8ad3", + "shasum": "" + }, + "require": { + "php": ">=7.2", + "symfony/polyfill-intl-normalizer": "^1.10" + }, + "suggest": { + "ext-intl": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Intl\\Idn\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Laurent Bassin", + "email": "laurent@bassin.info" + }, + { + "name": "Trevor Rowbotham", + "email": "trevor.rowbotham@pm.me" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for intl's idn_to_ascii and idn_to_utf8 functions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "idn", + "intl", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-intl-idn/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-10T14:38:51+00:00" + }, + { + "name": "symfony/polyfill-intl-normalizer", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-intl-normalizer.git", + "reference": "3833d7255cc303546435cb650316bff708a1c75c" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-intl-normalizer/zipball/3833d7255cc303546435cb650316bff708a1c75c", + "reference": "3833d7255cc303546435cb650316bff708a1c75c", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "suggest": { + "ext-intl": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Intl\\Normalizer\\": "" + }, + "classmap": [ + "Resources/stubs" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for intl's Normalizer class and related functions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "intl", + "normalizer", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-intl-normalizer/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-09T11:45:10+00:00" + }, + { + "name": "symfony/polyfill-mbstring", + "version": 
"v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-mbstring.git", + "reference": "6d857f4d76bd4b343eac26d6b539585d2bc56493" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-mbstring/zipball/6d857f4d76bd4b343eac26d6b539585d2bc56493", + "reference": "6d857f4d76bd4b343eac26d6b539585d2bc56493", + "shasum": "" + }, + "require": { + "ext-iconv": "*", + "php": ">=7.2" + }, + "provide": { + "ext-mbstring": "*" + }, + "suggest": { + "ext-mbstring": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Mbstring\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for the Mbstring extension", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "mbstring", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-mbstring/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-12-23T08:48:59+00:00" + }, + { + "name": "symfony/polyfill-php80", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-php80.git", + "reference": "0cc9dd0f17f61d8131e7df6b84bd344899fe2608" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-php80/zipball/0cc9dd0f17f61d8131e7df6b84bd344899fe2608", + "reference": "0cc9dd0f17f61d8131e7df6b84bd344899fe2608", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Php80\\": "" + }, + "classmap": [ + "Resources/stubs" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Ion Bazan", + "email": "ion.bazan@gmail.com" + }, + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill backporting some PHP 8.0+ features to lower PHP versions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-php80/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-01-02T08:10:11+00:00" + }, + { + "name": "symfony/polyfill-php83", + "version": "v1.33.0", + "source": { + "type": 
"git", + "url": "https://github.com/symfony/polyfill-php83.git", + "reference": "17f6f9a6b1735c0f163024d959f700cfbc5155e5" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-php83/zipball/17f6f9a6b1735c0f163024d959f700cfbc5155e5", + "reference": "17f6f9a6b1735c0f163024d959f700cfbc5155e5", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Php83\\": "" + }, + "classmap": [ + "Resources/stubs" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill backporting some PHP 8.3+ features to lower PHP versions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-php83/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-07-08T02:45:35+00:00" + }, + { + "name": "symfony/polyfill-php84", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-php84.git", + "reference": "d8ced4d875142b6a7426000426b8abc631d6b191" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-php84/zipball/d8ced4d875142b6a7426000426b8abc631d6b191", + "reference": "d8ced4d875142b6a7426000426b8abc631d6b191", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Php84\\": "" + }, + "classmap": [ + "Resources/stubs" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill backporting some PHP 8.4+ features to lower PHP versions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-php84/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-06-24T13:30:11+00:00" + }, + { + "name": "symfony/polyfill-php85", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-php85.git", + "reference": "d4e5fcd4ab3d998ab16c0db48e6cbb9a01993f91" + }, + "dist": { + "type": "zip", + "url": 
"https://api.github.com/repos/symfony/polyfill-php85/zipball/d4e5fcd4ab3d998ab16c0db48e6cbb9a01993f91", + "reference": "d4e5fcd4ab3d998ab16c0db48e6cbb9a01993f91", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Php85\\": "" + }, + "classmap": [ + "Resources/stubs" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill backporting some PHP 8.5+ features to lower PHP versions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "polyfill", + "portable", + "shim" + ], + "support": { + "source": "https://github.com/symfony/polyfill-php85/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-06-23T16:12:55+00:00" + }, + { + "name": "symfony/polyfill-uuid", + "version": "v1.33.0", + "source": { + "type": "git", + "url": "https://github.com/symfony/polyfill-uuid.git", + "reference": "21533be36c24be3f4b1669c4725c7d1d2bab4ae2" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/polyfill-uuid/zipball/21533be36c24be3f4b1669c4725c7d1d2bab4ae2", + "reference": "21533be36c24be3f4b1669c4725c7d1d2bab4ae2", + "shasum": "" + }, + "require": { + "php": ">=7.2" + }, + "provide": { + "ext-uuid": "*" + }, + "suggest": { + "ext-uuid": "For best performance" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/polyfill", + "name": "symfony/polyfill" + } + }, + "autoload": { + "files": [ + "bootstrap.php" + ], + "psr-4": { + "Symfony\\Polyfill\\Uuid\\": "" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Grégoire Pineau", + "email": "lyrixx@lyrixx.info" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Symfony polyfill for uuid functions", + "homepage": "https://symfony.com", + "keywords": [ + "compatibility", + "polyfill", + "portable", + "uuid" + ], + "support": { + "source": "https://github.com/symfony/polyfill-uuid/tree/v1.33.0" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2024-09-09T11:45:10+00:00" + }, + { + "name": "symfony/process", + "version": "v7.4.5", + "source": { + "type": "git", + "url": "https://github.com/symfony/process.git", + "reference": "608476f4604102976d687c483ac63a79ba18cc97" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/process/zipball/608476f4604102976d687c483ac63a79ba18cc97", + "reference": "608476f4604102976d687c483ac63a79ba18cc97", + "shasum": "" + }, + 
"require": { + "php": ">=8.2" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Process\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Executes commands in sub-processes", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/process/tree/v7.4.5" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-26T15:07:59+00:00" + }, + { + "name": "symfony/psr-http-message-bridge", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/psr-http-message-bridge.git", + "reference": "929ffe10bbfbb92e711ac3818d416f9daffee067" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/psr-http-message-bridge/zipball/929ffe10bbfbb92e711ac3818d416f9daffee067", + "reference": "929ffe10bbfbb92e711ac3818d416f9daffee067", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "psr/http-message": "^1.0|^2.0", + "symfony/http-foundation": "^6.4|^7.0|^8.0" + }, + "conflict": { + "php-http/discovery": "<1.15", + "symfony/http-kernel": "<6.4" + }, + "require-dev": { + "nyholm/psr7": "^1.1", + "php-http/discovery": "^1.15", + "psr/log": "^1.1.4|^2|^3", + "symfony/browser-kit": "^6.4|^7.0|^8.0", + "symfony/config": "^6.4|^7.0|^8.0", + "symfony/event-dispatcher": "^6.4|^7.0|^8.0", + "symfony/framework-bundle": "^6.4.13|^7.1.6|^8.0", + "symfony/http-kernel": "^6.4.13|^7.1.6|^8.0", + "symfony/runtime": "^6.4.13|^7.1.6|^8.0" + }, + "type": "symfony-bridge", + "autoload": { + "psr-4": { + "Symfony\\Bridge\\PsrHttpMessage\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "PSR HTTP message bridge", + "homepage": "https://symfony.com", + "keywords": [ + "http", + "http-message", + "psr-17", + "psr-7" + ], + "support": { + "source": "https://github.com/symfony/psr-http-message-bridge/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-03T23:30:35+00:00" + }, + { + "name": "symfony/routing", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/routing.git", + "reference": "0798827fe2c79caeed41d70b680c2c3507d10147" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/routing/zipball/0798827fe2c79caeed41d70b680c2c3507d10147", + "reference": "0798827fe2c79caeed41d70b680c2c3507d10147", + "shasum": "" + }, + "require": { + "php": ">=8.2", + 
"symfony/deprecation-contracts": "^2.5|^3" + }, + "conflict": { + "symfony/config": "<6.4", + "symfony/dependency-injection": "<6.4", + "symfony/yaml": "<6.4" + }, + "require-dev": { + "psr/log": "^1|^2|^3", + "symfony/config": "^6.4|^7.0|^8.0", + "symfony/dependency-injection": "^6.4|^7.0|^8.0", + "symfony/expression-language": "^6.4|^7.0|^8.0", + "symfony/http-foundation": "^6.4|^7.0|^8.0", + "symfony/yaml": "^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Routing\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Maps an HTTP request to a set of configuration variables", + "homepage": "https://symfony.com", + "keywords": [ + "router", + "routing", + "uri", + "url" + ], + "support": { + "source": "https://github.com/symfony/routing/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-12T12:19:02+00:00" + }, + { + "name": "symfony/service-contracts", + "version": "v3.6.1", + "source": { + "type": "git", + "url": "https://github.com/symfony/service-contracts.git", + "reference": "45112560a3ba2d715666a509a0bc9521d10b6c43" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/service-contracts/zipball/45112560a3ba2d715666a509a0bc9521d10b6c43", + "reference": "45112560a3ba2d715666a509a0bc9521d10b6c43", + "shasum": "" + }, + "require": { + "php": ">=8.1", + "psr/container": "^1.1|^2.0", + "symfony/deprecation-contracts": "^2.5|^3" + }, + "conflict": { + "ext-psr": "<1.1|>=2" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/contracts", + "name": "symfony/contracts" + }, + "branch-alias": { + "dev-main": "3.6-dev" + } + }, + "autoload": { + "psr-4": { + "Symfony\\Contracts\\Service\\": "" + }, + "exclude-from-classmap": [ + "/Test/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Generic abstractions related to writing services", + "homepage": "https://symfony.com", + "keywords": [ + "abstractions", + "contracts", + "decoupling", + "interfaces", + "interoperability", + "standards" + ], + "support": { + "source": "https://github.com/symfony/service-contracts/tree/v3.6.1" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-07-15T11:30:57+00:00" + }, + { + "name": "symfony/string", + "version": "v8.0.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/string.git", + "reference": "758b372d6882506821ed666032e43020c4f57194" + }, + "dist": { + 
"type": "zip", + "url": "https://api.github.com/repos/symfony/string/zipball/758b372d6882506821ed666032e43020c4f57194", + "reference": "758b372d6882506821ed666032e43020c4f57194", + "shasum": "" + }, + "require": { + "php": ">=8.4", + "symfony/polyfill-ctype": "^1.8", + "symfony/polyfill-intl-grapheme": "^1.33", + "symfony/polyfill-intl-normalizer": "^1.0", + "symfony/polyfill-mbstring": "^1.0" + }, + "conflict": { + "symfony/translation-contracts": "<2.5" + }, + "require-dev": { + "symfony/emoji": "^7.4|^8.0", + "symfony/http-client": "^7.4|^8.0", + "symfony/intl": "^7.4|^8.0", + "symfony/translation-contracts": "^2.5|^3.0", + "symfony/var-exporter": "^7.4|^8.0" + }, + "type": "library", + "autoload": { + "files": [ + "Resources/functions.php" + ], + "psr-4": { + "Symfony\\Component\\String\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides an object-oriented API to strings and deals with bytes, UTF-8 code points and grapheme clusters in a unified way", + "homepage": "https://symfony.com", + "keywords": [ + "grapheme", + "i18n", + "string", + "unicode", + "utf-8", + "utf8" + ], + "support": { + "source": "https://github.com/symfony/string/tree/v8.0.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-12T12:37:40+00:00" + }, + { + "name": "symfony/translation", + "version": "v8.0.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/translation.git", + "reference": "db70c8ce7db74fd2da7b1d268db46b2a8ce32c10" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/translation/zipball/db70c8ce7db74fd2da7b1d268db46b2a8ce32c10", + "reference": "db70c8ce7db74fd2da7b1d268db46b2a8ce32c10", + "shasum": "" + }, + "require": { + "php": ">=8.4", + "symfony/polyfill-mbstring": "^1.0", + "symfony/translation-contracts": "^3.6.1" + }, + "conflict": { + "nikic/php-parser": "<5.0", + "symfony/http-client-contracts": "<2.5", + "symfony/service-contracts": "<2.5" + }, + "provide": { + "symfony/translation-implementation": "2.3|3.0" + }, + "require-dev": { + "nikic/php-parser": "^5.0", + "psr/log": "^1|^2|^3", + "symfony/config": "^7.4|^8.0", + "symfony/console": "^7.4|^8.0", + "symfony/dependency-injection": "^7.4|^8.0", + "symfony/finder": "^7.4|^8.0", + "symfony/http-client-contracts": "^2.5|^3.0", + "symfony/http-kernel": "^7.4|^8.0", + "symfony/intl": "^7.4|^8.0", + "symfony/polyfill-intl-icu": "^1.21", + "symfony/routing": "^7.4|^8.0", + "symfony/service-contracts": "^2.5|^3", + "symfony/yaml": "^7.4|^8.0" + }, + "type": "library", + "autoload": { + "files": [ + "Resources/functions.php" + ], + "psr-4": { + "Symfony\\Component\\Translation\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Fabien Potencier", + "email": "fabien@symfony.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + 
"description": "Provides tools to internationalize your application", + "homepage": "https://symfony.com", + "support": { + "source": "https://github.com/symfony/translation/tree/v8.0.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-13T13:06:50+00:00" + }, + { + "name": "symfony/translation-contracts", + "version": "v3.6.1", + "source": { + "type": "git", + "url": "https://github.com/symfony/translation-contracts.git", + "reference": "65a8bc82080447fae78373aa10f8d13b38338977" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/translation-contracts/zipball/65a8bc82080447fae78373aa10f8d13b38338977", + "reference": "65a8bc82080447fae78373aa10f8d13b38338977", + "shasum": "" + }, + "require": { + "php": ">=8.1" + }, + "type": "library", + "extra": { + "thanks": { + "url": "https://github.com/symfony/contracts", + "name": "symfony/contracts" + }, + "branch-alias": { + "dev-main": "3.6-dev" + } + }, + "autoload": { + "psr-4": { + "Symfony\\Contracts\\Translation\\": "" + }, + "exclude-from-classmap": [ + "/Test/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Generic abstractions related to translation", + "homepage": "https://symfony.com", + "keywords": [ + "abstractions", + "contracts", + "decoupling", + "interfaces", + "interoperability", + "standards" + ], + "support": { + "source": "https://github.com/symfony/translation-contracts/tree/v3.6.1" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2025-07-15T13:41:35+00:00" + }, + { + "name": "symfony/uid", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/uid.git", + "reference": "7719ce8aba76be93dfe249192f1fbfa52c588e36" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/uid/zipball/7719ce8aba76be93dfe249192f1fbfa52c588e36", + "reference": "7719ce8aba76be93dfe249192f1fbfa52c588e36", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "symfony/polyfill-uuid": "^1.15" + }, + "require-dev": { + "symfony/console": "^6.4|^7.0|^8.0" + }, + "type": "library", + "autoload": { + "psr-4": { + "Symfony\\Component\\Uid\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Grégoire Pineau", + "email": "lyrixx@lyrixx.info" + }, + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides an object-oriented API to generate and represent UIDs", + "homepage": "https://symfony.com", + "keywords": [ + "UID", + "ulid", + "uuid" + ], + "support": { + "source": 
"https://github.com/symfony/uid/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-03T23:30:35+00:00" + }, + { + "name": "symfony/var-dumper", + "version": "v7.4.4", + "source": { + "type": "git", + "url": "https://github.com/symfony/var-dumper.git", + "reference": "0e4769b46a0c3c62390d124635ce59f66874b282" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/symfony/var-dumper/zipball/0e4769b46a0c3c62390d124635ce59f66874b282", + "reference": "0e4769b46a0c3c62390d124635ce59f66874b282", + "shasum": "" + }, + "require": { + "php": ">=8.2", + "symfony/deprecation-contracts": "^2.5|^3", + "symfony/polyfill-mbstring": "~1.0" + }, + "conflict": { + "symfony/console": "<6.4" + }, + "require-dev": { + "symfony/console": "^6.4|^7.0|^8.0", + "symfony/http-kernel": "^6.4|^7.0|^8.0", + "symfony/process": "^6.4|^7.0|^8.0", + "symfony/uid": "^6.4|^7.0|^8.0", + "twig/twig": "^3.12" + }, + "bin": [ + "Resources/bin/var-dump-server" + ], + "type": "library", + "autoload": { + "files": [ + "Resources/functions/dump.php" + ], + "psr-4": { + "Symfony\\Component\\VarDumper\\": "" + }, + "exclude-from-classmap": [ + "/Tests/" + ] + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Nicolas Grekas", + "email": "p@tchwork.com" + }, + { + "name": "Symfony Community", + "homepage": "https://symfony.com/contributors" + } + ], + "description": "Provides mechanisms for walking through any arbitrary PHP variable", + "homepage": "https://symfony.com", + "keywords": [ + "debug", + "dump" + ], + "support": { + "source": "https://github.com/symfony/var-dumper/tree/v7.4.4" + }, + "funding": [ + { + "url": "https://symfony.com/sponsor", + "type": "custom" + }, + { + "url": "https://github.com/fabpot", + "type": "github" + }, + { + "url": "https://github.com/nicolas-grekas", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/symfony/symfony", + "type": "tidelift" + } + ], + "time": "2026-01-01T22:13:48+00:00" + }, + { + "name": "tijsverkoyen/css-to-inline-styles", + "version": "v2.4.0", + "source": { + "type": "git", + "url": "https://github.com/tijsverkoyen/CssToInlineStyles.git", + "reference": "f0292ccf0ec75843d65027214426b6b163b48b41" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/tijsverkoyen/CssToInlineStyles/zipball/f0292ccf0ec75843d65027214426b6b163b48b41", + "reference": "f0292ccf0ec75843d65027214426b6b163b48b41", + "shasum": "" + }, + "require": { + "ext-dom": "*", + "ext-libxml": "*", + "php": "^7.4 || ^8.0", + "symfony/css-selector": "^5.4 || ^6.0 || ^7.0 || ^8.0" + }, + "require-dev": { + "phpstan/phpstan": "^2.0", + "phpstan/phpstan-phpunit": "^2.0", + "phpunit/phpunit": "^8.5.21 || ^9.5.10" + }, + "type": "library", + "extra": { + "branch-alias": { + "dev-master": "2.x-dev" + } + }, + "autoload": { + "psr-4": { + "TijsVerkoyen\\CssToInlineStyles\\": "src" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause" + ], + "authors": [ + { + "name": "Tijs Verkoyen", + "email": "css_to_inline_styles@verkoyen.eu", + "role": "Developer" + } + ], + "description": "CssToInlineStyles is a class that enables you to convert 
HTML-pages/files into HTML-pages/files with inline styles. This is very useful when you're sending emails.", + "homepage": "https://github.com/tijsverkoyen/CssToInlineStyles", + "support": { + "issues": "https://github.com/tijsverkoyen/CssToInlineStyles/issues", + "source": "https://github.com/tijsverkoyen/CssToInlineStyles/tree/v2.4.0" + }, + "time": "2025-12-02T11:56:42+00:00" + }, + { + "name": "vlucas/phpdotenv", + "version": "v5.6.3", + "source": { + "type": "git", + "url": "https://github.com/vlucas/phpdotenv.git", + "reference": "955e7815d677a3eaa7075231212f2110983adecc" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/vlucas/phpdotenv/zipball/955e7815d677a3eaa7075231212f2110983adecc", + "reference": "955e7815d677a3eaa7075231212f2110983adecc", + "shasum": "" + }, + "require": { + "ext-pcre": "*", + "graham-campbell/result-type": "^1.1.4", + "php": "^7.2.5 || ^8.0", + "phpoption/phpoption": "^1.9.5", + "symfony/polyfill-ctype": "^1.26", + "symfony/polyfill-mbstring": "^1.26", + "symfony/polyfill-php80": "^1.26" + }, + "require-dev": { + "bamarni/composer-bin-plugin": "^1.8.2", + "ext-filter": "*", + "phpunit/phpunit": "^8.5.34 || ^9.6.13 || ^10.4.2" + }, + "suggest": { + "ext-filter": "Required to use the boolean validator." + }, + "type": "library", + "extra": { + "bamarni-bin": { + "bin-links": true, + "forward-command": false + }, + "branch-alias": { + "dev-master": "5.6-dev" + } + }, + "autoload": { + "psr-4": { + "Dotenv\\": "src/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "BSD-3-Clause" + ], + "authors": [ + { + "name": "Graham Campbell", + "email": "hello@gjcampbell.co.uk", + "homepage": "https://github.com/GrahamCampbell" + }, + { + "name": "Vance Lucas", + "email": "vance@vancelucas.com", + "homepage": "https://github.com/vlucas" + } + ], + "description": "Loads environment variables from `.env` to `getenv()`, `$_ENV` and `$_SERVER` automagically.", + "keywords": [ + "dotenv", + "env", + "environment" + ], + "support": { + "issues": "https://github.com/vlucas/phpdotenv/issues", + "source": "https://github.com/vlucas/phpdotenv/tree/v5.6.3" + }, + "funding": [ + { + "url": "https://github.com/GrahamCampbell", + "type": "github" + }, + { + "url": "https://tidelift.com/funding/github/packagist/vlucas/phpdotenv", + "type": "tidelift" + } + ], + "time": "2025-12-27T19:49:13+00:00" + }, + { + "name": "voku/portable-ascii", + "version": "2.0.3", + "source": { + "type": "git", + "url": "https://github.com/voku/portable-ascii.git", + "reference": "b1d923f88091c6bf09699efcd7c8a1b1bfd7351d" + }, + "dist": { + "type": "zip", + "url": "https://api.github.com/repos/voku/portable-ascii/zipball/b1d923f88091c6bf09699efcd7c8a1b1bfd7351d", + "reference": "b1d923f88091c6bf09699efcd7c8a1b1bfd7351d", + "shasum": "" + }, + "require": { + "php": ">=7.0.0" + }, + "require-dev": { + "phpunit/phpunit": "~6.0 || ~7.0 || ~9.0" + }, + "suggest": { + "ext-intl": "Use Intl for transliterator_transliterate() support" + }, + "type": "library", + "autoload": { + "psr-4": { + "voku\\": "src/voku/" + } + }, + "notification-url": "https://packagist.org/downloads/", + "license": [ + "MIT" + ], + "authors": [ + { + "name": "Lars Moelleken", + "homepage": "https://www.moelleken.org/" + } + ], + "description": "Portable ASCII library - performance optimized (ascii) string functions for php.", + "homepage": "https://github.com/voku/portable-ascii", + "keywords": [ + "ascii", + "clean", + "php" + ], + "support": { + "issues": 
"https://github.com/voku/portable-ascii/issues", + "source": "https://github.com/voku/portable-ascii/tree/2.0.3" + }, + "funding": [ + { + "url": "https://www.paypal.me/moelleken", + "type": "custom" + }, + { + "url": "https://github.com/voku", + "type": "github" + }, + { + "url": "https://opencollective.com/portable-ascii", + "type": "open_collective" + }, + { + "url": "https://www.patreon.com/voku", + "type": "patreon" + }, + { + "url": "https://tidelift.com/funding/github/packagist/voku/portable-ascii", + "type": "tidelift" + } + ], + "time": "2024-11-21T01:49:47+00:00" + } + ], + "packages-dev": [], + "aliases": [], + "minimum-stability": "stable", + "stability-flags": {}, + "prefer-stable": true, + "prefer-lowest": false, + "platform": { + "php": "^8.4" + }, + "platform-dev": {}, + "plugin-api-version": "2.9.0" +} diff --git a/cmd/core-app/laravel/config/app.php b/cmd/core-app/laravel/config/app.php new file mode 100644 index 00000000..7944ae49 --- /dev/null +++ b/cmd/core-app/laravel/config/app.php @@ -0,0 +1,19 @@ + env('APP_NAME', 'Core App'), + 'env' => env('APP_ENV', 'production'), + 'debug' => (bool) env('APP_DEBUG', false), + 'url' => env('APP_URL', 'http://localhost'), + 'timezone' => 'UTC', + 'locale' => 'en', + 'fallback_locale' => 'en', + 'faker_locale' => 'en_GB', + 'cipher' => 'AES-256-CBC', + 'key' => env('APP_KEY'), + 'maintenance' => [ + 'driver' => 'file', + ], +]; diff --git a/cmd/core-app/laravel/config/cache.php b/cmd/core-app/laravel/config/cache.php new file mode 100644 index 00000000..d2106ca7 --- /dev/null +++ b/cmd/core-app/laravel/config/cache.php @@ -0,0 +1,21 @@ + env('CACHE_STORE', 'file'), + + 'stores' => [ + 'file' => [ + 'driver' => 'file', + 'path' => storage_path('framework/cache/data'), + 'lock_path' => storage_path('framework/cache/data'), + ], + 'array' => [ + 'driver' => 'array', + 'serialize' => false, + ], + ], + + 'prefix' => env('CACHE_PREFIX', 'core_app_cache_'), +]; diff --git a/cmd/core-app/laravel/config/database.php b/cmd/core-app/laravel/config/database.php new file mode 100644 index 00000000..0dd2ae25 --- /dev/null +++ b/cmd/core-app/laravel/config/database.php @@ -0,0 +1,25 @@ + 'sqlite', + + 'connections' => [ + 'sqlite' => [ + 'driver' => 'sqlite', + 'url' => env('DB_URL'), + 'database' => env('DB_DATABASE', database_path('database.sqlite')), + 'prefix' => '', + 'foreign_key_constraints' => true, + 'busy_timeout' => 5000, + 'journal_mode' => 'wal', + 'synchronous' => 'normal', + ], + ], + + 'migrations' => [ + 'table' => 'migrations', + 'update_date_on_publish' => true, + ], +]; diff --git a/cmd/core-app/laravel/config/logging.php b/cmd/core-app/laravel/config/logging.php new file mode 100644 index 00000000..0b50ef7f --- /dev/null +++ b/cmd/core-app/laravel/config/logging.php @@ -0,0 +1,25 @@ + env('LOG_CHANNEL', 'single'), + + 'channels' => [ + 'single' => [ + 'driver' => 'single', + 'path' => storage_path('logs/laravel.log'), + 'level' => env('LOG_LEVEL', 'warning'), + 'replace_placeholders' => true, + ], + 'stderr' => [ + 'driver' => 'monolog', + 'level' => env('LOG_LEVEL', 'debug'), + 'handler' => Monolog\Handler\StreamHandler::class, + 'with' => [ + 'stream' => 'php://stderr', + ], + 'processors' => [Monolog\Processor\PsrLogMessageProcessor::class], + ], + ], +]; diff --git a/cmd/core-app/laravel/config/session.php b/cmd/core-app/laravel/config/session.php new file mode 100644 index 00000000..1d69e945 --- /dev/null +++ b/cmd/core-app/laravel/config/session.php @@ -0,0 +1,22 @@ + env('SESSION_DRIVER', 'file'), + 'lifetime' => 
env('SESSION_LIFETIME', 120), + 'expire_on_close' => true, + 'encrypt' => false, + 'files' => storage_path('framework/sessions'), + 'connection' => env('SESSION_CONNECTION'), + 'table' => 'sessions', + 'store' => env('SESSION_STORE'), + 'lottery' => [2, 100], + 'cookie' => env('SESSION_COOKIE', 'core_app_session'), + 'path' => '/', + 'domain' => null, + 'secure' => false, + 'http_only' => true, + 'same_site' => 'lax', + 'partitioned' => false, +]; diff --git a/cmd/core-app/laravel/config/view.php b/cmd/core-app/laravel/config/view.php new file mode 100644 index 00000000..c839c6ff --- /dev/null +++ b/cmd/core-app/laravel/config/view.php @@ -0,0 +1,10 @@ + [ + resource_path('views'), + ], + 'compiled' => env('VIEW_COMPILED_PATH', realpath(storage_path('framework/views'))), +]; diff --git a/cmd/core-app/laravel/public/index.php b/cmd/core-app/laravel/public/index.php new file mode 100644 index 00000000..d55a3b2c --- /dev/null +++ b/cmd/core-app/laravel/public/index.php @@ -0,0 +1,19 @@ +handleRequest(Request::capture()); diff --git a/cmd/core-app/laravel/resources/views/components/layout.blade.php b/cmd/core-app/laravel/resources/views/components/layout.blade.php new file mode 100644 index 00000000..acabb0db --- /dev/null +++ b/cmd/core-app/laravel/resources/views/components/layout.blade.php @@ -0,0 +1,107 @@ + + + + + + Core App + + @livewireStyles + + + {{ $slot }} + @livewireScripts + + diff --git a/cmd/core-app/laravel/resources/views/livewire/counter.blade.php b/cmd/core-app/laravel/resources/views/livewire/counter.blade.php new file mode 100644 index 00000000..b000570a --- /dev/null +++ b/cmd/core-app/laravel/resources/views/livewire/counter.blade.php @@ -0,0 +1,8 @@ +
+
{{ $count }}
+
+ + +
+

Livewire {{ \Livewire\Livewire::VERSION }} · Server-rendered, no page reload

+
diff --git a/cmd/core-app/laravel/resources/views/welcome.blade.php b/cmd/core-app/laravel/resources/views/welcome.blade.php new file mode 100644 index 00000000..47186d3d --- /dev/null +++ b/cmd/core-app/laravel/resources/views/welcome.blade.php @@ -0,0 +1,40 @@ + +
+

Core App

+

Laravel {{ app()->version() }} running inside a native desktop window

+ +
+
+
PHP
+
{{ PHP_VERSION }}
+
+
+
Thread Safety
+
{{ PHP_ZTS ? 'ZTS (Yes)' : 'NTS (No)' }}
+
+
+
SAPI
+
{{ php_sapi_name() }}
+
+
+
Platform
+
{{ PHP_OS }} {{ php_uname('m') }}
+
+
+
Database
+
SQLite {{ \SQLite3::version()['versionString'] }}
+
+
+
Mode
+
{{ env('FRANKENPHP_WORKER') ? 'Octane Worker' : 'Standard' }}
+
+
+ +
Single Binary · No Server · No Config
+
+ +
+

Livewire Reactivity Test

+ +
+
diff --git a/cmd/core-app/laravel/routes/web.php b/cmd/core-app/laravel/routes/web.php new file mode 100644 index 00000000..7bceeafd --- /dev/null +++ b/cmd/core-app/laravel/routes/web.php @@ -0,0 +1,9 @@ + Date: Fri, 6 Feb 2026 22:56:44 +0000 Subject: [PATCH 08/10] feat(core-app): add auto-migration and session/cache tables MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit AppServiceProvider runs migrate --force on first request. Sessions and cache tables created automatically in SQLite. Removed synthetic HTTP migration approach in favour of pure PHP service provider — cleaner, works with Octane workers. Co-Authored-By: Claude Opus 4.6 --- cmd/core-app/env.go | 1 + .../app/Providers/AppServiceProvider.php | 28 +++++++++++++++++ cmd/core-app/laravel/bootstrap/app.php | 1 + cmd/core-app/laravel/bootstrap/providers.php | 7 +++++ ...001_01_01_000000_create_sessions_table.php | 27 ++++++++++++++++ .../0001_01_01_000001_create_cache_table.php | 31 +++++++++++++++++++ 6 files changed, 95 insertions(+) create mode 100644 cmd/core-app/laravel/app/Providers/AppServiceProvider.php create mode 100644 cmd/core-app/laravel/bootstrap/providers.php create mode 100644 cmd/core-app/laravel/database/migrations/0001_01_01_000000_create_sessions_table.php create mode 100644 cmd/core-app/laravel/database/migrations/0001_01_01_000001_create_cache_table.php diff --git a/cmd/core-app/env.go b/cmd/core-app/env.go index 5fbde0bf..6249285c 100644 --- a/cmd/core-app/env.go +++ b/cmd/core-app/env.go @@ -123,6 +123,7 @@ CACHE_STORE=file SESSION_DRIVER=file LOG_CHANNEL=single LOG_LEVEL=warning + `, appKey, env.DatabasePath) return os.WriteFile(filepath.Join(laravelRoot, ".env"), []byte(content), 0o644) diff --git a/cmd/core-app/laravel/app/Providers/AppServiceProvider.php b/cmd/core-app/laravel/app/Providers/AppServiceProvider.php new file mode 100644 index 00000000..e8f107ac --- /dev/null +++ b/cmd/core-app/laravel/app/Providers/AppServiceProvider.php @@ -0,0 +1,28 @@ + true, + '--no-interaction' => true, + ]); + } catch (Throwable) { + // Silently skip — DB might not exist yet (e.g. during + // composer operations or first extraction). 
+ } + } +} diff --git a/cmd/core-app/laravel/bootstrap/app.php b/cmd/core-app/laravel/bootstrap/app.php index 66615824..ba8f1fff 100644 --- a/cmd/core-app/laravel/bootstrap/app.php +++ b/cmd/core-app/laravel/bootstrap/app.php @@ -9,6 +9,7 @@ use Illuminate\Foundation\Configuration\Middleware; return Application::configure(basePath: dirname(__DIR__)) ->withRouting( web: __DIR__.'/../routes/web.php', + commands: __DIR__.'/../routes/console.php', ) ->withMiddleware(function (Middleware $middleware) { // diff --git a/cmd/core-app/laravel/bootstrap/providers.php b/cmd/core-app/laravel/bootstrap/providers.php new file mode 100644 index 00000000..84c7d4de --- /dev/null +++ b/cmd/core-app/laravel/bootstrap/providers.php @@ -0,0 +1,7 @@ +string('id')->primary(); + $table->foreignId('user_id')->nullable()->index(); + $table->string('ip_address', 45)->nullable(); + $table->text('user_agent')->nullable(); + $table->longText('payload'); + $table->integer('last_activity')->index(); + }); + } + + public function down(): void + { + Schema::dropIfExists('sessions'); + } +}; diff --git a/cmd/core-app/laravel/database/migrations/0001_01_01_000001_create_cache_table.php b/cmd/core-app/laravel/database/migrations/0001_01_01_000001_create_cache_table.php new file mode 100644 index 00000000..266e00a9 --- /dev/null +++ b/cmd/core-app/laravel/database/migrations/0001_01_01_000001_create_cache_table.php @@ -0,0 +1,31 @@ +string('key')->primary(); + $table->mediumText('value'); + $table->integer('expiration'); + }); + + Schema::create('cache_locks', function (Blueprint $table) { + $table->string('key')->primary(); + $table->string('owner'); + $table->integer('expiration'); + }); + } + + public function down(): void + { + Schema::dropIfExists('cache_locks'); + Schema::dropIfExists('cache'); + } +}; From 309dcaf0b4c03a6e67b6ad971f83dedb58e8ad79 Mon Sep 17 00:00:00 2001 From: Snider Date: Sun, 8 Feb 2026 15:17:12 +0000 Subject: [PATCH 09/10] updates --- .../src/app/jellyfin/jellyfin.component.ts | 187 ++++++ cmd/community/index.html | 602 ++++++++++++++++++ cmd/core-app/CODEX_PROMPT.md | 100 +++ cmd/core-app/laravel/database/database.sqlite | Bin 0 -> 45056 bytes cmd/core-ide/build_service.go | 71 +++ cmd/core-ide/chat_service.go | 135 ++++ cmd/core-ide/frontend/angular.json | 91 +++ cmd/core-ide/frontend/package.json | 41 ++ .../frontend/src/app/app.component.ts | 18 + cmd/core-ide/frontend/src/app/app.config.ts | 9 + cmd/core-ide/frontend/src/app/app.routes.ts | 25 + .../frontend/src/app/build/build.component.ts | 184 ++++++ .../frontend/src/app/chat/chat.component.ts | 242 +++++++ .../src/app/dashboard/dashboard.component.ts | 163 +++++ .../src/app/jellyfin/jellyfin.component.ts | 175 +++++ .../frontend/src/app/main/main.component.ts | 119 ++++ .../src/app/settings/settings.component.ts | 105 +++ .../frontend/src/app/shared/wails.service.ts | 133 ++++ .../frontend/src/app/shared/ws.service.ts | 89 +++ .../frontend/src/app/tray/tray.component.ts | 124 ++++ cmd/core-ide/frontend/src/index.html | 13 + cmd/core-ide/frontend/src/main.ts | 6 + cmd/core-ide/frontend/src/styles.scss | 247 +++++++ cmd/core-ide/frontend/tsconfig.app.json | 13 + cmd/core-ide/frontend/tsconfig.json | 35 + cmd/core-ide/go.mod | 57 ++ cmd/core-ide/go.sum | 165 +++++ cmd/core-ide/icons/appicon.png | Bin 0 -> 76 bytes cmd/core-ide/icons/icons.go | 25 + cmd/core-ide/icons/tray-dark.png | Bin 0 -> 76 bytes cmd/core-ide/icons/tray-light.png | Bin 0 -> 76 bytes cmd/core-ide/icons/tray-template.png | Bin 0 -> 76 bytes cmd/core-ide/ide_service.go | 102 
+++ cmd/core-ide/main.go | 151 +++++ github-projects-recovery.md | 403 ++++++++++++ pkg/mcp/ide/bridge.go | 182 ++++++ pkg/mcp/ide/bridge_test.go | 237 +++++++ pkg/mcp/ide/config.go | 48 ++ pkg/mcp/ide/ide.go | 57 ++ pkg/mcp/ide/tools_build.go | 109 ++++ pkg/mcp/ide/tools_chat.go | 191 ++++++ pkg/mcp/ide/tools_dashboard.go | 127 ++++ pkg/mcp/subsystem.go | 32 + pkg/mcp/subsystem_test.go | 114 ++++ 44 files changed, 4927 insertions(+) create mode 100644 cmd/bugseti/frontend/src/app/jellyfin/jellyfin.component.ts create mode 100644 cmd/community/index.html create mode 100644 cmd/core-app/CODEX_PROMPT.md create mode 100644 cmd/core-app/laravel/database/database.sqlite create mode 100644 cmd/core-ide/build_service.go create mode 100644 cmd/core-ide/chat_service.go create mode 100644 cmd/core-ide/frontend/angular.json create mode 100644 cmd/core-ide/frontend/package.json create mode 100644 cmd/core-ide/frontend/src/app/app.component.ts create mode 100644 cmd/core-ide/frontend/src/app/app.config.ts create mode 100644 cmd/core-ide/frontend/src/app/app.routes.ts create mode 100644 cmd/core-ide/frontend/src/app/build/build.component.ts create mode 100644 cmd/core-ide/frontend/src/app/chat/chat.component.ts create mode 100644 cmd/core-ide/frontend/src/app/dashboard/dashboard.component.ts create mode 100644 cmd/core-ide/frontend/src/app/jellyfin/jellyfin.component.ts create mode 100644 cmd/core-ide/frontend/src/app/main/main.component.ts create mode 100644 cmd/core-ide/frontend/src/app/settings/settings.component.ts create mode 100644 cmd/core-ide/frontend/src/app/shared/wails.service.ts create mode 100644 cmd/core-ide/frontend/src/app/shared/ws.service.ts create mode 100644 cmd/core-ide/frontend/src/app/tray/tray.component.ts create mode 100644 cmd/core-ide/frontend/src/index.html create mode 100644 cmd/core-ide/frontend/src/main.ts create mode 100644 cmd/core-ide/frontend/src/styles.scss create mode 100644 cmd/core-ide/frontend/tsconfig.app.json create mode 100644 cmd/core-ide/frontend/tsconfig.json create mode 100644 cmd/core-ide/go.mod create mode 100644 cmd/core-ide/go.sum create mode 100644 cmd/core-ide/icons/appicon.png create mode 100644 cmd/core-ide/icons/icons.go create mode 100644 cmd/core-ide/icons/tray-dark.png create mode 100644 cmd/core-ide/icons/tray-light.png create mode 100644 cmd/core-ide/icons/tray-template.png create mode 100644 cmd/core-ide/ide_service.go create mode 100644 cmd/core-ide/main.go create mode 100644 github-projects-recovery.md create mode 100644 pkg/mcp/ide/bridge.go create mode 100644 pkg/mcp/ide/bridge_test.go create mode 100644 pkg/mcp/ide/config.go create mode 100644 pkg/mcp/ide/ide.go create mode 100644 pkg/mcp/ide/tools_build.go create mode 100644 pkg/mcp/ide/tools_chat.go create mode 100644 pkg/mcp/ide/tools_dashboard.go create mode 100644 pkg/mcp/subsystem.go create mode 100644 pkg/mcp/subsystem_test.go diff --git a/cmd/bugseti/frontend/src/app/jellyfin/jellyfin.component.ts b/cmd/bugseti/frontend/src/app/jellyfin/jellyfin.component.ts new file mode 100644 index 00000000..0f7c8382 --- /dev/null +++ b/cmd/bugseti/frontend/src/app/jellyfin/jellyfin.component.ts @@ -0,0 +1,187 @@ +import { Component } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { FormsModule } from '@angular/forms'; +import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser'; + +type Mode = 'web' | 'stream'; + +@Component({ + selector: 'app-jellyfin', + standalone: true, + imports: [CommonModule, FormsModule], + template: ` +
+
+
+

Jellyfin Player

+

Quick embed for media.lthn.ai or any Jellyfin host.

+
+
+ + +
+
+ +
+
+ + +
+ +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+ + +
+
+ +
+ +
+ +
+ +

Set the Item ID and API key to build the stream URL.

+
+
+ `, + styles: [` + .jellyfin { + display: flex; + flex-direction: column; + gap: var(--spacing-md); + padding: var(--spacing-md); + height: 100%; + overflow: auto; + background: var(--bg-secondary); + } + + .jellyfin__header { + display: flex; + align-items: center; + justify-content: space-between; + gap: var(--spacing-md); + } + + .jellyfin__header h1 { + margin-bottom: var(--spacing-xs); + } + + .mode-switch { + display: flex; + gap: var(--spacing-xs); + } + + .mode-switch .btn.is-active { + border-color: var(--accent-primary); + color: var(--accent-primary); + } + + .jellyfin__config { + display: flex; + flex-direction: column; + gap: var(--spacing-sm); + } + + .stream-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); + gap: var(--spacing-sm); + } + + .actions { + display: flex; + gap: var(--spacing-sm); + } + + .jellyfin__viewer { + flex: 1; + min-height: 420px; + padding: 0; + overflow: hidden; + } + + .jellyfin-frame, + .jellyfin-video { + border: 0; + width: 100%; + height: 100%; + min-height: 420px; + background: #000; + } + + .stream-hint { + padding: var(--spacing-md); + margin: 0; + } + `] +}) +export class JellyfinComponent { + mode: Mode = 'web'; + loaded = false; + + serverUrl = 'https://media.lthn.ai'; + itemId = ''; + apiKey = ''; + mediaSourceId = ''; + + safeWebUrl: SafeResourceUrl = this.sanitizer.bypassSecurityTrustResourceUrl('https://media.lthn.ai/web/index.html'); + streamUrl = ''; + + constructor(private sanitizer: DomSanitizer) {} + + load(): void { + const base = this.normalizeBase(this.serverUrl); + this.safeWebUrl = this.sanitizer.bypassSecurityTrustResourceUrl(`${base}/web/index.html`); + this.streamUrl = this.buildStreamUrl(base); + this.loaded = true; + } + + reset(): void { + this.loaded = false; + this.itemId = ''; + this.apiKey = ''; + this.mediaSourceId = ''; + this.streamUrl = ''; + } + + private normalizeBase(value: string): string { + const raw = value.trim() || 'https://media.lthn.ai'; + const withProtocol = raw.startsWith('http://') || raw.startsWith('https://') ? raw : `https://${raw}`; + return withProtocol.replace(/\/+$/, ''); + } + + private buildStreamUrl(base: string): string { + if (!this.itemId.trim() || !this.apiKey.trim()) { + return ''; + } + + const url = new URL(`${base}/Videos/${encodeURIComponent(this.itemId.trim())}/stream`); + url.searchParams.set('api_key', this.apiKey.trim()); + url.searchParams.set('static', 'true'); + if (this.mediaSourceId.trim()) { + url.searchParams.set('MediaSourceId', this.mediaSourceId.trim()); + } + return url.toString(); + } +} diff --git a/cmd/community/index.html b/cmd/community/index.html new file mode 100644 index 00000000..9da43fd2 --- /dev/null +++ b/cmd/community/index.html @@ -0,0 +1,602 @@ + + + + + + Lethean Community — Build Trust Through Code + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ +
+ +
+ +
+ + BugSETI by Lethean.io +
+ + +

+ Build trust
+ through code +

+ + +

+ An open source community where every commit, review, and pull request + builds your reputation. Like SETI@home, but for fixing real bugs in real projects. +

+ + +
+
+
+
+ + + + ~ +
+
+
$ bugseti start
+
⠋ Fetching issues from 42 OSS repos...
+
✓ 7 beginner-friendly issues queued
+
✓ AI context prepared for each issue
+
Ready. Fix bugs. Build trust.
+
+
+
+
+ + + +
+
+ + + + +
+
+ +
+

How it works

+

From install to impact

+

BugSETI runs in your system tray. It finds issues, prepares context, and gets out of your way. You write code. The community remembers.

+
+ +
+ +
+
+ 1 +

Install & connect

+
+

Download BugSETI, connect your GitHub account. That's your identity in the Lethean Community — one account, everywhere.

+
+ $ gh auth login
+ $ bugseti init +
+
+ + +
+
+ 2 +

Pick an issue

+
+

BugSETI scans OSS repos for beginner-friendly issues. AI prepares context — the relevant files, similar past fixes, project conventions.

+
+ 7 issues ready
+ Context seeded +
+
+ + +
+
+ 3 +

Fix & earn trust

+
+

Submit your PR. Every merged fix, every review, every contribution — it all counts. Your track record becomes your reputation.

+
+ PR #247 merged
+ Trust updated +
+
+
+
+
+ + + + +
+
+ + +
+
+

The app

+

A workbench in your tray

+

BugSETI lives in your system tray on macOS, Linux, and Windows. It quietly fetches issues, seeds AI context, and presents a clean workbench when you're ready to code.

+
+
+ + Priority queue — issues ranked by your skills and interests +
+
+ + AI context seeding — relevant files and patterns, ready to go +
+
+ + One-click PR submission — fork, branch, commit, push +
+
+ + Stats tracking — streaks, repos contributed, PRs merged +
+
+
+
+
+ +
+
+ + + + BugSETI — Workbench +
+
+ +
+
+ lodash/lodash#5821 + good first issue +
+

Fix _.merge not handling Symbol properties

+
+ ⭐ 58.2k + JavaScript + Context ready +
+
+ +
+
+ vuejs/core#9214 + bug +
+

Teleport target not updating on HMR

+
+ ⭐ 44.7k + TypeScript + Seeding... +
+
+ +
+ 7 issues queued + ♫ dapp.fm playing +
+
+
+
+
+
+ + +
+
+
+
+
+
+ +
+
+

dapp.fm

+

Built into BugSETI

+
+
+ +
+
+
+
+

It Feels So Good (Amnesia Mix)

+

The Conductor & The Cowboy

+
+ 3:42 +
+
+
+
+
+

Zero-trust DRM · Artists keep 95–100% · ChaCha20-Poly1305

+
+
+
+
+

Built in

+

Music while you merge

+

dapp.fm is a free music player built into BugSETI. Zero-trust DRM where the password is the licence. Artists keep almost everything. No middlemen, no platform fees.

+

The player is a working implementation of the Lethean protocol RFCs — encrypted, decentralised, and yours. Code, listen, contribute.

+ + Try the demo + + +
+
+ +
+
+ + + + +
+
+ +
+

Ecosystem

+

One identity, everywhere

+

Your GitHub is your Lethean identity. One name across Web2, Web3, Handshake DNS, blockchain — verified by what you've actually done.

+
+ +
+ +
+
Protocol
+

Lethean Network

+

Privacy-first blockchain. Consent-gated networking via the UEPS protocol. Data sovereignty cryptographically enforced.

+ lt.hn → +
+ + +
+
Identity
+

lthn/ everywhere

+

Handshake TLD, .io, .ai, .community, .eth, .tron — one name that resolves across every namespace. Your DID, decentralised.

+ hns.to → +
+ + +
+
Foundation
+

EUPL-1.2

+

Every line is open source under the European Union Public Licence. 23 languages, no jurisdiction loopholes. Code stays open, forever.

+ host.uk.com/oss → +
+ + +
+
Coming
+

lthn.ai

+

Open source EUPL-1.2 models up to 70B parameters. High-quality, embeddable transformers for the community.

+ Coming soon +
+ + +
+
Music
+

dapp.fm

+

All-in-one publishing platform. Zero-trust DRM. Artists keep 95–100%. Built on Borg encryption and LTHN rolling keys.

+ demo.dapp.fm → +
+ + +
+
Services
+

Host UK

+

Infrastructure and services brand of the Lethean Community. Privacy-first hosting, analytics, trust verification, notifications.

+ host.uk.com → +
+
+ +
+
+ + + + +
+ +
+ +
+ +

Get started

+

Join the community

+

Install BugSETI. Connect your GitHub. Start contributing. Every bug you fix makes open source better — and builds a trust record that's cryptographically yours.

+ + + + + +
+
+ # or build from source
+ $ git clone https://github.com/host-uk/core
+ $ cd core && go build ./cmd/bugseti +
+
+ +
+
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/cmd/core-app/CODEX_PROMPT.md b/cmd/core-app/CODEX_PROMPT.md new file mode 100644 index 00000000..7dbfbf27 --- /dev/null +++ b/cmd/core-app/CODEX_PROMPT.md @@ -0,0 +1,100 @@ +# Codex Task: Core App — FrankenPHP Native Desktop App + +## Context + +You are working on `cmd/core-app/` inside the `host-uk/core` Go monorepo. This is a **working** native desktop application that embeds the PHP runtime (FrankenPHP) inside a Wails v3 window. A single 53MB binary runs Laravel 12 with Livewire 4, Octane worker mode, and SQLite — no Docker, no php-fpm, no nginx, no external dependencies. + +**It already builds and runs.** Your job is to refine, not rebuild. + +## Architecture + +``` +Wails v3 WebView (native window) + | + | AssetOptions.Handler → http.Handler + v +FrankenPHP (CGO, PHP 8.4 ZTS runtime) + | + | ServeHTTP() → Laravel public/index.php + v +Laravel 12 (Octane worker mode, 2 workers) + ├── Livewire 4 (server-rendered reactivity) + ├── SQLite (~/Library/Application Support/core-app/) + └── Native Bridge (localhost HTTP API for PHP→Go calls) +``` + +## Key Files + +| File | Purpose | +|------|---------| +| `main.go` | Wails app entry, system tray, window config | +| `handler.go` | PHPHandler — FrankenPHP init, Octane worker mode, try_files URL resolution | +| `embed.go` | `//go:embed all:laravel` + extraction to temp dir | +| `env.go` | Persistent data dir, .env generation, APP_KEY management | +| `app_service.go` | Wails service bindings (version, data dir, window management) | +| `native_bridge.go` | PHP→Go HTTP bridge on localhost (random port) | +| `laravel/` | Full Laravel 12 skeleton (vendor excluded from git, built via `composer install`) | + +## Build Requirements + +- **PHP 8.4 ZTS**: `brew install shivammathur/php/php@8.4-zts` +- **Go 1.25+** with CGO enabled +- **Build tags**: `-tags nowatcher` (FrankenPHP's watcher needs libwatcher-c, skip it) +- **ZTS php-config**: Must use `/opt/homebrew/opt/php@8.4-zts/bin/php-config` (NOT the default php-config which may point to non-ZTS PHP) + +```bash +# Install Laravel deps (one-time) +cd laravel && composer install --no-dev --optimize-autoloader + +# Build +ZTS_PHP_CONFIG=/opt/homebrew/opt/php@8.4-zts/bin/php-config +CGO_ENABLED=1 \ +CGO_CFLAGS="$($ZTS_PHP_CONFIG --includes)" \ +CGO_LDFLAGS="-L/opt/homebrew/opt/php@8.4-zts/lib $($ZTS_PHP_CONFIG --ldflags) $($ZTS_PHP_CONFIG --libs)" \ +go build -tags nowatcher -o ../../bin/core-app . +``` + +## Known Patterns & Gotchas + +1. **FrankenPHP can't serve from embed.FS** — must extract to temp dir, symlink `storage/` to persistent data dir +2. **WithWorkers API (v1.5.0)**: `WithWorkers(name, fileName string, num int, env map[string]string, watch []string)` — 5 positional args, NOT variadic +3. **Worker mode needs Octane**: Workers point at `vendor/laravel/octane/bin/frankenphp-worker.php` with `APP_BASE_PATH` and `FRANKENPHP_WORKER=1` env vars +4. **Paths with spaces**: macOS `~/Library/Application Support/` has a space — ALL .env values with paths MUST be quoted +5. **URL resolution**: FrankenPHP doesn't auto-resolve `/` → `/index.php` — the Go handler implements try_files logic +6. **Auto-migration**: `AppServiceProvider::boot()` runs `migrate --force` wrapped in try/catch (must not fail during composer operations) +7. 
**Vendor dir**: Excluded from git (`.gitignore`), built at dev time via `composer install`, embedded by `//go:embed all:laravel` at build time + +## Coding Standards + +- **UK English**: colour, organisation, centre +- **PHP**: `declare(strict_types=1)` in every file, full type hints, PSR-12 via Pint +- **Go**: Standard Go conventions, error wrapping with `fmt.Errorf("context: %w", err)` +- **License**: EUPL-1.2 +- **Testing**: Pest syntax for PHP (not PHPUnit) + +## Tasks for Codex + +### Priority 1: Code Quality +- [ ] Review all Go files for error handling consistency +- [ ] Ensure handler.go's try_files logic handles edge cases (double slashes, encoded paths, path traversal) +- [ ] Add Go tests for PHPHandler URL resolution (unit tests, no FrankenPHP needed) +- [ ] Add Go tests for env.go (resolveDataDir, writeEnvFile, loadOrGenerateAppKey) + +### Priority 2: Laravel Polish +- [ ] Add `config/octane.php` with FrankenPHP server config +- [ ] Update welcome view to show migration status (table count from SQLite) +- [ ] Add a second Livewire component (e.g., todo list) to prove full CRUD with SQLite +- [ ] Add proper error page views (404, 500) styled to match the dark theme + +### Priority 3: Build Hardening +- [ ] Verify the Taskfile.yml tasks work end-to-end (`task app:setup && task app:composer && task app:build`) +- [ ] Add `.gitignore` entries for build artifacts (`bin/core-app`, temp dirs) +- [ ] Ensure `go.work` and `go.mod` are consistent + +## CRITICAL WARNINGS + +- **DO NOT push to GitHub** — GitHub remotes have been removed deliberately. The host-uk org is flagged. +- **DO NOT add GitHub as a remote** — Forge (forge.lthn.ai / git.lthn.ai) is the source of truth. +- **DO NOT modify files outside `cmd/core-app/`** — This is a workspace module, keep changes scoped. +- **DO NOT remove the `-tags nowatcher` build flag** — It will fail without libwatcher-c. +- **DO NOT change the PHP-ZTS path** — It must be the ZTS variant, not the default Homebrew PHP. diff --git a/cmd/core-app/laravel/database/database.sqlite b/cmd/core-app/laravel/database/database.sqlite new file mode 100644 index 0000000000000000000000000000000000000000..e265e8de7498d3668f706b7cd996a93c220687bf GIT binary patch literal 45056 zcmeI(yKmD#9Kdng4RM<^4H6QbD!FclT1AiwF)$!eN`^`W@f1`H)-^d=)5LCVr?e6b zDGLjK2>%QVe*pgi0}~5e9ws(@NK8o9*GihR@8#d;&XLYepFZC76KTH=qpnEoo7!c~ zFtoe2t!bK}-c|KZx3Wr<(jE1wXVT{-4ek1;pS8JPTBRCl^{u*D`!V;W_OVu(eK7m2 z8qQuluOi10KmY**5I_I{1P~~IK!0h*v=$fjlX@byJF+EOyVC1~t-UyttgJt2u03no z&(`j3Hf<-9fnIn(({YWH!7&x{cByvY) zEAB^rS479QYIoea>)xuAt&K%p^MkfLiVr$!iQtJ|5)Svh%n5m`nVo9CZkm>5>HYVU z=!W^zXGC;SEQg}gla8IpBNg5x#D5ROoi1B~<7~(6znC(u`FZ_=J{+d>ywlH8ez4NRgOm2#XHKxR<#cwb+34?kqTP-Z z$)6hP=&+#JkwG#VTybrmQ#qc$GM59#e1z7(Ve~gI`0R#|0009ILKmY**5I_I{{{NpjfB*sr zAb0tg_000IagfB*srAbH@ literal 0 HcmV?d00001 diff --git a/cmd/core-ide/build_service.go b/cmd/core-ide/build_service.go new file mode 100644 index 00000000..cf793209 --- /dev/null +++ b/cmd/core-ide/build_service.go @@ -0,0 +1,71 @@ +package main + +import ( + "context" + "log" + "time" + + "github.com/host-uk/core/pkg/mcp/ide" + "github.com/wailsapp/wails/v3/pkg/application" +) + +// BuildService provides build monitoring bindings for the frontend. +type BuildService struct { + ideSub *ide.Subsystem +} + +// NewBuildService creates a new BuildService. +func NewBuildService(ideSub *ide.Subsystem) *BuildService { + return &BuildService{ideSub: ideSub} +} + +// ServiceName returns the service name for Wails. 
+func (s *BuildService) ServiceName() string { return "BuildService" } + +// ServiceStartup is called when the Wails application starts. +func (s *BuildService) ServiceStartup(ctx context.Context, options application.ServiceOptions) error { + log.Println("BuildService started") + return nil +} + +// ServiceShutdown is called when the Wails application shuts down. +func (s *BuildService) ServiceShutdown() error { + log.Println("BuildService shutdown") + return nil +} + +// BuildDTO is a build for the frontend. +type BuildDTO struct { + ID string `json:"id"` + Repo string `json:"repo"` + Branch string `json:"branch"` + Status string `json:"status"` + Duration string `json:"duration,omitempty"` + StartedAt time.Time `json:"startedAt"` +} + +// GetBuilds returns recent builds. +func (s *BuildService) GetBuilds(repo string) []BuildDTO { + bridge := s.ideSub.Bridge() + if bridge == nil { + return []BuildDTO{} + } + _ = bridge.Send(ide.BridgeMessage{ + Type: "build_list", + Data: map[string]any{"repo": repo}, + }) + return []BuildDTO{} +} + +// GetBuildLogs returns log output for a specific build. +func (s *BuildService) GetBuildLogs(buildID string) []string { + bridge := s.ideSub.Bridge() + if bridge == nil { + return []string{} + } + _ = bridge.Send(ide.BridgeMessage{ + Type: "build_logs", + Data: map[string]any{"buildId": buildID}, + }) + return []string{} +} diff --git a/cmd/core-ide/chat_service.go b/cmd/core-ide/chat_service.go new file mode 100644 index 00000000..e6576261 --- /dev/null +++ b/cmd/core-ide/chat_service.go @@ -0,0 +1,135 @@ +package main + +import ( + "context" + "log" + "time" + + "github.com/host-uk/core/pkg/mcp/ide" + "github.com/wailsapp/wails/v3/pkg/application" +) + +// ChatService provides chat bindings for the frontend. +type ChatService struct { + ideSub *ide.Subsystem +} + +// NewChatService creates a new ChatService. +func NewChatService(ideSub *ide.Subsystem) *ChatService { + return &ChatService{ideSub: ideSub} +} + +// ServiceName returns the service name for Wails. +func (s *ChatService) ServiceName() string { return "ChatService" } + +// ServiceStartup is called when the Wails application starts. +func (s *ChatService) ServiceStartup(ctx context.Context, options application.ServiceOptions) error { + log.Println("ChatService started") + return nil +} + +// ServiceShutdown is called when the Wails application shuts down. +func (s *ChatService) ServiceShutdown() error { + log.Println("ChatService shutdown") + return nil +} + +// ChatMessageDTO is a message for the frontend. +type ChatMessageDTO struct { + Role string `json:"role"` + Content string `json:"content"` + Timestamp time.Time `json:"timestamp"` +} + +// SessionDTO is a session for the frontend. +type SessionDTO struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` +} + +// PlanStepDTO is a plan step for the frontend. +type PlanStepDTO struct { + Name string `json:"name"` + Status string `json:"status"` +} + +// PlanDTO is a plan for the frontend. +type PlanDTO struct { + SessionID string `json:"sessionId"` + Status string `json:"status"` + Steps []PlanStepDTO `json:"steps"` +} + +// SendMessage sends a message to an agent session via the bridge. 
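+// Note (clarifying comment, inferred from the surrounding code): the send is
+// fire-and-forget from the caller's point of view — the returned bool only
+// reports whether the bridge accepted the message. Any agent reply is expected
+// to arrive asynchronously on the WebSocket hub channel "chat:<sessionID>",
+// which the Angular chat component subscribes to (see chat.component.ts).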
+func (s *ChatService) SendMessage(sessionID string, message string) (bool, error) { + bridge := s.ideSub.Bridge() + if bridge == nil { + return false, nil + } + err := bridge.Send(ide.BridgeMessage{ + Type: "chat_send", + Channel: "chat:" + sessionID, + SessionID: sessionID, + Data: message, + }) + return err == nil, err +} + +// GetHistory retrieves message history for a session. +func (s *ChatService) GetHistory(sessionID string) []ChatMessageDTO { + bridge := s.ideSub.Bridge() + if bridge == nil { + return []ChatMessageDTO{} + } + _ = bridge.Send(ide.BridgeMessage{ + Type: "chat_history", + SessionID: sessionID, + }) + return []ChatMessageDTO{} +} + +// ListSessions returns active agent sessions. +func (s *ChatService) ListSessions() []SessionDTO { + bridge := s.ideSub.Bridge() + if bridge == nil { + return []SessionDTO{} + } + _ = bridge.Send(ide.BridgeMessage{Type: "session_list"}) + return []SessionDTO{} +} + +// CreateSession creates a new agent session. +func (s *ChatService) CreateSession(name string) SessionDTO { + bridge := s.ideSub.Bridge() + if bridge == nil { + return SessionDTO{Name: name, Status: "offline"} + } + _ = bridge.Send(ide.BridgeMessage{ + Type: "session_create", + Data: map[string]any{"name": name}, + }) + return SessionDTO{ + Name: name, + Status: "creating", + CreatedAt: time.Now(), + } +} + +// GetPlanStatus returns the plan status for a session. +func (s *ChatService) GetPlanStatus(sessionID string) PlanDTO { + bridge := s.ideSub.Bridge() + if bridge == nil { + return PlanDTO{SessionID: sessionID, Status: "offline"} + } + _ = bridge.Send(ide.BridgeMessage{ + Type: "plan_status", + SessionID: sessionID, + }) + return PlanDTO{ + SessionID: sessionID, + Status: "unknown", + Steps: []PlanStepDTO{}, + } +} diff --git a/cmd/core-ide/frontend/angular.json b/cmd/core-ide/frontend/angular.json new file mode 100644 index 00000000..638b167a --- /dev/null +++ b/cmd/core-ide/frontend/angular.json @@ -0,0 +1,91 @@ +{ + "$schema": "./node_modules/@angular/cli/lib/config/schema.json", + "version": 1, + "newProjectRoot": "projects", + "projects": { + "core-ide": { + "projectType": "application", + "schematics": { + "@schematics/angular:component": { + "style": "scss", + "standalone": true + } + }, + "root": "", + "sourceRoot": "src", + "prefix": "app", + "architect": { + "build": { + "builder": "@angular-devkit/build-angular:application", + "options": { + "outputPath": "dist/core-ide", + "index": "src/index.html", + "browser": "src/main.ts", + "polyfills": ["zone.js"], + "tsConfig": "tsconfig.app.json", + "inlineStyleLanguage": "scss", + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "styles": [ + "src/styles.scss" + ], + "scripts": [] + }, + "configurations": { + "production": { + "budgets": [ + { + "type": "initial", + "maximumWarning": "500kb", + "maximumError": "1mb" + }, + { + "type": "anyComponentStyle", + "maximumWarning": "2kb", + "maximumError": "4kb" + } + ], + "outputHashing": "all" + }, + "development": { + "optimization": false, + "extractLicenses": false, + "sourceMap": true + } + }, + "defaultConfiguration": "production" + }, + "serve": { + "builder": "@angular-devkit/build-angular:dev-server", + "configurations": { + "production": { + "buildTarget": "core-ide:build:production" + }, + "development": { + "buildTarget": "core-ide:build:development" + } + }, + "defaultConfiguration": "development" + }, + "test": { + "builder": "@angular-devkit/build-angular:karma", + "options": { + "polyfills": ["zone.js", "zone.js/testing"], + "tsConfig": 
"tsconfig.spec.json", + "inlineStyleLanguage": "scss", + "assets": [ + "src/favicon.ico", + "src/assets" + ], + "styles": [ + "src/styles.scss" + ], + "scripts": [] + } + } + } + } + } +} diff --git a/cmd/core-ide/frontend/package.json b/cmd/core-ide/frontend/package.json new file mode 100644 index 00000000..e575d95a --- /dev/null +++ b/cmd/core-ide/frontend/package.json @@ -0,0 +1,41 @@ +{ + "name": "core-ide", + "version": "0.1.0", + "private": true, + "scripts": { + "ng": "ng", + "start": "ng serve", + "dev": "ng serve --configuration development", + "build": "ng build --configuration production", + "build:dev": "ng build --configuration development", + "watch": "ng build --watch --configuration development", + "test": "ng test", + "lint": "ng lint" + }, + "dependencies": { + "@angular/animations": "^19.1.0", + "@angular/common": "^19.1.0", + "@angular/compiler": "^19.1.0", + "@angular/core": "^19.1.0", + "@angular/forms": "^19.1.0", + "@angular/platform-browser": "^19.1.0", + "@angular/platform-browser-dynamic": "^19.1.0", + "@angular/router": "^19.1.0", + "rxjs": "~7.8.0", + "tslib": "^2.3.0", + "zone.js": "~0.15.0" + }, + "devDependencies": { + "@angular-devkit/build-angular": "^19.1.0", + "@angular/cli": "^21.1.2", + "@angular/compiler-cli": "^19.1.0", + "@types/jasmine": "~5.1.0", + "jasmine-core": "~5.1.0", + "karma": "~6.4.0", + "karma-chrome-launcher": "~3.2.0", + "karma-coverage": "~2.2.0", + "karma-jasmine": "~5.1.0", + "karma-jasmine-html-reporter": "~2.1.0", + "typescript": "~5.5.2" + } +} diff --git a/cmd/core-ide/frontend/src/app/app.component.ts b/cmd/core-ide/frontend/src/app/app.component.ts new file mode 100644 index 00000000..d26c6dc5 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/app.component.ts @@ -0,0 +1,18 @@ +import { Component } from '@angular/core'; +import { RouterOutlet } from '@angular/router'; + +@Component({ + selector: 'app-root', + standalone: true, + imports: [RouterOutlet], + template: '', + styles: [` + :host { + display: block; + height: 100%; + } + `] +}) +export class AppComponent { + title = 'Core IDE'; +} diff --git a/cmd/core-ide/frontend/src/app/app.config.ts b/cmd/core-ide/frontend/src/app/app.config.ts new file mode 100644 index 00000000..628370af --- /dev/null +++ b/cmd/core-ide/frontend/src/app/app.config.ts @@ -0,0 +1,9 @@ +import { ApplicationConfig } from '@angular/core'; +import { provideRouter, withHashLocation } from '@angular/router'; +import { routes } from './app.routes'; + +export const appConfig: ApplicationConfig = { + providers: [ + provideRouter(routes, withHashLocation()) + ] +}; diff --git a/cmd/core-ide/frontend/src/app/app.routes.ts b/cmd/core-ide/frontend/src/app/app.routes.ts new file mode 100644 index 00000000..e8d803cb --- /dev/null +++ b/cmd/core-ide/frontend/src/app/app.routes.ts @@ -0,0 +1,25 @@ +import { Routes } from '@angular/router'; + +export const routes: Routes = [ + { + path: '', + redirectTo: 'tray', + pathMatch: 'full' + }, + { + path: 'tray', + loadComponent: () => import('./tray/tray.component').then(m => m.TrayComponent) + }, + { + path: 'main', + loadComponent: () => import('./main/main.component').then(m => m.MainComponent) + }, + { + path: 'settings', + loadComponent: () => import('./settings/settings.component').then(m => m.SettingsComponent) + }, + { + path: 'jellyfin', + loadComponent: () => import('./jellyfin/jellyfin.component').then(m => m.JellyfinComponent) + } +]; diff --git a/cmd/core-ide/frontend/src/app/build/build.component.ts b/cmd/core-ide/frontend/src/app/build/build.component.ts 
new file mode 100644 index 00000000..ea3fecec --- /dev/null +++ b/cmd/core-ide/frontend/src/app/build/build.component.ts @@ -0,0 +1,184 @@ +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { WailsService, Build } from '@shared/wails.service'; +import { WebSocketService, WSMessage } from '@shared/ws.service'; +import { Subscription } from 'rxjs'; + +@Component({ + selector: 'app-build', + standalone: true, + imports: [CommonModule], + template: ` +
+
+

Builds

+ +
+ +
+
+
+
+ {{ build.repo }} + {{ build.branch }} +
+ {{ build.status }} +
+ +
+ {{ build.startedAt | date:'medium' }} + · {{ build.duration }} +
+ +
+
{{ logs.join('\\n') }}
+

No logs available

+
+
+ +
+ No builds found. Builds will appear here from Forgejo CI. +
+
+
+ `, + styles: [` + .builds { + padding: var(--spacing-md); + } + + .builds__header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: var(--spacing-md); + } + + .builds__list { + display: flex; + flex-direction: column; + gap: var(--spacing-sm); + } + + .build-card { + background: var(--bg-secondary); + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + padding: var(--spacing-md); + cursor: pointer; + transition: border-color 0.15s; + + &:hover { + border-color: var(--text-muted); + } + } + + .build-card__header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: var(--spacing-xs); + } + + .build-card__info { + display: flex; + gap: var(--spacing-sm); + align-items: center; + } + + .build-card__repo { + font-weight: 600; + } + + .build-card__branch { + font-size: 12px; + } + + .build-card__meta { + font-size: 12px; + } + + .build-card__logs { + margin-top: var(--spacing-md); + border-top: 1px solid var(--border-color); + padding-top: var(--spacing-md); + } + + .build-card__logs pre { + font-size: 12px; + max-height: 300px; + overflow-y: auto; + } + + .builds__empty { + text-align: center; + padding: var(--spacing-xl); + } + `] +}) +export class BuildComponent implements OnInit, OnDestroy { + builds: Build[] = []; + expandedId = ''; + logs: string[] = []; + + private sub: Subscription | null = null; + + constructor( + private wails: WailsService, + private wsService: WebSocketService + ) {} + + ngOnInit(): void { + this.refresh(); + this.wsService.connect(); + this.sub = this.wsService.subscribe('build:status').subscribe( + (msg: WSMessage) => { + if (msg.data && typeof msg.data === 'object') { + const update = msg.data as Build; + const idx = this.builds.findIndex(b => b.id === update.id); + if (idx >= 0) { + this.builds[idx] = { ...this.builds[idx], ...update }; + } else { + this.builds.unshift(update); + } + } + } + ); + } + + ngOnDestroy(): void { + this.sub?.unsubscribe(); + } + + async refresh(): Promise { + this.builds = await this.wails.getBuilds(); + } + + async toggle(buildId: string): Promise { + if (this.expandedId === buildId) { + this.expandedId = ''; + this.logs = []; + return; + } + this.expandedId = buildId; + this.logs = await this.wails.getBuildLogs(buildId); + } + + trackBuild(_: number, build: Build): string { + return build.id; + } + + statusBadge(status: string): string { + switch (status) { + case 'success': return 'badge--success'; + case 'running': return 'badge--info'; + case 'failed': return 'badge--danger'; + default: return 'badge--warning'; + } + } +} diff --git a/cmd/core-ide/frontend/src/app/chat/chat.component.ts b/cmd/core-ide/frontend/src/app/chat/chat.component.ts new file mode 100644 index 00000000..ac6ca837 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/chat/chat.component.ts @@ -0,0 +1,242 @@ +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { FormsModule } from '@angular/forms'; +import { WailsService, ChatMessage, Session, PlanStatus } from '@shared/wails.service'; +import { WebSocketService, WSMessage } from '@shared/ws.service'; +import { Subscription } from 'rxjs'; + +@Component({ + selector: 'app-chat', + standalone: true, + imports: [CommonModule, FormsModule], + template: ` +
+
+
+ + +
+
+ +
+
+
+
{{ msg.role }}
+
{{ msg.content }}
+
+
+ No messages yet. Start a conversation with an agent. +
+
+ +
+

Plan: {{ plan.status }}

+
    +
  • + {{ step.name }} + {{ step.status }} +
  • +
+
+
+ +
+ + +
+
+ `, + styles: [` + .chat { + display: flex; + flex-direction: column; + height: 100%; + } + + .chat__header { + padding: var(--spacing-sm) var(--spacing-md); + border-bottom: 1px solid var(--border-color); + } + + .chat__session-picker { + display: flex; + gap: var(--spacing-sm); + align-items: center; + } + + .chat__session-picker select { + flex: 1; + } + + .chat__body { + flex: 1; + display: flex; + overflow: hidden; + } + + .chat__messages { + flex: 1; + overflow-y: auto; + padding: var(--spacing-md); + display: flex; + flex-direction: column; + gap: var(--spacing-sm); + } + + .chat__msg { + padding: var(--spacing-sm) var(--spacing-md); + border-radius: var(--radius-md); + max-width: 80%; + } + + .chat__msg--user { + align-self: flex-end; + background: rgba(57, 208, 216, 0.12); + border: 1px solid rgba(57, 208, 216, 0.2); + } + + .chat__msg--agent { + align-self: flex-start; + background: var(--bg-secondary); + border: 1px solid var(--border-color); + } + + .chat__msg-role { + font-size: 11px; + font-weight: 600; + text-transform: uppercase; + color: var(--text-muted); + margin-bottom: 2px; + } + + .chat__msg-content { + white-space: pre-wrap; + word-break: break-word; + } + + .chat__empty { + margin: auto; + text-align: center; + } + + .chat__plan { + width: 260px; + border-left: 1px solid var(--border-color); + padding: var(--spacing-md); + overflow-y: auto; + } + + .chat__plan ul { + list-style: none; + margin-top: var(--spacing-sm); + } + + .chat__plan li { + padding: var(--spacing-xs) 0; + display: flex; + justify-content: space-between; + align-items: center; + font-size: 13px; + } + + .chat__input { + padding: var(--spacing-sm) var(--spacing-md); + border-top: 1px solid var(--border-color); + display: flex; + gap: var(--spacing-sm); + align-items: flex-end; + } + + .chat__input textarea { + flex: 1; + resize: none; + } + `] +}) +export class ChatComponent implements OnInit, OnDestroy { + sessions: Session[] = []; + activeSessionId = ''; + messages: ChatMessage[] = []; + plan: PlanStatus = { sessionId: '', status: '', steps: [] }; + draft = ''; + + private sub: Subscription | null = null; + + constructor( + private wails: WailsService, + private wsService: WebSocketService + ) {} + + ngOnInit(): void { + this.loadSessions(); + this.wsService.connect(); + } + + ngOnDestroy(): void { + this.sub?.unsubscribe(); + } + + async loadSessions(): Promise { + this.sessions = await this.wails.listSessions(); + if (this.sessions.length > 0 && !this.activeSessionId) { + this.activeSessionId = this.sessions[0].id; + this.onSessionChange(); + } + } + + async onSessionChange(): Promise { + if (!this.activeSessionId) return; + + // Unsubscribe from previous channel + this.sub?.unsubscribe(); + + // Load history and plan + this.messages = await this.wails.getHistory(this.activeSessionId); + this.plan = await this.wails.getPlanStatus(this.activeSessionId); + + // Subscribe to live updates + this.sub = this.wsService.subscribe(`chat:${this.activeSessionId}`).subscribe( + (msg: WSMessage) => { + if (msg.data && typeof msg.data === 'object') { + this.messages.push(msg.data as ChatMessage); + } + } + ); + } + + async sendMessage(event?: KeyboardEvent): Promise { + if (event) { + if (event.shiftKey) return; // Allow shift+enter for newlines + event.preventDefault(); + } + const text = this.draft.trim(); + if (!text || !this.activeSessionId) return; + + // Optimistic UI update + this.messages.push({ role: 'user', content: text, timestamp: new Date().toISOString() }); + this.draft = ''; + + await 
this.wails.sendMessage(this.activeSessionId, text); + } + + async createSession(): Promise { + const name = `Session ${this.sessions.length + 1}`; + const session = await this.wails.createSession(name); + this.sessions.push(session); + this.activeSessionId = session.id; + this.onSessionChange(); + } +} diff --git a/cmd/core-ide/frontend/src/app/dashboard/dashboard.component.ts b/cmd/core-ide/frontend/src/app/dashboard/dashboard.component.ts new file mode 100644 index 00000000..32f4a90d --- /dev/null +++ b/cmd/core-ide/frontend/src/app/dashboard/dashboard.component.ts @@ -0,0 +1,163 @@ +import { Component, OnInit, OnDestroy } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { WailsService, DashboardData } from '@shared/wails.service'; +import { WebSocketService, WSMessage } from '@shared/ws.service'; +import { Subscription } from 'rxjs'; + +interface ActivityItem { + type: string; + message: string; + timestamp: string; +} + +@Component({ + selector: 'app-dashboard', + standalone: true, + imports: [CommonModule], + template: ` +
+

Dashboard

+ +
+
+
+ {{ data.connection.bridgeConnected ? 'Online' : 'Offline' }} +
+
Bridge Status
+
+
+
{{ data.connection.wsClients }}
+
WS Clients
+
+
+
{{ data.connection.wsChannels }}
+
Active Channels
+
+
+
0
+
Agent Sessions
+
+
+ +
+

Activity Feed

+
+
+ {{ item.type }} + {{ item.message }} + {{ item.timestamp | date:'shortTime' }} +
+
+ No recent activity. Events will stream here in real-time. +
+
+
+
+ `, + styles: [` + .dashboard { + padding: var(--spacing-md); + } + + .dashboard__grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(180px, 1fr)); + gap: var(--spacing-md); + margin: var(--spacing-md) 0; + } + + .stat-card { + background: var(--bg-secondary); + border: 1px solid var(--border-color); + border-radius: var(--radius-lg); + padding: var(--spacing-lg); + text-align: center; + } + + .stat-card__value { + font-size: 28px; + font-weight: 700; + color: var(--accent-primary); + } + + .stat-card__label { + font-size: 13px; + color: var(--text-muted); + margin-top: var(--spacing-xs); + } + + .dashboard__activity { + margin-top: var(--spacing-lg); + } + + .activity-feed { + margin-top: var(--spacing-sm); + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + max-height: 400px; + overflow-y: auto; + } + + .activity-item { + display: flex; + align-items: center; + gap: var(--spacing-sm); + padding: var(--spacing-sm) var(--spacing-md); + border-bottom: 1px solid var(--border-color); + font-size: 13px; + + &:last-child { + border-bottom: none; + } + } + + .activity-item__msg { + flex: 1; + } + + .activity-item__time { + font-size: 12px; + white-space: nowrap; + } + `] +}) +export class DashboardComponent implements OnInit, OnDestroy { + data: DashboardData = { + connection: { bridgeConnected: false, laravelUrl: '', wsClients: 0, wsChannels: 0 } + }; + activity: ActivityItem[] = []; + + private sub: Subscription | null = null; + private pollTimer: ReturnType | null = null; + + constructor( + private wails: WailsService, + private wsService: WebSocketService + ) {} + + ngOnInit(): void { + this.refresh(); + this.pollTimer = setInterval(() => this.refresh(), 10000); + + this.wsService.connect(); + this.sub = this.wsService.subscribe('dashboard:activity').subscribe( + (msg: WSMessage) => { + if (msg.data && typeof msg.data === 'object') { + this.activity.unshift(msg.data as ActivityItem); + if (this.activity.length > 100) { + this.activity.pop(); + } + } + } + ); + } + + ngOnDestroy(): void { + this.sub?.unsubscribe(); + if (this.pollTimer) clearInterval(this.pollTimer); + } + + async refresh(): Promise { + this.data = await this.wails.getDashboard(); + } +} diff --git a/cmd/core-ide/frontend/src/app/jellyfin/jellyfin.component.ts b/cmd/core-ide/frontend/src/app/jellyfin/jellyfin.component.ts new file mode 100644 index 00000000..29242321 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/jellyfin/jellyfin.component.ts @@ -0,0 +1,175 @@ +import { Component } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { FormsModule } from '@angular/forms'; +import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser'; + +type Mode = 'web' | 'stream'; + +@Component({ + selector: 'app-jellyfin', + standalone: true, + imports: [CommonModule, FormsModule], + template: ` +
+
+
+

Jellyfin Player

+

Embedded media access for Host UK workflows.

+
+
+ + +
+
+ +
+
+ + +
+ +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+ + +
+
+ +
+ +
+ +
+ +

Set the Item ID and API key to build the stream URL.

+
+
+ `, + styles: [` + .jellyfin { + display: flex; + flex-direction: column; + gap: var(--spacing-md); + padding: var(--spacing-md); + min-height: 100%; + background: var(--bg-primary); + } + + .jellyfin__header { + display: flex; + align-items: center; + justify-content: space-between; + gap: var(--spacing-md); + } + + .mode-switch { + display: flex; + gap: var(--spacing-xs); + } + + .mode-switch .btn.is-active { + border-color: var(--accent-primary); + color: var(--accent-primary); + } + + .stream-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(260px, 1fr)); + gap: var(--spacing-sm); + } + + .actions { + display: flex; + gap: var(--spacing-sm); + } + + .viewer { + padding: 0; + overflow: hidden; + min-height: 520px; + } + + .jellyfin-frame, + .jellyfin-video { + border: 0; + width: 100%; + height: 100%; + min-height: 520px; + background: #000; + } + + .stream-hint { + padding: var(--spacing-md); + margin: 0; + } + `] +}) +export class JellyfinComponent { + mode: Mode = 'web'; + loaded = false; + + serverUrl = 'https://media.lthn.ai'; + itemId = ''; + apiKey = ''; + mediaSourceId = ''; + + safeWebUrl: SafeResourceUrl = this.sanitizer.bypassSecurityTrustResourceUrl('https://media.lthn.ai/web/index.html'); + streamUrl = ''; + + constructor(private sanitizer: DomSanitizer) {} + + load(): void { + const base = this.normalizeBase(this.serverUrl); + this.safeWebUrl = this.sanitizer.bypassSecurityTrustResourceUrl(`${base}/web/index.html`); + this.streamUrl = this.buildStreamUrl(base); + this.loaded = true; + } + + reset(): void { + this.loaded = false; + this.itemId = ''; + this.apiKey = ''; + this.mediaSourceId = ''; + this.streamUrl = ''; + } + + private normalizeBase(value: string): string { + const raw = value.trim() || 'https://media.lthn.ai'; + const withProtocol = raw.startsWith('http://') || raw.startsWith('https://') ? raw : `https://${raw}`; + return withProtocol.replace(/\/+$/, ''); + } + + private buildStreamUrl(base: string): string { + if (!this.itemId.trim() || !this.apiKey.trim()) { + return ''; + } + + const url = new URL(`${base}/Videos/${encodeURIComponent(this.itemId.trim())}/stream`); + url.searchParams.set('api_key', this.apiKey.trim()); + url.searchParams.set('static', 'true'); + if (this.mediaSourceId.trim()) { + url.searchParams.set('MediaSourceId', this.mediaSourceId.trim()); + } + return url.toString(); + } +} diff --git a/cmd/core-ide/frontend/src/app/main/main.component.ts b/cmd/core-ide/frontend/src/app/main/main.component.ts new file mode 100644 index 00000000..6c6e7030 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/main/main.component.ts @@ -0,0 +1,119 @@ +import { Component } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { RouterLink, RouterLinkActive, RouterOutlet } from '@angular/router'; +import { ChatComponent } from '../chat/chat.component'; +import { BuildComponent } from '../build/build.component'; +import { DashboardComponent } from '../dashboard/dashboard.component'; +import { JellyfinComponent } from '../jellyfin/jellyfin.component'; + +type Panel = 'chat' | 'build' | 'dashboard' | 'jellyfin'; + +@Component({ + selector: 'app-main', + standalone: true, + imports: [CommonModule, RouterLink, RouterLinkActive, RouterOutlet, ChatComponent, BuildComponent, DashboardComponent, JellyfinComponent], + template: ` +
+ + +
+ + + + +
+
+ `, + styles: [` + .ide { + display: flex; + height: 100vh; + overflow: hidden; + } + + .ide__sidebar { + width: var(--sidebar-width); + background: var(--bg-sidebar); + border-right: 1px solid var(--border-color); + display: flex; + flex-direction: column; + padding: var(--spacing-md) 0; + flex-shrink: 0; + } + + .ide__logo { + padding: 0 var(--spacing-md); + font-size: 16px; + font-weight: 700; + color: var(--accent-primary); + margin-bottom: var(--spacing-lg); + } + + .ide__nav { + list-style: none; + flex: 1; + } + + .ide__nav-item { + display: flex; + align-items: center; + gap: var(--spacing-sm); + padding: var(--spacing-sm) var(--spacing-md); + cursor: pointer; + color: var(--text-secondary); + transition: all 0.15s; + border-left: 3px solid transparent; + + &:hover { + color: var(--text-primary); + background: var(--bg-tertiary); + } + + &.active { + color: var(--accent-primary); + background: rgba(57, 208, 216, 0.08); + border-left-color: var(--accent-primary); + } + } + + .ide__nav-icon { + font-size: 16px; + width: 20px; + text-align: center; + } + + .ide__nav-footer { + padding: var(--spacing-sm) var(--spacing-md); + font-size: 12px; + } + + .ide__content { + flex: 1; + overflow: auto; + } + `] +}) +export class MainComponent { + activePanel: Panel = 'dashboard'; + + navItems: { id: Panel; label: string; icon: string }[] = [ + { id: 'dashboard', label: 'Dashboard', icon: '\u25A6' }, + { id: 'chat', label: 'Chat', icon: '\u2709' }, + { id: 'build', label: 'Builds', icon: '\u2699' }, + { id: 'jellyfin', label: 'Jellyfin', icon: '\u25B6' }, + ]; +} diff --git a/cmd/core-ide/frontend/src/app/settings/settings.component.ts b/cmd/core-ide/frontend/src/app/settings/settings.component.ts new file mode 100644 index 00000000..b91418b4 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/settings/settings.component.ts @@ -0,0 +1,105 @@ +import { Component, OnInit } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { FormsModule } from '@angular/forms'; + +@Component({ + selector: 'app-settings', + standalone: true, + imports: [CommonModule, FormsModule], + template: ` +
+

Settings

+ +
+

Connection

+
+ + +
+
+ + +
+
+ +
+

Appearance

+
+ + +
+
+ +
+ +
+
+ `, + styles: [` + .settings { + padding: var(--spacing-lg); + max-width: 500px; + } + + .settings__section { + margin-top: var(--spacing-lg); + padding-top: var(--spacing-lg); + border-top: 1px solid var(--border-color); + + &:first-of-type { + margin-top: var(--spacing-md); + padding-top: 0; + border-top: none; + } + } + + .settings__actions { + margin-top: var(--spacing-lg); + } + `] +}) +export class SettingsComponent implements OnInit { + laravelUrl = 'ws://localhost:9876/ws'; + workspaceRoot = '.'; + theme = 'dark'; + + ngOnInit(): void { + // Settings will be loaded from the Go backend + const saved = localStorage.getItem('ide-settings'); + if (saved) { + try { + const parsed = JSON.parse(saved); + this.laravelUrl = parsed.laravelUrl ?? this.laravelUrl; + this.workspaceRoot = parsed.workspaceRoot ?? this.workspaceRoot; + this.theme = parsed.theme ?? this.theme; + } catch { + // Ignore parse errors + } + } + } + + save(): void { + localStorage.setItem('ide-settings', JSON.stringify({ + laravelUrl: this.laravelUrl, + workspaceRoot: this.workspaceRoot, + theme: this.theme, + })); + + if (this.theme === 'light') { + document.documentElement.setAttribute('data-theme', 'light'); + } else { + document.documentElement.removeAttribute('data-theme'); + } + } +} diff --git a/cmd/core-ide/frontend/src/app/shared/wails.service.ts b/cmd/core-ide/frontend/src/app/shared/wails.service.ts new file mode 100644 index 00000000..2da65e97 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/shared/wails.service.ts @@ -0,0 +1,133 @@ +import { Injectable } from '@angular/core'; + +// Type-safe wrapper for Wails v3 Go service bindings. +// At runtime, `window.go.main.{ServiceName}.{Method}()` returns a Promise. + +interface WailsGo { + main: { + IDEService: { + GetConnectionStatus(): Promise; + GetDashboard(): Promise; + ShowWindow(name: string): Promise; + }; + ChatService: { + SendMessage(sessionId: string, message: string): Promise; + GetHistory(sessionId: string): Promise; + ListSessions(): Promise; + CreateSession(name: string): Promise; + GetPlanStatus(sessionId: string): Promise; + }; + BuildService: { + GetBuilds(repo: string): Promise; + GetBuildLogs(buildId: string): Promise; + }; + }; +} + +export interface ConnectionStatus { + bridgeConnected: boolean; + laravelUrl: string; + wsClients: number; + wsChannels: number; +} + +export interface DashboardData { + connection: ConnectionStatus; +} + +export interface ChatMessage { + role: string; + content: string; + timestamp: string; +} + +export interface Session { + id: string; + name: string; + status: string; + createdAt: string; +} + +export interface PlanStatus { + sessionId: string; + status: string; + steps: PlanStep[]; +} + +export interface PlanStep { + name: string; + status: string; +} + +export interface Build { + id: string; + repo: string; + branch: string; + status: string; + duration?: string; + startedAt: string; +} + +declare global { + interface Window { + go: WailsGo; + } +} + +@Injectable({ providedIn: 'root' }) +export class WailsService { + private get ide() { return window.go?.main?.IDEService; } + private get chat() { return window.go?.main?.ChatService; } + private get build() { return window.go?.main?.BuildService; } + + // IDE + getConnectionStatus(): Promise { + return this.ide?.GetConnectionStatus() ?? Promise.resolve({ + bridgeConnected: false, laravelUrl: '', wsClients: 0, wsChannels: 0 + }); + } + + getDashboard(): Promise { + return this.ide?.GetDashboard() ?? 
Promise.resolve({ + connection: { bridgeConnected: false, laravelUrl: '', wsClients: 0, wsChannels: 0 } + }); + } + + showWindow(name: string): Promise { + return this.ide?.ShowWindow(name) ?? Promise.resolve(); + } + + // Chat + sendMessage(sessionId: string, message: string): Promise { + return this.chat?.SendMessage(sessionId, message) ?? Promise.resolve(false); + } + + getHistory(sessionId: string): Promise { + return this.chat?.GetHistory(sessionId) ?? Promise.resolve([]); + } + + listSessions(): Promise { + return this.chat?.ListSessions() ?? Promise.resolve([]); + } + + createSession(name: string): Promise { + return this.chat?.CreateSession(name) ?? Promise.resolve({ + id: '', name, status: 'offline', createdAt: '' + }); + } + + getPlanStatus(sessionId: string): Promise { + return this.chat?.GetPlanStatus(sessionId) ?? Promise.resolve({ + sessionId, status: 'offline', steps: [] + }); + } + + // Build + getBuilds(repo: string = ''): Promise { + return this.build?.GetBuilds(repo) ?? Promise.resolve([]); + } + + getBuildLogs(buildId: string): Promise { + return this.build?.GetBuildLogs(buildId) ?? Promise.resolve([]); + } +} diff --git a/cmd/core-ide/frontend/src/app/shared/ws.service.ts b/cmd/core-ide/frontend/src/app/shared/ws.service.ts new file mode 100644 index 00000000..a6d55c99 --- /dev/null +++ b/cmd/core-ide/frontend/src/app/shared/ws.service.ts @@ -0,0 +1,89 @@ +import { Injectable, OnDestroy } from '@angular/core'; +import { Subject, Observable } from 'rxjs'; +import { filter } from 'rxjs/operators'; + +export interface WSMessage { + type: string; + channel?: string; + processId?: string; + data?: unknown; + timestamp: string; +} + +@Injectable({ providedIn: 'root' }) +export class WebSocketService implements OnDestroy { + private ws: WebSocket | null = null; + private messages$ = new Subject(); + private reconnectTimer: ReturnType | null = null; + private url = 'ws://127.0.0.1:9877/ws'; + private connected = false; + + connect(url?: string): void { + if (url) this.url = url; + this.doConnect(); + } + + private doConnect(): void { + if (this.ws) { + this.ws.close(); + } + + this.ws = new WebSocket(this.url); + + this.ws.onopen = () => { + this.connected = true; + console.log('[WS] Connected'); + }; + + this.ws.onmessage = (event: MessageEvent) => { + try { + const msg: WSMessage = JSON.parse(event.data); + this.messages$.next(msg); + } catch { + console.warn('[WS] Failed to parse message'); + } + }; + + this.ws.onclose = () => { + this.connected = false; + console.log('[WS] Disconnected, reconnecting in 3s...'); + this.reconnectTimer = setTimeout(() => this.doConnect(), 3000); + }; + + this.ws.onerror = () => { + this.ws?.close(); + }; + } + + subscribe(channel: string): Observable { + // Send subscribe command to hub + this.send({ type: 'subscribe', data: channel, timestamp: new Date().toISOString() }); + return this.messages$.pipe( + filter(msg => msg.channel === channel) + ); + } + + unsubscribe(channel: string): void { + this.send({ type: 'unsubscribe', data: channel, timestamp: new Date().toISOString() }); + } + + send(msg: WSMessage): void { + if (this.ws?.readyState === WebSocket.OPEN) { + this.ws.send(JSON.stringify(msg)); + } + } + + get isConnected(): boolean { + return this.connected; + } + + get allMessages$(): Observable { + return this.messages$.asObservable(); + } + + ngOnDestroy(): void { + if (this.reconnectTimer) clearTimeout(this.reconnectTimer); + this.ws?.close(); + this.messages$.complete(); + } +} diff --git 
a/cmd/core-ide/frontend/src/app/tray/tray.component.ts b/cmd/core-ide/frontend/src/app/tray/tray.component.ts new file mode 100644 index 00000000..5911a0de --- /dev/null +++ b/cmd/core-ide/frontend/src/app/tray/tray.component.ts @@ -0,0 +1,124 @@ +import { Component, OnInit } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { WailsService, ConnectionStatus } from '@shared/wails.service'; + +@Component({ + selector: 'app-tray', + standalone: true, + imports: [CommonModule], + template: ` +
+    <div class="tray">
+      <div class="tray__header">
+        <h3>Core IDE</h3>
+        <span class="badge" [class.badge--success]="status.bridgeConnected" [class.badge--danger]="!status.bridgeConnected">
+          {{ status.bridgeConnected ? 'Online' : 'Offline' }}
+        </span>
+      </div>
+
+      <div class="tray__stats">
+        <div class="stat">
+          <span class="stat__value">{{ status.wsClients }}</span>
+          <span class="stat__label">WS Clients</span>
+        </div>
+        <div class="stat">
+          <span class="stat__value">{{ status.wsChannels }}</span>
+          <span class="stat__label">Channels</span>
+        </div>
+      </div>
+
+      <div class="tray__actions">
+        <button class="btn btn--primary" (click)="openMain()">Open IDE</button>
+        <button class="btn btn--secondary" (click)="openSettings()">Settings</button>
+      </div>
+
+      <div class="tray__footer text-muted">
+        {{ status.laravelUrl }}
+      </div>
+    </div>
+ `, + styles: [` + .tray { + padding: var(--spacing-md); + height: 100%; + display: flex; + flex-direction: column; + gap: var(--spacing-md); + } + + .tray__header { + display: flex; + justify-content: space-between; + align-items: center; + } + + .tray__stats { + display: grid; + grid-template-columns: 1fr 1fr; + gap: var(--spacing-sm); + } + + .stat { + background: var(--bg-secondary); + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + padding: var(--spacing-sm) var(--spacing-md); + text-align: center; + } + + .stat__value { + display: block; + font-size: 24px; + font-weight: 600; + color: var(--accent-primary); + } + + .stat__label { + font-size: 12px; + color: var(--text-muted); + } + + .tray__actions { + display: flex; + gap: var(--spacing-sm); + } + + .tray__actions .btn { + flex: 1; + } + + .tray__footer { + margin-top: auto; + font-size: 12px; + text-align: center; + } + `] +}) +export class TrayComponent implements OnInit { + status: ConnectionStatus = { + bridgeConnected: false, + laravelUrl: '', + wsClients: 0, + wsChannels: 0 + }; + + private pollTimer: ReturnType | null = null; + + constructor(private wails: WailsService) {} + + ngOnInit(): void { + this.refresh(); + this.pollTimer = setInterval(() => this.refresh(), 5000); + } + + async refresh(): Promise { + this.status = await this.wails.getConnectionStatus(); + } + + openMain(): void { + this.wails.showWindow('main'); + } + + openSettings(): void { + this.wails.showWindow('settings'); + } +} diff --git a/cmd/core-ide/frontend/src/index.html b/cmd/core-ide/frontend/src/index.html new file mode 100644 index 00000000..f56693ea --- /dev/null +++ b/cmd/core-ide/frontend/src/index.html @@ -0,0 +1,13 @@ + + + + + Core IDE + + + + + + + + diff --git a/cmd/core-ide/frontend/src/main.ts b/cmd/core-ide/frontend/src/main.ts new file mode 100644 index 00000000..35b00f34 --- /dev/null +++ b/cmd/core-ide/frontend/src/main.ts @@ -0,0 +1,6 @@ +import { bootstrapApplication } from '@angular/platform-browser'; +import { appConfig } from './app/app.config'; +import { AppComponent } from './app/app.component'; + +bootstrapApplication(AppComponent, appConfig) + .catch((err) => console.error(err)); diff --git a/cmd/core-ide/frontend/src/styles.scss b/cmd/core-ide/frontend/src/styles.scss new file mode 100644 index 00000000..a8dda351 --- /dev/null +++ b/cmd/core-ide/frontend/src/styles.scss @@ -0,0 +1,247 @@ +// Core IDE Global Styles + +:root { + // Dark theme (default) — IDE accent: teal/cyan + --bg-primary: #161b22; + --bg-secondary: #0d1117; + --bg-tertiary: #21262d; + --bg-sidebar: #131820; + --text-primary: #c9d1d9; + --text-secondary: #8b949e; + --text-muted: #6e7681; + --border-color: #30363d; + --accent-primary: #39d0d8; + --accent-secondary: #58a6ff; + --accent-success: #3fb950; + --accent-warning: #d29922; + --accent-danger: #f85149; + + // Spacing + --spacing-xs: 4px; + --spacing-sm: 8px; + --spacing-md: 16px; + --spacing-lg: 24px; + --spacing-xl: 32px; + + // Border radius + --radius-sm: 4px; + --radius-md: 6px; + --radius-lg: 12px; + + // Font + --font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Noto Sans', Helvetica, Arial, sans-serif; + --font-mono: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas, Liberation Mono, monospace; + + // IDE-specific + --sidebar-width: 240px; + --chat-input-height: 80px; +} + +// Reset +*, +*::before, +*::after { + box-sizing: border-box; + margin: 0; + padding: 0; +} + +html, body { + height: 100%; + width: 100%; +} + +body { + font-family: 
var(--font-family); + font-size: 14px; + line-height: 1.5; + color: var(--text-primary); + background-color: var(--bg-primary); + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +// Typography +h1, h2, h3, h4, h5, h6 { + font-weight: 600; + line-height: 1.25; + margin-bottom: var(--spacing-sm); +} + +h1 { font-size: 24px; } +h2 { font-size: 20px; } +h3 { font-size: 16px; } +h4 { font-size: 14px; } + +a { + color: var(--accent-secondary); + text-decoration: none; + + &:hover { + text-decoration: underline; + } +} + +code, pre { + font-family: var(--font-mono); + font-size: 13px; +} + +code { + padding: 2px 6px; + background-color: var(--bg-tertiary); + border-radius: var(--radius-sm); +} + +pre { + padding: var(--spacing-md); + background-color: var(--bg-secondary); + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + overflow-x: auto; +} + +// Scrollbar styling +::-webkit-scrollbar { + width: 8px; + height: 8px; +} + +::-webkit-scrollbar-track { + background: transparent; +} + +::-webkit-scrollbar-thumb { + background: var(--border-color); + border-radius: 4px; + + &:hover { + background: var(--text-muted); + } +} + +// Buttons +.btn { + display: inline-flex; + align-items: center; + justify-content: center; + gap: var(--spacing-xs); + padding: var(--spacing-sm) var(--spacing-md); + font-size: 14px; + font-weight: 500; + line-height: 1; + border: 1px solid transparent; + border-radius: var(--radius-md); + cursor: pointer; + transition: all 0.2s; + + &:disabled { + opacity: 0.5; + cursor: not-allowed; + } + + &--primary { + background-color: var(--accent-primary); + color: #0d1117; + + &:hover:not(:disabled) { + opacity: 0.9; + } + } + + &--secondary { + background-color: var(--bg-tertiary); + border-color: var(--border-color); + color: var(--text-primary); + + &:hover:not(:disabled) { + background-color: var(--bg-secondary); + } + } + + &--danger { + background-color: var(--accent-danger); + color: white; + } + + &--ghost { + background: transparent; + color: var(--text-secondary); + + &:hover:not(:disabled) { + color: var(--text-primary); + background-color: var(--bg-tertiary); + } + } +} + +// Forms +.form-group { + margin-bottom: var(--spacing-md); +} + +.form-label { + display: block; + margin-bottom: var(--spacing-xs); + font-weight: 500; + color: var(--text-primary); +} + +.form-input, +.form-select, +.form-textarea { + width: 100%; + padding: var(--spacing-sm) var(--spacing-md); + font-size: 14px; + background-color: var(--bg-secondary); + border: 1px solid var(--border-color); + border-radius: var(--radius-md); + color: var(--text-primary); + + &:focus { + outline: none; + border-color: var(--accent-primary); + box-shadow: 0 0 0 3px rgba(57, 208, 216, 0.15); + } + + &::placeholder { + color: var(--text-muted); + } +} + +// Badges +.badge { + display: inline-flex; + align-items: center; + padding: 2px 8px; + font-size: 12px; + font-weight: 500; + border-radius: 999px; + + &--success { + background-color: rgba(63, 185, 80, 0.15); + color: var(--accent-success); + } + + &--warning { + background-color: rgba(210, 153, 34, 0.15); + color: var(--accent-warning); + } + + &--danger { + background-color: rgba(248, 81, 73, 0.15); + color: var(--accent-danger); + } + + &--info { + background-color: rgba(57, 208, 216, 0.15); + color: var(--accent-primary); + } +} + +// Utility classes +.text-muted { color: var(--text-muted); } +.text-success { color: var(--accent-success); } +.text-danger { color: var(--accent-danger); } +.text-warning { 
color: var(--accent-warning); } +.mono { font-family: var(--font-mono); } diff --git a/cmd/core-ide/frontend/tsconfig.app.json b/cmd/core-ide/frontend/tsconfig.app.json new file mode 100644 index 00000000..7d7c716d --- /dev/null +++ b/cmd/core-ide/frontend/tsconfig.app.json @@ -0,0 +1,13 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./out-tsc/app", + "types": [] + }, + "files": [ + "src/main.ts" + ], + "include": [ + "src/**/*.d.ts" + ] +} diff --git a/cmd/core-ide/frontend/tsconfig.json b/cmd/core-ide/frontend/tsconfig.json new file mode 100644 index 00000000..62eaf438 --- /dev/null +++ b/cmd/core-ide/frontend/tsconfig.json @@ -0,0 +1,35 @@ +{ + "compileOnSave": false, + "compilerOptions": { + "baseUrl": "./", + "outDir": "./dist/out-tsc", + "forceConsistentCasingInFileNames": true, + "strict": true, + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "esModuleInterop": true, + "sourceMap": true, + "declaration": false, + "experimentalDecorators": true, + "moduleResolution": "bundler", + "importHelpers": true, + "target": "ES2022", + "module": "ES2022", + "lib": [ + "ES2022", + "dom" + ], + "paths": { + "@app/*": ["src/app/*"], + "@shared/*": ["src/app/shared/*"] + } + }, + "angularCompilerOptions": { + "enableI18nLegacyMessageIdFormat": false, + "strictInjectionParameters": true, + "strictInputAccessModifiers": true, + "strictTemplates": true + } +} diff --git a/cmd/core-ide/go.mod b/cmd/core-ide/go.mod new file mode 100644 index 00000000..626ea74d --- /dev/null +++ b/cmd/core-ide/go.mod @@ -0,0 +1,57 @@ +module github.com/host-uk/core/cmd/core-ide + +go 1.25.5 + +require ( + github.com/host-uk/core v0.0.0 + github.com/wailsapp/wails/v3 v3.0.0-alpha.64 +) + +require ( + dario.cat/mergo v1.0.2 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect + github.com/ProtonMail/go-crypto v1.3.0 // indirect + github.com/adrg/xdg v0.5.3 // indirect + github.com/bep/debounce v1.2.1 // indirect + github.com/cloudflare/circl v1.6.3 // indirect + github.com/coder/websocket v1.8.14 // indirect + github.com/cyphar/filepath-securejoin v0.6.1 // indirect + github.com/ebitengine/purego v0.9.1 // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.7.0 // indirect + github.com/go-git/go-git/v5 v5.16.4 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect + github.com/godbus/dbus/v5 v5.2.2 // indirect + github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/google/jsonschema-go v0.4.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/websocket v1.5.3 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 // indirect + github.com/kevinburke/ssh_config v1.4.0 // indirect + github.com/klauspost/cpuid/v2 v2.3.0 // indirect + github.com/leaanthony/go-ansi-parser v1.6.1 // indirect + github.com/leaanthony/u v1.1.1 // indirect + github.com/lmittmann/tint v1.1.2 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modelcontextprotocol/go-sdk v1.2.0 // indirect + github.com/pjbgf/sha1cd v0.5.0 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/samber/lo 
v1.52.0 // indirect + github.com/sergi/go-diff v1.4.0 // indirect + github.com/skeema/knownhosts v1.3.2 // indirect + github.com/wailsapp/go-webview2 v1.0.23 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + github.com/yosida95/uritemplate/v3 v3.0.2 // indirect + golang.org/x/crypto v0.47.0 // indirect + golang.org/x/net v0.49.0 // indirect + golang.org/x/oauth2 v0.34.0 // indirect + golang.org/x/sys v0.40.0 // indirect + golang.org/x/text v0.33.0 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect +) + +replace github.com/host-uk/core => ../.. diff --git a/cmd/core-ide/go.sum b/cmd/core-ide/go.sum new file mode 100644 index 00000000..685ffc65 --- /dev/null +++ b/cmd/core-ide/go.sum @@ -0,0 +1,165 @@ +dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8= +dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= +github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw= +github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE= +github.com/adrg/xdg v0.5.3 h1:xRnxJXne7+oWDatRhR1JLnvuccuIeCoBu2rtuLqQB78= +github.com/adrg/xdg v0.5.3/go.mod h1:nlTsY+NNiCBGCK2tpm09vRqfVzrc2fLmXGpBLF0zlTQ= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/bep/debounce v1.2.1 h1:v67fRdBA9UQu2NhLFXrSg0Brw7CexQekrBwDMM8bzeY= +github.com/bep/debounce v1.2.1/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0= +github.com/cloudflare/circl v1.6.3 h1:9GPOhQGF9MCYUeXyMYlqTR6a5gTrgR/fBLXvUgtVcg8= +github.com/cloudflare/circl v1.6.3/go.mod h1:2eXP6Qfat4O/Yhh8BznvKnJ+uzEoTQ6jVKJRn81BiS4= +github.com/coder/websocket v1.8.14 h1:9L0p0iKiNOibykf283eHkKUHHrpG7f65OE3BhhO7v9g= +github.com/coder/websocket v1.8.14/go.mod h1:NX3SzP+inril6yawo5CQXx8+fk145lPDC6pumgx0mVg= +github.com/cyphar/filepath-securejoin v0.6.1 h1:5CeZ1jPXEiYt3+Z6zqprSAgSWiggmpVyciv8syjIpVE= +github.com/cyphar/filepath-securejoin v0.6.1/go.mod h1:A8hd4EnAeyujCJRrICiOWqjS1AX0a9kM5XL+NwKoYSc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/ebitengine/purego v0.9.1 h1:a/k2f2HQU3Pi399RPW1MOaZyhKJL9w/xFpKAg4q1s0A= +github.com/ebitengine/purego v0.9.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ= +github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= +github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod 
h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= +github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.7.0 h1:83lBUJhGWhYp0ngzCMSgllhUSuoHP1iEWYjsPl9nwqM= +github.com/go-git/go-billy/v5 v5.7.0/go.mod h1:/1IUejTKH8xipsAcdfcSAlUlo2J7lkYV8GTKxAT/L3E= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= +github.com/go-git/go-git/v5 v5.16.4 h1:7ajIEZHZJULcyJebDLo99bGgS0jRrOxzZG4uCk2Yb2Y= +github.com/go-git/go-git/v5 v5.16.4/go.mod h1:4Ge4alE/5gPs30F2H1esi2gPd69R0C39lolkucHBOp8= +github.com/go-json-experiment/json v0.0.0-20251027170946-4849db3c2f7e h1:Lf/gRkoycfOBPa42vU2bbgPurFong6zXeFtPoxholzU= +github.com/go-json-experiment/json v0.0.0-20251027170946-4849db3c2f7e/go.mod h1:uNVvRXArCGbZ508SxYYTC5v1JWoz2voff5pm25jU1Ok= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/godbus/dbus/v5 v5.2.2 h1:TUR3TgtSVDmjiXOgAAyaZbYmIeP3DPkld3jgKGV8mXQ= +github.com/godbus/dbus/v5 v5.2.2/go.mod h1:3AAv2+hPq5rdnr5txxxRwiGjPXamgoIHgz9FPBfOp3c= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= +github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/jsonschema-go v0.4.2 h1:tmrUohrwoLZZS/P3x7ex0WAVknEkBZM46iALbcqoRA8= +github.com/google/jsonschema-go v0.4.2/go.mod h1:r5quNTdLOYEz95Ru18zA0ydNbBuYoo9tgaYcxEYhJVE= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1 h1:njuLRcjAuMKr7kI3D85AXWkw6/+v9PwtV6M6o11sWHQ= +github.com/jchv/go-winloader v0.0.0-20250406163304-c1995be93bd1/go.mod h1:alcuEEnZsY1WQsagKhZDsoPCRoOijYqhZvPwLG0kzVs= +github.com/kevinburke/ssh_config v1.4.0 h1:6xxtP5bZ2E4NF5tuQulISpTO2z8XbtH8cg1PWkxoFkQ= +github.com/kevinburke/ssh_config v1.4.0/go.mod h1:q2RIzfka+BXARoNexmF9gkxEX7DmvbW9P4hIVx2Kg4M= +github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= +github.com/klauspost/cpuid/v2 v2.3.0/go.mod 
h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leaanthony/go-ansi-parser v1.6.1 h1:xd8bzARK3dErqkPFtoF9F3/HgN8UQk0ed1YDKpEz01A= +github.com/leaanthony/go-ansi-parser v1.6.1/go.mod h1:+vva/2y4alzVmmIEpk9QDhA7vLC5zKDTRwfZGOp3IWU= +github.com/leaanthony/u v1.1.1 h1:TUFjwDGlNX+WuwVEzDqQwC2lOv0P4uhTQw7CMFdiK7M= +github.com/leaanthony/u v1.1.1/go.mod h1:9+o6hejoRljvZ3BzdYlVL0JYCwtnAsVuN9pVTQcaRfI= +github.com/lmittmann/tint v1.1.2 h1:2CQzrL6rslrsyjqLDwD11bZ5OpLBPU+g3G/r5LSfS8w= +github.com/lmittmann/tint v1.1.2/go.mod h1:HIS3gSy7qNwGCj+5oRjAutErFBl4BzdQP6cJZ0NfMwE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ= +github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modelcontextprotocol/go-sdk v1.2.0 h1:Y23co09300CEk8iZ/tMxIX1dVmKZkzoSBZOpJwUnc/s= +github.com/modelcontextprotocol/go-sdk v1.2.0/go.mod h1:6fM3LCm3yV7pAs8isnKLn07oKtB0MP9LHd3DfAcKw10= +github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= +github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= +github.com/pjbgf/sha1cd v0.5.0 h1:a+UkboSi1znleCDUNT3M5YxjOnN1fz2FhN48FlwCxs0= +github.com/pjbgf/sha1cd v0.5.0/go.mod h1:lhpGlyHLpQZoxMv8HcgXvZEhcGs0PG/vsZnEJ7H0iCM= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= +github.com/samber/lo v1.52.0 h1:Rvi+3BFHES3A8meP33VPAxiBZX/Aws5RxrschYGjomw= +github.com/samber/lo v1.52.0/go.mod 
h1:4+MXEGsJzbKGaUEQFKBq2xtfuznW9oz/WrgyzMzRoM0= +github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw= +github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/skeema/knownhosts v1.3.2 h1:EDL9mgf4NzwMXCTfaxSD/o/a5fxDw/xL9nkU28JjdBg= +github.com/skeema/knownhosts v1.3.2/go.mod h1:bEg3iQAuw+jyiw+484wwFJoKSLwcfd7fqRy+N0QTiow= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/wailsapp/go-webview2 v1.0.23 h1:jmv8qhz1lHibCc79bMM/a/FqOnnzOGEisLav+a0b9P0= +github.com/wailsapp/go-webview2 v1.0.23/go.mod h1:qJmWAmAmaniuKGZPWwne+uor3AHMB5PFhqiK0Bbj8kc= +github.com/wailsapp/wails/v3 v3.0.0-alpha.64 h1:xAhLFVfdbg7XdZQ5mMQmBv2BglWu8hMqe50Z+3UJvBs= +github.com/wailsapp/wails/v3 v3.0.0-alpha.64/go.mod h1:zvgNL/mlFcX8aRGu6KOz9AHrMmTBD+4hJRQIONqF/Yw= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4= +github.com/yosida95/uritemplate/v3 v3.0.2/go.mod h1:ILOh0sOhIJR3+L/8afwt/kE++YT040gmv5BQTMR2HP4= +golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= +golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU= +golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= +golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= +golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw= +golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200810151505-1b9f1253b3ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.40.0 
h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= +golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= +golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= +golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc= +golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/cmd/core-ide/icons/appicon.png b/cmd/core-ide/icons/appicon.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-ide/icons/icons.go b/cmd/core-ide/icons/icons.go new file mode 100644 index 00000000..72fb175c --- /dev/null +++ b/cmd/core-ide/icons/icons.go @@ -0,0 +1,25 @@ +// Package icons provides embedded icon assets for the Core IDE application. +package icons + +import _ "embed" + +// TrayTemplate is the template icon for macOS systray (22x22 PNG, black on transparent). +// Template icons automatically adapt to light/dark mode on macOS. +// +//go:embed tray-template.png +var TrayTemplate []byte + +// TrayLight is the light mode icon for Windows/Linux systray. +// +//go:embed tray-light.png +var TrayLight []byte + +// TrayDark is the dark mode icon for Windows/Linux systray. +// +//go:embed tray-dark.png +var TrayDark []byte + +// AppIcon is the main application icon. 
+// +//go:embed appicon.png +var AppIcon []byte diff --git a/cmd/core-ide/icons/tray-dark.png b/cmd/core-ide/icons/tray-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-ide/icons/tray-light.png b/cmd/core-ide/icons/tray-light.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-ide/icons/tray-template.png b/cmd/core-ide/icons/tray-template.png new file mode 100644 index 0000000000000000000000000000000000000000..53adbd595d3e69cce3545aafe98f348b5eb4a3be GIT binary patch literal 76 zcmeAS@N?(olHy`uVBq!ia0vp^Vj#@H1|*Mc$*~4fqMj~}Ar*6y|NQ@N&n&>e@c%zE Z1IHhxf6EIyW&))dJYD@<);T3K0RVYV6kz}W literal 0 HcmV?d00001 diff --git a/cmd/core-ide/ide_service.go b/cmd/core-ide/ide_service.go new file mode 100644 index 00000000..fca137ca --- /dev/null +++ b/cmd/core-ide/ide_service.go @@ -0,0 +1,102 @@ +package main + +import ( + "context" + "log" + "net/http" + + "github.com/host-uk/core/pkg/mcp/ide" + "github.com/host-uk/core/pkg/ws" + "github.com/wailsapp/wails/v3/pkg/application" +) + +// IDEService provides core IDE bindings for the frontend. +type IDEService struct { + app *application.App + ideSub *ide.Subsystem + hub *ws.Hub +} + +// NewIDEService creates a new IDEService. +func NewIDEService(ideSub *ide.Subsystem, hub *ws.Hub) *IDEService { + return &IDEService{ideSub: ideSub, hub: hub} +} + +// ServiceName returns the service name for Wails. +func (s *IDEService) ServiceName() string { return "IDEService" } + +// ServiceStartup is called when the Wails application starts. +func (s *IDEService) ServiceStartup(ctx context.Context, options application.ServiceOptions) error { + // Start WebSocket HTTP server for the Angular frontend + go s.startWSServer() + log.Println("IDEService started") + return nil +} + +// ServiceShutdown is called when the Wails application shuts down. +func (s *IDEService) ServiceShutdown() error { + log.Println("IDEService shutdown") + return nil +} + +// ConnectionStatus represents the IDE bridge connection state. +type ConnectionStatus struct { + BridgeConnected bool `json:"bridgeConnected"` + LaravelURL string `json:"laravelUrl"` + WSClients int `json:"wsClients"` + WSChannels int `json:"wsChannels"` +} + +// GetConnectionStatus returns the current bridge and WebSocket status. +func (s *IDEService) GetConnectionStatus() ConnectionStatus { + connected := false + if s.ideSub.Bridge() != nil { + connected = s.ideSub.Bridge().Connected() + } + + stats := s.hub.Stats() + return ConnectionStatus{ + BridgeConnected: connected, + WSClients: stats.Clients, + WSChannels: stats.Channels, + } +} + +// DashboardData aggregates data for the dashboard view. +type DashboardData struct { + Connection ConnectionStatus `json:"connection"` +} + +// GetDashboard returns aggregated dashboard data. +func (s *IDEService) GetDashboard() DashboardData { + return DashboardData{ + Connection: s.GetConnectionStatus(), + } +} + +// ShowWindow shows a named window. 
+func (s *IDEService) ShowWindow(name string) { + if s.app == nil { + return + } + if w, ok := s.app.Window.Get(name); ok { + w.Show() + w.Focus() + } +} + +// startWSServer starts the WebSocket HTTP server for the Angular frontend. +func (s *IDEService) startWSServer() { + mux := http.NewServeMux() + mux.HandleFunc("/ws", s.hub.HandleWebSocket) + mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte(`{"status":"ok"}`)) + }) + + addr := "127.0.0.1:9877" + log.Printf("IDE WebSocket server listening on %s", addr) + if err := http.ListenAndServe(addr, mux); err != nil { + log.Printf("IDE WebSocket server error: %v", err) + } +} diff --git a/cmd/core-ide/main.go b/cmd/core-ide/main.go new file mode 100644 index 00000000..f9efb9fe --- /dev/null +++ b/cmd/core-ide/main.go @@ -0,0 +1,151 @@ +// Package main provides the Core IDE desktop application. +// Core IDE connects to the Laravel core-agentic backend via MCP bridge, +// providing a chat interface for AI agent sessions, build monitoring, +// and a system dashboard. +package main + +import ( + "context" + "embed" + "io/fs" + "log" + "runtime" + + "github.com/host-uk/core/cmd/core-ide/icons" + "github.com/host-uk/core/pkg/mcp/ide" + "github.com/host-uk/core/pkg/ws" + "github.com/wailsapp/wails/v3/pkg/application" +) + +//go:embed all:frontend/dist/core-ide/browser +var assets embed.FS + +func main() { + staticAssets, err := fs.Sub(assets, "frontend/dist/core-ide/browser") + if err != nil { + log.Fatal(err) + } + + // Create shared WebSocket hub for real-time streaming + hub := ws.NewHub() + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + go hub.Run(ctx) + + // Create IDE subsystem (bridge to Laravel core-agentic) + ideSub := ide.New(hub) + ideSub.StartBridge(ctx) + + // Create Wails services + ideService := NewIDEService(ideSub, hub) + chatService := NewChatService(ideSub) + buildService := NewBuildService(ideSub) + + app := application.New(application.Options{ + Name: "Core IDE", + Description: "Host UK Platform IDE - AI Agent Sessions, Build Monitoring & Dashboard", + Services: []application.Service{ + application.NewService(ideService), + application.NewService(chatService), + application.NewService(buildService), + }, + Assets: application.AssetOptions{ + Handler: application.AssetFileServerFS(staticAssets), + }, + Mac: application.MacOptions{ + ActivationPolicy: application.ActivationPolicyAccessory, + }, + }) + + ideService.app = app + + setupSystemTray(app, ideService) + + log.Println("Starting Core IDE...") + log.Println(" - System tray active") + log.Println(" - Bridge connecting to Laravel core-agentic...") + + if err := app.Run(); err != nil { + log.Fatal(err) + } + + cancel() +} + +// setupSystemTray configures the system tray icon, menu, and windows. 
+func setupSystemTray(app *application.App, ideService *IDEService) { + systray := app.SystemTray.New() + systray.SetTooltip("Core IDE") + + if runtime.GOOS == "darwin" { + systray.SetTemplateIcon(icons.TrayTemplate) + } else { + systray.SetDarkModeIcon(icons.TrayDark) + systray.SetIcon(icons.TrayLight) + } + + // Tray panel window + trayWindow := app.Window.NewWithOptions(application.WebviewWindowOptions{ + Name: "tray-panel", + Title: "Core IDE", + Width: 400, + Height: 500, + URL: "/tray", + Hidden: true, + Frameless: true, + BackgroundColour: application.NewRGB(22, 27, 34), + }) + systray.AttachWindow(trayWindow).WindowOffset(5) + + // Main IDE window + app.Window.NewWithOptions(application.WebviewWindowOptions{ + Name: "main", + Title: "Core IDE", + Width: 1400, + Height: 900, + URL: "/main", + Hidden: true, + BackgroundColour: application.NewRGB(22, 27, 34), + }) + + // Settings window + app.Window.NewWithOptions(application.WebviewWindowOptions{ + Name: "settings", + Title: "Core IDE Settings", + Width: 600, + Height: 500, + URL: "/settings", + Hidden: true, + BackgroundColour: application.NewRGB(22, 27, 34), + }) + + // Tray menu + trayMenu := app.Menu.New() + + statusItem := trayMenu.Add("Status: Connecting...") + statusItem.SetEnabled(false) + + trayMenu.AddSeparator() + + trayMenu.Add("Open IDE").OnClick(func(ctx *application.Context) { + if w, ok := app.Window.Get("main"); ok { + w.Show() + w.Focus() + } + }) + + trayMenu.Add("Settings...").OnClick(func(ctx *application.Context) { + if w, ok := app.Window.Get("settings"); ok { + w.Show() + w.Focus() + } + }) + + trayMenu.AddSeparator() + + trayMenu.Add("Quit Core IDE").OnClick(func(ctx *application.Context) { + app.Quit() + }) + + systray.SetMenu(trayMenu) +} diff --git a/github-projects-recovery.md b/github-projects-recovery.md new file mode 100644 index 00000000..5ead7321 --- /dev/null +++ b/github-projects-recovery.md @@ -0,0 +1,403 @@ +# GitHub Projects Recovery — host-uk org + +> Recovered 2026-02-08 from flagged GitHub org before potential data loss. +> Projects 1 (Core.Framework) was empty. Projects 2, 3, 4 captured below. + +--- + +## Project 2: Workstation (43 items) + +> Agentic task queue — issues labelled agent:ready across all host-uk repos. 
+ +| # | Title | Issue | +|---|-------|-------| +| 1 | feat: add workspace.yaml support for unified package commands | #38 | +| 2 | feat: add core setup command for GitHub repo configuration | #45 | +| 3 | docs sync ignores packages_dir from workspace.yaml | #46 | +| 4 | feat: add core qa command area for CI/workflow monitoring | #47 | +| 5 | feat: add core security command to expose Dependabot and code scanning alerts | #48 | +| 6 | feat: add core monitor to aggregate free tier scanner results | #49 | +| 7 | feat: add core qa issues for intelligent issue triage | #61 | +| 8 | feat: add core qa review for PR review status | #62 | +| 9 | feat: add core qa health for aggregate CI health | #63 | +| 10 | feat(dev): add safe git operations for AI agents | #53 | +| 11 | docs(mcp): Document MCP server setup and usage | #125 | +| 12 | feat: Implement persistent MCP server in daemon mode | #118 | +| 13 | chore(io): Migrate pkg/agentic to Medium abstraction | #104 | +| 14 | feat: Evolve pkg/io from Medium abstraction to io.Node (Borg + Enchantrix) | #101 | +| 15 | Add streaming API to pkg/io/local for large file handling | #224 | +| 16 | feat(hooks): Add core ai hook for async test running | #262 | +| 17 | feat(ai): Add core ai spawn for parallel agent tasks | #260 | +| 18 | feat(ai): Add core ai cost for budget tracking | #261 | +| 19 | feat(ai): Add core ai session for session management | #259 | +| 20 | feat(test): Add smart test detection to core test | #258 | +| 21 | feat(test): Add core test --watch continuous testing mode | #257 | +| 22 | feat(collect): Add core collect dispatch event hook system | #256 | +| 23 | feat(collect): Add core collect process command | #255 | +| 24 | feat(collect): Add core collect excavate command | #254 | +| 25 | feat(collect): Add core collect papers command | #253 | +| 26 | feat(collect): Add core collect bitcointalk command | #251 | +| 27 | feat(collect): Add core collect market command | #252 | +| 28 | feat(collect): Add core collect github command | #250 | +| 29 | epic(security): workspace isolation and authorisation hardening | #31 | +| 30 | epic(security): SQL query validation and execution safety | #32 | +| 31 | epic(fix): namespace and import corrections | #33 | +| 32 | epic(chore): configuration and documentation standardisation | #34 | +| 33 | Epic: Webhook Security Hardening | #27 | +| 34 | Epic: API Performance Optimisation | #28 | +| 35 | Epic: MCP API Hardening | #29 | +| 36 | Epic: API Test Coverage | #30 | +| 37 | Epic: Security Hardening | #104 | +| 38 | Epic: Input Validation & Sanitisation | #105 | +| 39 | Epic: Test Coverage | #106 | +| 40 | Epic: Error Handling & Observability | #107 | +| 41 | Epic: Performance Optimisation | #108 | +| 42 | Epic: Code Quality & Architecture | #109 | +| 43 | Epic: Documentation | #110 | + +--- + +## Project 4: Core.GO & Core.CLI (97 items) + +> Go framework and CLI development — host-uk/core repo. Filter by lang:go label. 
+ +| # | Title | Issue | +|---|-------|-------| +| 1 | feat: add workspace.yaml support for unified package commands | #38 | +| 2 | feat: add core setup command for GitHub repo configuration | #45 | +| 3 | docs sync ignores packages_dir from workspace.yaml | #46 | +| 4 | feat: add core qa command area for CI/workflow monitoring | #47 | +| 5 | feat: add core security command to expose Dependabot and code scanning alerts | #48 | +| 6 | feat: add core monitor to aggregate free tier scanner results | #49 | +| 7 | feat(crypt): Implement standalone pkg/crypt with modern cryptographic primitives | #168 | +| 8 | feat(cli): Implement build variants for reduced attack surface | #171 | +| 9 | feat(config): Implement standalone pkg/config with layered configuration | #167 | +| 10 | feat(io): Fix pkg/io import and add symlink-safe path validation | #169 | +| 11 | feat(plugin): Consolidate pkg/module into pkg/plugin with GitHub installation | #170 | +| 12 | feat(help): Implement full-text search | #139 | +| 13 | feat(help): Implement Catalog and Topic types | #138 | +| 14 | feat(help): Implement markdown parsing and section extraction | #137 | +| 15 | feat(help): Remove Wails dependencies from pkg/help | #134 | +| 16 | feat(help): Add CLI help command | #136 | +| 17 | docs(help): Create help content for core CLI | #135 | +| 18 | feat(help): Implement display-agnostic help system for CLI and GUI | #133 | +| 19 | chore(log): Remove deprecated pkg/errors package | #131 | +| 20 | feat(log): Add combined log-and-return error helpers | #129 | +| 21 | chore(log): Create pkg/errors deprecation alias | #128 | +| 22 | feat(log): Unify pkg/errors and pkg/log into single logging package | #127 | +| 23 | feat(mcp): Add TCP transport | #126 | +| 24 | docs(mcp): Document MCP server setup and usage | #125 | +| 25 | feat(mcp): Add MCP command for manual server control | #124 | +| 26 | feat(mcp): Create MCPService for framework integration | #122 | +| 27 | feat(mcp): Add health check integration | #123 | +| 28 | chore(log): Migrate pkg/errors imports to pkg/log | #130 | +| 29 | feat(mcp): Add connection management and graceful draining | #121 | +| 30 | feat(mcp): Add daemon mode detection and auto-start | #119 | +| 31 | feat(mcp): Add Unix socket transport | #120 | +| 32 | feat: Implement persistent MCP server in daemon mode | #118 | +| 33 | chore(io): Migrate internal/cmd/setup to Medium abstraction | #116 | +| 34 | chore(io): Migrate internal/cmd/docs to Medium abstraction | #113 | +| 35 | chore(io): Migrate remaining internal/cmd/* to Medium abstraction | #117 | +| 36 | chore(io): Migrate internal/cmd/dev to Medium abstraction | #114 | +| 37 | chore(io): Migrate internal/cmd/sdk to Medium abstraction | #115 | +| 38 | chore(io): Migrate internal/cmd/php to Medium abstraction | #112 | +| 39 | feat(log): Add error creation functions to pkg/log | #132 | +| 40 | chore(io): Migrate pkg/cache to Medium abstraction | #111 | +| 41 | chore(io): Migrate pkg/devops to Medium abstraction | #110 | +| 42 | chore(io): Migrate pkg/cli to Medium abstraction | #107 | +| 43 | chore(io): Migrate pkg/build to Medium abstraction | #109 | +| 44 | chore(io): Migrate pkg/container to Medium abstraction | #105 | +| 45 | chore(io): Migrate pkg/repos to Medium abstraction | #108 | +| 46 | feat(io): Migrate pkg/mcp to use Medium abstraction | #103 | +| 47 | chore(io): Migrate pkg/release to Medium abstraction | #106 | +| 48 | chore(io): Migrate pkg/agentic to Medium abstraction | #104 | +| 49 | feat(io): Extend Medium interface with missing 
operations | #102 | +| 50 | fix(php): core php ci improvements needed | #92 | +| 51 | CLI Output: Color contrast audit and terminal adaptation | #99 | +| 52 | feat: Evolve pkg/io from Medium abstraction to io.Node (Borg + Enchantrix) | #101 | +| 53 | Documentation: Improve Accessibility | #89 | +| 54 | Web UI: Audit Angular App Accessibility | #88 | +| 55 | Add configuration documentation to README | #236 | +| 56 | Add Architecture Decision Records (ADRs) | #237 | +| 57 | Add user documentation: user guide, FAQ, troubleshooting guide | #235 | +| 58 | Add CHANGELOG.md to track version changes | #234 | +| 59 | Add CONTRIBUTING.md with contribution guidelines | #233 | +| 60 | Create centralized configuration service to reduce code duplication | #232 | +| 61 | Update README.md to reflect actual configuration management implementation | #231 | +| 62 | Centralize user-facing error strings in i18n translation files | #230 | +| 63 | Log all errors at handling point with contextual information | #229 | +| 64 | Implement panic recovery mechanism with graceful shutdown | #228 | +| 65 | Standardize on cli.Error for user-facing errors, deprecate cli.Fatal | #227 | +| 66 | Add linker flags (-s -w) to reduce binary size | #226 | +| 67 | Use background goroutines for long-running operations to prevent UI blocking | #225 | +| 68 | Add streaming API to pkg/io/local for large file handling | #224 | +| 69 | Fix Go environment to run govulncheck for dependency scanning | #223 | +| 70 | Sanitize user input in execInContainer to prevent injection | #222 | +| 71 | Configure branch coverage measurement in test tooling | #220 | +| 72 | Remove StrictHostKeyChecking=no from SSH commands | #221 | +| 73 | Implement authentication and authorization features described in README | #217 | +| 74 | Add tests for edge cases, error paths, and integration scenarios | #219 | +| 75 | Increase test coverage for low-coverage packages (cli, internal/cmd/dev) | #218 | +| 76 | Introduce typed messaging system for IPC (replace interface{}) | #216 | +| 77 | Refactor Core struct to smaller, focused components (ServiceManager, MessageBus, LifecycleManager) | #215 | +| 78 | Implement structured logging (JSON format) | #212 | +| 79 | Implement log retention policy | #214 | +| 80 | Add logging for security events (authentication, access) | #213 | +| 81 | feat(setup): add .core/setup.yaml for dev environment bootstrapping | #211 | +| 82 | audit: Documentation completeness and quality | #192 | +| 83 | audit: API design and consistency | #191 | +| 84 | [Audit] Concurrency and Race Condition Analysis | #197 | +| 85 | feat(hooks): Add core ai hook for async test running | #262 | +| 86 | feat(ai): Add core ai spawn for parallel agent tasks | #260 | +| 87 | feat(ai): Add core ai cost for budget tracking | #261 | +| 88 | feat(ai): Add core ai session for session management | #259 | +| 89 | feat(test): Add smart test detection to core test | #258 | +| 90 | feat(test): Add core test --watch continuous testing mode | #257 | +| 91 | feat(collect): Add core collect dispatch event hook system | #256 | +| 92 | feat(collect): Add core collect process command | #255 | +| 93 | feat(collect): Add core collect excavate command | #254 | +| 94 | feat(collect): Add core collect bitcointalk command | #251 | +| 95 | feat(collect): Add core collect papers command | #253 | +| 96 | feat(collect): Add core collect market command | #252 | +| 97 | feat(collect): Add core collect github command | #250 | + +--- + +## Project 3: Core.PHP (195 items) + +> Laravel/PHP 
ecosystem — all core-* packages. Filter by lang:php label. + +| # | Title | Issue | +|---|-------|-------| +| 1 | Dependency: Consider adding security scanning to CI pipeline | #31 | +| 2 | Concurrency: Sanitiser preset registration not thread-safe | #32 | +| 3 | Documentation: Missing SECURITY.md with vulnerability reporting process | #30 | +| 4 | Error Handling: ResilientSession redirect loop potential | #28 | +| 5 | Configuration: ConfigValue encryption may cause issues during APP_KEY rotation | #25 | +| 6 | Testing: Missing test coverage for critical security components | #23 | +| 7 | Security: HadesEncrypt embeds hardcoded public key | #21 | +| 8 | Security: SafeWebhookUrl DNS rebinding vulnerability | #17 | +| 9 | Performance: selectRaw queries may have missing indexes | #19 | +| 10 | Core Bouncer: Request Whitelisting System | #14 | +| 11 | Security: ManagesTokens trait stores tokens in memory without protection | #18 | +| 12 | Trees: Consolidate subscriber monthly command from Commerce module | #12 | +| 13 | Trees: Webhook/API for TFTF confirmation | #13 | +| 14 | CSRF token not automatically attached in bootstrap.js | #17 | +| 15 | Missing exception handling configuration in bootstrap/app.php | #15 | +| 16 | CI workflow only runs on main branch but repo uses dev as main | #14 | +| 17 | Minimal test coverage for a best-practices template | #16 | +| 18 | Missing declare(strict_types=1) in PHP files violates coding standards | #12 | +| 19 | Dependencies using dev-main branches instead of stable versions | #13 | +| 20 | Security: No HTTPS enforcement in production | #11 | +| 21 | Security: SESSION_ENCRYPT=false in .env.example is insecure default | #8 | +| 22 | Security: No rate limiting configured for any routes | #10 | +| 23 | Security: Missing security headers middleware by default | #9 | +| 24 | Security: ActivityLog query vulnerable to SQL wildcard injection | #20 | +| 25 | Missing: Rate limiting not applied to Livewire component methods | #17 | +| 26 | Missing: Log redaction patterns incomplete for common sensitive data | #16 | +| 27 | Code Quality: Livewire components duplicate checkHadesAccess() method | #19 | +| 28 | Error Handling: RemoteServerManager writeFile() has command injection via base64 | #15 | +| 29 | Missing: phpseclib3 not in composer.json dependencies | #18 | +| 30 | Performance: Query logging enabled unconditionally in local environment | #12 | +| 31 | Testing: Test suite does not verify Hades authorization enforcement | #11 | +| 32 | Error Handling: LogReaderService silently fails on file operations | #10 | +| 33 | Security: Telescope hides insufficient request headers in production | #14 | +| 34 | Security: IP validation missing for Server model | #13 | +| 35 | Security: Hades cookie has 1-year expiry with no rotation | #8 | +| 36 | Security: DevController authorize() method undefined | #7 | +| 37 | Security: Missing HADES_TOKEN configuration in .env.example | #9 | +| 38 | Security: Missing workspace authorization check when creating Server records | #6 | +| 39 | Security: SQL injection vulnerability in Database query tool - stacked query bypass | #4 | +| 40 | Security: Server SSH connection test uses StrictHostKeyChecking=no | #5 | +| 41 | Missing: Webhook endpoint URL scheme validation | #19 | +| 42 | Missing: Tests for WebhookSecretRotationService grace period edge cases | #20 | +| 43 | Performance: ApiUsageDaily recordFromUsage performs multiple queries | #18 | +| 44 | Security: API key scopes exposed in 403 error responses | #17 | +| 45 | Missing: 
Webhook delivery retry job lacks idempotency key | #15 | +| 46 | Configuration: No environment variable validation for API config | #16 | +| 47 | Error Handling: MCP registry YAML files read without validation | #14 | +| 48 | Missing: Index on webhook_deliveries for needsDelivery scope | #12 | +| 49 | Code Quality: WebhookSignature generateSecret uses Str::random instead of cryptographic RNG | #13 | +| 50 | Error Handling: recordUsage() called synchronously on every request | #10 | +| 51 | Security: Rate limit sliding window stores individual timestamps - memory growth concern | #9 | +| 52 | Security: WebhookSecretController lacks authorization checks | #11 | +| 53 | Security: Webhook secret visible in API response after rotation | #7 | +| 54 | Missing: Tests for MCP API Controller tool execution | #8 | +| 55 | Performance: API key lookup requires loading all candidates with matching prefix | #6 | +| 56 | Security: Webhook URL SSRF vulnerability - no validation of internal/private network URLs | #4 | +| 57 | Security: MCP tool execution uses proc_open without output sanitization | #5 | +| 58 | Missing tests for Social API controllers | #2 | +| 59 | Verify ProductApiController implementation | #3 | +| 60 | Session data stored without encryption (SESSION_ENCRYPT=false) | #18 | +| 61 | Mass assignment vulnerability in ContentEditor save method | #17 | +| 62 | AdminPageSearchProvider returns hardcoded URLs without auth checking | #16 | +| 63 | Missing rate limiting on sensitive admin operations | #14 | +| 64 | XSS risk in GlobalSearch component's JSON encoding | #13 | +| 65 | Missing validation for sortField parameter allows SQL injection | #10 | +| 66 | Missing test coverage for critical admin operations | #11 | +| 67 | Cache flush in Platform.php may cause service disruption | #12 | +| 68 | Missing CSRF protection for Livewire file uploads | #9 | +| 69 | N+1 query risk in ContentManager computed properties | #8 | +| 70 | Missing route authentication middleware on admin routes | #7 | +| 71 | Missing authorization check on Dashboard and Console components | #4 | +| 72 | SQL injection risk via LIKE wildcards in search queries | #5 | +| 73 | Bug: CheckMcpQuota middleware checks wrong attribute name | #22 | +| 74 | Security: DataRedactor does not handle object properties | #21 | +| 75 | Performance: QueryDatabase tool fetches all results before truncation | #20 | +| 76 | Documentation: Missing env validation for sensitive configuration | #23 | +| 77 | Security: McpAuditLog hash chain has race condition in transaction | #18 | +| 78 | Configuration: Missing MCP config file with database and security settings | #17 | +| 79 | Security: ApiKeyManager Livewire component missing CSRF and rate limiting | #19 | +| 80 | Error Handling: QueryExecutionService swallows timeout configuration errors | #16 | +| 81 | Security: SqlQueryValidator whitelist regex may allow SQL injection via JOINs | #15 | +| 82 | Test Coverage: Missing tests for critical security components | #14 | +| 83 | Security: McpApiController namespace mismatch and missing authorization | #11 | +| 84 | Security: AuditLogService export method has no authorization check | #13 | +| 85 | Bug: UpgradePlan tool imports RequiresWorkspaceContext from wrong namespace | #10 | +| 86 | Security: McpAuthenticate accepts API key in query string | #8 | +| 87 | Performance: AuditLogService hash chain verification loads entire log table | #12 | +| 88 | Bug: CircuitBreaker imports wrong namespace for CircuitOpenException | #9 | +| 89 | Security: ListTables tool 
uses MySQL-specific SHOW TABLES query | #7 | +| 90 | Security: ListTables tool exposes all database tables without authorization | #6 | +| 91 | Security: CreateCoupon tool missing strict_types declaration | #4 | +| 92 | Multi-server federation for MCP | #3 | +| 93 | Security: CreateCoupon tool missing workspace context/authorization | #5 | +| 94 | WebSocket support for real-time MCP updates | #2 | +| 95 | Incomplete account deletion may leave orphaned data | #13 | +| 96 | Error handling gap: Webhook secret returned in creation response | #14 | +| 97 | Missing environment validation for sensitive configuration | #18 | +| 98 | Potential timing attack in invitation token verification | #17 | +| 99 | Race condition in workspace default switching | #11 | +| 100 | Missing test coverage for TotpService TOTP verification | #12 | +| 101 | Missing authorisation check in EntitlementApiController::summary | #10 | +| 102 | Missing rate limiting on sensitive entitlement API endpoints | #9 | +| 103 | Security: Hardcoded test credentials in DemoTestUserSeeder | #7 | +| 104 | Security: SQL injection-like pattern in search query | #8 | +| 105 | Complete UserStatsService TODO items | #2 | +| 106 | Security: SSRF protection missing DNS rebinding defence in webhook dispatch job | #6 | +| 107 | Refund::markAsSucceeded not wrapped in transaction with payment update | #28 | +| 108 | Missing strict_types in Refund model | #30 | +| 109 | CreditNoteService::autoApplyCredits lacks transaction wrapper | #27 | +| 110 | Fail-open VAT validation could allow tax evasion | #25 | +| 111 | Missing strict_types in CreditNote model | #29 | +| 112 | Missing tests for CommerceController API endpoints | #26 | +| 113 | API controller returns raw exception messages to clients | #22 | +| 114 | Missing rate limiting on Commerce API endpoints | #23 | +| 115 | ProcessDunning console command lacks mutex/locking for concurrent runs | #24 | +| 116 | Race condition in CreditNote::recordUsage without row locking | #21 | +| 117 | Missing strict_types in PaymentMethodService.php | #20 | +| 118 | Missing strict_types in CreditNoteService.php | #19 | +| 119 | Missing tests for UsageBillingService | #16 | +| 120 | Missing strict_types in RefundService.php | #18 | +| 121 | Missing return type declarations in CreditNote model scopes | #14 | +| 122 | Missing tests for PaymentMethodService | #17 | +| 123 | MySQL-specific raw SQL breaks database portability | #13 | +| 124 | Missing strict_types declaration in UsageBillingService.php | #11 | +| 125 | Weak random number generation in CreditNote reference number | #12 | +| 126 | Missing tests for CreditNoteService | #15 | +| 127 | Missing tests for critical fraud detection paths | #9 | +| 128 | Missing strict_types declaration in TaxService.php | #10 | +| 129 | Missing index validation and SQL injection protection in Coupon scopes | #6 | +| 130 | Missing database transaction in referral payout commission assignment | #8 | +| 131 | Potential N+1 query in StripeGateway::createCheckoutSession | #7 | +| 132 | Race condition in Order number generation | #5 | +| 133 | Missing strict type declaration in SubscriptionService.php | #3 | +| 134 | Warehouse & Fulfillment System | #2 | +| 135 | Race condition in Invoice number generation | #4 | +| 136 | [Audit] Architecture Patterns | #50 | +| 137 | [Audit] Database Query Optimization | #48 | +| 138 | [Audit] Error Handling and Recovery | #51 | +| 139 | [Audit] Concurrency and Race Condition Analysis | #47 | +| 140 | audit: API design and consistency | #44 | +| 141 
| audit: Performance bottlenecks and optimization | #43 | +| 142 | [Audit] Multi-Tenancy Security | #23 | +| 143 | fix(composer): simplify dependencies for hello world setup | #21 | +| 144 | [Audit] Database Query Optimization | #23 | +| 145 | audit: Test coverage and quality | #42 | +| 146 | audit: Code complexity and maintainability | #41 | +| 147 | audit: Authentication and authorization flows | #38 | +| 148 | audit: Dependency vulnerabilities and supply chain | #39 | +| 149 | [Audit] Database Query Optimization | #22 | +| 150 | audit: OWASP Top 10 security review | #36 | +| 151 | audit: Input validation and sanitization | #37 | +| 152 | security(mcp): ContentTools.php accepts workspace as request parameter enabling cross-tenant access | #29 | +| 153 | quality(mcp): standardise tool schema and request input patterns to match MCP spec | #30 | +| 154 | epic(security): workspace isolation and authorisation hardening | #31 | +| 155 | epic(security): SQL query validation and execution safety | #32 | +| 156 | epic(fix): namespace and import corrections | #33 | +| 157 | epic(chore): configuration and documentation standardisation | #34 | +| 158 | Epic: Webhook Security Hardening | #27 | +| 159 | Epic: API Performance Optimisation | #28 | +| 160 | Epic: MCP API Hardening | #29 | +| 161 | Epic: API Test Coverage | #30 | +| 162 | security(trees): fix race condition in PlantTreeWithTFTF job | #77 | +| 163 | security(auth): replace LthnHash with bcrypt for password hashing | #78 | +| 164 | security(helpers): fix SSRF in File.php via unvalidated Http::get | #79 | +| 165 | security(input): sanitise route parameters in Sanitiser middleware | #80 | +| 166 | security(trees): validate $model parameter in TreeStatsController | #81 | +| 167 | security(tests): remove hardcoded API token from test file | #82 | +| 168 | quality(bouncer): move env() call to config file in BouncerMiddleware | #83 | +| 169 | security(api): prevent upstream body leakage in BuildsResponse | #84 | +| 170 | security(auth): add session configuration file | #85 | +| 171 | quality(logging): add correlation IDs to request logging | #86 | +| 172 | security(logging): prevent PII leakage in LogsActivity trait | #87 | +| 173 | performance(queries): fix N+1 queries in ConfigResolver, AdminMenuRegistry, activity feed, SeoScoreTrend | #88 | +| 174 | performance(queries): replace ::all() with chunking/cursors | #89 | +| 175 | security(bouncer): review overly permissive bypass patterns | #90 | +| 176 | performance(http): add caching headers middleware | #91 | +| 177 | quality(scanner): refactor ModuleScanner namespace detection | #92 | +| 178 | security(input): extend superglobal sanitisation to cookies and server vars | #93 | +| 179 | docs(arch): add architecture diagram | #94 | +| 180 | docs(decisions): add Architecture Decision Records | #95 | +| 181 | docs(changelog): create formal changelog | #96 | +| 182 | docs(guide): add user guide, FAQ, and troubleshooting | #97 | +| 183 | quality(tenant): fix BelongsToWorkspace trait location discrepancy | #98 | +| 184 | quality(errors): implement custom exception hierarchy | #99 | +| 185 | quality(registry): reduce code duplication in ModuleRegistry | #100 | +| 186 | test(unit): add unit tests for src/ classes | #101 | +| 187 | test(security): add security-specific test suite | #102 | +| 188 | test(integration): add integration tests | #103 | +| 189 | Epic: Performance Optimisation | #108 | +| 190 | Epic: Code Quality & Architecture | #109 | +| 191 | Epic: Documentation | #110 | +| 192 | Epic: Input 
Validation & Sanitisation | #105 | +| 193 | Epic: Security Hardening | #104 | +| 194 | Epic: Test Coverage | #106 | +| 195 | Epic: Error Handling & Observability | #107 | + +--- + +## Summary + +| Project | Items | Focus | +|---------|-------|-------| +| #1 Core.Framework | 0 (empty) | 10,000ft architectural decisions | +| #2 Workstation | 43 | Agentic task queue, cross-repo | +| #3 Core.PHP | 195 | Laravel/PHP security, quality, tests | +| #4 Core.GO & Core.CLI | 97 | Go framework, CLI, MCP, io abstraction | +| **Total** | **335** | | + +### Categories at a glance + +**Core.PHP (#3)** — Dominated by security findings and audit results: +- ~60 security vulnerabilities (SQL injection, SSRF, XSS, auth bypass, race conditions) +- ~30 missing strict_types / coding standards +- ~25 missing test coverage +- ~15 performance issues (N+1 queries, missing indexes) +- ~10 epics grouping related work +- ~10 audit tasks +- Misc: docs, config, quality + +**Core.GO (#4)** — Feature development and refactoring: +- ~15 io/Medium abstraction migrations +- ~10 MCP server features (transports, daemon, health) +- ~10 help system features +- ~8 log/error unification +- ~8 collect commands (data gathering) +- ~7 ai/test commands +- ~7 documentation/config audit +- Misc: security hardening, accessibility + +**Workstation (#2)** — Subset of #3 and #4 tagged for agentic execution: +- Features ready for AI agent implementation +- Epics spanning both Go and PHP diff --git a/pkg/mcp/ide/bridge.go b/pkg/mcp/ide/bridge.go new file mode 100644 index 00000000..e0d6f3a8 --- /dev/null +++ b/pkg/mcp/ide/bridge.go @@ -0,0 +1,182 @@ +package ide + +import ( + "context" + "encoding/json" + "fmt" + "log" + "sync" + "time" + + "github.com/gorilla/websocket" + "github.com/host-uk/core/pkg/ws" +) + +// BridgeMessage is the wire format between the IDE and Laravel. +type BridgeMessage struct { + Type string `json:"type"` + Channel string `json:"channel,omitempty"` + SessionID string `json:"sessionId,omitempty"` + Data any `json:"data,omitempty"` + Timestamp time.Time `json:"timestamp"` +} + +// Bridge maintains a WebSocket connection to the Laravel core-agentic +// backend and forwards responses to a local ws.Hub. +type Bridge struct { + cfg Config + hub *ws.Hub + conn *websocket.Conn + + mu sync.Mutex + connected bool + cancel context.CancelFunc +} + +// NewBridge creates a bridge that will connect to the Laravel backend and +// forward incoming messages to the provided ws.Hub channels. +func NewBridge(hub *ws.Hub, cfg Config) *Bridge { + return &Bridge{cfg: cfg, hub: hub} +} + +// Start begins the connection loop in a background goroutine. +// Call Shutdown to stop it. +func (b *Bridge) Start(ctx context.Context) { + ctx, b.cancel = context.WithCancel(ctx) + go b.connectLoop(ctx) +} + +// Shutdown cleanly closes the bridge. +func (b *Bridge) Shutdown() { + if b.cancel != nil { + b.cancel() + } + b.mu.Lock() + defer b.mu.Unlock() + if b.conn != nil { + b.conn.Close() + b.conn = nil + } + b.connected = false +} + +// Connected reports whether the bridge has an active connection. +func (b *Bridge) Connected() bool { + b.mu.Lock() + defer b.mu.Unlock() + return b.connected +} + +// Send sends a message to the Laravel backend. 
+func (b *Bridge) Send(msg BridgeMessage) error { + b.mu.Lock() + defer b.mu.Unlock() + if b.conn == nil { + return fmt.Errorf("bridge: not connected") + } + msg.Timestamp = time.Now() + data, err := json.Marshal(msg) + if err != nil { + return fmt.Errorf("bridge: marshal failed: %w", err) + } + return b.conn.WriteMessage(websocket.TextMessage, data) +} + +// connectLoop reconnects to Laravel with exponential backoff. +func (b *Bridge) connectLoop(ctx context.Context) { + delay := b.cfg.ReconnectInterval + for { + select { + case <-ctx.Done(): + return + default: + } + + if err := b.dial(ctx); err != nil { + log.Printf("ide bridge: connect failed: %v", err) + select { + case <-ctx.Done(): + return + case <-time.After(delay): + } + delay = min(delay*2, b.cfg.MaxReconnectInterval) + continue + } + + // Reset backoff on successful connection + delay = b.cfg.ReconnectInterval + b.readLoop(ctx) + } +} + +func (b *Bridge) dial(ctx context.Context) error { + dialer := websocket.Dialer{ + HandshakeTimeout: 10 * time.Second, + } + conn, _, err := dialer.DialContext(ctx, b.cfg.LaravelWSURL, nil) + if err != nil { + return err + } + + b.mu.Lock() + b.conn = conn + b.connected = true + b.mu.Unlock() + + log.Printf("ide bridge: connected to %s", b.cfg.LaravelWSURL) + return nil +} + +func (b *Bridge) readLoop(ctx context.Context) { + defer func() { + b.mu.Lock() + if b.conn != nil { + b.conn.Close() + } + b.connected = false + b.mu.Unlock() + }() + + for { + select { + case <-ctx.Done(): + return + default: + } + + _, data, err := b.conn.ReadMessage() + if err != nil { + log.Printf("ide bridge: read error: %v", err) + return + } + + var msg BridgeMessage + if err := json.Unmarshal(data, &msg); err != nil { + log.Printf("ide bridge: unmarshal error: %v", err) + continue + } + + b.dispatch(msg) + } +} + +// dispatch routes an incoming message to the appropriate ws.Hub channel. +func (b *Bridge) dispatch(msg BridgeMessage) { + if b.hub == nil { + return + } + + wsMsg := ws.Message{ + Type: ws.TypeEvent, + Data: msg.Data, + } + + channel := msg.Channel + if channel == "" { + channel = "ide:" + msg.Type + } + + if err := b.hub.SendToChannel(channel, wsMsg); err != nil { + log.Printf("ide bridge: dispatch to %s failed: %v", channel, err) + } +} diff --git a/pkg/mcp/ide/bridge_test.go b/pkg/mcp/ide/bridge_test.go new file mode 100644 index 00000000..faae4dbc --- /dev/null +++ b/pkg/mcp/ide/bridge_test.go @@ -0,0 +1,237 @@ +package ide + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + "time" + + "github.com/gorilla/websocket" + "github.com/host-uk/core/pkg/ws" +) + +var testUpgrader = websocket.Upgrader{ + CheckOrigin: func(r *http.Request) bool { return true }, +} + +// echoServer creates a test WebSocket server that echoes messages back. 
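+// The handler upgrades each request and copies frames straight back until the
+// client closes, which is enough to exercise dial, Send, and the read loop.
+// Tests point the bridge at it roughly like this:
+//
+//	ts := echoServer(t)
+//	defer ts.Close()
+//	cfg := DefaultConfig()
+//	cfg.LaravelWSURL = wsURL(ts)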
+func echoServer(t *testing.T) *httptest.Server { + t.Helper() + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + conn, err := testUpgrader.Upgrade(w, r, nil) + if err != nil { + t.Logf("upgrade error: %v", err) + return + } + defer conn.Close() + for { + mt, data, err := conn.ReadMessage() + if err != nil { + break + } + if err := conn.WriteMessage(mt, data); err != nil { + break + } + } + })) +} + +func wsURL(ts *httptest.Server) string { + return "ws" + strings.TrimPrefix(ts.URL, "http") +} + +func TestBridge_Good_ConnectAndSend(t *testing.T) { + ts := echoServer(t) + defer ts.Close() + + hub := ws.NewHub() + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + go hub.Run(ctx) + + cfg := DefaultConfig() + cfg.LaravelWSURL = wsURL(ts) + cfg.ReconnectInterval = 100 * time.Millisecond + + bridge := NewBridge(hub, cfg) + bridge.Start(ctx) + + // Wait for connection + deadline := time.Now().Add(2 * time.Second) + for !bridge.Connected() && time.Now().Before(deadline) { + time.Sleep(50 * time.Millisecond) + } + if !bridge.Connected() { + t.Fatal("bridge did not connect within timeout") + } + + err := bridge.Send(BridgeMessage{ + Type: "test", + Data: "hello", + }) + if err != nil { + t.Fatalf("Send() failed: %v", err) + } +} + +func TestBridge_Good_Shutdown(t *testing.T) { + ts := echoServer(t) + defer ts.Close() + + hub := ws.NewHub() + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + go hub.Run(ctx) + + cfg := DefaultConfig() + cfg.LaravelWSURL = wsURL(ts) + cfg.ReconnectInterval = 100 * time.Millisecond + + bridge := NewBridge(hub, cfg) + bridge.Start(ctx) + + deadline := time.Now().Add(2 * time.Second) + for !bridge.Connected() && time.Now().Before(deadline) { + time.Sleep(50 * time.Millisecond) + } + + bridge.Shutdown() + if bridge.Connected() { + t.Error("bridge should be disconnected after Shutdown") + } +} + +func TestBridge_Bad_SendWithoutConnection(t *testing.T) { + hub := ws.NewHub() + cfg := DefaultConfig() + bridge := NewBridge(hub, cfg) + + err := bridge.Send(BridgeMessage{Type: "test"}) + if err == nil { + t.Error("expected error when sending without connection") + } +} + +func TestBridge_Good_MessageDispatch(t *testing.T) { + // Server that sends a message to the bridge on connect. + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + conn, err := testUpgrader.Upgrade(w, r, nil) + if err != nil { + return + } + defer conn.Close() + + msg := BridgeMessage{ + Type: "chat_response", + Channel: "chat:session-1", + Data: "hello from laravel", + } + data, _ := json.Marshal(msg) + conn.WriteMessage(websocket.TextMessage, data) + + // Keep connection open + for { + _, _, err := conn.ReadMessage() + if err != nil { + break + } + } + })) + defer ts.Close() + + hub := ws.NewHub() + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + go hub.Run(ctx) + + cfg := DefaultConfig() + cfg.LaravelWSURL = wsURL(ts) + cfg.ReconnectInterval = 100 * time.Millisecond + + bridge := NewBridge(hub, cfg) + bridge.Start(ctx) + + deadline := time.Now().Add(2 * time.Second) + for !bridge.Connected() && time.Now().Before(deadline) { + time.Sleep(50 * time.Millisecond) + } + if !bridge.Connected() { + t.Fatal("bridge did not connect within timeout") + } + + // Give time for the dispatched message to be processed. + time.Sleep(200 * time.Millisecond) + + // Verify hub stats — the message was dispatched (even without subscribers). 
+ // This confirms the dispatch path ran without error. +} + +func TestBridge_Good_Reconnect(t *testing.T) { + callCount := 0 + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callCount++ + conn, err := testUpgrader.Upgrade(w, r, nil) + if err != nil { + return + } + // Close immediately on first connection to force reconnect + if callCount == 1 { + conn.Close() + return + } + defer conn.Close() + for { + _, _, err := conn.ReadMessage() + if err != nil { + break + } + } + })) + defer ts.Close() + + hub := ws.NewHub() + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + go hub.Run(ctx) + + cfg := DefaultConfig() + cfg.LaravelWSURL = wsURL(ts) + cfg.ReconnectInterval = 100 * time.Millisecond + cfg.MaxReconnectInterval = 200 * time.Millisecond + + bridge := NewBridge(hub, cfg) + bridge.Start(ctx) + + // Wait long enough for a reconnect cycle + deadline := time.Now().Add(3 * time.Second) + for !bridge.Connected() && time.Now().Before(deadline) { + time.Sleep(50 * time.Millisecond) + } + if !bridge.Connected() { + t.Fatal("bridge did not reconnect within timeout") + } + if callCount < 2 { + t.Errorf("expected at least 2 connection attempts, got %d", callCount) + } +} + +func TestSubsystem_Good_Name(t *testing.T) { + sub := New(nil) + if sub.Name() != "ide" { + t.Errorf("expected name 'ide', got %q", sub.Name()) + } +} + +func TestSubsystem_Good_NilHub(t *testing.T) { + sub := New(nil) + if sub.Bridge() != nil { + t.Error("expected nil bridge when hub is nil") + } + // Shutdown should not panic + if err := sub.Shutdown(context.Background()); err != nil { + t.Errorf("Shutdown with nil bridge failed: %v", err) + } +} diff --git a/pkg/mcp/ide/config.go b/pkg/mcp/ide/config.go new file mode 100644 index 00000000..d501c090 --- /dev/null +++ b/pkg/mcp/ide/config.go @@ -0,0 +1,48 @@ +// Package ide provides an MCP subsystem that bridges the desktop IDE to +// a Laravel core-agentic backend over WebSocket. +package ide + +import "time" + +// Config holds connection and workspace settings for the IDE subsystem. +type Config struct { + // LaravelWSURL is the WebSocket endpoint for the Laravel core-agentic backend. + LaravelWSURL string + + // WorkspaceRoot is the local path used as the default workspace context. + WorkspaceRoot string + + // ReconnectInterval controls how long to wait between reconnect attempts. + ReconnectInterval time.Duration + + // MaxReconnectInterval caps exponential backoff for reconnection. + MaxReconnectInterval time.Duration +} + +// DefaultConfig returns sensible defaults for local development. +func DefaultConfig() Config { + return Config{ + LaravelWSURL: "ws://localhost:9876/ws", + WorkspaceRoot: ".", + ReconnectInterval: 2 * time.Second, + MaxReconnectInterval: 30 * time.Second, + } +} + +// Option configures the IDE subsystem. +type Option func(*Config) + +// WithLaravelURL sets the Laravel WebSocket endpoint. +func WithLaravelURL(url string) Option { + return func(c *Config) { c.LaravelWSURL = url } +} + +// WithWorkspaceRoot sets the workspace root directory. +func WithWorkspaceRoot(root string) Option { + return func(c *Config) { c.WorkspaceRoot = root } +} + +// WithReconnectInterval sets the base reconnect interval. 
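+// It composes with the other options in the usual functional-options style,
+// for example (endpoint shown is illustrative):
+//
+//	sub := New(hub, WithLaravelURL("ws://127.0.0.1:9876/ws"), WithReconnectInterval(5*time.Second))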
+func WithReconnectInterval(d time.Duration) Option { + return func(c *Config) { c.ReconnectInterval = d } +} diff --git a/pkg/mcp/ide/ide.go b/pkg/mcp/ide/ide.go new file mode 100644 index 00000000..f44b91a2 --- /dev/null +++ b/pkg/mcp/ide/ide.go @@ -0,0 +1,57 @@ +package ide + +import ( + "context" + + "github.com/host-uk/core/pkg/ws" + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// Subsystem implements mcp.Subsystem and mcp.SubsystemWithShutdown for the IDE. +type Subsystem struct { + cfg Config + bridge *Bridge + hub *ws.Hub +} + +// New creates an IDE subsystem. The ws.Hub is used for real-time forwarding; +// pass nil if headless (tools still work but real-time streaming is disabled). +func New(hub *ws.Hub, opts ...Option) *Subsystem { + cfg := DefaultConfig() + for _, opt := range opts { + opt(&cfg) + } + var bridge *Bridge + if hub != nil { + bridge = NewBridge(hub, cfg) + } + return &Subsystem{cfg: cfg, bridge: bridge, hub: hub} +} + +// Name implements mcp.Subsystem. +func (s *Subsystem) Name() string { return "ide" } + +// RegisterTools implements mcp.Subsystem. +func (s *Subsystem) RegisterTools(server *mcp.Server) { + s.registerChatTools(server) + s.registerBuildTools(server) + s.registerDashboardTools(server) +} + +// Shutdown implements mcp.SubsystemWithShutdown. +func (s *Subsystem) Shutdown(_ context.Context) error { + if s.bridge != nil { + s.bridge.Shutdown() + } + return nil +} + +// Bridge returns the Laravel WebSocket bridge (may be nil in headless mode). +func (s *Subsystem) Bridge() *Bridge { return s.bridge } + +// StartBridge begins the background connection to the Laravel backend. +func (s *Subsystem) StartBridge(ctx context.Context) { + if s.bridge != nil { + s.bridge.Start(ctx) + } +} diff --git a/pkg/mcp/ide/tools_build.go b/pkg/mcp/ide/tools_build.go new file mode 100644 index 00000000..4d258832 --- /dev/null +++ b/pkg/mcp/ide/tools_build.go @@ -0,0 +1,109 @@ +package ide + +import ( + "context" + "fmt" + "time" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// Build tool input/output types. + +// BuildStatusInput is the input for ide_build_status. +type BuildStatusInput struct { + BuildID string `json:"buildId"` +} + +// BuildInfo represents a single build. +type BuildInfo struct { + ID string `json:"id"` + Repo string `json:"repo"` + Branch string `json:"branch"` + Status string `json:"status"` + Duration string `json:"duration,omitempty"` + StartedAt time.Time `json:"startedAt"` +} + +// BuildStatusOutput is the output for ide_build_status. +type BuildStatusOutput struct { + Build BuildInfo `json:"build"` +} + +// BuildListInput is the input for ide_build_list. +type BuildListInput struct { + Repo string `json:"repo,omitempty"` + Limit int `json:"limit,omitempty"` +} + +// BuildListOutput is the output for ide_build_list. +type BuildListOutput struct { + Builds []BuildInfo `json:"builds"` +} + +// BuildLogsInput is the input for ide_build_logs. +type BuildLogsInput struct { + BuildID string `json:"buildId"` + Tail int `json:"tail,omitempty"` +} + +// BuildLogsOutput is the output for ide_build_logs. 
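+// In the current handler this is a placeholder: Lines is empty and real log
+// output is expected to arrive over the WebSocket bridge once the Laravel
+// backend responds.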
+type BuildLogsOutput struct { + BuildID string `json:"buildId"` + Lines []string `json:"lines"` +} + +func (s *Subsystem) registerBuildTools(server *mcp.Server) { + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_build_status", + Description: "Get the status of a specific build", + }, s.buildStatus) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_build_list", + Description: "List recent builds, optionally filtered by repository", + }, s.buildList) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_build_logs", + Description: "Retrieve log output for a build", + }, s.buildLogs) +} + +func (s *Subsystem) buildStatus(_ context.Context, _ *mcp.CallToolRequest, input BuildStatusInput) (*mcp.CallToolResult, BuildStatusOutput, error) { + if s.bridge == nil { + return nil, BuildStatusOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "build_status", + Data: map[string]any{"buildId": input.BuildID}, + }) + return nil, BuildStatusOutput{ + Build: BuildInfo{ID: input.BuildID, Status: "unknown"}, + }, nil +} + +func (s *Subsystem) buildList(_ context.Context, _ *mcp.CallToolRequest, input BuildListInput) (*mcp.CallToolResult, BuildListOutput, error) { + if s.bridge == nil { + return nil, BuildListOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "build_list", + Data: map[string]any{"repo": input.Repo, "limit": input.Limit}, + }) + return nil, BuildListOutput{Builds: []BuildInfo{}}, nil +} + +func (s *Subsystem) buildLogs(_ context.Context, _ *mcp.CallToolRequest, input BuildLogsInput) (*mcp.CallToolResult, BuildLogsOutput, error) { + if s.bridge == nil { + return nil, BuildLogsOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "build_logs", + Data: map[string]any{"buildId": input.BuildID, "tail": input.Tail}, + }) + return nil, BuildLogsOutput{ + BuildID: input.BuildID, + Lines: []string{}, + }, nil +} diff --git a/pkg/mcp/ide/tools_chat.go b/pkg/mcp/ide/tools_chat.go new file mode 100644 index 00000000..8a00477e --- /dev/null +++ b/pkg/mcp/ide/tools_chat.go @@ -0,0 +1,191 @@ +package ide + +import ( + "context" + "fmt" + "time" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// Chat tool input/output types. + +// ChatSendInput is the input for ide_chat_send. +type ChatSendInput struct { + SessionID string `json:"sessionId"` + Message string `json:"message"` +} + +// ChatSendOutput is the output for ide_chat_send. +type ChatSendOutput struct { + Sent bool `json:"sent"` + SessionID string `json:"sessionId"` + Timestamp time.Time `json:"timestamp"` +} + +// ChatHistoryInput is the input for ide_chat_history. +type ChatHistoryInput struct { + SessionID string `json:"sessionId"` + Limit int `json:"limit,omitempty"` +} + +// ChatMessage represents a single message in history. +type ChatMessage struct { + Role string `json:"role"` + Content string `json:"content"` + Timestamp time.Time `json:"timestamp"` +} + +// ChatHistoryOutput is the output for ide_chat_history. +type ChatHistoryOutput struct { + SessionID string `json:"sessionId"` + Messages []ChatMessage `json:"messages"` +} + +// SessionListInput is the input for ide_session_list. +type SessionListInput struct{} + +// Session represents an agent session. +type Session struct { + ID string `json:"id"` + Name string `json:"name"` + Status string `json:"status"` + CreatedAt time.Time `json:"createdAt"` +} + +// SessionListOutput is the output for ide_session_list. 
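+// Sessions is empty in the current placeholder handler; session data is
+// delivered asynchronously over the bridge rather than in the tool result.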
+type SessionListOutput struct { + Sessions []Session `json:"sessions"` +} + +// SessionCreateInput is the input for ide_session_create. +type SessionCreateInput struct { + Name string `json:"name"` +} + +// SessionCreateOutput is the output for ide_session_create. +type SessionCreateOutput struct { + Session Session `json:"session"` +} + +// PlanStatusInput is the input for ide_plan_status. +type PlanStatusInput struct { + SessionID string `json:"sessionId"` +} + +// PlanStep is a single step in an agent plan. +type PlanStep struct { + Name string `json:"name"` + Status string `json:"status"` +} + +// PlanStatusOutput is the output for ide_plan_status. +type PlanStatusOutput struct { + SessionID string `json:"sessionId"` + Status string `json:"status"` + Steps []PlanStep `json:"steps"` +} + +func (s *Subsystem) registerChatTools(server *mcp.Server) { + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_chat_send", + Description: "Send a message to an agent chat session", + }, s.chatSend) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_chat_history", + Description: "Retrieve message history for a chat session", + }, s.chatHistory) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_session_list", + Description: "List active agent sessions", + }, s.sessionList) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_session_create", + Description: "Create a new agent session", + }, s.sessionCreate) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_plan_status", + Description: "Get the current plan status for a session", + }, s.planStatus) +} + +func (s *Subsystem) chatSend(_ context.Context, _ *mcp.CallToolRequest, input ChatSendInput) (*mcp.CallToolResult, ChatSendOutput, error) { + if s.bridge == nil { + return nil, ChatSendOutput{}, fmt.Errorf("bridge not available") + } + err := s.bridge.Send(BridgeMessage{ + Type: "chat_send", + Channel: "chat:" + input.SessionID, + SessionID: input.SessionID, + Data: input.Message, + }) + if err != nil { + return nil, ChatSendOutput{}, fmt.Errorf("failed to send message: %w", err) + } + return nil, ChatSendOutput{ + Sent: true, + SessionID: input.SessionID, + Timestamp: time.Now(), + }, nil +} + +func (s *Subsystem) chatHistory(_ context.Context, _ *mcp.CallToolRequest, input ChatHistoryInput) (*mcp.CallToolResult, ChatHistoryOutput, error) { + if s.bridge == nil { + return nil, ChatHistoryOutput{}, fmt.Errorf("bridge not available") + } + // Request history via bridge; for now return placeholder indicating the + // request was forwarded. Real data arrives via WebSocket subscription. 
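+	// Any response the backend tags with a Channel (for example "chat:<sessionId>")
+	// is routed by the bridge's dispatch to the matching ws.Hub channel;
+	// untagged responses land on "ide:<type>".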
+ _ = s.bridge.Send(BridgeMessage{ + Type: "chat_history", + SessionID: input.SessionID, + Data: map[string]any{"limit": input.Limit}, + }) + return nil, ChatHistoryOutput{ + SessionID: input.SessionID, + Messages: []ChatMessage{}, + }, nil +} + +func (s *Subsystem) sessionList(_ context.Context, _ *mcp.CallToolRequest, _ SessionListInput) (*mcp.CallToolResult, SessionListOutput, error) { + if s.bridge == nil { + return nil, SessionListOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{Type: "session_list"}) + return nil, SessionListOutput{Sessions: []Session{}}, nil +} + +func (s *Subsystem) sessionCreate(_ context.Context, _ *mcp.CallToolRequest, input SessionCreateInput) (*mcp.CallToolResult, SessionCreateOutput, error) { + if s.bridge == nil { + return nil, SessionCreateOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "session_create", + Data: map[string]any{"name": input.Name}, + }) + return nil, SessionCreateOutput{ + Session: Session{ + Name: input.Name, + Status: "creating", + CreatedAt: time.Now(), + }, + }, nil +} + +func (s *Subsystem) planStatus(_ context.Context, _ *mcp.CallToolRequest, input PlanStatusInput) (*mcp.CallToolResult, PlanStatusOutput, error) { + if s.bridge == nil { + return nil, PlanStatusOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "plan_status", + SessionID: input.SessionID, + }) + return nil, PlanStatusOutput{ + SessionID: input.SessionID, + Status: "unknown", + Steps: []PlanStep{}, + }, nil +} diff --git a/pkg/mcp/ide/tools_dashboard.go b/pkg/mcp/ide/tools_dashboard.go new file mode 100644 index 00000000..a84e4911 --- /dev/null +++ b/pkg/mcp/ide/tools_dashboard.go @@ -0,0 +1,127 @@ +package ide + +import ( + "context" + "fmt" + "time" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// Dashboard tool input/output types. + +// DashboardOverviewInput is the input for ide_dashboard_overview. +type DashboardOverviewInput struct{} + +// DashboardOverview contains high-level platform stats. +type DashboardOverview struct { + Repos int `json:"repos"` + Services int `json:"services"` + ActiveSessions int `json:"activeSessions"` + RecentBuilds int `json:"recentBuilds"` + BridgeOnline bool `json:"bridgeOnline"` +} + +// DashboardOverviewOutput is the output for ide_dashboard_overview. +type DashboardOverviewOutput struct { + Overview DashboardOverview `json:"overview"` +} + +// DashboardActivityInput is the input for ide_dashboard_activity. +type DashboardActivityInput struct { + Limit int `json:"limit,omitempty"` +} + +// ActivityEvent represents a single activity feed item. +type ActivityEvent struct { + Type string `json:"type"` + Message string `json:"message"` + Timestamp time.Time `json:"timestamp"` +} + +// DashboardActivityOutput is the output for ide_dashboard_activity. +type DashboardActivityOutput struct { + Events []ActivityEvent `json:"events"` +} + +// DashboardMetricsInput is the input for ide_dashboard_metrics. +type DashboardMetricsInput struct { + Period string `json:"period,omitempty"` // "1h", "24h", "7d" +} + +// DashboardMetrics contains aggregate metrics. 
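+// All counters are zero in the current placeholder handler; the enclosing
+// DashboardMetricsOutput echoes the requested period, which defaults to "24h"
+// when omitted.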
+type DashboardMetrics struct { + BuildsTotal int `json:"buildsTotal"` + BuildsSuccess int `json:"buildsSuccess"` + BuildsFailed int `json:"buildsFailed"` + AvgBuildTime string `json:"avgBuildTime"` + AgentSessions int `json:"agentSessions"` + MessagesTotal int `json:"messagesTotal"` + SuccessRate float64 `json:"successRate"` +} + +// DashboardMetricsOutput is the output for ide_dashboard_metrics. +type DashboardMetricsOutput struct { + Period string `json:"period"` + Metrics DashboardMetrics `json:"metrics"` +} + +func (s *Subsystem) registerDashboardTools(server *mcp.Server) { + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_dashboard_overview", + Description: "Get a high-level overview of the platform (repos, services, sessions, builds)", + }, s.dashboardOverview) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_dashboard_activity", + Description: "Get the recent activity feed", + }, s.dashboardActivity) + + mcp.AddTool(server, &mcp.Tool{ + Name: "ide_dashboard_metrics", + Description: "Get aggregate build and agent metrics for a time period", + }, s.dashboardMetrics) +} + +func (s *Subsystem) dashboardOverview(_ context.Context, _ *mcp.CallToolRequest, _ DashboardOverviewInput) (*mcp.CallToolResult, DashboardOverviewOutput, error) { + connected := s.bridge != nil && s.bridge.Connected() + + if s.bridge != nil { + _ = s.bridge.Send(BridgeMessage{Type: "dashboard_overview"}) + } + + return nil, DashboardOverviewOutput{ + Overview: DashboardOverview{ + BridgeOnline: connected, + }, + }, nil +} + +func (s *Subsystem) dashboardActivity(_ context.Context, _ *mcp.CallToolRequest, input DashboardActivityInput) (*mcp.CallToolResult, DashboardActivityOutput, error) { + if s.bridge == nil { + return nil, DashboardActivityOutput{}, fmt.Errorf("bridge not available") + } + _ = s.bridge.Send(BridgeMessage{ + Type: "dashboard_activity", + Data: map[string]any{"limit": input.Limit}, + }) + return nil, DashboardActivityOutput{Events: []ActivityEvent{}}, nil +} + +func (s *Subsystem) dashboardMetrics(_ context.Context, _ *mcp.CallToolRequest, input DashboardMetricsInput) (*mcp.CallToolResult, DashboardMetricsOutput, error) { + if s.bridge == nil { + return nil, DashboardMetricsOutput{}, fmt.Errorf("bridge not available") + } + period := input.Period + if period == "" { + period = "24h" + } + _ = s.bridge.Send(BridgeMessage{ + Type: "dashboard_metrics", + Data: map[string]any{"period": period}, + }) + return nil, DashboardMetricsOutput{ + Period: period, + Metrics: DashboardMetrics{}, + }, nil +} diff --git a/pkg/mcp/subsystem.go b/pkg/mcp/subsystem.go new file mode 100644 index 00000000..56bd6f74 --- /dev/null +++ b/pkg/mcp/subsystem.go @@ -0,0 +1,32 @@ +package mcp + +import ( + "context" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// Subsystem registers additional MCP tools at startup. +// Implementations should be safe to call concurrently. +type Subsystem interface { + // Name returns a human-readable identifier for logging. + Name() string + + // RegisterTools adds tools to the MCP server during initialisation. + RegisterTools(server *mcp.Server) +} + +// SubsystemWithShutdown extends Subsystem with graceful cleanup. +type SubsystemWithShutdown interface { + Subsystem + Shutdown(ctx context.Context) error +} + +// WithSubsystem registers a subsystem whose tools will be added +// after the built-in tools during New(). 
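+// A typical wiring, using the IDE subsystem added in this patch as an example
+// (hub and ctx come from the caller; error handling is sketched):
+//
+//	svc, err := New(WithSubsystem(ide.New(hub)))
+//	if err != nil {
+//		return err
+//	}
+//	defer svc.Shutdown(ctx)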
+func WithSubsystem(sub Subsystem) Option { + return func(s *Service) error { + s.subsystems = append(s.subsystems, sub) + return nil + } +} diff --git a/pkg/mcp/subsystem_test.go b/pkg/mcp/subsystem_test.go new file mode 100644 index 00000000..5e823f75 --- /dev/null +++ b/pkg/mcp/subsystem_test.go @@ -0,0 +1,114 @@ +package mcp + +import ( + "context" + "testing" + + "github.com/modelcontextprotocol/go-sdk/mcp" +) + +// stubSubsystem is a minimal Subsystem for testing. +type stubSubsystem struct { + name string + toolsRegistered bool +} + +func (s *stubSubsystem) Name() string { return s.name } + +func (s *stubSubsystem) RegisterTools(server *mcp.Server) { + s.toolsRegistered = true +} + +// shutdownSubsystem tracks Shutdown calls. +type shutdownSubsystem struct { + stubSubsystem + shutdownCalled bool + shutdownErr error +} + +func (s *shutdownSubsystem) Shutdown(_ context.Context) error { + s.shutdownCalled = true + return s.shutdownErr +} + +func TestWithSubsystem_Good_Registration(t *testing.T) { + sub := &stubSubsystem{name: "test-sub"} + svc, err := New(WithSubsystem(sub)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + + if len(svc.Subsystems()) != 1 { + t.Fatalf("expected 1 subsystem, got %d", len(svc.Subsystems())) + } + if svc.Subsystems()[0].Name() != "test-sub" { + t.Errorf("expected name 'test-sub', got %q", svc.Subsystems()[0].Name()) + } +} + +func TestWithSubsystem_Good_ToolsRegistered(t *testing.T) { + sub := &stubSubsystem{name: "tools-sub"} + _, err := New(WithSubsystem(sub)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + if !sub.toolsRegistered { + t.Error("expected RegisterTools to have been called") + } +} + +func TestWithSubsystem_Good_MultipleSubsystems(t *testing.T) { + sub1 := &stubSubsystem{name: "sub-1"} + sub2 := &stubSubsystem{name: "sub-2"} + svc, err := New(WithSubsystem(sub1), WithSubsystem(sub2)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + if len(svc.Subsystems()) != 2 { + t.Fatalf("expected 2 subsystems, got %d", len(svc.Subsystems())) + } + if !sub1.toolsRegistered || !sub2.toolsRegistered { + t.Error("expected all subsystems to have RegisterTools called") + } +} + +func TestSubsystemShutdown_Good(t *testing.T) { + sub := &shutdownSubsystem{stubSubsystem: stubSubsystem{name: "shutdown-sub"}} + svc, err := New(WithSubsystem(sub)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + if err := svc.Shutdown(context.Background()); err != nil { + t.Fatalf("Shutdown() failed: %v", err) + } + if !sub.shutdownCalled { + t.Error("expected Shutdown to have been called") + } +} + +func TestSubsystemShutdown_Bad_Error(t *testing.T) { + sub := &shutdownSubsystem{ + stubSubsystem: stubSubsystem{name: "fail-sub"}, + shutdownErr: context.DeadlineExceeded, + } + svc, err := New(WithSubsystem(sub)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + err = svc.Shutdown(context.Background()) + if err == nil { + t.Fatal("expected error from Shutdown") + } +} + +func TestSubsystemShutdown_Good_NoShutdownInterface(t *testing.T) { + // A plain Subsystem (without Shutdown) should not cause errors. 
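+	// Service.Shutdown should skip subsystems that do not implement
+	// SubsystemWithShutdown rather than treating them as an error.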
+ sub := &stubSubsystem{name: "plain-sub"} + svc, err := New(WithSubsystem(sub)) + if err != nil { + t.Fatalf("New() failed: %v", err) + } + if err := svc.Shutdown(context.Background()); err != nil { + t.Fatalf("Shutdown() should succeed for non-shutdown subsystem: %v", err) + } +} From cdd97441e97c8f6a14c23f1d0e6d24b2e5ffe00f Mon Sep 17 00:00:00 2001 From: Snider Date: Sun, 8 Feb 2026 15:17:18 +0000 Subject: [PATCH 10/10] updates --- cmd/bugseti/frontend/src/app/app.routes.ts | 4 ++++ cmd/bugseti/frontend/src/app/tray/tray.component.ts | 7 +++++++ 2 files changed, 11 insertions(+) diff --git a/cmd/bugseti/frontend/src/app/app.routes.ts b/cmd/bugseti/frontend/src/app/app.routes.ts index 8367d07a..76725edb 100644 --- a/cmd/bugseti/frontend/src/app/app.routes.ts +++ b/cmd/bugseti/frontend/src/app/app.routes.ts @@ -21,5 +21,9 @@ export const routes: Routes = [ { path: 'onboarding', loadComponent: () => import('./onboarding/onboarding.component').then(m => m.OnboardingComponent) + }, + { + path: 'jellyfin', + loadComponent: () => import('./jellyfin/jellyfin.component').then(m => m.JellyfinComponent) } ]; diff --git a/cmd/bugseti/frontend/src/app/tray/tray.component.ts b/cmd/bugseti/frontend/src/app/tray/tray.component.ts index 4a7ebec8..f6232e90 100644 --- a/cmd/bugseti/frontend/src/app/tray/tray.component.ts +++ b/cmd/bugseti/frontend/src/app/tray/tray.component.ts @@ -66,6 +66,9 @@ interface TrayStatus {