Compare commits
2 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 69220079b6 | |||
|
|
6806b20651 |
113 changed files with 424 additions and 8335 deletions
|
|
@ -1,17 +1,15 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"os"
|
"os"
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
|
coreio "forge.lthn.ai/core/go-io"
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
coreerr "forge.lthn.ai/core/go-log"
|
||||||
cataloglint "forge.lthn.ai/core/lint"
|
lint "forge.lthn.ai/core/lint"
|
||||||
lintpkg "forge.lthn.ai/core/lint/pkg/lint"
|
lintpkg "forge.lthn.ai/core/lint/pkg/lint"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -21,258 +19,37 @@ func main() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func addLintCommands(root *cli.Command) {
|
func addLintCommands(root *cli.Command) {
|
||||||
addRFCCommands(root)
|
|
||||||
|
|
||||||
lintCmd := cli.NewGroup("lint", "Pattern-based code linter", "")
|
lintCmd := cli.NewGroup("lint", "Pattern-based code linter", "")
|
||||||
lintCmd.AddCommand(newCheckCommand(), newCatalogCommand())
|
|
||||||
addRFCCommands(lintCmd)
|
|
||||||
|
|
||||||
root.AddCommand(lintCmd)
|
// ── check ──────────────────────────────────────────────────────────────
|
||||||
}
|
|
||||||
|
|
||||||
func addRFCCommands(parent *cli.Command) {
|
|
||||||
parent.AddCommand(
|
|
||||||
newRunCommand("run", "Run configured linters", lintpkg.RunInput{}),
|
|
||||||
newDetectCommand("detect", "Detect project languages"),
|
|
||||||
newToolsCommand("tools", "List supported linter tools"),
|
|
||||||
newInitCommand("init", "Generate .core/lint.yaml"),
|
|
||||||
newRunCommand("go", "Run Go linters", lintpkg.RunInput{Lang: "go"}),
|
|
||||||
newRunCommand("php", "Run PHP linters", lintpkg.RunInput{Lang: "php"}),
|
|
||||||
newRunCommand("js", "Run JS/TS linters", lintpkg.RunInput{Lang: "js"}),
|
|
||||||
newRunCommand("python", "Run Python linters", lintpkg.RunInput{Lang: "python"}),
|
|
||||||
newRunCommand("security", "Run security linters", lintpkg.RunInput{Category: "security"}),
|
|
||||||
newRunCommand("compliance", "Run compliance linters", lintpkg.RunInput{Category: "compliance"}),
|
|
||||||
newHookCommand(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func newRunCommand(commandName string, summary string, defaults lintpkg.RunInput) *cli.Command {
|
|
||||||
var (
|
var (
|
||||||
outputFormat string
|
checkFormat string
|
||||||
configPath string
|
checkLang string
|
||||||
scheduleName string
|
checkSeverity string
|
||||||
failOnLevel string
|
|
||||||
categoryName string
|
|
||||||
languageName string
|
|
||||||
filePaths []string
|
|
||||||
hookMode bool
|
|
||||||
ciMode bool
|
|
||||||
sbomMode bool
|
|
||||||
)
|
)
|
||||||
|
|
||||||
command := cli.NewCommand(commandName, summary, "", func(command *cli.Command, args []string) error {
|
checkCmd := cli.NewCommand("check", "Scan files for pattern matches", "", func(cmd *cli.Command, args []string) error {
|
||||||
input := defaults
|
cat, err := lint.LoadEmbeddedCatalog()
|
||||||
input.Output = outputFormat
|
|
||||||
input.Config = configPath
|
|
||||||
input.Schedule = scheduleName
|
|
||||||
input.FailOn = failOnLevel
|
|
||||||
input.Category = categoryName
|
|
||||||
input.Lang = languageName
|
|
||||||
input.Files = filePaths
|
|
||||||
input.Hook = hookMode
|
|
||||||
input.CI = ciMode
|
|
||||||
input.SBOM = sbomMode
|
|
||||||
|
|
||||||
if len(args) > 0 {
|
|
||||||
input.Path = args[0]
|
|
||||||
}
|
|
||||||
if input.Path == "" {
|
|
||||||
input.Path = "."
|
|
||||||
}
|
|
||||||
|
|
||||||
resolvedOutputFormat, err := lintpkg.ResolveRunOutputFormat(input)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
input.Output = resolvedOutputFormat
|
|
||||||
|
|
||||||
service := lintpkg.NewService()
|
|
||||||
report, err := service.Run(context.Background(), input)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := writeReport(command.OutOrStdout(), input.Output, report); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if !report.Summary.Passed {
|
|
||||||
return coreerr.E(
|
|
||||||
"cmd."+commandName,
|
|
||||||
fmt.Sprintf(
|
|
||||||
"lint failed (fail-on=%s): %d error(s), %d warning(s), %d info finding(s)",
|
|
||||||
input.FailOn,
|
|
||||||
report.Summary.Errors,
|
|
||||||
report.Summary.Warnings,
|
|
||||||
report.Summary.Info,
|
|
||||||
),
|
|
||||||
nil,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
cli.StringFlag(command, &outputFormat, "output", "o", defaults.Output, "Output format: json, text, github, sarif")
|
|
||||||
cli.StringFlag(command, &configPath, "config", "c", defaults.Config, "Config path (default: .core/lint.yaml)")
|
|
||||||
cli.StringFlag(command, &scheduleName, "schedule", "", "", "Run a named schedule from the config")
|
|
||||||
cli.StringFlag(command, &failOnLevel, "fail-on", "", defaults.FailOn, "Fail threshold: error, warning, info")
|
|
||||||
cli.StringFlag(command, &categoryName, "category", "", defaults.Category, "Restrict to one category")
|
|
||||||
cli.StringFlag(command, &languageName, "lang", "l", defaults.Lang, "Restrict to one language")
|
|
||||||
cli.StringSliceFlag(command, &filePaths, "files", "", defaults.Files, "Restrict scanning to specific files")
|
|
||||||
cli.BoolFlag(command, &hookMode, "hook", "", defaults.Hook, "Run in pre-commit mode against staged files")
|
|
||||||
cli.BoolFlag(command, &ciMode, "ci", "", defaults.CI, "GitHub Actions mode (github annotations)")
|
|
||||||
cli.BoolFlag(command, &sbomMode, "sbom", "", defaults.SBOM, "Enable compliance/SBOM tools")
|
|
||||||
|
|
||||||
return command
|
|
||||||
}
|
|
||||||
|
|
||||||
func newDetectCommand(commandName string, summary string) *cli.Command {
|
|
||||||
var output string
|
|
||||||
|
|
||||||
command := cli.NewCommand(commandName, summary, "", func(command *cli.Command, args []string) error {
|
|
||||||
projectPath := "."
|
|
||||||
if len(args) > 0 {
|
|
||||||
projectPath = args[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
languages := lintpkg.Detect(projectPath)
|
|
||||||
switch output {
|
|
||||||
case "", "text":
|
|
||||||
for _, language := range languages {
|
|
||||||
fmt.Fprintln(command.OutOrStdout(), language)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
case "json":
|
|
||||||
return writeIndentedJSON(command.OutOrStdout(), languages)
|
|
||||||
default:
|
|
||||||
return coreerr.E("cmd.detect", "unsupported output format "+output, nil)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
cli.StringFlag(command, &output, "output", "o", "text", "Output format: text, json")
|
|
||||||
return command
|
|
||||||
}
|
|
||||||
|
|
||||||
func newToolsCommand(commandName string, summary string) *cli.Command {
|
|
||||||
var output string
|
|
||||||
var languageFilter string
|
|
||||||
|
|
||||||
command := cli.NewCommand(commandName, summary, "", func(command *cli.Command, args []string) error {
|
|
||||||
service := lintpkg.NewService()
|
|
||||||
|
|
||||||
var languages []string
|
|
||||||
if languageFilter != "" {
|
|
||||||
languages = []string{languageFilter}
|
|
||||||
}
|
|
||||||
|
|
||||||
tools := service.Tools(languages)
|
|
||||||
switch output {
|
|
||||||
case "", "text":
|
|
||||||
for _, tool := range tools {
|
|
||||||
status := "missing"
|
|
||||||
if tool.Available {
|
|
||||||
status = "available"
|
|
||||||
}
|
|
||||||
line := fmt.Sprintf("%-14s [%-11s] %s langs=%s", tool.Name, tool.Category, status, strings.Join(tool.Languages, ","))
|
|
||||||
if tool.Entitlement != "" {
|
|
||||||
line += " entitlement=" + tool.Entitlement
|
|
||||||
}
|
|
||||||
fmt.Fprintln(command.OutOrStdout(), line)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
case "json":
|
|
||||||
return writeIndentedJSON(command.OutOrStdout(), tools)
|
|
||||||
default:
|
|
||||||
return coreerr.E("cmd.tools", "unsupported output format "+output, nil)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
cli.StringFlag(command, &output, "output", "o", "text", "Output format: text, json")
|
|
||||||
cli.StringFlag(command, &languageFilter, "lang", "l", "", "Filter by language")
|
|
||||||
return command
|
|
||||||
}
|
|
||||||
|
|
||||||
func newInitCommand(commandName string, summary string) *cli.Command {
|
|
||||||
var force bool
|
|
||||||
|
|
||||||
command := cli.NewCommand(commandName, summary, "", func(command *cli.Command, args []string) error {
|
|
||||||
projectPath := "."
|
|
||||||
if len(args) > 0 {
|
|
||||||
projectPath = args[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
service := lintpkg.NewService()
|
|
||||||
writtenPath, err := service.WriteDefaultConfig(projectPath, force)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fmt.Fprintln(command.OutOrStdout(), writtenPath)
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
cli.BoolFlag(command, &force, "force", "f", false, "Overwrite an existing config")
|
|
||||||
return command
|
|
||||||
}
|
|
||||||
|
|
||||||
func newHookCommand() *cli.Command {
|
|
||||||
hookCmd := cli.NewGroup("hook", "Install or remove the git pre-commit hook", "")
|
|
||||||
|
|
||||||
installCmd := cli.NewCommand("install", "Install the pre-commit hook", "", func(command *cli.Command, args []string) error {
|
|
||||||
projectPath := "."
|
|
||||||
if len(args) > 0 {
|
|
||||||
projectPath = args[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
service := lintpkg.NewService()
|
|
||||||
if err := service.InstallHook(projectPath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fmt.Fprintln(command.OutOrStdout(), "installed")
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
removeCmd := cli.NewCommand("remove", "Remove the pre-commit hook", "", func(command *cli.Command, args []string) error {
|
|
||||||
projectPath := "."
|
|
||||||
if len(args) > 0 {
|
|
||||||
projectPath = args[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
service := lintpkg.NewService()
|
|
||||||
if err := service.RemoveHook(projectPath); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
fmt.Fprintln(command.OutOrStdout(), "removed")
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
hookCmd.AddCommand(installCmd, removeCmd)
|
|
||||||
return hookCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func newCheckCommand() *cli.Command {
|
|
||||||
var (
|
|
||||||
format string
|
|
||||||
language string
|
|
||||||
severity string
|
|
||||||
)
|
|
||||||
|
|
||||||
command := cli.NewCommand("check", "Scan files for pattern matches", "", func(command *cli.Command, args []string) error {
|
|
||||||
catalog, err := cataloglint.LoadEmbeddedCatalog()
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("cmd.check", "loading catalog", err)
|
return coreerr.E("cmd.check", "loading catalog", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
rules := catalog.Rules
|
rules := cat.Rules
|
||||||
if language != "" {
|
|
||||||
rules = catalog.ForLanguage(language)
|
// Filter by language if specified.
|
||||||
|
if checkLang != "" {
|
||||||
|
rules = cat.ForLanguage(checkLang)
|
||||||
if len(rules) == 0 {
|
if len(rules) == 0 {
|
||||||
fmt.Fprintf(os.Stderr, "no rules for language %q\n", language)
|
fmt.Fprintf(os.Stderr, "no rules for language %q\n", checkLang)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if severity != "" {
|
|
||||||
filtered := (&lintpkg.Catalog{Rules: rules}).AtSeverity(severity)
|
// Filter by severity threshold if specified.
|
||||||
|
if checkSeverity != "" {
|
||||||
|
filtered := (&lintpkg.Catalog{Rules: rules}).AtSeverity(checkSeverity)
|
||||||
if len(filtered) == 0 {
|
if len(filtered) == 0 {
|
||||||
fmt.Fprintf(os.Stderr, "no rules at severity %q or above\n", severity)
|
fmt.Fprintf(os.Stderr, "no rules at severity %q or above\n", checkSeverity)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
rules = filtered
|
rules = filtered
|
||||||
|
|
@ -288,176 +65,110 @@ func newCheckCommand() *cli.Command {
|
||||||
paths = []string{"."}
|
paths = []string{"."}
|
||||||
}
|
}
|
||||||
|
|
||||||
var findings []lintpkg.Finding
|
var allFindings []lintpkg.Finding
|
||||||
for _, path := range paths {
|
for _, p := range paths {
|
||||||
info, err := os.Stat(path)
|
info, err := coreio.Local.Stat(p)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("cmd.check", "stat "+path, err)
|
return coreerr.E("cmd.check", "stat "+p, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var findings []lintpkg.Finding
|
||||||
if info.IsDir() {
|
if info.IsDir() {
|
||||||
pathFindings, err := scanner.ScanDir(path)
|
findings, err = scanner.ScanDir(p)
|
||||||
if err != nil {
|
} else {
|
||||||
return err
|
findings, err = scanner.ScanFile(p)
|
||||||
}
|
|
||||||
findings = append(findings, pathFindings...)
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pathFindings, err := scanner.ScanFile(path)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
findings = append(findings, pathFindings...)
|
allFindings = append(allFindings, findings...)
|
||||||
}
|
}
|
||||||
|
|
||||||
switch format {
|
switch checkFormat {
|
||||||
case "json":
|
case "json":
|
||||||
return lintpkg.WriteJSON(command.OutOrStdout(), findings)
|
return lintpkg.WriteJSON(os.Stdout, allFindings)
|
||||||
case "jsonl":
|
case "jsonl":
|
||||||
return lintpkg.WriteJSONL(command.OutOrStdout(), findings)
|
return lintpkg.WriteJSONL(os.Stdout, allFindings)
|
||||||
case "sarif":
|
|
||||||
report := lintpkg.Report{
|
|
||||||
Findings: findings,
|
|
||||||
Summary: lintpkg.Summarise(findings),
|
|
||||||
}
|
|
||||||
return lintpkg.WriteReportSARIF(command.OutOrStdout(), report)
|
|
||||||
default:
|
default:
|
||||||
lintpkg.WriteText(command.OutOrStdout(), findings)
|
lintpkg.WriteText(os.Stdout, allFindings)
|
||||||
if format == "text" && len(findings) > 0 {
|
|
||||||
writeCatalogSummary(command.OutOrStdout(), findings)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(allFindings) > 0 {
|
||||||
|
summary := lintpkg.Summarise(allFindings)
|
||||||
|
fmt.Fprintf(os.Stderr, "\n%d finding(s)", summary.Total)
|
||||||
|
var parts []string
|
||||||
|
for sev, count := range summary.BySeverity {
|
||||||
|
parts = append(parts, fmt.Sprintf("%d %s", count, sev))
|
||||||
|
}
|
||||||
|
if len(parts) > 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, " (%s)", strings.Join(parts, ", "))
|
||||||
|
}
|
||||||
|
fmt.Fprintln(os.Stderr)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
cli.StringFlag(command, &format, "format", "f", "text", "Output format: text, json, jsonl, sarif")
|
cli.StringFlag(checkCmd, &checkFormat, "format", "f", "text", "Output format: text, json, jsonl")
|
||||||
cli.StringFlag(command, &language, "lang", "l", "", "Filter rules by language")
|
cli.StringFlag(checkCmd, &checkLang, "lang", "l", "", "Filter rules by language (e.g. go, php, ts)")
|
||||||
cli.StringFlag(command, &severity, "severity", "s", "", "Minimum severity threshold (info, low, medium, high, critical)")
|
cli.StringFlag(checkCmd, &checkSeverity, "severity", "s", "", "Minimum severity threshold (info, low, medium, high, critical)")
|
||||||
|
|
||||||
return command
|
// ── catalog ────────────────────────────────────────────────────────────
|
||||||
}
|
|
||||||
|
|
||||||
func newCatalogCommand() *cli.Command {
|
|
||||||
catalogCmd := cli.NewGroup("catalog", "Browse the pattern catalog", "")
|
catalogCmd := cli.NewGroup("catalog", "Browse the pattern catalog", "")
|
||||||
|
|
||||||
var listLanguage string
|
// catalog list
|
||||||
listCmd := cli.NewCommand("list", "List all rules in the catalog", "", func(command *cli.Command, args []string) error {
|
var listLang string
|
||||||
catalog, err := cataloglint.LoadEmbeddedCatalog()
|
|
||||||
|
listCmd := cli.NewCommand("list", "List all rules in the catalog", "", func(cmd *cli.Command, args []string) error {
|
||||||
|
cat, err := lint.LoadEmbeddedCatalog()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("cmd.catalog.list", "loading catalog", err)
|
return coreerr.E("cmd.catalog.list", "loading catalog", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
rules := catalog.Rules
|
rules := cat.Rules
|
||||||
if listLanguage != "" {
|
if listLang != "" {
|
||||||
rules = catalog.ForLanguage(listLanguage)
|
rules = cat.ForLanguage(listLang)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(rules) == 0 {
|
if len(rules) == 0 {
|
||||||
fmt.Fprintln(command.OutOrStdout(), "No rules found.")
|
fmt.Println("No rules found.")
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
rules = append([]lintpkg.Rule(nil), rules...)
|
for _, r := range rules {
|
||||||
sort.Slice(rules, func(left int, right int) bool {
|
fmt.Printf("%-14s [%-8s] %s\n", r.ID, r.Severity, r.Title)
|
||||||
if rules[left].Severity == rules[right].Severity {
|
|
||||||
return strings.Compare(rules[left].ID, rules[right].ID) < 0
|
|
||||||
}
|
|
||||||
return strings.Compare(rules[left].Severity, rules[right].Severity) < 0
|
|
||||||
})
|
|
||||||
|
|
||||||
for _, rule := range rules {
|
|
||||||
fmt.Fprintf(command.OutOrStdout(), "%-14s [%-8s] %s\n", rule.ID, rule.Severity, rule.Title)
|
|
||||||
}
|
}
|
||||||
fmt.Fprintf(os.Stderr, "\n%d rule(s)\n", len(rules))
|
fmt.Fprintf(os.Stderr, "\n%d rule(s)\n", len(rules))
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
cli.StringFlag(listCmd, &listLanguage, "lang", "l", "", "Filter by language")
|
|
||||||
|
|
||||||
showCmd := cli.NewCommand("show", "Show details of a specific rule", "", func(command *cli.Command, args []string) error {
|
cli.StringFlag(listCmd, &listLang, "lang", "l", "", "Filter by language")
|
||||||
|
|
||||||
|
// catalog show
|
||||||
|
showCmd := cli.NewCommand("show", "Show details of a specific rule", "", func(cmd *cli.Command, args []string) error {
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
return coreerr.E("cmd.catalog.show", "rule ID required", nil)
|
return coreerr.E("cmd.catalog.show", "rule ID required", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
catalog, err := cataloglint.LoadEmbeddedCatalog()
|
cat, err := lint.LoadEmbeddedCatalog()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return coreerr.E("cmd.catalog.show", "loading catalog", err)
|
return coreerr.E("cmd.catalog.show", "loading catalog", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
rule := catalog.ByID(args[0])
|
r := cat.ByID(args[0])
|
||||||
if rule == nil {
|
if r == nil {
|
||||||
return coreerr.E("cmd.catalog.show", "rule "+args[0]+" not found", nil)
|
return coreerr.E("cmd.catalog.show", "rule "+args[0]+" not found", nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
data, err := json.MarshalIndent(rule, "", " ")
|
data, err := json.MarshalIndent(r, "", " ")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
fmt.Fprintf(command.OutOrStdout(), "%s\n", string(data))
|
fmt.Println(string(data))
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
||||||
catalogCmd.AddCommand(listCmd, showCmd)
|
catalogCmd.AddCommand(listCmd, showCmd)
|
||||||
return catalogCmd
|
lintCmd.AddCommand(checkCmd, catalogCmd)
|
||||||
}
|
root.AddCommand(lintCmd)
|
||||||
|
|
||||||
func writeReport(writer io.Writer, output string, report lintpkg.Report) error {
|
|
||||||
switch output {
|
|
||||||
case "json":
|
|
||||||
return lintpkg.WriteReportJSON(writer, report)
|
|
||||||
case "text":
|
|
||||||
lintpkg.WriteReportText(writer, report)
|
|
||||||
return nil
|
|
||||||
case "github":
|
|
||||||
lintpkg.WriteReportGitHub(writer, report)
|
|
||||||
return nil
|
|
||||||
case "sarif":
|
|
||||||
return lintpkg.WriteReportSARIF(writer, report)
|
|
||||||
default:
|
|
||||||
return coreerr.E("writeReport", "unsupported output format "+output, nil)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeIndentedJSON(writer io.Writer, value any) error {
|
|
||||||
encoder := json.NewEncoder(writer)
|
|
||||||
encoder.SetIndent("", " ")
|
|
||||||
return encoder.Encode(value)
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeCatalogSummary(writer io.Writer, findings []lintpkg.Finding) {
|
|
||||||
summary := lintpkg.Summarise(findings)
|
|
||||||
fmt.Fprintf(writer, "\n%d finding(s)", summary.Total)
|
|
||||||
|
|
||||||
orderedSeverities := []string{"critical", "high", "medium", "low", "info", "error", "warning"}
|
|
||||||
seen := make(map[string]bool, len(summary.BySeverity))
|
|
||||||
var parts []string
|
|
||||||
|
|
||||||
for _, severity := range orderedSeverities {
|
|
||||||
count := summary.BySeverity[severity]
|
|
||||||
if count == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[severity] = true
|
|
||||||
parts = append(parts, fmt.Sprintf("%d %s", count, severity))
|
|
||||||
}
|
|
||||||
|
|
||||||
var extraSeverities []string
|
|
||||||
for severity := range summary.BySeverity {
|
|
||||||
if seen[severity] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
extraSeverities = append(extraSeverities, severity)
|
|
||||||
}
|
|
||||||
sort.Strings(extraSeverities)
|
|
||||||
for _, severity := range extraSeverities {
|
|
||||||
count := summary.BySeverity[severity]
|
|
||||||
if count == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
parts = append(parts, fmt.Sprintf("%d %s", count, severity))
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(parts) > 0 {
|
|
||||||
fmt.Fprintf(writer, " (%s)", strings.Join(parts, ", "))
|
|
||||||
}
|
|
||||||
fmt.Fprintln(writer)
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,286 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
lintpkg "forge.lthn.ai/core/lint/pkg/lint"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
buildBinaryOnce sync.Once
|
|
||||||
builtBinaryPath string
|
|
||||||
buildBinaryErr error
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestCLI_Run_JSON(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
buildCLI(t)
|
|
||||||
t.Setenv("PATH", t.TempDir())
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "input.go"), []byte(`package sample
|
|
||||||
|
|
||||||
type service struct{}
|
|
||||||
|
|
||||||
func (service) Process(string) error { return nil }
|
|
||||||
|
|
||||||
func Run() {
|
|
||||||
svc := service{}
|
|
||||||
_ = svc.Process("data")
|
|
||||||
}
|
|
||||||
`), 0o644))
|
|
||||||
|
|
||||||
stdout, stderr, exitCode := runCLI(t, dir, "run", "--output", "json", "--fail-on", "warning", dir)
|
|
||||||
assert.Equal(t, 1, exitCode, stderr)
|
|
||||||
assert.Contains(t, stderr, "lint failed (fail-on=warning)")
|
|
||||||
|
|
||||||
var report lintpkg.Report
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(stdout), &report))
|
|
||||||
require.Len(t, report.Findings, 1)
|
|
||||||
assert.Equal(t, "go-cor-003", report.Findings[0].Code)
|
|
||||||
assert.Equal(t, 1, report.Summary.Total)
|
|
||||||
assert.False(t, report.Summary.Passed)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_Run_FilesFlagLimitsScanning(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
buildCLI(t)
|
|
||||||
t.Setenv("PATH", t.TempDir())
|
|
||||||
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "clean.go"), []byte(`package sample
|
|
||||||
|
|
||||||
func Clean() {}
|
|
||||||
`), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "ignored.go"), []byte(`package sample
|
|
||||||
|
|
||||||
func Run() {
|
|
||||||
_ = helper()
|
|
||||||
}
|
|
||||||
|
|
||||||
func helper() error { return nil }
|
|
||||||
`), 0o644))
|
|
||||||
|
|
||||||
stdout, stderr, exitCode := runCLI(t, dir, "run", "--output", "json", "--files", "clean.go", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
var report lintpkg.Report
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(stdout), &report))
|
|
||||||
assert.Empty(t, report.Findings)
|
|
||||||
assert.Equal(t, 0, report.Summary.Total)
|
|
||||||
assert.True(t, report.Summary.Passed)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_Run_ScheduleAppliesPreset(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
buildCLI(t)
|
|
||||||
t.Setenv("PATH", t.TempDir())
|
|
||||||
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample
|
|
||||||
|
|
||||||
type service struct{}
|
|
||||||
|
|
||||||
func (service) Process(string) error { return nil }
|
|
||||||
|
|
||||||
func Run() {
|
|
||||||
svc := service{}
|
|
||||||
_ = svc.Process("root")
|
|
||||||
}
|
|
||||||
`), 0o644))
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Join(dir, "services"), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "services", "clean.go"), []byte(`package sample
|
|
||||||
|
|
||||||
func Clean() {}
|
|
||||||
`), 0o644))
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte(`output: text
|
|
||||||
schedules:
|
|
||||||
nightly:
|
|
||||||
output: json
|
|
||||||
paths:
|
|
||||||
- services
|
|
||||||
`), 0o644))
|
|
||||||
|
|
||||||
stdout, stderr, exitCode := runCLI(t, dir, "run", "--schedule", "nightly", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
var report lintpkg.Report
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(stdout), &report))
|
|
||||||
assert.Empty(t, report.Findings)
|
|
||||||
assert.Equal(t, 0, report.Summary.Total)
|
|
||||||
assert.True(t, report.Summary.Passed)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_Detect_JSON(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}\n"), 0o644))
|
|
||||||
|
|
||||||
stdout, stderr, exitCode := runCLI(t, dir, "detect", "--output", "json", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
var languages []string
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(stdout), &languages))
|
|
||||||
assert.Equal(t, []string{"go", "js"}, languages)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_Init_WritesConfig(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
|
|
||||||
stdout, stderr, exitCode := runCLI(t, dir, "init", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
assert.Contains(t, stdout, ".core/lint.yaml")
|
|
||||||
|
|
||||||
configPath := filepath.Join(dir, ".core", "lint.yaml")
|
|
||||||
content, err := os.ReadFile(configPath)
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Contains(t, string(content), "golangci-lint")
|
|
||||||
assert.Contains(t, string(content), "fail_on: error")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_Tools_TextIncludesMetadata(t *testing.T) {
|
|
||||||
buildCLI(t)
|
|
||||||
|
|
||||||
binDir := t.TempDir()
|
|
||||||
fakeToolPath := filepath.Join(binDir, "gosec")
|
|
||||||
require.NoError(t, os.WriteFile(fakeToolPath, []byte("#!/bin/sh\nexit 0\n"), 0o755))
|
|
||||||
t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))
|
|
||||||
|
|
||||||
command := exec.Command(buildCLI(t), "tools", "--lang", "go")
|
|
||||||
command.Dir = t.TempDir()
|
|
||||||
command.Env = os.Environ()
|
|
||||||
|
|
||||||
output, err := command.CombinedOutput()
|
|
||||||
require.NoError(t, err, string(output))
|
|
||||||
|
|
||||||
text := string(output)
|
|
||||||
assert.Contains(t, text, "gosec")
|
|
||||||
assert.Contains(t, text, "langs=go")
|
|
||||||
assert.Contains(t, text, "entitlement=lint.security")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_LintCheck_SARIF(t *testing.T) {
|
|
||||||
buildCLI(t)
|
|
||||||
|
|
||||||
repoRoot := repoRoot(t)
|
|
||||||
stdout, stderr, exitCode := runCLI(t, repoRoot, "lint", "check", "--format", "sarif", "tests/cli/lint/check/fixtures")
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
var sarif struct {
|
|
||||||
Version string `json:"version"`
|
|
||||||
Runs []struct {
|
|
||||||
Tool struct {
|
|
||||||
Driver struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
} `json:"driver"`
|
|
||||||
} `json:"tool"`
|
|
||||||
Results []struct {
|
|
||||||
RuleID string `json:"ruleId"`
|
|
||||||
} `json:"results"`
|
|
||||||
} `json:"runs"`
|
|
||||||
}
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(stdout), &sarif))
|
|
||||||
require.Equal(t, "2.1.0", sarif.Version)
|
|
||||||
require.Len(t, sarif.Runs, 1)
|
|
||||||
assert.Equal(t, "core-lint", sarif.Runs[0].Tool.Driver.Name)
|
|
||||||
require.Len(t, sarif.Runs[0].Results, 1)
|
|
||||||
assert.Equal(t, "go-cor-003", sarif.Runs[0].Results[0].RuleID)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCLI_HookInstallRemove(t *testing.T) {
|
|
||||||
if _, err := exec.LookPath("git"); err != nil {
|
|
||||||
t.Skip("git not available")
|
|
||||||
}
|
|
||||||
|
|
||||||
dir := t.TempDir()
|
|
||||||
runCLIExpectSuccess(t, dir, "git", "init")
|
|
||||||
runCLIExpectSuccess(t, dir, "git", "config", "user.email", "test@example.com")
|
|
||||||
runCLIExpectSuccess(t, dir, "git", "config", "user.name", "Test User")
|
|
||||||
|
|
||||||
_, stderr, exitCode := runCLI(t, dir, "hook", "install", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
hookPath := filepath.Join(dir, ".git", "hooks", "pre-commit")
|
|
||||||
hookContent, err := os.ReadFile(hookPath)
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Contains(t, string(hookContent), "core-lint run --hook")
|
|
||||||
|
|
||||||
_, stderr, exitCode = runCLI(t, dir, "hook", "remove", dir)
|
|
||||||
assert.Equal(t, 0, exitCode, stderr)
|
|
||||||
|
|
||||||
removedContent, err := os.ReadFile(hookPath)
|
|
||||||
if err == nil {
|
|
||||||
assert.NotContains(t, string(removedContent), "core-lint run --hook")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func runCLI(t *testing.T, workdir string, args ...string) (string, string, int) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
command := exec.Command(buildCLI(t), args...)
|
|
||||||
command.Dir = workdir
|
|
||||||
command.Env = os.Environ()
|
|
||||||
stdout, err := command.Output()
|
|
||||||
if err == nil {
|
|
||||||
return string(stdout), "", 0
|
|
||||||
}
|
|
||||||
|
|
||||||
exitCode := -1
|
|
||||||
stderr := ""
|
|
||||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
|
||||||
exitCode = exitErr.ExitCode()
|
|
||||||
stderr = string(exitErr.Stderr)
|
|
||||||
}
|
|
||||||
|
|
||||||
return string(stdout), stderr, exitCode
|
|
||||||
}
|
|
||||||
|
|
||||||
func runCLIExpectSuccess(t *testing.T, dir string, name string, args ...string) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
command := exec.Command(name, args...)
|
|
||||||
command.Dir = dir
|
|
||||||
output, err := command.CombinedOutput()
|
|
||||||
require.NoError(t, err, string(output))
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildCLI(t *testing.T) string {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
buildBinaryOnce.Do(func() {
|
|
||||||
repoRoot := repoRoot(t)
|
|
||||||
binDir, err := os.MkdirTemp("", "core-lint-bin-*")
|
|
||||||
if err != nil {
|
|
||||||
buildBinaryErr = err
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
builtBinaryPath = filepath.Join(binDir, "core-lint")
|
|
||||||
command := exec.Command("go", "build", "-o", builtBinaryPath, "./cmd/core-lint")
|
|
||||||
command.Dir = repoRoot
|
|
||||||
output, err := command.CombinedOutput()
|
|
||||||
if err != nil {
|
|
||||||
buildBinaryErr = fmt.Errorf("build core-lint: %w: %s", err, strings.TrimSpace(string(output)))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
require.NoError(t, buildBinaryErr)
|
|
||||||
return builtBinaryPath
|
|
||||||
}
|
|
||||||
|
|
||||||
func repoRoot(t *testing.T) string {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
root, err := filepath.Abs(filepath.Join(".", "..", ".."))
|
|
||||||
require.NoError(t, err)
|
|
||||||
return root
|
|
||||||
}
|
|
||||||
|
|
@ -20,6 +20,7 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
|
coreio "forge.lthn.ai/core/go-io"
|
||||||
"forge.lthn.ai/core/go-i18n"
|
"forge.lthn.ai/core/go-i18n"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -59,7 +60,6 @@ type DocblockResult struct {
|
||||||
Total int `json:"total"`
|
Total int `json:"total"`
|
||||||
Documented int `json:"documented"`
|
Documented int `json:"documented"`
|
||||||
Missing []MissingDocblock `json:"missing,omitempty"`
|
Missing []MissingDocblock `json:"missing,omitempty"`
|
||||||
Warnings []DocblockWarning `json:"warnings,omitempty"`
|
|
||||||
Passed bool `json:"passed"`
|
Passed bool `json:"passed"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -72,13 +72,6 @@ type MissingDocblock struct {
|
||||||
Reason string `json:"reason,omitempty"`
|
Reason string `json:"reason,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// DocblockWarning captures a partial parse failure while still preserving
|
|
||||||
// the successfully parsed files in the same directory.
|
|
||||||
type DocblockWarning struct {
|
|
||||||
Path string `json:"path"`
|
|
||||||
Error string `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// RunDocblockCheck checks docblock coverage for the given packages.
|
// RunDocblockCheck checks docblock coverage for the given packages.
|
||||||
func RunDocblockCheck(paths []string, threshold float64, verbose, jsonOutput bool) error {
|
func RunDocblockCheck(paths []string, threshold float64, verbose, jsonOutput bool) error {
|
||||||
result, err := CheckDocblockCoverage(paths)
|
result, err := CheckDocblockCoverage(paths)
|
||||||
|
|
@ -93,13 +86,21 @@ func RunDocblockCheck(paths []string, threshold float64, verbose, jsonOutput boo
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
cli.Print("%s\n", string(data))
|
fmt.Println(string(data))
|
||||||
if !result.Passed {
|
if !result.Passed {
|
||||||
return cli.Err("docblock coverage %.1f%% below threshold %.1f%%", result.Coverage, threshold)
|
return cli.Err("docblock coverage %.1f%% below threshold %.1f%%", result.Coverage, threshold)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Sort missing by file then line
|
||||||
|
slices.SortFunc(result.Missing, func(a, b MissingDocblock) int {
|
||||||
|
return cmp.Or(
|
||||||
|
cmp.Compare(a.File, b.File),
|
||||||
|
cmp.Compare(a.Line, b.Line),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
// Print result
|
// Print result
|
||||||
if verbose && len(result.Missing) > 0 {
|
if verbose && len(result.Missing) > 0 {
|
||||||
cli.Print("%s\n\n", i18n.T("cmd.qa.docblock.missing_docs"))
|
cli.Print("%s\n\n", i18n.T("cmd.qa.docblock.missing_docs"))
|
||||||
|
|
@ -114,13 +115,6 @@ func RunDocblockCheck(paths []string, threshold float64, verbose, jsonOutput boo
|
||||||
cli.Blank()
|
cli.Blank()
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(result.Warnings) > 0 {
|
|
||||||
for _, warning := range result.Warnings {
|
|
||||||
cli.Warnf("failed to parse %s: %s", warning.Path, warning.Error)
|
|
||||||
}
|
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Summary
|
// Summary
|
||||||
coverageStr := fmt.Sprintf("%.1f%%", result.Coverage)
|
coverageStr := fmt.Sprintf("%.1f%%", result.Coverage)
|
||||||
thresholdStr := fmt.Sprintf("%.1f%%", threshold)
|
thresholdStr := fmt.Sprintf("%.1f%%", threshold)
|
||||||
|
|
@ -174,12 +168,9 @@ func CheckDocblockCoverage(patterns []string) (*DocblockResult, error) {
|
||||||
return !strings.HasSuffix(fi.Name(), "_test.go")
|
return !strings.HasSuffix(fi.Name(), "_test.go")
|
||||||
}, parser.ParseComments)
|
}, parser.ParseComments)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Preserve partial results when a directory contains both valid and
|
// Log parse errors but continue to check other directories
|
||||||
// invalid files. The caller decides how to present the warning.
|
cli.Warnf("failed to parse %s: %v", dir, err)
|
||||||
result.Warnings = append(result.Warnings, DocblockWarning{
|
continue
|
||||||
Path: dir,
|
|
||||||
Error: err.Error(),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, pkg := range pkgs {
|
for _, pkg := range pkgs {
|
||||||
|
|
@ -193,21 +184,6 @@ func CheckDocblockCoverage(patterns []string) (*DocblockResult, error) {
|
||||||
result.Coverage = float64(result.Documented) / float64(result.Total) * 100
|
result.Coverage = float64(result.Documented) / float64(result.Total) * 100
|
||||||
}
|
}
|
||||||
|
|
||||||
slices.SortFunc(result.Missing, func(a, b MissingDocblock) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.File, b.File),
|
|
||||||
cmp.Compare(a.Line, b.Line),
|
|
||||||
cmp.Compare(a.Kind, b.Kind),
|
|
||||||
cmp.Compare(a.Name, b.Name),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
slices.SortFunc(result.Warnings, func(a, b DocblockWarning) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Path, b.Path),
|
|
||||||
cmp.Compare(a.Error, b.Error),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
return result, nil
|
return result, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -260,7 +236,7 @@ func expandPatterns(patterns []string) ([]string, error) {
|
||||||
|
|
||||||
// hasGoFiles checks if a directory contains Go files.
|
// hasGoFiles checks if a directory contains Go files.
|
||||||
func hasGoFiles(dir string) bool {
|
func hasGoFiles(dir string) bool {
|
||||||
entries, err := os.ReadDir(dir)
|
entries, err := coreio.Local.List(dir)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRunDocblockCheckJSONOutput_IsDeterministicAndKeepsWarnings(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "b.go"), "package sample\n\nfunc Beta() {}\n")
|
|
||||||
writeTestFile(t, filepath.Join(dir, "a.go"), "package sample\n\nfunc Alpha() {}\n")
|
|
||||||
writeTestFile(t, filepath.Join(dir, "broken.go"), "package sample\n\nfunc Broken(\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
|
|
||||||
var result DocblockResult
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
err := RunDocblockCheck([]string{"."}, 100, false, true)
|
|
||||||
require.Error(t, err)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &result))
|
|
||||||
assert.False(t, result.Passed)
|
|
||||||
assert.Equal(t, 2, result.Total)
|
|
||||||
assert.Equal(t, 0, result.Documented)
|
|
||||||
require.Len(t, result.Missing, 2)
|
|
||||||
assert.Equal(t, "a.go", result.Missing[0].File)
|
|
||||||
assert.Equal(t, "b.go", result.Missing[1].File)
|
|
||||||
require.Len(t, result.Warnings, 1)
|
|
||||||
assert.Equal(t, ".", result.Warnings[0].Path)
|
|
||||||
assert.NotEmpty(t, result.Warnings[0].Error)
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
// cmd_health.go implements the `qa health` command for aggregate CI health.
|
// cmd_health.go implements the 'qa health' command for aggregate CI health.
|
||||||
//
|
//
|
||||||
// Usage:
|
// Usage:
|
||||||
// core qa health # Show CI health summary
|
// core qa health # Show CI health summary
|
||||||
|
|
@ -20,14 +20,13 @@ import (
|
||||||
"forge.lthn.ai/core/go-scm/repos"
|
"forge.lthn.ai/core/go-scm/repos"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Health command flags.
|
// Health command flags
|
||||||
var (
|
var (
|
||||||
healthProblems bool
|
healthProblems bool
|
||||||
healthRegistry string
|
healthRegistry string
|
||||||
healthJSON bool
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// HealthWorkflowRun represents a GitHub Actions workflow run.
|
// HealthWorkflowRun represents a GitHub Actions workflow run
|
||||||
type HealthWorkflowRun struct {
|
type HealthWorkflowRun struct {
|
||||||
Status string `json:"status"`
|
Status string `json:"status"`
|
||||||
Conclusion string `json:"conclusion"`
|
Conclusion string `json:"conclusion"`
|
||||||
|
|
@ -37,36 +36,16 @@ type HealthWorkflowRun struct {
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// RepoHealth represents the CI health of a single repo.
|
// RepoHealth represents the CI health of a single repo
|
||||||
type RepoHealth struct {
|
type RepoHealth struct {
|
||||||
Name string `json:"name"`
|
Name string
|
||||||
Status string `json:"status"` // passing, failing, error, pending, no_ci, disabled
|
Status string // "passing", "failing", "pending", "no_ci", "disabled"
|
||||||
Message string `json:"message"`
|
Message string
|
||||||
URL string `json:"url"`
|
URL string
|
||||||
FailingSince string `json:"failing_since,omitempty"`
|
FailingSince string
|
||||||
}
|
|
||||||
|
|
||||||
// HealthSummary captures aggregate health counts.
|
|
||||||
type HealthSummary struct {
|
|
||||||
TotalRepos int `json:"total_repos"`
|
|
||||||
FilteredRepos int `json:"filtered_repos"`
|
|
||||||
Passing int `json:"passing"`
|
|
||||||
Failing int `json:"failing"`
|
|
||||||
Errors int `json:"errors"`
|
|
||||||
Pending int `json:"pending"`
|
|
||||||
Disabled int `json:"disabled"`
|
|
||||||
NotConfigured int `json:"not_configured"`
|
|
||||||
PassingRate int `json:"passing_rate"`
|
|
||||||
ProblemsOnly bool `json:"problems_only"`
|
|
||||||
ByStatus map[string]int `json:"by_status"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// HealthOutput is the JSON payload for `qa health --json`.
|
|
||||||
type HealthOutput struct {
|
|
||||||
Summary HealthSummary `json:"summary"`
|
|
||||||
Repos []RepoHealth `json:"repos"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// addHealthCommand adds the 'health' subcommand to qa.
|
||||||
func addHealthCommand(parent *cli.Command) {
|
func addHealthCommand(parent *cli.Command) {
|
||||||
healthCmd := &cli.Command{
|
healthCmd := &cli.Command{
|
||||||
Use: "health",
|
Use: "health",
|
||||||
|
|
@ -79,18 +58,20 @@ func addHealthCommand(parent *cli.Command) {
|
||||||
|
|
||||||
healthCmd.Flags().BoolVarP(&healthProblems, "problems", "p", false, i18n.T("cmd.qa.health.flag.problems"))
|
healthCmd.Flags().BoolVarP(&healthProblems, "problems", "p", false, i18n.T("cmd.qa.health.flag.problems"))
|
||||||
healthCmd.Flags().StringVar(&healthRegistry, "registry", "", i18n.T("common.flag.registry"))
|
healthCmd.Flags().StringVar(&healthRegistry, "registry", "", i18n.T("common.flag.registry"))
|
||||||
healthCmd.Flags().BoolVar(&healthJSON, "json", false, i18n.T("common.flag.json"))
|
|
||||||
|
|
||||||
parent.AddCommand(healthCmd)
|
parent.AddCommand(healthCmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
func runHealth() error {
|
func runHealth() error {
|
||||||
|
// Check gh is available
|
||||||
if _, err := exec.LookPath("gh"); err != nil {
|
if _, err := exec.LookPath("gh"); err != nil {
|
||||||
return log.E("qa.health", i18n.T("error.gh_not_found"), nil)
|
return log.E("qa.health", i18n.T("error.gh_not_found"), nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Load registry
|
||||||
var reg *repos.Registry
|
var reg *repos.Registry
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if healthRegistry != "" {
|
if healthRegistry != "" {
|
||||||
reg, err = repos.LoadRegistry(io.Local, healthRegistry)
|
reg, err = repos.LoadRegistry(io.Local, healthRegistry)
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -104,24 +85,28 @@ func runHealth() error {
|
||||||
return log.E("qa.health", "failed to load registry", err)
|
return log.E("qa.health", "failed to load registry", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Fetch CI status from all repos
|
||||||
|
var healthResults []RepoHealth
|
||||||
repoList := reg.List()
|
repoList := reg.List()
|
||||||
allHealthResults := make([]RepoHealth, 0, len(repoList))
|
|
||||||
for _, repo := range repoList {
|
|
||||||
health := fetchRepoHealth(reg.Org, repo.Name)
|
|
||||||
allHealthResults = append(allHealthResults, health)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sort by severity first, then repo name for deterministic output.
|
for i, repo := range repoList {
|
||||||
slices.SortFunc(allHealthResults, func(a, b RepoHealth) int {
|
cli.Print("\033[2K\r%s %d/%d %s",
|
||||||
if p := cmp.Compare(healthPriority(a.Status), healthPriority(b.Status)); p != 0 {
|
dimStyle.Render(i18n.T("cmd.qa.issues.fetching")),
|
||||||
return p
|
i+1, len(repoList), repo.Name)
|
||||||
}
|
|
||||||
return strings.Compare(a.Name, b.Name)
|
health := fetchRepoHealth(reg.Org, repo.Name)
|
||||||
|
healthResults = append(healthResults, health)
|
||||||
|
}
|
||||||
|
cli.Print("\033[2K\r") // Clear progress
|
||||||
|
|
||||||
|
// Sort: problems first, then passing
|
||||||
|
slices.SortFunc(healthResults, func(a, b RepoHealth) int {
|
||||||
|
return cmp.Compare(healthPriority(a.Status), healthPriority(b.Status))
|
||||||
})
|
})
|
||||||
|
|
||||||
healthResults := allHealthResults
|
// Filter if --problems flag
|
||||||
if healthProblems {
|
if healthProblems {
|
||||||
problems := make([]RepoHealth, 0, len(healthResults))
|
var problems []RepoHealth
|
||||||
for _, h := range healthResults {
|
for _, h := range healthResults {
|
||||||
if h.Status != "passing" {
|
if h.Status != "passing" {
|
||||||
problems = append(problems, h)
|
problems = append(problems, h)
|
||||||
|
|
@ -130,29 +115,37 @@ func runHealth() error {
|
||||||
healthResults = problems
|
healthResults = problems
|
||||||
}
|
}
|
||||||
|
|
||||||
summary := summariseHealthResults(len(repoList), len(healthResults), allHealthResults, healthProblems)
|
// Calculate summary
|
||||||
if healthJSON {
|
passing := 0
|
||||||
return printHealthJSON(summary, healthResults)
|
for _, h := range healthResults {
|
||||||
|
if h.Status == "passing" {
|
||||||
|
passing++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
total := len(repoList)
|
||||||
|
percentage := 0
|
||||||
|
if total > 0 {
|
||||||
|
percentage = (passing * 100) / total
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Print summary
|
||||||
cli.Print("%s: %d/%d repos healthy (%d%%)\n\n",
|
cli.Print("%s: %d/%d repos healthy (%d%%)\n\n",
|
||||||
i18n.T("cmd.qa.health.summary"),
|
i18n.T("cmd.qa.health.summary"),
|
||||||
summary.Passing,
|
passing, total, percentage)
|
||||||
summary.TotalRepos,
|
|
||||||
summary.PassingRate)
|
|
||||||
|
|
||||||
if len(healthResults) == 0 {
|
if len(healthResults) == 0 {
|
||||||
cli.Text(i18n.T("cmd.qa.health.all_healthy"))
|
cli.Text(i18n.T("cmd.qa.health.all_healthy"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Group by status
|
||||||
grouped := make(map[string][]RepoHealth)
|
grouped := make(map[string][]RepoHealth)
|
||||||
for _, h := range healthResults {
|
for _, h := range healthResults {
|
||||||
grouped[h.Status] = append(grouped[h.Status], h)
|
grouped[h.Status] = append(grouped[h.Status], h)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Print problems first
|
||||||
printHealthGroup("failing", grouped["failing"], errorStyle)
|
printHealthGroup("failing", grouped["failing"], errorStyle)
|
||||||
printHealthGroup("error", grouped["error"], errorStyle)
|
|
||||||
printHealthGroup("pending", grouped["pending"], warningStyle)
|
printHealthGroup("pending", grouped["pending"], warningStyle)
|
||||||
printHealthGroup("no_ci", grouped["no_ci"], dimStyle)
|
printHealthGroup("no_ci", grouped["no_ci"], dimStyle)
|
||||||
printHealthGroup("disabled", grouped["disabled"], dimStyle)
|
printHealthGroup("disabled", grouped["disabled"], dimStyle)
|
||||||
|
|
@ -166,6 +159,7 @@ func runHealth() error {
|
||||||
|
|
||||||
func fetchRepoHealth(org, repoName string) RepoHealth {
|
func fetchRepoHealth(org, repoName string) RepoHealth {
|
||||||
repoFullName := cli.Sprintf("%s/%s", org, repoName)
|
repoFullName := cli.Sprintf("%s/%s", org, repoName)
|
||||||
|
|
||||||
args := []string{
|
args := []string{
|
||||||
"run", "list",
|
"run", "list",
|
||||||
"--repo", repoFullName,
|
"--repo", repoFullName,
|
||||||
|
|
@ -176,6 +170,7 @@ func fetchRepoHealth(org, repoName string) RepoHealth {
|
||||||
cmd := exec.Command("gh", args...)
|
cmd := exec.Command("gh", args...)
|
||||||
output, err := cmd.Output()
|
output, err := cmd.Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
// Check if it's a 404 (no workflows)
|
||||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
if exitErr, ok := err.(*exec.ExitError); ok {
|
||||||
stderr := string(exitErr.Stderr)
|
stderr := string(exitErr.Stderr)
|
||||||
if strings.Contains(stderr, "no workflows") || strings.Contains(stderr, "not found") {
|
if strings.Contains(stderr, "no workflows") || strings.Contains(stderr, "not found") {
|
||||||
|
|
@ -188,7 +183,7 @@ func fetchRepoHealth(org, repoName string) RepoHealth {
|
||||||
}
|
}
|
||||||
return RepoHealth{
|
return RepoHealth{
|
||||||
Name: repoName,
|
Name: repoName,
|
||||||
Status: "error",
|
Status: "no_ci",
|
||||||
Message: i18n.T("cmd.qa.health.fetch_error"),
|
Message: i18n.T("cmd.qa.health.fetch_error"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -197,7 +192,7 @@ func fetchRepoHealth(org, repoName string) RepoHealth {
|
||||||
if err := json.Unmarshal(output, &runs); err != nil {
|
if err := json.Unmarshal(output, &runs); err != nil {
|
||||||
return RepoHealth{
|
return RepoHealth{
|
||||||
Name: repoName,
|
Name: repoName,
|
||||||
Status: "error",
|
Status: "no_ci",
|
||||||
Message: i18n.T("cmd.qa.health.parse_error"),
|
Message: i18n.T("cmd.qa.health.parse_error"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -250,88 +245,28 @@ func healthPriority(status string) int {
|
||||||
switch status {
|
switch status {
|
||||||
case "failing":
|
case "failing":
|
||||||
return 0
|
return 0
|
||||||
case "error":
|
|
||||||
return 1
|
|
||||||
case "pending":
|
case "pending":
|
||||||
return 2
|
return 1
|
||||||
case "no_ci":
|
case "no_ci":
|
||||||
return 3
|
return 2
|
||||||
case "disabled":
|
case "disabled":
|
||||||
return 4
|
return 3
|
||||||
case "passing":
|
case "passing":
|
||||||
return 5
|
return 4
|
||||||
default:
|
default:
|
||||||
return 6
|
return 5
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func summariseHealthResults(totalRepos int, filteredRepos int, results []RepoHealth, problemsOnly bool) HealthSummary {
|
|
||||||
summary := HealthSummary{
|
|
||||||
TotalRepos: totalRepos,
|
|
||||||
FilteredRepos: filteredRepos,
|
|
||||||
ByStatus: map[string]int{
|
|
||||||
"passing": 0,
|
|
||||||
"failing": 0,
|
|
||||||
"error": 0,
|
|
||||||
"pending": 0,
|
|
||||||
"disabled": 0,
|
|
||||||
"no_ci": 0,
|
|
||||||
},
|
|
||||||
ProblemsOnly: problemsOnly,
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, health := range results {
|
|
||||||
summary.ByStatus[health.Status]++
|
|
||||||
switch health.Status {
|
|
||||||
case "passing":
|
|
||||||
summary.Passing++
|
|
||||||
case "failing":
|
|
||||||
summary.Failing++
|
|
||||||
case "error":
|
|
||||||
summary.Errors++
|
|
||||||
case "pending":
|
|
||||||
summary.Pending++
|
|
||||||
case "disabled":
|
|
||||||
summary.Disabled++
|
|
||||||
case "no_ci":
|
|
||||||
summary.NotConfigured++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if summary.TotalRepos > 0 {
|
|
||||||
summary.PassingRate = (summary.Passing * 100) / summary.TotalRepos
|
|
||||||
}
|
|
||||||
|
|
||||||
return summary
|
|
||||||
}
|
|
||||||
|
|
||||||
func printHealthJSON(summary HealthSummary, repos []RepoHealth) error {
|
|
||||||
data, err := json.MarshalIndent(HealthOutput{
|
|
||||||
Summary: summary,
|
|
||||||
Repos: repos,
|
|
||||||
}, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func printHealthGroup(status string, repos []RepoHealth, style *cli.AnsiStyle) {
|
func printHealthGroup(status string, repos []RepoHealth, style *cli.AnsiStyle) {
|
||||||
if len(repos) == 0 {
|
if len(repos) == 0 {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
slices.SortFunc(repos, func(a, b RepoHealth) int {
|
|
||||||
return strings.Compare(a.Name, b.Name)
|
|
||||||
})
|
|
||||||
|
|
||||||
var label string
|
var label string
|
||||||
switch status {
|
switch status {
|
||||||
case "failing":
|
case "failing":
|
||||||
label = i18n.T("cmd.qa.health.count_failing")
|
label = i18n.T("cmd.qa.health.count_failing")
|
||||||
case "error":
|
|
||||||
label = i18n.T("cmd.qa.health.count_error")
|
|
||||||
case "pending":
|
case "pending":
|
||||||
label = i18n.T("cmd.qa.health.count_pending")
|
label = i18n.T("cmd.qa.health.count_pending")
|
||||||
case "no_ci":
|
case "no_ci":
|
||||||
|
|
|
||||||
|
|
@ -1,240 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRunHealthJSONOutput_UsesMachineFriendlyKeysAndKeepsFetchErrors(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"--repo forge/alpha"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "success",
|
|
||||||
"name": "CI",
|
|
||||||
"headSha": "abc123",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"url": "https://example.com/alpha/run/1"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"--repo forge/beta"*)
|
|
||||||
printf '%s\n' 'simulated workflow lookup failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetHealthFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
healthRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addHealthCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "health")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload HealthOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Equal(t, 2, payload.Summary.TotalRepos)
|
|
||||||
assert.Equal(t, 1, payload.Summary.Passing)
|
|
||||||
assert.Equal(t, 1, payload.Summary.Errors)
|
|
||||||
assert.Equal(t, 2, payload.Summary.FilteredRepos)
|
|
||||||
assert.Len(t, payload.Summary.ByStatus, 6)
|
|
||||||
assert.Equal(t, 1, payload.Summary.ByStatus["passing"])
|
|
||||||
assert.Equal(t, 1, payload.Summary.ByStatus["error"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["pending"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["disabled"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["no_ci"])
|
|
||||||
require.Len(t, payload.Repos, 2)
|
|
||||||
assert.Equal(t, "error", payload.Repos[0].Status)
|
|
||||||
assert.Equal(t, "beta", payload.Repos[0].Name)
|
|
||||||
assert.Equal(t, "passing", payload.Repos[1].Status)
|
|
||||||
assert.Equal(t, "alpha", payload.Repos[1].Name)
|
|
||||||
assert.Contains(t, output, `"status"`)
|
|
||||||
assert.NotContains(t, output, `"Status"`)
|
|
||||||
assert.NotContains(t, output, `"FailingSince"`)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunHealthJSONOutput_ProblemsOnlyKeepsOverallSummary(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"--repo forge/alpha"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "success",
|
|
||||||
"name": "CI",
|
|
||||||
"headSha": "abc123",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"url": "https://example.com/alpha/run/1"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"--repo forge/beta"*)
|
|
||||||
printf '%s\n' 'simulated workflow lookup failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetHealthFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
healthRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addHealthCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "health")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
require.NoError(t, command.Flags().Set("problems", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload HealthOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Equal(t, 2, payload.Summary.TotalRepos)
|
|
||||||
assert.Equal(t, 1, payload.Summary.Passing)
|
|
||||||
assert.Equal(t, 1, payload.Summary.Errors)
|
|
||||||
assert.Equal(t, 1, payload.Summary.FilteredRepos)
|
|
||||||
assert.True(t, payload.Summary.ProblemsOnly)
|
|
||||||
assert.Len(t, payload.Summary.ByStatus, 6)
|
|
||||||
assert.Equal(t, 1, payload.Summary.ByStatus["passing"])
|
|
||||||
assert.Equal(t, 1, payload.Summary.ByStatus["error"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["pending"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["disabled"])
|
|
||||||
assert.Equal(t, 0, payload.Summary.ByStatus["no_ci"])
|
|
||||||
require.Len(t, payload.Repos, 1)
|
|
||||||
assert.Equal(t, "error", payload.Repos[0].Status)
|
|
||||||
assert.Equal(t, "beta", payload.Repos[0].Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunHealthHumanOutput_ShowsFetchErrorsAsErrors(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"--repo forge/alpha"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "success",
|
|
||||||
"name": "CI",
|
|
||||||
"headSha": "abc123",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"url": "https://example.com/alpha/run/1"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"--repo forge/beta"*)
|
|
||||||
printf '%s\n' 'simulated workflow lookup failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetHealthFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
healthRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addHealthCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "health")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Contains(t, output, "cmd.qa.health.summary")
|
|
||||||
assert.Contains(t, output, "alpha")
|
|
||||||
assert.Contains(t, output, "beta")
|
|
||||||
assert.Contains(t, output, "cmd.qa.health.fetch_error")
|
|
||||||
assert.NotContains(t, output, "no CI")
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetHealthFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldProblems := healthProblems
|
|
||||||
oldRegistry := healthRegistry
|
|
||||||
oldJSON := healthJSON
|
|
||||||
|
|
||||||
healthProblems = false
|
|
||||||
healthRegistry = ""
|
|
||||||
healthJSON = false
|
|
||||||
|
|
||||||
t.Cleanup(func() {
|
|
||||||
healthProblems = oldProblems
|
|
||||||
healthRegistry = oldRegistry
|
|
||||||
healthJSON = oldJSON
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -30,7 +30,6 @@ var (
|
||||||
issuesBlocked bool
|
issuesBlocked bool
|
||||||
issuesRegistry string
|
issuesRegistry string
|
||||||
issuesLimit int
|
issuesLimit int
|
||||||
issuesJSON bool
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Issue represents a GitHub issue with triage metadata
|
// Issue represents a GitHub issue with triage metadata
|
||||||
|
|
@ -66,31 +65,10 @@ type Issue struct {
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
|
|
||||||
// Computed fields
|
// Computed fields
|
||||||
RepoName string `json:"repo_name"`
|
RepoName string
|
||||||
Priority int `json:"priority"` // Lower = higher priority
|
Priority int // Lower = higher priority
|
||||||
Category string `json:"category"` // "needs_response", "ready", "blocked", "triage"
|
Category string // "needs_response", "ready", "blocked", "triage"
|
||||||
ActionHint string `json:"action_hint,omitempty"`
|
ActionHint string
|
||||||
}
|
|
||||||
|
|
||||||
type IssueFetchError struct {
|
|
||||||
Repo string `json:"repo"`
|
|
||||||
Error string `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type IssueCategoryOutput struct {
|
|
||||||
Category string `json:"category"`
|
|
||||||
Count int `json:"count"`
|
|
||||||
Issues []Issue `json:"issues"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type IssuesOutput struct {
|
|
||||||
TotalIssues int `json:"total_issues"`
|
|
||||||
FilteredIssues int `json:"filtered_issues"`
|
|
||||||
ShowingMine bool `json:"showing_mine"`
|
|
||||||
ShowingTriage bool `json:"showing_triage"`
|
|
||||||
ShowingBlocked bool `json:"showing_blocked"`
|
|
||||||
Categories []IssueCategoryOutput `json:"categories"`
|
|
||||||
FetchErrors []IssueFetchError `json:"fetch_errors"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// addIssuesCommand adds the 'issues' subcommand to qa.
|
// addIssuesCommand adds the 'issues' subcommand to qa.
|
||||||
|
|
@ -109,7 +87,6 @@ func addIssuesCommand(parent *cli.Command) {
|
||||||
issuesCmd.Flags().BoolVarP(&issuesBlocked, "blocked", "b", false, i18n.T("cmd.qa.issues.flag.blocked"))
|
issuesCmd.Flags().BoolVarP(&issuesBlocked, "blocked", "b", false, i18n.T("cmd.qa.issues.flag.blocked"))
|
||||||
issuesCmd.Flags().StringVar(&issuesRegistry, "registry", "", i18n.T("common.flag.registry"))
|
issuesCmd.Flags().StringVar(&issuesRegistry, "registry", "", i18n.T("common.flag.registry"))
|
||||||
issuesCmd.Flags().IntVarP(&issuesLimit, "limit", "l", 50, i18n.T("cmd.qa.issues.flag.limit"))
|
issuesCmd.Flags().IntVarP(&issuesLimit, "limit", "l", 50, i18n.T("cmd.qa.issues.flag.limit"))
|
||||||
issuesCmd.Flags().BoolVar(&issuesJSON, "json", false, i18n.T("common.flag.json"))
|
|
||||||
|
|
||||||
parent.AddCommand(issuesCmd)
|
parent.AddCommand(issuesCmd)
|
||||||
}
|
}
|
||||||
|
|
@ -139,59 +116,22 @@ func runQAIssues() error {
|
||||||
|
|
||||||
// Fetch issues from all repos
|
// Fetch issues from all repos
|
||||||
var allIssues []Issue
|
var allIssues []Issue
|
||||||
fetchErrors := make([]IssueFetchError, 0)
|
|
||||||
repoList := reg.List()
|
repoList := reg.List()
|
||||||
// Registry repos are map-backed, so sort before fetching to keep output stable.
|
|
||||||
slices.SortFunc(repoList, func(a, b *repos.Repo) int {
|
|
||||||
return cmp.Compare(a.Name, b.Name)
|
|
||||||
})
|
|
||||||
successfulFetches := 0
|
|
||||||
|
|
||||||
for i, repo := range repoList {
|
for i, repo := range repoList {
|
||||||
if !issuesJSON {
|
cli.Print("\033[2K\r%s %d/%d %s",
|
||||||
cli.Print("%s %d/%d %s\n",
|
dimStyle.Render(i18n.T("cmd.qa.issues.fetching")),
|
||||||
dimStyle.Render(i18n.T("cmd.qa.issues.fetching")),
|
i+1, len(repoList), repo.Name)
|
||||||
i+1, len(repoList), repo.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
issues, err := fetchQAIssues(reg.Org, repo.Name, issuesLimit)
|
issues, err := fetchQAIssues(reg.Org, repo.Name, issuesLimit)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fetchErrors = append(fetchErrors, IssueFetchError{
|
|
||||||
Repo: repo.Name,
|
|
||||||
Error: strings.TrimSpace(err.Error()),
|
|
||||||
})
|
|
||||||
if !issuesJSON {
|
|
||||||
cli.Print("%s\n", warningStyle.Render(i18n.T(
|
|
||||||
"cmd.qa.issues.fetch_error",
|
|
||||||
map[string]any{"Repo": repo.Name, "Error": strings.TrimSpace(err.Error())},
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
continue // Skip repos with errors
|
continue // Skip repos with errors
|
||||||
}
|
}
|
||||||
allIssues = append(allIssues, issues...)
|
allIssues = append(allIssues, issues...)
|
||||||
successfulFetches++
|
|
||||||
}
|
}
|
||||||
totalIssues := len(allIssues)
|
cli.Print("\033[2K\r") // Clear progress
|
||||||
|
|
||||||
if len(allIssues) == 0 {
|
if len(allIssues) == 0 {
|
||||||
emptyCategorised := map[string][]Issue{
|
|
||||||
"needs_response": {},
|
|
||||||
"ready": {},
|
|
||||||
"blocked": {},
|
|
||||||
"triage": {},
|
|
||||||
}
|
|
||||||
if issuesJSON {
|
|
||||||
if err := printCategorisedIssuesJSON(0, emptyCategorised, fetchErrors); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if successfulFetches == 0 && len(fetchErrors) > 0 {
|
|
||||||
return cli.Err("failed to fetch issues from any repository")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if successfulFetches == 0 && len(fetchErrors) > 0 {
|
|
||||||
return cli.Err("failed to fetch issues from any repository")
|
|
||||||
}
|
|
||||||
cli.Text(i18n.T("cmd.qa.issues.no_issues"))
|
cli.Text(i18n.T("cmd.qa.issues.no_issues"))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
@ -210,10 +150,6 @@ func runQAIssues() error {
|
||||||
categorised = filterCategory(categorised, "blocked")
|
categorised = filterCategory(categorised, "blocked")
|
||||||
}
|
}
|
||||||
|
|
||||||
if issuesJSON {
|
|
||||||
return printCategorisedIssuesJSON(totalIssues, categorised, fetchErrors)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print categorised issues
|
// Print categorised issues
|
||||||
printCategorisedIssues(categorised)
|
printCategorisedIssues(categorised)
|
||||||
|
|
||||||
|
|
@ -234,9 +170,6 @@ func fetchQAIssues(org, repoName string, limit int) ([]Issue, error) {
|
||||||
cmd := exec.Command("gh", args...)
|
cmd := exec.Command("gh", args...)
|
||||||
output, err := cmd.Output()
|
output, err := cmd.Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if exitErr, ok := err.(*exec.ExitError); ok {
|
|
||||||
return nil, log.E("qa.fetchQAIssues", strings.TrimSpace(string(exitErr.Stderr)), nil)
|
|
||||||
}
|
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -272,16 +205,7 @@ func categoriseIssues(issues []Issue) map[string][]Issue {
|
||||||
// Sort each category by priority
|
// Sort each category by priority
|
||||||
for cat := range result {
|
for cat := range result {
|
||||||
slices.SortFunc(result[cat], func(a, b Issue) int {
|
slices.SortFunc(result[cat], func(a, b Issue) int {
|
||||||
if priority := cmp.Compare(a.Priority, b.Priority); priority != 0 {
|
return cmp.Compare(a.Priority, b.Priority)
|
||||||
return priority
|
|
||||||
}
|
|
||||||
if byDate := cmp.Compare(b.UpdatedAt.Unix(), a.UpdatedAt.Unix()); byDate != 0 {
|
|
||||||
return byDate
|
|
||||||
}
|
|
||||||
if repo := cmp.Compare(a.RepoName, b.RepoName); repo != 0 {
|
|
||||||
return repo
|
|
||||||
}
|
|
||||||
return cmp.Compare(a.Number, b.Number)
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -326,26 +250,24 @@ func categoriseIssue(issue *Issue, currentUser string) {
|
||||||
|
|
||||||
// Default: ready to work
|
// Default: ready to work
|
||||||
issue.Category = "ready"
|
issue.Category = "ready"
|
||||||
issue.Priority = calculatePriority(labels)
|
issue.Priority = calculatePriority(issue, labels)
|
||||||
issue.ActionHint = ""
|
issue.ActionHint = ""
|
||||||
}
|
}
|
||||||
|
|
||||||
// calculatePriority chooses the most urgent matching label so label order
|
func calculatePriority(issue *Issue, labels []string) int {
|
||||||
// does not change how issues are ranked.
|
|
||||||
func calculatePriority(labels []string) int {
|
|
||||||
priority := 50
|
priority := 50
|
||||||
|
|
||||||
// Priority labels
|
// Priority labels
|
||||||
for _, l := range labels {
|
for _, l := range labels {
|
||||||
switch {
|
switch {
|
||||||
case strings.Contains(l, "critical") || strings.Contains(l, "urgent"):
|
case strings.Contains(l, "critical") || strings.Contains(l, "urgent"):
|
||||||
priority = min(priority, 1)
|
priority = 1
|
||||||
case strings.Contains(l, "high"):
|
case strings.Contains(l, "high"):
|
||||||
priority = min(priority, 10)
|
priority = 10
|
||||||
case strings.Contains(l, "medium"):
|
case strings.Contains(l, "medium"):
|
||||||
priority = min(priority, 30)
|
priority = 30
|
||||||
case strings.Contains(l, "low"):
|
case strings.Contains(l, "low"):
|
||||||
priority = min(priority, 70)
|
priority = 70
|
||||||
case l == "good-first-issue" || l == "good first issue":
|
case l == "good-first-issue" || l == "good first issue":
|
||||||
priority = min(priority, 15) // Boost good first issues
|
priority = min(priority, 15) // Boost good first issues
|
||||||
case l == "help-wanted" || l == "help wanted":
|
case l == "help-wanted" || l == "help wanted":
|
||||||
|
|
@ -441,39 +363,6 @@ func printCategorisedIssues(categorised map[string][]Issue) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func printCategorisedIssuesJSON(totalIssues int, categorised map[string][]Issue, fetchErrors []IssueFetchError) error {
|
|
||||||
categories := []string{"needs_response", "ready", "blocked", "triage"}
|
|
||||||
filteredIssues := 0
|
|
||||||
categoryOutput := make([]IssueCategoryOutput, 0, len(categories))
|
|
||||||
|
|
||||||
for _, category := range categories {
|
|
||||||
issues := categorised[category]
|
|
||||||
filteredIssues += len(issues)
|
|
||||||
categoryOutput = append(categoryOutput, IssueCategoryOutput{
|
|
||||||
Category: category,
|
|
||||||
Count: len(issues),
|
|
||||||
Issues: issues,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
output := IssuesOutput{
|
|
||||||
TotalIssues: totalIssues,
|
|
||||||
FilteredIssues: filteredIssues,
|
|
||||||
ShowingMine: issuesMine,
|
|
||||||
ShowingTriage: issuesTriage,
|
|
||||||
ShowingBlocked: issuesBlocked,
|
|
||||||
Categories: categoryOutput,
|
|
||||||
FetchErrors: fetchErrors,
|
|
||||||
}
|
|
||||||
|
|
||||||
data, err := json.MarshalIndent(output, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func printTriagedIssue(issue Issue) {
|
func printTriagedIssue(issue Issue) {
|
||||||
// #42 [core-bio] Fix avatar upload
|
// #42 [core-bio] Fix avatar upload
|
||||||
num := cli.TitleStyle.Render(cli.Sprintf("#%d", issue.Number))
|
num := cli.TitleStyle.Render(cli.Sprintf("#%d", issue.Number))
|
||||||
|
|
@ -492,7 +381,6 @@ func printTriagedIssue(issue Issue) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if len(importantLabels) > 0 {
|
if len(importantLabels) > 0 {
|
||||||
slices.Sort(importantLabels)
|
|
||||||
cli.Print(" %s", warningStyle.Render("["+strings.Join(importantLabels, ", ")+"]"))
|
cli.Print(" %s", warningStyle.Render("["+strings.Join(importantLabels, ", ")+"]"))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,316 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRunQAIssuesJSONOutput_UsesMachineFriendlyKeys(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
commentTime := time.Now().UTC().Add(-1 * time.Hour).Format(time.RFC3339)
|
|
||||||
updatedAt := time.Now().UTC().Format(time.RFC3339)
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), fmt.Sprintf(`#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"api user"*)
|
|
||||||
printf '%%s\n' 'alice'
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/alpha"*)
|
|
||||||
cat <<JSON
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"number": 7,
|
|
||||||
"title": "Clarify agent output",
|
|
||||||
"state": "OPEN",
|
|
||||||
"body": "Explain behaviour",
|
|
||||||
"createdAt": "2026-03-30T00:00:00Z",
|
|
||||||
"updatedAt": %q,
|
|
||||||
"author": {"login": "bob"},
|
|
||||||
"assignees": {"nodes": []},
|
|
||||||
"labels": {"nodes": [{"name": "agent:ready"}]},
|
|
||||||
"comments": {
|
|
||||||
"totalCount": 1,
|
|
||||||
"nodes": [
|
|
||||||
{
|
|
||||||
"author": {"login": "carol"},
|
|
||||||
"createdAt": %q
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"url": "https://example.com/issues/7"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`, updatedAt, commentTime))
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetIssuesFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
issuesRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addIssuesCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "issues")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload IssuesOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Equal(t, 1, payload.TotalIssues)
|
|
||||||
assert.Equal(t, 1, payload.FilteredIssues)
|
|
||||||
require.Len(t, payload.Categories, 4)
|
|
||||||
require.Len(t, payload.Categories[0].Issues, 1)
|
|
||||||
|
|
||||||
issue := payload.Categories[0].Issues[0]
|
|
||||||
assert.Equal(t, "needs_response", payload.Categories[0].Category)
|
|
||||||
assert.Equal(t, "alpha", issue.RepoName)
|
|
||||||
assert.Equal(t, 10, issue.Priority)
|
|
||||||
assert.Equal(t, "needs_response", issue.Category)
|
|
||||||
assert.Equal(t, "@carol cmd.qa.issues.hint.needs_response", issue.ActionHint)
|
|
||||||
assert.Contains(t, output, `"repo_name"`)
|
|
||||||
assert.Contains(t, output, `"action_hint"`)
|
|
||||||
assert.NotContains(t, output, `"RepoName"`)
|
|
||||||
assert.NotContains(t, output, `"ActionHint"`)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunQAIssuesJSONOutput_SortsFetchErrorsByRepoName(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"issue list --repo forge/alpha"*)
|
|
||||||
printf '%s\n' 'alpha failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/beta"*)
|
|
||||||
printf '%s\n' 'beta failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetIssuesFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
issuesRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addIssuesCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "issues")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
var payload IssuesOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
require.Len(t, payload.FetchErrors, 2)
|
|
||||||
assert.Equal(t, "alpha", payload.FetchErrors[0].Repo)
|
|
||||||
assert.Equal(t, "beta", payload.FetchErrors[1].Repo)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunQAIssuesJSONOutput_ReturnsErrorWhenAllFetchesFail(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"issue list --repo forge/alpha"*)
|
|
||||||
printf '%s\n' 'alpha failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/beta"*)
|
|
||||||
printf '%s\n' 'beta failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetIssuesFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
issuesRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addIssuesCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "issues")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
|
|
||||||
var payload IssuesOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
require.Len(t, payload.Categories, 4)
|
|
||||||
assert.Empty(t, payload.Categories[0].Issues)
|
|
||||||
require.Len(t, payload.FetchErrors, 2)
|
|
||||||
assert.Equal(t, "alpha", payload.FetchErrors[0].Repo)
|
|
||||||
assert.Equal(t, "beta", payload.FetchErrors[1].Repo)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunQAIssuesHumanOutput_ReturnsErrorWhenAllFetchesFail(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "repos.yaml"), `version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
`)
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"issue list --repo forge/alpha"*)
|
|
||||||
printf '%s\n' 'alpha failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/beta"*)
|
|
||||||
printf '%s\n' 'beta failed' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetIssuesFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
issuesRegistry = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addIssuesCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "issues")
|
|
||||||
require.NoError(t, command.Flags().Set("registry", filepath.Join(dir, "repos.yaml")))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
assert.NotContains(t, output, "cmd.qa.issues.no_issues")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCalculatePriority_UsesMostUrgentLabelRegardlessOfOrder(t *testing.T) {
|
|
||||||
labelsA := []string{"low", "critical"}
|
|
||||||
labelsB := []string{"critical", "low"}
|
|
||||||
|
|
||||||
assert.Equal(t, 1, calculatePriority(labelsA))
|
|
||||||
assert.Equal(t, 1, calculatePriority(labelsB))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrintTriagedIssue_SortsImportantLabels(t *testing.T) {
|
|
||||||
var issue Issue
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(`{
|
|
||||||
"number": 7,
|
|
||||||
"title": "Stabilise output",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"labels": {
|
|
||||||
"nodes": [
|
|
||||||
{"name": "priority:urgent"},
|
|
||||||
{"name": "agent:ready"}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}`), &issue))
|
|
||||||
issue.RepoName = "alpha"
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
printTriagedIssue(issue)
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Contains(t, output, "[agent:ready, priority:urgent]")
|
|
||||||
assert.NotContains(t, output, "[priority:urgent, agent:ready]")
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetIssuesFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldMine := issuesMine
|
|
||||||
oldTriage := issuesTriage
|
|
||||||
oldBlocked := issuesBlocked
|
|
||||||
oldRegistry := issuesRegistry
|
|
||||||
oldLimit := issuesLimit
|
|
||||||
oldJSON := issuesJSON
|
|
||||||
|
|
||||||
issuesMine = false
|
|
||||||
issuesTriage = false
|
|
||||||
issuesBlocked = false
|
|
||||||
issuesRegistry = ""
|
|
||||||
issuesLimit = 50
|
|
||||||
issuesJSON = false
|
|
||||||
|
|
||||||
t.Cleanup(func() {
|
|
||||||
issuesMine = oldMine
|
|
||||||
issuesTriage = oldTriage
|
|
||||||
issuesBlocked = oldBlocked
|
|
||||||
issuesRegistry = oldRegistry
|
|
||||||
issuesLimit = oldLimit
|
|
||||||
issuesJSON = oldJSON
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -14,10 +14,8 @@ package qa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
|
|
@ -67,10 +65,8 @@ func addPHPFmtCommand(parent *cli.Command) {
|
||||||
return cli.Err("not a PHP project (no composer.json found)")
|
return cli.Err("not a PHP project (no composer.json found)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpFmtJSON) {
|
cli.Print("%s %s\n", headerStyle.Render("PHP Format"), dimStyle.Render("(Pint)"))
|
||||||
cli.Print("%s %s\n", headerStyle.Render("PHP Format"), dimStyle.Render("(Pint)"))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
return php.Format(context.Background(), php.FormatOptions{
|
return php.Format(context.Background(), php.FormatOptions{
|
||||||
Dir: cwd,
|
Dir: cwd,
|
||||||
|
|
@ -115,10 +111,8 @@ func addPHPStanCommand(parent *cli.Command) {
|
||||||
return cli.Err("no static analyser found (install PHPStan: composer require phpstan/phpstan --dev)")
|
return cli.Err("no static analyser found (install PHPStan: composer require phpstan/phpstan --dev)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpStanJSON, phpStanSARIF) {
|
cli.Print("%s %s\n", headerStyle.Render("PHP Static Analysis"), dimStyle.Render(fmt.Sprintf("(%s)", analyser)))
|
||||||
cli.Print("%s %s\n", headerStyle.Render("PHP Static Analysis"), dimStyle.Render(fmt.Sprintf("(%s)", analyser)))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
err = php.Analyse(context.Background(), php.AnalyseOptions{
|
err = php.Analyse(context.Background(), php.AnalyseOptions{
|
||||||
Dir: cwd,
|
Dir: cwd,
|
||||||
|
|
@ -131,10 +125,8 @@ func addPHPStanCommand(parent *cli.Command) {
|
||||||
return cli.Err("static analysis found issues")
|
return cli.Err("static analysis found issues")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpStanJSON, phpStanSARIF) {
|
cli.Blank()
|
||||||
cli.Blank()
|
cli.Print("%s\n", successStyle.Render("Static analysis passed"))
|
||||||
cli.Print("%s\n", successStyle.Render("Static analysis passed"))
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -176,10 +168,8 @@ func addPHPPsalmCommand(parent *cli.Command) {
|
||||||
return cli.Err("Psalm not found (install: composer require vimeo/psalm --dev)")
|
return cli.Err("Psalm not found (install: composer require vimeo/psalm --dev)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpPsalmJSON, phpPsalmSARIF) {
|
cli.Print("%s\n", headerStyle.Render("PHP Psalm Analysis"))
|
||||||
cli.Print("%s\n", headerStyle.Render("PHP Psalm Analysis"))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
err = php.RunPsalm(context.Background(), php.PsalmOptions{
|
err = php.RunPsalm(context.Background(), php.PsalmOptions{
|
||||||
Dir: cwd,
|
Dir: cwd,
|
||||||
|
|
@ -194,10 +184,8 @@ func addPHPPsalmCommand(parent *cli.Command) {
|
||||||
return cli.Err("Psalm found issues")
|
return cli.Err("Psalm found issues")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpPsalmJSON, phpPsalmSARIF) {
|
cli.Blank()
|
||||||
cli.Blank()
|
cli.Print("%s\n", successStyle.Render("Psalm analysis passed"))
|
||||||
cli.Print("%s\n", successStyle.Render("Psalm analysis passed"))
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -232,10 +220,8 @@ func addPHPAuditCommand(parent *cli.Command) {
|
||||||
return cli.Err("not a PHP project (no composer.json found)")
|
return cli.Err("not a PHP project (no composer.json found)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpAuditJSON) {
|
cli.Print("%s\n", headerStyle.Render("Dependency Audit"))
|
||||||
cli.Print("%s\n", headerStyle.Render("Dependency Audit"))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
results, err := php.RunAudit(context.Background(), php.AuditOptions{
|
results, err := php.RunAudit(context.Background(), php.AuditOptions{
|
||||||
Dir: cwd,
|
Dir: cwd,
|
||||||
|
|
@ -246,20 +232,6 @@ func addPHPAuditCommand(parent *cli.Command) {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if phpAuditJSON {
|
|
||||||
payload := mapAuditResultsForJSON(results)
|
|
||||||
data, err := json.MarshalIndent(payload, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
|
|
||||||
if payload.HasVulnerabilities {
|
|
||||||
return cli.Err("vulnerabilities found in dependencies")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
hasVulns := false
|
hasVulns := false
|
||||||
for _, result := range results {
|
for _, result := range results {
|
||||||
if result.Error != nil {
|
if result.Error != nil {
|
||||||
|
|
@ -321,10 +293,8 @@ func addPHPSecurityCommand(parent *cli.Command) {
|
||||||
return cli.Err("not a PHP project (no composer.json found)")
|
return cli.Err("not a PHP project (no composer.json found)")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpSecurityJSON, phpSecuritySARIF) {
|
cli.Print("%s\n", headerStyle.Render("Security Checks"))
|
||||||
cli.Print("%s\n", headerStyle.Render("Security Checks"))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
result, err := php.RunSecurityChecks(context.Background(), php.SecurityOptions{
|
result, err := php.RunSecurityChecks(context.Background(), php.SecurityOptions{
|
||||||
Dir: cwd,
|
Dir: cwd,
|
||||||
|
|
@ -337,36 +307,6 @@ func addPHPSecurityCommand(parent *cli.Command) {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
result.Checks = sortSecurityChecks(result.Checks)
|
|
||||||
|
|
||||||
if phpSecuritySARIF {
|
|
||||||
data, err := json.MarshalIndent(mapSecurityResultForSARIF(result), "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
|
|
||||||
summary := result.Summary
|
|
||||||
if summary.Critical > 0 || summary.High > 0 {
|
|
||||||
return cli.Err("security checks failed")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if phpSecurityJSON {
|
|
||||||
data, err := json.MarshalIndent(result, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
|
|
||||||
summary := result.Summary
|
|
||||||
if summary.Critical > 0 || summary.High > 0 {
|
|
||||||
return cli.Err("security checks failed")
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print each check result
|
// Print each check result
|
||||||
for _, check := range result.Checks {
|
for _, check := range result.Checks {
|
||||||
if check.Passed {
|
if check.Passed {
|
||||||
|
|
@ -423,74 +363,6 @@ func addPHPSecurityCommand(parent *cli.Command) {
|
||||||
parent.AddCommand(cmd)
|
parent.AddCommand(cmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
type auditJSONOutput struct {
|
|
||||||
Results []auditResultJSON `json:"results"`
|
|
||||||
HasVulnerabilities bool `json:"has_vulnerabilities"`
|
|
||||||
Vulnerabilities int `json:"vulnerabilities"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type auditResultJSON struct {
|
|
||||||
Tool string `json:"tool"`
|
|
||||||
Vulnerabilities int `json:"vulnerabilities"`
|
|
||||||
Advisories []auditAdvisoryJSON `json:"advisories"`
|
|
||||||
Error string `json:"error,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type auditAdvisoryJSON struct {
|
|
||||||
Package string `json:"package"`
|
|
||||||
Severity string `json:"severity,omitempty"`
|
|
||||||
Title string `json:"title,omitempty"`
|
|
||||||
URL string `json:"url,omitempty"`
|
|
||||||
Identifiers []string `json:"identifiers,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func mapAuditResultsForJSON(results []php.AuditResult) auditJSONOutput {
|
|
||||||
output := auditJSONOutput{
|
|
||||||
Results: make([]auditResultJSON, 0, len(results)),
|
|
||||||
}
|
|
||||||
sort.Slice(results, func(i, j int) bool {
|
|
||||||
return results[i].Tool < results[j].Tool
|
|
||||||
})
|
|
||||||
|
|
||||||
for _, result := range results {
|
|
||||||
entry := auditResultJSON{
|
|
||||||
Tool: result.Tool,
|
|
||||||
Vulnerabilities: result.Vulnerabilities,
|
|
||||||
}
|
|
||||||
if result.Error != nil {
|
|
||||||
entry.Error = result.Error.Error()
|
|
||||||
}
|
|
||||||
entry.Advisories = make([]auditAdvisoryJSON, 0, len(result.Advisories))
|
|
||||||
for _, advisory := range result.Advisories {
|
|
||||||
entry.Advisories = append(entry.Advisories, auditAdvisoryJSON{
|
|
||||||
Package: advisory.Package,
|
|
||||||
Severity: advisory.Severity,
|
|
||||||
Title: advisory.Title,
|
|
||||||
URL: advisory.URL,
|
|
||||||
Identifiers: append([]string(nil), advisory.Identifiers...),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
sort.Slice(entry.Advisories, func(i, j int) bool {
|
|
||||||
if entry.Advisories[i].Package == entry.Advisories[j].Package {
|
|
||||||
return entry.Advisories[i].Title < entry.Advisories[j].Title
|
|
||||||
}
|
|
||||||
return entry.Advisories[i].Package < entry.Advisories[j].Package
|
|
||||||
})
|
|
||||||
output.Results = append(output.Results, entry)
|
|
||||||
output.Vulnerabilities += entry.Vulnerabilities
|
|
||||||
}
|
|
||||||
|
|
||||||
output.HasVulnerabilities = output.Vulnerabilities > 0
|
|
||||||
return output
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortSecurityChecks(checks []php.SecurityCheck) []php.SecurityCheck {
|
|
||||||
sort.Slice(checks, func(i, j int) bool {
|
|
||||||
return checks[i].ID < checks[j].ID
|
|
||||||
})
|
|
||||||
return checks
|
|
||||||
}
|
|
||||||
|
|
||||||
// PHP rector command flags.
|
// PHP rector command flags.
|
||||||
var (
|
var (
|
||||||
phpRectorFix bool
|
phpRectorFix bool
|
||||||
|
|
@ -627,10 +499,8 @@ func addPHPTestCommand(parent *cli.Command) {
|
||||||
}
|
}
|
||||||
|
|
||||||
runner := php.DetectTestRunner(cwd)
|
runner := php.DetectTestRunner(cwd)
|
||||||
if !isMachineReadableOutput(phpTestJUnit) {
|
cli.Print("%s %s\n", headerStyle.Render("PHP Tests"), dimStyle.Render(fmt.Sprintf("(%s)", runner)))
|
||||||
cli.Print("%s %s\n", headerStyle.Render("PHP Tests"), dimStyle.Render(fmt.Sprintf("(%s)", runner)))
|
cli.Blank()
|
||||||
cli.Blank()
|
|
||||||
}
|
|
||||||
|
|
||||||
var groups []string
|
var groups []string
|
||||||
if phpTestGroup != "" {
|
if phpTestGroup != "" {
|
||||||
|
|
@ -649,10 +519,8 @@ func addPHPTestCommand(parent *cli.Command) {
|
||||||
return cli.Err("tests failed")
|
return cli.Err("tests failed")
|
||||||
}
|
}
|
||||||
|
|
||||||
if !isMachineReadableOutput(phpTestJUnit) {
|
cli.Blank()
|
||||||
cli.Blank()
|
cli.Print("%s\n", successStyle.Render("All tests passed"))
|
||||||
cli.Print("%s\n", successStyle.Render("All tests passed"))
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
@ -681,124 +549,3 @@ func getSeverityStyle(severity string) *cli.AnsiStyle {
|
||||||
return dimStyle
|
return dimStyle
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func isMachineReadableOutput(flags ...bool) bool {
|
|
||||||
for _, flag := range flags {
|
|
||||||
if flag {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifLog struct {
|
|
||||||
Version string `json:"version"`
|
|
||||||
Schema string `json:"$schema"`
|
|
||||||
Runs []sarifRun `json:"runs"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifRun struct {
|
|
||||||
Tool sarifTool `json:"tool"`
|
|
||||||
Results []sarifResult `json:"results"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifTool struct {
|
|
||||||
Driver sarifDriver `json:"driver"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifDriver struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Rules []sarifRule `json:"rules"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifRule struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
ShortDescription sarifMessage `json:"shortDescription"`
|
|
||||||
FullDescription sarifMessage `json:"fullDescription"`
|
|
||||||
Help sarifMessage `json:"help,omitempty"`
|
|
||||||
Properties any `json:"properties,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifResult struct {
|
|
||||||
RuleID string `json:"ruleId"`
|
|
||||||
Level string `json:"level"`
|
|
||||||
Message sarifMessage `json:"message"`
|
|
||||||
Properties any `json:"properties,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type sarifMessage struct {
|
|
||||||
Text string `json:"text"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func mapSecurityResultForSARIF(result *php.SecurityResult) sarifLog {
|
|
||||||
rules := make([]sarifRule, 0, len(result.Checks))
|
|
||||||
sarifResults := make([]sarifResult, 0, len(result.Checks))
|
|
||||||
|
|
||||||
for _, check := range result.Checks {
|
|
||||||
rule := sarifRule{
|
|
||||||
ID: check.ID,
|
|
||||||
Name: check.Name,
|
|
||||||
ShortDescription: sarifMessage{Text: check.Name},
|
|
||||||
FullDescription: sarifMessage{Text: check.Description},
|
|
||||||
}
|
|
||||||
if check.Fix != "" {
|
|
||||||
rule.Help = sarifMessage{Text: check.Fix}
|
|
||||||
}
|
|
||||||
if check.CWE != "" {
|
|
||||||
rule.Properties = map[string]any{"cwe": check.CWE}
|
|
||||||
}
|
|
||||||
rules = append(rules, rule)
|
|
||||||
|
|
||||||
if check.Passed {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
message := check.Message
|
|
||||||
if message == "" {
|
|
||||||
message = check.Description
|
|
||||||
}
|
|
||||||
|
|
||||||
properties := map[string]any{
|
|
||||||
"severity": check.Severity,
|
|
||||||
}
|
|
||||||
if check.CWE != "" {
|
|
||||||
properties["cwe"] = check.CWE
|
|
||||||
}
|
|
||||||
if check.Fix != "" {
|
|
||||||
properties["fix"] = check.Fix
|
|
||||||
}
|
|
||||||
|
|
||||||
sarifResults = append(sarifResults, sarifResult{
|
|
||||||
RuleID: check.ID,
|
|
||||||
Level: sarifLevel(check.Severity),
|
|
||||||
Message: sarifMessage{Text: message},
|
|
||||||
Properties: properties,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return sarifLog{
|
|
||||||
Version: "2.1.0",
|
|
||||||
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
|
|
||||||
Runs: []sarifRun{{
|
|
||||||
Tool: sarifTool{
|
|
||||||
Driver: sarifDriver{
|
|
||||||
Name: "core qa security",
|
|
||||||
Rules: rules,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Results: sarifResults,
|
|
||||||
}},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func sarifLevel(severity string) string {
|
|
||||||
switch strings.ToLower(severity) {
|
|
||||||
case "critical", "high":
|
|
||||||
return "error"
|
|
||||||
case "medium":
|
|
||||||
return "warning"
|
|
||||||
default:
|
|
||||||
return "note"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,432 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestPHPStanJSONOutput_DoesNotAppendSuccessBanner(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "vendor", "bin", "phpstan"), "#!/bin/sh\nprintf '%s\\n' '{\"tool\":\"phpstan\",\"status\":\"ok\"}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
resetPHPStanFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPStanCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "stan")
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Equal(t, "{\"tool\":\"phpstan\",\"status\":\"ok\"}\n", output)
|
|
||||||
assert.NotContains(t, output, "Static analysis passed")
|
|
||||||
assert.NotContains(t, output, "PHP Static Analysis")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPPsalmJSONOutput_DoesNotAppendSuccessBanner(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "vendor", "bin", "psalm"), "#!/bin/sh\nprintf '%s\\n' '{\"tool\":\"psalm\",\"status\":\"ok\"}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
resetPHPPsalmFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPPsalmCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "psalm")
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Equal(t, "{\"tool\":\"psalm\",\"status\":\"ok\"}\n", output)
|
|
||||||
assert.NotContains(t, output, "Psalm analysis passed")
|
|
||||||
assert.NotContains(t, output, "PHP Psalm Analysis")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPStanSARIFOutput_DoesNotAppendSuccessBanner(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "vendor", "bin", "phpstan"), "#!/bin/sh\nprintf '%s\\n' '{\"version\":\"2.1.0\",\"runs\":[]}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
resetPHPStanFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPStanCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "stan")
|
|
||||||
require.NoError(t, command.Flags().Set("sarif", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Equal(t, "{\"version\":\"2.1.0\",\"runs\":[]}\n", output)
|
|
||||||
assert.NotContains(t, output, "Static analysis passed")
|
|
||||||
assert.NotContains(t, output, "PHP Static Analysis")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPPsalmSARIFOutput_DoesNotAppendSuccessBanner(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "vendor", "bin", "psalm"), "#!/bin/sh\nprintf '%s\\n' '{\"version\":\"2.1.0\",\"runs\":[]}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
resetPHPPsalmFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPPsalmCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "psalm")
|
|
||||||
require.NoError(t, command.Flags().Set("sarif", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Equal(t, "{\"version\":\"2.1.0\",\"runs\":[]}\n", output)
|
|
||||||
assert.NotContains(t, output, "Psalm analysis passed")
|
|
||||||
assert.NotContains(t, output, "PHP Psalm Analysis")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPSecurityJSONOutput_UsesMachineFriendlyKeys(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeTestFile(t, filepath.Join(dir, ".env"), "APP_DEBUG=true\nAPP_KEY=short\nAPP_URL=http://example.com\n")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "bin", "composer"), "#!/bin/sh\nprintf '%s\\n' '{\"advisories\":{}}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, filepath.Join(dir, "bin"))
|
|
||||||
resetPHPSecurityFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPSecurityCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "security")
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.Error(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Contains(t, output, "\"checks\"")
|
|
||||||
assert.Contains(t, output, "\"summary\"")
|
|
||||||
assert.Contains(t, output, "\"app_key_set\"")
|
|
||||||
assert.NotContains(t, output, "\"Checks\"")
|
|
||||||
assert.NotContains(t, output, "Security Checks")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPSecuritySARIFOutput_IsStructuredAndChromeFree(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeTestFile(t, filepath.Join(dir, ".env"), "APP_DEBUG=true\nAPP_KEY=short\nAPP_URL=http://example.com\n")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "bin", "composer"), "#!/bin/sh\nprintf '%s\\n' '{\"advisories\":{}}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, filepath.Join(dir, "bin"))
|
|
||||||
resetPHPSecurityFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPSecurityCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "security")
|
|
||||||
require.NoError(t, command.Flags().Set("sarif", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.Error(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload map[string]any
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Equal(t, "2.1.0", payload["version"])
|
|
||||||
assert.Contains(t, output, "\"ruleId\": \"app_key_set\"")
|
|
||||||
assert.NotContains(t, output, "Security Checks")
|
|
||||||
assert.NotContains(t, output, "Summary:")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPSecurityJSONOutput_RespectsSeverityFilter(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeTestFile(t, filepath.Join(dir, ".env"), "APP_DEBUG=true\nAPP_KEY=short\nAPP_URL=http://example.com\n")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "bin", "composer"), "#!/bin/sh\nprintf '%s\\n' '{\"advisories\":{}}'\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, filepath.Join(dir, "bin"))
|
|
||||||
resetPHPSecurityFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPSecurityCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "security")
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
require.NoError(t, command.Flags().Set("severity", "critical"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.Error(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload struct {
|
|
||||||
Checks []struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
Severity string `json:"severity"`
|
|
||||||
} `json:"checks"`
|
|
||||||
Summary struct {
|
|
||||||
Total int `json:"total"`
|
|
||||||
Passed int `json:"passed"`
|
|
||||||
Critical int `json:"critical"`
|
|
||||||
High int `json:"high"`
|
|
||||||
} `json:"summary"`
|
|
||||||
}
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Equal(t, 3, payload.Summary.Total)
|
|
||||||
assert.Equal(t, 1, payload.Summary.Passed)
|
|
||||||
assert.Equal(t, 2, payload.Summary.Critical)
|
|
||||||
assert.Zero(t, payload.Summary.High)
|
|
||||||
require.Len(t, payload.Checks, 3)
|
|
||||||
assert.NotContains(t, output, "https_enforced")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPAuditJSONOutput_UsesLowerCaseAdvisoryKeys(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "composer"), `#!/bin/sh
|
|
||||||
cat <<'JSON'
|
|
||||||
{
|
|
||||||
"advisories": {
|
|
||||||
"vendor/package-a": [
|
|
||||||
{
|
|
||||||
"title": "Remote Code Execution",
|
|
||||||
"link": "https://example.com/advisory/1",
|
|
||||||
"cve": "CVE-2025-1234",
|
|
||||||
"affectedVersions": ">=1.0,<1.5"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
JSON
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetPHPAuditFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPAuditCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "audit")
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
|
|
||||||
var payload struct {
|
|
||||||
Results []struct {
|
|
||||||
Tool string `json:"tool"`
|
|
||||||
Advisories []struct {
|
|
||||||
Package string `json:"package"`
|
|
||||||
} `json:"advisories"`
|
|
||||||
} `json:"results"`
|
|
||||||
HasVulnerabilities bool `json:"has_vulnerabilities"`
|
|
||||||
Vulnerabilities int `json:"vulnerabilities"`
|
|
||||||
}
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
require.Len(t, payload.Results, 1)
|
|
||||||
assert.Equal(t, "composer", payload.Results[0].Tool)
|
|
||||||
require.Len(t, payload.Results[0].Advisories, 1)
|
|
||||||
assert.Equal(t, "vendor/package-a", payload.Results[0].Advisories[0].Package)
|
|
||||||
assert.True(t, payload.HasVulnerabilities)
|
|
||||||
assert.Equal(t, 1, payload.Vulnerabilities)
|
|
||||||
assert.NotContains(t, output, "\"Package\"")
|
|
||||||
assert.NotContains(t, output, "Dependency Audit")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPHPTestJUnitOutput_PrintsOnlyXML(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeTestFile(t, filepath.Join(dir, "composer.json"), "{}")
|
|
||||||
writeExecutable(t, filepath.Join(dir, "vendor", "bin", "phpunit"), "#!/bin/sh\njunit=''\nwhile [ $# -gt 0 ]; do\n if [ \"$1\" = \"--log-junit\" ]; then\n shift\n junit=\"$1\"\n fi\n shift\ndone\nprintf '%s\\n' 'human output should be suppressed'\nprintf '%s' '<testsuite tests=\"1\"></testsuite>' > \"$junit\"\n")
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
resetPHPTestFlags(t)
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addPHPTestCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "test")
|
|
||||||
require.NoError(t, command.Flags().Set("junit", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Equal(t, "<testsuite tests=\"1\"></testsuite>\n", output)
|
|
||||||
assert.NotContains(t, output, "human output should be suppressed")
|
|
||||||
assert.NotContains(t, output, "PHP Tests")
|
|
||||||
assert.NotContains(t, output, "All tests passed")
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeTestFile(t *testing.T, path string, content string) {
|
|
||||||
t.Helper()
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Dir(path), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(path, []byte(content), 0o644))
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeExecutable(t *testing.T, path string, content string) {
|
|
||||||
t.Helper()
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Dir(path), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(path, []byte(content), 0o755))
|
|
||||||
}
|
|
||||||
|
|
||||||
func restoreWorkingDir(t *testing.T, dir string) {
|
|
||||||
t.Helper()
|
|
||||||
wd, err := os.Getwd()
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.NoError(t, os.Chdir(dir))
|
|
||||||
t.Cleanup(func() {
|
|
||||||
require.NoError(t, os.Chdir(wd))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetPHPStanFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldLevel := phpStanLevel
|
|
||||||
oldMemory := phpStanMemory
|
|
||||||
oldJSON := phpStanJSON
|
|
||||||
oldSARIF := phpStanSARIF
|
|
||||||
phpStanLevel = 0
|
|
||||||
phpStanMemory = ""
|
|
||||||
phpStanJSON = false
|
|
||||||
phpStanSARIF = false
|
|
||||||
t.Cleanup(func() {
|
|
||||||
phpStanLevel = oldLevel
|
|
||||||
phpStanMemory = oldMemory
|
|
||||||
phpStanJSON = oldJSON
|
|
||||||
phpStanSARIF = oldSARIF
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetPHPPsalmFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldLevel := phpPsalmLevel
|
|
||||||
oldFix := phpPsalmFix
|
|
||||||
oldBaseline := phpPsalmBaseline
|
|
||||||
oldShowInfo := phpPsalmShowInfo
|
|
||||||
oldJSON := phpPsalmJSON
|
|
||||||
oldSARIF := phpPsalmSARIF
|
|
||||||
phpPsalmLevel = 0
|
|
||||||
phpPsalmFix = false
|
|
||||||
phpPsalmBaseline = false
|
|
||||||
phpPsalmShowInfo = false
|
|
||||||
phpPsalmJSON = false
|
|
||||||
phpPsalmSARIF = false
|
|
||||||
t.Cleanup(func() {
|
|
||||||
phpPsalmLevel = oldLevel
|
|
||||||
phpPsalmFix = oldFix
|
|
||||||
phpPsalmBaseline = oldBaseline
|
|
||||||
phpPsalmShowInfo = oldShowInfo
|
|
||||||
phpPsalmJSON = oldJSON
|
|
||||||
phpPsalmSARIF = oldSARIF
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetPHPSecurityFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldSeverity := phpSecuritySeverity
|
|
||||||
oldJSON := phpSecurityJSON
|
|
||||||
oldSARIF := phpSecuritySARIF
|
|
||||||
oldURL := phpSecurityURL
|
|
||||||
phpSecuritySeverity = ""
|
|
||||||
phpSecurityJSON = false
|
|
||||||
phpSecuritySARIF = false
|
|
||||||
phpSecurityURL = ""
|
|
||||||
t.Cleanup(func() {
|
|
||||||
phpSecuritySeverity = oldSeverity
|
|
||||||
phpSecurityJSON = oldJSON
|
|
||||||
phpSecuritySARIF = oldSARIF
|
|
||||||
phpSecurityURL = oldURL
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetPHPAuditFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldJSON := phpAuditJSON
|
|
||||||
oldFix := phpAuditFix
|
|
||||||
phpAuditJSON = false
|
|
||||||
phpAuditFix = false
|
|
||||||
t.Cleanup(func() {
|
|
||||||
phpAuditJSON = oldJSON
|
|
||||||
phpAuditFix = oldFix
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetPHPTestFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldParallel := phpTestParallel
|
|
||||||
oldCoverage := phpTestCoverage
|
|
||||||
oldFilter := phpTestFilter
|
|
||||||
oldGroup := phpTestGroup
|
|
||||||
oldJUnit := phpTestJUnit
|
|
||||||
phpTestParallel = false
|
|
||||||
phpTestCoverage = false
|
|
||||||
phpTestFilter = ""
|
|
||||||
phpTestGroup = ""
|
|
||||||
phpTestJUnit = false
|
|
||||||
t.Cleanup(func() {
|
|
||||||
phpTestParallel = oldParallel
|
|
||||||
phpTestCoverage = oldCoverage
|
|
||||||
phpTestFilter = oldFilter
|
|
||||||
phpTestGroup = oldGroup
|
|
||||||
phpTestJUnit = oldJUnit
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func findSubcommand(t *testing.T, parent *cli.Command, name string) *cli.Command {
|
|
||||||
t.Helper()
|
|
||||||
for _, command := range parent.Commands() {
|
|
||||||
if command.Name() == name {
|
|
||||||
return command
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t.Fatalf("subcommand %q not found", name)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func captureStdout(t *testing.T, fn func()) string {
|
|
||||||
t.Helper()
|
|
||||||
oldStdout := os.Stdout
|
|
||||||
reader, writer, err := os.Pipe()
|
|
||||||
require.NoError(t, err)
|
|
||||||
os.Stdout = writer
|
|
||||||
defer func() {
|
|
||||||
os.Stdout = oldStdout
|
|
||||||
}()
|
|
||||||
defer func() {
|
|
||||||
require.NoError(t, reader.Close())
|
|
||||||
}()
|
|
||||||
|
|
||||||
fn()
|
|
||||||
|
|
||||||
require.NoError(t, writer.Close())
|
|
||||||
|
|
||||||
output, err := io.ReadAll(reader)
|
|
||||||
require.NoError(t, err)
|
|
||||||
return string(output)
|
|
||||||
}
|
|
||||||
|
|
||||||
func prependPath(t *testing.T, dir string) {
|
|
||||||
t.Helper()
|
|
||||||
oldPath := os.Getenv("PATH")
|
|
||||||
require.NoError(t, os.Setenv("PATH", dir+string(os.PathListSeparator)+oldPath))
|
|
||||||
t.Cleanup(func() {
|
|
||||||
require.NoError(t, os.Setenv("PATH", oldPath))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -12,12 +12,13 @@ package qa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
"forge.lthn.ai/core/cli/pkg/cli"
|
||||||
"forge.lthn.ai/core/go-i18n"
|
i18n "forge.lthn.ai/core/go-i18n"
|
||||||
"forge.lthn.ai/core/lint/locales"
|
"forge.lthn.ai/core/lint/locales"
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
cli.RegisterCommands(AddQACommands, locales.FS)
|
i18n.RegisterLocales(locales.FS, ".")
|
||||||
|
cli.RegisterCommands(AddQACommands)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Style aliases from shared package
|
// Style aliases from shared package
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,6 @@ import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
|
@ -26,7 +25,6 @@ var (
|
||||||
reviewMine bool
|
reviewMine bool
|
||||||
reviewRequested bool
|
reviewRequested bool
|
||||||
reviewRepo string
|
reviewRepo string
|
||||||
reviewJSON bool
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// PullRequest represents a GitHub pull request
|
// PullRequest represents a GitHub pull request
|
||||||
|
|
@ -83,24 +81,6 @@ type Review struct {
|
||||||
State string `json:"state"`
|
State string `json:"state"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// ReviewFetchError captures a partial fetch failure while preserving any
|
|
||||||
// successfully fetched PRs in the same review run.
|
|
||||||
type ReviewFetchError struct {
|
|
||||||
Repo string `json:"repo"`
|
|
||||||
Scope string `json:"scope"`
|
|
||||||
Error string `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type reviewOutput struct {
|
|
||||||
Mine []PullRequest `json:"mine"`
|
|
||||||
Requested []PullRequest `json:"requested"`
|
|
||||||
TotalMine int `json:"total_mine"`
|
|
||||||
TotalRequested int `json:"total_requested"`
|
|
||||||
ShowingMine bool `json:"showing_mine"`
|
|
||||||
ShowingRequested bool `json:"showing_requested"`
|
|
||||||
FetchErrors []ReviewFetchError `json:"fetch_errors"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// addReviewCommand adds the 'review' subcommand to the qa command.
|
// addReviewCommand adds the 'review' subcommand to the qa command.
|
||||||
func addReviewCommand(parent *cli.Command) {
|
func addReviewCommand(parent *cli.Command) {
|
||||||
reviewCmd := &cli.Command{
|
reviewCmd := &cli.Command{
|
||||||
|
|
@ -115,7 +95,6 @@ func addReviewCommand(parent *cli.Command) {
|
||||||
reviewCmd.Flags().BoolVarP(&reviewMine, "mine", "m", false, i18n.T("cmd.qa.review.flag.mine"))
|
reviewCmd.Flags().BoolVarP(&reviewMine, "mine", "m", false, i18n.T("cmd.qa.review.flag.mine"))
|
||||||
reviewCmd.Flags().BoolVarP(&reviewRequested, "requested", "r", false, i18n.T("cmd.qa.review.flag.requested"))
|
reviewCmd.Flags().BoolVarP(&reviewRequested, "requested", "r", false, i18n.T("cmd.qa.review.flag.requested"))
|
||||||
reviewCmd.Flags().StringVar(&reviewRepo, "repo", "", i18n.T("cmd.qa.review.flag.repo"))
|
reviewCmd.Flags().StringVar(&reviewRepo, "repo", "", i18n.T("cmd.qa.review.flag.repo"))
|
||||||
reviewCmd.Flags().BoolVar(&reviewJSON, "json", false, i18n.T("common.flag.json"))
|
|
||||||
|
|
||||||
parent.AddCommand(reviewCmd)
|
parent.AddCommand(reviewCmd)
|
||||||
}
|
}
|
||||||
|
|
@ -142,98 +121,18 @@ func runReview() error {
|
||||||
// Default: show both mine and requested if neither flag is set
|
// Default: show both mine and requested if neither flag is set
|
||||||
showMine := reviewMine || (!reviewMine && !reviewRequested)
|
showMine := reviewMine || (!reviewMine && !reviewRequested)
|
||||||
showRequested := reviewRequested || (!reviewMine && !reviewRequested)
|
showRequested := reviewRequested || (!reviewMine && !reviewRequested)
|
||||||
minePRs := []PullRequest{}
|
|
||||||
requestedPRs := []PullRequest{}
|
|
||||||
fetchErrors := make([]ReviewFetchError, 0)
|
|
||||||
mineFetched := false
|
|
||||||
requestedFetched := false
|
|
||||||
successfulFetches := 0
|
|
||||||
|
|
||||||
if showMine {
|
if showMine {
|
||||||
prs, err := fetchPRs(ctx, repoFullName, "author:@me")
|
if err := showMyPRs(ctx, repoFullName); err != nil {
|
||||||
if err != nil {
|
return err
|
||||||
fetchErrors = append(fetchErrors, ReviewFetchError{
|
|
||||||
Repo: repoFullName,
|
|
||||||
Scope: "mine",
|
|
||||||
Error: strings.TrimSpace(err.Error()),
|
|
||||||
})
|
|
||||||
if !reviewJSON {
|
|
||||||
cli.Warnf("failed to fetch your PRs for %s: %s", repoFullName, strings.TrimSpace(err.Error()))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
sort.Slice(prs, func(i, j int) bool {
|
|
||||||
if prs[i].Number == prs[j].Number {
|
|
||||||
return strings.Compare(prs[i].Title, prs[j].Title) < 0
|
|
||||||
}
|
|
||||||
return prs[i].Number < prs[j].Number
|
|
||||||
})
|
|
||||||
minePRs = prs
|
|
||||||
mineFetched = true
|
|
||||||
successfulFetches++
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if showRequested {
|
if showRequested {
|
||||||
prs, err := fetchPRs(ctx, repoFullName, "review-requested:@me")
|
if showMine {
|
||||||
if err != nil {
|
|
||||||
fetchErrors = append(fetchErrors, ReviewFetchError{
|
|
||||||
Repo: repoFullName,
|
|
||||||
Scope: "requested",
|
|
||||||
Error: strings.TrimSpace(err.Error()),
|
|
||||||
})
|
|
||||||
if !reviewJSON {
|
|
||||||
cli.Warnf("failed to fetch review requested PRs for %s: %s", repoFullName, strings.TrimSpace(err.Error()))
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
sort.Slice(prs, func(i, j int) bool {
|
|
||||||
if prs[i].Number == prs[j].Number {
|
|
||||||
return strings.Compare(prs[i].Title, prs[j].Title) < 0
|
|
||||||
}
|
|
||||||
return prs[i].Number < prs[j].Number
|
|
||||||
})
|
|
||||||
requestedPRs = prs
|
|
||||||
requestedFetched = true
|
|
||||||
successfulFetches++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
output := reviewOutput{
|
|
||||||
Mine: minePRs,
|
|
||||||
Requested: requestedPRs,
|
|
||||||
TotalMine: len(minePRs),
|
|
||||||
TotalRequested: len(requestedPRs),
|
|
||||||
ShowingMine: showMine,
|
|
||||||
ShowingRequested: showRequested,
|
|
||||||
FetchErrors: fetchErrors,
|
|
||||||
}
|
|
||||||
|
|
||||||
if reviewJSON {
|
|
||||||
data, err := json.MarshalIndent(output, "", " ")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
cli.Print("%s\n", string(data))
|
|
||||||
if successfulFetches == 0 && len(fetchErrors) > 0 {
|
|
||||||
return cli.Err("failed to fetch pull requests for %s", repoFullName)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if successfulFetches == 0 && len(fetchErrors) > 0 {
|
|
||||||
return cli.Err("failed to fetch pull requests for %s", repoFullName)
|
|
||||||
}
|
|
||||||
|
|
||||||
if showMine && mineFetched {
|
|
||||||
if err := printMyPRs(minePRs); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if showRequested && requestedFetched {
|
|
||||||
if showMine && mineFetched {
|
|
||||||
cli.Blank()
|
cli.Blank()
|
||||||
}
|
}
|
||||||
if err := printRequestedPRs(requestedPRs); err != nil {
|
if err := showRequestedReviews(ctx, repoFullName); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -241,8 +140,13 @@ func runReview() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// printMyPRs shows the user's open PRs with status
|
// showMyPRs shows the user's open PRs with status
|
||||||
func printMyPRs(prs []PullRequest) error {
|
func showMyPRs(ctx context.Context, repo string) error {
|
||||||
|
prs, err := fetchPRs(ctx, repo, "author:@me")
|
||||||
|
if err != nil {
|
||||||
|
return log.E("qa.review", "failed to fetch your PRs", err)
|
||||||
|
}
|
||||||
|
|
||||||
if len(prs) == 0 {
|
if len(prs) == 0 {
|
||||||
cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_prs")))
|
cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_prs")))
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -257,8 +161,13 @@ func printMyPRs(prs []PullRequest) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// printRequestedPRs shows PRs where user's review is requested
|
// showRequestedReviews shows PRs where user's review is requested
|
||||||
func printRequestedPRs(prs []PullRequest) error {
|
func showRequestedReviews(ctx context.Context, repo string) error {
|
||||||
|
prs, err := fetchPRs(ctx, repo, "review-requested:@me")
|
||||||
|
if err != nil {
|
||||||
|
return log.E("qa.review", "failed to fetch review requests", err)
|
||||||
|
}
|
||||||
|
|
||||||
if len(prs) == 0 {
|
if len(prs) == 0 {
|
||||||
cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_reviews")))
|
cli.Print("%s\n", dimStyle.Render(i18n.T("cmd.qa.review.no_reviews")))
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -348,7 +257,7 @@ func analyzePRStatus(pr PullRequest) (status string, style *cli.AnsiStyle, actio
|
||||||
ciPassed := true
|
ciPassed := true
|
||||||
ciFailed := false
|
ciFailed := false
|
||||||
ciPending := false
|
ciPending := false
|
||||||
var failedChecks []string
|
var failedCheck string
|
||||||
|
|
||||||
if pr.StatusChecks != nil {
|
if pr.StatusChecks != nil {
|
||||||
for _, check := range pr.StatusChecks.Contexts {
|
for _, check := range pr.StatusChecks.Contexts {
|
||||||
|
|
@ -356,7 +265,9 @@ func analyzePRStatus(pr PullRequest) (status string, style *cli.AnsiStyle, actio
|
||||||
case "FAILURE", "failure":
|
case "FAILURE", "failure":
|
||||||
ciFailed = true
|
ciFailed = true
|
||||||
ciPassed = false
|
ciPassed = false
|
||||||
failedChecks = append(failedChecks, check.Name)
|
if failedCheck == "" {
|
||||||
|
failedCheck = check.Name
|
||||||
|
}
|
||||||
case "PENDING", "pending", "":
|
case "PENDING", "pending", "":
|
||||||
if check.State == "PENDING" || check.State == "" {
|
if check.State == "PENDING" || check.State == "" {
|
||||||
ciPending = true
|
ciPending = true
|
||||||
|
|
@ -379,11 +290,7 @@ func analyzePRStatus(pr PullRequest) (status string, style *cli.AnsiStyle, actio
|
||||||
}
|
}
|
||||||
|
|
||||||
if ciFailed {
|
if ciFailed {
|
||||||
if len(failedChecks) > 0 {
|
return "✗", errorStyle, fmt.Sprintf("CI failed: %s", failedCheck)
|
||||||
sort.Strings(failedChecks)
|
|
||||||
return "✗", errorStyle, fmt.Sprintf("CI failed: %s", failedChecks[0])
|
|
||||||
}
|
|
||||||
return "✗", errorStyle, "CI failed"
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if changesRequested {
|
if changesRequested {
|
||||||
|
|
|
||||||
|
|
@ -1,269 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestRunReviewJSONOutput_PreservesPartialResultsAndFetchErrors(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"author:@me"*)
|
|
||||||
printf '%s\n' 'simulated author query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"review-requested:@me"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"number": 42,
|
|
||||||
"title": "Refine agent output",
|
|
||||||
"author": {"login": "alice"},
|
|
||||||
"state": "OPEN",
|
|
||||||
"isDraft": false,
|
|
||||||
"mergeable": "MERGEABLE",
|
|
||||||
"reviewDecision": "",
|
|
||||||
"url": "https://example.com/pull/42",
|
|
||||||
"headRefName": "feature/agent-output",
|
|
||||||
"createdAt": "2026-03-30T00:00:00Z",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"additions": 12,
|
|
||||||
"deletions": 3,
|
|
||||||
"changedFiles": 2,
|
|
||||||
"reviewRequests": {"nodes": []},
|
|
||||||
"reviews": []
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetReviewFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
reviewRepo = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addReviewCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "review")
|
|
||||||
require.NoError(t, command.Flags().Set("repo", "forge/example"))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
var payload reviewOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.True(t, payload.ShowingMine)
|
|
||||||
assert.True(t, payload.ShowingRequested)
|
|
||||||
require.Len(t, payload.Mine, 0)
|
|
||||||
require.Len(t, payload.Requested, 1)
|
|
||||||
assert.Equal(t, 42, payload.Requested[0].Number)
|
|
||||||
assert.Equal(t, "Refine agent output", payload.Requested[0].Title)
|
|
||||||
require.Len(t, payload.FetchErrors, 1)
|
|
||||||
assert.Equal(t, "forge/example", payload.FetchErrors[0].Repo)
|
|
||||||
assert.Equal(t, "mine", payload.FetchErrors[0].Scope)
|
|
||||||
assert.Contains(t, payload.FetchErrors[0].Error, "simulated author query failure")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunReviewJSONOutput_ReturnsErrorWhenAllFetchesFail(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"author:@me"*)
|
|
||||||
printf '%s\n' 'simulated author query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"review-requested:@me"*)
|
|
||||||
printf '%s\n' 'simulated requested query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetReviewFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
reviewRepo = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addReviewCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "review")
|
|
||||||
require.NoError(t, command.Flags().Set("repo", "forge/example"))
|
|
||||||
require.NoError(t, command.Flags().Set("json", "true"))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
|
|
||||||
var payload reviewOutput
|
|
||||||
require.NoError(t, json.Unmarshal([]byte(output), &payload))
|
|
||||||
assert.Empty(t, payload.Mine)
|
|
||||||
assert.Empty(t, payload.Requested)
|
|
||||||
require.Len(t, payload.FetchErrors, 2)
|
|
||||||
assert.Equal(t, "mine", payload.FetchErrors[0].Scope)
|
|
||||||
assert.Equal(t, "requested", payload.FetchErrors[1].Scope)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunReviewHumanOutput_PreservesSuccessfulSectionWhenOneFetchFails(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"author:@me"*)
|
|
||||||
printf '%s\n' 'simulated author query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"review-requested:@me"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"number": 42,
|
|
||||||
"title": "Refine agent output",
|
|
||||||
"author": {"login": "alice"},
|
|
||||||
"state": "OPEN",
|
|
||||||
"isDraft": false,
|
|
||||||
"mergeable": "MERGEABLE",
|
|
||||||
"reviewDecision": "",
|
|
||||||
"url": "https://example.com/pull/42",
|
|
||||||
"headRefName": "feature/agent-output",
|
|
||||||
"createdAt": "2026-03-30T00:00:00Z",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"additions": 12,
|
|
||||||
"deletions": 3,
|
|
||||||
"changedFiles": 2,
|
|
||||||
"reviewRequests": {"nodes": []},
|
|
||||||
"reviews": []
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetReviewFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
reviewRepo = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addReviewCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "review")
|
|
||||||
require.NoError(t, command.Flags().Set("repo", "forge/example"))
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
require.NoError(t, command.RunE(command, nil))
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Contains(t, output, "#42 Refine agent output")
|
|
||||||
assert.Contains(t, output, "gh pr checkout 42")
|
|
||||||
assert.NotContains(t, output, "Your pull requests")
|
|
||||||
assert.NotContains(t, output, "cmd.qa.review.no_prs")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunReviewHumanOutput_ReturnsErrorWhenAllFetchesFail(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"author:@me"*)
|
|
||||||
printf '%s\n' 'simulated author query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"review-requested:@me"*)
|
|
||||||
printf '%s\n' 'simulated requested query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
restoreWorkingDir(t, dir)
|
|
||||||
prependPath(t, dir)
|
|
||||||
resetReviewFlags(t)
|
|
||||||
t.Cleanup(func() {
|
|
||||||
reviewRepo = ""
|
|
||||||
})
|
|
||||||
|
|
||||||
parent := &cli.Command{Use: "qa"}
|
|
||||||
addReviewCommand(parent)
|
|
||||||
command := findSubcommand(t, parent, "review")
|
|
||||||
require.NoError(t, command.Flags().Set("repo", "forge/example"))
|
|
||||||
|
|
||||||
var runErr error
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
runErr = command.RunE(command, nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
require.Error(t, runErr)
|
|
||||||
assert.NotContains(t, output, "Your pull requests")
|
|
||||||
assert.NotContains(t, output, "Review requested")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAnalyzePRStatus_UsesDeterministicFailedCheckName(t *testing.T) {
|
|
||||||
pr := PullRequest{
|
|
||||||
Mergeable: "MERGEABLE",
|
|
||||||
ReviewDecision: "",
|
|
||||||
StatusChecks: &StatusCheckRollup{
|
|
||||||
Contexts: []StatusContext{
|
|
||||||
{State: "FAILURE", Conclusion: "failure", Name: "Zulu"},
|
|
||||||
{State: "FAILURE", Conclusion: "failure", Name: "Alpha"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
status, _, action := analyzePRStatus(pr)
|
|
||||||
|
|
||||||
assert.Equal(t, "✗", status)
|
|
||||||
assert.Equal(t, "CI failed: Alpha", action)
|
|
||||||
}
|
|
||||||
|
|
||||||
func resetReviewFlags(t *testing.T) {
|
|
||||||
t.Helper()
|
|
||||||
oldMine := reviewMine
|
|
||||||
oldRequested := reviewRequested
|
|
||||||
oldRepo := reviewRepo
|
|
||||||
oldJSON := reviewJSON
|
|
||||||
|
|
||||||
reviewMine = false
|
|
||||||
reviewRequested = false
|
|
||||||
reviewRepo = ""
|
|
||||||
reviewJSON = false
|
|
||||||
|
|
||||||
t.Cleanup(func() {
|
|
||||||
reviewMine = oldMine
|
|
||||||
reviewRequested = oldRequested
|
|
||||||
reviewRepo = oldRepo
|
|
||||||
reviewJSON = oldJSON
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -9,12 +9,10 @@
|
||||||
package qa
|
package qa
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"cmp"
|
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"slices"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
|
@ -45,12 +43,11 @@ type WorkflowRun struct {
|
||||||
|
|
||||||
// WorkflowJob represents a job within a workflow run
|
// WorkflowJob represents a job within a workflow run
|
||||||
type WorkflowJob struct {
|
type WorkflowJob struct {
|
||||||
ID int64 `json:"databaseId"`
|
ID int64 `json:"databaseId"`
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Status string `json:"status"`
|
Status string `json:"status"`
|
||||||
Conclusion string `json:"conclusion"`
|
Conclusion string `json:"conclusion"`
|
||||||
URL string `json:"url"`
|
URL string `json:"url"`
|
||||||
Steps []JobStep `json:"steps"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// JobStep represents a step within a job
|
// JobStep represents a step within a job
|
||||||
|
|
@ -113,7 +110,6 @@ func runWatch() error {
|
||||||
// Poll for workflow runs
|
// Poll for workflow runs
|
||||||
pollInterval := 3 * time.Second
|
pollInterval := 3 * time.Second
|
||||||
var lastStatus string
|
var lastStatus string
|
||||||
waitingStatus := dimStyle.Render(i18n.T("cmd.qa.watch.waiting_for_workflows"))
|
|
||||||
|
|
||||||
for {
|
for {
|
||||||
// Check if context deadline exceeded
|
// Check if context deadline exceeded
|
||||||
|
|
@ -129,10 +125,7 @@ func runWatch() error {
|
||||||
|
|
||||||
if len(runs) == 0 {
|
if len(runs) == 0 {
|
||||||
// No workflows triggered yet, keep waiting
|
// No workflows triggered yet, keep waiting
|
||||||
if waitingStatus != lastStatus {
|
cli.Print("\033[2K\r%s", dimStyle.Render(i18n.T("cmd.qa.watch.waiting_for_workflows")))
|
||||||
cli.Print("%s\n", waitingStatus)
|
|
||||||
lastStatus = waitingStatus
|
|
||||||
}
|
|
||||||
time.Sleep(pollInterval)
|
time.Sleep(pollInterval)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
@ -176,11 +169,12 @@ func runWatch() error {
|
||||||
|
|
||||||
// Only print if status changed
|
// Only print if status changed
|
||||||
if status != lastStatus {
|
if status != lastStatus {
|
||||||
cli.Print("%s\n", status)
|
cli.Print("\033[2K\r%s", status)
|
||||||
lastStatus = status
|
lastStatus = status
|
||||||
}
|
}
|
||||||
|
|
||||||
if allComplete {
|
if allComplete {
|
||||||
|
cli.Blank()
|
||||||
cli.Blank()
|
cli.Blank()
|
||||||
return printResults(ctx, repoFullName, runs)
|
return printResults(ctx, repoFullName, runs)
|
||||||
}
|
}
|
||||||
|
|
@ -314,17 +308,14 @@ func printResults(ctx context.Context, repoFullName string, runs []WorkflowRun)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
slices.SortFunc(successes, compareWorkflowRun)
|
|
||||||
slices.SortFunc(failures, compareWorkflowRun)
|
|
||||||
|
|
||||||
// Print successes briefly
|
// Print successes briefly
|
||||||
for _, run := range successes {
|
for _, run := range successes {
|
||||||
cli.Print("%s %s\n", successStyle.Render(i18n.T("common.label.success")), run.Name)
|
cli.Print("%s %s\n", successStyle.Render(cli.Glyph(":check:")), run.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Print failures with details
|
// Print failures with details
|
||||||
for _, run := range failures {
|
for _, run := range failures {
|
||||||
cli.Print("%s %s\n", errorStyle.Render(i18n.T("common.label.error")), run.Name)
|
cli.Print("%s %s\n", errorStyle.Render(cli.Glyph(":cross:")), run.Name)
|
||||||
|
|
||||||
// Fetch failed job details
|
// Fetch failed job details
|
||||||
failedJob, failedStep, errorLine := fetchFailureDetails(ctx, repoFullName, run.ID)
|
failedJob, failedStep, errorLine := fetchFailureDetails(ctx, repoFullName, run.ID)
|
||||||
|
|
@ -368,20 +359,25 @@ func fetchFailureDetails(ctx context.Context, repoFullName string, runID int64)
|
||||||
}
|
}
|
||||||
|
|
||||||
var result struct {
|
var result struct {
|
||||||
Jobs []WorkflowJob `json:"jobs"`
|
Jobs []struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Conclusion string `json:"conclusion"`
|
||||||
|
Steps []struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Conclusion string `json:"conclusion"`
|
||||||
|
Number int `json:"number"`
|
||||||
|
} `json:"steps"`
|
||||||
|
} `json:"jobs"`
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := json.Unmarshal(output, &result); err != nil {
|
if err := json.Unmarshal(output, &result); err != nil {
|
||||||
return "", "", ""
|
return "", "", ""
|
||||||
}
|
}
|
||||||
|
|
||||||
slices.SortFunc(result.Jobs, compareWorkflowJob)
|
|
||||||
|
|
||||||
// Find the failed job and step
|
// Find the failed job and step
|
||||||
for _, job := range result.Jobs {
|
for _, job := range result.Jobs {
|
||||||
if job.Conclusion == "failure" {
|
if job.Conclusion == "failure" {
|
||||||
jobName = job.Name
|
jobName = job.Name
|
||||||
slices.SortFunc(job.Steps, compareJobStep)
|
|
||||||
for _, step := range job.Steps {
|
for _, step := range job.Steps {
|
||||||
if step.Conclusion == "failure" {
|
if step.Conclusion == "failure" {
|
||||||
stepName = fmt.Sprintf("%d: %s", step.Number, step.Name)
|
stepName = fmt.Sprintf("%d: %s", step.Number, step.Name)
|
||||||
|
|
@ -446,33 +442,3 @@ func fetchErrorFromLogs(ctx context.Context, repoFullName string, runID int64) s
|
||||||
|
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func compareWorkflowRun(a, b WorkflowRun) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Name, b.Name),
|
|
||||||
cmp.Compare(a.DisplayTitle, b.DisplayTitle),
|
|
||||||
a.CreatedAt.Compare(b.CreatedAt),
|
|
||||||
a.UpdatedAt.Compare(b.UpdatedAt),
|
|
||||||
cmp.Compare(a.ID, b.ID),
|
|
||||||
cmp.Compare(a.URL, b.URL),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func compareWorkflowJob(a, b WorkflowJob) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Name, b.Name),
|
|
||||||
cmp.Compare(a.Conclusion, b.Conclusion),
|
|
||||||
cmp.Compare(a.Status, b.Status),
|
|
||||||
cmp.Compare(a.ID, b.ID),
|
|
||||||
cmp.Compare(a.URL, b.URL),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func compareJobStep(a, b JobStep) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Number, b.Number),
|
|
||||||
cmp.Compare(a.Name, b.Name),
|
|
||||||
cmp.Compare(a.Conclusion, b.Conclusion),
|
|
||||||
cmp.Compare(a.Status, b.Status),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,103 +0,0 @@
|
||||||
package qa
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestPrintResults_SortsRunsAndUsesDeterministicDetails(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
writeExecutable(t, filepath.Join(dir, "gh"), `#!/bin/sh
|
|
||||||
case "$*" in
|
|
||||||
*"run view 2 --repo forge/alpha --json jobs"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
{"jobs":[
|
|
||||||
{
|
|
||||||
"databaseId": 20,
|
|
||||||
"name": "Zulu Job",
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "failure",
|
|
||||||
"steps": [
|
|
||||||
{"name": "Zulu Step", "status": "completed", "conclusion": "failure", "number": 2}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"databaseId": 10,
|
|
||||||
"name": "Alpha Job",
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "failure",
|
|
||||||
"steps": [
|
|
||||||
{"name": "Zulu Step", "status": "completed", "conclusion": "failure", "number": 2},
|
|
||||||
{"name": "Alpha Step", "status": "completed", "conclusion": "failure", "number": 1}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]}
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"run view 2 --repo forge/alpha --log-failed"*)
|
|
||||||
cat <<'EOF'
|
|
||||||
Alpha error detail
|
|
||||||
EOF
|
|
||||||
;;
|
|
||||||
*"run view 4 --repo forge/alpha --json jobs"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
{"jobs":[
|
|
||||||
{
|
|
||||||
"databaseId": 40,
|
|
||||||
"name": "Omega Job",
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "failure",
|
|
||||||
"steps": [
|
|
||||||
{"name": "Omega Step", "status": "completed", "conclusion": "failure", "number": 1}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]}
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"run view 4 --repo forge/alpha --log-failed"*)
|
|
||||||
cat <<'EOF'
|
|
||||||
Omega error detail
|
|
||||||
EOF
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
`)
|
|
||||||
|
|
||||||
prependPath(t, dir)
|
|
||||||
|
|
||||||
runs := []WorkflowRun{
|
|
||||||
{ID: 3, Name: "Zulu Build", Conclusion: "success", URL: "https://example.com/zulu"},
|
|
||||||
{ID: 1, Name: "Alpha Build", Conclusion: "success", URL: "https://example.com/alpha"},
|
|
||||||
{ID: 4, Name: "Omega Failure", Conclusion: "failure", URL: "https://example.com/omega"},
|
|
||||||
{ID: 2, Name: "Beta Failure", Conclusion: "failure", URL: "https://example.com/beta"},
|
|
||||||
}
|
|
||||||
|
|
||||||
output := captureStdout(t, func() {
|
|
||||||
err := printResults(context.Background(), "forge/alpha", runs)
|
|
||||||
require.Error(t, err)
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.NotContains(t, output, "\033[2K\r")
|
|
||||||
alphaBuild := strings.Index(output, "Alpha Build")
|
|
||||||
require.NotEqual(t, -1, alphaBuild)
|
|
||||||
zuluBuild := strings.Index(output, "Zulu Build")
|
|
||||||
require.NotEqual(t, -1, zuluBuild)
|
|
||||||
assert.Less(t, alphaBuild, zuluBuild)
|
|
||||||
|
|
||||||
betaFailure := strings.Index(output, "Beta Failure")
|
|
||||||
require.NotEqual(t, -1, betaFailure)
|
|
||||||
omegaFailure := strings.Index(output, "Omega Failure")
|
|
||||||
require.NotEqual(t, -1, omegaFailure)
|
|
||||||
assert.Less(t, betaFailure, omegaFailure)
|
|
||||||
assert.Contains(t, output, "Job: Alpha Job (step: 1: Alpha Step)")
|
|
||||||
assert.Contains(t, output, "Error: Alpha error detail")
|
|
||||||
assert.NotContains(t, output, "Job: Zulu Job")
|
|
||||||
}
|
|
||||||
|
|
@ -1,440 +0,0 @@
|
||||||
# RFC-025: Agent Experience (AX) Design Principles
|
|
||||||
|
|
||||||
- **Status:** Draft
|
|
||||||
- **Authors:** Snider, Cladius
|
|
||||||
- **Date:** 2026-03-19
|
|
||||||
- **Applies to:** All Core ecosystem packages (CoreGO, CorePHP, CoreTS, core-agent)
|
|
||||||
|
|
||||||
## Abstract
|
|
||||||
|
|
||||||
Agent Experience (AX) is a design paradigm for software systems where the primary code consumer is an AI agent, not a human developer. AX sits alongside User Experience (UX) and Developer Experience (DX) as the third era of interface design.
|
|
||||||
|
|
||||||
This RFC establishes AX as a formal design principle for the Core ecosystem and defines the conventions that follow from it.
|
|
||||||
|
|
||||||
## Motivation
|
|
||||||
|
|
||||||
As of early 2026, AI agents write, review, and maintain the majority of code in the Core ecosystem. The original author has not manually edited code (outside of Core struct design) since October 2025. Code is processed semantically — agents reason about intent, not characters.
|
|
||||||
|
|
||||||
Design patterns inherited from the human-developer era optimise for the wrong consumer:
|
|
||||||
|
|
||||||
- **Short names** save keystrokes but increase semantic ambiguity
|
|
||||||
- **Functional option chains** are fluent for humans but opaque for agents tracing configuration
|
|
||||||
- **Error-at-every-call-site** produces 50% boilerplate that obscures intent
|
|
||||||
- **Generic type parameters** force agents to carry type context that the runtime already has
|
|
||||||
- **Panic-hiding conventions** (`Must*`) create implicit control flow that agents must special-case
|
|
||||||
|
|
||||||
AX acknowledges this shift and provides principles for designing code, APIs, file structures, and conventions that serve AI agents as first-class consumers.
|
|
||||||
|
|
||||||
## The Three Eras
|
|
||||||
|
|
||||||
| Era | Primary Consumer | Optimises For | Key Metric |
|
|
||||||
|-----|-----------------|---------------|------------|
|
|
||||||
| UX | End users | Discoverability, forgiveness, visual clarity | Task completion time |
|
|
||||||
| DX | Developers | Typing speed, IDE support, convention familiarity | Time to first commit |
|
|
||||||
| AX | AI agents | Predictability, composability, semantic navigation | Correct-on-first-pass rate |
|
|
||||||
|
|
||||||
AX does not replace UX or DX. End users still need good UX. Developers still need good DX. But when the primary code author and maintainer is an AI agent, the codebase should be designed for that consumer first.
|
|
||||||
|
|
||||||
## Principles
|
|
||||||
|
|
||||||
### 1. Predictable Names Over Short Names
|
|
||||||
|
|
||||||
Names are tokens that agents pattern-match across languages and contexts. Abbreviations introduce mapping overhead.
|
|
||||||
|
|
||||||
```
|
|
||||||
Config not Cfg
|
|
||||||
Service not Srv
|
|
||||||
Embed not Emb
|
|
||||||
Error not Err (as a subsystem name; err for local variables is fine)
|
|
||||||
Options not Opts
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** If a name would require a comment to explain, it is too short.
|
|
||||||
|
|
||||||
**Exception:** Industry-standard abbreviations that are universally understood (`HTTP`, `URL`, `ID`, `IPC`, `I18n`) are acceptable. The test: would an agent trained on any mainstream language recognise it without context?
|
|
||||||
|
|
||||||
### 2. Comments as Usage Examples
|
|
||||||
|
|
||||||
The function signature tells WHAT. The comment shows HOW with real values.
|
|
||||||
|
|
||||||
```go
|
|
||||||
// Detect the project type from files present
|
|
||||||
setup.Detect("/path/to/project")
|
|
||||||
|
|
||||||
// Set up a workspace with auto-detected template
|
|
||||||
setup.Run(setup.Options{Path: ".", Template: "auto"})
|
|
||||||
|
|
||||||
// Scaffold a PHP module workspace
|
|
||||||
setup.Run(setup.Options{Path: "./my-module", Template: "php"})
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** If a comment restates what the type signature already says, delete it. If a comment shows a concrete usage with realistic values, keep it.
|
|
||||||
|
|
||||||
**Rationale:** Agents learn from examples more effectively than from descriptions. A comment like "Run executes the setup process" adds zero information. A comment like `setup.Run(setup.Options{Path: ".", Template: "auto"})` teaches an agent exactly how to call the function.
|
|
||||||
|
|
||||||
### 3. Path Is Documentation
|
|
||||||
|
|
||||||
File and directory paths should be self-describing. An agent navigating the filesystem should understand what it is looking at without reading a README.
|
|
||||||
|
|
||||||
```
|
|
||||||
flow/deploy/to/homelab.yaml — deploy TO the homelab
|
|
||||||
flow/deploy/from/github.yaml — deploy FROM GitHub
|
|
||||||
flow/code/review.yaml — code review flow
|
|
||||||
template/file/go/struct.go.tmpl — Go struct file template
|
|
||||||
template/dir/workspace/php/ — PHP workspace scaffold
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** If an agent needs to read a file to understand what a directory contains, the directory naming has failed.
|
|
||||||
|
|
||||||
**Corollary:** The unified path convention (folder structure = HTTP route = CLI command = test path) is AX-native. One path, every surface.
|
|
||||||
|
|
||||||
### 4. Templates Over Freeform
|
|
||||||
|
|
||||||
When an agent generates code from a template, the output is constrained to known-good shapes. When an agent writes freeform, the output varies.
|
|
||||||
|
|
||||||
```go
|
|
||||||
// Template-driven — consistent output
|
|
||||||
lib.RenderFile("php/action", data)
|
|
||||||
lib.ExtractDir("php", targetDir, data)
|
|
||||||
|
|
||||||
// Freeform — variance in output
|
|
||||||
"write a PHP action class that..."
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** For any code pattern that recurs, provide a template. Templates are guardrails for agents.
|
|
||||||
|
|
||||||
**Scope:** Templates apply to file generation, workspace scaffolding, config generation, and commit messages. They do NOT apply to novel logic — agents should write business logic freeform with the domain knowledge available.
|
|
||||||
|
|
||||||
### 5. Declarative Over Imperative
|
|
||||||
|
|
||||||
Agents reason better about declarations of intent than sequences of operations.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# Declarative — agent sees what should happen
|
|
||||||
steps:
|
|
||||||
- name: build
|
|
||||||
flow: tools/docker-build
|
|
||||||
with:
|
|
||||||
context: "{{ .app_dir }}"
|
|
||||||
image_name: "{{ .image_name }}"
|
|
||||||
|
|
||||||
- name: deploy
|
|
||||||
flow: deploy/with/docker
|
|
||||||
with:
|
|
||||||
host: "{{ .host }}"
|
|
||||||
```
|
|
||||||
|
|
||||||
```go
|
|
||||||
// Imperative — agent must trace execution
|
|
||||||
cmd := exec.Command("docker", "build", "--platform", "linux/amd64", "-t", imageName, ".")
|
|
||||||
cmd.Dir = appDir
|
|
||||||
if err := cmd.Run(); err != nil {
|
|
||||||
return fmt.Errorf("docker build: %w", err)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** Orchestration, configuration, and pipeline logic should be declarative (YAML/JSON). Implementation logic should be imperative (Go/PHP/TS). The boundary is: if an agent needs to compose or modify the logic, make it declarative.
|
|
||||||
|
|
||||||
### 6. Universal Types (Core Primitives)
|
|
||||||
|
|
||||||
Every component in the ecosystem accepts and returns the same primitive types. An agent processing any level of the tree sees identical shapes.
|
|
||||||
|
|
||||||
```go
|
|
||||||
// Universal contract
|
|
||||||
setup.Run(core.Options{Path: ".", Template: "auto"})
|
|
||||||
brain.New(core.Options{Name: "openbrain"})
|
|
||||||
deploy.Run(core.Options{Flow: "deploy/to/homelab"})
|
|
||||||
|
|
||||||
// Fractal — Core itself is a Service
|
|
||||||
core.New(core.Options{
|
|
||||||
Services: []core.Service{
|
|
||||||
process.New(core.Options{Name: "process"}),
|
|
||||||
brain.New(core.Options{Name: "brain"}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
**Core primitive types:**
|
|
||||||
|
|
||||||
| Type | Purpose |
|
|
||||||
|------|---------|
|
|
||||||
| `core.Options` | Input configuration (what you want) |
|
|
||||||
| `core.Config` | Runtime settings (what is active) |
|
|
||||||
| `core.Data` | Embedded or stored content |
|
|
||||||
| `core.Service` | A managed component with lifecycle |
|
|
||||||
| `core.Result[T]` | Return value with OK/fail state |
|
|
||||||
|
|
||||||
**What this replaces:**
|
|
||||||
|
|
||||||
| Go Convention | Core AX | Why |
|
|
||||||
|--------------|---------|-----|
|
|
||||||
| `func With*(v) Option` | `core.Options{Field: v}` | Struct literal is parseable; option chain requires tracing |
|
|
||||||
| `func Must*(v) T` | `core.Result[T]` | No hidden panics; errors flow through Core |
|
|
||||||
| `func *For[T](c) T` | `c.Service("name")` | String lookup is greppable; generics require type context |
|
|
||||||
| `val, err :=` everywhere | Single return via `core.Result` | Intent not obscured by error handling |
|
|
||||||
| `_ = err` | Never needed | Core handles all errors internally |
|
|
||||||
|
|
||||||
### 7. Directory as Semantics
|
|
||||||
|
|
||||||
The directory structure tells an agent the intent before it reads a word. Top-level directories are semantic categories, not organisational bins.
|
|
||||||
|
|
||||||
```
|
|
||||||
plans/
|
|
||||||
├── code/ # Pure primitives — read for WHAT exists
|
|
||||||
├── project/ # Products — read for WHAT we're building and WHY
|
|
||||||
└── rfc/ # Contracts — read for constraints and rules
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** An agent should know what kind of document it's reading from the path alone. `code/core/go/io/RFC.md` = a lib primitive spec. `project/ofm/RFC.md` = a product spec that cross-references code/. `rfc/snider/borg/RFC-BORG-006-SMSG-FORMAT.md` = an immutable contract for the Borg SMSG protocol.
|
|
||||||
|
|
||||||
**Corollary:** The three-way split (code/project/rfc) extends principle 3 (Path Is Documentation) from files to entire subtrees. The path IS the metadata.
|
|
||||||
|
|
||||||
### 8. Lib Never Imports Consumer
|
|
||||||
|
|
||||||
Dependency flows one direction. Libraries define primitives. Consumers compose from them. A new feature in a consumer can never break a library.
|
|
||||||
|
|
||||||
```
|
|
||||||
code/core/go/* → lib tier (stable foundation)
|
|
||||||
code/core/agent/ → consumer tier (composes from go/*)
|
|
||||||
code/core/cli/ → consumer tier (composes from go/*)
|
|
||||||
code/core/gui/ → consumer tier (composes from go/*)
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** If package A is in `go/` and package B is in the consumer tier, B may import A but A must never import B. The repo naming convention enforces this: `go-{name}` = lib, bare `{name}` = consumer.
|
|
||||||
|
|
||||||
**Why this matters for agents:** When an agent is dispatched to implement a feature in `core/agent`, it can freely import from `go-io`, `go-scm`, `go-process`. But if an agent is dispatched to `go-io`, it knows its changes are foundational — every consumer depends on it, so the contract must not break.
|
|
||||||
|
|
||||||
### 9. Issues Are N+(rounds) Deep
|
|
||||||
|
|
||||||
Problems in code and specs are layered. Surface issues mask deeper issues. Fixing the surface reveals the next layer. This is not a failure mode — it is the discovery process.
|
|
||||||
|
|
||||||
```
|
|
||||||
Pass 1: Find 16 issues (surface — naming, imports, obvious errors)
|
|
||||||
Pass 2: Find 11 issues (structural — contradictions, missing types)
|
|
||||||
Pass 3: Find 5 issues (architectural — signature mismatches, registration gaps)
|
|
||||||
Pass 4: Find 4 issues (contract — cross-spec API mismatches)
|
|
||||||
Pass 5: Find 2 issues (mechanical — path format, nil safety)
|
|
||||||
Pass N: Findings are trivial → spec/code is complete
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** Iteration is required, not a failure. Each pass sees what the previous pass could not, because the context changed. An agent dispatched with the same task on the same repo will find different things each time — this is correct behaviour.
|
|
||||||
|
|
||||||
**Corollary:** The cheapest model should do the most passes (surface work). The frontier model should arrive last, when only deep issues remain. Tiered iteration: grunt model grinds → mid model pre-warms → frontier model polishes.
|
|
||||||
|
|
||||||
**Anti-pattern:** One-shot generation expecting valid output. No model, no human, produces correct-on-first-pass for non-trivial work. Expecting it wastes the first pass on surface issues that a cheaper pass would have caught.
|
|
||||||
|
|
||||||
### 10. CLI Tests as Artifact Validation
|
|
||||||
|
|
||||||
Unit tests verify the code. CLI tests verify the binary. The directory structure IS the command structure — path maps to command, Taskfile runs the test.
|
|
||||||
|
|
||||||
```
|
|
||||||
tests/cli/
|
|
||||||
├── core/
|
|
||||||
│ └── lint/
|
|
||||||
│ ├── Taskfile.yaml ← test `core-lint` (root)
|
|
||||||
│ ├── run/
|
|
||||||
│ │ ├── Taskfile.yaml ← test `core-lint run`
|
|
||||||
│ │ └── fixtures/
|
|
||||||
│ ├── go/
|
|
||||||
│ │ ├── Taskfile.yaml ← test `core-lint go`
|
|
||||||
│ │ └── fixtures/
|
|
||||||
│ └── security/
|
|
||||||
│ ├── Taskfile.yaml ← test `core-lint security`
|
|
||||||
│ └── fixtures/
|
|
||||||
```
|
|
||||||
|
|
||||||
**Rule:** Every CLI command has a matching `tests/cli/{path}/Taskfile.yaml`. The Taskfile runs the compiled binary against fixtures with known inputs and validates the output. If the CLI test passes, the underlying actions work — because CLI commands call actions, MCP tools call actions, API endpoints call actions. Test the CLI, trust the rest.
|
|
||||||
|
|
||||||
**Pattern:**
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# tests/cli/core/lint/go/Taskfile.yaml
|
|
||||||
version: '3'
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- core-lint go --output json fixtures/ > /tmp/result.json
|
|
||||||
- jq -e '.findings | length > 0' /tmp/result.json
|
|
||||||
- jq -e '.summary.passed == false' /tmp/result.json
|
|
||||||
```
|
|
||||||
|
|
||||||
**Why this matters for agents:** An agent can validate its own work by running `task test` in the matching `tests/cli/` directory. No test framework, no mocking, no setup — just the binary, fixtures, and `jq` assertions. The agent builds the binary, runs the test, sees the result. If it fails, the agent can read the fixture, read the output, and fix the code.
|
|
||||||
|
|
||||||
**Corollary:** Fixtures are planted bugs. Each fixture file has a known issue that the linter must find. If the linter doesn't find it, the test fails. Fixtures are the spec for what the tool must detect — they ARE the test cases, not descriptions of test cases.
|
|
||||||
|
|
||||||
## Applying AX to Existing Patterns
|
|
||||||
|
|
||||||
### File Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
# AX-native: path describes content
|
|
||||||
core/agent/
|
|
||||||
├── go/ # Go source
|
|
||||||
├── php/ # PHP source
|
|
||||||
├── ui/ # Frontend source
|
|
||||||
├── claude/ # Claude Code plugin
|
|
||||||
└── codex/ # Codex plugin
|
|
||||||
|
|
||||||
# Not AX: generic names requiring README
|
|
||||||
src/
|
|
||||||
├── lib/
|
|
||||||
├── utils/
|
|
||||||
└── helpers/
|
|
||||||
```
|
|
||||||
|
|
||||||
### Error Handling
|
|
||||||
|
|
||||||
```go
|
|
||||||
// AX-native: errors are infrastructure, not application logic
|
|
||||||
svc := c.Service("brain")
|
|
||||||
cfg := c.Config().Get("database.host")
|
|
||||||
// Errors logged by Core. Code reads like a spec.
|
|
||||||
|
|
||||||
// Not AX: errors dominate the code
|
|
||||||
svc, err := c.ServiceFor[brain.Service]()
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("get brain service: %w", err)
|
|
||||||
}
|
|
||||||
cfg, err := c.Config().Get("database.host")
|
|
||||||
if err != nil {
|
|
||||||
_ = err // silenced because "it'll be fine"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### API Design
|
|
||||||
|
|
||||||
```go
|
|
||||||
// AX-native: one shape, every surface
|
|
||||||
core.New(core.Options{
|
|
||||||
Name: "my-app",
|
|
||||||
Services: []core.Service{...},
|
|
||||||
Config: core.Config{...},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Not AX: multiple patterns for the same thing
|
|
||||||
core.New(
|
|
||||||
core.WithName("my-app"),
|
|
||||||
core.WithService(factory1),
|
|
||||||
core.WithService(factory2),
|
|
||||||
core.WithConfig(cfg),
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
## The Plans Convention — AX Development Lifecycle
|
|
||||||
|
|
||||||
The `plans/` directory structure encodes a development methodology designed for how generative AI actually works: iterative refinement across structured phases, not one-shot generation.
|
|
||||||
|
|
||||||
### The Three-Way Split
|
|
||||||
|
|
||||||
```
|
|
||||||
plans/
|
|
||||||
├── project/ # 1. WHAT and WHY — start here
|
|
||||||
├── rfc/ # 2. CONSTRAINTS — immutable contracts
|
|
||||||
└── code/ # 3. HOW — implementation specs
|
|
||||||
```
|
|
||||||
|
|
||||||
Each directory is a phase. Work flows from project → rfc → code. Each transition forces a refinement pass — you cannot write a code spec without discovering gaps in the project spec, and you cannot write an RFC without discovering assumptions in both.
|
|
||||||
|
|
||||||
**Three places for data that can't be written simultaneously = three guaranteed iterations of "actually, this needs changing."** Refinement is baked into the structure, not bolted on as a review step.
|
|
||||||
|
|
||||||
### Phase 1: Project (Vision)
|
|
||||||
|
|
||||||
Start with `project/`. No code exists yet. Define:
|
|
||||||
- What the product IS and who it serves
|
|
||||||
- What existing primitives it consumes (cross-ref to `code/`)
|
|
||||||
- What constraints it operates under (cross-ref to `rfc/`)
|
|
||||||
|
|
||||||
This is where creativity lives. Map features to building blocks. Connect systems. The project spec is integrative — it references everything else.
|
|
||||||
|
|
||||||
### Phase 2: RFC (Contracts)
|
|
||||||
|
|
||||||
Extract the immutable rules into `rfc/`. These are constraints that don't change with implementation:
|
|
||||||
- Wire formats, protocols, hash algorithms
|
|
||||||
- Security properties that must hold
|
|
||||||
- Compatibility guarantees
|
|
||||||
|
|
||||||
RFCs are numbered per component (`RFC-BORG-006-SMSG-FORMAT.md`) and never modified after acceptance. If the contract changes, write a new RFC.
|
|
||||||
|
|
||||||
### Phase 3: Code (Implementation Specs)
|
|
||||||
|
|
||||||
Define the implementation in `code/`. Each component gets an RFC.md that an agent can implement from:
|
|
||||||
- Struct definitions (the DTOs — see principle 6)
|
|
||||||
- Method signatures and behaviour
|
|
||||||
- Error conditions and edge cases
|
|
||||||
- Cross-references to other code/ specs
|
|
||||||
|
|
||||||
The code spec IS the product. Write the spec → dispatch to an agent → review output → iterate.
|
|
||||||
|
|
||||||
### Pre-Launch: Alignment Protocol
|
|
||||||
|
|
||||||
Before dispatching for implementation, verify spec-model alignment:
|
|
||||||
|
|
||||||
```
|
|
||||||
1. REVIEW — The implementation model (Codex/Jules) reads the spec
|
|
||||||
and reports missing elements. This surfaces the delta between
|
|
||||||
the model's training and the spec's assumptions.
|
|
||||||
|
|
||||||
"I need X, Y, Z to implement this" is the model saying
|
|
||||||
"I hear you but I'm missing context" — without asking.
|
|
||||||
|
|
||||||
2. ADJUST — Update the spec to close the gaps. Add examples,
|
|
||||||
clarify ambiguities, provide the context the model needs.
|
|
||||||
This is shared alignment, not compromise.
|
|
||||||
|
|
||||||
3. VERIFY — A different model (or sub-agent) reviews the adjusted
|
|
||||||
spec without the planner's bias. Fresh eyes on the contract.
|
|
||||||
"Does this make sense to someone who wasn't in the room?"
|
|
||||||
|
|
||||||
4. READY — When the review findings are trivial or deployment-
|
|
||||||
related (not architectural), the spec is ready to dispatch.
|
|
||||||
```
|
|
||||||
|
|
||||||
### Implementation: Iterative Dispatch
|
|
||||||
|
|
||||||
Same prompt, multiple runs. Each pass sees deeper because the context evolved:
|
|
||||||
|
|
||||||
```
|
|
||||||
Round 1: Build features (the obvious gaps)
|
|
||||||
Round 2: Write tests (verify what was built)
|
|
||||||
Round 3: Harden security (what can go wrong?)
|
|
||||||
Round 4: Next RFC section (what's still missing?)
|
|
||||||
Round N: Findings are trivial → implementation is complete
|
|
||||||
```
|
|
||||||
|
|
||||||
Re-running is not failure. It is the process. Each pass changes the codebase, which changes what the next pass can see. The iteration IS the refinement.
|
|
||||||
|
|
||||||
### Post-Implementation: Auto-Documentation
|
|
||||||
|
|
||||||
The QA/verify chain produces artefacts that feed forward:
|
|
||||||
- Test results document the contract (what works, what doesn't)
|
|
||||||
- Coverage reports surface untested paths
|
|
||||||
- Diff summaries prep the changelog for the next release
|
|
||||||
- Doc site updates from the spec (the spec IS the documentation)
|
|
||||||
|
|
||||||
The output of one cycle is the input to the next. The plans repo stays current because the specs drive the code, not the other way round.
|
|
||||||
|
|
||||||
## Compatibility
|
|
||||||
|
|
||||||
AX conventions are valid, idiomatic Go/PHP/TS. They do not require language extensions, code generation, or non-standard tooling. An AX-designed codebase compiles, tests, and deploys with standard toolchains.
|
|
||||||
|
|
||||||
The conventions diverge from community patterns (functional options, Must/For, etc.) but do not violate language specifications. This is a style choice, not a fork.
|
|
||||||
|
|
||||||
## Adoption
|
|
||||||
|
|
||||||
AX applies to all new code in the Core ecosystem. Existing code migrates incrementally as it is touched — no big-bang rewrite.
|
|
||||||
|
|
||||||
Priority order:
|
|
||||||
1. **Public APIs** (package-level functions, struct constructors)
|
|
||||||
2. **File structure** (path naming, template locations)
|
|
||||||
3. **Internal fields** (struct field names, local variables)
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- dAppServer unified path convention (2024)
|
|
||||||
- CoreGO DTO pattern refactor (2026-03-18)
|
|
||||||
- Core primitives design (2026-03-19)
|
|
||||||
- Go Proverbs, Rob Pike (2015) — AX provides an updated lens
|
|
||||||
|
|
||||||
## Changelog
|
|
||||||
|
|
||||||
- 2026-03-19: Initial draft
|
|
||||||
---
# RFC-LINT: core/lint Agent-Native CLI and Adapter Contract
|
|
||||||
|
|
||||||
- **Status:** Implemented
|
|
||||||
- **Date:** 2026-03-30
|
|
||||||
- **Applies to:** `forge.lthn.ai/core/lint`
|
|
||||||
- **Standard:** [`docs/RFC-CORE-008-AGENT-EXPERIENCE.md`](./RFC-CORE-008-AGENT-EXPERIENCE.md)
|
|
||||||
|
|
||||||
## Abstract
|
|
||||||
|
|
||||||
`core/lint` is a standalone Go CLI and library that detects project languages, runs matching lint adapters, merges their findings into one report, and writes machine-readable output for local development, CI, and agent QA.
|
|
||||||
|
|
||||||
The binary does not bundle external linters. It orchestrates tools already present in `PATH`, treats missing tools as `skipped`, and keeps the orchestration report contract separate from the legacy catalog commands.
|
|
||||||
|
|
||||||
This RFC describes the implementation that exists in this repository. It replaces the earlier draft that described a future Core service with Tasks, IPC actions, MCP wrapping, build stages, artifact stages, entitlement gates, and scheduled runs. Those designs are not the current contract.
|
|
||||||
|
|
||||||
## Motivation
|
|
||||||
|
|
||||||
Earlier drafts described a future `core/lint` service that does not exist in this module. Agents dispatched to this repository need the contract that is implemented now, not the architecture that might exist later.
|
|
||||||
|
|
||||||
The current implementation has three properties that matter for AX:
|
|
||||||
|
|
||||||
- one CLI binary with explicit command paths
|
|
||||||
- one orchestration DTO (`RunInput`) and one orchestration report (`Report`)
|
|
||||||
- one clear split between adapter-driven runs and the older embedded catalog commands
|
|
||||||
|
|
||||||
An agent should be able to read the paths, map the commands, and predict the output shapes without reverse-engineering aspirational features from an outdated RFC.
|
|
||||||
|
|
||||||
## AX Principles Applied
|
|
||||||
|
|
||||||
This RFC follows the Agent Experience standard directly:
|
|
||||||
|
|
||||||
1. Predictable names over short names: `RunInput`, `Report`, `ToolRun`, `ToolInfo`, `Service`, and `Adapter` are the contract nouns across the CLI and package boundary.
|
|
||||||
2. Comments as usage examples: command examples use real flags and real paths such as `core-lint run --output json .` and `core-lint tools --output json --lang go`.
|
|
||||||
3. Path is documentation: the implementation map is the contract, and `tests/cli/lint/{path}` mirrors the command path it validates.
|
|
||||||
4. Declarative over imperative: `.core/lint.yaml` declares tool groups, thresholds, and output defaults instead of encoding those decisions in hidden CLI behavior.
|
|
||||||
5. One input shape for orchestration: `pkg/lint/service.go` owns `RunInput`.
|
|
||||||
6. One output shape for orchestration: `pkg/lint/service.go` owns `Report`.
|
|
||||||
7. CLI tests as artifact validation: the Taskfiles under `tests/cli/lint/...` are the runnable contract for the binary surface.
|
|
||||||
8. Stable sequencing over hidden magic: adapters run sequentially, then tool runs and findings are sorted before output.
|
|
||||||
|
|
||||||
## Path Map
|
|
||||||
|
|
||||||
An agent should be able to navigate the module from the path alone:
|
|
||||||
|
|
||||||
| Path | Meaning |
|
|
||||||
|------|---------|
|
|
||||||
| `cmd/core-lint/main.go` | CLI surface for `run`, `detect`, `tools`, `init`, language shortcuts, `hook`, and the legacy `lint` namespace |
|
|
||||||
| `pkg/lint/service.go` | Orchestrator for config loading, language selection, adapter selection, hook mode, and report assembly |
|
|
||||||
| `pkg/lint/adapter.go` | Adapter interface, external adapter registry, built-in catalog fallback, external command execution, and output parsers |
|
|
||||||
| `pkg/lint/config.go` | Repo-local config contract and defaults for `core-lint init` |
|
|
||||||
| `pkg/lint/detect_project.go` | Project language detection from markers and file names |
|
|
||||||
| `pkg/lint/report.go` | `Summary` aggregation and JSON/text/GitHub/SARIF writers |
|
|
||||||
| `lint.go` | Embedded catalog loader for `lint check` and `lint catalog` |
|
|
||||||
| `catalog/*.yaml` | Embedded pattern catalog files used by the legacy catalog commands |
|
|
||||||
| `tests/cli/lint/...` | CLI artifact tests; the path is the command |
|
|
||||||
|
|
||||||
## Scope
|
|
||||||
|
|
||||||
In scope:
|
|
||||||
|
|
||||||
- Project language detection
|
|
||||||
- Config-driven lint tool selection
|
|
||||||
- Embedded catalog scanning
|
|
||||||
- External linter orchestration
|
|
||||||
- Structured report generation
|
|
||||||
- Git pre-commit hook installation and removal
|
|
||||||
- CLI artifact tests in `tests/cli/lint/...`
|
|
||||||
|
|
||||||
Out of scope:
|
|
||||||
|
|
||||||
- Core service registration
|
|
||||||
- IPC or MCP exposure
|
|
||||||
- Build-stage compilation checks
|
|
||||||
- Artifact-stage scans against compiled binaries or images
|
|
||||||
- Scheduler integration
|
|
||||||
- Sidecar SBOM file writing
|
|
||||||
- Automatic tool installation
|
|
||||||
- Entitlement enforcement
|
|
||||||
|
|
||||||
## Command Surface
|
|
||||||
|
|
||||||
The repository ships two CLI surfaces:
|
|
||||||
|
|
||||||
- The root AX surface: `core-lint run`, `core-lint detect`, `core-lint tools`, and friends
|
|
||||||
- The legacy catalog surface: `core-lint lint check` and `core-lint lint catalog ...`
|
|
||||||
|
|
||||||
The RFC commands are mounted twice: once at the root and once under `core-lint lint ...`. Both surfaces are real. The root surface is shorter; the namespaced surface keeps the command path semantically meaningful.
|
|
||||||
|
|
||||||
| Capability | Root path | Namespaced alias | Example |
|
|
||||||
|------------|-----------|------------------|---------|
|
|
||||||
| Full orchestration | `core-lint run [path]` | `core-lint lint run [path]` | `core-lint run --output json .` |
|
|
||||||
| Go only | `core-lint go [path]` | `core-lint lint go [path]` | `core-lint go .` |
|
|
||||||
| PHP only | `core-lint php [path]` | `core-lint lint php [path]` | `core-lint php .` |
|
|
||||||
| JS group shortcut | `core-lint js [path]` | `core-lint lint js [path]` | `core-lint js .` |
|
|
||||||
| Python only | `core-lint python [path]` | `core-lint lint python [path]` | `core-lint python .` |
|
|
||||||
| Security group shortcut | `core-lint security [path]` | `core-lint lint security [path]` | `core-lint security --ci .` |
|
|
||||||
| Compliance tools only | `core-lint compliance [path]` | `core-lint lint compliance [path]` | `core-lint compliance --output json .` |
|
|
||||||
| Language detection | `core-lint detect [path]` | `core-lint lint detect [path]` | `core-lint detect --output json .` |
|
|
||||||
| Tool inventory | `core-lint tools` | `core-lint lint tools` | `core-lint tools --output json --lang go` |
|
|
||||||
| Default config | `core-lint init [path]` | `core-lint lint init [path]` | `core-lint init /tmp/project` |
|
|
||||||
| Pre-commit hook install | `core-lint hook install [path]` | `core-lint lint hook install [path]` | `core-lint hook install .` |
|
|
||||||
| Pre-commit hook remove | `core-lint hook remove [path]` | `core-lint lint hook remove [path]` | `core-lint hook remove .` |
|
|
||||||
| Embedded catalog scan | none | `core-lint lint check [path...]` | `core-lint lint check --format json tests/cli/lint/check/fixtures` |
|
|
||||||
| Embedded catalog list | none | `core-lint lint catalog list` | `core-lint lint catalog list --lang go` |
|
|
||||||
| Embedded catalog show | none | `core-lint lint catalog show RULE_ID` | `core-lint lint catalog show go-sec-001` |
|
|
||||||
|
|
||||||
`core-lint js` is a shortcut for `Lang=js`, not a dedicated TypeScript command. TypeScript-only runs use `core-lint run --lang ts ...` or plain `run` with auto-detection.
|
|
||||||
|
|
||||||
`core-lint compliance` is also not identical to `core-lint run --sbom`. The shortcut sets `Category=compliance`, so the final adapter filter keeps only adapters whose runtime category is `compliance`. `run --sbom` appends the compliance config group without that category filter.
|
|
||||||
|
|
||||||
## RunInput Contract
|
|
||||||
|
|
||||||
All orchestration commands resolve into one DTO:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type RunInput struct {
|
|
||||||
Path string `json:"path"`
|
|
||||||
Output string `json:"output,omitempty"`
|
|
||||||
Config string `json:"config,omitempty"`
|
|
||||||
FailOn string `json:"fail_on,omitempty"`
|
|
||||||
Category string `json:"category,omitempty"`
|
|
||||||
Lang string `json:"lang,omitempty"`
|
|
||||||
Hook bool `json:"hook,omitempty"`
|
|
||||||
CI bool `json:"ci,omitempty"`
|
|
||||||
Files []string `json:"files,omitempty"`
|
|
||||||
SBOM bool `json:"sbom,omitempty"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Input Resolution Rules
|
|
||||||
|
|
||||||
`Service.Run()` resolves input in this order:
|
|
||||||
|
|
||||||
1. Empty `Path` becomes `.`
|
|
||||||
2. `CI=true` sets `Output=github` only when `Output` was not provided explicitly
|
|
||||||
3. Config is loaded from `--config` or `.core/lint.yaml`
|
|
||||||
4. Empty `FailOn` falls back to the loaded config
|
|
||||||
5. `Hook=true` with no explicit `Files` reads staged files from `git diff --cached --name-only`
|
|
||||||
6. `Lang` overrides auto-detection
|
|
||||||
7. `Files` override directory detection for language inference
|
|
||||||
|
|
||||||
### CLI Output Resolution
|
|
||||||
|
|
||||||
The CLI resolves output before it calls `Service.Run()`:
|
|
||||||
|
|
||||||
1. explicit `--output` wins
|
|
||||||
2. otherwise `--ci` becomes `github`
|
|
||||||
3. otherwise the loaded config `output` value is used
|
|
||||||
4. if the config output is empty, the CLI falls back to `text`
|
|
||||||
|
|
||||||
### Category and Language Precedence
|
|
||||||
|
|
||||||
Tool group selection is intentionally simple and deterministic:
|
|
||||||
|
|
||||||
1. `Category=security` selects the `lint.security` config group
|
|
||||||
2. `Category=compliance` selects only the `lint.compliance` config group
|
|
||||||
3. `Lang=go|php|js|ts|python|...` means only that language group
|
|
||||||
4. Plain `run` uses all detected language groups plus `infra`
|
|
||||||
5. Plain `run --ci` adds the `security` group
|
|
||||||
6. Plain `run --sbom` adds the `compliance` group
|
|
||||||
|
|
||||||
`Lang` is stronger than `CI` and `SBOM`. If `Lang` is set, the language group wins and the extra groups are not appended.
|
|
||||||
|
|
||||||
`Category=style`, `Category=correctness`, and other non-group categories act as adapter-side filters only. They do not map to dedicated config groups.
|
|
||||||
|
|
||||||
One current consequence is that `grype` is listed in the default `lint.compliance` config group but advertises `Category() == "security"`. `core-lint compliance` therefore filters it out, while plain `core-lint run --sbom` still leaves it eligible.
|
|
||||||
|
|
||||||
Final adapter selection has one extra Go-specific exception: if Go is present and `Category != "compliance"`, `Service.Run()` prepends the built-in `catalog` adapter after registry filtering. That means `core-lint security` on a Go project can still emit `catalog` findings tagged `security`.
|
|
||||||
|
|
||||||
## Config Contract
|
|
||||||
|
|
||||||
Repo-local config lives at `.core/lint.yaml`.
|
|
||||||
|
|
||||||
`core-lint init /path/to/project` writes the default file from `pkg/lint/config.go`.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
lint:
|
|
||||||
go:
|
|
||||||
- golangci-lint
|
|
||||||
- gosec
|
|
||||||
- govulncheck
|
|
||||||
- staticcheck
|
|
||||||
- revive
|
|
||||||
- errcheck
|
|
||||||
php:
|
|
||||||
- phpstan
|
|
||||||
- psalm
|
|
||||||
- phpcs
|
|
||||||
- phpmd
|
|
||||||
- pint
|
|
||||||
js:
|
|
||||||
- biome
|
|
||||||
- oxlint
|
|
||||||
- eslint
|
|
||||||
- prettier
|
|
||||||
ts:
|
|
||||||
- biome
|
|
||||||
- oxlint
|
|
||||||
- typescript
|
|
||||||
python:
|
|
||||||
- ruff
|
|
||||||
- mypy
|
|
||||||
- bandit
|
|
||||||
- pylint
|
|
||||||
infra:
|
|
||||||
- shellcheck
|
|
||||||
- hadolint
|
|
||||||
- yamllint
|
|
||||||
- jsonlint
|
|
||||||
- markdownlint
|
|
||||||
security:
|
|
||||||
- gitleaks
|
|
||||||
- trivy
|
|
||||||
- gosec
|
|
||||||
- bandit
|
|
||||||
- semgrep
|
|
||||||
compliance:
|
|
||||||
- syft
|
|
||||||
- grype
|
|
||||||
- scancode
|
|
||||||
|
|
||||||
output: json
|
|
||||||
fail_on: error
|
|
||||||
paths:
|
|
||||||
- .
|
|
||||||
exclude:
|
|
||||||
- vendor/
|
|
||||||
- node_modules/
|
|
||||||
- .core/
|
|
||||||
```
|
|
||||||
|
|
||||||
### Config Rules
|
|
||||||
|
|
||||||
- If `.core/lint.yaml` does not exist, `DefaultConfig()` is used in memory
|
|
||||||
- Relative `--config` paths resolve relative to `Path`
|
|
||||||
- Unknown tool names in config are inert; the adapter registry is authoritative
|
|
||||||
- The current default config includes `prettier`, but the adapter registry does not yet provide a `prettier` adapter
|
|
||||||
- `paths` and `exclude` are part of the file schema, but the current orchestration path does not read them; detection and scanning still rely on built-in defaults
|
|
||||||
- `LintConfig` still accepts a `schedules` map, but no current CLI command reads or executes it
|
|
||||||
|
|
||||||
## Detection Contract
|
|
||||||
|
|
||||||
`pkg/lint/detect_project.go` is the only project-language detector used by orchestration commands.
|
|
||||||
|
|
||||||
### Marker Files
|
|
||||||
|
|
||||||
| Marker | Language |
|
|
||||||
|--------|----------|
|
|
||||||
| `go.mod` | `go` |
|
|
||||||
| `composer.json` | `php` |
|
|
||||||
| `package.json` | `js` |
|
|
||||||
| `tsconfig.json` | `ts` |
|
|
||||||
| `requirements.txt` | `python` |
|
|
||||||
| `pyproject.toml` | `python` |
|
|
||||||
| `Cargo.toml` | `rust` |
|
|
||||||
| `Dockerfile*` | `dockerfile` |
|
|
||||||
|
|
||||||
### File Extensions
|
|
||||||
|
|
||||||
| Extension | Language |
|
|
||||||
|-----------|----------|
|
|
||||||
| `.go` | `go` |
|
|
||||||
| `.php` | `php` |
|
|
||||||
| `.js`, `.jsx` | `js` |
|
|
||||||
| `.ts`, `.tsx` | `ts` |
|
|
||||||
| `.py` | `python` |
|
|
||||||
| `.rs` | `rust` |
|
|
||||||
| `.sh` | `shell` |
|
|
||||||
| `.yaml`, `.yml` | `yaml` |
|
|
||||||
| `.json` | `json` |
|
|
||||||
| `.md` | `markdown` |
|
|
||||||
|
|
||||||
### Detection Rules
|
|
||||||
|
|
||||||
- Directory traversal skips `vendor`, `node_modules`, `.git`, `testdata`, `.core`, and any hidden directory
|
|
||||||
- Results are de-duplicated and returned in sorted order
|
|
||||||
- `core-lint detect --output json tests/cli/lint/check/fixtures` currently returns `["go"]`
|
|
||||||
|
|
||||||
## Execution Model
|
|
||||||
|
|
||||||
`Service.Run()` is the orchestrator. The current implementation is sequential, not parallel.
|
|
||||||
|
|
||||||
### Step 1: Load Config
|
|
||||||
|
|
||||||
`LoadProjectConfig()` returns the repo-local config or the in-memory default.
|
|
||||||
|
|
||||||
### Step 2: Resolve File Scope
|
|
||||||
|
|
||||||
- If `Files` was provided, only those files are considered for language detection and adapter arguments
|
|
||||||
- If `Hook=true` and `Files` is empty, staged files are read from Git
|
|
||||||
- Otherwise the whole project path is scanned
|
|
||||||
|
|
||||||
### Step 3: Resolve Languages
|
|
||||||
|
|
||||||
- `Lang` wins first
|
|
||||||
- `Files` are used next
|
|
||||||
- `Detect(Path)` is the fallback
|
|
||||||
|
|
||||||
### Step 4: Select Adapters
|
|
||||||
|
|
||||||
`pkg/lint/service.go` builds a set of enabled tool names from config, then filters the registry from `pkg/lint/adapter.go`.
|
|
||||||
|
|
||||||
Special case:
|
|
||||||
|
|
||||||
- If `go` is present in the final language set and `Category != "compliance"`, a built-in `catalog` adapter is prepended automatically
|
|
||||||
|
|
||||||
### Step 5: Run Adapters
|
|
||||||
|
|
||||||
Every selected adapter runs with the same contract:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type Adapter interface {
|
|
||||||
Name() string
|
|
||||||
Available() bool
|
|
||||||
Languages() []string
|
|
||||||
Command() string
|
|
||||||
Entitlement() string
|
|
||||||
RequiresEntitlement() bool
|
|
||||||
MatchesLanguage(languages []string) bool
|
|
||||||
Category() string
|
|
||||||
Fast() bool
|
|
||||||
Run(ctx context.Context, input RunInput, files []string) AdapterResult
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Execution rules:
|
|
||||||
|
|
||||||
- Missing binaries become `ToolRun{Status: "skipped"}`
|
|
||||||
- External commands run with a 5 minute timeout
|
|
||||||
- Hook mode marks non-fast adapters as `skipped`
|
|
||||||
- Parsed findings are normalised, sorted, and merged into one report
|
|
||||||
- Adapter order becomes deterministic after `sortToolRuns()` and `sortFindings()`
|
|
||||||
|
|
||||||
### Step 6: Compute Pass or Fail
|
|
||||||
|
|
||||||
`passesThreshold()` applies the configured threshold:
|
|
||||||
|
|
||||||
| `fail_on` | Passes when |
|
|
||||||
|-----------|-------------|
|
|
||||||
| `error` or empty | `summary.errors == 0` |
|
|
||||||
| `warning` | `summary.errors == 0 && summary.warnings == 0` |
|
|
||||||
| `info` | `summary.total == 0` |
|
|
||||||
|
|
||||||
CLI exit status follows `report.Summary.Passed`, not raw tool state. A `skipped` or `timeout` tool run does not fail the command by itself.
|
|
||||||
|
|
||||||
## Catalog Surfaces
|
|
||||||
|
|
||||||
The repository has two catalog paths. They are related, but they are not the same implementation.
|
|
||||||
|
|
||||||
### Legacy Embedded Catalog
|
|
||||||
|
|
||||||
These commands load the embedded YAML catalog via `lint.go`:
|
|
||||||
|
|
||||||
- `core-lint lint check`
|
|
||||||
- `core-lint lint catalog list`
|
|
||||||
- `core-lint lint catalog show`
|
|
||||||
|
|
||||||
The source of truth is `catalog/*.yaml`.
|
|
||||||
|
|
||||||
### Orchestration Catalog Adapter
|
|
||||||
|
|
||||||
`core-lint run`, `core-lint go`, and the other orchestration commands prepend a smaller built-in `catalog` adapter from `pkg/lint/adapter.go`.
|
|
||||||
|
|
||||||
That adapter reads the hard-coded `defaultCatalogRulesYAML` constant, not `catalog/*.yaml`.
|
|
||||||
|
|
||||||
Today the fallback adapter contains these Go rules:
|
|
||||||
|
|
||||||
- `go-cor-003`
|
|
||||||
- `go-cor-004`
|
|
||||||
- `go-sec-001`
|
|
||||||
- `go-sec-002`
|
|
||||||
- `go-sec-004`
|
|
||||||
|
|
||||||
The overlap is intentional, but the surfaces are different:
|
|
||||||
|
|
||||||
- `lint check` returns raw catalog findings with catalog severities such as `medium` or `high`
|
|
||||||
- `run` normalises those findings into report severities `warning`, `error`, or `info`
|
|
||||||
|
|
||||||
An agent must not assume that `core-lint lint check` and `core-lint run` execute the same rule set.
|
|
||||||
|
|
||||||
## Adapter Inventory
|
|
||||||
|
|
||||||
The implementation has two adapter sources in `pkg/lint/adapter.go`:
|
|
||||||
|
|
||||||
- `defaultAdapters()` defines the external-tool registry exposed by `core-lint tools`
|
|
||||||
- `newCatalogAdapter()` defines the built-in Go fallback injected by `Service.Run()` when Go is in scope
|
|
||||||
|
|
||||||
### ToolInfo Contract
|
|
||||||
|
|
||||||
`core-lint tools` returns the runtime inventory from `Service.Tools()`:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type ToolInfo struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Available bool `json:"available"`
|
|
||||||
Languages []string `json:"languages"`
|
|
||||||
Category string `json:"category"`
|
|
||||||
Entitlement string `json:"entitlement,omitempty"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Inventory rules:
|
|
||||||
|
|
||||||
- results are sorted by `Name`
|
|
||||||
- `--lang` filters via `Adapter.MatchesLanguage()`, not strict equality on the `Languages` field
|
|
||||||
- wildcard adapters with `Languages() == []string{"*"}` still appear under any `--lang` filter
|
|
||||||
- category tokens also match, so `core-lint tools --lang security` returns security adapters plus wildcard adapters
|
|
||||||
- `Available` reflects a `PATH` lookup at runtime, not config membership
|
|
||||||
- `Entitlement` is descriptive metadata; the current implementation does not enforce it
|
|
||||||
- the built-in `catalog` adapter is not returned by `core-lint tools`; it is injected only during `run`-style orchestration on Go projects
|
|
||||||
|
|
||||||
### Injected During Run
|
|
||||||
|
|
||||||
| Adapter | Languages | Category | Fast | Notes |
|
|
||||||
|---------|-----------|----------|------|-------|
|
|
||||||
| `catalog` | `go` | `correctness` | yes | Built-in regex fallback rules; injected by `Service.Run()`, not listed by `core-lint tools` |
|
|
||||||
|
|
||||||
### Go
|
|
||||||
|
|
||||||
| Adapter | Category | Fast |
|
|
||||||
|---------|----------|------|
|
|
||||||
| `golangci-lint` | `correctness` | yes |
|
|
||||||
| `gosec` | `security` | no |
|
|
||||||
| `govulncheck` | `security` | no |
|
|
||||||
| `staticcheck` | `correctness` | yes |
|
|
||||||
| `revive` | `style` | yes |
|
|
||||||
| `errcheck` | `correctness` | yes |
|
|
||||||
|
|
||||||
### PHP
|
|
||||||
|
|
||||||
| Adapter | Category | Fast |
|
|
||||||
|---------|----------|------|
|
|
||||||
| `phpstan` | `correctness` | yes |
|
|
||||||
| `psalm` | `correctness` | yes |
|
|
||||||
| `phpcs` | `style` | yes |
|
|
||||||
| `phpmd` | `correctness` | yes |
|
|
||||||
| `pint` | `style` | yes |
|
|
||||||
|
|
||||||
### JS and TS
|
|
||||||
|
|
||||||
| Adapter | Category | Fast |
|
|
||||||
|---------|----------|------|
|
|
||||||
| `biome` | `style` | yes |
|
|
||||||
| `oxlint` | `style` | yes |
|
|
||||||
| `eslint` | `style` | yes |
|
|
||||||
| `typescript` | `correctness` | yes |
|
|
||||||
|
|
||||||
### Python
|
|
||||||
|
|
||||||
| Adapter | Category | Fast |
|
|
||||||
|---------|----------|------|
|
|
||||||
| `ruff` | `style` | yes |
|
|
||||||
| `mypy` | `correctness` | yes |
|
|
||||||
| `bandit` | `security` | no |
|
|
||||||
| `pylint` | `style` | yes |
|
|
||||||
|
|
||||||
### Infra and Cross-Project
|
|
||||||
|
|
||||||
| Adapter | Category | Fast |
|
|
||||||
|---------|----------|------|
|
|
||||||
| `shellcheck` | `correctness` | yes |
|
|
||||||
| `hadolint` | `security` | yes |
|
|
||||||
| `yamllint` | `style` | yes |
|
|
||||||
| `jsonlint` | `style` | yes |
|
|
||||||
| `markdownlint` | `style` | yes |
|
|
||||||
| `gitleaks` | `security` | no |
|
|
||||||
| `trivy` | `security` | no |
|
|
||||||
| `semgrep` | `security` | no |
|
|
||||||
| `syft` | `compliance` | no |
|
|
||||||
| `grype` | `security` | no |
|
|
||||||
| `scancode` | `compliance` | no |
|
|
||||||
|
|
||||||
### Adapter Parsing Rules
|
|
||||||
|
|
||||||
- JSON tools are parsed recursively and schema-tolerantly by searching for common keys such as `file`, `line`, `column`, `code`, `message`, and `severity`
|
|
||||||
- Text tools are parsed from `file:line[:column]: message`
|
|
||||||
- Non-empty output that does not match either parser becomes one synthetic finding with `code: diagnostic`
|
|
||||||
- A failed command with no usable parsed output becomes one synthetic finding with `code: command-failed`
|
|
||||||
- Duplicate findings are collapsed on `tool|file|line|column|code|message`
|
|
||||||
- `ToolRun.Version` exists in the report schema but is not populated yet
|
|
||||||
|
|
||||||
### Entitlement Metadata
|
|
||||||
|
|
||||||
Adapters still expose `Entitlement()` and `RequiresEntitlement()`, but `Service.Run()` does not enforce them today. The metadata is present; the gate is not.
|
|
||||||
|
|
||||||
## Output Contract
|
|
||||||
|
|
||||||
Orchestration commands return one report document:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type Report struct {
|
|
||||||
Project string `json:"project"`
|
|
||||||
Timestamp time.Time `json:"timestamp"`
|
|
||||||
Duration string `json:"duration"`
|
|
||||||
Languages []string `json:"languages"`
|
|
||||||
Tools []ToolRun `json:"tools"`
|
|
||||||
Findings []Finding `json:"findings"`
|
|
||||||
Summary Summary `json:"summary"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ToolRun struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Version string `json:"version,omitempty"`
|
|
||||||
Status string `json:"status"`
|
|
||||||
Duration string `json:"duration"`
|
|
||||||
Findings int `json:"findings"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Summary struct {
|
|
||||||
Total int `json:"total"`
|
|
||||||
Errors int `json:"errors"`
|
|
||||||
Warnings int `json:"warnings"`
|
|
||||||
Info int `json:"info"`
|
|
||||||
Passed bool `json:"passed"`
|
|
||||||
BySeverity map[string]int `json:"by_severity,omitempty"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`ToolRun.Status` has four implemented values:
|
|
||||||
|
|
||||||
| Status | Meaning |
|
|
||||||
|--------|---------|
|
|
||||||
| `passed` | The adapter ran and emitted no findings |
|
|
||||||
| `failed` | The adapter ran and emitted findings or the command exited non-zero |
|
|
||||||
| `skipped` | The binary was missing or hook mode skipped a non-fast adapter |
|
|
||||||
| `timeout` | The command exceeded the 5 minute adapter timeout |
|
|
||||||
|
|
||||||
`Finding` is shared with the legacy catalog scanner:
|
|
||||||
|
|
||||||
```go
|
|
||||||
type Finding struct {
|
|
||||||
Tool string `json:"tool,omitempty"`
|
|
||||||
File string `json:"file"`
|
|
||||||
Line int `json:"line"`
|
|
||||||
Column int `json:"column,omitempty"`
|
|
||||||
Severity string `json:"severity"`
|
|
||||||
Code string `json:"code,omitempty"`
|
|
||||||
Message string `json:"message,omitempty"`
|
|
||||||
Category string `json:"category,omitempty"`
|
|
||||||
Fix string `json:"fix,omitempty"`
|
|
||||||
RuleID string `json:"rule_id,omitempty"`
|
|
||||||
Title string `json:"title,omitempty"`
|
|
||||||
Match string `json:"match,omitempty"`
|
|
||||||
Repo string `json:"repo,omitempty"`
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Finding Normalisation
|
|
||||||
|
|
||||||
During orchestration:
|
|
||||||
|
|
||||||
- `Code` falls back to `RuleID`
|
|
||||||
- `Message` falls back to `Title`
|
|
||||||
- empty `Tool` becomes `catalog`
|
|
||||||
- file paths are made relative to `Path` when possible
|
|
||||||
- severities are collapsed to report levels:
|
|
||||||
|
|
||||||
| Raw severity | Report severity |
|
|
||||||
|--------------|-----------------|
|
|
||||||
| `critical`, `high`, `error`, `errors` | `error` |
|
|
||||||
| `medium`, `low`, `warning`, `warn` | `warning` |
|
|
||||||
| `info`, `note` | `info` |
|
|
||||||
|
|
||||||
### Output Modes
|
|
||||||
|
|
||||||
| Mode | How to request it | Writer |
|
|
||||||
|------|-------------------|--------|
|
|
||||||
| JSON | `--output json` | `WriteReportJSON` |
|
|
||||||
| Text | `--output text` | `WriteReportText` |
|
|
||||||
| GitHub annotations | `--output github` or `--ci` | `WriteReportGitHub` |
|
|
||||||
| SARIF | `--output sarif` | `WriteReportSARIF` |
|
|
||||||
|
|
||||||
### Stream Contract
|
|
||||||
|
|
||||||
For `run`-style commands, the selected writer always writes the report document to `stdout`.
|
|
||||||
|
|
||||||
If the report fails the configured threshold, the CLI still writes the report to `stdout`, then returns an error. The error path adds human-facing diagnostics on `stderr`.
|
|
||||||
|
|
||||||
Agents and CI jobs that need machine-readable output should parse `stdout` and treat `stderr` as diagnostic text.
|
|
||||||
|
|
||||||
## Hook Mode
|
|
||||||
|
|
||||||
`core-lint run --hook` is the installed pre-commit path.
|
|
||||||
|
|
||||||
Implementation details:
|
|
||||||
|
|
||||||
- staged files come from `git diff --cached --name-only`
|
|
||||||
- language detection runs only on those staged files
|
|
||||||
- adapters with `Fast() == false` are marked `skipped`
|
|
||||||
- output format still follows normal resolution rules; hook mode does not force text output
|
|
||||||
- `core-lint hook install` writes a managed block into `.git/hooks/pre-commit`
|
|
||||||
- `core-lint hook remove` removes only the managed block
|
|
||||||
|
|
||||||
Installed hook block:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
# core-lint hook start
|
|
||||||
# Installed by core-lint
|
|
||||||
exec core-lint run --hook
|
|
||||||
# core-lint hook end
|
|
||||||
```
|
|
||||||
|
|
||||||
If the hook file already exists, install appends a guarded block instead of overwriting the file. In that appended case the command line becomes `core-lint run --hook || exit $?` rather than `exec core-lint run --hook`.
|
|
||||||
|
|
||||||
## Test Contract
|
|
||||||
|
|
||||||
The CLI artifact tests are the runnable contract for this RFC:
|
|
||||||
|
|
||||||
| Path | Command under test |
|
|
||||||
|------|--------------------|
|
|
||||||
| `tests/cli/lint/check/Taskfile.yaml` | `core-lint lint check` |
|
|
||||||
| `tests/cli/lint/catalog/list/Taskfile.yaml` | `core-lint lint catalog list` |
|
|
||||||
| `tests/cli/lint/catalog/show/Taskfile.yaml` | `core-lint lint catalog show` |
|
|
||||||
| `tests/cli/lint/detect/Taskfile.yaml` | `core-lint detect` |
|
|
||||||
| `tests/cli/lint/tools/Taskfile.yaml` | `core-lint tools` |
|
|
||||||
| `tests/cli/lint/init/Taskfile.yaml` | `core-lint init` |
|
|
||||||
| `tests/cli/lint/run/Taskfile.yaml` | `core-lint run` |
|
|
||||||
| `tests/cli/lint/Taskfile.yaml` | aggregate CLI suite |
|
|
||||||
|
|
||||||
The planted bug fixture is `tests/cli/lint/check/fixtures/input.go`.
|
|
||||||
|
|
||||||
Current expectations from the test suite:
|
|
||||||
|
|
||||||
- `lint check --format=json` finds `go-cor-003` in `input.go`
|
|
||||||
- `run --output json --fail-on warning` writes one report document to `stdout`, emits failure diagnostics on `stderr`, and exits non-zero
|
|
||||||
- `detect --output json` returns `["go"]` for the shipped fixture
|
|
||||||
- `tools --output json --lang go` includes `golangci-lint` and `govulncheck`
|
|
||||||
- `init` writes `.core/lint.yaml`
|
|
||||||
|
|
||||||
Unit-level confirmation also exists in:
|
|
||||||
|
|
||||||
- `cmd/core-lint/main_test.go`
|
|
||||||
- `pkg/lint/service_test.go`
|
|
||||||
- `pkg/lint/detect_project_test.go`
|
|
||||||
|
|
||||||
## Explicit Non-Goals
|
|
||||||
|
|
||||||
These items are intentionally not part of the current contract:
|
|
||||||
|
|
||||||
- no Core runtime integration
|
|
||||||
- no `core.Task` pipeline
|
|
||||||
- no `lint.static`, `lint.build`, or `lint.artifact` action graph
|
|
||||||
- no scheduled cron registration
|
|
||||||
- no sidecar `sbom.cdx.json` or `sbom.spdx.json` output
|
|
||||||
- no parallel adapter execution
|
|
||||||
- no adapter entitlement enforcement
|
|
||||||
- no guarantee that every config tool name has a matching adapter
|
|
||||||
|
|
||||||
Any future RFC that adds those capabilities must describe the code that implements them, not just the aspiration.
|
|
||||||
|
|
||||||
## Compatibility
|
|
||||||
|
|
||||||
This RFC matches the code that ships today:
|
|
||||||
|
|
||||||
- a standard Go CLI binary built from `cmd/core-lint`
|
|
||||||
- external tools resolved from `PATH` at runtime
|
|
||||||
- no required Core runtime, IPC layer, scheduler, or generated action graph
|
|
||||||
|
|
||||||
The contract is compatible with the current unit tests and CLI Taskfile tests because it describes the existing paths, flags, DTOs, and outputs rather than a future service boundary.
|
|
||||||
|
|
||||||
## Adoption
|
|
||||||
|
|
||||||
This contract applies immediately to:
|
|
||||||
|
|
||||||
- the root orchestration commands such as `core-lint run`, `core-lint detect`, `core-lint tools`, `core-lint init`, and `core-lint hook`
|
|
||||||
- the namespaced aliases under `core-lint lint ...`
|
|
||||||
- the legacy embedded catalog commands under `core-lint lint check` and `core-lint lint catalog ...`
|
|
||||||
|
|
||||||
Future work that adds scheduler support, runtime registration, entitlement enforcement, parallel execution, or SBOM file outputs must land behind a new RFC revision that points to implemented code.
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- `docs/RFC-CORE-008-AGENT-EXPERIENCE.md`
|
|
||||||
- `docs/index.md`
|
|
||||||
- `docs/development.md`
|
|
||||||
- `cmd/core-lint/main.go`
|
|
||||||
- `pkg/lint/service.go`
|
|
||||||
- `pkg/lint/adapter.go`
|
|
||||||
- `tests/cli/lint/Taskfile.yaml`
|
|
||||||
|
|
||||||
## Changelog
|
|
||||||
|
|
||||||
- 2026-03-30: Rewrote the RFC to match the implemented standalone CLI, adapter registry, fallback catalog adapter, hook mode, and CLI test paths
|
|
||||||
- 2026-03-30: Clarified the implemented report boundary, category filtering semantics, ignored config fields, and AX-style motivation/compatibility/adoption sections
|
|
||||||
- 2026-03-30: Documented the `stdout` versus `stderr` contract for failing `run` commands and the non-strict `tools --lang` matching rules
|
|
||||||
18
go.mod
18
go.mod
|
|
@ -1,21 +1,21 @@
|
||||||
module dappco.re/go/core/lint
|
module forge.lthn.ai/core/lint
|
||||||
|
|
||||||
go 1.26.0
|
go 1.26.0
|
||||||
|
|
||||||
require (
|
require (
|
||||||
dappco.re/go/core/cli v0.3.7
|
forge.lthn.ai/core/cli v0.3.5
|
||||||
dappco.re/go/core/i18n v0.1.7
|
forge.lthn.ai/core/go-i18n v0.1.5
|
||||||
dappco.re/go/core/io v0.1.7
|
forge.lthn.ai/core/go-io v0.1.5
|
||||||
dappco.re/go/core/log v0.0.4
|
forge.lthn.ai/core/go-log v0.0.4
|
||||||
dappco.re/go/core/process v0.2.9
|
forge.lthn.ai/core/go-process v0.2.7
|
||||||
dappco.re/go/core/scm v0.3.6
|
forge.lthn.ai/core/go-scm v0.3.4
|
||||||
github.com/stretchr/testify v1.11.1
|
github.com/stretchr/testify v1.11.1
|
||||||
gopkg.in/yaml.v3 v3.0.1
|
gopkg.in/yaml.v3 v3.0.1
|
||||||
)
|
)
|
||||||
|
|
||||||
require (
|
require (
|
||||||
dappco.re/go/core v0.3.3 // indirect
|
forge.lthn.ai/core/go v0.3.1 // indirect
|
||||||
dappco.re/go/core/inference v0.1.7 // indirect
|
forge.lthn.ai/core/go-inference v0.1.4 // indirect
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
|
||||||
github.com/charmbracelet/bubbletea v1.3.10 // indirect
|
github.com/charmbracelet/bubbletea v1.3.10 // indirect
|
||||||
github.com/charmbracelet/colorprofile v0.4.3 // indirect
|
github.com/charmbracelet/colorprofile v0.4.3 // indirect
|
||||||
|
|
|
||||||
28
go.sum
28
go.sum
|
|
@ -1,19 +1,19 @@
|
||||||
forge.lthn.ai/core/cli v0.3.7 h1:1GrbaGg0wDGHr6+klSbbGyN/9sSbHvFbdySJznymhwg=
|
forge.lthn.ai/core/cli v0.3.5 h1:P7yK0DmSA1QnUMFuCjJZf/fk/akKPIxopQ6OwD8Sar8=
|
||||||
forge.lthn.ai/core/cli v0.3.7/go.mod h1:DBUppJkA9P45ZFGgI2B8VXw1rAZxamHoI/KG7fRvTNs=
|
forge.lthn.ai/core/cli v0.3.5/go.mod h1:SeArHx+hbpX5iZqgASCD7Q1EDoc6uaaGiGBotmNzIx4=
|
||||||
forge.lthn.ai/core/go v0.3.3 h1:kYYZ2nRYy0/Be3cyuLJspRjLqTMxpckVyhb/7Sw2gd0=
|
forge.lthn.ai/core/go v0.3.1 h1:5FMTsUhLcxSr07F9q3uG0Goy4zq4eLivoqi8shSY4UM=
|
||||||
forge.lthn.ai/core/go v0.3.3/go.mod h1:Cp4ac25pghvO2iqOu59t1GyngTKVOzKB5/VPdhRi9CQ=
|
forge.lthn.ai/core/go v0.3.1/go.mod h1:gE6c8h+PJ2287qNhVUJ5SOe1kopEwHEquvinstpuyJc=
|
||||||
forge.lthn.ai/core/go-i18n v0.1.7 h1:aHkAoc3W8fw3RPNvw/UszQbjyFWXHszzbZgty3SwyAA=
|
forge.lthn.ai/core/go-i18n v0.1.5 h1:B4hV4eTl63akZiplM8lswuttctrcSOCWyFSGBZmu6Nc=
|
||||||
forge.lthn.ai/core/go-i18n v0.1.7/go.mod h1:0VDjwtY99NSj2iqwrI09h5GUsJeM9s48MLkr+/Dn4G8=
|
forge.lthn.ai/core/go-i18n v0.1.5/go.mod h1:hJsUxmqdPly73i3VkTDxvmbrpjxSd65hQVQqWA3+fnM=
|
||||||
forge.lthn.ai/core/go-inference v0.1.7 h1:9Dy6v03jX5ZRH3n5iTzlYyGtucuBIgSe+S7GWvBzx9Q=
|
forge.lthn.ai/core/go-inference v0.1.4 h1:fuAgWbqsEDajHniqAKyvHYbRcBrkGEiGSqR2pfTMRY0=
|
||||||
forge.lthn.ai/core/go-inference v0.1.7/go.mod h1:jfWz+IJX55wAH98+ic6FEqqGB6/P31CHlg7VY7pxREw=
|
forge.lthn.ai/core/go-inference v0.1.4/go.mod h1:jfWz+IJX55wAH98+ic6FEqqGB6/P31CHlg7VY7pxREw=
|
||||||
forge.lthn.ai/core/go-io v0.1.7 h1:Tdb6sqh+zz1lsGJaNX9RFWM6MJ/RhSAyxfulLXrJsbk=
|
forge.lthn.ai/core/go-io v0.1.5 h1:+XJ1YhaGGFLGtcNbPtVlndTjk+pO0Ydi2hRDj5/cHOM=
|
||||||
forge.lthn.ai/core/go-io v0.1.7/go.mod h1:8lRLFk4Dnp5cR/Cyzh9WclD5566TbpdRgwcH7UZLWn4=
|
forge.lthn.ai/core/go-io v0.1.5/go.mod h1:FRtXSsi8W+U9vewCU+LBAqqbIj3wjXA4dBdSv3SAtWI=
|
||||||
forge.lthn.ai/core/go-log v0.0.4 h1:KTuCEPgFmuM8KJfnyQ8vPOU1Jg654W74h8IJvfQMfv0=
|
forge.lthn.ai/core/go-log v0.0.4 h1:KTuCEPgFmuM8KJfnyQ8vPOU1Jg654W74h8IJvfQMfv0=
|
||||||
forge.lthn.ai/core/go-log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
forge.lthn.ai/core/go-log v0.0.4/go.mod h1:r14MXKOD3LF/sI8XUJQhRk/SZHBE7jAFVuCfgkXoZPw=
|
||||||
forge.lthn.ai/core/go-process v0.2.9 h1:Wql+5TUF+lfU2oJ9I+S764MkTqJhBsuyMM0v1zsfZC4=
|
forge.lthn.ai/core/go-process v0.2.7 h1:yl7jOxzDqWpJd/ZvJ/Ff6bHgPFLA1ZYU5UDcsz3AzLM=
|
||||||
forge.lthn.ai/core/go-process v0.2.9/go.mod h1:NIzZOF5IVYYCjHkcNIGcg1mZH+bzGoie4SlZUDYOKIM=
|
forge.lthn.ai/core/go-process v0.2.7/go.mod h1:I6x11UNaZbU3k0FWUaSlPRTE4YZk/lWIjiODm/8Jr9c=
|
||||||
forge.lthn.ai/core/go-scm v0.3.6 h1:LFNx8Fs82mrpxro/MPUM6tMiD4DqPmdu83UknXztQjc=
|
forge.lthn.ai/core/go-scm v0.3.4 h1:McZvp2gI3wEPCF/jim8O4F1+Vp477N81TUiiklTq5hw=
|
||||||
forge.lthn.ai/core/go-scm v0.3.6/go.mod h1:IWFIYDfRH0mtRdqY5zV06l/RkmkPpBM6FcbKWhg1Qa8=
|
forge.lthn.ai/core/go-scm v0.3.4/go.mod h1:AOrx4CEmV8/Q73Cvd2LkbFniYGpk46mticpYmK5MnJA=
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
|
||||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||||
github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw=
|
github.com/charmbracelet/bubbletea v1.3.10 h1:otUDHWMMzQSB0Pkc87rm691KZ3SWa4KUlvF9nRvCICw=
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@
|
||||||
},
|
},
|
||||||
"health": {
|
"health": {
|
||||||
"short": "Show CI health across repos",
|
"short": "Show CI health across repos",
|
||||||
"long": "Check GitHub Actions workflow status for all repos in the registry and report which are passing, failing, errored, or unconfigured.",
|
"long": "Check GitHub Actions workflow status for all repos in the registry and report which are passing, failing, or unconfigured.",
|
||||||
"summary": "CI Health",
|
"summary": "CI Health",
|
||||||
"all_healthy": "All repos are healthy.",
|
"all_healthy": "All repos are healthy.",
|
||||||
"passing": "Passing",
|
"passing": "Passing",
|
||||||
|
|
@ -27,7 +27,6 @@
|
||||||
"no_ci_configured": "No CI configured",
|
"no_ci_configured": "No CI configured",
|
||||||
"count_passing": "passing",
|
"count_passing": "passing",
|
||||||
"count_failing": "failing",
|
"count_failing": "failing",
|
||||||
"count_error": "error",
|
|
||||||
"count_pending": "pending",
|
"count_pending": "pending",
|
||||||
"count_no_ci": "no CI",
|
"count_no_ci": "no CI",
|
||||||
"count_disabled": "disabled",
|
"count_disabled": "disabled",
|
||||||
|
|
@ -46,7 +45,6 @@
|
||||||
"blocked": "Blocked",
|
"blocked": "Blocked",
|
||||||
"triage": "Triage"
|
"triage": "Triage"
|
||||||
},
|
},
|
||||||
"fetch_error": "Failed to fetch issues from {{.Repo}}: {{.Error}}",
|
|
||||||
"hint": {
|
"hint": {
|
||||||
"blocked": "blocked by dependency",
|
"blocked": "blocked by dependency",
|
||||||
"triage": "needs triage",
|
"triage": "needs triage",
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,11 @@
|
||||||
// Package detect identifies project types by examining filesystem markers.
|
// Package detect identifies project types by examining filesystem markers.
|
||||||
package detect
|
package detect
|
||||||
|
|
||||||
import "os"
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
coreio "forge.lthn.ai/core/go-io"
|
||||||
|
)
|
||||||
|
|
||||||
// ProjectType identifies a project's language/framework.
|
// ProjectType identifies a project's language/framework.
|
||||||
type ProjectType string
|
type ProjectType string
|
||||||
|
|
@ -13,14 +17,12 @@ const (
|
||||||
|
|
||||||
// IsGoProject returns true if dir contains a go.mod file.
|
// IsGoProject returns true if dir contains a go.mod file.
|
||||||
func IsGoProject(dir string) bool {
|
func IsGoProject(dir string) bool {
|
||||||
_, err := os.Stat(dir + "/go.mod")
|
return coreio.Local.Exists(filepath.Join(dir, "go.mod"))
|
||||||
return err == nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// IsPHPProject returns true if dir contains a composer.json file.
|
// IsPHPProject returns true if dir contains a composer.json file.
|
||||||
func IsPHPProject(dir string) bool {
|
func IsPHPProject(dir string) bool {
|
||||||
_, err := os.Stat(dir + "/composer.json")
|
return coreio.Local.Exists(filepath.Join(dir, "composer.json"))
|
||||||
return err == nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// DetectAll returns all detected project types in the directory.
|
// DetectAll returns all detected project types in the directory.
|
||||||
|
|
|
||||||
|
|
@ -1,912 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Adapter wraps one lint tool and normalises its output to Finding values.
type Adapter interface {
	// Name returns the adapter's stable identifier (e.g. "golangci-lint").
	Name() string
	// Available reports whether the tool can actually run on this host.
	Available() bool
	// Languages lists the language keys this adapter applies to; "*" means all.
	Languages() []string
	// Command returns the primary binary name used to invoke the tool.
	Command() string
	// Entitlement names the entitlement metadata attached to this adapter, if any.
	Entitlement() string
	// RequiresEntitlement reports whether Entitlement is meant to gate execution.
	// NOTE(review): metadata only today — the run path does not enforce it.
	RequiresEntitlement() bool
	// MatchesLanguage reports whether the adapter applies to any of the
	// given detected languages.
	MatchesLanguage(languages []string) bool
	// Category returns the finding category ("correctness", "style",
	// "security", "compliance").
	Category() string
	// Fast reports whether the adapter is cheap enough to run in
	// pre-commit hook mode; non-fast adapters are skipped there.
	Fast() bool
	// Run executes the tool for input.Path (optionally restricted to files)
	// and returns the execution record plus parsed findings.
	Run(ctx context.Context, input RunInput, files []string) AdapterResult
}
|
|
||||||
|
|
||||||
// AdapterResult contains one tool execution plus the parsed findings from that run.
type AdapterResult struct {
	Tool     ToolRun   // execution metadata: name, version, status, duration, finding count
	Findings []Finding // normalised findings parsed from the tool's output
}

// findingParser converts a tool's raw combined stdout/stderr into Finding values.
type findingParser func(tool string, category string, output string) []Finding

// commandArgumentsBuilder builds the argv tail for one tool invocation from
// the project path and the (possibly empty) list of target files.
type commandArgumentsBuilder func(projectPath string, files []string) []string

// CommandAdapter runs an external binary and parses its stdout/stderr.
type CommandAdapter struct {
	name                string   // adapter identifier
	binaries            []string // candidate binary names, tried in order on PATH
	languages           []string // language keys; []string{"*"} applies to every project
	category            string   // finding category stamped onto parsed results
	entitlement         string   // entitlement metadata (not enforced at run time)
	requiresEntitlement bool     // whether entitlement is meant to gate this adapter
	fast                bool     // safe to run in pre-commit hook mode
	buildArgs           commandArgumentsBuilder
	parseOutput         findingParser
}

// CatalogAdapter wraps the embedded regex rule catalog as a built-in linter.
type CatalogAdapter struct{}
|
|
||||||
|
|
||||||
// defaultAdapters returns the built-in adapter registry, one CommandAdapter
// per supported external tool. newCommandAdapter argument order is:
// name, candidate binaries, languages, category, entitlement,
// requiresEntitlement, fast, argument builder, output parser.
func defaultAdapters() []Adapter {
	return []Adapter{
		// Go
		newCommandAdapter("golangci-lint", []string{"golangci-lint"}, []string{"go"}, "correctness", "", false, true, goProjectArguments("run", "--out-format", "json"), parseJSONDiagnostics),
		newCommandAdapter("gosec", []string{"gosec"}, []string{"go"}, "security", "lint.security", true, false, goProjectArguments("-fmt", "json"), parseJSONDiagnostics),
		newCommandAdapter("govulncheck", []string{"govulncheck"}, []string{"go"}, "security", "", false, false, goProjectArguments("-json"), parseGovulncheckDiagnostics),
		newCommandAdapter("staticcheck", []string{"staticcheck"}, []string{"go"}, "correctness", "", false, true, goProjectArguments("-f", "json"), parseJSONDiagnostics),
		newCommandAdapter("revive", []string{"revive"}, []string{"go"}, "style", "", false, true, goProjectArguments("-formatter", "json"), parseJSONDiagnostics),
		newCommandAdapter("errcheck", []string{"errcheck"}, []string{"go"}, "correctness", "", false, true, goProjectArguments(), parseTextDiagnostics),
		// PHP
		newCommandAdapter("phpstan", []string{"phpstan"}, []string{"php"}, "correctness", "", false, true, projectPathArguments("analyse", "--error-format", "json"), parseJSONDiagnostics),
		newCommandAdapter("psalm", []string{"psalm"}, []string{"php"}, "correctness", "", false, true, projectPathArguments("--output-format", "json"), parseJSONDiagnostics),
		newCommandAdapter("phpcs", []string{"phpcs"}, []string{"php"}, "style", "", false, true, projectPathArguments("--report=json"), parseJSONDiagnostics),
		newCommandAdapter("phpmd", []string{"phpmd"}, []string{"php"}, "correctness", "", false, true, phpmdArguments(), parseJSONDiagnostics),
		newCommandAdapter("pint", []string{"pint"}, []string{"php"}, "style", "", false, true, projectPathArguments("--format", "json"), parseJSONDiagnostics),
		// JavaScript / TypeScript
		newCommandAdapter("biome", []string{"biome"}, []string{"js", "ts"}, "style", "", false, true, projectPathArguments("check", "--reporter", "json"), parseJSONDiagnostics),
		newCommandAdapter("oxlint", []string{"oxlint"}, []string{"js", "ts"}, "style", "", false, true, projectPathArguments("--format", "json"), parseJSONDiagnostics),
		newCommandAdapter("eslint", []string{"eslint"}, []string{"js"}, "style", "", false, true, projectPathArguments("--format", "json"), parseJSONDiagnostics),
		newCommandAdapter("typescript", []string{"tsc", "typescript"}, []string{"ts"}, "correctness", "", false, true, projectPathArguments("--pretty", "false"), parseTextDiagnostics),
		// Python
		newCommandAdapter("ruff", []string{"ruff"}, []string{"python"}, "style", "", false, true, projectPathArguments("check", "--output-format", "json"), parseJSONDiagnostics),
		newCommandAdapter("mypy", []string{"mypy"}, []string{"python"}, "correctness", "", false, true, projectPathArguments("--output", "json"), parseJSONDiagnostics),
		newCommandAdapter("bandit", []string{"bandit"}, []string{"python"}, "security", "lint.security", true, false, recursiveProjectPathArguments("-f", "json", "-r"), parseJSONDiagnostics),
		newCommandAdapter("pylint", []string{"pylint"}, []string{"python"}, "style", "", false, true, projectPathArguments("--output-format", "json"), parseJSONDiagnostics),
		// Shell, Docker, config formats
		newCommandAdapter("shellcheck", []string{"shellcheck"}, []string{"shell"}, "correctness", "", false, true, filePathArguments("-f", "json"), parseJSONDiagnostics),
		newCommandAdapter("hadolint", []string{"hadolint"}, []string{"dockerfile"}, "security", "", false, true, filePathArguments("-f", "json"), parseJSONDiagnostics),
		newCommandAdapter("yamllint", []string{"yamllint"}, []string{"yaml"}, "style", "", false, true, projectPathArguments("-f", "parsable"), parseTextDiagnostics),
		newCommandAdapter("jsonlint", []string{"jsonlint"}, []string{"json"}, "style", "", false, true, filePathArguments(), parseTextDiagnostics),
		newCommandAdapter("markdownlint", []string{"markdownlint", "markdownlint-cli"}, []string{"markdown"}, "style", "", false, true, projectPathArguments("--json"), parseJSONDiagnostics),
		newCommandAdapter("prettier", []string{"prettier"}, []string{"js"}, "style", "", false, true, projectPathArguments("--list-different"), parsePrettierDiagnostics),
		// Language-agnostic security / compliance scanners
		newCommandAdapter("gitleaks", []string{"gitleaks"}, []string{"*"}, "security", "lint.security", true, false, recursiveProjectPathArguments("detect", "--no-git", "--report-format", "json", "--source"), parseJSONDiagnostics),
		newCommandAdapter("trivy", []string{"trivy"}, []string{"*"}, "security", "lint.security", true, false, projectPathArguments("fs", "--format", "json"), parseJSONDiagnostics),
		newCommandAdapter("semgrep", []string{"semgrep"}, []string{"*"}, "security", "lint.security", true, false, projectPathArguments("--json"), parseJSONDiagnostics),
		newCommandAdapter("syft", []string{"syft"}, []string{"*"}, "compliance", "lint.compliance", true, false, projectPathArguments("scan", "-o", "json"), parseJSONDiagnostics),
		newCommandAdapter("grype", []string{"grype"}, []string{"*"}, "security", "lint.compliance", true, false, projectPathArguments("-o", "json"), parseJSONDiagnostics),
		newCommandAdapter("scancode", []string{"scancode-toolkit", "scancode"}, []string{"*"}, "compliance", "lint.compliance", true, false, projectPathArguments("--json"), parseJSONDiagnostics),
	}
}
|
|
||||||
|
|
||||||
// newCatalogAdapter returns the built-in embedded regex catalog linter,
// which is always available and needs no external binary.
func newCatalogAdapter() Adapter {
	return CatalogAdapter{}
}
|
|
||||||
|
|
||||||
// newCommandAdapter builds a CommandAdapter for one external tool.
// binaries lists candidate executable names tried in order on PATH;
// builder produces the argv tail for a run and parser turns the tool's
// raw output into findings.
func newCommandAdapter(name string, binaries []string, languages []string, category string, entitlement string, requiresEntitlement bool, fast bool, builder commandArgumentsBuilder, parser findingParser) Adapter {
	return CommandAdapter{
		name:                name,
		binaries:            binaries,
		languages:           languages,
		category:            category,
		entitlement:         entitlement,
		requiresEntitlement: requiresEntitlement,
		fast:                fast,
		buildArgs:           builder,
		parseOutput:         parser,
	}
}
|
|
||||||
|
|
||||||
// Name returns the adapter's identifier.
func (adapter CommandAdapter) Name() string { return adapter.name }

// Available reports whether any of the adapter's candidate binaries
// resolves on PATH.
func (adapter CommandAdapter) Available() bool {
	_, ok := adapter.availableBinary()
	return ok
}

// Languages returns a defensive copy of the adapter's language keys.
func (adapter CommandAdapter) Languages() []string {
	return append([]string(nil), adapter.languages...)
}

// Command returns the primary (first) candidate binary name, or "" when
// the adapter has none configured.
func (adapter CommandAdapter) Command() string {
	if len(adapter.binaries) == 0 {
		return ""
	}
	return adapter.binaries[0]
}

// Entitlement returns the adapter's entitlement metadata key, if any.
func (adapter CommandAdapter) Entitlement() string { return adapter.entitlement }

// RequiresEntitlement reports whether the entitlement is meant to gate
// this adapter (metadata only; not enforced by the run path).
func (adapter CommandAdapter) RequiresEntitlement() bool { return adapter.requiresEntitlement }
|
|
||||||
|
|
||||||
func (adapter CommandAdapter) MatchesLanguage(languages []string) bool {
|
|
||||||
if len(adapter.languages) == 0 || len(languages) == 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if len(adapter.languages) == 1 && adapter.languages[0] == "*" {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
for _, language := range languages {
|
|
||||||
if strings.EqualFold(language, adapter.category) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
for _, supported := range adapter.languages {
|
|
||||||
if supported == language {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Category returns the finding category stamped onto this adapter's results.
func (adapter CommandAdapter) Category() string { return adapter.category }

// Fast reports whether the adapter is cheap enough for pre-commit hook mode.
func (adapter CommandAdapter) Fast() bool { return adapter.fast }
|
|
||||||
|
|
||||||
// Run executes the adapter's binary against input.Path (optionally limited
// to files), parses its combined output into findings, and fills in the
// ToolRun record. Status values produced here are "skipped" (binary not on
// PATH), "timeout" (5-minute adapter deadline exceeded), "failed" (run
// error, non-zero exit, or any findings), and "passed".
func (adapter CommandAdapter) Run(ctx context.Context, input RunInput, files []string) AdapterResult {
	startedAt := time.Now()
	result := AdapterResult{
		Tool: ToolRun{
			Name: adapter.name,
		},
	}

	// Missing binary: mark skipped without spending any time.
	binary, ok := adapter.availableBinary()
	if !ok {
		result.Tool.Status = "skipped"
		result.Tool.Duration = "0s"
		return result
	}

	result.Tool.Version = probeCommandVersion(binary, input.Path)

	// Hard per-adapter deadline of 5 minutes on top of the caller's ctx.
	runContext, cancel := context.WithTimeout(ctx, 5*time.Minute)
	defer cancel()

	args := adapter.buildArgs(input.Path, files)
	stdout, stderr, exitCode, runErr := runCommand(runContext, input.Path, binary, args)

	// Duration covers the whole run, including the version probe above.
	result.Tool.Duration = time.Since(startedAt).Round(time.Millisecond).String()

	if errors.Is(runContext.Err(), context.DeadlineExceeded) {
		result.Tool.Status = "timeout"
		return result
	}

	// Merge trimmed stdout and stderr into one blob; many tools write
	// diagnostics to either stream.
	output := strings.TrimSpace(stdout)
	if strings.TrimSpace(stderr) != "" {
		if output != "" {
			output += "\n" + strings.TrimSpace(stderr)
		} else {
			output = strings.TrimSpace(stderr)
		}
	}

	// Parse in order of preference: the adapter's own parser, then the
	// generic text parser, then a single synthetic "command-failed" finding
	// when the command errored and nothing was parseable.
	if adapter.parseOutput != nil && output != "" {
		result.Findings = adapter.parseOutput(adapter.name, adapter.category, output)
	}
	if len(result.Findings) == 0 && output != "" {
		result.Findings = parseTextDiagnostics(adapter.name, adapter.category, output)
	}
	if len(result.Findings) == 0 && runErr != nil {
		result.Findings = []Finding{{
			Tool:     adapter.name,
			Severity: defaultSeverityForCategory(adapter.category),
			Code:     "command-failed",
			Message:  strings.TrimSpace(firstNonEmpty(output, runErr.Error())),
			Category: adapter.category,
		}}
	}

	// Backfill tool/category and normalise severities on every finding.
	for index := range result.Findings {
		if result.Findings[index].Tool == "" {
			result.Findings[index].Tool = adapter.name
		}
		if result.Findings[index].Category == "" {
			result.Findings[index].Category = adapter.category
		}
		if result.Findings[index].Severity == "" {
			result.Findings[index].Severity = defaultSeverityForCategory(adapter.category)
		} else {
			result.Findings[index].Severity = normaliseSeverity(result.Findings[index].Severity)
		}
	}

	result.Tool.Findings = len(result.Findings)
	switch {
	case runErr != nil || exitCode != 0 || len(result.Findings) > 0:
		result.Tool.Status = "failed"
	default:
		result.Tool.Status = "passed"
	}

	return result
}
|
|
||||||
|
|
||||||
func probeCommandVersion(binary string, workingDir string) string {
|
|
||||||
for _, args := range [][]string{{"--version"}, {"-version"}, {"version"}} {
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
|
||||||
stdout, stderr, exitCode, err := runCommand(ctx, workingDir, binary, args)
|
|
||||||
cancel()
|
|
||||||
|
|
||||||
if err != nil && exitCode != 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
version := firstNonEmpty(stdout, stderr)
|
|
||||||
if version == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if line := firstVersionLine(version); line != "" {
|
|
||||||
return line
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (adapter CommandAdapter) availableBinary() (string, bool) {
|
|
||||||
for _, binary := range adapter.binaries {
|
|
||||||
path, err := exec.LookPath(binary)
|
|
||||||
if err == nil {
|
|
||||||
return path, true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return "", false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Name identifies the built-in catalog linter.
func (CatalogAdapter) Name() string { return "catalog" }

// Available always reports true: the catalog is embedded, no binary needed.
func (CatalogAdapter) Available() bool { return true }

// Languages reports that the embedded catalog targets Go sources.
func (CatalogAdapter) Languages() []string { return []string{"go"} }

// Command returns a placeholder name since no external binary is invoked.
func (CatalogAdapter) Command() string { return "catalog" }

// Entitlement is empty: the catalog carries no entitlement metadata.
func (CatalogAdapter) Entitlement() string { return "" }

// RequiresEntitlement reports that the catalog is never entitlement-gated.
func (CatalogAdapter) RequiresEntitlement() bool { return false }

// MatchesLanguage matches when no language filter is given or when "go"
// is among the detected languages.
func (CatalogAdapter) MatchesLanguage(languages []string) bool {
	return len(languages) == 0 || containsString(languages, "go")
}

// Category returns the finding category used for catalog rule hits.
func (CatalogAdapter) Category() string { return "correctness" }

// Fast reports that the catalog is cheap enough for pre-commit hook mode.
func (CatalogAdapter) Fast() bool { return true }
|
|
||||||
|
|
||||||
// Run scans the project (or the explicit file subset) against the
// embedded rule catalog and returns the matches as findings.
//
// If input.Category is set, only rules tagged with that category are
// applied. Catalog-load or scanner-construction failures are reported as
// a single synthetic "error" finding rather than aborting the run, so
// the caller always receives a populated AdapterResult.
func (CatalogAdapter) Run(_ context.Context, input RunInput, files []string) AdapterResult {
	startedAt := time.Now()
	result := AdapterResult{
		Tool: ToolRun{
			Name: "catalog",
		},
	}

	catalog, err := loadBuiltinCatalog()
	if err != nil {
		// Surface the load failure as a finding so it shows up in reports.
		result.Tool.Status = "failed"
		result.Tool.Duration = time.Since(startedAt).Round(time.Millisecond).String()
		result.Findings = []Finding{{
			Tool:     "catalog",
			Severity: "error",
			Code:     "catalog-load",
			Message:  err.Error(),
			Category: "correctness",
		}}
		result.Tool.Findings = len(result.Findings)
		return result
	}

	// Optionally narrow the rule set to one category tag.
	rules := catalog.Rules
	if input.Category != "" {
		rules = filterRulesByTag(rules, input.Category)
	}

	scanner, err := NewScanner(rules)
	if err != nil {
		// Scanner construction failed (e.g. an invalid rule pattern);
		// report it the same way as a load failure.
		result.Tool.Status = "failed"
		result.Tool.Duration = time.Since(startedAt).Round(time.Millisecond).String()
		result.Findings = []Finding{{
			Tool:     "catalog",
			Severity: "error",
			Code:     "catalog-scan",
			Message:  err.Error(),
			Category: "correctness",
		}}
		result.Tool.Findings = len(result.Findings)
		return result
	}

	// Scan either the explicit file list or the whole project path.
	var findings []Finding
	if len(files) > 0 {
		for _, file := range files {
			// Relative file paths are resolved against the project root.
			scanPath := file
			if !filepath.IsAbs(scanPath) {
				scanPath = filepath.Join(input.Path, file)
			}
			fileFindings, scanErr := scanner.ScanFile(scanPath)
			if scanErr != nil {
				// Skip unreadable files rather than failing the run.
				continue
			}
			findings = append(findings, fileFindings...)
		}
	} else {
		// Directory-scan errors are deliberately ignored; partial
		// results are still reported.
		findings, _ = scanner.ScanDir(input.Path)
	}

	// Normalise raw scanner hits into the shared Finding shape.
	for index := range findings {
		rule := catalog.ByID(findings[index].RuleID)
		findings[index].Tool = "catalog"
		findings[index].Code = findings[index].RuleID
		findings[index].Message = findings[index].Title
		findings[index].Severity = normaliseSeverity(findings[index].Severity)
		if rule != nil {
			findings[index].Category = ruleCategory(*rule)
		}
	}

	result.Findings = findings
	result.Tool.Findings = len(findings)
	result.Tool.Duration = time.Since(startedAt).Round(time.Millisecond).String()
	// Any finding at all marks the tool run as failed.
	if len(findings) > 0 {
		result.Tool.Status = "failed"
	} else {
		result.Tool.Status = "passed"
	}

	return result
}
|
|
||||||
|
|
||||||
func loadBuiltinCatalog() (*Catalog, error) {
|
|
||||||
rules, err := ParseRules([]byte(defaultCatalogRulesYAML))
|
|
||||||
if err != nil {
|
|
||||||
return nil, coreerr.E("loadBuiltinCatalog", "parse embedded fallback rules", err)
|
|
||||||
}
|
|
||||||
return &Catalog{Rules: rules}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func goProjectArguments(prefix ...string) commandArgumentsBuilder {
|
|
||||||
return func(_ string, files []string) []string {
|
|
||||||
args := append([]string(nil), prefix...)
|
|
||||||
if len(files) > 0 {
|
|
||||||
return append(args, files...)
|
|
||||||
}
|
|
||||||
return append(args, "./...")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func projectPathArguments(prefix ...string) commandArgumentsBuilder {
|
|
||||||
return func(_ string, files []string) []string {
|
|
||||||
args := append([]string(nil), prefix...)
|
|
||||||
if len(files) > 0 {
|
|
||||||
return append(args, files...)
|
|
||||||
}
|
|
||||||
return append(args, ".")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func recursiveProjectPathArguments(prefix ...string) commandArgumentsBuilder {
|
|
||||||
return func(_ string, files []string) []string {
|
|
||||||
args := append([]string(nil), prefix...)
|
|
||||||
if len(files) > 0 {
|
|
||||||
return append(args, files...)
|
|
||||||
}
|
|
||||||
return append(args, ".")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func filePathArguments(prefix ...string) commandArgumentsBuilder {
|
|
||||||
return func(_ string, files []string) []string {
|
|
||||||
args := append([]string(nil), prefix...)
|
|
||||||
if len(files) > 0 {
|
|
||||||
return append(args, files...)
|
|
||||||
}
|
|
||||||
return append(args, ".")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func phpmdArguments() commandArgumentsBuilder {
|
|
||||||
return func(_ string, files []string) []string {
|
|
||||||
target := "."
|
|
||||||
if len(files) > 0 {
|
|
||||||
target = strings.Join(files, ",")
|
|
||||||
}
|
|
||||||
return []string{target, "json", "cleancode,codesize,controversial,design,naming,unusedcode"}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func runCommand(ctx context.Context, workingDir string, binary string, args []string) (string, string, int, error) {
|
|
||||||
command := exec.CommandContext(ctx, binary, args...)
|
|
||||||
if workingDir != "" {
|
|
||||||
command.Dir = workingDir
|
|
||||||
}
|
|
||||||
|
|
||||||
var stdout bytes.Buffer
|
|
||||||
var stderr bytes.Buffer
|
|
||||||
command.Stdout = &stdout
|
|
||||||
command.Stderr = &stderr
|
|
||||||
|
|
||||||
err := command.Run()
|
|
||||||
if err == nil {
|
|
||||||
return stdout.String(), stderr.String(), 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
var exitErr *exec.ExitError
|
|
||||||
if errors.As(err, &exitErr) {
|
|
||||||
return stdout.String(), stderr.String(), exitErr.ExitCode(), err
|
|
||||||
}
|
|
||||||
|
|
||||||
return stdout.String(), stderr.String(), -1, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// parseGovulncheckDiagnostics converts govulncheck output (already
// parsed by ParseVulnCheckJSON) into findings. Unparseable output yields
// nil rather than an error: diagnostics parsing is best-effort.
func parseGovulncheckDiagnostics(tool string, category string, output string) []Finding {
	result, err := ParseVulnCheckJSON(output, "")
	if err != nil || result == nil {
		return nil
	}

	var findings []Finding
	for _, vuln := range result.Findings {
		// Prefer a human-readable description, then the package name,
		// then fall back to the bare vulnerability ID.
		message := strings.TrimSpace(firstNonEmpty(vuln.Description, vuln.Package))
		if message == "" {
			message = vuln.ID
		}
		// Vulnerabilities are always reported at "error" severity.
		findings = append(findings, Finding{
			Tool:     tool,
			File:     vuln.Package,
			Severity: "error",
			Code:     vuln.ID,
			Message:  message,
			Category: category,
		})
	}

	return findings
}
|
|
||||||
|
|
||||||
func parseJSONDiagnostics(tool string, category string, output string) []Finding {
|
|
||||||
decoder := json.NewDecoder(strings.NewReader(output))
|
|
||||||
var findings []Finding
|
|
||||||
|
|
||||||
for {
|
|
||||||
var value any
|
|
||||||
err := decoder.Decode(&value)
|
|
||||||
if errors.Is(err, io.EOF) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
findings = append(findings, collectJSONDiagnostics(tool, category, value)...)
|
|
||||||
}
|
|
||||||
|
|
||||||
return dedupeFindings(findings)
|
|
||||||
}
|
|
||||||
|
|
||||||
func collectJSONDiagnostics(tool string, category string, value any) []Finding {
|
|
||||||
switch typed := value.(type) {
|
|
||||||
case []any:
|
|
||||||
var findings []Finding
|
|
||||||
for _, child := range typed {
|
|
||||||
findings = append(findings, collectJSONDiagnostics(tool, category, child)...)
|
|
||||||
}
|
|
||||||
return findings
|
|
||||||
case map[string]any:
|
|
||||||
var findings []Finding
|
|
||||||
if finding, ok := findingFromMap(tool, category, typed); ok {
|
|
||||||
findings = append(findings, finding)
|
|
||||||
}
|
|
||||||
for _, child := range typed {
|
|
||||||
findings = append(findings, collectJSONDiagnostics(tool, category, child)...)
|
|
||||||
}
|
|
||||||
return findings
|
|
||||||
default:
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// findingFromMap attempts to interpret one decoded JSON object as a
// diagnostic, probing the field spellings used by common linter formats
// (lowercase, Go-style capitalised, SARIF-nested, etc.). Each candidate
// list is ordered: the first path that yields a value wins. It returns
// false when the object does not carry enough information to be a finding.
func findingFromMap(tool string, category string, fields map[string]any) (Finding, bool) {
	file := firstStringPath(fields,
		[]string{"file"},
		[]string{"File"},
		[]string{"filename"},
		[]string{"path"},
		[]string{"location", "path"},
		[]string{"artifactLocation", "uri"},
		[]string{"Target"},
	)
	line := firstIntPath(fields,
		[]string{"line"},
		[]string{"Line"},
		[]string{"startLine"},
		[]string{"StartLine"},
		[]string{"region", "startLine"},
		[]string{"location", "start", "line"},
		[]string{"Start", "Line"},
	)
	column := firstIntPath(fields,
		[]string{"column"},
		[]string{"Column"},
		[]string{"col"},
		[]string{"startColumn"},
		[]string{"StartColumn"},
		[]string{"region", "startColumn"},
		[]string{"location", "start", "column"},
	)
	code := firstStringPath(fields,
		[]string{"code"},
		[]string{"Code"},
		[]string{"rule"},
		[]string{"Rule"},
		[]string{"rule_id"},
		[]string{"RuleID"},
		[]string{"check_id"},
		[]string{"checkId"},
		[]string{"id"},
		[]string{"ID"},
	)
	message := firstStringPath(fields,
		[]string{"message"},
		[]string{"Message"},
		[]string{"description"},
		[]string{"Description"},
		[]string{"title"},
		[]string{"Title"},
		[]string{"message", "text"},
		[]string{"Message", "Text"},
	)
	severity := firstStringPath(fields,
		[]string{"severity"},
		[]string{"Severity"},
		[]string{"level"},
		[]string{"Level"},
		[]string{"type"},
		[]string{"Type"},
	)

	// An object with neither a message nor a code is not a diagnostic.
	if message == "" && code == "" {
		return Finding{}, false
	}
	// Objects with no location are only accepted when the category is
	// security-related (such reports often lack file/line) — NOTE(review):
	// the trailing `code == ""` clause is unreachable-looking but kept
	// as-is to preserve behaviour.
	if file == "" && line == 0 && !strings.Contains(strings.ToLower(category), "security") && code == "" {
		return Finding{}, false
	}

	return Finding{
		Tool:   tool,
		File:   file,
		Line:   line,
		Column: column,
		// Fall back to the category default when the tool's severity
		// value normalises to the empty string.
		Severity: firstNonEmpty(normaliseSeverity(severity), defaultSeverityForCategory(category)),
		Code:     code,
		Message:  message,
		Category: category,
	}, true
}
|
|
||||||
|
|
||||||
func parseTextDiagnostics(tool string, category string, output string) []Finding {
|
|
||||||
var findings []Finding
|
|
||||||
|
|
||||||
for line := range strings.SplitSeq(strings.TrimSpace(output), "\n") {
|
|
||||||
trimmed := strings.TrimSpace(line)
|
|
||||||
if trimmed == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if finding, ok := parseTextDiagnosticLine(tool, category, trimmed); ok {
|
|
||||||
findings = append(findings, finding)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(findings) == 0 && strings.TrimSpace(output) != "" {
|
|
||||||
findings = append(findings, Finding{
|
|
||||||
Tool: tool,
|
|
||||||
Severity: defaultSeverityForCategory(category),
|
|
||||||
Code: "diagnostic",
|
|
||||||
Message: strings.TrimSpace(output),
|
|
||||||
Category: category,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return dedupeFindings(findings)
|
|
||||||
}
|
|
||||||
|
|
||||||
func parsePrettierDiagnostics(tool string, category string, output string) []Finding {
|
|
||||||
var findings []Finding
|
|
||||||
|
|
||||||
for line := range strings.SplitSeq(strings.TrimSpace(output), "\n") {
|
|
||||||
trimmed := strings.TrimSpace(line)
|
|
||||||
if trimmed == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
findings = append(findings, Finding{
|
|
||||||
Tool: tool,
|
|
||||||
File: filepath.ToSlash(trimmed),
|
|
||||||
Severity: defaultSeverityForCategory(category),
|
|
||||||
Code: "prettier-format",
|
|
||||||
Message: "File is not formatted with Prettier",
|
|
||||||
Category: category,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return dedupeFindings(findings)
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseTextDiagnosticLine(tool string, category string, line string) (Finding, bool) {
|
|
||||||
segments := strings.Split(line, ":")
|
|
||||||
if len(segments) < 3 {
|
|
||||||
return Finding{}, false
|
|
||||||
}
|
|
||||||
|
|
||||||
lineNumber, lineErr := strconv.Atoi(strings.TrimSpace(segments[1]))
|
|
||||||
if lineErr != nil {
|
|
||||||
return Finding{}, false
|
|
||||||
}
|
|
||||||
|
|
||||||
columnNumber := 0
|
|
||||||
messageIndex := 2
|
|
||||||
if len(segments) > 3 {
|
|
||||||
if parsedColumn, columnErr := strconv.Atoi(strings.TrimSpace(segments[2])); columnErr == nil {
|
|
||||||
columnNumber = parsedColumn
|
|
||||||
messageIndex = 3
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
message := strings.TrimSpace(strings.Join(segments[messageIndex:], ":"))
|
|
||||||
if message == "" {
|
|
||||||
return Finding{}, false
|
|
||||||
}
|
|
||||||
|
|
||||||
severity := defaultSeverityForCategory(category)
|
|
||||||
switch {
|
|
||||||
case strings.Contains(strings.ToLower(message), "warning"):
|
|
||||||
severity = "warning"
|
|
||||||
case strings.Contains(strings.ToLower(message), "error"):
|
|
||||||
severity = "error"
|
|
||||||
}
|
|
||||||
|
|
||||||
return Finding{
|
|
||||||
Tool: tool,
|
|
||||||
File: filepath.ToSlash(strings.TrimSpace(segments[0])),
|
|
||||||
Line: lineNumber,
|
|
||||||
Column: columnNumber,
|
|
||||||
Severity: severity,
|
|
||||||
Code: "diagnostic",
|
|
||||||
Message: message,
|
|
||||||
Category: category,
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func firstStringPath(fields map[string]any, paths ...[]string) string {
|
|
||||||
for _, path := range paths {
|
|
||||||
if value, ok := lookupPath(fields, path); ok {
|
|
||||||
switch typed := value.(type) {
|
|
||||||
case string:
|
|
||||||
if strings.TrimSpace(typed) != "" {
|
|
||||||
return strings.TrimSpace(typed)
|
|
||||||
}
|
|
||||||
case json.Number:
|
|
||||||
return typed.String()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func firstIntPath(fields map[string]any, paths ...[]string) int {
|
|
||||||
for _, path := range paths {
|
|
||||||
if value, ok := lookupPath(fields, path); ok {
|
|
||||||
switch typed := value.(type) {
|
|
||||||
case int:
|
|
||||||
return typed
|
|
||||||
case int64:
|
|
||||||
return int(typed)
|
|
||||||
case float64:
|
|
||||||
return int(typed)
|
|
||||||
case json.Number:
|
|
||||||
parsed, _ := typed.Int64()
|
|
||||||
return int(parsed)
|
|
||||||
case string:
|
|
||||||
parsed, err := strconv.Atoi(strings.TrimSpace(typed))
|
|
||||||
if err == nil {
|
|
||||||
return parsed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func lookupPath(fields map[string]any, path []string) (any, bool) {
|
|
||||||
current := any(fields)
|
|
||||||
for _, segment := range path {
|
|
||||||
object, ok := current.(map[string]any)
|
|
||||||
if !ok {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
value, found := mapValue(object, segment)
|
|
||||||
if !found {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
current = value
|
|
||||||
}
|
|
||||||
return current, true
|
|
||||||
}
|
|
||||||
|
|
||||||
// mapValue looks up key in fields, first by exact match, then by a
// case-insensitive comparison of keys.
func mapValue(fields map[string]any, key string) (any, bool) {
	if exact, ok := fields[key]; ok {
		return exact, true
	}
	wanted := strings.ToLower(key)
	for candidate, value := range fields {
		if strings.ToLower(candidate) == wanted {
			return value, true
		}
	}
	return nil, false
}
|
|
||||||
|
|
||||||
func dedupeFindings(findings []Finding) []Finding {
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
var deduped []Finding
|
|
||||||
for _, finding := range findings {
|
|
||||||
key := strings.Join([]string{
|
|
||||||
finding.Tool,
|
|
||||||
finding.File,
|
|
||||||
strconv.Itoa(finding.Line),
|
|
||||||
strconv.Itoa(finding.Column),
|
|
||||||
finding.Code,
|
|
||||||
finding.Message,
|
|
||||||
}, "|")
|
|
||||||
if seen[key] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[key] = true
|
|
||||||
deduped = append(deduped, finding)
|
|
||||||
}
|
|
||||||
return deduped
|
|
||||||
}
|
|
||||||
|
|
||||||
func filterRulesByTag(rules []Rule, tag string) []Rule {
|
|
||||||
var filtered []Rule
|
|
||||||
for _, rule := range rules {
|
|
||||||
for _, currentTag := range rule.Tags {
|
|
||||||
if currentTag == tag {
|
|
||||||
filtered = append(filtered, rule)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return filtered
|
|
||||||
}
|
|
||||||
|
|
||||||
func ruleCategory(rule Rule) string {
|
|
||||||
for _, tag := range rule.Tags {
|
|
||||||
switch tag {
|
|
||||||
case "security", "style", "correctness", "performance", "compliance":
|
|
||||||
return tag
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return "correctness"
|
|
||||||
}
|
|
||||||
|
|
||||||
// normaliseSeverity maps the many severity spellings used by different
// tools onto the canonical error/warning/info scale. Unknown values pass
// through lower-cased and trimmed.
func normaliseSeverity(severity string) string {
	canonical := strings.ToLower(strings.TrimSpace(severity))
	switch canonical {
	case "critical", "high", "error", "errors":
		return "error"
	case "medium", "low", "warning", "warn":
		return "warning"
	case "info", "note":
		return "info"
	}
	return canonical
}
|
|
||||||
|
|
||||||
// defaultSeverityForCategory picks the severity used when a tool does
// not report one: security issues default to errors, every other
// category (including compliance) to warnings.
func defaultSeverityForCategory(category string) string {
	if category == "security" {
		return "error"
	}
	return "warning"
}
|
|
||||||
|
|
||||||
// firstNonEmpty returns the first argument that is non-empty after
// trimming whitespace, in trimmed form; "" when every value is blank.
func firstNonEmpty(values ...string) string {
	for _, candidate := range values {
		if trimmed := strings.TrimSpace(candidate); trimmed != "" {
			return trimmed
		}
	}
	return ""
}
|
|
||||||
|
|
||||||
// firstVersionLine returns the first non-blank line of a tool's version
// output, trimmed; "" when the output contains no content.
func firstVersionLine(output string) string {
	for _, rawLine := range strings.Split(strings.TrimSpace(output), "\n") {
		if trimmed := strings.TrimSpace(rawLine); trimmed != "" {
			return trimmed
		}
	}
	return ""
}
|
|
||||||
|
|
||||||
// containsString reports whether target appears in values.
func containsString(values []string, target string) bool {
	for index := range values {
		if values[index] == target {
			return true
		}
	}
	return false
}
|
|
||||||
|
|
||||||
// defaultCatalogRulesYAML is the embedded fallback rule set parsed by
// loadBuiltinCatalog when no on-disk catalog is available. Each entry is
// a regex-detected Rule; pattern/exclude_pattern are regular expressions.
const defaultCatalogRulesYAML = `
- id: go-cor-003
  title: "Silent error swallowing with blank identifier"
  severity: medium
  languages: [go]
  tags: [correctness, errors]
  pattern: '^\s*_\s*=\s*\w+\.\w+\('
  exclude_pattern: 'defer|Close\(|Flush\('
  fix: "Handle the error explicitly — log it, return it, or document why it is safe to discard"
  detection: regex
  auto_fixable: false

- id: go-cor-004
  title: "Panic in library code"
  severity: high
  languages: [go]
  tags: [correctness, panic]
  pattern: '\bpanic\('
  exclude_pattern: '_test\.go|// unreachable|Must\w+\('
  fix: "Return an error instead of panicking — panics in libraries crash the caller"
  detection: regex
  auto_fixable: false

- id: go-sec-001
  title: "SQL wildcard injection in LIKE clauses"
  severity: high
  languages: [go]
  tags: [security, injection]
  pattern: 'LIKE\s+\?.*["%].*\+'
  fix: "Use parameterised LIKE with EscapeLike() helper to sanitise wildcard characters"
  detection: regex
  auto_fixable: false

- id: go-sec-002
  title: "Path traversal via filepath.Join"
  severity: high
  languages: [go]
  tags: [security, path-traversal]
  pattern: 'filepath\.Join\(.*,\s*\w+\)'
  exclude_pattern: 'filepath\.Clean|securejoin|ValidatePath'
  fix: "Validate the path component or use securejoin to prevent directory traversal"
  detection: regex
  auto_fixable: false

- id: go-sec-004
  title: "Non-constant-time authentication comparison"
  severity: critical
  languages: [go]
  tags: [security, timing-attack]
  pattern: '==\s*\w*(token|key|secret|password|hash|digest|hmac|mac|sig)'
  exclude_pattern: 'subtle\.ConstantTimeCompare|hmac\.Equal'
  fix: "Use subtle.ConstantTimeCompare() or hmac.Equal() for timing-safe comparison"
  detection: regex
  auto_fixable: false
`
|
|
||||||
|
|
@ -30,7 +30,6 @@ func LoadDir(dir string) (*Catalog, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, coreerr.E("Catalog.LoadDir", "loading catalog from "+dir, err)
|
return nil, coreerr.E("Catalog.LoadDir", "loading catalog from "+dir, err)
|
||||||
}
|
}
|
||||||
sortDirEntries(entries)
|
|
||||||
|
|
||||||
var rules []Rule
|
var rules []Rule
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
|
|
@ -57,7 +56,6 @@ func LoadFS(fsys fs.FS, dir string) (*Catalog, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, coreerr.E("Catalog.LoadFS", "loading catalog from embedded "+dir, err)
|
return nil, coreerr.E("Catalog.LoadFS", "loading catalog from embedded "+dir, err)
|
||||||
}
|
}
|
||||||
sortDirEntries(entries)
|
|
||||||
|
|
||||||
var rules []Rule
|
var rules []Rule
|
||||||
for _, entry := range entries {
|
for _, entry := range entries {
|
||||||
|
|
@ -78,12 +76,6 @@ func LoadFS(fsys fs.FS, dir string) (*Catalog, error) {
|
||||||
return &Catalog{Rules: rules}, nil
|
return &Catalog{Rules: rules}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func sortDirEntries(entries []fs.DirEntry) {
|
|
||||||
slices.SortFunc(entries, func(a, b fs.DirEntry) int {
|
|
||||||
return strings.Compare(a.Name(), b.Name())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// ForLanguage returns all rules that apply to the given language.
|
// ForLanguage returns all rules that apply to the given language.
|
||||||
func (c *Catalog) ForLanguage(lang string) []Rule {
|
func (c *Catalog) ForLanguage(lang string) []Rule {
|
||||||
var result []Rule
|
var result []Rule
|
||||||
|
|
|
||||||
|
|
@ -29,38 +29,6 @@ func TestLoadDir_Good(t *testing.T) {
|
||||||
assert.NotNil(t, cat.ByID("go-mod-001"))
|
assert.NotNil(t, cat.ByID("go-mod-001"))
|
||||||
}
|
}
|
||||||
|
|
||||||
// TestLoadDir_SortsFilesDeterministically verifies that rule files load
// in lexical filename order, not creation order: z.yaml is written first
// but a.yaml's rule must come out first.
func TestLoadDir_SortsFilesDeterministically(t *testing.T) {
	dir := t.TempDir()

	err := os.WriteFile(filepath.Join(dir, "z.yaml"), []byte(`- id: z-rule
  title: "Z rule"
  severity: info
  languages: [go]
  pattern: 'z'
  fix: "z"
  detection: regex
  auto_fixable: false
`), 0o644)
	require.NoError(t, err)

	err = os.WriteFile(filepath.Join(dir, "a.yaml"), []byte(`- id: a-rule
  title: "A rule"
  severity: info
  languages: [go]
  pattern: 'a'
  fix: "a"
  detection: regex
  auto_fixable: false
`), 0o644)
	require.NoError(t, err)

	cat, err := LoadDir(dir)
	require.NoError(t, err)
	require.Len(t, cat.Rules, 2)
	// Lexical filename order: a-rule must precede z-rule.
	assert.Equal(t, "a-rule", cat.Rules[0].ID)
	assert.Equal(t, "z-rule", cat.Rules[1].ID)
}
|
|
||||||
|
|
||||||
func TestLoadDir_Bad_NonexistentDir(t *testing.T) {
|
func TestLoadDir_Bad_NonexistentDir(t *testing.T) {
|
||||||
_, err := LoadDir("/nonexistent/path/that/does/not/exist")
|
_, err := LoadDir("/nonexistent/path/that/does/not/exist")
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
|
|
|
||||||
|
|
@ -1,182 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
coreio "forge.lthn.ai/core/go-io"
|
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
|
||||||
"gopkg.in/yaml.v3"
|
|
||||||
)
|
|
||||||
|
|
||||||
// DefaultConfigPath is the repo-local config path used by core-lint.
|
|
||||||
const DefaultConfigPath = ".core/lint.yaml"
|
|
||||||
|
|
||||||
// LintConfig defines which tools run for each language and how results fail the build.
//
//	cfg := lint.DefaultConfig()
//	cfg.FailOn = "warning"
type LintConfig struct {
	// Lint holds the per-language / per-concern tool selections.
	Lint ToolGroups `yaml:"lint" json:"lint"`
	// Output is the report format (DefaultConfig uses "json"; "sarif"
	// appears in the examples above).
	Output string `yaml:"output" json:"output"`
	// FailOn is the severity threshold at which a run fails the build.
	FailOn string `yaml:"fail_on" json:"fail_on"`
	// Paths are the locations to lint.
	Paths []string `yaml:"paths" json:"paths"`
	// Exclude lists path prefixes to skip (e.g. vendor/, node_modules/).
	Exclude []string `yaml:"exclude" json:"exclude"`
	// Schedules are named lint runs for external schedulers.
	Schedules map[string]Schedule `yaml:"schedules,omitempty" json:"schedules,omitempty"`
}
|
|
||||||
|
|
||||||
// ToolGroups maps config groups to tool names.
//
// Each field holds the ordered list of linter tools to run for that
// language or concern; empty groups are omitted from YAML/JSON output.
type ToolGroups struct {
	Go         []string `yaml:"go,omitempty" json:"go,omitempty"`
	PHP        []string `yaml:"php,omitempty" json:"php,omitempty"`
	JS         []string `yaml:"js,omitempty" json:"js,omitempty"`
	TS         []string `yaml:"ts,omitempty" json:"ts,omitempty"`
	Python     []string `yaml:"python,omitempty" json:"python,omitempty"`
	Infra      []string `yaml:"infra,omitempty" json:"infra,omitempty"`
	Security   []string `yaml:"security,omitempty" json:"security,omitempty"`
	Compliance []string `yaml:"compliance,omitempty" json:"compliance,omitempty"`
}
|
|
||||||
|
|
||||||
// Schedule declares a named lint run for external schedulers.
//
// Optional fields override the corresponding LintConfig settings for
// that scheduled run only.
type Schedule struct {
	// Cron is the schedule expression handed to the external scheduler.
	Cron string `yaml:"cron" json:"cron"`
	// Categories restricts the run to specific rule categories.
	Categories []string `yaml:"categories,omitempty" json:"categories,omitempty"`
	// Output overrides the report format for this run.
	Output string `yaml:"output,omitempty" json:"output,omitempty"`
	// Paths overrides the paths to lint for this run.
	Paths []string `yaml:"paths,omitempty" json:"paths,omitempty"`
	// FailOn overrides the failing severity threshold for this run.
	FailOn string `yaml:"fail_on,omitempty" json:"fail_on,omitempty"`
}
|
|
||||||
|
|
||||||
// DefaultConfig returns the RFC baseline config used when a repo has no local file yet.
//
//	cfg := lint.DefaultConfig()
//	cfg.Output = "sarif"
func DefaultConfig() LintConfig {
	return LintConfig{
		Lint: ToolGroups{
			// Tools within each group are listed in run order.
			Go: []string{
				"golangci-lint",
				"gosec",
				"govulncheck",
				"staticcheck",
				"revive",
				"errcheck",
			},
			PHP: []string{
				"phpstan",
				"psalm",
				"phpcs",
				"phpmd",
				"pint",
			},
			JS: []string{
				"biome",
				"oxlint",
				"eslint",
				"prettier",
			},
			TS: []string{
				"biome",
				"oxlint",
				"typescript",
			},
			Python: []string{
				"ruff",
				"mypy",
				"bandit",
				"pylint",
			},
			Infra: []string{
				"shellcheck",
				"hadolint",
				"yamllint",
				"jsonlint",
				"markdownlint",
			},
			Security: []string{
				"gitleaks",
				"trivy",
				"gosec",
				"bandit",
				"semgrep",
			},
			Compliance: []string{
				"syft",
				"grype",
				"scancode",
			},
		},
		// Baseline behaviour: JSON reports, fail only on errors, lint
		// everything except vendored/generated trees.
		Output:  "json",
		FailOn:  "error",
		Paths:   []string{"."},
		Exclude: []string{"vendor/", "node_modules/", ".core/"},
	}
}
|
|
||||||
|
|
||||||
// DefaultConfigYAML marshals the default config as the file content for `core-lint init`.
//
// A marshal error would indicate a bug in DefaultConfig itself; it is
// wrapped and returned rather than panicking.
func DefaultConfigYAML() (string, error) {
	data, err := yaml.Marshal(DefaultConfig())
	if err != nil {
		return "", coreerr.E("DefaultConfigYAML", "marshal default config", err)
	}
	return string(data), nil
}
|
|
||||||
|
|
||||||
// ResolveConfigPath resolves an explicit config path or the repo-local default.
|
|
||||||
//
|
|
||||||
// path := lint.ResolveConfigPath(".", "")
|
|
||||||
// override := lint.ResolveConfigPath("/repo", ".core/lint.yaml")
|
|
||||||
func ResolveConfigPath(projectPath string, override string) string {
|
|
||||||
if projectPath == "" {
|
|
||||||
projectPath = "."
|
|
||||||
}
|
|
||||||
if override == "" {
|
|
||||||
return filepath.Join(projectPath, DefaultConfigPath)
|
|
||||||
}
|
|
||||||
if filepath.IsAbs(override) {
|
|
||||||
return override
|
|
||||||
}
|
|
||||||
return filepath.Join(projectPath, override)
|
|
||||||
}
|
|
||||||
|
|
||||||
// LoadProjectConfig reads `.core/lint.yaml` if present, otherwise returns the default config.
//
//	cfg, path, err := lint.LoadProjectConfig(".", "")
//	cfg, _, err = lint.LoadProjectConfig("/repo", ".core/lint.yaml")
//
// The returned path is "" when no config file was found and the defaults
// were used. The YAML is unmarshalled over the defaults, so fields
// omitted from the file keep their default values.
func LoadProjectConfig(projectPath string, override string) (LintConfig, string, error) {
	config := DefaultConfig()
	path := ResolveConfigPath(projectPath, override)

	// A missing file is not an error: silently fall back to defaults.
	_, err := coreio.Local.Stat(path)
	if err != nil {
		if os.IsNotExist(err) {
			return config, "", nil
		}
		return config, "", coreerr.E("LoadProjectConfig", "stat "+path, err)
	}

	raw, err := coreio.Local.Read(path)
	if err != nil {
		return config, "", coreerr.E("LoadProjectConfig", "read "+path, err)
	}
	if err := yaml.Unmarshal([]byte(raw), &config); err != nil {
		return config, "", coreerr.E("LoadProjectConfig", "parse "+path, err)
	}

	return config, path, nil
}
|
|
||||||
|
|
||||||
// ResolveSchedule returns a named schedule from the config.
|
|
||||||
//
|
|
||||||
// schedule, err := lint.ResolveSchedule(cfg, "nightly")
|
|
||||||
func ResolveSchedule(config LintConfig, name string) (*Schedule, error) {
|
|
||||||
if name == "" {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
schedule, ok := config.Schedules[name]
|
|
||||||
if !ok {
|
|
||||||
return nil, coreerr.E("ResolveSchedule", "schedule "+name+" not found", nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &schedule, nil
|
|
||||||
}
|
|
||||||
|
|
@ -2,12 +2,10 @@ package lint
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"cmp"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"math"
|
"math"
|
||||||
"os"
|
"os"
|
||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
@ -19,9 +17,9 @@ import (
|
||||||
// CoverageSnapshot represents a point-in-time coverage measurement.
|
// CoverageSnapshot represents a point-in-time coverage measurement.
|
||||||
type CoverageSnapshot struct {
|
type CoverageSnapshot struct {
|
||||||
Timestamp time.Time `json:"timestamp"`
|
Timestamp time.Time `json:"timestamp"`
|
||||||
Packages map[string]float64 `json:"packages"` // package → coverage %
|
Packages map[string]float64 `json:"packages"` // package → coverage %
|
||||||
Total float64 `json:"total"` // overall coverage %
|
Total float64 `json:"total"` // overall coverage %
|
||||||
Meta map[string]string `json:"meta,omitempty"` // optional metadata (commit, branch, etc.)
|
Meta map[string]string `json:"meta,omitempty"` // optional metadata (commit, branch, etc.)
|
||||||
}
|
}
|
||||||
|
|
||||||
// CoverageRegression flags a package whose coverage changed between runs.
|
// CoverageRegression flags a package whose coverage changed between runs.
|
||||||
|
|
@ -249,24 +247,5 @@ func CompareCoverage(previous, current CoverageSnapshot) CoverageComparison {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
slices.Sort(comp.NewPackages)
|
|
||||||
slices.Sort(comp.Removed)
|
|
||||||
slices.SortFunc(comp.Regressions, func(a, b CoverageRegression) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Package, b.Package),
|
|
||||||
cmp.Compare(a.Previous, b.Previous),
|
|
||||||
cmp.Compare(a.Current, b.Current),
|
|
||||||
cmp.Compare(a.Delta, b.Delta),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
slices.SortFunc(comp.Improvements, func(a, b CoverageRegression) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Package, b.Package),
|
|
||||||
cmp.Compare(a.Previous, b.Previous),
|
|
||||||
cmp.Compare(a.Current, b.Current),
|
|
||||||
cmp.Compare(a.Delta, b.Delta),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
return comp
|
return comp
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -79,37 +79,6 @@ func TestCompareCoverage(t *testing.T) {
|
||||||
assert.InDelta(t, 6.7, comp.TotalDelta, 0.1)
|
assert.InDelta(t, 6.7, comp.TotalDelta, 0.1)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCompareCoverage_SortsResultSlices(t *testing.T) {
|
|
||||||
prev := CoverageSnapshot{
|
|
||||||
Packages: map[string]float64{
|
|
||||||
"pkg/z": 90.0,
|
|
||||||
"pkg/b": 60.0,
|
|
||||||
"pkg/a": 80.0,
|
|
||||||
"pkg/c": 50.0,
|
|
||||||
},
|
|
||||||
Total: 70.0,
|
|
||||||
}
|
|
||||||
curr := CoverageSnapshot{
|
|
||||||
Packages: map[string]float64{
|
|
||||||
"pkg/b": 55.0,
|
|
||||||
"pkg/a": 70.0,
|
|
||||||
"pkg/c": 60.0,
|
|
||||||
"pkg/y": 40.0,
|
|
||||||
},
|
|
||||||
Total: 55.0,
|
|
||||||
}
|
|
||||||
|
|
||||||
comp := CompareCoverage(prev, curr)
|
|
||||||
|
|
||||||
assert.Equal(t, []string{"pkg/y"}, comp.NewPackages)
|
|
||||||
assert.Equal(t, []string{"pkg/z"}, comp.Removed)
|
|
||||||
require.Len(t, comp.Regressions, 2)
|
|
||||||
assert.Equal(t, "pkg/a", comp.Regressions[0].Package)
|
|
||||||
assert.Equal(t, "pkg/b", comp.Regressions[1].Package)
|
|
||||||
require.Len(t, comp.Improvements, 1)
|
|
||||||
assert.Equal(t, "pkg/c", comp.Improvements[0].Package)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCompareCoverage_NoChange(t *testing.T) {
|
func TestCompareCoverage_NoChange(t *testing.T) {
|
||||||
snap := CoverageSnapshot{
|
snap := CoverageSnapshot{
|
||||||
Packages: map[string]float64{"pkg/a": 80.0},
|
Packages: map[string]float64{"pkg/a": 80.0},
|
||||||
|
|
|
||||||
|
|
@ -1,129 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io/fs"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var projectLanguageByExtension = map[string]string{
|
|
||||||
".go": "go",
|
|
||||||
".php": "php",
|
|
||||||
".cpp": "cpp",
|
|
||||||
".cc": "cpp",
|
|
||||||
".c": "cpp",
|
|
||||||
".h": "cpp",
|
|
||||||
".js": "js",
|
|
||||||
".jsx": "js",
|
|
||||||
".ts": "ts",
|
|
||||||
".tsx": "ts",
|
|
||||||
".py": "python",
|
|
||||||
".rs": "rust",
|
|
||||||
".sh": "shell",
|
|
||||||
".yaml": "yaml",
|
|
||||||
".yml": "yaml",
|
|
||||||
".json": "json",
|
|
||||||
".md": "markdown",
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect returns the project languages inferred from markers and file names.
|
|
||||||
//
|
|
||||||
// lint.Detect(".")
|
|
||||||
// lint.Detect("/path/to/project")
|
|
||||||
func Detect(path string) []string {
|
|
||||||
if path == "" {
|
|
||||||
path = "."
|
|
||||||
}
|
|
||||||
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
info, err := os.Stat(path)
|
|
||||||
if err != nil {
|
|
||||||
return []string{}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !info.IsDir() {
|
|
||||||
recordDetectedPath(seen, path)
|
|
||||||
return sortedDetectedLanguages(seen)
|
|
||||||
}
|
|
||||||
|
|
||||||
if shouldSkipTraversalRoot(path) {
|
|
||||||
return []string{}
|
|
||||||
}
|
|
||||||
|
|
||||||
_ = filepath.WalkDir(path, func(currentPath string, entry fs.DirEntry, walkErr error) error {
|
|
||||||
if walkErr != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if entry.IsDir() {
|
|
||||||
if currentPath != path && IsExcludedDir(entry.Name()) {
|
|
||||||
return filepath.SkipDir
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
recordDetectedPath(seen, currentPath)
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
|
|
||||||
return sortedDetectedLanguages(seen)
|
|
||||||
}
|
|
||||||
|
|
||||||
func detectFromFiles(files []string) []string {
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
for _, file := range files {
|
|
||||||
recordDetectedPath(seen, file)
|
|
||||||
}
|
|
||||||
return sortedDetectedLanguages(seen)
|
|
||||||
}
|
|
||||||
|
|
||||||
func recordDetectedPath(seen map[string]bool, path string) {
|
|
||||||
name := filepath.Base(path)
|
|
||||||
matchedMarker := false
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case name == "go.mod":
|
|
||||||
seen["go"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case name == "composer.json":
|
|
||||||
seen["php"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case name == "package.json":
|
|
||||||
seen["js"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case name == "tsconfig.json":
|
|
||||||
seen["ts"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case name == "requirements.txt", name == "pyproject.toml":
|
|
||||||
seen["python"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case name == "Cargo.toml":
|
|
||||||
seen["rust"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
case strings.HasPrefix(name, "Dockerfile"):
|
|
||||||
seen["dockerfile"] = true
|
|
||||||
matchedMarker = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if matchedMarker {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if lang, ok := projectLanguageByExtension[strings.ToLower(filepath.Ext(name))]; ok {
|
|
||||||
seen[lang] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortedDetectedLanguages(seen map[string]bool) []string {
|
|
||||||
var languages []string
|
|
||||||
for language := range seen {
|
|
||||||
languages = append(languages, language)
|
|
||||||
}
|
|
||||||
slices.Sort(languages)
|
|
||||||
if languages == nil {
|
|
||||||
return []string{}
|
|
||||||
}
|
|
||||||
return languages
|
|
||||||
}
|
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestDetect_Good_ProjectMarkersAndFiles(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "main.cpp"), []byte("int main() { return 0; }\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "package.json"), []byte("{}\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "tsconfig.json"), []byte("{}\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "requirements.txt"), []byte("ruff\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "Dockerfile"), []byte("FROM scratch\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "run.sh"), []byte("#!/bin/sh\n"), 0o644))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "README.md"), []byte("# Test\n"), 0o644))
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Join(dir, "vendor"), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, "vendor", "ignored.go"), []byte("package ignored\n"), 0o644))
|
|
||||||
|
|
||||||
assert.Equal(t,
|
|
||||||
[]string{"cpp", "dockerfile", "go", "js", "markdown", "python", "shell", "ts"},
|
|
||||||
Detect(dir),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDetectFromFiles_Good(t *testing.T) {
|
|
||||||
files := []string{
|
|
||||||
"main.go",
|
|
||||||
"src/lib.cc",
|
|
||||||
"web/app.ts",
|
|
||||||
"Dockerfile",
|
|
||||||
"scripts/run.sh",
|
|
||||||
"docs/index.md",
|
|
||||||
}
|
|
||||||
|
|
||||||
assert.Equal(t,
|
|
||||||
[]string{"cpp", "dockerfile", "go", "markdown", "shell", "ts"},
|
|
||||||
detectFromFiles(files),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDetect_MissingPathReturnsEmptySlice(t *testing.T) {
|
|
||||||
assert.Equal(t, []string{}, Detect(filepath.Join(t.TempDir(), "missing")))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDetect_Good_SkipsHiddenRootDirectory(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
hiddenDir := filepath.Join(dir, ".core")
|
|
||||||
require.NoError(t, os.MkdirAll(hiddenDir, 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(hiddenDir, "main.go"), []byte("package main\n"), 0o644))
|
|
||||||
|
|
||||||
assert.Equal(t, []string{}, Detect(hiddenDir))
|
|
||||||
}
|
|
||||||
|
|
@ -10,18 +10,13 @@ import (
|
||||||
|
|
||||||
// Finding represents a single match of a rule against a source file.
|
// Finding represents a single match of a rule against a source file.
|
||||||
type Finding struct {
|
type Finding struct {
|
||||||
Tool string `json:"tool,omitempty"`
|
RuleID string `json:"rule_id"`
|
||||||
|
Title string `json:"title"`
|
||||||
|
Severity string `json:"severity"`
|
||||||
File string `json:"file"`
|
File string `json:"file"`
|
||||||
Line int `json:"line"`
|
Line int `json:"line"`
|
||||||
Column int `json:"column,omitempty"`
|
Match string `json:"match"`
|
||||||
Severity string `json:"severity"`
|
Fix string `json:"fix"`
|
||||||
Code string `json:"code,omitempty"`
|
|
||||||
Message string `json:"message,omitempty"`
|
|
||||||
Category string `json:"category,omitempty"`
|
|
||||||
Fix string `json:"fix,omitempty"`
|
|
||||||
RuleID string `json:"rule_id,omitempty"`
|
|
||||||
Title string `json:"title,omitempty"`
|
|
||||||
Match string `json:"match,omitempty"`
|
|
||||||
Repo string `json:"repo,omitempty"`
|
Repo string `json:"repo,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
// ResolveRunOutputFormat resolves the report writer from the run input and project config.
|
|
||||||
//
|
|
||||||
// format, err := lint.ResolveRunOutputFormat(lint.RunInput{Path: ".", CI: true})
|
|
||||||
// format, err := lint.ResolveRunOutputFormat(lint.RunInput{Path: ".", Schedule: "nightly"})
|
|
||||||
func ResolveRunOutputFormat(input RunInput) (string, error) {
|
|
||||||
if input.Output != "" {
|
|
||||||
return input.Output, nil
|
|
||||||
}
|
|
||||||
if input.CI {
|
|
||||||
return "github", nil
|
|
||||||
}
|
|
||||||
config, _, err := LoadProjectConfig(input.Path, input.Config)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
schedule, err := ResolveSchedule(config, input.Schedule)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
if schedule != nil && schedule.Output != "" {
|
|
||||||
return schedule.Output, nil
|
|
||||||
}
|
|
||||||
if config.Output != "" {
|
|
||||||
return config.Output, nil
|
|
||||||
}
|
|
||||||
return "text", nil
|
|
||||||
}
|
|
||||||
|
|
@ -1,64 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestResolveRunOutputFormat_Good_Precedence(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte(`output: text
|
|
||||||
schedules:
|
|
||||||
nightly:
|
|
||||||
output: json
|
|
||||||
`), 0o644))
|
|
||||||
|
|
||||||
format, err := ResolveRunOutputFormat(RunInput{
|
|
||||||
Path: dir,
|
|
||||||
Output: "sarif",
|
|
||||||
CI: true,
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "sarif", format)
|
|
||||||
|
|
||||||
format, err = ResolveRunOutputFormat(RunInput{
|
|
||||||
Path: dir,
|
|
||||||
Schedule: "nightly",
|
|
||||||
CI: true,
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "github", format)
|
|
||||||
|
|
||||||
format, err = ResolveRunOutputFormat(RunInput{
|
|
||||||
Path: dir,
|
|
||||||
Schedule: "nightly",
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "json", format)
|
|
||||||
|
|
||||||
format, err = ResolveRunOutputFormat(RunInput{
|
|
||||||
Path: dir,
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "text", format)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestResolveRunOutputFormat_Good_ExplicitOutputBypassesConfigLoading(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
projectPath := filepath.Join(dir, "project-file")
|
|
||||||
require.NoError(t, os.WriteFile(projectPath, []byte("not a directory"), 0o644))
|
|
||||||
|
|
||||||
format, err := ResolveRunOutputFormat(RunInput{
|
|
||||||
Path: projectPath,
|
|
||||||
Output: "sarif",
|
|
||||||
Config: "broken/config.yaml",
|
|
||||||
Schedule: "nightly",
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "sarif", format)
|
|
||||||
}
|
|
||||||
|
|
@ -4,49 +4,27 @@ import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"strings"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Summary holds aggregate counts for a set of findings.
|
// Summary holds aggregate counts for a set of findings.
|
||||||
type Summary struct {
|
type Summary struct {
|
||||||
Total int `json:"total"`
|
Total int `json:"total"`
|
||||||
Errors int `json:"errors"`
|
BySeverity map[string]int `json:"by_severity"`
|
||||||
Warnings int `json:"warnings"`
|
|
||||||
Info int `json:"info"`
|
|
||||||
Passed bool `json:"passed"`
|
|
||||||
BySeverity map[string]int `json:"by_severity,omitempty"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Summarise counts findings by severity.
|
// Summarise counts findings by severity.
|
||||||
//
|
|
||||||
// summary := lint.Summarise(findings)
|
|
||||||
func Summarise(findings []Finding) Summary {
|
func Summarise(findings []Finding) Summary {
|
||||||
summary := Summary{
|
s := Summary{
|
||||||
Total: len(findings),
|
Total: len(findings),
|
||||||
BySeverity: make(map[string]int),
|
BySeverity: make(map[string]int),
|
||||||
}
|
}
|
||||||
for _, finding := range findings {
|
for _, f := range findings {
|
||||||
severity := strings.TrimSpace(finding.Severity)
|
s.BySeverity[f.Severity]++
|
||||||
if severity == "" {
|
|
||||||
severity = "warning"
|
|
||||||
}
|
|
||||||
summary.BySeverity[severity]++
|
|
||||||
switch severity {
|
|
||||||
case "error":
|
|
||||||
summary.Errors++
|
|
||||||
case "info":
|
|
||||||
summary.Info++
|
|
||||||
default:
|
|
||||||
summary.Warnings++
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
summary.Passed = summary.Errors == 0
|
return s
|
||||||
return summary
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// WriteJSON writes findings as a pretty-printed JSON array.
|
// WriteJSON writes findings as a pretty-printed JSON array.
|
||||||
//
|
|
||||||
// _ = lint.WriteJSON(os.Stdout, findings)
|
|
||||||
func WriteJSON(w io.Writer, findings []Finding) error {
|
func WriteJSON(w io.Writer, findings []Finding) error {
|
||||||
if findings == nil {
|
if findings == nil {
|
||||||
findings = []Finding{}
|
findings = []Finding{}
|
||||||
|
|
@ -57,8 +35,6 @@ func WriteJSON(w io.Writer, findings []Finding) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// WriteJSONL writes findings as newline-delimited JSON (one object per line).
|
// WriteJSONL writes findings as newline-delimited JSON (one object per line).
|
||||||
//
|
|
||||||
// _ = lint.WriteJSONL(os.Stdout, findings)
|
|
||||||
func WriteJSONL(w io.Writer, findings []Finding) error {
|
func WriteJSONL(w io.Writer, findings []Finding) error {
|
||||||
for _, f := range findings {
|
for _, f := range findings {
|
||||||
data, err := json.Marshal(f)
|
data, err := json.Marshal(f)
|
||||||
|
|
@ -72,172 +48,11 @@ func WriteJSONL(w io.Writer, findings []Finding) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// WriteText writes findings in a human-readable format.
|
// WriteText writes findings in a human-readable format:
|
||||||
//
|
//
|
||||||
// lint.WriteText(os.Stdout, findings)
|
// file:line [severity] title (rule-id)
|
||||||
func WriteText(w io.Writer, findings []Finding) {
|
func WriteText(w io.Writer, findings []Finding) {
|
||||||
for _, finding := range findings {
|
for _, f := range findings {
|
||||||
message := finding.Message
|
fmt.Fprintf(w, "%s:%d [%s] %s (%s)\n", f.File, f.Line, f.Severity, f.Title, f.RuleID)
|
||||||
if message == "" {
|
|
||||||
message = finding.Title
|
|
||||||
}
|
|
||||||
code := finding.Code
|
|
||||||
if code == "" {
|
|
||||||
code = finding.RuleID
|
|
||||||
}
|
|
||||||
fmt.Fprintf(w, "%s:%d [%s] %s (%s)\n", finding.File, finding.Line, finding.Severity, message, code)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteReportJSON writes the RFC report document as pretty-printed JSON.
|
|
||||||
//
|
|
||||||
// _ = lint.WriteReportJSON(os.Stdout, report)
|
|
||||||
func WriteReportJSON(w io.Writer, report Report) error {
|
|
||||||
enc := json.NewEncoder(w)
|
|
||||||
enc.SetIndent("", " ")
|
|
||||||
return enc.Encode(report)
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteReportText writes report findings followed by a short summary.
|
|
||||||
//
|
|
||||||
// lint.WriteReportText(os.Stdout, report)
|
|
||||||
func WriteReportText(w io.Writer, report Report) {
|
|
||||||
WriteText(w, report.Findings)
|
|
||||||
fmt.Fprintf(w, "\n%d finding(s): %d error(s), %d warning(s), %d info\n", report.Summary.Total, report.Summary.Errors, report.Summary.Warnings, report.Summary.Info)
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteReportGitHub writes GitHub Actions annotation lines.
|
|
||||||
//
|
|
||||||
// lint.WriteReportGitHub(os.Stdout, report)
|
|
||||||
func WriteReportGitHub(w io.Writer, report Report) {
|
|
||||||
for _, finding := range report.Findings {
|
|
||||||
level := githubAnnotationLevel(finding.Severity)
|
|
||||||
|
|
||||||
location := ""
|
|
||||||
if finding.File != "" {
|
|
||||||
location = fmt.Sprintf(" file=%s", finding.File)
|
|
||||||
if finding.Line > 0 {
|
|
||||||
location += fmt.Sprintf(",line=%d", finding.Line)
|
|
||||||
}
|
|
||||||
if finding.Column > 0 {
|
|
||||||
location += fmt.Sprintf(",col=%d", finding.Column)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
message := finding.Message
|
|
||||||
if message == "" {
|
|
||||||
message = finding.Title
|
|
||||||
}
|
|
||||||
code := finding.Code
|
|
||||||
if code == "" {
|
|
||||||
code = finding.RuleID
|
|
||||||
}
|
|
||||||
fmt.Fprintf(w, "::%s%s::[%s] %s (%s)\n", level, location, finding.Tool, message, code)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteReportSARIF writes a minimal SARIF document for code scanning tools.
|
|
||||||
//
|
|
||||||
// _ = lint.WriteReportSARIF(os.Stdout, report)
|
|
||||||
func WriteReportSARIF(w io.Writer, report Report) error {
|
|
||||||
type sarifMessage struct {
|
|
||||||
Text string `json:"text"`
|
|
||||||
}
|
|
||||||
type sarifRegion struct {
|
|
||||||
StartLine int `json:"startLine,omitempty"`
|
|
||||||
StartColumn int `json:"startColumn,omitempty"`
|
|
||||||
}
|
|
||||||
type sarifArtifactLocation struct {
|
|
||||||
URI string `json:"uri,omitempty"`
|
|
||||||
}
|
|
||||||
type sarifPhysicalLocation struct {
|
|
||||||
ArtifactLocation sarifArtifactLocation `json:"artifactLocation"`
|
|
||||||
Region sarifRegion `json:"region,omitempty"`
|
|
||||||
}
|
|
||||||
type sarifLocation struct {
|
|
||||||
PhysicalLocation sarifPhysicalLocation `json:"physicalLocation"`
|
|
||||||
}
|
|
||||||
type sarifResult struct {
|
|
||||||
RuleID string `json:"ruleId,omitempty"`
|
|
||||||
Level string `json:"level,omitempty"`
|
|
||||||
Message sarifMessage `json:"message"`
|
|
||||||
Locations []sarifLocation `json:"locations,omitempty"`
|
|
||||||
}
|
|
||||||
type sarifRun struct {
|
|
||||||
Tool struct {
|
|
||||||
Driver struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
} `json:"driver"`
|
|
||||||
} `json:"tool"`
|
|
||||||
Results []sarifResult `json:"results"`
|
|
||||||
}
|
|
||||||
type sarifLog struct {
|
|
||||||
Version string `json:"version"`
|
|
||||||
Schema string `json:"$schema"`
|
|
||||||
Runs []sarifRun `json:"runs"`
|
|
||||||
}
|
|
||||||
|
|
||||||
sarifRunValue := sarifRun{}
|
|
||||||
sarifRunValue.Tool.Driver.Name = "core-lint"
|
|
||||||
|
|
||||||
for _, finding := range report.Findings {
|
|
||||||
message := finding.Message
|
|
||||||
if message == "" {
|
|
||||||
message = finding.Title
|
|
||||||
}
|
|
||||||
ruleID := finding.Code
|
|
||||||
if ruleID == "" {
|
|
||||||
ruleID = finding.RuleID
|
|
||||||
}
|
|
||||||
|
|
||||||
result := sarifResult{
|
|
||||||
RuleID: ruleID,
|
|
||||||
Level: sarifLevel(finding.Severity),
|
|
||||||
Message: sarifMessage{Text: message},
|
|
||||||
}
|
|
||||||
if finding.File != "" {
|
|
||||||
result.Locations = []sarifLocation{{
|
|
||||||
PhysicalLocation: sarifPhysicalLocation{
|
|
||||||
ArtifactLocation: sarifArtifactLocation{URI: finding.File},
|
|
||||||
Region: sarifRegion{
|
|
||||||
StartLine: finding.Line,
|
|
||||||
StartColumn: finding.Column,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}}
|
|
||||||
}
|
|
||||||
sarifRunValue.Results = append(sarifRunValue.Results, result)
|
|
||||||
}
|
|
||||||
|
|
||||||
return json.NewEncoder(w).Encode(sarifLog{
|
|
||||||
Version: "2.1.0",
|
|
||||||
Schema: "https://json.schemastore.org/sarif-2.1.0.json",
|
|
||||||
Runs: []sarifRun{sarifRunValue},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func githubAnnotationLevel(severity string) string {
|
|
||||||
switch strings.ToLower(strings.TrimSpace(severity)) {
|
|
||||||
case "error":
|
|
||||||
return "error"
|
|
||||||
case "info":
|
|
||||||
return "notice"
|
|
||||||
case "warning", "":
|
|
||||||
return "warning"
|
|
||||||
default:
|
|
||||||
return "warning"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func sarifLevel(severity string) string {
|
|
||||||
switch strings.ToLower(strings.TrimSpace(severity)) {
|
|
||||||
case "error":
|
|
||||||
return "error"
|
|
||||||
case "warning":
|
|
||||||
return "warning"
|
|
||||||
case "info":
|
|
||||||
return "note"
|
|
||||||
default:
|
|
||||||
return "warning"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -134,45 +134,3 @@ func TestWriteText_Good_Empty(t *testing.T) {
|
||||||
WriteText(&buf, nil)
|
WriteText(&buf, nil)
|
||||||
assert.Empty(t, buf.String())
|
assert.Empty(t, buf.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestWriteReportGitHub_Good_MapsInfoToNotice(t *testing.T) {
|
|
||||||
var buf bytes.Buffer
|
|
||||||
|
|
||||||
WriteReportGitHub(&buf, Report{
|
|
||||||
Findings: []Finding{{
|
|
||||||
Tool: "demo",
|
|
||||||
File: "example.go",
|
|
||||||
Line: 7,
|
|
||||||
Column: 3,
|
|
||||||
Severity: "info",
|
|
||||||
Code: "demo-rule",
|
|
||||||
Message: "explanation",
|
|
||||||
}},
|
|
||||||
})
|
|
||||||
|
|
||||||
assert.Contains(t, buf.String(), "::notice file=example.go,line=7,col=3::[demo] explanation (demo-rule)")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestWriteReportSARIF_Good_MapsInfoToNote(t *testing.T) {
|
|
||||||
var buf bytes.Buffer
|
|
||||||
|
|
||||||
err := WriteReportSARIF(&buf, Report{
|
|
||||||
Findings: []Finding{{
|
|
||||||
Tool: "demo",
|
|
||||||
File: "example.go",
|
|
||||||
Line: 7,
|
|
||||||
Column: 3,
|
|
||||||
Severity: "info",
|
|
||||||
Code: "demo-rule",
|
|
||||||
Message: "explanation",
|
|
||||||
}},
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
var decoded map[string]any
|
|
||||||
require.NoError(t, json.Unmarshal(buf.Bytes(), &decoded))
|
|
||||||
|
|
||||||
runs := decoded["runs"].([]any)
|
|
||||||
results := runs[0].(map[string]any)["results"].([]any)
|
|
||||||
assert.Equal(t, "note", results[0].(map[string]any)["level"])
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -12,23 +12,17 @@ import (
|
||||||
|
|
||||||
// extensionMap maps file extensions to language identifiers.
|
// extensionMap maps file extensions to language identifiers.
|
||||||
var extensionMap = map[string]string{
|
var extensionMap = map[string]string{
|
||||||
".go": "go",
|
".go": "go",
|
||||||
".php": "php",
|
".php": "php",
|
||||||
".ts": "ts",
|
".ts": "ts",
|
||||||
".tsx": "ts",
|
".tsx": "ts",
|
||||||
".js": "js",
|
".js": "js",
|
||||||
".jsx": "js",
|
".jsx": "js",
|
||||||
".cpp": "cpp",
|
".cpp": "cpp",
|
||||||
".cc": "cpp",
|
".cc": "cpp",
|
||||||
".c": "cpp",
|
".c": "cpp",
|
||||||
".h": "cpp",
|
".h": "cpp",
|
||||||
".py": "python",
|
".py": "py",
|
||||||
".rs": "rust",
|
|
||||||
".sh": "shell",
|
|
||||||
".yaml": "yaml",
|
|
||||||
".yml": "yaml",
|
|
||||||
".json": "json",
|
|
||||||
".md": "markdown",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// defaultExcludes lists directory names that are always skipped during scanning.
|
// defaultExcludes lists directory names that are always skipped during scanning.
|
||||||
|
|
@ -41,51 +35,32 @@ var defaultExcludes = []string{
|
||||||
}
|
}
|
||||||
|
|
||||||
// DetectLanguage returns the language identifier for a filename based on its extension.
|
// DetectLanguage returns the language identifier for a filename based on its extension.
|
||||||
//
|
// Returns an empty string for unrecognised extensions.
|
||||||
// lint.DetectLanguage("main.go")
|
|
||||||
// lint.DetectLanguage("Dockerfile")
|
|
||||||
func DetectLanguage(filename string) string {
|
func DetectLanguage(filename string) string {
|
||||||
base := filepath.Base(filename)
|
ext := filepath.Ext(filename)
|
||||||
if strings.HasPrefix(base, "Dockerfile") {
|
|
||||||
return "dockerfile"
|
|
||||||
}
|
|
||||||
|
|
||||||
ext := filepath.Ext(base)
|
|
||||||
if lang, ok := extensionMap[ext]; ok {
|
if lang, ok := extensionMap[ext]; ok {
|
||||||
return lang
|
return lang
|
||||||
}
|
}
|
||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
func shouldSkipTraversalRoot(path string) bool {
|
|
||||||
cleanedPath := filepath.Clean(path)
|
|
||||||
if cleanedPath == "." {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
base := filepath.Base(cleanedPath)
|
|
||||||
if base == "." || base == string(filepath.Separator) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return IsExcludedDir(base)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Scanner walks directory trees and matches files against lint rules.
|
// Scanner walks directory trees and matches files against lint rules.
|
||||||
type Scanner struct {
|
type Scanner struct {
|
||||||
matcher *Matcher
|
matcher *Matcher
|
||||||
rules []Rule
|
rules []Rule
|
||||||
|
excludes []string
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewScanner creates a Scanner with the given rules and default directory exclusions.
|
// NewScanner creates a Scanner with the given rules and default directory exclusions.
|
||||||
func NewScanner(rules []Rule) (*Scanner, error) {
|
func NewScanner(rules []Rule) (*Scanner, error) {
|
||||||
matcher, err := NewMatcher(rules)
|
m, err := NewMatcher(rules)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
return &Scanner{
|
return &Scanner{
|
||||||
matcher: matcher,
|
matcher: m,
|
||||||
rules: rules,
|
rules: rules,
|
||||||
|
excludes: slices.Clone(defaultExcludes),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -94,19 +69,15 @@ func NewScanner(rules []Rule) (*Scanner, error) {
|
||||||
func (s *Scanner) ScanDir(root string) ([]Finding, error) {
|
func (s *Scanner) ScanDir(root string) ([]Finding, error) {
|
||||||
var findings []Finding
|
var findings []Finding
|
||||||
|
|
||||||
if shouldSkipTraversalRoot(root) {
|
|
||||||
return findings, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
|
err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Skip excluded directories and hidden directories.
|
// Skip excluded directories.
|
||||||
if d.IsDir() {
|
if d.IsDir() {
|
||||||
name := d.Name()
|
name := d.Name()
|
||||||
if IsExcludedDir(name) {
|
if slices.Contains(s.excludes, name) {
|
||||||
return filepath.SkipDir
|
return filepath.SkipDir
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
|
|
@ -131,7 +102,7 @@ func (s *Scanner) ScanDir(root string) ([]Finding, error) {
|
||||||
content := []byte(raw)
|
content := []byte(raw)
|
||||||
|
|
||||||
// Build a matcher scoped to this file's language.
|
// Build a matcher scoped to this file's language.
|
||||||
matcher, err := NewMatcher(langRules)
|
m, err := NewMatcher(langRules)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
@ -142,7 +113,7 @@ func (s *Scanner) ScanDir(root string) ([]Finding, error) {
|
||||||
relPath = path
|
relPath = path
|
||||||
}
|
}
|
||||||
|
|
||||||
found := matcher.Match(relPath, content)
|
found := m.Match(relPath, content)
|
||||||
findings = append(findings, found...)
|
findings = append(findings, found...)
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
|
|
@ -172,12 +143,12 @@ func (s *Scanner) ScanFile(path string) ([]Finding, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
matcher, err := NewMatcher(langRules)
|
m, err := NewMatcher(langRules)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return matcher.Match(path, content), nil
|
return m.Match(path, content), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// filterRulesByLanguage returns rules that include the given language.
|
// filterRulesByLanguage returns rules that include the given language.
|
||||||
|
|
|
||||||
|
|
@ -25,10 +25,9 @@ func TestDetectLanguage_Good(t *testing.T) {
|
||||||
{"core.c", "cpp"},
|
{"core.c", "cpp"},
|
||||||
{"app.js", "js"},
|
{"app.js", "js"},
|
||||||
{"component.jsx", "js"},
|
{"component.jsx", "js"},
|
||||||
{"unknown.rs", "rust"},
|
{"unknown.rs", ""},
|
||||||
{"noextension", ""},
|
{"noextension", ""},
|
||||||
{"file.py", "python"},
|
{"file.py", "py"},
|
||||||
{"Dockerfile", "dockerfile"},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
|
|
@ -181,34 +180,6 @@ func TestScanFile_Good(t *testing.T) {
|
||||||
assert.Equal(t, "test-panic", findings[0].RuleID)
|
assert.Equal(t, "test-panic", findings[0].RuleID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestScanFile_Good_Python(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
file := filepath.Join(dir, "app.py")
|
|
||||||
err := os.WriteFile(file, []byte("print('hello')\n# TODO: fix\n"), 0o644)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
rules := []Rule{
|
|
||||||
{
|
|
||||||
ID: "python-todo",
|
|
||||||
Title: "Python TODO",
|
|
||||||
Severity: "low",
|
|
||||||
Languages: []string{"python"},
|
|
||||||
Pattern: `TODO`,
|
|
||||||
Fix: "Remove TODO",
|
|
||||||
Detection: "regex",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
s, err := NewScanner(rules)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
findings, err := s.ScanFile(file)
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, findings, 1)
|
|
||||||
assert.Equal(t, "python-todo", findings[0].RuleID)
|
|
||||||
assert.Equal(t, "python", DetectLanguage(file))
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestScanDir_Good_Subdirectories(t *testing.T) {
|
func TestScanDir_Good_Subdirectories(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
|
@ -238,42 +209,53 @@ func TestScanDir_Good_Subdirectories(t *testing.T) {
|
||||||
require.Len(t, findings, 1)
|
require.Len(t, findings, 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestScanDir_Good_SkipsHiddenRootDirectory(t *testing.T) {
|
func TestLanguagesFromRules_Good(t *testing.T) {
|
||||||
dir := t.TempDir()
|
|
||||||
hiddenDir := filepath.Join(dir, ".git")
|
|
||||||
require.NoError(t, os.MkdirAll(hiddenDir, 0o755))
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(hiddenDir, "main.go"), []byte("// TODO: hidden\n"), 0o644))
|
|
||||||
|
|
||||||
rules := []Rule{
|
rules := []Rule{
|
||||||
{
|
{Languages: []string{"go", "php"}},
|
||||||
ID: "test-001",
|
{Languages: []string{"go", "ts"}},
|
||||||
Title: "Found a TODO",
|
{Languages: []string{"py"}},
|
||||||
Severity: "low",
|
|
||||||
Languages: []string{"go"},
|
|
||||||
Pattern: `TODO`,
|
|
||||||
Fix: "Remove TODO",
|
|
||||||
Detection: "regex",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
langs := languagesFromRules(rules)
|
||||||
s, err := NewScanner(rules)
|
assert.Equal(t, []string{"go", "php", "py", "ts"}, langs)
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
findings, err := s.ScanDir(hiddenDir)
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Empty(t, findings)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestScanDir_Good_SkipsHiddenNestedDirectory(t *testing.T) {
|
func TestLanguagesFromRules_Good_Empty(t *testing.T) {
|
||||||
|
langs := languagesFromRules(nil)
|
||||||
|
assert.Empty(t, langs)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestIsExcludedDir_Good(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
want bool
|
||||||
|
}{
|
||||||
|
{"vendor", true},
|
||||||
|
{"node_modules", true},
|
||||||
|
{".git", true},
|
||||||
|
{"testdata", true},
|
||||||
|
{".core", true},
|
||||||
|
{".hidden", true}, // any dot-prefixed dir
|
||||||
|
{".idea", true}, // any dot-prefixed dir
|
||||||
|
{"src", false},
|
||||||
|
{"pkg", false},
|
||||||
|
{"cmd", false},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
assert.Equal(t, tt.want, IsExcludedDir(tt.name))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestScanFile_Bad_UnrecognisedExtension(t *testing.T) {
|
||||||
dir := t.TempDir()
|
dir := t.TempDir()
|
||||||
hiddenDir := filepath.Join(dir, "services", ".generated")
|
file := filepath.Join(dir, "readme.txt")
|
||||||
require.NoError(t, os.MkdirAll(hiddenDir, 0o755))
|
require.NoError(t, os.WriteFile(file, []byte("TODO: fix this"), 0o644))
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(hiddenDir, "main.go"), []byte("// TODO: hidden\n"), 0o644))
|
|
||||||
|
|
||||||
rules := []Rule{
|
rules := []Rule{
|
||||||
{
|
{
|
||||||
ID: "test-001",
|
ID: "test-001",
|
||||||
Title: "Found a TODO",
|
Title: "Found TODO",
|
||||||
Severity: "low",
|
Severity: "low",
|
||||||
Languages: []string{"go"},
|
Languages: []string{"go"},
|
||||||
Pattern: `TODO`,
|
Pattern: `TODO`,
|
||||||
|
|
@ -285,9 +267,29 @@ func TestScanDir_Good_SkipsHiddenNestedDirectory(t *testing.T) {
|
||||||
s, err := NewScanner(rules)
|
s, err := NewScanner(rules)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
findings, err := s.ScanDir(dir)
|
findings, err := s.ScanFile(file)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Empty(t, findings)
|
assert.Empty(t, findings, "should not match unrecognised extensions")
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestScanFile_Bad_NonexistentFile(t *testing.T) {
|
||||||
|
rules := []Rule{
|
||||||
|
{
|
||||||
|
ID: "test-001",
|
||||||
|
Title: "Test",
|
||||||
|
Severity: "low",
|
||||||
|
Languages: []string{"go"},
|
||||||
|
Pattern: `TODO`,
|
||||||
|
Fix: "Fix",
|
||||||
|
Detection: "regex",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
s, err := NewScanner(rules)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = s.ScanFile("/nonexistent/test.go")
|
||||||
|
assert.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestScanDir_Bad_NonexistentDir(t *testing.T) {
|
func TestScanDir_Bad_NonexistentDir(t *testing.T) {
|
||||||
|
|
|
||||||
|
|
@ -1,746 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"io/fs"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
coreio "forge.lthn.ai/core/go-io"
|
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
hookStartMarker = "# core-lint hook start"
|
|
||||||
hookEndMarker = "# core-lint hook end"
|
|
||||||
)
|
|
||||||
|
|
||||||
// RunInput is the DTO for `core-lint run` and the language/category shortcuts.
|
|
||||||
//
|
|
||||||
// input := lint.RunInput{Path: ".", Schedule: "nightly", Output: "json"}
|
|
||||||
// report, err := lint.NewService().Run(ctx, input)
|
|
||||||
type RunInput struct {
|
|
||||||
Path string `json:"path"`
|
|
||||||
Output string `json:"output,omitempty"`
|
|
||||||
Config string `json:"config,omitempty"`
|
|
||||||
Schedule string `json:"schedule,omitempty"`
|
|
||||||
FailOn string `json:"fail_on,omitempty"`
|
|
||||||
Category string `json:"category,omitempty"`
|
|
||||||
Lang string `json:"lang,omitempty"`
|
|
||||||
Hook bool `json:"hook,omitempty"`
|
|
||||||
CI bool `json:"ci,omitempty"`
|
|
||||||
Files []string `json:"files,omitempty"`
|
|
||||||
SBOM bool `json:"sbom,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToolInfo describes a supported linter tool and whether it is available in PATH.
|
|
||||||
//
|
|
||||||
// tools := lint.NewService().Tools([]string{"go"})
|
|
||||||
type ToolInfo struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Available bool `json:"available"`
|
|
||||||
Languages []string `json:"languages"`
|
|
||||||
Category string `json:"category"`
|
|
||||||
Entitlement string `json:"entitlement,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Report aggregates every tool run into a single output document.
|
|
||||||
//
|
|
||||||
// report, err := lint.NewService().Run(context.Background(), lint.RunInput{Path: ".", Output: "json"})
|
|
||||||
type Report struct {
|
|
||||||
Project string `json:"project"`
|
|
||||||
Timestamp time.Time `json:"timestamp"`
|
|
||||||
Duration string `json:"duration"`
|
|
||||||
Languages []string `json:"languages"`
|
|
||||||
Tools []ToolRun `json:"tools"`
|
|
||||||
Findings []Finding `json:"findings"`
|
|
||||||
Summary Summary `json:"summary"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// ToolRun records the execution status of one adapter.
|
|
||||||
type ToolRun struct {
|
|
||||||
Name string `json:"name"`
|
|
||||||
Version string `json:"version,omitempty"`
|
|
||||||
Status string `json:"status"`
|
|
||||||
Duration string `json:"duration"`
|
|
||||||
Findings int `json:"findings"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Service orchestrates the configured lint adapters for a project.
|
|
||||||
//
|
|
||||||
// svc := lint.NewService()
|
|
||||||
// report, err := svc.Run(ctx, lint.RunInput{Path: ".", Output: "json"})
|
|
||||||
type Service struct {
|
|
||||||
adapters []Adapter
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewService constructs a lint orchestrator with the built-in adapter registry.
|
|
||||||
//
|
|
||||||
// svc := lint.NewService()
|
|
||||||
func NewService() *Service {
|
|
||||||
return &Service{adapters: defaultAdapters()}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run executes the selected adapters and returns the merged report.
|
|
||||||
//
|
|
||||||
// report, err := lint.NewService().Run(ctx, lint.RunInput{Path: ".", Output: "json"})
|
|
||||||
func (service *Service) Run(ctx context.Context, input RunInput) (Report, error) {
|
|
||||||
startedAt := time.Now().UTC()
|
|
||||||
input = normaliseRunInput(input)
|
|
||||||
|
|
||||||
config, _, err := LoadProjectConfig(input.Path, input.Config)
|
|
||||||
if err != nil {
|
|
||||||
return Report{}, err
|
|
||||||
}
|
|
||||||
schedule, err := ResolveSchedule(config, input.Schedule)
|
|
||||||
if err != nil {
|
|
||||||
return Report{}, err
|
|
||||||
}
|
|
||||||
if input.FailOn == "" && schedule != nil && schedule.FailOn != "" {
|
|
||||||
input.FailOn = schedule.FailOn
|
|
||||||
}
|
|
||||||
if input.FailOn == "" {
|
|
||||||
input.FailOn = config.FailOn
|
|
||||||
}
|
|
||||||
|
|
||||||
files, scoped, err := service.scopeFiles(input.Path, config, input, schedule)
|
|
||||||
if err != nil {
|
|
||||||
return Report{}, err
|
|
||||||
}
|
|
||||||
if input.Hook && len(files) == 0 {
|
|
||||||
report := Report{
|
|
||||||
Project: projectName(input.Path),
|
|
||||||
Timestamp: startedAt,
|
|
||||||
Duration: time.Since(startedAt).Round(time.Millisecond).String(),
|
|
||||||
Languages: []string{},
|
|
||||||
Tools: []ToolRun{},
|
|
||||||
Findings: []Finding{},
|
|
||||||
Summary: Summarise(nil),
|
|
||||||
}
|
|
||||||
report.Summary.Passed = passesThreshold(report.Summary, input.FailOn)
|
|
||||||
return report, nil
|
|
||||||
}
|
|
||||||
if scoped && len(files) == 0 {
|
|
||||||
report := Report{
|
|
||||||
Project: projectName(input.Path),
|
|
||||||
Timestamp: startedAt,
|
|
||||||
Duration: time.Since(startedAt).Round(time.Millisecond).String(),
|
|
||||||
Languages: []string{},
|
|
||||||
Tools: []ToolRun{},
|
|
||||||
Findings: []Finding{},
|
|
||||||
Summary: Summarise(nil),
|
|
||||||
}
|
|
||||||
report.Summary.Passed = passesThreshold(report.Summary, input.FailOn)
|
|
||||||
return report, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
languages := service.languagesForInput(input, files, scoped)
|
|
||||||
selectedAdapters := service.selectAdapters(config, languages, input, schedule)
|
|
||||||
|
|
||||||
var findings []Finding
|
|
||||||
var toolRuns []ToolRun
|
|
||||||
|
|
||||||
for _, adapter := range selectedAdapters {
|
|
||||||
if input.Hook && !adapter.Fast() {
|
|
||||||
toolRuns = append(toolRuns, ToolRun{
|
|
||||||
Name: adapter.Name(),
|
|
||||||
Status: "skipped",
|
|
||||||
Duration: "0s",
|
|
||||||
Findings: 0,
|
|
||||||
})
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
result := adapter.Run(ctx, input, files)
|
|
||||||
toolRuns = append(toolRuns, result.Tool)
|
|
||||||
findings = append(findings, normaliseReportFindings(result.Findings, input.Path)...)
|
|
||||||
}
|
|
||||||
|
|
||||||
findings = dedupeFindings(findings)
|
|
||||||
sortToolRuns(toolRuns)
|
|
||||||
sortFindings(findings)
|
|
||||||
if languages == nil {
|
|
||||||
languages = []string{}
|
|
||||||
}
|
|
||||||
if toolRuns == nil {
|
|
||||||
toolRuns = []ToolRun{}
|
|
||||||
}
|
|
||||||
if findings == nil {
|
|
||||||
findings = []Finding{}
|
|
||||||
}
|
|
||||||
|
|
||||||
report := Report{
|
|
||||||
Project: projectName(input.Path),
|
|
||||||
Timestamp: startedAt,
|
|
||||||
Duration: time.Since(startedAt).Round(time.Millisecond).String(),
|
|
||||||
Languages: slices.Clone(languages),
|
|
||||||
Tools: toolRuns,
|
|
||||||
Findings: findings,
|
|
||||||
Summary: Summarise(findings),
|
|
||||||
}
|
|
||||||
report.Summary.Passed = passesThreshold(report.Summary, input.FailOn)
|
|
||||||
|
|
||||||
return report, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tools returns the current adapter inventory for display in the CLI.
|
|
||||||
//
|
|
||||||
// tools := lint.NewService().Tools([]string{"go"})
|
|
||||||
func (service *Service) Tools(languages []string) []ToolInfo {
|
|
||||||
var tools []ToolInfo
|
|
||||||
for _, adapter := range service.adapters {
|
|
||||||
if len(languages) > 0 && !adapter.MatchesLanguage(languages) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
tools = append(tools, ToolInfo{
|
|
||||||
Name: adapter.Name(),
|
|
||||||
Available: adapter.Available(),
|
|
||||||
Languages: slices.Clone(adapter.Languages()),
|
|
||||||
Category: adapter.Category(),
|
|
||||||
Entitlement: adapter.Entitlement(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
slices.SortFunc(tools, func(left ToolInfo, right ToolInfo) int {
|
|
||||||
return strings.Compare(left.Name, right.Name)
|
|
||||||
})
|
|
||||||
if tools == nil {
|
|
||||||
return []ToolInfo{}
|
|
||||||
}
|
|
||||||
return tools
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteDefaultConfig creates `.core/lint.yaml` in the target project.
|
|
||||||
//
|
|
||||||
// path, err := svc.WriteDefaultConfig(".", false)
|
|
||||||
func (service *Service) WriteDefaultConfig(projectPath string, force bool) (string, error) {
|
|
||||||
if projectPath == "" {
|
|
||||||
projectPath = "."
|
|
||||||
}
|
|
||||||
|
|
||||||
targetPath := filepath.Join(projectPath, DefaultConfigPath)
|
|
||||||
if !force {
|
|
||||||
if _, err := os.Stat(targetPath); err == nil {
|
|
||||||
return "", coreerr.E("Service.WriteDefaultConfig", targetPath+" already exists", nil)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.MkdirAll(filepath.Dir(targetPath), 0o755); err != nil {
|
|
||||||
return "", coreerr.E("Service.WriteDefaultConfig", "mkdir "+filepath.Dir(targetPath), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
content, err := DefaultConfigYAML()
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
if err := coreio.Local.Write(targetPath, content); err != nil {
|
|
||||||
return "", coreerr.E("Service.WriteDefaultConfig", "write "+targetPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return targetPath, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// InstallHook adds a git pre-commit hook that runs `core-lint run --hook`.
|
|
||||||
//
|
|
||||||
// _ = lint.NewService().InstallHook(".")
|
|
||||||
func (service *Service) InstallHook(projectPath string) error {
|
|
||||||
hookPath, err := hookFilePath(projectPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
block := hookScriptBlock(false)
|
|
||||||
content := "#!/bin/sh\n" + block
|
|
||||||
|
|
||||||
raw, readErr := coreio.Local.Read(hookPath)
|
|
||||||
if readErr == nil {
|
|
||||||
if strings.Contains(raw, hookStartMarker) {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
trimmed := strings.TrimRight(raw, "\n")
|
|
||||||
if trimmed == "" {
|
|
||||||
content = "#!/bin/sh\n" + block
|
|
||||||
} else {
|
|
||||||
content = trimmed + "\n\n" + hookScriptBlock(true)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := os.MkdirAll(filepath.Dir(hookPath), 0o755); err != nil {
|
|
||||||
return coreerr.E("Service.InstallHook", "mkdir "+filepath.Dir(hookPath), err)
|
|
||||||
}
|
|
||||||
if err := coreio.Local.Write(hookPath, content); err != nil {
|
|
||||||
return coreerr.E("Service.InstallHook", "write "+hookPath, err)
|
|
||||||
}
|
|
||||||
if err := os.Chmod(hookPath, 0o755); err != nil {
|
|
||||||
return coreerr.E("Service.InstallHook", "chmod "+hookPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RemoveHook removes the block previously installed by InstallHook.
|
|
||||||
//
|
|
||||||
// _ = lint.NewService().RemoveHook(".")
|
|
||||||
func (service *Service) RemoveHook(projectPath string) error {
|
|
||||||
hookPath, err := hookFilePath(projectPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
raw, err := coreio.Local.Read(hookPath)
|
|
||||||
if err != nil {
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return coreerr.E("Service.RemoveHook", "read "+hookPath, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
startIndex := strings.Index(raw, hookStartMarker)
|
|
||||||
endIndex := strings.Index(raw, hookEndMarker)
|
|
||||||
if startIndex < 0 || endIndex < 0 || endIndex < startIndex {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
endIndex += len(hookEndMarker)
|
|
||||||
content := strings.TrimRight(raw[:startIndex]+raw[endIndex:], "\n")
|
|
||||||
if strings.TrimSpace(content) == "" {
|
|
||||||
if err := os.Remove(hookPath); err != nil && !os.IsNotExist(err) {
|
|
||||||
return coreerr.E("Service.RemoveHook", "remove "+hookPath, err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := coreio.Local.Write(hookPath, content); err != nil {
|
|
||||||
return coreerr.E("Service.RemoveHook", "write "+hookPath, err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (service *Service) languagesForInput(input RunInput, files []string, scoped bool) []string {
|
|
||||||
if input.Lang != "" {
|
|
||||||
return []string{input.Lang}
|
|
||||||
}
|
|
||||||
if scoped {
|
|
||||||
return detectFromFiles(files)
|
|
||||||
}
|
|
||||||
return Detect(input.Path)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (service *Service) scopeFiles(projectPath string, config LintConfig, input RunInput, schedule *Schedule) ([]string, bool, error) {
|
|
||||||
if input.Files != nil {
|
|
||||||
return slices.Clone(input.Files), true, nil
|
|
||||||
}
|
|
||||||
if input.Hook {
|
|
||||||
files, err := service.stagedFiles(projectPath)
|
|
||||||
return files, true, err
|
|
||||||
}
|
|
||||||
if schedule != nil && len(schedule.Paths) > 0 {
|
|
||||||
files, err := collectConfiguredFiles(projectPath, schedule.Paths, config.Exclude)
|
|
||||||
return files, true, err
|
|
||||||
}
|
|
||||||
if !slices.Equal(config.Paths, DefaultConfig().Paths) || !slices.Equal(config.Exclude, DefaultConfig().Exclude) {
|
|
||||||
files, err := collectConfiguredFiles(projectPath, config.Paths, config.Exclude)
|
|
||||||
return files, true, err
|
|
||||||
}
|
|
||||||
return nil, false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (service *Service) selectAdapters(config LintConfig, languages []string, input RunInput, schedule *Schedule) []Adapter {
|
|
||||||
categories := selectedCategories(input, schedule)
|
|
||||||
enabled := make(map[string]bool)
|
|
||||||
for _, name := range enabledToolNames(config, languages, input, categories) {
|
|
||||||
enabled[name] = true
|
|
||||||
}
|
|
||||||
|
|
||||||
var selected []Adapter
|
|
||||||
for _, adapter := range service.adapters {
|
|
||||||
if len(enabled) > 0 && !enabled[adapter.Name()] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if len(categories) > 0 && !slices.Contains(categories, adapter.Category()) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !adapter.MatchesLanguage(languages) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
selected = append(selected, adapter)
|
|
||||||
}
|
|
||||||
|
|
||||||
if slices.Contains(languages, "go") && !slices.Contains(categories, "compliance") {
|
|
||||||
if !hasAdapter(selected, "catalog") {
|
|
||||||
selected = append([]Adapter{newCatalogAdapter()}, selected...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return selected
|
|
||||||
}
|
|
||||||
|
|
||||||
func (service *Service) stagedFiles(projectPath string) ([]string, error) {
|
|
||||||
toolkit := NewToolkit(projectPath)
|
|
||||||
stdout, stderr, exitCode, err := toolkit.Run("git", "diff", "--cached", "--name-only")
|
|
||||||
if err != nil && exitCode != 0 {
|
|
||||||
return nil, coreerr.E("Service.stagedFiles", "git diff --cached --name-only: "+strings.TrimSpace(stderr), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
var files []string
|
|
||||||
for line := range strings.SplitSeq(strings.TrimSpace(stdout), "\n") {
|
|
||||||
line = strings.TrimSpace(line)
|
|
||||||
if line == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
files = append(files, line)
|
|
||||||
}
|
|
||||||
return files, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func collectConfiguredFiles(projectPath string, paths []string, excludes []string) ([]string, error) {
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
var files []string
|
|
||||||
|
|
||||||
for _, path := range paths {
|
|
||||||
if path == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
absolutePath := path
|
|
||||||
if !filepath.IsAbs(absolutePath) {
|
|
||||||
absolutePath = filepath.Join(projectPath, path)
|
|
||||||
}
|
|
||||||
|
|
||||||
info, err := os.Stat(absolutePath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, coreerr.E("collectConfiguredFiles", "stat "+absolutePath, err)
|
|
||||||
}
|
|
||||||
if info.IsDir() && shouldSkipTraversalRoot(absolutePath) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
addFile := func(candidate string) {
|
|
||||||
relativePath := relativeConfiguredPath(projectPath, candidate)
|
|
||||||
if hasHiddenDirectory(relativePath) || hasHiddenDirectory(filepath.ToSlash(filepath.Clean(candidate))) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if matchesConfiguredExclude(relativePath, excludes) || matchesConfiguredExclude(filepath.ToSlash(filepath.Clean(candidate)), excludes) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if seen[relativePath] {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
seen[relativePath] = true
|
|
||||||
files = append(files, relativePath)
|
|
||||||
}
|
|
||||||
|
|
||||||
if !info.IsDir() {
|
|
||||||
addFile(absolutePath)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
walkErr := filepath.WalkDir(absolutePath, func(currentPath string, entry fs.DirEntry, walkErr error) error {
|
|
||||||
if walkErr != nil {
|
|
||||||
return walkErr
|
|
||||||
}
|
|
||||||
if entry.IsDir() {
|
|
||||||
relativeDir := relativeConfiguredPath(projectPath, currentPath)
|
|
||||||
if matchesConfiguredExclude(relativeDir, excludes) || matchesConfiguredExclude(filepath.ToSlash(filepath.Clean(currentPath)), excludes) {
|
|
||||||
return filepath.SkipDir
|
|
||||||
}
|
|
||||||
if currentPath != absolutePath && IsExcludedDir(entry.Name()) {
|
|
||||||
return filepath.SkipDir
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
addFile(currentPath)
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
if walkErr != nil {
|
|
||||||
return nil, coreerr.E("collectConfiguredFiles", "walk "+absolutePath, walkErr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
slices.Sort(files)
|
|
||||||
return files, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func relativeConfiguredPath(projectPath string, candidate string) string {
|
|
||||||
relativePath := candidate
|
|
||||||
if projectPath != "" {
|
|
||||||
if rel, relErr := filepath.Rel(projectPath, candidate); relErr == nil && rel != "" && !strings.HasPrefix(rel, "..") {
|
|
||||||
relativePath = rel
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return filepath.ToSlash(filepath.Clean(relativePath))
|
|
||||||
}
|
|
||||||
|
|
||||||
func matchesConfiguredExclude(candidate string, excludes []string) bool {
|
|
||||||
if candidate == "" || len(excludes) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
normalisedCandidate := filepath.ToSlash(filepath.Clean(candidate))
|
|
||||||
for _, exclude := range excludes {
|
|
||||||
normalisedExclude := filepath.ToSlash(filepath.Clean(strings.TrimSpace(exclude)))
|
|
||||||
if normalisedExclude == "." || normalisedExclude == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
normalisedExclude = strings.TrimSuffix(normalisedExclude, "/")
|
|
||||||
if normalisedCandidate == normalisedExclude {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if strings.HasPrefix(normalisedCandidate, normalisedExclude+"/") {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func hasHiddenDirectory(candidate string) bool {
|
|
||||||
if candidate == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, segment := range strings.Split(filepath.ToSlash(filepath.Clean(candidate)), "/") {
|
|
||||||
if segment == "" || segment == "." || segment == ".." {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if strings.HasPrefix(segment, ".") {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func enabledToolNames(config LintConfig, languages []string, input RunInput, categories []string) []string {
|
|
||||||
var names []string
|
|
||||||
|
|
||||||
if slices.Contains(categories, "security") {
|
|
||||||
names = append(names, config.Lint.Security...)
|
|
||||||
}
|
|
||||||
if slices.Contains(categories, "compliance") {
|
|
||||||
names = append(names, config.Lint.Compliance...)
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Lang != "" {
|
|
||||||
names = append(names, groupForLanguage(config.Lint, input.Lang)...)
|
|
||||||
} else if shouldIncludeLanguageGroups(categories) {
|
|
||||||
for _, language := range languages {
|
|
||||||
names = append(names, groupForLanguage(config.Lint, language)...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if input.Lang == "" && shouldIncludeInfraGroups(categories) {
|
|
||||||
names = append(names, config.Lint.Infra...)
|
|
||||||
}
|
|
||||||
if input.Lang == "" {
|
|
||||||
if input.CI {
|
|
||||||
names = append(names, config.Lint.Security...)
|
|
||||||
}
|
|
||||||
if input.SBOM {
|
|
||||||
names = append(names, config.Lint.Compliance...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return dedupeStrings(names)
|
|
||||||
}
|
|
||||||
|
|
||||||
func selectedCategories(input RunInput, schedule *Schedule) []string {
|
|
||||||
if input.Category != "" {
|
|
||||||
return []string{input.Category}
|
|
||||||
}
|
|
||||||
if schedule == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return slices.Clone(schedule.Categories)
|
|
||||||
}
|
|
||||||
|
|
||||||
func shouldIncludeLanguageGroups(categories []string) bool {
|
|
||||||
if len(categories) == 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
for _, category := range categories {
|
|
||||||
switch category {
|
|
||||||
case "security", "compliance":
|
|
||||||
continue
|
|
||||||
default:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func shouldIncludeInfraGroups(categories []string) bool {
|
|
||||||
if len(categories) == 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
for _, category := range categories {
|
|
||||||
switch category {
|
|
||||||
case "security", "compliance":
|
|
||||||
continue
|
|
||||||
default:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func groupForLanguage(groups ToolGroups, language string) []string {
|
|
||||||
switch language {
|
|
||||||
case "go":
|
|
||||||
return groups.Go
|
|
||||||
case "php":
|
|
||||||
return groups.PHP
|
|
||||||
case "js":
|
|
||||||
return groups.JS
|
|
||||||
case "ts":
|
|
||||||
return groups.TS
|
|
||||||
case "python":
|
|
||||||
return groups.Python
|
|
||||||
case "shell", "dockerfile", "yaml", "json", "markdown":
|
|
||||||
return groups.Infra
|
|
||||||
default:
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func hookFilePath(projectPath string) (string, error) {
|
|
||||||
if projectPath == "" {
|
|
||||||
projectPath = "."
|
|
||||||
}
|
|
||||||
|
|
||||||
toolkit := NewToolkit(projectPath)
|
|
||||||
stdout, stderr, exitCode, err := toolkit.Run("git", "rev-parse", "--git-dir")
|
|
||||||
if err != nil && exitCode != 0 {
|
|
||||||
return "", coreerr.E("hookFilePath", "git rev-parse --git-dir: "+strings.TrimSpace(stderr), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
gitDir := strings.TrimSpace(stdout)
|
|
||||||
if gitDir == "" {
|
|
||||||
return "", coreerr.E("hookFilePath", "git directory is empty", nil)
|
|
||||||
}
|
|
||||||
if !filepath.IsAbs(gitDir) {
|
|
||||||
gitDir = filepath.Join(projectPath, gitDir)
|
|
||||||
}
|
|
||||||
return filepath.Join(gitDir, "hooks", "pre-commit"), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func hookScriptBlock(appended bool) string {
|
|
||||||
command := "exec core-lint run --hook"
|
|
||||||
if appended {
|
|
||||||
command = "core-lint run --hook || exit $?"
|
|
||||||
}
|
|
||||||
|
|
||||||
return hookStartMarker + "\n# Installed by core-lint\n" + command + "\n" + hookEndMarker + "\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
func normaliseRunInput(input RunInput) RunInput {
|
|
||||||
if input.Path == "" {
|
|
||||||
input.Path = "."
|
|
||||||
}
|
|
||||||
if input.CI && input.Output == "" {
|
|
||||||
input.Output = "github"
|
|
||||||
}
|
|
||||||
return input
|
|
||||||
}
|
|
||||||
|
|
||||||
func normaliseReportFindings(findings []Finding, projectPath string) []Finding {
|
|
||||||
normalised := make([]Finding, 0, len(findings))
|
|
||||||
for _, finding := range findings {
|
|
||||||
if finding.Code == "" {
|
|
||||||
finding.Code = finding.RuleID
|
|
||||||
}
|
|
||||||
if finding.Message == "" {
|
|
||||||
finding.Message = finding.Title
|
|
||||||
}
|
|
||||||
if finding.Tool == "" {
|
|
||||||
finding.Tool = "catalog"
|
|
||||||
}
|
|
||||||
if finding.Severity == "" {
|
|
||||||
finding.Severity = "warning"
|
|
||||||
} else {
|
|
||||||
finding.Severity = normaliseSeverity(finding.Severity)
|
|
||||||
}
|
|
||||||
if finding.File != "" && projectPath != "" {
|
|
||||||
if relativePath, err := filepath.Rel(projectPath, finding.File); err == nil && relativePath != "" && !strings.HasPrefix(relativePath, "..") {
|
|
||||||
finding.File = filepath.ToSlash(relativePath)
|
|
||||||
} else {
|
|
||||||
finding.File = filepath.ToSlash(finding.File)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
normalised = append(normalised, finding)
|
|
||||||
}
|
|
||||||
return normalised
|
|
||||||
}
|
|
||||||
|
|
||||||
func projectName(path string) string {
|
|
||||||
absolutePath, err := filepath.Abs(path)
|
|
||||||
if err != nil {
|
|
||||||
return filepath.Base(path)
|
|
||||||
}
|
|
||||||
return filepath.Base(absolutePath)
|
|
||||||
}
|
|
||||||
|
|
||||||
func dedupeStrings(values []string) []string {
|
|
||||||
seen := make(map[string]bool)
|
|
||||||
var deduped []string
|
|
||||||
for _, value := range values {
|
|
||||||
if value == "" || seen[value] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[value] = true
|
|
||||||
deduped = append(deduped, value)
|
|
||||||
}
|
|
||||||
return deduped
|
|
||||||
}
|
|
||||||
|
|
||||||
func hasAdapter(adapters []Adapter, name string) bool {
|
|
||||||
for _, adapter := range adapters {
|
|
||||||
if adapter.Name() == name {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func passesThreshold(summary Summary, threshold string) bool {
|
|
||||||
switch strings.ToLower(strings.TrimSpace(threshold)) {
|
|
||||||
case "", "error":
|
|
||||||
return summary.Errors == 0
|
|
||||||
case "warning":
|
|
||||||
return summary.Errors == 0 && summary.Warnings == 0
|
|
||||||
case "info":
|
|
||||||
return summary.Total == 0
|
|
||||||
default:
|
|
||||||
return summary.Errors == 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortFindings(findings []Finding) {
|
|
||||||
slices.SortFunc(findings, func(left Finding, right Finding) int {
|
|
||||||
switch {
|
|
||||||
case left.File != right.File:
|
|
||||||
return strings.Compare(left.File, right.File)
|
|
||||||
case left.Line != right.Line:
|
|
||||||
if left.Line < right.Line {
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
return 1
|
|
||||||
case left.Column != right.Column:
|
|
||||||
if left.Column < right.Column {
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
return 1
|
|
||||||
case left.Tool != right.Tool:
|
|
||||||
return strings.Compare(left.Tool, right.Tool)
|
|
||||||
default:
|
|
||||||
return strings.Compare(left.Code, right.Code)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func sortToolRuns(toolRuns []ToolRun) {
|
|
||||||
slices.SortFunc(toolRuns, func(left ToolRun, right ToolRun) int {
|
|
||||||
return strings.Compare(left.Name, right.Name)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -1,624 +0,0 @@
|
||||||
package lint
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
// TestServiceRun_Good_CatalogFindings verifies that a single catalog
// violation in a Go module is reported with the expected tool, code,
// category, and severity, and that it fails the "warning" threshold.
func TestServiceRun_Good_CatalogFindings(t *testing.T) {
	dir := t.TempDir()
	// go.mod makes language detection identify the directory as a Go project.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// Fixture that the catalog adapter flags as rule go-cor-003.
	// NOTE(review): the exact rule semantics live in the catalog adapter — confirm there.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "input.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("data")
}
`), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	// Exactly one warning-level catalog finding is expected, and with
	// FailOn=warning it must fail the run.
	require.Len(t, report.Findings, 1)
	assert.Equal(t, "warning", report.Findings[0].Severity)
	assert.Equal(t, "catalog", report.Findings[0].Tool)
	assert.Equal(t, "go-cor-003", report.Findings[0].Code)
	assert.Equal(t, "correctness", report.Findings[0].Category)
	assert.Equal(t, 1, report.Summary.Total)
	assert.Equal(t, 1, report.Summary.Warnings)
	assert.False(t, report.Summary.Passed)
	assert.Contains(t, report.Languages, "go")
	require.NotEmpty(t, report.Tools)
	assert.Equal(t, "catalog", report.Tools[0].Name)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_UsesConfiguredPaths verifies that a "paths" entry
// in .core/lint.yaml restricts scanning to the listed directories: the
// root-level file is ignored and only services/scoped.go is reported.
func TestServiceRun_Good_UsesConfiguredPaths(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// Both fixtures trigger the same catalog finding; only the scoped one
	// should survive the path filter.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("root")
}
`), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, "services"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "services", "scoped.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("scoped")
}
`), 0o644))
	// Config limits scanning to the services directory.
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte("paths:\n - services\n"), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Findings, 1)
	assert.Equal(t, "services/scoped.go", report.Findings[0].File)
	assert.Equal(t, 1, report.Summary.Total)
	assert.False(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_ExplicitEmptyFilesSkipsScanning verifies that an
// explicitly empty (non-nil) Files slice disables scanning entirely:
// no languages, tools, or findings are produced and the run passes.
func TestServiceRun_Good_ExplicitEmptyFilesSkipsScanning(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// root.go would trigger a catalog finding if it were scanned.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("root")
}
`), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		// Empty slice (not nil) means "lint exactly these files" — i.e. none.
		Files:  []string{},
		FailOn: "warning",
	})
	require.NoError(t, err)

	assert.Empty(t, report.Languages)
	assert.Empty(t, report.Tools)
	assert.Empty(t, report.Findings)
	assert.True(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_UsesConfiguredExclude verifies that an "exclude"
// entry in .core/lint.yaml removes the listed directory from scanning:
// only the root-level file is reported.
func TestServiceRun_Good_UsesConfiguredExclude(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// Both fixtures trigger the same catalog finding; the excluded one
	// must be filtered out.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("root")
}
`), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, "services"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "services", "scoped.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("scoped")
}
`), 0o644))
	// Config excludes the services directory from scanning.
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte("exclude:\n - services\n"), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Findings, 1)
	assert.Equal(t, "root.go", report.Findings[0].File)
	assert.Equal(t, 1, report.Summary.Total)
	assert.False(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_SkipsHiddenConfiguredRootDirectory verifies that a
// hidden (dot-prefixed) directory is skipped even when it is explicitly
// listed under "paths" in .core/lint.yaml: nothing is scanned at all.
func TestServiceRun_Good_SkipsHiddenConfiguredRootDirectory(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// Fixture inside a hidden directory would trigger a catalog finding
	// if hidden paths were scanned.
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".hidden"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".hidden", "scoped.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("scoped")
}
`), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte("paths:\n - .hidden\n"), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	assert.Empty(t, report.Findings)
	assert.Empty(t, report.Tools)
	assert.True(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_SkipsHiddenConfiguredFilePath verifies that a file
// inside a hidden directory is skipped even when named individually in
// the "paths" config, while the sibling non-hidden file is still scanned.
func TestServiceRun_Good_SkipsHiddenConfiguredFilePath(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("root")
}
`), 0o644))
	// Hidden fixture: listed in config, but must still be excluded.
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".hidden"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".hidden", "scoped.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("hidden")
}
`), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte("paths:\n - root.go\n - .hidden/scoped.go\n"), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	// Only the non-hidden file produces a finding.
	require.Len(t, report.Findings, 1)
	assert.Equal(t, "root.go", report.Findings[0].File)
	assert.Equal(t, 1, report.Summary.Total)
	assert.False(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_UsesNamedSchedule verifies that RunInput.Schedule
// selects a named schedule from .core/lint.yaml, applying that schedule's
// fail_on threshold and path restriction.
func TestServiceRun_Good_UsesNamedSchedule(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "root.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("root")
}
`), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, "services"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "services", "scoped.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func Run() {
	svc := service{}
	_ = svc.Process("scoped")
}
`), 0o644))
	// The "nightly" schedule scopes scanning to services/ and fails on warnings.
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte(`schedules:
  nightly:
    fail_on: warning
    paths:
      - services
`), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	// Note: no FailOn is set here — the threshold comes from the schedule.
	report, err := svc.Run(context.Background(), RunInput{
		Path:     dir,
		Schedule: "nightly",
	})
	require.NoError(t, err)

	require.Len(t, report.Findings, 1)
	assert.Equal(t, "services/scoped.go", report.Findings[0].File)
	assert.Equal(t, 1, report.Summary.Total)
	assert.False(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_LanguageShortcutIgnoresCiAndSbomGroups verifies
// that a language shortcut (Lang: "go") selects only the tools listed
// under that language group, even when CI and SBOM flags are set —
// the security and compliance groups must not be pulled in.
func TestServiceRun_Good_LanguageShortcutIgnoresCiAndSbomGroups(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte(`lint:
  go:
    - catalog
    - go-tool
  security:
    - security-tool
  compliance:
    - compliance-tool
`), 0o644))

	// shortcutAdapter stubs always report "available" and match all languages.
	svc := &Service{adapters: []Adapter{
		shortcutAdapter{name: "go-tool", category: "correctness"},
		shortcutAdapter{name: "security-tool", category: "security"},
		shortcutAdapter{name: "compliance-tool", category: "compliance"},
	}}

	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		Lang:   "go",
		CI:     true,
		SBOM:   true,
		FailOn: "warning",
	})
	require.NoError(t, err)

	// Only the two tools from the "go" group run.
	require.Len(t, report.Tools, 2)
	assert.Equal(t, []string{"catalog", "go-tool"}, []string{report.Tools[0].Name, report.Tools[1].Name})
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_LanguageShortcutExcludesInfraGroup verifies that a
// language shortcut (Lang: "php") does not pull in the "infra" tool group.
func TestServiceRun_Good_LanguageShortcutExcludesInfraGroup(t *testing.T) {
	dir := t.TempDir()
	// composer.json makes language detection identify a PHP project.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "composer.json"), []byte("{\n \"name\": \"example/test\"\n}\n"), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte(`lint:
  php:
    - php-tool
  infra:
    - shell-tool
`), 0o644))

	svc := &Service{adapters: []Adapter{
		shortcutAdapter{name: "php-tool", category: "correctness"},
		shortcutAdapter{name: "shell-tool", category: "correctness"},
	}}

	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		Lang:   "php",
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Tools, 1)
	assert.Equal(t, "php-tool", report.Tools[0].Name)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_HookModeUsesStagedFiles verifies that Hook mode
// lints only git-staged files: the staged fixture is reported while the
// unstaged one is ignored. Skipped when git is not installed.
func TestServiceRun_Good_HookModeUsesStagedFiles(t *testing.T) {
	if _, err := exec.LookPath("git"); err != nil {
		t.Skip("git not available")
	}

	dir := t.TempDir()
	runTestCommand(t, dir, "git", "init")
	// Identity config so git operations work in a clean environment.
	runTestCommand(t, dir, "git", "config", "user.email", "test@example.com")
	runTestCommand(t, dir, "git", "config", "user.name", "Test User")
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// staged.go triggers catalog rule go-cor-003 and will be staged.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "staged.go"), []byte(`package sample

type service struct{}

func (service) Process(string) error { return nil }

func run() {
	svc := service{}
	_ = svc.Process("data")
}
`), 0o644))
	// unstaged.go is never added, so hook mode must ignore it.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "unstaged.go"), []byte(`package sample

func run2() {
	panic("boom")
}
`), 0o644))

	runTestCommand(t, dir, "git", "add", "go.mod", "staged.go")

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		Hook:   true,
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Findings, 1)
	assert.Equal(t, "staged.go", report.Findings[0].File)
	assert.Equal(t, "go-cor-003", report.Findings[0].Code)
	assert.False(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_HookModeWithNoStagedFilesSkipsScanning verifies
// that Hook mode with an empty git index scans nothing and passes.
// Skipped when git is not installed.
func TestServiceRun_Good_HookModeWithNoStagedFilesSkipsScanning(t *testing.T) {
	if _, err := exec.LookPath("git"); err != nil {
		t.Skip("git not available")
	}

	dir := t.TempDir()
	runTestCommand(t, dir, "git", "init")
	runTestCommand(t, dir, "git", "config", "user.email", "test@example.com")
	runTestCommand(t, dir, "git", "config", "user.name", "Test User")
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	// Present on disk but never staged — hook mode must not see it.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "unstaged.go"), []byte(`package sample

func run() {
	panic("boom")
}
`), 0o644))

	svc := &Service{adapters: []Adapter{newCatalogAdapter()}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		Hook:   true,
		FailOn: "warning",
	})
	require.NoError(t, err)

	assert.Empty(t, report.Languages)
	assert.Empty(t, report.Tools)
	assert.Empty(t, report.Findings)
	assert.True(t, report.Summary.Passed)
}
|
|
||||||
|
|
||||||
// TestServiceRemoveHook_PreservesExistingHookContent verifies that an
// InstallHook/RemoveHook round trip restores a pre-existing pre-commit
// hook byte-for-byte. Skipped when git is not installed.
func TestServiceRemoveHook_PreservesExistingHookContent(t *testing.T) {
	if _, err := exec.LookPath("git"); err != nil {
		t.Skip("git not available")
	}

	dir := t.TempDir()
	runTestCommand(t, dir, "git", "init")

	// A user-authored hook that must survive install + remove.
	original := "\n# custom hook\nprintf 'keep'"
	hookDir := filepath.Join(dir, ".git", "hooks")
	require.NoError(t, os.MkdirAll(hookDir, 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(hookDir, "pre-commit"), []byte(original), 0o755))

	svc := NewService()
	require.NoError(t, svc.InstallHook(dir))
	require.NoError(t, svc.RemoveHook(dir))

	restored, err := os.ReadFile(filepath.Join(hookDir, "pre-commit"))
	require.NoError(t, err)
	assert.Equal(t, original, string(restored))
}
|
|
||||||
|
|
||||||
// TestServiceRun_JS_PrettierFindings verifies that a mocked prettier
// binary reporting one unformatted file yields a single warning finding
// and a "failed" tool run status.
func TestServiceRun_JS_PrettierFindings(t *testing.T) {
	dir := t.TempDir()
	// package.json makes language detection identify a JS project.
	require.NoError(t, os.WriteFile(filepath.Join(dir, "package.json"), []byte("{\n \"name\": \"example\"\n}\n"), 0o644))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "index.js"), []byte("const value = 1;\n"), 0o644))

	// Mock prettier: prints "index.js" and exits 1, as --list-different
	// does when a file needs reformatting.
	setupMockCmdExit(t, "prettier", "index.js\n", "", 1)

	svc := &Service{adapters: []Adapter{
		newCommandAdapter("prettier", []string{"prettier"}, []string{"js"}, "style", "", false, true, projectPathArguments("--list-different"), parsePrettierDiagnostics),
	}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Findings, 1)
	require.Len(t, report.Tools, 1)
	assert.Equal(t, "prettier", report.Findings[0].Tool)
	assert.Equal(t, "index.js", report.Findings[0].File)
	assert.Equal(t, "prettier-format", report.Findings[0].Code)
	assert.Equal(t, "warning", report.Findings[0].Severity)
	assert.False(t, report.Summary.Passed)
	assert.Equal(t, "prettier", report.Tools[0].Name)
	assert.Equal(t, "failed", report.Tools[0].Status)
	assert.Equal(t, 1, report.Tools[0].Findings)
}
|
|
||||||
|
|
||||||
// TestServiceRun_CapturesToolVersion verifies that the service records a
// tool's --version output on the ToolRun. A shell-script stand-in for
// prettier is placed first on PATH so both the version probe and the
// lint invocation hit the mock.
func TestServiceRun_CapturesToolVersion(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "package.json"), []byte("{\n \"name\": \"example\"\n}\n"), 0o644))
	require.NoError(t, os.WriteFile(filepath.Join(dir, "index.js"), []byte("const value = 1;\n"), 0o644))

	binDir := t.TempDir()
	scriptPath := filepath.Join(binDir, "prettier")
	// Mock answers --version with a fixed string and --list-different with
	// one unformatted file (exit 1, matching real prettier behavior).
	script := `#!/bin/sh
case "$1" in
  --version)
    echo "prettier 3.2.1"
    exit 0
    ;;
  --list-different)
    echo "index.js"
    exit 1
    ;;
esac
echo "unexpected args: $*" >&2
exit 0
`
	require.NoError(t, os.WriteFile(scriptPath, []byte(script), 0o755))
	// Prepend the mock's directory so it shadows any real prettier.
	t.Setenv("PATH", binDir+string(os.PathListSeparator)+os.Getenv("PATH"))

	svc := &Service{adapters: []Adapter{
		newCommandAdapter("prettier", []string{"prettier"}, []string{"js"}, "style", "", false, true, projectPathArguments("--list-different"), parsePrettierDiagnostics),
	}}
	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	require.Len(t, report.Tools, 1)
	assert.Equal(t, "prettier", report.Tools[0].Name)
	assert.Equal(t, "prettier 3.2.1", report.Tools[0].Version)
}
|
|
||||||
|
|
||||||
// TestServiceRun_Good_DeduplicatesMergedFindings verifies that identical
// findings reported by two adapters registered under the same tool name
// are merged into a single finding in the report.
func TestServiceRun_Good_DeduplicatesMergedFindings(t *testing.T) {
	dir := t.TempDir()
	require.NoError(t, os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com/test\n"), 0o644))
	require.NoError(t, os.MkdirAll(filepath.Join(dir, ".core"), 0o755))
	require.NoError(t, os.WriteFile(filepath.Join(dir, ".core", "lint.yaml"), []byte("lint:\n go:\n - dup\n"), 0o644))

	// One finding shared verbatim by both adapters below.
	finding := Finding{
		Tool:     "dup",
		File:     filepath.Join(dir, "input.go"),
		Line:     12,
		Column:   3,
		Severity: "warning",
		Code:     "duplicate-finding",
		Message:  "same finding",
	}

	svc := &Service{adapters: []Adapter{
		duplicateAdapter{name: "dup", finding: finding},
		duplicateAdapter{name: "dup", finding: finding},
	}}

	report, err := svc.Run(context.Background(), RunInput{
		Path:   dir,
		FailOn: "warning",
	})
	require.NoError(t, err)

	// NOTE(review): two adapters produce 3 tool runs — presumably a
	// built-in tool also runs; confirm against Service.Run.
	require.Len(t, report.Tools, 3)
	// The duplicated finding collapses to one entry.
	require.Len(t, report.Findings, 1)
	assert.Equal(t, "duplicate-finding", report.Findings[0].Code)
	assert.Equal(t, 1, report.Summary.Total)
}
|
|
||||||
|
|
||||||
func TestServiceTools_EmptyInventoryReturnsEmptySlice(t *testing.T) {
|
|
||||||
tools := (&Service{}).Tools(nil)
|
|
||||||
require.NotNil(t, tools)
|
|
||||||
assert.Empty(t, tools)
|
|
||||||
}
|
|
||||||
|
|
||||||
// shortcutAdapter is a minimal Adapter stub for language-shortcut tests:
// always available, matches every language, and its Run reports a clean
// pass with no findings.
type shortcutAdapter struct {
	name     string // reported tool name
	category string // reported tool category (e.g. "correctness", "security")
}

func (adapter shortcutAdapter) Name() string { return adapter.name }

func (adapter shortcutAdapter) Available() bool { return true }

// Languages claims the wildcard so the stub applies to any project.
func (adapter shortcutAdapter) Languages() []string { return []string{"*"} }

func (adapter shortcutAdapter) Command() string { return adapter.name }

func (adapter shortcutAdapter) Entitlement() string { return "" }

func (adapter shortcutAdapter) RequiresEntitlement() bool { return false }

// MatchesLanguage always matches, regardless of the detected languages.
func (adapter shortcutAdapter) MatchesLanguage(languages []string) bool { return true }

func (adapter shortcutAdapter) Category() string { return adapter.category }

func (adapter shortcutAdapter) Fast() bool { return true }

// Run returns an immediate "passed" tool run with no findings.
func (adapter shortcutAdapter) Run(_ context.Context, _ RunInput, _ []string) AdapterResult {
	return AdapterResult{
		Tool: ToolRun{
			Name:     adapter.name,
			Status:   "passed",
			Duration: "0s",
		},
	}
}
|
|
||||||
|
|
||||||
// duplicateAdapter is an Adapter stub that always emits one fixed finding;
// two instances with the same name and finding are used to test that the
// service deduplicates merged findings.
type duplicateAdapter struct {
	name    string  // reported tool name
	finding Finding // the single finding emitted by every Run
}

func (adapter duplicateAdapter) Name() string { return adapter.name }

func (adapter duplicateAdapter) Available() bool { return true }

func (adapter duplicateAdapter) Languages() []string { return []string{"go"} }

func (adapter duplicateAdapter) Command() string { return adapter.name }

func (adapter duplicateAdapter) Entitlement() string { return "" }

func (adapter duplicateAdapter) RequiresEntitlement() bool { return false }

// MatchesLanguage matches only when "go" appears in the detected languages.
func (adapter duplicateAdapter) MatchesLanguage(languages []string) bool {
	for _, language := range languages {
		if language == "go" {
			return true
		}
	}
	return false
}

func (adapter duplicateAdapter) Category() string { return "correctness" }

func (adapter duplicateAdapter) Fast() bool { return true }

// Run reports a "passed" tool run carrying the stub's fixed finding.
func (adapter duplicateAdapter) Run(_ context.Context, _ RunInput, _ []string) AdapterResult {
	return AdapterResult{
		Tool: ToolRun{
			Name:     adapter.name,
			Status:   "passed",
			Duration: "0s",
		},
		Findings: []Finding{adapter.finding},
	}
}
|
|
||||||
|
|
||||||
func runTestCommand(t *testing.T, dir string, name string, args ...string) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
cmd := exec.Command(name, args...)
|
|
||||||
cmd.Dir = dir
|
|
||||||
output, err := cmd.CombinedOutput()
|
|
||||||
require.NoError(t, err, string(output))
|
|
||||||
}
|
|
||||||
|
|
@ -8,7 +8,6 @@ import (
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
"regexp"
|
||||||
"slices"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
@ -38,17 +37,14 @@ type RaceCondition struct {
|
||||||
Desc string `json:"desc"`
|
Desc string `json:"desc"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TrackedComment represents a tracked code comment like TODO, FIXME, or HACK.
|
// TODO represents a tracked code comment like TODO, FIXME, or HACK.
|
||||||
type TrackedComment struct {
|
type TODO struct {
|
||||||
File string `json:"file"`
|
File string `json:"file"`
|
||||||
Line int `json:"line"`
|
Line int `json:"line"`
|
||||||
Type string `json:"type"`
|
Type string `json:"type"`
|
||||||
Message string `json:"message"`
|
Message string `json:"message"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO is kept for compatibility with the older API name.
|
|
||||||
type TODO = TrackedComment
|
|
||||||
|
|
||||||
// Vulnerability represents a dependency vulnerability from govulncheck text output.
|
// Vulnerability represents a dependency vulnerability from govulncheck text output.
|
||||||
type Vulnerability struct {
|
type Vulnerability struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
|
|
@ -142,10 +138,8 @@ func (t *Toolkit) Run(name string, args ...string) (stdout, stderr string, exitC
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// FindTrackedComments greps for TODO/FIXME/HACK comments within a directory.
|
// FindTODOs greps for TODO/FIXME/HACK comments within a directory.
|
||||||
//
|
func (t *Toolkit) FindTODOs(dir string) ([]TODO, error) {
|
||||||
// comments, err := lint.NewToolkit(".").FindTrackedComments("pkg/lint")
|
|
||||||
func (t *Toolkit) FindTrackedComments(dir string) ([]TrackedComment, error) {
|
|
||||||
pattern := `\b(TODO|FIXME|HACK)\b(\(.*\))?:`
|
pattern := `\b(TODO|FIXME|HACK)\b(\(.*\))?:`
|
||||||
stdout, stderr, exitCode, err := t.Run("git", "grep", "--line-number", "-E", pattern, "--", dir)
|
stdout, stderr, exitCode, err := t.Run("git", "grep", "--line-number", "-E", pattern, "--", dir)
|
||||||
|
|
||||||
|
|
@ -153,10 +147,10 @@ func (t *Toolkit) FindTrackedComments(dir string) ([]TrackedComment, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
if err != nil && exitCode != 1 {
|
if err != nil && exitCode != 1 {
|
||||||
return nil, coreerr.E("Toolkit.FindTrackedComments", fmt.Sprintf("git grep failed (exit %d):\n%s", exitCode, stderr), err)
|
return nil, coreerr.E("Toolkit.FindTODOs", fmt.Sprintf("git grep failed (exit %d):\n%s", exitCode, stderr), err)
|
||||||
}
|
}
|
||||||
|
|
||||||
var comments []TrackedComment
|
var todos []TODO
|
||||||
re := regexp.MustCompile(pattern)
|
re := regexp.MustCompile(pattern)
|
||||||
|
|
||||||
for line := range strings.SplitSeq(strings.TrimSpace(stdout), "\n") {
|
for line := range strings.SplitSeq(strings.TrimSpace(stdout), "\n") {
|
||||||
|
|
@ -175,19 +169,14 @@ func (t *Toolkit) FindTrackedComments(dir string) ([]TrackedComment, error) {
|
||||||
}
|
}
|
||||||
msg := strings.TrimSpace(re.Split(parts[2], 2)[1])
|
msg := strings.TrimSpace(re.Split(parts[2], 2)[1])
|
||||||
|
|
||||||
comments = append(comments, TrackedComment{
|
todos = append(todos, TODO{
|
||||||
File: parts[0],
|
File: parts[0],
|
||||||
Line: lineNum,
|
Line: lineNum,
|
||||||
Type: todoType,
|
Type: todoType,
|
||||||
Message: msg,
|
Message: msg,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
return comments, nil
|
return todos, nil
|
||||||
}
|
|
||||||
|
|
||||||
// FindTODOs is kept for compatibility with the older API name.
|
|
||||||
func (t *Toolkit) FindTODOs(dir string) ([]TODO, error) {
|
|
||||||
return t.FindTrackedComments(dir)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// AuditDeps runs govulncheck to find dependency vulnerabilities (text output).
|
// AuditDeps runs govulncheck to find dependency vulnerabilities (text output).
|
||||||
|
|
@ -500,10 +489,6 @@ func (t *Toolkit) DepGraph(pkg string) (*Graph, error) {
|
||||||
for node := range nodes {
|
for node := range nodes {
|
||||||
graph.Nodes = append(graph.Nodes, node)
|
graph.Nodes = append(graph.Nodes, node)
|
||||||
}
|
}
|
||||||
slices.Sort(graph.Nodes)
|
|
||||||
for src := range graph.Edges {
|
|
||||||
slices.Sort(graph.Edges[src])
|
|
||||||
}
|
|
||||||
return graph, nil
|
return graph, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -135,18 +135,6 @@ func TestToolkit_DepGraph_Good(t *testing.T) {
|
||||||
assert.Len(t, graph.Edges["modA@v1"], 2)
|
assert.Len(t, graph.Edges["modA@v1"], 2)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestToolkit_DepGraph_SortsNodesAndEdges(t *testing.T) {
|
|
||||||
output := "modB@v2 modD@v1\nmodA@v1 modC@v3\nmodA@v1 modB@v2"
|
|
||||||
setupMockCmd(t, "go", output)
|
|
||||||
|
|
||||||
tk := NewToolkit(t.TempDir())
|
|
||||||
graph, err := tk.DepGraph("./...")
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
assert.Equal(t, []string{"modA@v1", "modB@v2", "modC@v3", "modD@v1"}, graph.Nodes)
|
|
||||||
assert.Equal(t, []string{"modB@v2", "modC@v3"}, graph.Edges["modA@v1"])
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestToolkit_RaceDetect_Good(t *testing.T) {
|
func TestToolkit_RaceDetect_Good(t *testing.T) {
|
||||||
setupMockCmd(t, "go", "ok\texample.com/safe\t0.1s")
|
setupMockCmd(t, "go", "ok\texample.com/safe\t0.1s")
|
||||||
|
|
||||||
|
|
@ -203,21 +191,3 @@ func TestToolkit_CheckPerms_Good(t *testing.T) {
|
||||||
require.Len(t, issues, 1)
|
require.Len(t, issues, 1)
|
||||||
assert.Equal(t, "World-writable", issues[0].Issue)
|
assert.Equal(t, "World-writable", issues[0].Issue)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestToolkit_FindTrackedComments_Compatibility(t *testing.T) {
|
|
||||||
output := "pkg/file.go:12:TODO: fix this\n"
|
|
||||||
setupMockCmd(t, "git", output)
|
|
||||||
|
|
||||||
tk := NewToolkit(t.TempDir())
|
|
||||||
comments, err := tk.FindTrackedComments("pkg")
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Len(t, comments, 1)
|
|
||||||
assert.Equal(t, "pkg/file.go", comments[0].File)
|
|
||||||
assert.Equal(t, 12, comments[0].Line)
|
|
||||||
assert.Equal(t, "TODO", comments[0].Type)
|
|
||||||
assert.Equal(t, "fix this", comments[0].Message)
|
|
||||||
|
|
||||||
legacyComments, err := tk.FindTODOs("pkg")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, comments, legacyComments)
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,12 @@
|
||||||
package php
|
package php
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"cmp"
|
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"slices"
|
|
||||||
|
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
coreerr "forge.lthn.ai/core/go-log"
|
||||||
)
|
)
|
||||||
|
|
@ -104,7 +102,6 @@ func runComposerAudit(ctx context.Context, opts AuditOptions) AuditResult {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sortAuditAdvisories(result.Advisories)
|
|
||||||
result.Vulnerabilities = len(result.Advisories)
|
result.Vulnerabilities = len(result.Advisories)
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
result.Error = err
|
result.Error = err
|
||||||
|
|
@ -153,7 +150,6 @@ func runNpmAudit(ctx context.Context, opts AuditOptions) AuditResult {
|
||||||
Severity: vuln.Severity,
|
Severity: vuln.Severity,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
sortAuditAdvisories(result.Advisories)
|
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
result.Error = err
|
result.Error = err
|
||||||
}
|
}
|
||||||
|
|
@ -161,14 +157,3 @@ func runNpmAudit(ctx context.Context, opts AuditOptions) AuditResult {
|
||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func sortAuditAdvisories(advisories []AuditAdvisory) {
|
|
||||||
slices.SortFunc(advisories, func(a, b AuditAdvisory) int {
|
|
||||||
return cmp.Or(
|
|
||||||
cmp.Compare(a.Package, b.Package),
|
|
||||||
cmp.Compare(a.Title, b.Title),
|
|
||||||
cmp.Compare(a.Severity, b.Severity),
|
|
||||||
cmp.Compare(a.URL, b.URL),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -47,24 +47,6 @@ func TestAuditAdvisory_Fields(t *testing.T) {
|
||||||
assert.Equal(t, []string{"CVE-2025-9999", "GHSA-xxxx"}, adv.Identifiers)
|
assert.Equal(t, []string{"CVE-2025-9999", "GHSA-xxxx"}, adv.Identifiers)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestSortAuditAdvisories_Good(t *testing.T) {
|
|
||||||
advisories := []AuditAdvisory{
|
|
||||||
{Package: "vendor/package-b", Title: "Zulu"},
|
|
||||||
{Package: "vendor/package-a", Title: "Beta"},
|
|
||||||
{Package: "vendor/package-b", Title: "Alpha"},
|
|
||||||
}
|
|
||||||
|
|
||||||
sortAuditAdvisories(advisories)
|
|
||||||
|
|
||||||
require.Len(t, advisories, 3)
|
|
||||||
assert.Equal(t, "vendor/package-a", advisories[0].Package)
|
|
||||||
assert.Equal(t, "Beta", advisories[0].Title)
|
|
||||||
assert.Equal(t, "vendor/package-b", advisories[1].Package)
|
|
||||||
assert.Equal(t, "Alpha", advisories[1].Title)
|
|
||||||
assert.Equal(t, "vendor/package-b", advisories[2].Package)
|
|
||||||
assert.Equal(t, "Zulu", advisories[2].Title)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunComposerAudit_ParsesJSON(t *testing.T) {
|
func TestRunComposerAudit_ParsesJSON(t *testing.T) {
|
||||||
// Test the JSON parsing of composer audit output by verifying
|
// Test the JSON parsing of composer audit output by verifying
|
||||||
// the struct can be populated from JSON matching composer's format.
|
// the struct can be populated from JSON matching composer's format.
|
||||||
|
|
@ -119,20 +101,24 @@ func TestRunComposerAudit_ParsesJSON(t *testing.T) {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
sortAuditAdvisories(result.Advisories)
|
|
||||||
result.Vulnerabilities = len(result.Advisories)
|
result.Vulnerabilities = len(result.Advisories)
|
||||||
|
|
||||||
assert.Equal(t, "composer", result.Tool)
|
assert.Equal(t, "composer", result.Tool)
|
||||||
assert.Equal(t, 3, result.Vulnerabilities)
|
assert.Equal(t, 3, result.Vulnerabilities)
|
||||||
assert.Len(t, result.Advisories, 3)
|
assert.Len(t, result.Advisories, 3)
|
||||||
assert.Equal(t, "vendor/package-a", result.Advisories[0].Package)
|
|
||||||
assert.Equal(t, "Remote Code Execution", result.Advisories[0].Title)
|
// Build a map of advisories by package for deterministic assertions
|
||||||
assert.Equal(t, "https://example.com/advisory/1", result.Advisories[0].URL)
|
byPkg := make(map[string][]AuditAdvisory)
|
||||||
assert.Equal(t, []string{"CVE-2025-1234"}, result.Advisories[0].Identifiers)
|
for _, a := range result.Advisories {
|
||||||
assert.Equal(t, "vendor/package-b", result.Advisories[1].Package)
|
byPkg[a.Package] = append(byPkg[a.Package], a)
|
||||||
assert.Equal(t, "Cross-Site Scripting", result.Advisories[1].Title)
|
}
|
||||||
assert.Equal(t, "vendor/package-b", result.Advisories[2].Package)
|
|
||||||
assert.Equal(t, "Open Redirect", result.Advisories[2].Title)
|
assert.Len(t, byPkg["vendor/package-a"], 1)
|
||||||
|
assert.Equal(t, "Remote Code Execution", byPkg["vendor/package-a"][0].Title)
|
||||||
|
assert.Equal(t, "https://example.com/advisory/1", byPkg["vendor/package-a"][0].URL)
|
||||||
|
assert.Equal(t, []string{"CVE-2025-1234"}, byPkg["vendor/package-a"][0].Identifiers)
|
||||||
|
|
||||||
|
assert.Len(t, byPkg["vendor/package-b"], 2)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNpmAuditJSON_ParsesCorrectly(t *testing.T) {
|
func TestNpmAuditJSON_ParsesCorrectly(t *testing.T) {
|
||||||
|
|
@ -178,15 +164,19 @@ func TestNpmAuditJSON_ParsesCorrectly(t *testing.T) {
|
||||||
Severity: vuln.Severity,
|
Severity: vuln.Severity,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
sortAuditAdvisories(result.Advisories)
|
|
||||||
|
|
||||||
assert.Equal(t, "npm", result.Tool)
|
assert.Equal(t, "npm", result.Tool)
|
||||||
assert.Equal(t, 2, result.Vulnerabilities)
|
assert.Equal(t, 2, result.Vulnerabilities)
|
||||||
assert.Len(t, result.Advisories, 2)
|
assert.Len(t, result.Advisories, 2)
|
||||||
assert.Equal(t, "lodash", result.Advisories[0].Package)
|
|
||||||
assert.Equal(t, "high", result.Advisories[0].Severity)
|
// Build map for deterministic assertions
|
||||||
assert.Equal(t, "minimist", result.Advisories[1].Package)
|
byPkg := make(map[string]AuditAdvisory)
|
||||||
assert.Equal(t, "low", result.Advisories[1].Severity)
|
for _, a := range result.Advisories {
|
||||||
|
byPkg[a.Package] = a
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Equal(t, "high", byPkg["lodash"].Severity)
|
||||||
|
assert.Equal(t, "low", byPkg["minimist"].Severity)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRunAudit_SkipsNpmWithoutPackageJSON(t *testing.T) {
|
func TestRunAudit_SkipsNpmWithoutPackageJSON(t *testing.T) {
|
||||||
|
|
|
||||||
|
|
@ -8,13 +8,13 @@ import (
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
|
coreio "forge.lthn.ai/core/go-io"
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
coreerr "forge.lthn.ai/core/go-log"
|
||||||
)
|
)
|
||||||
|
|
||||||
// fileExists reports whether the named file or directory exists.
|
// fileExists reports whether the named file or directory exists.
|
||||||
func fileExists(path string) bool {
|
func fileExists(path string) bool {
|
||||||
_, err := os.Stat(path)
|
return coreio.Local.Exists(path)
|
||||||
return err == nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// FormatOptions configures PHP code formatting.
|
// FormatOptions configures PHP code formatting.
|
||||||
|
|
|
||||||
|
|
@ -1,17 +1,11 @@
|
||||||
package php
|
package php
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"cmp"
|
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"slices"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
coreio "forge.lthn.ai/core/go-io"
|
coreio "forge.lthn.ai/core/go-io"
|
||||||
coreerr "forge.lthn.ai/core/go-log"
|
coreerr "forge.lthn.ai/core/go-log"
|
||||||
|
|
@ -28,30 +22,30 @@ type SecurityOptions struct {
|
||||||
|
|
||||||
// SecurityResult holds the results of security scanning.
|
// SecurityResult holds the results of security scanning.
|
||||||
type SecurityResult struct {
|
type SecurityResult struct {
|
||||||
Checks []SecurityCheck `json:"checks"`
|
Checks []SecurityCheck
|
||||||
Summary SecuritySummary `json:"summary"`
|
Summary SecuritySummary
|
||||||
}
|
}
|
||||||
|
|
||||||
// SecurityCheck represents a single security check result.
|
// SecurityCheck represents a single security check result.
|
||||||
type SecurityCheck struct {
|
type SecurityCheck struct {
|
||||||
ID string `json:"id"`
|
ID string
|
||||||
Name string `json:"name"`
|
Name string
|
||||||
Description string `json:"description"`
|
Description string
|
||||||
Severity string `json:"severity"`
|
Severity string
|
||||||
Passed bool `json:"passed"`
|
Passed bool
|
||||||
Message string `json:"message,omitempty"`
|
Message string
|
||||||
Fix string `json:"fix,omitempty"`
|
Fix string
|
||||||
CWE string `json:"cwe,omitempty"`
|
CWE string
|
||||||
}
|
}
|
||||||
|
|
||||||
// SecuritySummary summarises security check results.
|
// SecuritySummary summarises security check results.
|
||||||
type SecuritySummary struct {
|
type SecuritySummary struct {
|
||||||
Total int `json:"total"`
|
Total int
|
||||||
Passed int `json:"passed"`
|
Passed int
|
||||||
Critical int `json:"critical"`
|
Critical int
|
||||||
High int `json:"high"`
|
High int
|
||||||
Medium int `json:"medium"`
|
Medium int
|
||||||
Low int `json:"low"`
|
Low int
|
||||||
}
|
}
|
||||||
|
|
||||||
// capitalise returns s with the first letter upper-cased.
|
// capitalise returns s with the first letter upper-cased.
|
||||||
|
|
@ -62,50 +56,6 @@ func capitalise(s string) string {
|
||||||
return strings.ToUpper(s[:1]) + s[1:]
|
return strings.ToUpper(s[:1]) + s[1:]
|
||||||
}
|
}
|
||||||
|
|
||||||
// securitySeverityRank maps severities to a sortable rank.
|
|
||||||
// Lower numbers are more severe.
|
|
||||||
func securitySeverityRank(severity string) (int, bool) {
|
|
||||||
switch strings.ToLower(strings.TrimSpace(severity)) {
|
|
||||||
case "critical":
|
|
||||||
return 0, true
|
|
||||||
case "high":
|
|
||||||
return 1, true
|
|
||||||
case "medium":
|
|
||||||
return 2, true
|
|
||||||
case "low":
|
|
||||||
return 3, true
|
|
||||||
case "info":
|
|
||||||
return 4, true
|
|
||||||
default:
|
|
||||||
return 0, false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// filterSecurityChecks returns checks at or above the requested severity.
|
|
||||||
func filterSecurityChecks(checks []SecurityCheck, minimum string) ([]SecurityCheck, error) {
|
|
||||||
if strings.TrimSpace(minimum) == "" {
|
|
||||||
return checks, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
minRank, ok := securitySeverityRank(minimum)
|
|
||||||
if !ok {
|
|
||||||
return nil, coreerr.E("filterSecurityChecks", "invalid security severity "+minimum, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
filtered := make([]SecurityCheck, 0, len(checks))
|
|
||||||
for _, check := range checks {
|
|
||||||
rank, ok := securitySeverityRank(check.Severity)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if rank <= minRank {
|
|
||||||
filtered = append(filtered, check)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return filtered, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RunSecurityChecks runs security checks on the project.
|
// RunSecurityChecks runs security checks on the project.
|
||||||
func RunSecurityChecks(ctx context.Context, opts SecurityOptions) (*SecurityResult, error) {
|
func RunSecurityChecks(ctx context.Context, opts SecurityOptions) (*SecurityResult, error) {
|
||||||
if opts.Dir == "" {
|
if opts.Dir == "" {
|
||||||
|
|
@ -143,109 +93,28 @@ func RunSecurityChecks(ctx context.Context, opts SecurityOptions) (*SecurityResu
|
||||||
fsChecks := runFilesystemSecurityChecks(opts.Dir)
|
fsChecks := runFilesystemSecurityChecks(opts.Dir)
|
||||||
result.Checks = append(result.Checks, fsChecks...)
|
result.Checks = append(result.Checks, fsChecks...)
|
||||||
|
|
||||||
// Check HTTP security headers when a URL is supplied.
|
// Calculate summary
|
||||||
result.Checks = append(result.Checks, runHTTPSecurityHeaderChecks(ctx, opts.URL)...)
|
|
||||||
|
|
||||||
filteredChecks, err := filterSecurityChecks(result.Checks, opts.Severity)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
result.Checks = filteredChecks
|
|
||||||
|
|
||||||
// Keep the check order stable for callers that consume the package result
|
|
||||||
// directly instead of going through the CLI layer.
|
|
||||||
slices.SortFunc(result.Checks, func(a, b SecurityCheck) int {
|
|
||||||
return cmp.Compare(a.ID, b.ID)
|
|
||||||
})
|
|
||||||
|
|
||||||
// Calculate summary after any severity filtering has been applied.
|
|
||||||
for _, check := range result.Checks {
|
for _, check := range result.Checks {
|
||||||
result.Summary.Total++
|
result.Summary.Total++
|
||||||
if check.Passed {
|
if check.Passed {
|
||||||
result.Summary.Passed++
|
result.Summary.Passed++
|
||||||
continue
|
} else {
|
||||||
}
|
switch check.Severity {
|
||||||
|
case "critical":
|
||||||
switch check.Severity {
|
result.Summary.Critical++
|
||||||
case "critical":
|
case "high":
|
||||||
result.Summary.Critical++
|
result.Summary.High++
|
||||||
case "high":
|
case "medium":
|
||||||
result.Summary.High++
|
result.Summary.Medium++
|
||||||
case "medium":
|
case "low":
|
||||||
result.Summary.Medium++
|
result.Summary.Low++
|
||||||
case "low":
|
}
|
||||||
result.Summary.Low++
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return result, nil
|
return result, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func runHTTPSecurityHeaderChecks(ctx context.Context, rawURL string) []SecurityCheck {
|
|
||||||
if strings.TrimSpace(rawURL) == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
check := SecurityCheck{
|
|
||||||
ID: "http_security_headers",
|
|
||||||
Name: "HTTP Security Headers",
|
|
||||||
Description: "Check for common security headers on the supplied URL",
|
|
||||||
Severity: "high",
|
|
||||||
CWE: "CWE-693",
|
|
||||||
}
|
|
||||||
|
|
||||||
parsedURL, err := url.Parse(rawURL)
|
|
||||||
if err != nil || parsedURL.Scheme == "" || parsedURL.Host == "" {
|
|
||||||
check.Message = "Invalid URL"
|
|
||||||
check.Fix = "Provide a valid http:// or https:// URL"
|
|
||||||
return []SecurityCheck{check}
|
|
||||||
}
|
|
||||||
|
|
||||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil)
|
|
||||||
if err != nil {
|
|
||||||
check.Message = err.Error()
|
|
||||||
check.Fix = "Provide a reachable URL"
|
|
||||||
return []SecurityCheck{check}
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &http.Client{Timeout: 10 * time.Second}
|
|
||||||
resp, err := client.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
check.Message = err.Error()
|
|
||||||
check.Fix = "Ensure the URL is reachable"
|
|
||||||
return []SecurityCheck{check}
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
_, _ = io.Copy(io.Discard, resp.Body)
|
|
||||||
|
|
||||||
requiredHeaders := []string{
|
|
||||||
"Content-Security-Policy",
|
|
||||||
"X-Frame-Options",
|
|
||||||
"X-Content-Type-Options",
|
|
||||||
"Referrer-Policy",
|
|
||||||
}
|
|
||||||
if strings.EqualFold(parsedURL.Scheme, "https") {
|
|
||||||
requiredHeaders = append(requiredHeaders, "Strict-Transport-Security")
|
|
||||||
}
|
|
||||||
|
|
||||||
var missing []string
|
|
||||||
for _, header := range requiredHeaders {
|
|
||||||
if strings.TrimSpace(resp.Header.Get(header)) == "" {
|
|
||||||
missing = append(missing, header)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(missing) == 0 {
|
|
||||||
check.Passed = true
|
|
||||||
check.Message = "Common security headers are present"
|
|
||||||
return []SecurityCheck{check}
|
|
||||||
}
|
|
||||||
|
|
||||||
check.Message = fmt.Sprintf("Missing headers: %s", strings.Join(missing, ", "))
|
|
||||||
check.Fix = "Add the missing security headers to the response"
|
|
||||||
return []SecurityCheck{check}
|
|
||||||
}
|
|
||||||
|
|
||||||
func runEnvSecurityChecks(dir string) []SecurityCheck {
|
func runEnvSecurityChecks(dir string) []SecurityCheck {
|
||||||
var checks []SecurityCheck
|
var checks []SecurityCheck
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,8 @@ package php
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
@ -195,7 +192,7 @@ func TestRunSecurityChecks_Summary(t *testing.T) {
|
||||||
// Summary should have totals
|
// Summary should have totals
|
||||||
assert.Greater(t, result.Summary.Total, 0)
|
assert.Greater(t, result.Summary.Total, 0)
|
||||||
assert.Greater(t, result.Summary.Critical, 0) // at least debug_mode fails
|
assert.Greater(t, result.Summary.Critical, 0) // at least debug_mode fails
|
||||||
assert.Greater(t, result.Summary.High, 0) // at least https_enforced fails
|
assert.Greater(t, result.Summary.High, 0) // at least https_enforced fails
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRunSecurityChecks_DefaultsDir(t *testing.T) {
|
func TestRunSecurityChecks_DefaultsDir(t *testing.T) {
|
||||||
|
|
@ -205,100 +202,9 @@ func TestRunSecurityChecks_DefaultsDir(t *testing.T) {
|
||||||
assert.NotNil(t, result)
|
assert.NotNil(t, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRunSecurityChecks_SeverityFilterCritical(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
setupSecurityFixture(t, dir, "APP_DEBUG=true\nAPP_KEY=short\nAPP_URL=http://example.com\n")
|
|
||||||
|
|
||||||
result, err := RunSecurityChecks(context.Background(), SecurityOptions{
|
|
||||||
Dir: dir,
|
|
||||||
Severity: "critical",
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
require.Len(t, result.Checks, 3)
|
|
||||||
assert.Equal(t, 3, result.Summary.Total)
|
|
||||||
assert.Equal(t, 1, result.Summary.Passed)
|
|
||||||
assert.Equal(t, 2, result.Summary.Critical)
|
|
||||||
assert.Zero(t, result.Summary.High)
|
|
||||||
|
|
||||||
for _, check := range result.Checks {
|
|
||||||
assert.Equal(t, "critical", check.Severity)
|
|
||||||
}
|
|
||||||
|
|
||||||
byID := make(map[string]SecurityCheck)
|
|
||||||
for _, check := range result.Checks {
|
|
||||||
byID[check.ID] = check
|
|
||||||
}
|
|
||||||
|
|
||||||
assert.NotContains(t, byID, "https_enforced")
|
|
||||||
assert.Contains(t, byID, "app_key_set")
|
|
||||||
assert.Contains(t, byID, "composer_audit")
|
|
||||||
assert.Contains(t, byID, "debug_mode")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunSecurityChecks_URLAddsHeaderCheck(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
setupSecurityFixture(t, dir, "APP_DEBUG=false\nAPP_KEY=base64:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa=\nAPP_URL=https://example.com\n")
|
|
||||||
|
|
||||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
|
||||||
w.Header().Set("X-Content-Type-Options", "nosniff")
|
|
||||||
w.WriteHeader(http.StatusOK)
|
|
||||||
_, _ = w.Write([]byte("ok"))
|
|
||||||
}))
|
|
||||||
defer server.Close()
|
|
||||||
|
|
||||||
result, err := RunSecurityChecks(context.Background(), SecurityOptions{
|
|
||||||
Dir: dir,
|
|
||||||
URL: server.URL,
|
|
||||||
})
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
byID := make(map[string]SecurityCheck)
|
|
||||||
for _, check := range result.Checks {
|
|
||||||
byID[check.ID] = check
|
|
||||||
}
|
|
||||||
|
|
||||||
headerCheck, ok := byID["http_security_headers"]
|
|
||||||
require.True(t, ok)
|
|
||||||
assert.False(t, headerCheck.Passed)
|
|
||||||
assert.Equal(t, "high", headerCheck.Severity)
|
|
||||||
assert.True(t, strings.Contains(headerCheck.Message, "Missing headers"))
|
|
||||||
assert.NotEmpty(t, headerCheck.Fix)
|
|
||||||
|
|
||||||
assert.Equal(t, 5, result.Summary.Total)
|
|
||||||
assert.Equal(t, 4, result.Summary.Passed)
|
|
||||||
assert.Equal(t, 1, result.Summary.High)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRunSecurityChecks_InvalidSeverity(t *testing.T) {
|
|
||||||
dir := t.TempDir()
|
|
||||||
|
|
||||||
_, err := RunSecurityChecks(context.Background(), SecurityOptions{
|
|
||||||
Dir: dir,
|
|
||||||
Severity: "banana",
|
|
||||||
})
|
|
||||||
require.Error(t, err)
|
|
||||||
assert.Contains(t, err.Error(), "invalid security severity")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCapitalise(t *testing.T) {
|
func TestCapitalise(t *testing.T) {
|
||||||
assert.Equal(t, "Composer", capitalise("composer"))
|
assert.Equal(t, "Composer", capitalise("composer"))
|
||||||
assert.Equal(t, "Npm", capitalise("npm"))
|
assert.Equal(t, "Npm", capitalise("npm"))
|
||||||
assert.Equal(t, "", capitalise(""))
|
assert.Equal(t, "", capitalise(""))
|
||||||
assert.Equal(t, "A", capitalise("a"))
|
assert.Equal(t, "A", capitalise("a"))
|
||||||
}
|
}
|
||||||
|
|
||||||
func setupSecurityFixture(t *testing.T, dir string, envContent string) {
|
|
||||||
t.Helper()
|
|
||||||
|
|
||||||
require.NoError(t, os.WriteFile(filepath.Join(dir, ".env"), []byte(envContent), 0o644))
|
|
||||||
|
|
||||||
composerBin := filepath.Join(dir, "composer")
|
|
||||||
require.NoError(t, os.WriteFile(composerBin, []byte("#!/bin/sh\ncat <<'JSON'\n{\"advisories\":{}}\nJSON\n"), 0o755))
|
|
||||||
|
|
||||||
oldPath := os.Getenv("PATH")
|
|
||||||
require.NoError(t, os.Setenv("PATH", dir+string(os.PathListSeparator)+oldPath))
|
|
||||||
t.Cleanup(func() {
|
|
||||||
require.NoError(t, os.Setenv("PATH", oldPath))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
package php
|
package php
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
|
||||||
"context"
|
"context"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
|
|
@ -34,9 +33,6 @@ type TestOptions struct {
|
||||||
// JUnit outputs results in JUnit XML format via --log-junit.
|
// JUnit outputs results in JUnit XML format via --log-junit.
|
||||||
JUnit bool
|
JUnit bool
|
||||||
|
|
||||||
// JUnitPath overrides the JUnit report path. Defaults to test-results.xml.
|
|
||||||
JUnitPath string
|
|
||||||
|
|
||||||
// Output is the writer for test output (defaults to os.Stdout).
|
// Output is the writer for test output (defaults to os.Stdout).
|
||||||
Output io.Writer
|
Output io.Writer
|
||||||
}
|
}
|
||||||
|
|
@ -77,18 +73,6 @@ func RunTests(ctx context.Context, opts TestOptions) error {
|
||||||
opts.Output = os.Stdout
|
opts.Output = os.Stdout
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.JUnit && opts.JUnitPath == "" {
|
|
||||||
reportFile, err := os.CreateTemp("", "core-qa-junit-*.xml")
|
|
||||||
if err != nil {
|
|
||||||
return coreerr.E("php.RunTests", "create JUnit report file", err)
|
|
||||||
}
|
|
||||||
if closeErr := reportFile.Close(); closeErr != nil {
|
|
||||||
return coreerr.E("php.RunTests", "close JUnit report file", closeErr)
|
|
||||||
}
|
|
||||||
opts.JUnitPath = reportFile.Name()
|
|
||||||
defer os.Remove(opts.JUnitPath)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Detect test runner
|
// Detect test runner
|
||||||
runner := DetectTestRunner(opts.Dir)
|
runner := DetectTestRunner(opts.Dir)
|
||||||
|
|
||||||
|
|
@ -105,27 +89,14 @@ func RunTests(ctx context.Context, opts TestOptions) error {
|
||||||
|
|
||||||
cmd := exec.CommandContext(ctx, cmdName, args...)
|
cmd := exec.CommandContext(ctx, cmdName, args...)
|
||||||
cmd.Dir = opts.Dir
|
cmd.Dir = opts.Dir
|
||||||
|
cmd.Stdout = opts.Output
|
||||||
|
cmd.Stderr = opts.Output
|
||||||
cmd.Stdin = os.Stdin
|
cmd.Stdin = os.Stdin
|
||||||
|
|
||||||
// Set XDEBUG_MODE=coverage to avoid PHPUnit 11 warning
|
// Set XDEBUG_MODE=coverage to avoid PHPUnit 11 warning
|
||||||
cmd.Env = append(os.Environ(), "XDEBUG_MODE=coverage")
|
cmd.Env = append(os.Environ(), "XDEBUG_MODE=coverage")
|
||||||
|
|
||||||
if !opts.JUnit {
|
return cmd.Run()
|
||||||
cmd.Stdout = opts.Output
|
|
||||||
cmd.Stderr = opts.Output
|
|
||||||
return cmd.Run()
|
|
||||||
}
|
|
||||||
|
|
||||||
var machineOutput bytes.Buffer
|
|
||||||
cmd.Stdout = &machineOutput
|
|
||||||
cmd.Stderr = &machineOutput
|
|
||||||
|
|
||||||
runErr := cmd.Run()
|
|
||||||
reportErr := emitJUnitReport(opts.Output, opts.JUnitPath)
|
|
||||||
if runErr != nil {
|
|
||||||
return runErr
|
|
||||||
}
|
|
||||||
return reportErr
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// RunParallel runs tests in parallel using the appropriate runner.
|
// RunParallel runs tests in parallel using the appropriate runner.
|
||||||
|
|
@ -169,7 +140,7 @@ func buildPestCommand(opts TestOptions) (string, []string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.JUnit {
|
if opts.JUnit {
|
||||||
args = append(args, "--log-junit", junitReportPath(opts))
|
args = append(args, "--log-junit", "test-results.xml")
|
||||||
}
|
}
|
||||||
|
|
||||||
return cmdName, args
|
return cmdName, args
|
||||||
|
|
@ -214,34 +185,8 @@ func buildPHPUnitCommand(opts TestOptions) (string, []string) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.JUnit {
|
if opts.JUnit {
|
||||||
args = append(args, "--log-junit", junitReportPath(opts))
|
args = append(args, "--log-junit", "test-results.xml", "--testdox")
|
||||||
}
|
}
|
||||||
|
|
||||||
return cmdName, args
|
return cmdName, args
|
||||||
}
|
}
|
||||||
|
|
||||||
func junitReportPath(opts TestOptions) string {
|
|
||||||
if opts.JUnitPath != "" {
|
|
||||||
return opts.JUnitPath
|
|
||||||
}
|
|
||||||
return "test-results.xml"
|
|
||||||
}
|
|
||||||
|
|
||||||
func emitJUnitReport(output io.Writer, reportPath string) error {
|
|
||||||
report, err := os.ReadFile(reportPath)
|
|
||||||
if err != nil {
|
|
||||||
return coreerr.E("php.emitJUnitReport", "read JUnit report", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := output.Write(report); err != nil {
|
|
||||||
return coreerr.E("php.emitJUnitReport", "write JUnit report", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(report) == 0 || report[len(report)-1] != '\n' {
|
|
||||||
if _, err := io.WriteString(output, "\n"); err != nil {
|
|
||||||
return coreerr.E("php.emitJUnitReport", "terminate JUnit report", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
|
||||||
|
|
@ -288,7 +288,7 @@ func TestBuildPHPUnitCommand_Good_JUnit(t *testing.T) {
|
||||||
|
|
||||||
assert.Contains(t, args, "--log-junit")
|
assert.Contains(t, args, "--log-junit")
|
||||||
assert.Contains(t, args, "test-results.xml")
|
assert.Contains(t, args, "test-results.xml")
|
||||||
assert.NotContains(t, args, "--testdox")
|
assert.Contains(t, args, "--testdox")
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestBuildPHPUnitCommand_Good_AllFlags(t *testing.T) {
|
func TestBuildPHPUnitCommand_Good_AllFlags(t *testing.T) {
|
||||||
|
|
@ -313,5 +313,5 @@ func TestBuildPHPUnitCommand_Good_AllFlags(t *testing.T) {
|
||||||
assert.Contains(t, args, "--group")
|
assert.Contains(t, args, "--group")
|
||||||
assert.Contains(t, args, "feature")
|
assert.Contains(t, args, "feature")
|
||||||
assert.Contains(t, args, "--log-junit")
|
assert.Contains(t, args, "--log-junit")
|
||||||
assert.NotContains(t, args, "--testdox")
|
assert.Contains(t, args, "--testdox")
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,53 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
run_capture_stdout() {
|
|
||||||
local expected_status="$1"
|
|
||||||
local output_file="$2"
|
|
||||||
shift 2
|
|
||||||
|
|
||||||
set +e
|
|
||||||
"$@" >"$output_file"
|
|
||||||
local status=$?
|
|
||||||
set -e
|
|
||||||
|
|
||||||
if [[ "$status" -ne "$expected_status" ]]; then
|
|
||||||
printf 'expected exit %s, got %s\n' "$expected_status" "$status" >&2
|
|
||||||
if [[ -s "$output_file" ]]; then
|
|
||||||
printf 'stdout:\n' >&2
|
|
||||||
cat "$output_file" >&2
|
|
||||||
fi
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
run_capture_all() {
|
|
||||||
local expected_status="$1"
|
|
||||||
local output_file="$2"
|
|
||||||
shift 2
|
|
||||||
|
|
||||||
set +e
|
|
||||||
"$@" >"$output_file" 2>&1
|
|
||||||
local status=$?
|
|
||||||
set -e
|
|
||||||
|
|
||||||
if [[ "$status" -ne "$expected_status" ]]; then
|
|
||||||
printf 'expected exit %s, got %s\n' "$expected_status" "$status" >&2
|
|
||||||
if [[ -s "$output_file" ]]; then
|
|
||||||
printf 'output:\n' >&2
|
|
||||||
cat "$output_file" >&2
|
|
||||||
fi
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_jq() {
|
|
||||||
local expression="$1"
|
|
||||||
local input_file="$2"
|
|
||||||
jq -e "$expression" "$input_file" >/dev/null
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_contains() {
|
|
||||||
local needle="$1"
|
|
||||||
local input_file="$2"
|
|
||||||
grep -Fq "$needle" "$input_file"
|
|
||||||
}
|
|
||||||
|
|
@ -1,13 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- task -d check test
|
|
||||||
- task -d catalog/list test
|
|
||||||
- task -d catalog/show test
|
|
||||||
- task -d detect test
|
|
||||||
- task -d tools test
|
|
||||||
- task -d init test
|
|
||||||
- task -d run test
|
|
||||||
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../../cmd/core-lint
|
|
||||||
|
|
||||||
lang="$(cat fixtures/lang.txt)"
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_all 0 "$output" ./bin/core-lint lint catalog list --lang "$lang"
|
|
||||||
grep -Fq "go-sec-001" "$output"
|
|
||||||
grep -Fq "rule(s)" "$output"
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
go
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../../cmd/core-lint
|
|
||||||
|
|
||||||
rule_id="$(cat fixtures/rule-id.txt)"
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core-lint lint catalog show "$rule_id"
|
|
||||||
jq -e '.id == "go-sec-001" and .severity == "high" and (.languages | index("go") != null)' "$output" >/dev/null
|
|
||||||
jq -e '.title == "SQL wildcard injection in LIKE clauses"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
go-sec-001
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../cmd/core-lint
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core-lint lint check --format=json fixtures
|
|
||||||
jq -e 'length == 1 and .[0].rule_id == "go-cor-003" and .[0].file == "input.go"' "$output" >/dev/null
|
|
||||||
jq -e '.[0].severity == "medium" and .[0].fix != ""' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,12 +0,0 @@
|
||||||
//go:build ignore
|
|
||||||
|
|
||||||
package sample
|
|
||||||
|
|
||||||
type service struct{}
|
|
||||||
|
|
||||||
func (service) Process(string) error { return nil }
|
|
||||||
|
|
||||||
func Run() {
|
|
||||||
svc := service{}
|
|
||||||
_ = svc.Process("data")
|
|
||||||
}
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../cmd/core-lint
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core-lint detect --output json ../check/fixtures
|
|
||||||
jq -e '. == ["go"]' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../cmd/core-lint
|
|
||||||
|
|
||||||
project_dir="$(mktemp -d)"
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core-lint init "$project_dir"
|
|
||||||
test -f "$project_dir/.core/lint.yaml"
|
|
||||||
grep -Fq "golangci-lint" "$project_dir/.core/lint.yaml"
|
|
||||||
EOF
|
|
||||||
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../cmd/core-lint
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 1 "$output" ./bin/core-lint run --output json --fail-on warning ../check/fixtures
|
|
||||||
jq -e '.findings | length == 1' "$output" >/dev/null
|
|
||||||
jq -e '.findings[0].code == "go-cor-003"' "$output" >/dev/null
|
|
||||||
jq -e '.summary.warnings == 1 and .summary.passed == false' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core-lint ../../../../cmd/core-lint
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core-lint tools --output json --lang go
|
|
||||||
jq -e '.[] | select(.name == "golangci-lint")' "$output" >/dev/null
|
|
||||||
jq -e '.[] | select(.name == "govulncheck")' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
||||||
|
|
@ -1,11 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"forge.lthn.ai/core/cli/pkg/cli"
|
|
||||||
_ "forge.lthn.ai/core/lint/cmd/qa"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
cli.WithAppName("core")
|
|
||||||
cli.Main()
|
|
||||||
}
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="$(pwd)/../bin:$PATH"
|
|
||||||
run_capture_stdout 1 "$output" ../../bin/core qa audit --json
|
|
||||||
jq -e '.results[0].tool == "composer" and .results[0].vulnerabilities == 1' "$output" >/dev/null
|
|
||||||
jq -e '.has_vulnerabilities == true and .vulnerabilities == 1' "$output" >/dev/null
|
|
||||||
jq -e '.results[0].advisories[0].package == "vendor/package-a"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
cat <<'JSON'
|
|
||||||
{
|
|
||||||
"advisories": {
|
|
||||||
"vendor/package-a": [
|
|
||||||
{
|
|
||||||
"title": "Remote Code Execution",
|
|
||||||
"link": "https://example.com/advisory/1",
|
|
||||||
"cve": "CVE-2026-0001",
|
|
||||||
"affectedVersions": ">=1.0,<1.5"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
JSON
|
|
||||||
exit 1
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return "bad";
|
|
||||||
}
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 1 "$output" ./bin/core qa docblock --json --threshold 100 fixtures/src
|
|
||||||
jq -e '(.passed == false) and (.coverage < .threshold)' "$output" >/dev/null
|
|
||||||
jq -e '(.missing | length == 1) and (.missing[0].name == "Beta")' "$output" >/dev/null
|
|
||||||
jq -e '(.warnings | length == 1) and (.warnings[0].path == "fixtures/src")' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,6 +0,0 @@
|
||||||
//go:build ignore
|
|
||||||
|
|
||||||
package sample
|
|
||||||
|
|
||||||
// Alpha demonstrates a documented exported function.
|
|
||||||
func Alpha() {}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
//go:build ignore
|
|
||||||
|
|
||||||
package sample
|
|
||||||
|
|
||||||
func Beta() {}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
//go:build ignore
|
|
||||||
|
|
||||||
package sample
|
|
||||||
|
|
||||||
func Broken(
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="../bin:$PATH"
|
|
||||||
run_capture_stdout 0 "$output" ../../bin/core qa fmt --json
|
|
||||||
jq -e '.tool == "pint" and .changed == true and .files[0].path == "src/Bad.php"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return "bad";
|
|
||||||
}
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
printf '%s\n' '{"tool":"pint","changed":true,"files":[{"path":"src/Bad.php","fixed":1}]}'
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="$(pwd)/fixtures/bin:$PATH"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core qa health --registry fixtures/repos.yaml --json
|
|
||||||
jq -e '.summary.total_repos == 2 and .summary.filtered_repos == 2' "$output" >/dev/null
|
|
||||||
jq -e '.summary.passing == 1 and .summary.errors == 1' "$output" >/dev/null
|
|
||||||
jq -e '.repos[0].status == "error" and .repos[0].name == "beta"' "$output" >/dev/null
|
|
||||||
jq -e '.repos[1].status == "passing" and .repos[1].name == "alpha"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
case "$*" in
|
|
||||||
*"--repo forge/alpha"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"status": "completed",
|
|
||||||
"conclusion": "success",
|
|
||||||
"name": "CI",
|
|
||||||
"headSha": "abc123",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"url": "https://example.com/alpha/run/1"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"--repo forge/beta"*)
|
|
||||||
printf '%s\n' 'simulated workflow lookup failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_all 1 "$output" ../../bin/core qa infection --min-msi 80 --min-covered-msi 90 --threads 8 --filter src --only-covered
|
|
||||||
grep -Fq "Mutation Testing" "$output"
|
|
||||||
grep -Fq -- "--min-msi=80" "$output"
|
|
||||||
grep -Fq -- "--min-covered-msi=90" "$output"
|
|
||||||
grep -Fq -- "--threads=8" "$output"
|
|
||||||
grep -Fq -- "--filter=src" "$output"
|
|
||||||
grep -Fq -- "--only-covered" "$output"
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return "bad";
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
printf '%s\n' "infection args: $*"
|
|
||||||
exit 1
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="$(pwd)/fixtures/bin:$PATH"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core qa issues --registry fixtures/repos.yaml --json
|
|
||||||
jq -e '.total_issues == 1 and .filtered_issues == 1' "$output" >/dev/null
|
|
||||||
jq -e '.categories[0].category == "needs_response" and .categories[0].issues[0].repo_name == "alpha"' "$output" >/dev/null
|
|
||||||
jq -e '.categories[0].issues[0].action_hint != ""' "$output" >/dev/null
|
|
||||||
jq -e '.fetch_errors[0].repo == "beta"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
case "$*" in
|
|
||||||
*"api user"*)
|
|
||||||
printf '%s\n' 'alice'
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/alpha"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"number": 7,
|
|
||||||
"title": "Clarify agent output",
|
|
||||||
"state": "OPEN",
|
|
||||||
"body": "Explain behaviour",
|
|
||||||
"createdAt": "2026-03-30T00:00:00Z",
|
|
||||||
"updatedAt": "2026-03-30T11:00:00Z",
|
|
||||||
"author": {"login": "bob"},
|
|
||||||
"assignees": {"nodes": [{"login": "alice"}]},
|
|
||||||
"labels": {"nodes": [{"name": "agent:ready"}]},
|
|
||||||
"comments": {
|
|
||||||
"totalCount": 1,
|
|
||||||
"nodes": [
|
|
||||||
{
|
|
||||||
"author": {"login": "carol"},
|
|
||||||
"createdAt": "2026-03-30T10:30:00Z"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"url": "https://example.com/issues/7"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*"issue list --repo forge/beta"*)
|
|
||||||
printf '%s\n' 'simulated issue query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
version: 1
|
|
||||||
org: forge
|
|
||||||
base_path: .
|
|
||||||
repos:
|
|
||||||
alpha:
|
|
||||||
type: module
|
|
||||||
beta:
|
|
||||||
type: module
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_stdout 1 "$output" ../../bin/core qa psalm --json
|
|
||||||
jq -e '.tool == "psalm" and .issues[0].file == "src/Bad.php" and .issues[0].line == 3' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<psalm>
|
|
||||||
<projectFiles>
|
|
||||||
<directory name="src" />
|
|
||||||
</projectFiles>
|
|
||||||
</psalm>
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return $anotherMissingVariable;
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
printf '%s\n' '{"tool":"psalm","issues":[{"file":"src/Bad.php","line":3,"message":"Undefined variable $anotherMissingVariable"}]}'
|
|
||||||
exit 1
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
run_capture_all 1 "$output" ../../bin/core qa rector
|
|
||||||
grep -Fq "Rector Refactoring" "$output"
|
|
||||||
grep -Fq "(dry-run)" "$output"
|
|
||||||
grep -Fq "1 refactoring suggestion in src/Bad.php" "$output"
|
|
||||||
EOF
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
return [];
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return "bad";
|
|
||||||
}
|
|
||||||
|
|
@ -1,4 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
printf '%s\n' '1 refactoring suggestion in src/Bad.php'
|
|
||||||
exit 1
|
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="$(pwd)/fixtures/bin:$PATH"
|
|
||||||
run_capture_stdout 0 "$output" ./bin/core qa review --repo forge/example --json
|
|
||||||
jq -e '.showing_mine == true and .showing_requested == true' "$output" >/dev/null
|
|
||||||
jq -e '.mine | length == 0 and .requested | length == 1' "$output" >/dev/null
|
|
||||||
jq -e '.requested[0].number == 42 and .requested[0].title == "Refine agent output"' "$output" >/dev/null
|
|
||||||
jq -e '.fetch_errors[0].repo == "forge/example" and .fetch_errors[0].scope == "mine"' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,37 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
case "$*" in
|
|
||||||
*"pr list --state open --search author:@me --json"*)
|
|
||||||
printf '%s\n' 'simulated author query failure' >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*"pr list --state open --search review-requested:@me --json"*)
|
|
||||||
cat <<'JSON'
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"number": 42,
|
|
||||||
"title": "Refine agent output",
|
|
||||||
"author": {"login": "alice"},
|
|
||||||
"state": "OPEN",
|
|
||||||
"isDraft": false,
|
|
||||||
"mergeable": "MERGEABLE",
|
|
||||||
"reviewDecision": "",
|
|
||||||
"url": "https://example.com/pull/42",
|
|
||||||
"headRefName": "feature/agent-output",
|
|
||||||
"createdAt": "2026-03-30T00:00:00Z",
|
|
||||||
"updatedAt": "2026-03-30T00:00:00Z",
|
|
||||||
"additions": 12,
|
|
||||||
"deletions": 3,
|
|
||||||
"changedFiles": 2,
|
|
||||||
"statusCheckRollup": {"contexts": []},
|
|
||||||
"reviewRequests": {"nodes": []},
|
|
||||||
"reviews": []
|
|
||||||
}
|
|
||||||
]
|
|
||||||
JSON
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
printf '%s\n' "unexpected gh invocation: $*" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
|
|
@ -1,21 +0,0 @@
|
||||||
version: "3"
|
|
||||||
|
|
||||||
tasks:
|
|
||||||
test:
|
|
||||||
cmds:
|
|
||||||
- |
|
|
||||||
bash <<'EOF'
|
|
||||||
set -euo pipefail
|
|
||||||
source ../../_lib/run.sh
|
|
||||||
|
|
||||||
go build -trimpath -ldflags="-s -w" -o bin/core ../_harness
|
|
||||||
|
|
||||||
cd fixtures/project
|
|
||||||
output="$(mktemp)"
|
|
||||||
export PATH="$(pwd)/../bin:$PATH"
|
|
||||||
run_capture_stdout 1 "$output" ../../bin/core qa security --json
|
|
||||||
jq -e '.summary.total == 4 and .summary.passed == 0' "$output" >/dev/null
|
|
||||||
jq -e '.summary.critical == 3 and .summary.high == 1' "$output" >/dev/null
|
|
||||||
jq -e '.checks[0].id == "app_key_set" and .checks[1].id == "composer_audit"' "$output" >/dev/null
|
|
||||||
jq -e '.checks[] | select(.id == "debug_mode") | .passed == false' "$output" >/dev/null
|
|
||||||
EOF
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
||||||
#!/usr/bin/env sh
|
|
||||||
|
|
||||||
cat <<'JSON'
|
|
||||||
{
|
|
||||||
"advisories": {
|
|
||||||
"vendor/package-a": [
|
|
||||||
{
|
|
||||||
"title": "Remote Code Execution",
|
|
||||||
"link": "https://example.com/advisory/1",
|
|
||||||
"cve": "CVE-2026-0001",
|
|
||||||
"affectedVersions": ">=1.0,<1.5"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
JSON
|
|
||||||
exit 1
|
|
||||||
|
|
@ -1,3 +0,0 @@
|
||||||
APP_DEBUG=true
|
|
||||||
APP_KEY=short
|
|
||||||
APP_URL=http://example.com
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
{}
|
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
function bad_example() {
|
|
||||||
return "bad";
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue