feat: add report output (JSON, JSONL, text, summary)
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
d33d7056f1
commit
4309e17cde
2 changed files with 194 additions and 0 deletions
58
pkg/lint/report.go
Normal file
58
pkg/lint/report.go
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
package lint
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// Summary holds aggregate counts for a set of findings.
type Summary struct {
	// Total is the number of findings summarised.
	Total int `json:"total"`
	// BySeverity maps a severity label (e.g. "high") to the number of
	// findings carrying that severity.
	BySeverity map[string]int `json:"by_severity"`
}
|
||||
|
||||
// Summarise counts findings by severity.
|
||||
func Summarise(findings []Finding) Summary {
|
||||
s := Summary{
|
||||
Total: len(findings),
|
||||
BySeverity: make(map[string]int),
|
||||
}
|
||||
for _, f := range findings {
|
||||
s.BySeverity[f.Severity]++
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// WriteJSON writes findings as a pretty-printed JSON array.
|
||||
func WriteJSON(w io.Writer, findings []Finding) error {
|
||||
if findings == nil {
|
||||
findings = []Finding{}
|
||||
}
|
||||
enc := json.NewEncoder(w)
|
||||
enc.SetIndent("", " ")
|
||||
return enc.Encode(findings)
|
||||
}
|
||||
|
||||
// WriteJSONL writes findings as newline-delimited JSON (one object per line).
|
||||
func WriteJSONL(w io.Writer, findings []Finding) error {
|
||||
for _, f := range findings {
|
||||
data, err := json.Marshal(f)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err := fmt.Fprintf(w, "%s\n", data); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// WriteText writes findings in a human-readable format:
|
||||
//
|
||||
// file:line [severity] title (rule-id)
|
||||
func WriteText(w io.Writer, findings []Finding) {
|
||||
for _, f := range findings {
|
||||
fmt.Fprintf(w, "%s:%d [%s] %s (%s)\n", f.File, f.Line, f.Severity, f.Title, f.RuleID)
|
||||
}
|
||||
}
|
||||
136
pkg/lint/report_test.go
Normal file
136
pkg/lint/report_test.go
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
package lint
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// sampleFindings returns a fixed three-element fixture — two "high"
// and one "medium" severity — shared by the report tests below. The
// exact field values are asserted against by several tests, so they
// must not change.
func sampleFindings() []Finding {
	return []Finding{
		{
			RuleID:   "go-sec-001",
			Title:    "SQL injection",
			Severity: "high",
			File:     "store/query.go",
			Line:     42,
			Match:    `db.Query("SELECT * FROM users WHERE name LIKE ?", "%"+input+"%")`,
			Fix:      "Use parameterised LIKE with EscapeLike()",
		},
		{
			RuleID:   "go-cor-003",
			Title:    "Silent error swallowing",
			Severity: "medium",
			File:     "handler.go",
			Line:     17,
			Match:    `_ = service.Process(data)`,
			Fix:      "Handle the error",
		},
		{
			RuleID:   "go-sec-004",
			Title:    "Non-constant-time auth",
			Severity: "high",
			File:     "auth/check.go",
			Line:     88,
			Match:    `if token == expectedToken {`,
			Fix:      "Use subtle.ConstantTimeCompare",
		},
	}
}
|
||||
|
||||
func TestSummarise_Good(t *testing.T) {
|
||||
findings := sampleFindings()
|
||||
summary := Summarise(findings)
|
||||
|
||||
assert.Equal(t, 3, summary.Total)
|
||||
assert.Equal(t, 2, summary.BySeverity["high"])
|
||||
assert.Equal(t, 1, summary.BySeverity["medium"])
|
||||
}
|
||||
|
||||
func TestSummarise_Good_Empty(t *testing.T) {
|
||||
summary := Summarise(nil)
|
||||
assert.Equal(t, 0, summary.Total)
|
||||
assert.Empty(t, summary.BySeverity)
|
||||
}
|
||||
|
||||
func TestWriteJSON_Good_Roundtrip(t *testing.T) {
|
||||
findings := sampleFindings()
|
||||
var buf bytes.Buffer
|
||||
err := WriteJSON(&buf, findings)
|
||||
require.NoError(t, err)
|
||||
|
||||
var decoded []Finding
|
||||
err = json.Unmarshal(buf.Bytes(), &decoded)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Len(t, decoded, 3)
|
||||
assert.Equal(t, "go-sec-001", decoded[0].RuleID)
|
||||
assert.Equal(t, 42, decoded[0].Line)
|
||||
assert.Equal(t, "handler.go", decoded[1].File)
|
||||
}
|
||||
|
||||
func TestWriteJSON_Good_PrettyPrinted(t *testing.T) {
|
||||
findings := sampleFindings()
|
||||
var buf bytes.Buffer
|
||||
err := WriteJSON(&buf, findings)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Pretty-printed JSON should contain indentation.
|
||||
assert.Contains(t, buf.String(), " ")
|
||||
assert.Contains(t, buf.String(), "\n")
|
||||
}
|
||||
|
||||
func TestWriteJSON_Good_Empty(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := WriteJSON(&buf, nil)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "[]\n", buf.String())
|
||||
}
|
||||
|
||||
func TestWriteJSONL_Good_LineCount(t *testing.T) {
|
||||
findings := sampleFindings()
|
||||
var buf bytes.Buffer
|
||||
err := WriteJSONL(&buf, findings)
|
||||
require.NoError(t, err)
|
||||
|
||||
lines := strings.Split(strings.TrimSpace(buf.String()), "\n")
|
||||
assert.Len(t, lines, 3)
|
||||
|
||||
// Each line should be valid JSON.
|
||||
for _, line := range lines {
|
||||
var f Finding
|
||||
err := json.Unmarshal([]byte(line), &f)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestWriteJSONL_Good_Empty(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
err := WriteJSONL(&buf, nil)
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, buf.String())
|
||||
}
|
||||
|
||||
func TestWriteText_Good(t *testing.T) {
|
||||
findings := sampleFindings()
|
||||
var buf bytes.Buffer
|
||||
WriteText(&buf, findings)
|
||||
|
||||
output := buf.String()
|
||||
assert.Contains(t, output, "store/query.go:42")
|
||||
assert.Contains(t, output, "[high]")
|
||||
assert.Contains(t, output, "SQL injection")
|
||||
assert.Contains(t, output, "go-sec-001")
|
||||
assert.Contains(t, output, "handler.go:17")
|
||||
assert.Contains(t, output, "[medium]")
|
||||
}
|
||||
|
||||
func TestWriteText_Good_Empty(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
WriteText(&buf, nil)
|
||||
assert.Empty(t, buf.String())
|
||||
}
|
||||
Loading…
Add table
Reference in a new issue