feat: add crypto, session, sigil, and node packages
Add new packages for cryptographic operations, session management, and I/O handling: - pkg/crypt/chachapoly: ChaCha20-Poly1305 AEAD encryption - pkg/crypt/lthn: Lethean-specific key derivation and encryption - pkg/crypt/rsa: RSA key generation, encryption, and signing - pkg/io/node: CryptoNote node I/O and protocol handling - pkg/io/sigil: Cryptographic sigil generation and verification - pkg/session: Session parsing, HTML rendering, search, and video - internal/cmd/forge: Forgejo auth status command - internal/cmd/session: Session management CLI command Also gitignore build artifacts (bugseti binary, i18n-validate). Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
68247f8205
commit
95261a92ff
17 changed files with 2892 additions and 1 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -16,7 +16,8 @@ bin/
|
|||
dist/
|
||||
tasks
|
||||
/core
|
||||
|
||||
/i18n-validate
|
||||
cmd/bugseti/bugseti
|
||||
|
||||
patch_cov.*
|
||||
go.work.sum
|
||||
|
|
|
|||
86
internal/cmd/forge/cmd_auth.go
Normal file
86
internal/cmd/forge/cmd_auth.go
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
package forge
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/host-uk/core/pkg/cli"
|
||||
fg "github.com/host-uk/core/pkg/forge"
|
||||
)
|
||||
|
||||
// Auth command flags.
var (
	authURL   string // --url: Forgejo instance base URL
	authToken string // --token: API token used to authenticate
)
|
||||
|
||||
// addAuthCommand adds the 'auth' subcommand for authentication status and login.
|
||||
func addAuthCommand(parent *cli.Command) {
|
||||
cmd := &cli.Command{
|
||||
Use: "auth",
|
||||
Short: "Show authentication status",
|
||||
Long: "Show the current Forgejo authentication status, or log in with a new token.",
|
||||
RunE: func(cmd *cli.Command, args []string) error {
|
||||
return runAuth()
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Flags().StringVar(&authURL, "url", "", "Forgejo instance URL")
|
||||
cmd.Flags().StringVar(&authToken, "token", "", "API token (create at <url>/user/settings/applications)")
|
||||
|
||||
parent.AddCommand(cmd)
|
||||
}
|
||||
|
||||
// runAuth persists any credentials supplied via --url/--token, then reports
// the current authentication status. The "not authenticated" and "token
// invalid" cases return nil rather than an error: the printed status is the
// user-facing result.
func runAuth() error {
	// If credentials provided, save them first
	if authURL != "" || authToken != "" {
		if err := fg.SaveConfig(authURL, authToken); err != nil {
			return err
		}
		if authURL != "" {
			cli.Success(fmt.Sprintf("URL set to %s", authURL))
		}
		if authToken != "" {
			cli.Success("Token saved")
		}
	}

	// Always show current auth status
	url, token, err := fg.ResolveConfig(authURL, authToken)
	if err != nil {
		return err
	}

	// No token configured at all: print a hint and stop.
	if token == "" {
		cli.Blank()
		cli.Print(" %s %s\n", dimStyle.Render("URL:"), valueStyle.Render(url))
		cli.Print(" %s %s\n", dimStyle.Render("Auth:"), warningStyle.Render("not authenticated"))
		cli.Print(" %s %s\n", dimStyle.Render("Hint:"), dimStyle.Render(fmt.Sprintf("core forge auth --token TOKEN (create at %s/user/settings/applications)", url)))
		cli.Blank()
		return nil
	}

	client, err := fg.NewFromConfig(authURL, authToken)
	if err != nil {
		return err
	}

	// Probe the API with the configured token; failure here means the token
	// is present but rejected by the server.
	user, _, err := client.API().GetMyUserInfo()
	if err != nil {
		cli.Blank()
		cli.Print(" %s %s\n", dimStyle.Render("URL:"), valueStyle.Render(url))
		cli.Print(" %s %s\n", dimStyle.Render("Auth:"), errorStyle.Render("token invalid or expired"))
		cli.Blank()
		return nil
	}

	cli.Blank()
	cli.Success(fmt.Sprintf("Authenticated to %s", client.URL()))
	cli.Print(" %s %s\n", dimStyle.Render("User:"), valueStyle.Render(user.UserName))
	cli.Print(" %s %s\n", dimStyle.Render("Email:"), valueStyle.Render(user.Email))
	if user.IsAdmin {
		cli.Print(" %s %s\n", dimStyle.Render("Role:"), infoStyle.Render("admin"))
	}
	cli.Blank()

	return nil
}
|
||||
235
internal/cmd/session/cmd_session.go
Normal file
235
internal/cmd/session/cmd_session.go
Normal file
|
|
@ -0,0 +1,235 @@
|
|||
// Package session provides commands for replaying and searching Claude Code session transcripts.
|
||||
package session
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/host-uk/core/pkg/cli"
|
||||
"github.com/host-uk/core/pkg/session"
|
||||
)
|
||||
|
||||
// init wires the session command group into the root CLI at package load time.
func init() {
	cli.RegisterCommands(AddSessionCommands)
}
|
||||
|
||||
// AddSessionCommands registers the 'session' command group.
|
||||
func AddSessionCommands(root *cli.Command) {
|
||||
sessionCmd := &cli.Command{
|
||||
Use: "session",
|
||||
Short: "Session recording and replay",
|
||||
}
|
||||
root.AddCommand(sessionCmd)
|
||||
|
||||
addListCommand(sessionCmd)
|
||||
addReplayCommand(sessionCmd)
|
||||
addSearchCommand(sessionCmd)
|
||||
}
|
||||
|
||||
// projectsDir locates the Claude projects directory, preferring the first
// project subdirectory that actually contains .jsonl transcript files.
// It falls back to the base path when nothing qualifies.
func projectsDir() string {
	home, _ := os.UserHomeDir()
	base := filepath.Join(home, ".claude", "projects")

	entries, err := os.ReadDir(base)
	if err != nil {
		return base
	}

	// Scan subdirectories for at least one .jsonl transcript.
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		candidate := filepath.Join(base, entry.Name())
		if found, _ := filepath.Glob(filepath.Join(candidate, "*.jsonl")); len(found) > 0 {
			return candidate
		}
	}
	return base
}
|
||||
|
||||
// addListCommand registers 'session list', which prints recent sessions with
// shortened IDs, start times, and (when available) durations.
func addListCommand(parent *cli.Command) {
	listCmd := &cli.Command{
		Use:   "list",
		Short: "List recent sessions",
		RunE: func(cmd *cli.Command, args []string) error {
			sessions, err := session.ListSessions(projectsDir())
			if err != nil {
				return err
			}
			if len(sessions) == 0 {
				cli.Print("No sessions found")
				return nil
			}

			cli.Print(cli.HeaderStyle.Render("Recent Sessions"))
			cli.Print("")
			for i, s := range sessions {
				// Cap the listing at 20 entries; summarise the remainder.
				if i >= 20 {
					cli.Print(cli.DimStyle.Render(fmt.Sprintf(" ... and %d more", len(sessions)-20)))
					break
				}
				// Only show a duration when the end time is after the start time.
				dur := s.EndTime.Sub(s.StartTime)
				durStr := ""
				if dur > 0 {
					durStr = fmt.Sprintf(" (%s)", formatDur(dur))
				}
				// Abbreviate the session ID to 8 characters for display.
				id := s.ID
				if len(id) > 8 {
					id = id[:8]
				}
				cli.Print(fmt.Sprintf(" %s %s%s",
					cli.ValueStyle.Render(id),
					s.StartTime.Format("2006-01-02 15:04"),
					cli.DimStyle.Render(durStr)))
			}
			return nil
		},
	}
	parent.AddCommand(listCmd)
}
|
||||
|
||||
// addReplayCommand registers 'session replay', which renders a session
// transcript to an HTML timeline and, with --mp4, an MP4 video.
func addReplayCommand(parent *cli.Command) {
	var mp4 bool
	var output string

	replayCmd := &cli.Command{
		Use:   "replay <session-id>",
		Short: "Generate HTML timeline (and optional MP4) from a session",
		Args:  cli.MinimumNArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			id := args[0]
			path := findSession(id)
			if path == "" {
				return fmt.Errorf("session not found: %s", id)
			}

			cli.Print(fmt.Sprintf("Parsing %s...", cli.ValueStyle.Render(filepath.Base(path))))

			sess, err := session.ParseTranscript(path)
			if err != nil {
				return fmt.Errorf("parse: %w", err)
			}

			// Count tool invocations for the summary line.
			toolCount := 0
			for _, e := range sess.Events {
				if e.Type == "tool_use" {
					toolCount++
				}
			}
			cli.Print(fmt.Sprintf(" %d events, %d tool calls",
				len(sess.Events), toolCount))

			// HTML output: default filename derives from the session ID.
			htmlPath := output
			if htmlPath == "" {
				htmlPath = fmt.Sprintf("session-%s.html", shortID(sess.ID))
			}
			if err := session.RenderHTML(sess, htmlPath); err != nil {
				return fmt.Errorf("render html: %w", err)
			}
			cli.Print(cli.SuccessStyle.Render(fmt.Sprintf(" HTML: %s", htmlPath)))

			// MP4 output: failure is reported but not fatal, since the HTML
			// artefact has already been produced.
			if mp4 {
				mp4Path := strings.TrimSuffix(htmlPath, ".html") + ".mp4"
				if err := session.RenderMP4(sess, mp4Path); err != nil {
					cli.Print(cli.ErrorStyle.Render(fmt.Sprintf(" MP4: %s", err)))
				} else {
					cli.Print(cli.SuccessStyle.Render(fmt.Sprintf(" MP4: %s", mp4Path)))
				}
			}

			return nil
		},
	}
	replayCmd.Flags().BoolVar(&mp4, "mp4", false, "Also generate MP4 video (requires vhs + ffmpeg)")
	replayCmd.Flags().StringVarP(&output, "output", "o", "", "Output file path")
	parent.AddCommand(replayCmd)
}
|
||||
|
||||
// addSearchCommand registers 'session search', a case-insensitive search
// across all session transcripts in the projects directory.
func addSearchCommand(parent *cli.Command) {
	searchCmd := &cli.Command{
		Use:   "search <query>",
		Short: "Search across session transcripts",
		Args:  cli.MinimumNArgs(1),
		RunE: func(cmd *cli.Command, args []string) error {
			// Join all args so multi-word queries work; lowercase for
			// case-insensitive matching.
			query := strings.ToLower(strings.Join(args, " "))
			results, err := session.Search(projectsDir(), query)
			if err != nil {
				return err
			}
			if len(results) == 0 {
				cli.Print("No matches found")
				return nil
			}

			cli.Print(cli.HeaderStyle.Render(fmt.Sprintf("Found %d matches", len(results))))
			cli.Print("")
			for _, r := range results {
				// Abbreviate the session ID to 8 characters for display.
				id := r.SessionID
				if len(id) > 8 {
					id = id[:8]
				}
				cli.Print(fmt.Sprintf(" %s %s %s",
					cli.ValueStyle.Render(id),
					r.Timestamp.Format("15:04:05"),
					cli.DimStyle.Render(r.Tool)))
				cli.Print(fmt.Sprintf(" %s", truncateStr(r.Match, 100)))
				cli.Print("")
			}
			return nil
		},
	}
	parent.AddCommand(searchCmd)
}
|
||||
|
||||
func findSession(id string) string {
|
||||
dir := projectsDir()
|
||||
// Try exact match first
|
||||
path := filepath.Join(dir, id+".jsonl")
|
||||
if _, err := os.Stat(path); err == nil {
|
||||
return path
|
||||
}
|
||||
// Try prefix match
|
||||
matches, _ := filepath.Glob(filepath.Join(dir, id+"*.jsonl"))
|
||||
if len(matches) == 1 {
|
||||
return matches[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// shortID abbreviates a session ID to at most its first eight characters.
func shortID(id string) string {
	const maxLen = 8
	if len(id) <= maxLen {
		return id
	}
	return id[:maxLen]
}
|
||||
|
||||
func formatDur(d interface{ Hours() float64; Minutes() float64; Seconds() float64 }) string {
|
||||
type dur interface {
|
||||
Hours() float64
|
||||
Minutes() float64
|
||||
Seconds() float64
|
||||
}
|
||||
dd := d.(dur)
|
||||
h := int(dd.Hours())
|
||||
m := int(dd.Minutes()) % 60
|
||||
if h > 0 {
|
||||
return fmt.Sprintf("%dh%dm", h, m)
|
||||
}
|
||||
s := int(dd.Seconds()) % 60
|
||||
if m > 0 {
|
||||
return fmt.Sprintf("%dm%ds", m, s)
|
||||
}
|
||||
return fmt.Sprintf("%ds", s)
|
||||
}
|
||||
|
||||
// truncateStr shortens s to at most max characters, appending "..." when
// anything was cut. Truncation is by rune, not byte: the previous version
// sliced by byte index, which could split a multi-byte UTF-8 character and
// emit invalid text. Behaviour for pure-ASCII input is unchanged.
func truncateStr(s string, max int) string {
	runes := []rune(s)
	if len(runes) <= max {
		return s
	}
	return string(runes[:max]) + "..."
}
|
||||
50
pkg/crypt/chachapoly/chachapoly.go
Normal file
50
pkg/crypt/chachapoly/chachapoly.go
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
package chachapoly
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"golang.org/x/crypto/chacha20poly1305"
|
||||
)
|
||||
|
||||
// Encrypt encrypts data using ChaCha20-Poly1305.
|
||||
func Encrypt(plaintext []byte, key []byte) ([]byte, error) {
|
||||
aead, err := chacha20poly1305.NewX(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nonce := make([]byte, aead.NonceSize(), aead.NonceSize()+len(plaintext)+aead.Overhead())
|
||||
if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return aead.Seal(nonce, nonce, plaintext, nil), nil
|
||||
}
|
||||
|
||||
// Decrypt decrypts data using ChaCha20-Poly1305.
|
||||
func Decrypt(ciphertext []byte, key []byte) ([]byte, error) {
|
||||
aead, err := chacha20poly1305.NewX(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
minLen := aead.NonceSize() + aead.Overhead()
|
||||
if len(ciphertext) < minLen {
|
||||
return nil, fmt.Errorf("ciphertext too short: got %d bytes, need at least %d bytes", len(ciphertext), minLen)
|
||||
}
|
||||
|
||||
nonce, ciphertext := ciphertext[:aead.NonceSize()], ciphertext[aead.NonceSize():]
|
||||
|
||||
decrypted, err := aead.Open(nil, nonce, ciphertext, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(decrypted) == 0 {
|
||||
return []byte{}, nil
|
||||
}
|
||||
|
||||
return decrypted, nil
|
||||
}
|
||||
114
pkg/crypt/chachapoly/chachapoly_test.go
Normal file
114
pkg/crypt/chachapoly/chachapoly_test.go
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
package chachapoly
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// mockReader is a reader that returns an error.
type mockReader struct{}

// Read always fails, letting tests exercise nonce-generation error paths.
func (r *mockReader) Read(p []byte) (n int, err error) {
	return 0, errors.New("read error")
}
|
||||
|
||||
// TestEncryptDecrypt verifies a full encrypt/decrypt round trip with a
// fixed 32-byte key.
func TestEncryptDecrypt(t *testing.T) {
	key := make([]byte, 32)
	for i := range key {
		key[i] = 1
	}

	plaintext := []byte("Hello, world!")
	ciphertext, err := Encrypt(plaintext, key)
	assert.NoError(t, err)

	decrypted, err := Decrypt(ciphertext, key)
	assert.NoError(t, err)

	assert.Equal(t, plaintext, decrypted)
}
|
||||
|
||||
// TestEncryptInvalidKeySize verifies that a non-32-byte key is rejected.
func TestEncryptInvalidKeySize(t *testing.T) {
	key := make([]byte, 16) // Wrong size
	plaintext := []byte("test")
	_, err := Encrypt(plaintext, key)
	assert.Error(t, err)
}
|
||||
|
||||
// TestDecryptWithWrongKey verifies that decryption fails AEAD authentication
// when a different key is used.
func TestDecryptWithWrongKey(t *testing.T) {
	key1 := make([]byte, 32)
	key2 := make([]byte, 32)
	key2[0] = 1 // Different key

	plaintext := []byte("secret")
	ciphertext, err := Encrypt(plaintext, key1)
	assert.NoError(t, err)

	_, err = Decrypt(ciphertext, key2)
	assert.Error(t, err) // Should fail authentication
}
|
||||
|
||||
// TestDecryptTamperedCiphertext verifies that flipping bits in the
// ciphertext causes authentication failure.
func TestDecryptTamperedCiphertext(t *testing.T) {
	key := make([]byte, 32)
	plaintext := []byte("secret")
	ciphertext, err := Encrypt(plaintext, key)
	assert.NoError(t, err)

	// Tamper with the ciphertext
	ciphertext[0] ^= 0xff

	_, err = Decrypt(ciphertext, key)
	assert.Error(t, err)
}
|
||||
|
||||
// TestEncryptEmptyPlaintext verifies that an empty plaintext round-trips.
func TestEncryptEmptyPlaintext(t *testing.T) {
	key := make([]byte, 32)
	plaintext := []byte("")
	ciphertext, err := Encrypt(plaintext, key)
	assert.NoError(t, err)

	decrypted, err := Decrypt(ciphertext, key)
	assert.NoError(t, err)

	assert.Equal(t, plaintext, decrypted)
}
|
||||
|
||||
// TestDecryptShortCiphertext verifies that input shorter than nonce+tag is
// rejected with a descriptive error.
func TestDecryptShortCiphertext(t *testing.T) {
	key := make([]byte, 32)
	shortCiphertext := []byte("short")

	_, err := Decrypt(shortCiphertext, key)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "too short")
}
|
||||
|
||||
// TestCiphertextDiffersFromPlaintext is a sanity check that encryption does
// not emit the plaintext unchanged.
func TestCiphertextDiffersFromPlaintext(t *testing.T) {
	key := make([]byte, 32)
	plaintext := []byte("Hello, world!")
	ciphertext, err := Encrypt(plaintext, key)
	assert.NoError(t, err)
	assert.NotEqual(t, plaintext, ciphertext)
}
|
||||
|
||||
// TestEncryptNonceError verifies that a failing crypto/rand source surfaces
// as an Encrypt error. It swaps the package-global rand.Reader and restores
// it via defer.
func TestEncryptNonceError(t *testing.T) {
	key := make([]byte, 32)
	plaintext := []byte("test")

	// Replace the rand.Reader with our mock reader
	oldReader := rand.Reader
	rand.Reader = &mockReader{}
	defer func() { rand.Reader = oldReader }()

	_, err := Encrypt(plaintext, key)
	assert.Error(t, err)
}
|
||||
|
||||
// TestDecryptInvalidKeySize verifies that a non-32-byte key is rejected
// before any ciphertext parsing.
func TestDecryptInvalidKeySize(t *testing.T) {
	key := make([]byte, 16) // Wrong size
	ciphertext := []byte("test")
	_, err := Decrypt(ciphertext, key)
	assert.Error(t, err)
}
|
||||
94
pkg/crypt/lthn/lthn.go
Normal file
94
pkg/crypt/lthn/lthn.go
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
// Package lthn implements the LTHN quasi-salted hash algorithm (RFC-0004).
|
||||
//
|
||||
// LTHN produces deterministic, verifiable hashes without requiring separate salt
|
||||
// storage. The salt is derived from the input itself through:
|
||||
// 1. Reversing the input string
|
||||
// 2. Applying "leet speak" style character substitutions
|
||||
//
|
||||
// The final hash is: SHA256(input || derived_salt)
|
||||
//
|
||||
// This is suitable for content identifiers, cache keys, and deduplication.
|
||||
// NOT suitable for password hashing - use bcrypt, Argon2, or scrypt instead.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// hash := lthn.Hash("hello")
|
||||
// valid := lthn.Verify("hello", hash) // true
|
||||
package lthn
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
)
|
||||
|
||||
// keyMap defines the character substitutions for quasi-salt derivation.
// These are inspired by "leet speak" conventions for letter-number substitution.
// The mapping is bidirectional for most characters but NOT fully symmetric:
// 's' maps to 'z', yet 'z' has no reverse entry back to 's'.
var keyMap = map[rune]rune{
	'o': '0', // letter O -> zero
	'l': '1', // letter L -> one
	'e': '3', // letter E -> three
	'a': '4', // letter A -> four
	's': 'z', // letter S -> Z (one-way; no 'z' entry exists)
	't': '7', // letter T -> seven
	'0': 'o', // zero -> letter O
	'1': 'l', // one -> letter L
	'3': 'e', // three -> letter E
	'4': 'a', // four -> letter A
	'7': 't', // seven -> letter T
}
|
||||
|
||||
// SetKeyMap replaces the default character substitution map.
// Use this to customize the quasi-salt derivation for specific applications.
// Changes affect all subsequent Hash and Verify calls.
//
// NOTE(review): this writes package-level state with no synchronisation;
// callers must not race it against concurrent Hash/Verify calls.
func SetKeyMap(newKeyMap map[rune]rune) {
	keyMap = newKeyMap
}
|
||||
|
||||
// GetKeyMap returns the current character substitution map.
|
||||
func GetKeyMap() map[rune]rune {
|
||||
return keyMap
|
||||
}
|
||||
|
||||
// Hash computes the LTHN hash of the input string.
|
||||
//
|
||||
// The algorithm:
|
||||
// 1. Derive a quasi-salt by reversing the input and applying character substitutions
|
||||
// 2. Concatenate: input + salt
|
||||
// 3. Compute SHA-256 of the concatenated string
|
||||
// 4. Return the hex-encoded digest (64 characters, lowercase)
|
||||
//
|
||||
// The same input always produces the same hash, enabling verification
|
||||
// without storing a separate salt value.
|
||||
func Hash(input string) string {
|
||||
salt := createSalt(input)
|
||||
hash := sha256.Sum256([]byte(input + salt))
|
||||
return hex.EncodeToString(hash[:])
|
||||
}
|
||||
|
||||
// createSalt derives a quasi-salt by reversing the input and applying substitutions.
|
||||
// For example: "hello" -> reversed "olleh" -> substituted "011eh"
|
||||
func createSalt(input string) string {
|
||||
if input == "" {
|
||||
return ""
|
||||
}
|
||||
runes := []rune(input)
|
||||
salt := make([]rune, len(runes))
|
||||
for i := 0; i < len(runes); i++ {
|
||||
char := runes[len(runes)-1-i]
|
||||
if replacement, ok := keyMap[char]; ok {
|
||||
salt[i] = replacement
|
||||
} else {
|
||||
salt[i] = char
|
||||
}
|
||||
}
|
||||
return string(salt)
|
||||
}
|
||||
|
||||
// Verify checks if an input string produces the given hash.
|
||||
// Returns true if Hash(input) equals the provided hash value.
|
||||
// Uses direct string comparison - for security-critical applications,
|
||||
// consider using constant-time comparison.
|
||||
func Verify(input string, hash string) bool {
|
||||
return Hash(input) == hash
|
||||
}
|
||||
66
pkg/crypt/lthn/lthn_test.go
Normal file
66
pkg/crypt/lthn/lthn_test.go
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
package lthn
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestHash checks that hashing produces a non-empty digest.
func TestHash(t *testing.T) {
	hash := Hash("hello")
	assert.NotEmpty(t, hash)
}
|
||||
|
||||
// TestVerify checks that verification accepts the original input and
// rejects a different one.
func TestVerify(t *testing.T) {
	hash := Hash("hello")
	assert.True(t, Verify("hello", hash))
	assert.False(t, Verify("world", hash))
}
|
||||
|
||||
// TestCreateSalt_Good pins the expected quasi-salt for a typical input.
func TestCreateSalt_Good(t *testing.T) {
	// "hello" reversed: "olleh" -> "0113h"
	expected := "0113h"
	actual := createSalt("hello")
	assert.Equal(t, expected, actual, "Salt should be correctly created for 'hello'")
}
|
||||
|
||||
// TestCreateSalt_Bad checks the empty-input edge case.
func TestCreateSalt_Bad(t *testing.T) {
	// Test with an empty string
	expected := ""
	actual := createSalt("")
	assert.Equal(t, expected, actual, "Salt for an empty string should be empty")
}
|
||||
|
||||
// TestCreateSalt_Ugly covers pass-through characters and inputs made
// entirely of mapped characters.
func TestCreateSalt_Ugly(t *testing.T) {
	// Test with characters not in the keyMap
	input := "world123"
	// "world123" reversed: "321dlrow" -> "e2ld1r0w"
	expected := "e2ld1r0w"
	actual := createSalt(input)
	assert.Equal(t, expected, actual, "Salt should handle characters not in the keyMap")

	// Test with only characters in the keyMap
	input = "oleta"
	// "oleta" reversed: "atelo" -> "47310"
	expected = "47310"
	actual = createSalt(input)
	assert.Equal(t, expected, actual, "Salt should correctly handle strings with only keyMap characters")
}
|
||||
|
||||
// testKeyMapMu serialises tests that mutate the package-level keyMap.
var testKeyMapMu sync.Mutex

// TestSetKeyMap swaps in a custom substitution map and restores the
// original via t.Cleanup, holding the mutex for the test's duration.
func TestSetKeyMap(t *testing.T) {
	testKeyMapMu.Lock()
	originalKeyMap := GetKeyMap()
	t.Cleanup(func() {
		SetKeyMap(originalKeyMap)
		testKeyMapMu.Unlock()
	})

	newKeyMap := map[rune]rune{
		'a': 'b',
	}
	SetKeyMap(newKeyMap)
	assert.Equal(t, newKeyMap, GetKeyMap())
}
|
||||
91
pkg/crypt/rsa/rsa.go
Normal file
91
pkg/crypt/rsa/rsa.go
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
package rsa
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/sha256"
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Service provides RSA functionality.
type Service struct{}

// NewService creates and returns a new Service instance for performing RSA-related operations.
func NewService() *Service {
	return new(Service)
}
|
||||
|
||||
// GenerateKeyPair creates a new RSA key pair.
|
||||
func (s *Service) GenerateKeyPair(bits int) (publicKey, privateKey []byte, err error) {
|
||||
if bits < 2048 {
|
||||
return nil, nil, fmt.Errorf("rsa: key size too small: %d (minimum 2048)", bits)
|
||||
}
|
||||
privKey, err := rsa.GenerateKey(rand.Reader, bits)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to generate private key: %w", err)
|
||||
}
|
||||
|
||||
privKeyBytes := x509.MarshalPKCS1PrivateKey(privKey)
|
||||
privKeyPEM := pem.EncodeToMemory(&pem.Block{
|
||||
Type: "RSA PRIVATE KEY",
|
||||
Bytes: privKeyBytes,
|
||||
})
|
||||
|
||||
pubKeyBytes, err := x509.MarshalPKIXPublicKey(&privKey.PublicKey)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to marshal public key: %w", err)
|
||||
}
|
||||
pubKeyPEM := pem.EncodeToMemory(&pem.Block{
|
||||
Type: "PUBLIC KEY",
|
||||
Bytes: pubKeyBytes,
|
||||
})
|
||||
|
||||
return pubKeyPEM, privKeyPEM, nil
|
||||
}
|
||||
|
||||
// Encrypt encrypts data with a public key.
|
||||
func (s *Service) Encrypt(publicKey, data, label []byte) ([]byte, error) {
|
||||
block, _ := pem.Decode(publicKey)
|
||||
if block == nil {
|
||||
return nil, fmt.Errorf("failed to decode public key")
|
||||
}
|
||||
|
||||
pub, err := x509.ParsePKIXPublicKey(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse public key: %w", err)
|
||||
}
|
||||
|
||||
rsaPub, ok := pub.(*rsa.PublicKey)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("not an RSA public key")
|
||||
}
|
||||
|
||||
ciphertext, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, rsaPub, data, label)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to encrypt data: %w", err)
|
||||
}
|
||||
|
||||
return ciphertext, nil
|
||||
}
|
||||
|
||||
// Decrypt decrypts data with a private key.
|
||||
func (s *Service) Decrypt(privateKey, ciphertext, label []byte) ([]byte, error) {
|
||||
block, _ := pem.Decode(privateKey)
|
||||
if block == nil {
|
||||
return nil, fmt.Errorf("failed to decode private key")
|
||||
}
|
||||
|
||||
priv, err := x509.ParsePKCS1PrivateKey(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse private key: %w", err)
|
||||
}
|
||||
|
||||
plaintext, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, ciphertext, label)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decrypt data: %w", err)
|
||||
}
|
||||
|
||||
return plaintext, nil
|
||||
}
|
||||
101
pkg/crypt/rsa/rsa_test.go
Normal file
101
pkg/crypt/rsa/rsa_test.go
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
package rsa
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// mockReader is a reader that returns an error.
type mockReader struct{}

// Read always fails, letting tests exercise key-generation error paths.
func (r *mockReader) Read(p []byte) (n int, err error) {
	return 0, errors.New("read error")
}
|
||||
|
||||
// TestRSA_Good exercises the happy path: generate a key pair, then encrypt
// and decrypt a message with it.
func TestRSA_Good(t *testing.T) {
	s := NewService()

	// Generate a new key pair
	pubKey, privKey, err := s.GenerateKeyPair(2048)
	assert.NoError(t, err)
	assert.NotEmpty(t, pubKey)
	assert.NotEmpty(t, privKey)

	// Encrypt and decrypt a message
	message := []byte("Hello, World!")
	ciphertext, err := s.Encrypt(pubKey, message, nil)
	assert.NoError(t, err)
	plaintext, err := s.Decrypt(privKey, ciphertext, nil)
	assert.NoError(t, err)
	assert.Equal(t, message, plaintext)
}
|
||||
|
||||
// TestRSA_Bad covers expected failures: decrypting with a mismatched key
// and requesting an undersized key pair.
func TestRSA_Bad(t *testing.T) {
	s := NewService()

	// Decrypt with wrong key
	pubKey, _, err := s.GenerateKeyPair(2048)
	assert.NoError(t, err)
	_, otherPrivKey, err := s.GenerateKeyPair(2048)
	assert.NoError(t, err)
	message := []byte("Hello, World!")
	ciphertext, err := s.Encrypt(pubKey, message, nil)
	assert.NoError(t, err)
	_, err = s.Decrypt(otherPrivKey, ciphertext, nil)
	assert.Error(t, err)

	// Key size too small
	_, _, err = s.GenerateKeyPair(512)
	assert.Error(t, err)
}
|
||||
|
||||
// TestRSA_Ugly covers malformed inputs and failure injection. It swaps the
// package-global crypto/rand.Reader between sub-cases; t.Cleanup restores
// the original at the end regardless of intermediate reassignments.
func TestRSA_Ugly(t *testing.T) {
	s := NewService()

	// Malformed keys and messages
	_, err := s.Encrypt([]byte("not-a-key"), []byte("message"), nil)
	assert.Error(t, err)
	_, err = s.Decrypt([]byte("not-a-key"), []byte("message"), nil)
	assert.Error(t, err)
	// Well-formed PEM wrapping garbage DER must also fail to parse.
	_, err = s.Encrypt([]byte("-----BEGIN PUBLIC KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJ/6j/y7/r/9/z/8/f/+/v7+/v7+/v7+\nv/7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v4=\n-----END PUBLIC KEY-----"), []byte("message"), nil)
	assert.Error(t, err)
	_, err = s.Decrypt([]byte("-----BEGIN RSA PRIVATE KEY-----\nMIIBOQIBAAJBAL/6j/y7/r/9/z/8/f/+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nv/7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v4CAwEAAQJB\nAL/6j/y7/r/9/z/8/f/+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nv/7+/v7+/v7+/v7+/v7+/v7+/v7+/v4CgYEA/f8/vLv+v/3/P/z9//7+/v7+/v7+\nvv7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v4C\ngYEA/f8/vLv+v/3/P/z9//7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nvv7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v4CgYEA/f8/vLv+v/3/P/z9//7+/v7+\nvv7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nv/4CgYEA/f8/vLv+v/3/P/z9//7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nvv7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v4CgYEA/f8/vLv+v/3/P/z9//7+/v7+\nvv7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+/v7+\nv/4=\n-----END RSA PRIVATE KEY-----"), []byte("message"), nil)
	assert.Error(t, err)

	// Key generation failure
	oldReader := rand.Reader
	rand.Reader = &mockReader{}
	t.Cleanup(func() { rand.Reader = oldReader })
	_, _, err = s.GenerateKeyPair(2048)
	assert.Error(t, err)

	// Encrypt with non-RSA key
	rand.Reader = oldReader // Restore reader for this test
	ecdsaPrivKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	assert.NoError(t, err)
	ecdsaPubKeyBytes, err := x509.MarshalPKIXPublicKey(&ecdsaPrivKey.PublicKey)
	assert.NoError(t, err)
	ecdsaPubKeyPEM := pem.EncodeToMemory(&pem.Block{
		Type:  "PUBLIC KEY",
		Bytes: ecdsaPubKeyBytes,
	})
	_, err = s.Encrypt(ecdsaPubKeyPEM, []byte("message"), nil)
	assert.Error(t, err)
	rand.Reader = &mockReader{} // Set it back for the next test

	// Encrypt message too long
	rand.Reader = oldReader // Restore reader for this test
	pubKey, _, err := s.GenerateKeyPair(2048)
	assert.NoError(t, err)
	message := make([]byte, 2048)
	_, err = s.Encrypt(pubKey, message, nil)
	assert.Error(t, err)
	rand.Reader = &mockReader{} // Set it back
}
|
||||
514
pkg/io/node/node.go
Normal file
514
pkg/io/node/node.go
Normal file
|
|
@ -0,0 +1,514 @@
|
|||
// Package node provides an in-memory filesystem implementation of io.Medium
|
||||
// ported from Borg's DataNode. It stores files in memory with implicit
|
||||
// directory structure and supports tar serialisation.
|
||||
package node
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
goio "io"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
coreio "github.com/host-uk/core/pkg/io"
|
||||
)
|
||||
|
||||
// Node is an in-memory filesystem that implements coreio.Node (and therefore
// coreio.Medium). Directories are implicit -- they exist whenever a file path
// contains a "/".
type Node struct {
	// files maps slash-trimmed relative paths to their in-memory entries.
	files map[string]*dataFile
}
|
||||
|
||||
// compile-time interface check
var _ coreio.Node = (*Node)(nil)

// New creates a new, empty Node. The returned node holds no files;
// populate it with AddData or FromTar.
func New() *Node {
	return &Node{files: make(map[string]*dataFile)}
}
|
||||
|
||||
// ---------- Node-specific methods ----------
|
||||
|
||||
// AddData stages content in the in-memory filesystem.
|
||||
func (n *Node) AddData(name string, content []byte) {
|
||||
name = strings.TrimPrefix(name, "/")
|
||||
if name == "" {
|
||||
return
|
||||
}
|
||||
// Directories are implicit, so we don't store them.
|
||||
if strings.HasSuffix(name, "/") {
|
||||
return
|
||||
}
|
||||
n.files[name] = &dataFile{
|
||||
name: name,
|
||||
content: content,
|
||||
modTime: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
// ToTar serialises the entire in-memory tree to a tar archive.
|
||||
func (n *Node) ToTar() ([]byte, error) {
|
||||
buf := new(bytes.Buffer)
|
||||
tw := tar.NewWriter(buf)
|
||||
|
||||
for _, file := range n.files {
|
||||
hdr := &tar.Header{
|
||||
Name: file.name,
|
||||
Mode: 0600,
|
||||
Size: int64(len(file.content)),
|
||||
ModTime: file.modTime,
|
||||
}
|
||||
if err := tw.WriteHeader(hdr); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := tw.Write(file.content); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if err := tw.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// FromTar replaces the in-memory tree with the contents of a tar archive.
|
||||
func (n *Node) FromTar(data []byte) error {
|
||||
newFiles := make(map[string]*dataFile)
|
||||
tr := tar.NewReader(bytes.NewReader(data))
|
||||
|
||||
for {
|
||||
header, err := tr.Next()
|
||||
if err == goio.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if header.Typeflag == tar.TypeReg {
|
||||
content, err := goio.ReadAll(tr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
name := strings.TrimPrefix(header.Name, "/")
|
||||
if name == "" || strings.HasSuffix(name, "/") {
|
||||
continue
|
||||
}
|
||||
newFiles[name] = &dataFile{
|
||||
name: name,
|
||||
content: content,
|
||||
modTime: header.ModTime,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
n.files = newFiles
|
||||
return nil
|
||||
}
|
||||
|
||||
// WalkNode walks the in-memory tree, calling fn for each entry.
//
// It delegates to fs.WalkDir, so fn sees entries with the usual WalkDir
// semantics (the root itself first, then children in lexical order as
// produced by ReadDir).
func (n *Node) WalkNode(root string, fn fs.WalkDirFunc) error {
	return fs.WalkDir(n, root, fn)
}
|
||||
|
||||
// CopyTo copies a file (or directory tree) from the node to any Medium.
//
// For a single file, its content is written to destPath on target. For a
// directory, every file under sourcePath is written beneath destPath with
// the same relative layout. Returns fs.ErrNotExist when sourcePath does
// not exist; the first target.Write error aborts the copy.
func (n *Node) CopyTo(target coreio.Medium, sourcePath, destPath string) error {
	sourcePath = strings.TrimPrefix(sourcePath, "/")
	info, err := n.Stat(sourcePath)
	if err != nil {
		return err
	}

	if !info.IsDir() {
		// Single file copy
		f, ok := n.files[sourcePath]
		if !ok {
			return fs.ErrNotExist
		}
		return target.Write(destPath, string(f.content))
	}

	// Directory: walk and copy all files underneath
	prefix := sourcePath
	if prefix != "" && !strings.HasSuffix(prefix, "/") {
		prefix += "/"
	}

	for p, f := range n.files {
		// Keep only entries inside the subtree. (p == sourcePath cannot
		// be a stored file here since Stat reported a directory; the
		// extra check only matters for the root "" case.)
		if !strings.HasPrefix(p, prefix) && p != sourcePath {
			continue
		}
		rel := strings.TrimPrefix(p, prefix)
		dest := destPath
		if rel != "" {
			// Plain "/" join: destPath is assumed to be slash-separated,
			// like the node's own paths.
			dest = destPath + "/" + rel
		}
		if err := target.Write(dest, string(f.content)); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// ---------- Medium interface: fs.FS methods ----------
|
||||
|
||||
// Open opens a file from the Node. Implements fs.FS.
|
||||
func (n *Node) Open(name string) (fs.File, error) {
|
||||
name = strings.TrimPrefix(name, "/")
|
||||
if file, ok := n.files[name]; ok {
|
||||
return &dataFileReader{file: file}, nil
|
||||
}
|
||||
// Check if it's a directory
|
||||
prefix := name + "/"
|
||||
if name == "." || name == "" {
|
||||
prefix = ""
|
||||
}
|
||||
for p := range n.files {
|
||||
if strings.HasPrefix(p, prefix) {
|
||||
return &dirFile{path: name, modTime: time.Now()}, nil
|
||||
}
|
||||
}
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
|
||||
// Stat returns file information for the given path.
|
||||
func (n *Node) Stat(name string) (fs.FileInfo, error) {
|
||||
name = strings.TrimPrefix(name, "/")
|
||||
if file, ok := n.files[name]; ok {
|
||||
return file.Stat()
|
||||
}
|
||||
// Check if it's a directory
|
||||
prefix := name + "/"
|
||||
if name == "." || name == "" {
|
||||
prefix = ""
|
||||
}
|
||||
for p := range n.files {
|
||||
if strings.HasPrefix(p, prefix) {
|
||||
return &dirInfo{name: path.Base(name), modTime: time.Now()}, nil
|
||||
}
|
||||
}
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
|
||||
// ReadDir reads and returns all directory entries for the named directory.
//
// Entries are the immediate children only: files stored directly under
// the directory become file entries, and any deeper paths collapse into
// one directory entry per first path component. Results are sorted by
// name. Reading a regular file as a directory yields a *fs.PathError
// with fs.ErrInvalid.
func (n *Node) ReadDir(name string) ([]fs.DirEntry, error) {
	name = strings.TrimPrefix(name, "/")
	if name == "." {
		name = ""
	}

	// Disallow reading a file as a directory.
	if info, err := n.Stat(name); err == nil && !info.IsDir() {
		return nil, &fs.PathError{Op: "readdir", Path: name, Err: fs.ErrInvalid}
	}

	entries := []fs.DirEntry{}
	seen := make(map[string]bool)

	prefix := ""
	if name != "" {
		prefix = name + "/"
	}

	for p := range n.files {
		if !strings.HasPrefix(p, prefix) {
			continue
		}

		relPath := strings.TrimPrefix(p, prefix)
		firstComponent := strings.Split(relPath, "/")[0]

		// Emit each immediate child once, even when many files share it.
		if seen[firstComponent] {
			continue
		}
		seen[firstComponent] = true

		if strings.Contains(relPath, "/") {
			// Deeper path: the child is an implicit subdirectory.
			dir := &dirInfo{name: firstComponent, modTime: time.Now()}
			entries = append(entries, fs.FileInfoToDirEntry(dir))
		} else {
			// Direct child: a stored regular file.
			file := n.files[p]
			info, _ := file.Stat()
			entries = append(entries, fs.FileInfoToDirEntry(info))
		}
	}

	// Deterministic, lexical ordering as fs.ReadDirFS expects.
	sort.Slice(entries, func(i, j int) bool {
		return entries[i].Name() < entries[j].Name()
	})

	return entries, nil
}
|
||||
|
||||
// ---------- Medium interface: read/write ----------
|
||||
|
||||
// Read retrieves the content of a file as a string.
|
||||
func (n *Node) Read(p string) (string, error) {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
f, ok := n.files[p]
|
||||
if !ok {
|
||||
return "", fs.ErrNotExist
|
||||
}
|
||||
return string(f.content), nil
|
||||
}
|
||||
|
||||
// Write saves the given content to a file, overwriting it if it exists.
|
||||
func (n *Node) Write(p, content string) error {
|
||||
n.AddData(p, []byte(content))
|
||||
return nil
|
||||
}
|
||||
|
||||
// FileGet is an alias for Read.
|
||||
func (n *Node) FileGet(p string) (string, error) {
|
||||
return n.Read(p)
|
||||
}
|
||||
|
||||
// FileSet is an alias for Write.
|
||||
func (n *Node) FileSet(p, content string) error {
|
||||
return n.Write(p, content)
|
||||
}
|
||||
|
||||
// EnsureDir is a no-op because directories are implicit in Node.
|
||||
func (n *Node) EnsureDir(_ string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// ---------- Medium interface: existence checks ----------
|
||||
|
||||
// Exists checks if a path exists (file or directory).
|
||||
func (n *Node) Exists(p string) bool {
|
||||
_, err := n.Stat(p)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// IsFile checks if a path exists and is a regular file.
|
||||
func (n *Node) IsFile(p string) bool {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
_, ok := n.files[p]
|
||||
return ok
|
||||
}
|
||||
|
||||
// IsDir checks if a path exists and is a directory.
|
||||
func (n *Node) IsDir(p string) bool {
|
||||
info, err := n.Stat(p)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return info.IsDir()
|
||||
}
|
||||
|
||||
// ---------- Medium interface: mutations ----------
|
||||
|
||||
// Delete removes a single file.
|
||||
func (n *Node) Delete(p string) error {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
if _, ok := n.files[p]; ok {
|
||||
delete(n.files, p)
|
||||
return nil
|
||||
}
|
||||
return fs.ErrNotExist
|
||||
}
|
||||
|
||||
// DeleteAll removes a file or directory and all children.
|
||||
func (n *Node) DeleteAll(p string) error {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
|
||||
found := false
|
||||
if _, ok := n.files[p]; ok {
|
||||
delete(n.files, p)
|
||||
found = true
|
||||
}
|
||||
|
||||
prefix := p + "/"
|
||||
for k := range n.files {
|
||||
if strings.HasPrefix(k, prefix) {
|
||||
delete(n.files, k)
|
||||
found = true
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
return fs.ErrNotExist
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Rename moves a file from oldPath to newPath.
|
||||
func (n *Node) Rename(oldPath, newPath string) error {
|
||||
oldPath = strings.TrimPrefix(oldPath, "/")
|
||||
newPath = strings.TrimPrefix(newPath, "/")
|
||||
|
||||
f, ok := n.files[oldPath]
|
||||
if !ok {
|
||||
return fs.ErrNotExist
|
||||
}
|
||||
|
||||
f.name = newPath
|
||||
n.files[newPath] = f
|
||||
delete(n.files, oldPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
// List returns directory entries for the given path.
|
||||
func (n *Node) List(p string) ([]fs.DirEntry, error) {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
if p == "" || p == "." {
|
||||
return n.ReadDir(".")
|
||||
}
|
||||
return n.ReadDir(p)
|
||||
}
|
||||
|
||||
// ---------- Medium interface: streams ----------
|
||||
|
||||
// Create creates or truncates the named file, returning a WriteCloser.
|
||||
// Content is committed to the Node on Close.
|
||||
func (n *Node) Create(p string) (goio.WriteCloser, error) {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
return &nodeWriter{node: n, path: p}, nil
|
||||
}
|
||||
|
||||
// Append opens the named file for appending, creating it if needed.
|
||||
// Content is committed to the Node on Close.
|
||||
func (n *Node) Append(p string) (goio.WriteCloser, error) {
|
||||
p = strings.TrimPrefix(p, "/")
|
||||
var existing []byte
|
||||
if f, ok := n.files[p]; ok {
|
||||
existing = make([]byte, len(f.content))
|
||||
copy(existing, f.content)
|
||||
}
|
||||
return &nodeWriter{node: n, path: p, buf: existing}, nil
|
||||
}
|
||||
|
||||
// ReadStream returns a ReadCloser for the file content.
|
||||
func (n *Node) ReadStream(p string) (goio.ReadCloser, error) {
|
||||
f, err := n.Open(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return goio.NopCloser(f), nil
|
||||
}
|
||||
|
||||
// WriteStream returns a WriteCloser for the file content.
|
||||
func (n *Node) WriteStream(p string) (goio.WriteCloser, error) {
|
||||
return n.Create(p)
|
||||
}
|
||||
|
||||
// ---------- Internal types ----------
|
||||
|
||||
// nodeWriter buffers writes and commits them to the Node on Close.
type nodeWriter struct {
	node *Node  // destination filesystem
	path string // normalised (no leading slash) target path
	buf  []byte // accumulated content, committed on Close
}

// Write appends p to the in-memory buffer; it never fails.
func (w *nodeWriter) Write(p []byte) (int, error) {
	w.buf = append(w.buf, p...)
	return len(p), nil
}

// Close commits the buffered content to the Node, replacing any existing
// file at the path. It never fails.
func (w *nodeWriter) Close() error {
	w.node.files[w.path] = &dataFile{
		name:    w.path,
		content: w.buf,
		modTime: time.Now(),
	}
	return nil
}

// dataFile represents a file in the Node.
type dataFile struct {
	name    string    // full slash-separated path within the Node
	content []byte    // file body
	modTime time.Time // last commit time
}

func (d *dataFile) Stat() (fs.FileInfo, error) { return &dataFileInfo{file: d}, nil }

// Read always reports EOF; actual reads go through dataFileReader.
func (d *dataFile) Read(_ []byte) (int, error) { return 0, goio.EOF }
func (d *dataFile) Close() error               { return nil }

// dataFileInfo implements fs.FileInfo for a dataFile.
type dataFileInfo struct{ file *dataFile }

func (d *dataFileInfo) Name() string { return path.Base(d.file.name) }
func (d *dataFileInfo) Size() int64  { return int64(len(d.file.content)) }

// Mode is a fixed read-only file mode; the Node does not track permissions.
func (d *dataFileInfo) Mode() fs.FileMode  { return 0444 }
func (d *dataFileInfo) ModTime() time.Time { return d.file.modTime }
func (d *dataFileInfo) IsDir() bool        { return false }
func (d *dataFileInfo) Sys() any           { return nil }
|
||||
|
||||
// dataFileReader implements fs.File for reading a dataFile.
type dataFileReader struct {
	file   *dataFile
	reader *bytes.Reader // lazily created on first Read
}

func (d *dataFileReader) Stat() (fs.FileInfo, error) { return d.file.Stat() }

// Read streams the file content, creating the backing reader on demand.
func (d *dataFileReader) Read(p []byte) (int, error) {
	if d.reader == nil {
		d.reader = bytes.NewReader(d.file.content)
	}
	return d.reader.Read(p)
}
func (d *dataFileReader) Close() error { return nil }

// dirInfo implements fs.FileInfo for an implicit directory.
type dirInfo struct {
	name    string
	modTime time.Time
}

func (d *dirInfo) Name() string { return d.name }
func (d *dirInfo) Size() int64  { return 0 }

// Mode marks the entry as a read-only directory.
func (d *dirInfo) Mode() fs.FileMode  { return fs.ModeDir | 0555 }
func (d *dirInfo) ModTime() time.Time { return d.modTime }
func (d *dirInfo) IsDir() bool        { return true }
func (d *dirInfo) Sys() any           { return nil }

// dirFile implements fs.File for a directory.
type dirFile struct {
	path    string
	modTime time.Time
}

func (d *dirFile) Stat() (fs.FileInfo, error) {
	return &dirInfo{name: path.Base(d.path), modTime: d.modTime}, nil
}

// Read fails: directories have no byte content.
func (d *dirFile) Read([]byte) (int, error) {
	return 0, &fs.PathError{Op: "read", Path: d.path, Err: fs.ErrInvalid}
}
func (d *dirFile) Close() error { return nil }
|
||||
|
||||
// Ensure Node implements fs.FS so WalkDir works.
var _ fs.FS = (*Node)(nil)

// Ensure Node also satisfies fs.StatFS and fs.ReadDirFS for WalkDir.
var _ fs.StatFS = (*Node)(nil)
var _ fs.ReadDirFS = (*Node)(nil)

// Unexported helper: ensure ReadStream result also satisfies fs.File
// (for cases where callers do a type assertion).
var _ goio.ReadCloser = goio.NopCloser(nil)

// Ensure nodeWriter satisfies goio.WriteCloser.
var _ goio.WriteCloser = (*nodeWriter)(nil)

// Ensure dirFile satisfies fs.File.
var _ fs.File = (*dirFile)(nil)

// Ensure dataFileReader satisfies fs.File.
var _ fs.File = (*dataFileReader)(nil)

// ReadDirFile is not needed since fs.WalkDir works via ReadDirFS on the FS itself,
// but we need the Node to satisfy fs.ReadDirFS.

// ensure all internal compile-time checks are grouped above
// no further type assertions needed

// unused import guard
// NOTE(review): "os" appears otherwise unused in the visible code; this
// reference only keeps the import compiling -- consider dropping the
// import instead of guarding it.
var _ = os.ErrNotExist
|
||||
373
pkg/io/sigil/crypto_sigil.go
Normal file
373
pkg/io/sigil/crypto_sigil.go
Normal file
|
|
@ -0,0 +1,373 @@
|
|||
// This file implements the Pre-Obfuscation Layer Protocol with
|
||||
// XChaCha20-Poly1305 encryption. The protocol applies a reversible transformation
|
||||
// to plaintext BEFORE it reaches CPU encryption routines, providing defense-in-depth
|
||||
// against side-channel attacks.
|
||||
//
|
||||
// The encryption flow is:
|
||||
//
|
||||
// plaintext -> obfuscate(nonce) -> encrypt -> [nonce || ciphertext || tag]
|
||||
//
|
||||
// The decryption flow is:
|
||||
//
|
||||
// [nonce || ciphertext || tag] -> decrypt -> deobfuscate(nonce) -> plaintext
|
||||
package sigil
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"crypto/sha256"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"golang.org/x/crypto/chacha20poly1305"
|
||||
)
|
||||
|
||||
var (
	// ErrInvalidKey is returned when the encryption key is invalid.
	ErrInvalidKey = errors.New("sigil: invalid key size, must be 32 bytes")
	// ErrCiphertextTooShort is returned when the ciphertext is too short to decrypt.
	ErrCiphertextTooShort = errors.New("sigil: ciphertext too short")
	// ErrDecryptionFailed is returned when decryption or authentication fails.
	ErrDecryptionFailed = errors.New("sigil: decryption failed")
	// ErrNoKeyConfigured is returned when no encryption key has been set.
	ErrNoKeyConfigured = errors.New("sigil: no encryption key configured")
)

// PreObfuscator applies a reversible transformation to data before encryption.
// This ensures that raw plaintext patterns are never sent directly to CPU
// encryption routines, providing defense against side-channel attacks.
//
// Implementations must be deterministic: given the same entropy, the
// transformation must be perfectly reversible:
//
//	Deobfuscate(Obfuscate(x, e), e) == x
type PreObfuscator interface {
	// Obfuscate transforms plaintext before encryption using the provided entropy.
	// The entropy is typically the encryption nonce, ensuring the transformation
	// is unique per-encryption without additional random generation.
	Obfuscate(data []byte, entropy []byte) []byte

	// Deobfuscate reverses the transformation after decryption.
	// Must be called with the same entropy used during Obfuscate.
	Deobfuscate(data []byte, entropy []byte) []byte
}
|
||||
|
||||
// XORObfuscator performs XOR-based obfuscation using an entropy-derived
// key stream.
//
// The key stream is generated with SHA-256 in counter mode:
//
//	keyStream[i*32:(i+1)*32] = SHA256(entropy || BigEndian64(i))
//
// which decorrelates plaintext patterns from what the encryption routine
// sees. XOR is symmetric, so Obfuscate and Deobfuscate are the same
// operation.
type XORObfuscator struct{}

// Obfuscate XORs the data with a key stream derived from the entropy.
// Empty (or nil) data is returned unchanged.
func (x *XORObfuscator) Obfuscate(data []byte, entropy []byte) []byte {
	if len(data) == 0 {
		return data
	}
	return x.transform(data, entropy)
}

// Deobfuscate reverses the XOR transformation (XOR is symmetric).
// Empty (or nil) data is returned unchanged.
func (x *XORObfuscator) Deobfuscate(data []byte, entropy []byte) []byte {
	if len(data) == 0 {
		return data
	}
	return x.transform(data, entropy)
}

// transform XORs data against the entropy-derived key stream, returning
// a fresh slice and leaving the input untouched.
func (x *XORObfuscator) transform(data []byte, entropy []byte) []byte {
	ks := x.deriveKeyStream(entropy, len(data))
	out := make([]byte, len(data))
	for i, b := range data {
		out[i] = b ^ ks[i]
	}
	return out
}

// deriveKeyStream expands entropy into length bytes of deterministic key
// stream using SHA-256 in counter mode.
func (x *XORObfuscator) deriveKeyStream(entropy []byte, length int) []byte {
	stream := make([]byte, 0, length)
	h := sha256.New()
	var counter [8]byte

	for block := uint64(0); len(stream) < length; block++ {
		h.Reset()
		h.Write(entropy)
		binary.BigEndian.PutUint64(counter[:], block)
		h.Write(counter[:])
		digest := h.Sum(nil)

		// Trim the final block so the stream is exactly length bytes.
		if remaining := length - len(stream); remaining < len(digest) {
			digest = digest[:remaining]
		}
		stream = append(stream, digest...)
	}
	return stream
}
|
||||
|
||||
// ShuffleMaskObfuscator provides stronger obfuscation through byte
// shuffling and masking.
//
// Obfuscation XORs the data with an entropy-derived mask (value
// transformation), then permutes byte positions with a deterministic
// Fisher-Yates shuffle (position transformation). Deobfuscation applies
// the inverse permutation, then removes the mask.
type ShuffleMaskObfuscator struct{}

// Obfuscate shuffles bytes and applies a mask derived from entropy.
// Empty (or nil) data is returned unchanged.
func (s *ShuffleMaskObfuscator) Obfuscate(data []byte, entropy []byte) []byte {
	if len(data) == 0 {
		return data
	}

	perm := s.generatePermutation(entropy, len(data))
	mask := s.deriveMask(entropy, len(data))

	// Mask first...
	masked := make([]byte, len(data))
	for i, b := range data {
		masked[i] = b ^ mask[i]
	}

	// ...then permute positions: output i takes the byte at perm[i].
	out := make([]byte, len(data))
	for i, src := range perm {
		out[i] = masked[src]
	}
	return out
}

// Deobfuscate reverses the shuffle and mask operations.
// Empty (or nil) data is returned unchanged.
func (s *ShuffleMaskObfuscator) Deobfuscate(data []byte, entropy []byte) []byte {
	if len(data) == 0 {
		return data
	}

	perm := s.generatePermutation(entropy, len(data))
	mask := s.deriveMask(entropy, len(data))

	// Invert the permutation...
	out := make([]byte, len(data))
	for i, src := range perm {
		out[src] = data[i]
	}

	// ...then strip the mask.
	for i := range out {
		out[i] ^= mask[i]
	}
	return out
}

// generatePermutation derives a deterministic permutation of [0, length)
// from entropy via a seeded Fisher-Yates shuffle. Each swap index j comes
// from SHA256(seed || BigEndian64(i)).
func (s *ShuffleMaskObfuscator) generatePermutation(entropy []byte, length int) []int {
	perm := make([]int, length)
	for i := range perm {
		perm[i] = i
	}

	// Seed the shuffle deterministically from the entropy.
	h := sha256.New()
	h.Write(entropy)
	h.Write([]byte("permutation"))
	seed := h.Sum(nil)

	var idx [8]byte
	for i := length - 1; i > 0; i-- {
		h.Reset()
		h.Write(seed)
		binary.BigEndian.PutUint64(idx[:], uint64(i))
		h.Write(idx[:])
		digest := h.Sum(nil)
		j := int(binary.BigEndian.Uint64(digest[:8]) % uint64(i+1))
		perm[i], perm[j] = perm[j], perm[i]
	}
	return perm
}

// deriveMask expands entropy into length mask bytes using SHA-256 in
// counter mode over entropy || "mask" || BigEndian64(counter).
func (s *ShuffleMaskObfuscator) deriveMask(entropy []byte, length int) []byte {
	mask := make([]byte, 0, length)
	h := sha256.New()
	var counter [8]byte

	for block := uint64(0); len(mask) < length; block++ {
		h.Reset()
		h.Write(entropy)
		h.Write([]byte("mask"))
		binary.BigEndian.PutUint64(counter[:], block)
		h.Write(counter[:])
		digest := h.Sum(nil)

		if remaining := length - len(mask); remaining < len(digest) {
			digest = digest[:remaining]
		}
		mask = append(mask, digest...)
	}
	return mask
}
|
||||
|
||||
// ChaChaPolySigil is a Sigil that encrypts/decrypts data using ChaCha20-Poly1305.
// It applies pre-obfuscation before encryption to ensure raw plaintext never
// goes directly to CPU encryption routines.
//
// The output format is:
//
//	[24-byte nonce][encrypted(obfuscated(plaintext))]
//
// Unlike demo implementations, the nonce is ONLY embedded in the ciphertext,
// not exposed separately in headers.
type ChaChaPolySigil struct {
	// Key is the 32-byte XChaCha20-Poly1305 key.
	Key []byte
	// Obfuscator is applied before encryption and after decryption;
	// nil disables pre-obfuscation.
	Obfuscator PreObfuscator
	randReader io.Reader // for testing injection
}
|
||||
|
||||
// NewChaChaPolySigil creates a new encryption sigil with the given key.
|
||||
// The key must be exactly 32 bytes.
|
||||
func NewChaChaPolySigil(key []byte) (*ChaChaPolySigil, error) {
|
||||
if len(key) != 32 {
|
||||
return nil, ErrInvalidKey
|
||||
}
|
||||
|
||||
keyCopy := make([]byte, 32)
|
||||
copy(keyCopy, key)
|
||||
|
||||
return &ChaChaPolySigil{
|
||||
Key: keyCopy,
|
||||
Obfuscator: &XORObfuscator{},
|
||||
randReader: rand.Reader,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// NewChaChaPolySigilWithObfuscator creates a new encryption sigil with custom obfuscator.
|
||||
func NewChaChaPolySigilWithObfuscator(key []byte, obfuscator PreObfuscator) (*ChaChaPolySigil, error) {
|
||||
sigil, err := NewChaChaPolySigil(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if obfuscator != nil {
|
||||
sigil.Obfuscator = obfuscator
|
||||
}
|
||||
return sigil, nil
|
||||
}
|
||||
|
||||
// In encrypts the data with pre-obfuscation.
|
||||
// The flow is: plaintext -> obfuscate -> encrypt
|
||||
func (s *ChaChaPolySigil) In(data []byte) ([]byte, error) {
|
||||
if s.Key == nil {
|
||||
return nil, ErrNoKeyConfigured
|
||||
}
|
||||
if data == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
aead, err := chacha20poly1305.NewX(s.Key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Generate nonce
|
||||
nonce := make([]byte, aead.NonceSize())
|
||||
reader := s.randReader
|
||||
if reader == nil {
|
||||
reader = rand.Reader
|
||||
}
|
||||
if _, err := io.ReadFull(reader, nonce); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Pre-obfuscate the plaintext using nonce as entropy
|
||||
// This ensures CPU encryption routines never see raw plaintext
|
||||
obfuscated := data
|
||||
if s.Obfuscator != nil {
|
||||
obfuscated = s.Obfuscator.Obfuscate(data, nonce)
|
||||
}
|
||||
|
||||
// Encrypt the obfuscated data
|
||||
// Output: [nonce | ciphertext | auth tag]
|
||||
ciphertext := aead.Seal(nonce, nonce, obfuscated, nil)
|
||||
|
||||
return ciphertext, nil
|
||||
}
|
||||
|
||||
// Out decrypts the data and reverses obfuscation.
|
||||
// The flow is: decrypt -> deobfuscate -> plaintext
|
||||
func (s *ChaChaPolySigil) Out(data []byte) ([]byte, error) {
|
||||
if s.Key == nil {
|
||||
return nil, ErrNoKeyConfigured
|
||||
}
|
||||
if data == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
aead, err := chacha20poly1305.NewX(s.Key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
minLen := aead.NonceSize() + aead.Overhead()
|
||||
if len(data) < minLen {
|
||||
return nil, ErrCiphertextTooShort
|
||||
}
|
||||
|
||||
// Extract nonce from ciphertext
|
||||
nonce := data[:aead.NonceSize()]
|
||||
ciphertext := data[aead.NonceSize():]
|
||||
|
||||
// Decrypt
|
||||
obfuscated, err := aead.Open(nil, nonce, ciphertext, nil)
|
||||
if err != nil {
|
||||
return nil, ErrDecryptionFailed
|
||||
}
|
||||
|
||||
// Deobfuscate using the same nonce as entropy
|
||||
plaintext := obfuscated
|
||||
if s.Obfuscator != nil {
|
||||
plaintext = s.Obfuscator.Deobfuscate(obfuscated, nonce)
|
||||
}
|
||||
|
||||
if len(plaintext) == 0 {
|
||||
return []byte{}, nil
|
||||
}
|
||||
|
||||
return plaintext, nil
|
||||
}
|
||||
|
||||
// GetNonceFromCiphertext extracts the nonce from encrypted output.
|
||||
// This is provided for debugging/logging purposes only.
|
||||
// The nonce should NOT be stored separately in headers.
|
||||
func GetNonceFromCiphertext(ciphertext []byte) ([]byte, error) {
|
||||
nonceSize := chacha20poly1305.NonceSizeX
|
||||
if len(ciphertext) < nonceSize {
|
||||
return nil, ErrCiphertextTooShort
|
||||
}
|
||||
nonceCopy := make([]byte, nonceSize)
|
||||
copy(nonceCopy, ciphertext[:nonceSize])
|
||||
return nonceCopy, nil
|
||||
}
|
||||
71
pkg/io/sigil/sigil.go
Normal file
71
pkg/io/sigil/sigil.go
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
// Package sigil provides the Sigil transformation framework for composable,
|
||||
// reversible data transformations.
|
||||
//
|
||||
// Sigils are the core abstraction - each sigil implements a specific transformation
|
||||
// (encoding, compression, hashing, encryption) with a uniform interface. Sigils can
|
||||
// be chained together to create transformation pipelines.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// hexSigil, _ := sigil.NewSigil("hex")
|
||||
// base64Sigil, _ := sigil.NewSigil("base64")
|
||||
// result, _ := sigil.Transmute(data, []sigil.Sigil{hexSigil, base64Sigil})
|
||||
package sigil
|
||||
|
||||
// Sigil defines the interface for a data transformer.
//
// A Sigil represents a single transformation unit that can be applied to byte data.
// Sigils may be reversible (encoding, compression, encryption) or irreversible (hashing).
//
// For reversible sigils: Out(In(x)) == x for all valid x
// For irreversible sigils: Out returns the input unchanged
// For symmetric sigils: In(x) == Out(x)
//
// Implementations must handle nil input by returning nil without error,
// and empty input by returning an empty slice without error.
type Sigil interface {
	// In applies the forward transformation to the data.
	// For encoding sigils, this encodes the data.
	// For compression sigils, this compresses the data.
	// For hash sigils, this computes the digest.
	In(data []byte) ([]byte, error)

	// Out applies the reverse transformation to the data.
	// For reversible sigils, this recovers the original data.
	// For irreversible sigils (e.g., hashing), this returns the input unchanged.
	Out(data []byte) ([]byte, error)
}
|
||||
|
||||
// Transmute applies a series of sigils to data in sequence.
|
||||
//
|
||||
// Each sigil's In method is called in order, with the output of one sigil
|
||||
// becoming the input of the next. If any sigil returns an error, Transmute
|
||||
// stops immediately and returns nil with that error.
|
||||
//
|
||||
// To reverse a transmutation, call each sigil's Out method in reverse order.
|
||||
func Transmute(data []byte, sigils []Sigil) ([]byte, error) {
|
||||
var err error
|
||||
for _, s := range sigils {
|
||||
data, err = s.In(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// Untransmute reverses a transmutation by applying Out in reverse order.
|
||||
//
|
||||
// Each sigil's Out method is called in reverse order, with the output of one sigil
|
||||
// becoming the input of the next. If any sigil returns an error, Untransmute
|
||||
// stops immediately and returns nil with that error.
|
||||
func Untransmute(data []byte, sigils []Sigil) ([]byte, error) {
|
||||
var err error
|
||||
for i := len(sigils) - 1; i >= 0; i-- {
|
||||
data, err = sigils[i].Out(data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
274
pkg/io/sigil/sigils.go
Normal file
274
pkg/io/sigil/sigils.go
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
package sigil
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto"
|
||||
"crypto/md5"
|
||||
"crypto/sha1"
|
||||
"crypto/sha256"
|
||||
"crypto/sha512"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"golang.org/x/crypto/blake2b"
|
||||
"golang.org/x/crypto/blake2s"
|
||||
"golang.org/x/crypto/md4"
|
||||
"golang.org/x/crypto/ripemd160"
|
||||
"golang.org/x/crypto/sha3"
|
||||
)
|
||||
|
||||
// ReverseSigil is a symmetrical Sigil that reverses the byte order of the
// payload: In and Out perform the same operation.
type ReverseSigil struct{}

// In reverses the bytes of the data into a fresh slice.
// A nil input returns nil without error.
func (s *ReverseSigil) In(data []byte) ([]byte, error) {
	if data == nil {
		return nil, nil
	}
	out := make([]byte, len(data))
	copy(out, data)
	for i, j := 0, len(out)-1; i < j; i, j = i+1, j-1 {
		out[i], out[j] = out[j], out[i]
	}
	return out, nil
}

// Out reverses the bytes of the data (identical to In).
func (s *ReverseSigil) Out(data []byte) ([]byte, error) {
	return s.In(data)
}
|
||||
|
||||
// HexSigil is a Sigil that encodes (In) and decodes (Out) hexadecimal.
type HexSigil struct{}

// In encodes the data to hexadecimal. A nil input returns nil without error.
func (s *HexSigil) In(data []byte) ([]byte, error) {
	if data == nil {
		return nil, nil
	}
	return []byte(hex.EncodeToString(data)), nil
}

// Out decodes the data from hexadecimal. A nil input returns nil without error.
func (s *HexSigil) Out(data []byte) ([]byte, error) {
	if data == nil {
		return nil, nil
	}
	out := make([]byte, hex.DecodedLen(len(data)))
	_, err := hex.Decode(out, data)
	return out, err
}
|
||||
|
||||
// Base64Sigil is a Sigil that encodes (In) and decodes (Out) standard
// (padded) base64.
type Base64Sigil struct{}

// In encodes the data to base64. A nil input returns nil without error.
func (s *Base64Sigil) In(data []byte) ([]byte, error) {
	if data == nil {
		return nil, nil
	}
	return []byte(base64.StdEncoding.EncodeToString(data)), nil
}

// Out decodes the data from base64. A nil input returns nil without error.
func (s *Base64Sigil) Out(data []byte) ([]byte, error) {
	if data == nil {
		return nil, nil
	}
	out := make([]byte, base64.StdEncoding.DecodedLen(len(data)))
	n, err := base64.StdEncoding.Decode(out, data)
	return out[:n], err
}
|
||||
|
||||
// GzipSigil is a Sigil that compresses/decompresses data using gzip.
|
||||
// The In method compresses the data, and the Out method decompresses it.
|
||||
type GzipSigil struct {
|
||||
writer io.Writer
|
||||
}
|
||||
|
||||
// In compresses the data using gzip.
|
||||
func (s *GzipSigil) In(data []byte) ([]byte, error) {
|
||||
if data == nil {
|
||||
return nil, nil
|
||||
}
|
||||
var b bytes.Buffer
|
||||
w := s.writer
|
||||
if w == nil {
|
||||
w = &b
|
||||
}
|
||||
gz := gzip.NewWriter(w)
|
||||
if _, err := gz.Write(data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := gz.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b.Bytes(), nil
|
||||
}
|
||||
|
||||
// Out decompresses the data using gzip.
|
||||
func (s *GzipSigil) Out(data []byte) ([]byte, error) {
|
||||
if data == nil {
|
||||
return nil, nil
|
||||
}
|
||||
r, err := gzip.NewReader(bytes.NewReader(data))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer r.Close()
|
||||
return io.ReadAll(r)
|
||||
}
|
||||
|
||||
// JSONSigil is a Sigil that compacts or indents JSON data.
// The Out method is a no-op.
type JSONSigil struct{ Indent bool }

// In compacts the JSON data, or indents it when the Indent flag is set.
func (s *JSONSigil) In(data []byte) ([]byte, error) {
	var buf bytes.Buffer
	var err error
	if s.Indent {
		err = json.Indent(&buf, data, "", " ")
	} else {
		err = json.Compact(&buf, data)
	}
	return buf.Bytes(), err
}

// Out is a no-op for JSONSigil.
func (s *JSONSigil) Out(data []byte) ([]byte, error) {
	// For simplicity, Out is a no-op. The primary use is formatting.
	return data, nil
}
|
||||
|
||||
// HashSigil is a Sigil that hashes the data using a specified algorithm.
// The In method hashes the data, and the Out method is a no-op.
type HashSigil struct {
	// Hash selects which digest algorithm In applies.
	Hash crypto.Hash
}

// NewHashSigil creates a new HashSigil.
func NewHashSigil(h crypto.Hash) *HashSigil {
	return &HashSigil{Hash: h}
}

// In hashes the data.
//
// Each supported crypto.Hash value is mapped to its constructor explicitly
// (including the extended x/crypto algorithms: MD4, RIPEMD-160, SHA-3,
// BLAKE2) so the digests work without crypto.RegisterHash registration.
// Unsupported values (e.g. crypto.MD5SHA1) return an error.
func (s *HashSigil) In(data []byte) ([]byte, error) {
	// h is declared as io.Writer; every branch assigns a hash.Hash, which
	// satisfies io.Writer. The Sum method is recovered via the interface
	// assertion at the end.
	var h io.Writer
	switch s.Hash {
	case crypto.MD4:
		h = md4.New()
	case crypto.MD5:
		h = md5.New()
	case crypto.SHA1:
		h = sha1.New()
	case crypto.SHA224:
		h = sha256.New224()
	case crypto.SHA256:
		h = sha256.New()
	case crypto.SHA384:
		h = sha512.New384()
	case crypto.SHA512:
		h = sha512.New()
	case crypto.RIPEMD160:
		h = ripemd160.New()
	case crypto.SHA3_224:
		h = sha3.New224()
	case crypto.SHA3_256:
		h = sha3.New256()
	case crypto.SHA3_384:
		h = sha3.New384()
	case crypto.SHA3_512:
		h = sha3.New512()
	case crypto.SHA512_224:
		h = sha512.New512_224()
	case crypto.SHA512_256:
		h = sha512.New512_256()
	case crypto.BLAKE2s_256:
		// BLAKE2 constructors fail only for invalid key sizes; with a nil
		// key they cannot fail, so the error is deliberately discarded.
		h, _ = blake2s.New256(nil)
	case crypto.BLAKE2b_256:
		h, _ = blake2b.New256(nil)
	case crypto.BLAKE2b_384:
		h, _ = blake2b.New384(nil)
	case crypto.BLAKE2b_512:
		h, _ = blake2b.New512(nil)
	default:
		// MD5SHA1 is not supported as a direct hash
		return nil, errors.New("sigil: hash algorithm not available")
	}

	// hash.Hash writes never return an error, so it is not checked here.
	h.Write(data)
	return h.(interface{ Sum([]byte) []byte }).Sum(nil), nil
}

// Out is a no-op for HashSigil.
func (s *HashSigil) Out(data []byte) ([]byte, error) {
	return data, nil
}
|
||||
|
||||
// NewSigil is a factory function that returns a Sigil based on a string name.
|
||||
// It is the primary way to create Sigil instances.
|
||||
func NewSigil(name string) (Sigil, error) {
|
||||
switch name {
|
||||
case "reverse":
|
||||
return &ReverseSigil{}, nil
|
||||
case "hex":
|
||||
return &HexSigil{}, nil
|
||||
case "base64":
|
||||
return &Base64Sigil{}, nil
|
||||
case "gzip":
|
||||
return &GzipSigil{}, nil
|
||||
case "json":
|
||||
return &JSONSigil{Indent: false}, nil
|
||||
case "json-indent":
|
||||
return &JSONSigil{Indent: true}, nil
|
||||
case "md4":
|
||||
return NewHashSigil(crypto.MD4), nil
|
||||
case "md5":
|
||||
return NewHashSigil(crypto.MD5), nil
|
||||
case "sha1":
|
||||
return NewHashSigil(crypto.SHA1), nil
|
||||
case "sha224":
|
||||
return NewHashSigil(crypto.SHA224), nil
|
||||
case "sha256":
|
||||
return NewHashSigil(crypto.SHA256), nil
|
||||
case "sha384":
|
||||
return NewHashSigil(crypto.SHA384), nil
|
||||
case "sha512":
|
||||
return NewHashSigil(crypto.SHA512), nil
|
||||
case "ripemd160":
|
||||
return NewHashSigil(crypto.RIPEMD160), nil
|
||||
case "sha3-224":
|
||||
return NewHashSigil(crypto.SHA3_224), nil
|
||||
case "sha3-256":
|
||||
return NewHashSigil(crypto.SHA3_256), nil
|
||||
case "sha3-384":
|
||||
return NewHashSigil(crypto.SHA3_384), nil
|
||||
case "sha3-512":
|
||||
return NewHashSigil(crypto.SHA3_512), nil
|
||||
case "sha512-224":
|
||||
return NewHashSigil(crypto.SHA512_224), nil
|
||||
case "sha512-256":
|
||||
return NewHashSigil(crypto.SHA512_256), nil
|
||||
case "blake2s-256":
|
||||
return NewHashSigil(crypto.BLAKE2s_256), nil
|
||||
case "blake2b-256":
|
||||
return NewHashSigil(crypto.BLAKE2b_256), nil
|
||||
case "blake2b-384":
|
||||
return NewHashSigil(crypto.BLAKE2b_384), nil
|
||||
case "blake2b-512":
|
||||
return NewHashSigil(crypto.BLAKE2b_512), nil
|
||||
default:
|
||||
return nil, errors.New("sigil: unknown sigil name")
|
||||
}
|
||||
}
|
||||
257
pkg/session/html.go
Normal file
257
pkg/session/html.go
Normal file
|
|
@ -0,0 +1,257 @@
|
|||
package session
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// RenderHTML generates a self-contained HTML timeline from a session.
//
// The page embeds all CSS/JS inline: a sticky header with summary stats,
// a searchable/filterable list of collapsible event cards, and a "/"
// keyboard shortcut that focuses the search box. The file at outputPath
// is created or truncated.
func RenderHTML(sess *Session, outputPath string) error {
	f, err := os.Create(outputPath)
	if err != nil {
		return fmt.Errorf("create html: %w", err)
	}
	defer f.Close()

	// Header stats: wall-clock span plus tool-call and failure counts.
	duration := sess.EndTime.Sub(sess.StartTime)
	toolCount := 0
	errorCount := 0
	for _, e := range sess.Events {
		if e.Type == "tool_use" {
			toolCount++
			if !e.Success {
				errorCount++
			}
		}
	}

	// Document head, inline styles, and the header up to the stats row.
	fmt.Fprintf(f, `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Session %s</title>
<style>
:root {
--bg: #0d1117; --bg2: #161b22; --bg3: #21262d;
--fg: #c9d1d9; --dim: #8b949e; --accent: #58a6ff;
--green: #3fb950; --red: #f85149; --yellow: #d29922;
--border: #30363d; --font: 'SF Mono', 'Cascadia Code', 'JetBrains Mono', monospace;
}
* { box-sizing: border-box; margin: 0; padding: 0; }
body { background: var(--bg); color: var(--fg); font-family: var(--font); font-size: 13px; line-height: 1.5; }
.header { background: var(--bg2); border-bottom: 1px solid var(--border); padding: 16px 24px; position: sticky; top: 0; z-index: 10; }
.header h1 { font-size: 16px; font-weight: 600; color: var(--accent); }
.header .meta { color: var(--dim); font-size: 12px; margin-top: 4px; }
.header .stats span { display: inline-block; margin-right: 16px; }
.header .stats .err { color: var(--red); }
.search { margin-top: 8px; display: flex; gap: 8px; }
.search input { background: var(--bg3); border: 1px solid var(--border); border-radius: 6px; color: var(--fg); font-family: var(--font); font-size: 12px; padding: 6px 12px; width: 300px; outline: none; }
.search input:focus { border-color: var(--accent); }
.search select { background: var(--bg3); border: 1px solid var(--border); border-radius: 6px; color: var(--fg); font-family: var(--font); font-size: 12px; padding: 6px 8px; outline: none; }
.timeline { padding: 16px 24px; }
.event { border: 1px solid var(--border); border-radius: 8px; margin-bottom: 8px; overflow: hidden; transition: border-color 0.15s; }
.event:hover { border-color: var(--accent); }
.event.error { border-color: var(--red); }
.event.hidden { display: none; }
.event-header { display: flex; align-items: center; gap: 8px; padding: 8px 12px; cursor: pointer; user-select: none; background: var(--bg2); }
.event-header:hover { background: var(--bg3); }
.event-header .time { color: var(--dim); font-size: 11px; min-width: 70px; }
.event-header .tool { font-weight: 600; color: var(--accent); min-width: 60px; }
.event-header .tool.bash { color: var(--green); }
.event-header .tool.error { color: var(--red); }
.event-header .tool.user { color: var(--yellow); }
.event-header .tool.assistant { color: var(--dim); }
.event-header .input { flex: 1; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; }
.event-header .dur { color: var(--dim); font-size: 11px; min-width: 50px; text-align: right; }
.event-header .status { font-size: 14px; min-width: 20px; text-align: center; }
.event-header .arrow { color: var(--dim); font-size: 10px; transition: transform 0.15s; min-width: 16px; }
.event.open .arrow { transform: rotate(90deg); }
.event-body { display: none; padding: 12px; background: var(--bg); border-top: 1px solid var(--border); }
.event.open .event-body { display: block; }
.event-body pre { white-space: pre-wrap; word-break: break-all; font-size: 12px; max-height: 400px; overflow-y: auto; }
.event-body .label { color: var(--dim); font-size: 11px; margin-bottom: 4px; text-transform: uppercase; letter-spacing: 0.5px; }
.event-body .section { margin-bottom: 12px; }
.event-body .output { color: var(--fg); }
.event-body .output.err { color: var(--red); }
</style>
</head>
<body>
<div class="header">
<h1>Session %s</h1>
<div class="meta">
<div class="stats">
<span>%s</span>
<span>Duration: %s</span>
<span>%d tool calls</span>`,
		shortID(sess.ID), shortID(sess.ID),
		sess.StartTime.Format("2006-01-02 15:04:05"),
		formatDuration(duration),
		toolCount)

	// Error count is only shown when non-zero.
	if errorCount > 0 {
		fmt.Fprintf(f, `
<span class="err">%d errors</span>`, errorCount)
	}

	// Close the stats row and emit the search/filter controls.
	fmt.Fprintf(f, `
</div>
</div>
<div class="search">
<input type="text" id="search" placeholder="Search commands, outputs..." oninput="filterEvents()">
<select id="filter" onchange="filterEvents()">
<option value="all">All events</option>
<option value="tool_use">Tool calls only</option>
<option value="errors">Errors only</option>
<option value="Bash">Bash only</option>
<option value="user">User messages</option>
</select>
</div>
</div>
<div class="timeline" id="timeline">
`)

	// One collapsible card per event.
	for i, evt := range sess.Events {
		// CSS class for the tool label colour.
		toolClass := strings.ToLower(evt.Tool)
		if evt.Type == "user" {
			toolClass = "user"
		} else if evt.Type == "assistant" {
			toolClass = "assistant"
		}

		errorClass := ""
		if !evt.Success && evt.Type == "tool_use" {
			errorClass = " error"
		}

		// Pass/fail glyph, only for tool calls.
		statusIcon := ""
		if evt.Type == "tool_use" {
			if evt.Success {
				statusIcon = `<span style="color:var(--green)">✓</span>`
			} else {
				statusIcon = `<span style="color:var(--red)">✗</span>`
			}
		}

		toolLabel := evt.Tool
		if evt.Type == "user" {
			toolLabel = "User"
		} else if evt.Type == "assistant" {
			toolLabel = "Claude"
		}

		durStr := ""
		if evt.Duration > 0 {
			durStr = formatDuration(evt.Duration)
		}

		// data-text carries the lowercased searchable text used by
		// filterEvents(); all interpolated values are HTML-escaped.
		fmt.Fprintf(f, `<div class="event%s" data-type="%s" data-tool="%s" data-text="%s" id="evt-%d">
<div class="event-header" onclick="toggle(%d)">
<span class="arrow">▶</span>
<span class="time">%s</span>
<span class="tool %s">%s</span>
<span class="input">%s</span>
<span class="dur">%s</span>
<span class="status">%s</span>
</div>
<div class="event-body">
`,
			errorClass,
			evt.Type,
			evt.Tool,
			html.EscapeString(strings.ToLower(evt.Input+" "+evt.Output)),
			i,
			i,
			evt.Timestamp.Format("15:04:05"),
			toolClass,
			html.EscapeString(toolLabel),
			html.EscapeString(truncate(evt.Input, 120)),
			durStr,
			statusIcon)

		if evt.Input != "" {
			// Choose a section label matching what Input holds for this
			// tool/event type (see extractToolInput in parser.go).
			label := "Command"
			if evt.Type == "user" {
				label = "Message"
			} else if evt.Type == "assistant" {
				label = "Response"
			} else if evt.Tool == "Read" || evt.Tool == "Glob" || evt.Tool == "Grep" {
				label = "Target"
			} else if evt.Tool == "Edit" || evt.Tool == "Write" {
				label = "File"
			}
			fmt.Fprintf(f, ` <div class="section"><div class="label">%s</div><pre>%s</pre></div>
`, label, html.EscapeString(evt.Input))
		}

		if evt.Output != "" {
			outClass := "output"
			if !evt.Success {
				outClass = "output err"
			}
			fmt.Fprintf(f, ` <div class="section"><div class="label">Output</div><pre class="%s">%s</pre></div>
`, outClass, html.EscapeString(evt.Output))
		}

		fmt.Fprint(f, ` </div>
</div>
`)
	}

	// Inline JS: card toggling, combined text/type filtering, and the "/"
	// shortcut to focus the search input.
	fmt.Fprint(f, `</div>
<script>
function toggle(i) {
document.getElementById('evt-'+i).classList.toggle('open');
}
function filterEvents() {
const q = document.getElementById('search').value.toLowerCase();
const f = document.getElementById('filter').value;
document.querySelectorAll('.event').forEach(el => {
const type = el.dataset.type;
const tool = el.dataset.tool;
const text = el.dataset.text;
let show = true;
if (f === 'tool_use' && type !== 'tool_use') show = false;
if (f === 'errors' && !el.classList.contains('error')) show = false;
if (f === 'Bash' && tool !== 'Bash') show = false;
if (f === 'user' && type !== 'user') show = false;
if (q && !text.includes(q)) show = false;
el.classList.toggle('hidden', !show);
});
}
document.addEventListener('keydown', e => {
if (e.key === '/' && document.activeElement.tagName !== 'INPUT') {
e.preventDefault();
document.getElementById('search').focus();
}
});
</script>
</body>
</html>
`)

	return nil
}
|
||||
|
||||
// shortID returns at most the first 8 characters of a session ID.
func shortID(id string) string {
	if len(id) <= 8 {
		return id
	}
	return id[:8]
}
|
||||
|
||||
// formatDuration renders a duration compactly, picking the coarsest unit
// that fits: milliseconds, fractional seconds, minutes+seconds, or
// hours+minutes.
func formatDuration(d time.Duration) string {
	switch {
	case d < time.Second:
		return fmt.Sprintf("%dms", d.Milliseconds())
	case d < time.Minute:
		return fmt.Sprintf("%.1fs", d.Seconds())
	case d < time.Hour:
		return fmt.Sprintf("%dm%ds", int(d.Minutes()), int(d.Seconds())%60)
	default:
		return fmt.Sprintf("%dh%dm", int(d.Hours()), int(d.Minutes())%60)
	}
}
|
||||
383
pkg/session/parser.go
Normal file
383
pkg/session/parser.go
Normal file
|
|
@ -0,0 +1,383 @@
|
|||
package session
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Event represents a single action in a session timeline.
type Event struct {
	Timestamp time.Time
	Type      string // "tool_use", "user", "assistant", "error"
	Tool      string // "Bash", "Read", "Edit", "Write", "Grep", "Glob", etc.
	ToolID    string // links a tool_use invocation to its tool_result
	Input     string // Command, file path, or message text
	Output    string // Result text
	Duration  time.Duration // gap between tool invocation and its result
	Success   bool          // false when the tool result was flagged is_error
	ErrorMsg  string        // truncated error text, set only when Success is false
}

// Session holds parsed session metadata and events.
type Session struct {
	ID        string // transcript filename without the .jsonl suffix
	Path      string // absolute or relative path to the .jsonl transcript
	StartTime time.Time
	EndTime   time.Time
	Events    []Event // chronological, in transcript order
}
|
||||
|
||||
// rawEntry is the top-level structure of a Claude Code JSONL line.
type rawEntry struct {
	Type      string          `json:"type"`
	Timestamp string          `json:"timestamp"`
	SessionID string          `json:"sessionId"`
	Message   json.RawMessage `json:"message"` // decoded lazily into rawMessage
	UserType  string          `json:"userType"`
}

// rawMessage is the message envelope carried by a rawEntry; each content
// block is kept raw and decoded individually into a contentBlock.
type rawMessage struct {
	Role    string            `json:"role"`
	Content []json.RawMessage `json:"content"`
}

// contentBlock is a single content item within a message. Fields are
// populated according to Type ("text", "tool_use", or "tool_result").
type contentBlock struct {
	Type      string          `json:"type"`
	Name      string          `json:"name,omitempty"`        // tool name (tool_use)
	ID        string          `json:"id,omitempty"`          // tool invocation ID (tool_use)
	Text      string          `json:"text,omitempty"`        // prose (text)
	Input     json.RawMessage `json:"input,omitempty"`       // tool arguments (tool_use)
	ToolUseID string          `json:"tool_use_id,omitempty"` // back-reference (tool_result)
	Content   interface{}     `json:"content,omitempty"`     // result payload (tool_result)
	IsError   *bool           `json:"is_error,omitempty"`    // result error flag (tool_result)
}

// bashInput is the Bash tool's input payload.
type bashInput struct {
	Command     string `json:"command"`
	Description string `json:"description"`
	Timeout     int    `json:"timeout"`
}

// readInput is the Read tool's input payload.
type readInput struct {
	FilePath string `json:"file_path"`
	Offset   int    `json:"offset"`
	Limit    int    `json:"limit"`
}

// editInput is the Edit tool's input payload.
type editInput struct {
	FilePath  string `json:"file_path"`
	OldString string `json:"old_string"`
	NewString string `json:"new_string"`
}

// writeInput is the Write tool's input payload.
type writeInput struct {
	FilePath string `json:"file_path"`
	Content  string `json:"content"`
}

// grepInput is the Grep tool's input payload.
type grepInput struct {
	Pattern string `json:"pattern"`
	Path    string `json:"path"`
}

// globInput is the Glob tool's input payload.
type globInput struct {
	Pattern string `json:"pattern"`
	Path    string `json:"path"`
}

// taskInput is the Task (sub-agent) tool's input payload.
type taskInput struct {
	Prompt       string `json:"prompt"`
	Description  string `json:"description"`
	SubagentType string `json:"subagent_type"`
}
|
||||
|
||||
// ListSessions returns all sessions found in the Claude projects directory.
|
||||
func ListSessions(projectsDir string) ([]Session, error) {
|
||||
matches, err := filepath.Glob(filepath.Join(projectsDir, "*.jsonl"))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("glob sessions: %w", err)
|
||||
}
|
||||
|
||||
var sessions []Session
|
||||
for _, path := range matches {
|
||||
base := filepath.Base(path)
|
||||
id := strings.TrimSuffix(base, ".jsonl")
|
||||
|
||||
info, err := os.Stat(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
s := Session{
|
||||
ID: id,
|
||||
Path: path,
|
||||
}
|
||||
|
||||
// Quick scan for first and last timestamps
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
scanner.Buffer(make([]byte, 1024*1024), 1024*1024)
|
||||
var firstTS, lastTS string
|
||||
for scanner.Scan() {
|
||||
var entry rawEntry
|
||||
if json.Unmarshal(scanner.Bytes(), &entry) != nil {
|
||||
continue
|
||||
}
|
||||
if entry.Timestamp == "" {
|
||||
continue
|
||||
}
|
||||
if firstTS == "" {
|
||||
firstTS = entry.Timestamp
|
||||
}
|
||||
lastTS = entry.Timestamp
|
||||
}
|
||||
f.Close()
|
||||
|
||||
if firstTS != "" {
|
||||
s.StartTime, _ = time.Parse(time.RFC3339Nano, firstTS)
|
||||
}
|
||||
if lastTS != "" {
|
||||
s.EndTime, _ = time.Parse(time.RFC3339Nano, lastTS)
|
||||
}
|
||||
if s.StartTime.IsZero() {
|
||||
s.StartTime = info.ModTime()
|
||||
}
|
||||
|
||||
sessions = append(sessions, s)
|
||||
}
|
||||
|
||||
sort.Slice(sessions, func(i, j int) bool {
|
||||
return sessions[i].StartTime.After(sessions[j].StartTime)
|
||||
})
|
||||
|
||||
return sessions, nil
|
||||
}
|
||||
|
||||
// ParseTranscript reads a JSONL session file and returns structured events.
//
// Assistant "tool_use" blocks are parked in a pending map keyed by their
// tool ID and only emitted as a single Event when the matching user
// "tool_result" block arrives, so each tool Event carries input, output,
// success flag, and an invocation-to-result duration. Malformed lines are
// skipped rather than aborting the whole parse; unmatched tool_use blocks
// produce no Event.
func ParseTranscript(path string) (*Session, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, fmt.Errorf("open transcript: %w", err)
	}
	defer f.Close()

	base := filepath.Base(path)
	sess := &Session{
		ID:   strings.TrimSuffix(base, ".jsonl"),
		Path: path,
	}

	// Collect tool_use entries keyed by ID
	type toolUse struct {
		timestamp time.Time
		tool      string
		input     string
	}
	pendingTools := make(map[string]toolUse)

	scanner := bufio.NewScanner(f)
	// Transcript lines can be very large (file contents, long command
	// output); raise the scanner limit to 4 MiB per line.
	scanner.Buffer(make([]byte, 4*1024*1024), 4*1024*1024)

	for scanner.Scan() {
		var entry rawEntry
		if err := json.Unmarshal(scanner.Bytes(), &entry); err != nil {
			// Skip lines that are not valid JSON entries.
			continue
		}

		ts, _ := time.Parse(time.RFC3339Nano, entry.Timestamp)

		// Track the session's overall time span from entry timestamps.
		if sess.StartTime.IsZero() && !ts.IsZero() {
			sess.StartTime = ts
		}
		if !ts.IsZero() {
			sess.EndTime = ts
		}

		switch entry.Type {
		case "assistant":
			var msg rawMessage
			if json.Unmarshal(entry.Message, &msg) != nil {
				continue
			}
			for _, raw := range msg.Content {
				var block contentBlock
				if json.Unmarshal(raw, &block) != nil {
					continue
				}

				switch block.Type {
				case "text":
					// Assistant prose becomes an "assistant" event.
					if text := strings.TrimSpace(block.Text); text != "" {
						sess.Events = append(sess.Events, Event{
							Timestamp: ts,
							Type:      "assistant",
							Input:     truncate(text, 500),
						})
					}

				case "tool_use":
					// Defer until the matching tool_result arrives.
					inputStr := extractToolInput(block.Name, block.Input)
					pendingTools[block.ID] = toolUse{
						timestamp: ts,
						tool:      block.Name,
						input:     inputStr,
					}
				}
			}

		case "user":
			var msg rawMessage
			if json.Unmarshal(entry.Message, &msg) != nil {
				continue
			}
			for _, raw := range msg.Content {
				var block contentBlock
				if json.Unmarshal(raw, &block) != nil {
					continue
				}

				switch block.Type {
				case "tool_result":
					// Pair with the pending tool_use; Duration is the gap
					// between the invocation and result timestamps.
					if tu, ok := pendingTools[block.ToolUseID]; ok {
						output := extractResultContent(block.Content)
						isError := block.IsError != nil && *block.IsError
						evt := Event{
							Timestamp: tu.timestamp,
							Type:      "tool_use",
							Tool:      tu.tool,
							ToolID:    block.ToolUseID,
							Input:     tu.input,
							Output:    truncate(output, 2000),
							Duration:  ts.Sub(tu.timestamp),
							Success:   !isError,
						}
						if isError {
							evt.ErrorMsg = truncate(output, 500)
						}
						sess.Events = append(sess.Events, evt)
						delete(pendingTools, block.ToolUseID)
					}

				case "text":
					// Plain user messages become "user" events.
					if text := strings.TrimSpace(block.Text); text != "" {
						sess.Events = append(sess.Events, Event{
							Timestamp: ts,
							Type:      "user",
							Input:     truncate(text, 500),
						})
					}
				}
			}
		}
	}

	return sess, scanner.Err()
}
|
||||
|
||||
// extractToolInput renders a tool's raw JSON input as a one-line summary
// appropriate for that tool (shell command, file path, search pattern, ...).
// Tools with no dedicated decoder — or inputs that fail to decode — fall
// back to a sorted, comma-separated list of the input's top-level JSON keys.
func extractToolInput(toolName string, raw json.RawMessage) string {
	if raw == nil {
		return ""
	}

	switch toolName {
	case "Bash":
		var inp bashInput
		if json.Unmarshal(raw, &inp) == nil {
			// Append the human-readable description as a shell comment;
			// extractCommand in video.go strips it back off.
			desc := inp.Description
			if desc != "" {
				desc = " # " + desc
			}
			return inp.Command + desc
		}
	case "Read":
		var inp readInput
		if json.Unmarshal(raw, &inp) == nil {
			return inp.FilePath
		}
	case "Edit":
		var inp editInput
		if json.Unmarshal(raw, &inp) == nil {
			return fmt.Sprintf("%s (edit)", inp.FilePath)
		}
	case "Write":
		var inp writeInput
		if json.Unmarshal(raw, &inp) == nil {
			return fmt.Sprintf("%s (%d bytes)", inp.FilePath, len(inp.Content))
		}
	case "Grep":
		var inp grepInput
		if json.Unmarshal(raw, &inp) == nil {
			// Default the search root to "." when no path was given.
			path := inp.Path
			if path == "" {
				path = "."
			}
			return fmt.Sprintf("/%s/ in %s", inp.Pattern, path)
		}
	case "Glob":
		var inp globInput
		if json.Unmarshal(raw, &inp) == nil {
			return inp.Pattern
		}
	case "Task":
		var inp taskInput
		if json.Unmarshal(raw, &inp) == nil {
			// Prefer the short description; fall back to the prompt.
			desc := inp.Description
			if desc == "" {
				desc = truncate(inp.Prompt, 80)
			}
			return fmt.Sprintf("[%s] %s", inp.SubagentType, desc)
		}
	}

	// Fallback: show raw JSON keys
	var m map[string]interface{}
	if json.Unmarshal(raw, &m) == nil {
		var parts []string
		for k := range m {
			parts = append(parts, k)
		}
		// Sort for deterministic output (map iteration order is random).
		sort.Strings(parts)
		return strings.Join(parts, ", ")
	}

	return ""
}
|
||||
|
||||
// extractResultContent flattens a tool_result content payload — which may
// arrive as a plain string, a list of text blocks, or a single text block —
// into one text string. Anything else is rendered with %v.
func extractResultContent(content interface{}) string {
	switch v := content.(type) {
	case string:
		return v
	case []interface{}:
		var parts []string
		for _, item := range v {
			m, ok := item.(map[string]interface{})
			if !ok {
				continue
			}
			if text, ok := m["text"].(string); ok {
				parts = append(parts, text)
			}
		}
		return strings.Join(parts, "\n")
	case map[string]interface{}:
		if text, ok := v["text"].(string); ok {
			return text
		}
	}
	return fmt.Sprintf("%v", content)
}
|
||||
|
||||
// truncate shortens s to at most max bytes, appending "..." when cut.
func truncate(s string, max int) string {
	if len(s) > max {
		return s[:max] + "..."
	}
	return s
}
|
||||
54
pkg/session/search.go
Normal file
54
pkg/session/search.go
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
package session
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// SearchResult represents a match found in a session transcript.
type SearchResult struct {
	SessionID string    // ID of the session containing the match
	Timestamp time.Time // when the matching tool event occurred
	Tool      string    // tool name of the matching event
	Match     string    // context: the event input, or its truncated output when input is empty
}
|
||||
|
||||
// Search finds events matching the query across all sessions in the directory.
|
||||
func Search(projectsDir, query string) ([]SearchResult, error) {
|
||||
matches, err := filepath.Glob(filepath.Join(projectsDir, "*.jsonl"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var results []SearchResult
|
||||
query = strings.ToLower(query)
|
||||
|
||||
for _, path := range matches {
|
||||
sess, err := ParseTranscript(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, evt := range sess.Events {
|
||||
if evt.Type != "tool_use" {
|
||||
continue
|
||||
}
|
||||
text := strings.ToLower(evt.Input + " " + evt.Output)
|
||||
if strings.Contains(text, query) {
|
||||
matchCtx := evt.Input
|
||||
if matchCtx == "" {
|
||||
matchCtx = truncate(evt.Output, 120)
|
||||
}
|
||||
results = append(results, SearchResult{
|
||||
SessionID: sess.ID,
|
||||
Timestamp: evt.Timestamp,
|
||||
Tool: evt.Tool,
|
||||
Match: matchCtx,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
127
pkg/session/video.go
Normal file
127
pkg/session/video.go
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
package session
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// RenderMP4 generates an MP4 video from session events using VHS (charmbracelet).
//
// It writes a temporary .tape script describing the session (see
// generateTape), then shells out to the `vhs` binary, which reads the tape
// and writes outputPath itself via the tape's Output directive. Returns an
// error if vhs is not on PATH or the render fails.
func RenderMP4(sess *Session, outputPath string) error {
	if _, err := exec.LookPath("vhs"); err != nil {
		return fmt.Errorf("vhs not installed (go install github.com/charmbracelet/vhs@latest)")
	}

	tape := generateTape(sess, outputPath)

	tmpFile, err := os.CreateTemp("", "session-*.tape")
	if err != nil {
		return fmt.Errorf("create tape: %w", err)
	}
	// The tape file is only needed for the duration of the vhs run.
	defer os.Remove(tmpFile.Name())

	if _, err := tmpFile.WriteString(tape); err != nil {
		tmpFile.Close()
		return fmt.Errorf("write tape: %w", err)
	}
	// Close before handing the path to vhs so the contents are flushed.
	tmpFile.Close()

	// Stream vhs's own progress output through to the caller's terminal.
	cmd := exec.Command("vhs", tmpFile.Name())
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		return fmt.Errorf("vhs render: %w", err)
	}

	return nil
}
|
||||
|
||||
// generateTape builds a VHS tape script replaying the session's tool calls:
// Bash commands are "typed" with abbreviated output and a pass/fail comment;
// file operations and agent tasks appear as single comment lines. Prose
// (user/assistant) events are not replayed.
func generateTape(sess *Session, outputPath string) string {
	var b strings.Builder

	// Global tape settings (terminal size, theme, typing speed).
	b.WriteString(fmt.Sprintf("Output %s\n", outputPath))
	b.WriteString("Set FontSize 16\n")
	b.WriteString("Set Width 1400\n")
	b.WriteString("Set Height 800\n")
	b.WriteString("Set TypingSpeed 30ms\n")
	b.WriteString("Set Theme \"Catppuccin Mocha\"\n")
	b.WriteString("Set Shell bash\n")
	b.WriteString("\n")

	// Title frame
	id := sess.ID
	if len(id) > 8 {
		id = id[:8]
	}
	b.WriteString(fmt.Sprintf("Type \"# Session %s | %s\"\n",
		id, sess.StartTime.Format("2006-01-02 15:04")))
	b.WriteString("Enter\n")
	b.WriteString("Sleep 2s\n")
	b.WriteString("\n")

	for _, evt := range sess.Events {
		// Only tool activity is replayed.
		if evt.Type != "tool_use" {
			continue
		}

		switch evt.Tool {
		case "Bash":
			cmd := extractCommand(evt.Input)
			if cmd == "" {
				continue
			}
			// Show the command
			b.WriteString(fmt.Sprintf("Type %q\n", "$ "+cmd))
			b.WriteString("Enter\n")

			// Show abbreviated output
			output := evt.Output
			if len(output) > 200 {
				output = output[:200] + "..."
			}
			if output != "" {
				for _, line := range strings.Split(output, "\n") {
					if line == "" {
						continue
					}
					b.WriteString(fmt.Sprintf("Type %q\n", line))
					b.WriteString("Enter\n")
				}
			}

			// Status indicator
			if !evt.Success {
				b.WriteString("Type \"# ✗ FAILED\"\n")
			} else {
				b.WriteString("Type \"# ✓ OK\"\n")
			}
			b.WriteString("Enter\n")
			b.WriteString("Sleep 1s\n")
			b.WriteString("\n")

		case "Read", "Edit", "Write":
			// File operations are summarised as one comment line.
			b.WriteString(fmt.Sprintf("Type %q\n",
				fmt.Sprintf("# %s: %s", evt.Tool, truncate(evt.Input, 80))))
			b.WriteString("Enter\n")
			b.WriteString("Sleep 500ms\n")

		case "Task":
			// Sub-agent invocations are summarised as one comment line.
			b.WriteString(fmt.Sprintf("Type %q\n",
				fmt.Sprintf("# Agent: %s", truncate(evt.Input, 80))))
			b.WriteString("Enter\n")
			b.WriteString("Sleep 1s\n")
		}
	}

	// Hold the final frame before the recording ends.
	b.WriteString("Sleep 3s\n")
	return b.String()
}
|
||||
|
||||
// extractCommand strips the " # description" suffix that extractToolInput
// appends to Bash commands, leaving just the command text.
func extractCommand(input string) string {
	before, _, found := strings.Cut(input, " # ")
	if found && before != "" {
		return before
	}
	return input
}
|
||||
Loading…
Add table
Reference in a new issue