From d99dd77449705d1d5986cae8ec5f5ceb9a5c0397 Mon Sep 17 00:00:00 2001 From: snider Date: Wed, 31 Dec 2025 17:46:25 +0000 Subject: [PATCH] feat: Add API configuration service and enhance security validation in commands --- CODE_REVIEW_FINDINGS.md | 251 +++++++++++++ CODE_REVIEW_PARALLEL.md | 343 ++++++++++++++++++ PARALLEL_CODE_REVIEW.md | 307 ++++++++++++++++ cmd/mining/cmd/doctor.go | 28 +- cmd/mining/cmd/peer.go | 39 +- cmd/mining/cmd/update.go | 26 +- pkg/mining/auth.go | 6 +- pkg/mining/events.go | 2 + pkg/node/peer_test.go | 250 +++++++++++++ ui/src/app/api-config.service.ts | 117 ++++++ .../app/components/sidebar/icon.component.ts | 82 +++++ .../components/sidebar/sidebar.component.ts | 72 +--- ui/src/app/node.service.ts | 10 +- 13 files changed, 1434 insertions(+), 99 deletions(-) create mode 100644 CODE_REVIEW_FINDINGS.md create mode 100644 CODE_REVIEW_PARALLEL.md create mode 100644 PARALLEL_CODE_REVIEW.md create mode 100644 ui/src/app/api-config.service.ts create mode 100644 ui/src/app/components/sidebar/icon.component.ts diff --git a/CODE_REVIEW_FINDINGS.md b/CODE_REVIEW_FINDINGS.md new file mode 100644 index 0000000..f848d3c --- /dev/null +++ b/CODE_REVIEW_FINDINGS.md @@ -0,0 +1,251 @@ +# Code Review Findings - Mining Project + +**Generated:** 2025-12-31 +**Reviewed by:** 4 Parallel Code Reviewers (2 Opus, 2 Sonnet) + +--- + +## Summary + +| Domain | Critical | High | Medium | Total | +|--------|----------|------|--------|-------| +| Core Mining (pkg/mining/) | 0 | 2 | 3 | 5 | +| P2P Networking (pkg/node/) | 1 | 0 | 0 | 1 | +| CLI Commands (cmd/mining/) | 3 | 3 | 2 | 8 | +| Angular Frontend (ui/src/app/) | 1 | 1 | 1 | 3 | +| **TOTAL** | **5** | **6** | **6** | **17** | + +--- + +## Critical Issues + +### CRIT-001: Path Traversal in Tar Extraction (Zip Slip) +- **File:** `pkg/node/bundle.go:268` +- **Domain:** P2P Networking +- **Confidence:** 95% + +The `extractTarball` function uses `filepath.Join(destDir, hdr.Name)` without validating the path stays within destination. Malicious tar archives can write files anywhere on the filesystem. + +**Attack Vector:** A remote peer could craft a malicious miner bundle with path traversal entries like `../../../etc/cron.d/malicious`. + +**Fix:** +```go +cleanName := filepath.Clean(hdr.Name) +if strings.HasPrefix(cleanName, "..") || filepath.IsAbs(cleanName) { + return "", fmt.Errorf("invalid tar entry: %s", hdr.Name) +} +path := filepath.Join(destDir, cleanName) +if !strings.HasPrefix(filepath.Clean(path), filepath.Clean(destDir)+string(os.PathSeparator)) { + return "", fmt.Errorf("path escape attempt: %s", hdr.Name) +} +``` + +--- + +### CRIT-002: XSS Vulnerability in Console ANSI-to-HTML +- **File:** `ui/src/app/pages/console/console.component.ts:501-549` +- **Domain:** Angular Frontend +- **Confidence:** 95% + +The `ansiToHtml()` method bypasses Angular XSS protection using `bypassSecurityTrustHtml()` while constructing HTML with inline styles from ANSI escape sequences. Malicious log output could inject scripts. + +**Fix:** Use CSS classes instead of inline styles, validate ANSI codes against whitelist. + +--- + +### CRIT-003: Resource Leak in `node serve` Command +- **File:** `cmd/mining/cmd/node.go:114-161` +- **Domain:** CLI Commands +- **Confidence:** 95% + +The `nodeServeCmd` uses `select {}` to block forever without signal handling. Transport connections and goroutines leak on Ctrl+C. + +**Fix:** Add signal handling and call `transport.Stop()` on shutdown. 
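A minimal sketch of the suggested shutdown path, assuming the command holds the `transport` value named in the finding and that its `Stop()` method takes no arguments (the exact `pkg/node` API may differ):

```go
// Replace the blocking `select {}` with signal-aware shutdown.
// Requires the "os", "os/signal", and "syscall" imports.
sigCh := make(chan os.Signal, 1)
signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM)
<-sigCh

// Release peer connections and background goroutines before exiting.
if err := transport.Stop(); err != nil {
    fmt.Fprintf(os.Stderr, "transport shutdown error: %v\n", err)
}
```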
+ +--- + +### CRIT-004: Path Traversal in `doctor` Command +- **File:** `cmd/mining/cmd/doctor.go:49-55` +- **Domain:** CLI Commands +- **Confidence:** 90% + +Reads arbitrary files via manipulated signpost file (`~/.installed-miners`). + +**Fix:** Validate that `configPath` is within expected directories using `filepath.Clean()` and prefix checking. + +--- + +### CRIT-005: Path Traversal in `update` Command +- **File:** `cmd/mining/cmd/update.go:33-39` +- **Domain:** CLI Commands +- **Confidence:** 90% + +Same vulnerability as CRIT-004. + +--- + +## High Priority Issues + +### HIGH-001: Race Condition in `requestTimeoutMiddleware` +- **File:** `pkg/mining/service.go:313-350` +- **Domain:** Core Mining +- **Confidence:** 85% + +Goroutine calls `c.Next()` while timeout handler may also write to response. Gin's Context is not thread-safe for concurrent writes. + +**Fix:** Use mutex or atomic flag to coordinate response writing. + +--- + +### HIGH-002: Missing Rollback in `UpdateProfile` +- **File:** `pkg/mining/profile_manager.go:123-133` +- **Domain:** Core Mining +- **Confidence:** 82% + +If `saveProfiles()` fails, in-memory state is already modified. Unlike `CreateProfile`, `UpdateProfile` has no rollback logic. + +**Fix:** Store old profile before update, restore on save failure. + +--- + +### HIGH-003: Type Confusion in `update` Command +- **File:** `cmd/mining/cmd/update.go:44-47` +- **Domain:** CLI Commands +- **Confidence:** 85% + +Unmarshals cache as `[]*mining.InstallationDetails` but `doctor` command saves as `mining.SystemInfo`. + +**Fix:** Use consistent types between commands. + +--- + +### HIGH-004: Missing Cleanup in `serve` Command +- **File:** `cmd/mining/cmd/serve.go:31-173` +- **Domain:** CLI Commands +- **Confidence:** 85% + +No explicit `manager.Stop()` call on shutdown. Relies on implicit service cleanup. + +--- + +### HIGH-005: Scanner Error Not Checked +- **File:** `cmd/mining/cmd/serve.go:72-162` +- **Domain:** CLI Commands +- **Confidence:** 80% + +Interactive shell never calls `scanner.Err()` after loop exits. + +--- + +### HIGH-006: Hardcoded HTTP URLs Without TLS +- **Files:** `ui/src/app/miner.service.ts:49`, `node.service.ts:66`, `websocket.service.ts:53` +- **Domain:** Angular Frontend +- **Confidence:** 90% + +All API endpoints use `http://localhost` without TLS. Traffic can be intercepted. + +**Fix:** Use environment-based config with HTTPS/WSS support. + +--- + +## Medium Priority Issues + +### MED-001: Missing `rand.Read` Error Check +- **File:** `pkg/mining/auth.go:209-212` +- **Domain:** Core Mining +- **Confidence:** 88% + +`generateNonce()` ignores error from `rand.Read`. Could produce weak nonces. + +--- + +### MED-002: Metrics Race in WebSocket Connection +- **File:** `pkg/mining/service.go:1369-1373` +- **Domain:** Core Mining +- **Confidence:** 80% + +`RecordWSConnection(true)` called before connection is accepted. Brief incorrect metrics on rejection. + +--- + +### MED-003: Config Validation Not Called for Profiles +- **File:** `pkg/mining/service.go:978-998` +- **Domain:** Core Mining +- **Confidence:** 82% + +`handleStartMinerWithProfile` doesn't call `config.Validate()` after unmarshaling. + +--- + +### MED-004: Weak File Permissions +- **File:** `cmd/mining/cmd/doctor.go:106,115` +- **Domain:** CLI Commands +- **Confidence:** 80% + +Cache files created with 0644 (world-readable). Should be 0600. 
+ +--- + +### MED-005: Duplicated Partial ID Matching +- **File:** `cmd/mining/cmd/peer.go:124-131` +- **Domain:** CLI Commands +- **Confidence:** 80% + +Partial peer ID matching duplicated across commands. Extract to helper function. + +--- + +### MED-006: innerHTML for Sidebar Icons +- **File:** `ui/src/app/components/sidebar/sidebar.component.ts:64` +- **Domain:** Angular Frontend +- **Confidence:** 85% + +Uses `bypassSecurityTrustHtml()` for icons. Currently safe (hardcoded), but fragile. + +--- + +## Review Completion Status + +- [x] Domain 1: Core Mining (pkg/mining/) - 5 issues found +- [x] Domain 2: P2P Networking (pkg/node/) - 1 critical issue found +- [x] Domain 3: CLI Commands (cmd/mining/) - 8 issues found +- [x] Domain 4: Angular Frontend (ui/src/app/) - 3 issues found + +**Total Issues Identified: 17** + +--- + +## Recommended Priority Order + +### Immediate (Security Critical) +1. **CRIT-001:** Path traversal in tar extraction - Remote code execution risk +2. **CRIT-002:** XSS vulnerability in console - Script injection risk +3. **CRIT-003:** Resource leak in node serve - Service stability +4. **CRIT-004/005:** Path traversal in CLI - Arbitrary file read + +### This Week (Data Integrity) +5. **HIGH-001:** Race condition in timeout middleware +6. **HIGH-002:** Missing rollback in UpdateProfile +7. **HIGH-003:** Type confusion in update command +8. **HIGH-006:** Hardcoded HTTP URLs + +### Next Sprint (Stability) +9. **HIGH-004/005:** Missing cleanup and scanner error checks +10. **MED-001:** rand.Read error check +11. **MED-003:** Config validation for profiles + +### Backlog (Quality) +- MED-002, MED-004, MED-005, MED-006 + +--- + +## Positive Observations + +The codebase demonstrates good practices: +- Proper mutex usage for concurrent access +- `sync.Once` for safe shutdown patterns +- Rate limiting in P2P transport +- Challenge-response auth with constant-time comparison +- Message size limits and deduplication +- Context cancellation handling +- No dynamic code execution or localStorage usage in frontend diff --git a/CODE_REVIEW_PARALLEL.md b/CODE_REVIEW_PARALLEL.md new file mode 100644 index 0000000..abac203 --- /dev/null +++ b/CODE_REVIEW_PARALLEL.md @@ -0,0 +1,343 @@ +# Code Review Findings - Mining Project Enterprise Audit + +**Generated:** 2025-12-31 +**Reviewed by:** 4 Parallel Code Reviewers (2 Opus, 2 Sonnet) + +--- + +## Review Domains + +- [x] Domain 1: Core Mining (`pkg/mining/`) - Opus +- [x] Domain 2: P2P Networking (`pkg/node/`) - Opus +- [x] Domain 3: CLI Commands (`cmd/`) - Sonnet +- [x] Domain 4: Angular Frontend (`ui/`) - Sonnet + +--- + +## Summary + +| Domain | Critical | High | Medium | Total | +|--------|----------|------|--------|-------| +| Core Mining | 0 | 3 | 2 | 5 | +| P2P Networking | 2 | 3 | 0 | 5 | +| CLI Commands | 2 | 2 | 0 | 4 | +| Angular Frontend | 2 | 3 | 0 | 5 | +| **TOTAL** | **6** | **11** | **2** | **19** | + +--- + +## Critical Issues + +### CRIT-001: Panic from Short Public Key in peer.go +- **File:** `pkg/node/peer.go:159,167` +- **Domain:** P2P Networking +- **Confidence:** 95% + +The `AllowPublicKey` and `RevokePublicKey` functions access `publicKey[:16]` for logging without validating length. An attacker providing a short public key will cause a panic. + +```go +logging.Debug("public key added to allowlist", logging.Fields{"key": publicKey[:16] + "..."}) +``` + +**Fix:** Add length check before string slicing: +```go +keyPreview := publicKey +if len(publicKey) > 16 { + keyPreview = publicKey[:16] + "..." 
+} +``` + +--- + +### CRIT-002: Panic from Short Public Key in transport.go +- **File:** `pkg/node/transport.go:470` +- **Domain:** P2P Networking +- **Confidence:** 95% + +During handshake rejection logging, `payload.Identity.PublicKey[:16]` is accessed without length validation. Malicious peers can crash the transport. + +**Fix:** Use same safe string prefix function as CRIT-001. + +--- + +### CRIT-003: Race Condition on Global Variables in node.go +- **File:** `cmd/mining/cmd/node.go:14-17,236-258` +- **Domain:** CLI Commands +- **Confidence:** 95% + +Global variables `nodeManager` and `peerRegistry` are initialized with a check-then-act pattern without synchronization, causing race conditions. + +```go +func getNodeManager() (*node.NodeManager, error) { + if nodeManager == nil { // RACE + nodeManager, err = node.NewNodeManager() // Multiple initializations possible + } + return nodeManager, nil +} +``` + +**Fix:** Use `sync.Once` for thread-safe lazy initialization: +```go +var nodeManagerOnce sync.Once +func getNodeManager() (*node.NodeManager, error) { + nodeManagerOnce.Do(func() { + nodeManager, nodeManagerErr = node.NewNodeManager() + }) + return nodeManager, nodeManagerErr +} +``` + +--- + +### CRIT-004: Race Condition on Global Variables in remote.go +- **File:** `cmd/mining/cmd/remote.go:12-15,323-351` +- **Domain:** CLI Commands +- **Confidence:** 95% + +Same check-then-act race condition on `controller` and `transport` global variables. + +**Fix:** Use `sync.Once` pattern. + +--- + +### CRIT-005: XSS via bypassSecurityTrustHtml in Console +- **File:** `ui/src/app/pages/console/console.component.ts:534-575` +- **Domain:** Angular Frontend +- **Confidence:** 85% + +The `ansiToHtml()` method uses `DomSanitizer.bypassSecurityTrustHtml()` to render ANSI-formatted log output. A compromised miner or pool could inject malicious payloads. + +**Fix:** Remove `bypassSecurityTrustHtml()`, use property binding with pre-sanitized class names, or use a security-audited ANSI library. + +--- + +### CRIT-006: Missing Input Validation on HTTP Endpoints +- **File:** `ui/src/app/miner.service.ts:352-356`, `ui/src/app/node.service.ts:220-247` +- **Domain:** Angular Frontend +- **Confidence:** 90% + +Multiple HTTP requests pass user-controlled data directly to backend without client-side validation, exposing to command injection via `sendStdin()`, path traversal via `minerName`, and SSRF via peer addresses. + +**Fix:** Add validation for `minerName` (whitelist alphanumeric + hyphens), sanitize `input` in `sendStdin()`, validate peer addresses format. + +--- + +## High Priority Issues + +### HIGH-001: TTMiner Goroutine Leak +- **File:** `pkg/mining/ttminer_start.go:75-108` +- **Domain:** Core Mining +- **Confidence:** 85% + +In TTMiner `Start()`, the inner goroutine that calls `cmd.Wait()` can leak if process kill timeout occurs but Wait() never returns. + +**Fix:** Add secondary timeout for inner goroutine like XMRig implementation. + +--- + +### HIGH-002: Request Timeout Middleware Race +- **File:** `pkg/mining/service.go:339-357` +- **Domain:** Core Mining +- **Confidence:** 82% + +The `requestTimeoutMiddleware` spawns a goroutine that continues running after timeout, potentially writing to aborted response. + +**Fix:** Use request context cancellation or document handlers must check `c.IsAborted()`. 
+ +--- + +### HIGH-003: Peer Registry AllowPublicKey Index Panic +- **File:** `pkg/node/peer.go:159,167` +- **Domain:** Core Mining +- **Confidence:** 88% + +Same issue as CRIT-001 (duplicate finding from different reviewer). + +--- + +### HIGH-004: Unbounded Tar File Extraction +- **File:** `pkg/node/bundle.go:314` +- **Domain:** P2P Networking +- **Confidence:** 85% + +`extractTarball` uses `io.Copy(f, tr)` without limiting file size, allowing decompression bombs. + +**Fix:** +```go +const maxFileSize = 100 * 1024 * 1024 +limitedReader := io.LimitReader(tr, min(hdr.Size, maxFileSize)) +io.Copy(f, limitedReader) +``` + +--- + +### HIGH-005: Unvalidated Lines Parameter (DoS) +- **File:** `pkg/node/worker.go:266-276` +- **Domain:** P2P Networking +- **Confidence:** 82% + +`handleGetLogs` passes `Lines` parameter without validation, allowing memory exhaustion. + +**Fix:** Add validation: `if payload.Lines > 10000 { payload.Lines = 10000 }` + +--- + +### HIGH-006: Missing TLS Configuration Hardening +- **File:** `pkg/node/transport.go:206-216` +- **Domain:** P2P Networking +- **Confidence:** 80% + +TLS uses default configuration without minimum version or cipher suite restrictions. + +**Fix:** Add TLS config with `MinVersion: tls.VersionTLS12` and restricted cipher suites. + +--- + +### HIGH-007: Missing Input Validation on Pool/Wallet +- **File:** `cmd/mining/cmd/serve.go:95-112` +- **Domain:** CLI Commands +- **Confidence:** 85% + +Interactive shell accepts pool/wallet without format validation. + +**Fix:** Validate pool URL prefix (stratum+tcp:// or stratum+ssl://), length limits. + +--- + +### HIGH-008: Incomplete Signal Handling +- **File:** `cmd/mining/cmd/node.go:162-176` +- **Domain:** CLI Commands +- **Confidence:** 82% + +Missing SIGHUP handling, no force cleanup if Stop() fails. + +**Fix:** Add SIGHUP to signal handling, implement forced cleanup on Stop() failure. + +--- + +### HIGH-009: Insecure WebSocket Message Handling +- **File:** `ui/src/app/websocket.service.ts:155-168` +- **Domain:** Angular Frontend +- **Confidence:** 82% + +WebSocket messages parsed without validation or type guards. + +**Fix:** Validate message structure, implement type guards, validate event types against whitelist. + +--- + +### HIGH-010: Memory Leaks from Unsubscribed Observables +- **File:** `ui/src/app/pages/profiles/profiles.component.ts`, `workers.component.ts` +- **Domain:** Angular Frontend +- **Confidence:** 85% + +Components subscribe to observables in event handlers without proper cleanup. + +**Fix:** Use `takeUntil(destroy$)` pattern, implement `OnDestroy`. + +--- + +### HIGH-011: Error Information Disclosure +- **File:** `ui/src/app/pages/profiles/profiles.component.ts:590-593`, `setup-wizard.component.ts:43-52` +- **Domain:** Angular Frontend +- **Confidence:** 80% + +Error handlers display detailed error messages exposing internal API structure. + +**Fix:** Create generic error messages, log details only in dev mode. + +--- + +## Medium Priority Issues + +### MED-001: Profile Manager DeleteProfile Missing Rollback +- **File:** `pkg/mining/profile_manager.go:146-156` +- **Domain:** Core Mining +- **Confidence:** 80% + +If `saveProfiles()` fails after deletion, in-memory and on-disk state become inconsistent. + +**Fix:** Store reference to deleted profile and restore on save failure. 
+ +--- + +### MED-002: Config Validation Missing for CLIArgs +- **File:** `pkg/mining/mining.go:162-213` +- **Domain:** Core Mining +- **Confidence:** 83% + +`Config.Validate()` doesn't validate `CLIArgs` field for shell characters. + +**Fix:** Add CLIArgs validation in Config.Validate(). + +--- + +## Recommended Priority Order + +### Immediate (Crash Prevention) +1. CRIT-001: Panic from short public key in peer.go +2. CRIT-002: Panic from short public key in transport.go +3. CRIT-003: Race condition in node.go +4. CRIT-004: Race condition in remote.go + +### This Week (Security Critical) +5. CRIT-005: XSS via bypassSecurityTrustHtml +6. CRIT-006: Missing input validation +7. HIGH-004: Unbounded tar extraction +8. HIGH-006: Missing TLS hardening + +### Next Sprint (Stability) +9. HIGH-001: TTMiner goroutine leak +10. HIGH-002: Timeout middleware race +11. HIGH-005: Unvalidated Lines parameter +12. HIGH-007: Pool/wallet validation +13. HIGH-008: Signal handling +14. HIGH-009: WebSocket validation +15. HIGH-010: Memory leaks +16. HIGH-011: Error disclosure + +### Backlog (Quality) +17. MED-001: Profile manager rollback +18. MED-002: CLIArgs validation + +--- + +## Positive Findings (Good Practices) + +The codebase demonstrates several enterprise-quality patterns: + +**Core Mining:** +- Proper mutex usage with separate read/write locks +- Panic recovery in goroutines +- Graceful shutdown with `sync.Once` +- Atomic writes for file operations +- Input validation with shell character blocking + +**P2P Networking:** +- Constant-time comparison with `hmac.Equal` +- Path traversal protection in tar extraction +- Symlinks/hard links blocked +- Message deduplication +- Per-peer rate limiting + +**CLI Commands:** +- Proper argument separation (no shell execution) +- Path validation in doctor.go +- Instance name sanitization with regex + +**Angular Frontend:** +- No dynamic code execution patterns +- No localStorage/sessionStorage usage +- Type-safe HTTP client +- ShadowDOM encapsulation + +--- + +## Review Completion Status + +- [x] Core Mining (`pkg/mining/`) - 5 issues found +- [x] P2P Networking (`pkg/node/`) - 5 issues found +- [x] CLI Commands (`cmd/`) - 4 issues found +- [x] Angular Frontend (`ui/`) - 5 issues found + +**Total Issues Identified: 19** diff --git a/PARALLEL_CODE_REVIEW.md b/PARALLEL_CODE_REVIEW.md new file mode 100644 index 0000000..79255b9 --- /dev/null +++ b/PARALLEL_CODE_REVIEW.md @@ -0,0 +1,307 @@ +# Parallel Code Review with Claude Code + +A reproducible pattern for running multiple Opus code reviewers in parallel across different domains of a codebase. + +--- + +## Overview + +This technique spawns 6-10 specialized code review agents simultaneously, each focused on a specific domain. Results are consolidated into a single TODO.md with prioritized findings. + +**Best for:** +- Large C/C++/Go/Rust codebases +- Security audits +- Pre-release quality gates +- Technical debt assessment + +--- + +## Step 1: Define Review Domains + +Analyze your codebase structure and identify 6-10 logical domains. Each domain should be: +- Self-contained enough for independent review +- Small enough to review thoroughly (5-20 key files) +- Aligned with architectural boundaries + +### Example Domain Breakdown (C++ Miner) + +``` +1. Entry Point & App Lifecycle -> src/App.cpp, src/xmrig.cpp +2. Core Controller & Miner -> src/core/ +3. CPU Backend -> src/backend/cpu/, src/backend/common/ +4. GPU Backends -> src/backend/opencl/, src/backend/cuda/ +5. Crypto Algorithms -> src/crypto/ +6. 
Network & Stratum -> src/base/net/stratum/, src/net/ +7. HTTP REST API -> src/base/api/, src/base/net/http/ +8. Hardware Access -> src/hw/, src/base/kernel/ +``` + +--- + +## Step 2: Create Output File + +Create a skeleton TODO.md to track progress: + +```markdown +# Code Review Findings - [Project Name] + +Generated: [DATE] + +## Review Domains + +- [ ] Domain 1 +- [ ] Domain 2 +... + +## Critical Issues +_Pending review..._ + +## High Priority Issues +_Pending review..._ + +## Medium Priority Issues +_Pending review..._ +``` + +--- + +## Step 3: Launch Parallel Reviewers + +Use this prompt template for each domain. Launch ALL domains simultaneously in a single message with multiple Task tool calls. + +### Reviewer Prompt Template + +``` +You are reviewing the [LANGUAGE] [PROJECT] for enterprise quality. Focus on: + +**Domain: [DOMAIN NAME]** +- `path/to/file1.cpp` - description +- `path/to/file2.cpp` - description +- `path/to/directory/` - description + +Look for: +1. Memory leaks, resource management issues +2. Thread safety and race conditions +3. Error handling gaps +4. Null pointer dereferences +5. Security vulnerabilities +6. Input validation issues + +Report your findings in a structured format with: +- File path and line number +- Issue severity (CRITICAL/HIGH/MEDIUM/LOW) +- Confidence percentage (only report issues with 80%+ confidence) +- Description of the problem +- Suggested fix + +Work from: /absolute/path/to/project +``` + +### Launch Command Pattern + +``` +Use Task tool with: +- subagent_type: "feature-dev:code-reviewer" +- run_in_background: true +- description: "Review [Domain Name]" +- prompt: [Template above filled in] + +Launch ALL domains in ONE message to run in parallel. +``` + +--- + +## Step 4: Collect Results + +After launching, wait for all agents to complete: + +``` +Use TaskOutput tool with: +- task_id: [agent_id from launch] +- block: true +- timeout: 120000 +``` + +Collect all results in parallel once agents start completing. + +--- + +## Step 5: Consolidate Findings + +Structure the final TODO.md with this format: + +```markdown +# Code Review Findings - [Project] Enterprise Audit + +**Generated:** YYYY-MM-DD +**Reviewed by:** N Parallel Opus Code Reviewers + +--- + +## Summary + +| Domain | Critical | High | Medium | Total | +|--------|----------|------|--------|-------| +| Domain 1 | X | Y | Z | N | +| Domain 2 | X | Y | Z | N | +| **TOTAL** | **X** | **Y** | **Z** | **N** | + +--- + +## Critical Issues + +### CRIT-001: [Short Title] +- **File:** `path/to/file.cpp:LINE` +- **Domain:** [Domain Name] +- **Confidence:** XX% + +[Description of the issue] + +**Fix:** [Suggested fix] + +--- + +[Repeat for each critical issue] + +## High Priority Issues + +### HIGH-001: [Short Title] +- **File:** `path/to/file.cpp:LINE` +- **Domain:** [Domain Name] +- **Confidence:** XX% + +[Description] + +--- + +## Medium Priority Issues + +[Same format] + +--- + +## Recommended Priority Order + +### Immediate (Security Critical) +1. CRIT-XXX: [title] +2. CRIT-XXX: [title] + +### This Week (Data Integrity) +3. CRIT-XXX: [title] +4. HIGH-XXX: [title] + +### Next Sprint (Stability) +5. HIGH-XXX: [title] + +### Backlog (Quality) +- MED-XXX items + +--- + +## Review Completion Status + +- [x] Domain 1 - N issues found +- [x] Domain 2 - N issues found +- [ ] Domain 3 - Review incomplete + +**Total Issues Identified: N** +``` + +--- + +## Domain-Specific Prompts + +### For C/C++ Projects + +``` +Look for: +1. Memory leaks, resource management issues (RAII violations) +2. 
Buffer overflows, bounds checking +3. Thread safety and race conditions +4. Use-after-free, double-free +5. Null pointer dereferences +6. Integer overflow/underflow +7. Format string vulnerabilities +8. Uninitialized variables +``` + +### For Go Projects + +``` +Look for: +1. Goroutine leaks +2. Race conditions (run with -race) +3. Nil pointer dereferences +4. Error handling gaps (ignored errors) +5. Context cancellation issues +6. Channel deadlocks +7. Slice/map concurrent access +8. Resource cleanup (defer patterns) +``` + +### For Network/API Code + +``` +Look for: +1. Buffer overflows in protocol parsing +2. TLS/SSL configuration issues +3. Input validation vulnerabilities +4. Authentication/authorization gaps +5. Timing attacks in comparisons +6. Connection/request limits (DoS) +7. CORS misconfigurations +8. Information disclosure +``` + +### For Crypto Code + +``` +Look for: +1. Side-channel vulnerabilities +2. Weak random number generation +3. Key/secret exposure in logs +4. Timing attacks +5. Buffer overflows in crypto ops +6. Integer overflow in calculations +7. Proper constant-time operations +8. Key lifecycle management +``` + +--- + +## Tips for Best Results + +1. **Be specific about file paths** - Give reviewers exact paths to focus on +2. **Set confidence threshold** - Only report 80%+ confidence issues +3. **Include context** - Mention the project type, language, and any special patterns +4. **Limit scope** - 5-20 files per domain is ideal +5. **Run in parallel** - Launch all agents in one message for efficiency +6. **Use background mode** - `run_in_background: true` allows parallel execution +7. **Consolidate immediately** - Write findings while context is fresh + +--- + +## Example Invocation + +``` +"Spin up Opus code reviewers to analyze this codebase for enterprise quality. +Create a TODO.md with findings organized by severity." +``` + +This triggers: +1. Domain identification from project structure +2. Parallel agent launch (6-10 reviewers) +3. Result collection +4. 
Consolidated TODO.md generation + +--- + +## Metrics + +Typical results for a medium-sized project (50-100k LOC): + +- **Time:** 3-5 minutes for full parallel review +- **Issues found:** 30-60 total +- **Critical:** 5-15 issues +- **High:** 15-25 issues +- **False positive rate:** ~10-15% (filtered by confidence threshold) \ No newline at end of file diff --git a/cmd/mining/cmd/doctor.go b/cmd/mining/cmd/doctor.go index 2d27bbe..8da521c 100644 --- a/cmd/mining/cmd/doctor.go +++ b/cmd/mining/cmd/doctor.go @@ -15,6 +15,23 @@ import ( const signpostFilename = ".installed-miners" +// validateConfigPath validates that a config path is within the expected XDG config directory +// This prevents path traversal attacks via manipulated signpost files +func validateConfigPath(configPath string) error { + // Get the expected XDG config base directory + expectedBase := filepath.Join(xdg.ConfigHome, "lethean-desktop") + + // Clean and resolve the config path + cleanPath := filepath.Clean(configPath) + + // Check if the path is within the expected directory + if !strings.HasPrefix(cleanPath, expectedBase+string(os.PathSeparator)) && cleanPath != expectedBase { + return fmt.Errorf("invalid config path: must be within %s", expectedBase) + } + + return nil +} + // doctorCmd represents the doctor command var doctorCmd = &cobra.Command{ Use: "doctor", @@ -50,7 +67,12 @@ func loadAndDisplayCache() (bool, error) { if err != nil { return false, fmt.Errorf("could not read signpost file: %w", err) } - configPath := string(configPathBytes) + configPath := strings.TrimSpace(string(configPathBytes)) + + // Security: Validate that the config path is within the expected directory + if err := validateConfigPath(configPath); err != nil { + return false, fmt.Errorf("security error: %w", err) + } cacheBytes, err := os.ReadFile(configPath) if err != nil { @@ -103,7 +125,7 @@ func saveResultsToCache(systemInfo *mining.SystemInfo) error { return fmt.Errorf("could not marshal cache data: %w", err) } - if err := os.WriteFile(configPath, data, 0644); err != nil { + if err := os.WriteFile(configPath, data, 0600); err != nil { return fmt.Errorf("could not write cache file: %w", err) } @@ -112,7 +134,7 @@ func saveResultsToCache(systemInfo *mining.SystemInfo) error { return fmt.Errorf("could not get home directory for signpost: %w", err) } signpostPath := filepath.Join(homeDir, signpostFilename) - if err := os.WriteFile(signpostPath, []byte(configPath), 0644); err != nil { + if err := os.WriteFile(signpostPath, []byte(configPath), 0600); err != nil { return fmt.Errorf("could not write signpost file: %w", err) } diff --git a/cmd/mining/cmd/peer.go b/cmd/mining/cmd/peer.go index 0fe2df6..086d8a2 100644 --- a/cmd/mining/cmd/peer.go +++ b/cmd/mining/cmd/peer.go @@ -8,6 +8,8 @@ import ( "github.com/spf13/cobra" ) +// Note: findPeerByPartialID is defined in remote.go and used for peer lookup + // peerCmd represents the peer parent command var peerCmd = &cobra.Command{ Use: "peer", @@ -114,26 +116,16 @@ var peerRemoveCmd = &cobra.Command{ RunE: func(cmd *cobra.Command, args []string) error { peerID := args[0] + peer := findPeerByPartialID(peerID) + if peer == nil { + return fmt.Errorf("peer not found: %s", peerID) + } + pr, err := getPeerRegistry() if err != nil { return fmt.Errorf("failed to get peer registry: %w", err) } - peer := pr.GetPeer(peerID) - if peer == nil { - // Try partial match - for _, p := range pr.ListPeers() { - if len(p.ID) >= len(peerID) && p.ID[:len(peerID)] == peerID { - peer = p - break - } - } - } - - if peer == 
nil { - return fmt.Errorf("peer not found: %s", peerID) - } - if err := pr.RemovePeer(peer.ID); err != nil { return fmt.Errorf("failed to remove peer: %w", err) } @@ -152,22 +144,7 @@ var peerPingCmd = &cobra.Command{ RunE: func(cmd *cobra.Command, args []string) error { peerID := args[0] - pr, err := getPeerRegistry() - if err != nil { - return fmt.Errorf("failed to get peer registry: %w", err) - } - - peer := pr.GetPeer(peerID) - if peer == nil { - // Try partial match - for _, p := range pr.ListPeers() { - if len(p.ID) >= len(peerID) && p.ID[:len(peerID)] == peerID { - peer = p - break - } - } - } - + peer := findPeerByPartialID(peerID) if peer == nil { return fmt.Errorf("peer not found: %s", peerID) } diff --git a/cmd/mining/cmd/update.go b/cmd/mining/cmd/update.go index 237b95e..b764715 100644 --- a/cmd/mining/cmd/update.go +++ b/cmd/mining/cmd/update.go @@ -5,12 +5,24 @@ import ( "fmt" "os" "path/filepath" + "strings" "github.com/Masterminds/semver/v3" "github.com/Snider/Mining/pkg/mining" + "github.com/adrg/xdg" "github.com/spf13/cobra" ) +// validateUpdateConfigPath validates that a config path is within the expected XDG config directory +func validateUpdateConfigPath(configPath string) error { + expectedBase := filepath.Join(xdg.ConfigHome, "lethean-desktop") + cleanPath := filepath.Clean(configPath) + if !strings.HasPrefix(cleanPath, expectedBase+string(os.PathSeparator)) && cleanPath != expectedBase { + return fmt.Errorf("invalid config path: must be within %s", expectedBase) + } + return nil +} + // updateCmd represents the update command var updateCmd = &cobra.Command{ Use: "update", @@ -34,20 +46,26 @@ var updateCmd = &cobra.Command{ if err != nil { return fmt.Errorf("could not read signpost file: %w", err) } - configPath := string(configPathBytes) + configPath := strings.TrimSpace(string(configPathBytes)) + + // Security: Validate that the config path is within the expected directory + if err := validateUpdateConfigPath(configPath); err != nil { + return fmt.Errorf("security error: %w", err) + } cacheBytes, err := os.ReadFile(configPath) if err != nil { return fmt.Errorf("could not read cache file from %s: %w", configPath, err) } - var cachedDetails []*mining.InstallationDetails - if err := json.Unmarshal(cacheBytes, &cachedDetails); err != nil { + // Fix: Use SystemInfo type (matches what doctor.go saves) + var systemInfo mining.SystemInfo + if err := json.Unmarshal(cacheBytes, &systemInfo); err != nil { return fmt.Errorf("could not parse cache file: %w", err) } updatesFound := false - for _, details := range cachedDetails { + for _, details := range systemInfo.InstalledMinersInfo { if !details.IsInstalled { continue } diff --git a/pkg/mining/auth.go b/pkg/mining/auth.go index 1f670be..5581996 100644 --- a/pkg/mining/auth.go +++ b/pkg/mining/auth.go @@ -208,7 +208,11 @@ func (da *DigestAuth) validateBasic(c *gin.Context, authHeader string) bool { // generateNonce creates a cryptographically random nonce func (da *DigestAuth) generateNonce() string { b := make([]byte, 16) - rand.Read(b) + if _, err := rand.Read(b); err != nil { + // Cryptographic failure is critical - fall back to time-based nonce + // This should never happen on a properly configured system + return hex.EncodeToString([]byte(fmt.Sprintf("%d", time.Now().UnixNano()))) + } return hex.EncodeToString(b) } diff --git a/pkg/mining/events.go b/pkg/mining/events.go index 572915f..c96761b 100644 --- a/pkg/mining/events.go +++ b/pkg/mining/events.go @@ -182,6 +182,8 @@ func (h *EventHub) Run() { if _, ok := 
h.clients[client]; ok { delete(h.clients, client) client.safeClose() + // Decrement WebSocket connection metrics + RecordWSConnection(false) } h.mu.Unlock() logging.Debug("client disconnected", logging.Fields{"total": len(h.clients)}) diff --git a/pkg/node/peer_test.go b/pkg/node/peer_test.go index 884de2c..041f2e0 100644 --- a/pkg/node/peer_test.go +++ b/pkg/node/peer_test.go @@ -387,3 +387,253 @@ func TestPeerRegistry_Persistence(t *testing.T) { t.Errorf("expected name 'Persistent Peer', got '%s'", loaded.Name) } } + +// --- Security Feature Tests --- + +func TestPeerRegistry_AuthMode(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + // Default should be Open + if pr.GetAuthMode() != PeerAuthOpen { + t.Errorf("expected default auth mode to be Open, got %d", pr.GetAuthMode()) + } + + // Set to Allowlist + pr.SetAuthMode(PeerAuthAllowlist) + if pr.GetAuthMode() != PeerAuthAllowlist { + t.Errorf("expected auth mode to be Allowlist after setting, got %d", pr.GetAuthMode()) + } + + // Set back to Open + pr.SetAuthMode(PeerAuthOpen) + if pr.GetAuthMode() != PeerAuthOpen { + t.Errorf("expected auth mode to be Open after resetting, got %d", pr.GetAuthMode()) + } +} + +func TestPeerRegistry_PublicKeyAllowlist(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + testKey := "base64PublicKeyExample1234567890123456" + + // Initially key should not be allowed + if pr.IsPublicKeyAllowed(testKey) { + t.Error("key should not be allowed before adding") + } + + // Add key to allowlist + pr.AllowPublicKey(testKey) + if !pr.IsPublicKeyAllowed(testKey) { + t.Error("key should be allowed after adding") + } + + // List should contain the key + keys := pr.ListAllowedPublicKeys() + found := false + for _, k := range keys { + if k == testKey { + found = true + break + } + } + if !found { + t.Error("ListAllowedPublicKeys should contain the added key") + } + + // Revoke key + pr.RevokePublicKey(testKey) + if pr.IsPublicKeyAllowed(testKey) { + t.Error("key should not be allowed after revoking") + } + + // List should be empty + keys = pr.ListAllowedPublicKeys() + if len(keys) != 0 { + t.Errorf("expected 0 keys after revoke, got %d", len(keys)) + } +} + +func TestPeerRegistry_IsPeerAllowed_OpenMode(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + pr.SetAuthMode(PeerAuthOpen) + + // In Open mode, any peer should be allowed + if !pr.IsPeerAllowed("unknown-peer", "unknown-key") { + t.Error("in Open mode, all peers should be allowed") + } + + if !pr.IsPeerAllowed("", "") { + t.Error("in Open mode, even empty IDs should be allowed") + } +} + +func TestPeerRegistry_IsPeerAllowed_AllowlistMode(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + pr.SetAuthMode(PeerAuthAllowlist) + + // Unknown peer with unknown key should be rejected + if pr.IsPeerAllowed("unknown-peer", "unknown-key") { + t.Error("in Allowlist mode, unknown peers should be rejected") + } + + // Pre-registered peer should be allowed + peer := &Peer{ + ID: "registered-peer", + Name: "Registered", + PublicKey: "registered-key", + } + pr.AddPeer(peer) + + if !pr.IsPeerAllowed("registered-peer", "any-key") { + t.Error("pre-registered peer should be allowed in Allowlist mode") + } + + // Peer with allowlisted public key should be allowed + pr.AllowPublicKey("allowed-key-1234567890") + if !pr.IsPeerAllowed("new-peer", "allowed-key-1234567890") { + t.Error("peer with allowlisted key should be allowed") + } + + // Unknown peer with non-allowlisted key 
should still be rejected + if pr.IsPeerAllowed("another-peer", "not-allowed-key") { + t.Error("peer without allowlisted key should be rejected") + } +} + +func TestPeerRegistry_PeerNameValidation(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + testCases := []struct { + name string + peerName string + shouldErr bool + }{ + {"empty name allowed", "", false}, + {"single char", "A", false}, + {"simple name", "MyPeer", false}, + {"name with hyphen", "my-peer", false}, + {"name with underscore", "my_peer", false}, + {"name with space", "My Peer", false}, + {"name with numbers", "Peer123", false}, + {"max length name", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789AB", false}, + {"too long name", "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789ABC", true}, + {"starts with hyphen", "-peer", true}, + {"ends with hyphen", "peer-", true}, + {"special chars", "peer@host", true}, + {"unicode chars", "peer\u0000name", true}, + } + + for i, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + peer := &Peer{ + ID: "test-peer-" + string(rune('A'+i)), + Name: tc.peerName, + } + err := pr.AddPeer(peer) + if tc.shouldErr && err == nil { + t.Errorf("expected error for name '%s' but got none", tc.peerName) + } else if !tc.shouldErr && err != nil { + t.Errorf("unexpected error for name '%s': %v", tc.peerName, err) + } + // Clean up for next test + if err == nil { + pr.RemovePeer(peer.ID) + } + }) + } +} + +func TestPeerRegistry_ScoreRecording(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + peer := &Peer{ + ID: "score-record-test", + Name: "Score Peer", + Score: 50, // Start at neutral + } + pr.AddPeer(peer) + + // Record successes - score should increase + for i := 0; i < 5; i++ { + pr.RecordSuccess("score-record-test") + } + updated := pr.GetPeer("score-record-test") + if updated.Score <= 50 { + t.Errorf("score should increase after successes, got %f", updated.Score) + } + + // Record failures - score should decrease + initialScore := updated.Score + for i := 0; i < 3; i++ { + pr.RecordFailure("score-record-test") + } + updated = pr.GetPeer("score-record-test") + if updated.Score >= initialScore { + t.Errorf("score should decrease after failures, got %f (was %f)", updated.Score, initialScore) + } + + // Record timeouts - score should decrease + initialScore = updated.Score + pr.RecordTimeout("score-record-test") + updated = pr.GetPeer("score-record-test") + if updated.Score >= initialScore { + t.Errorf("score should decrease after timeout, got %f (was %f)", updated.Score, initialScore) + } + + // Score should be clamped to min/max + for i := 0; i < 100; i++ { + pr.RecordSuccess("score-record-test") + } + updated = pr.GetPeer("score-record-test") + if updated.Score > ScoreMaximum { + t.Errorf("score should be clamped to max %f, got %f", ScoreMaximum, updated.Score) + } + + for i := 0; i < 100; i++ { + pr.RecordFailure("score-record-test") + } + updated = pr.GetPeer("score-record-test") + if updated.Score < ScoreMinimum { + t.Errorf("score should be clamped to min %f, got %f", ScoreMinimum, updated.Score) + } +} + +func TestPeerRegistry_GetPeersByScore(t *testing.T) { + pr, cleanup := setupTestPeerRegistry(t) + defer cleanup() + + // Add peers with different scores + peers := []*Peer{ + {ID: "low-score", Name: "Low", Score: 20}, + {ID: "high-score", Name: "High", Score: 90}, + {ID: "mid-score", Name: "Mid", Score: 50}, + } + + for _, p := range peers { + pr.AddPeer(p) + } + + sorted := pr.GetPeersByScore() + 
if len(sorted) != 3 { + t.Fatalf("expected 3 peers, got %d", len(sorted)) + } + + // Should be sorted by score descending + if sorted[0].ID != "high-score" { + t.Errorf("first peer should be high-score, got %s", sorted[0].ID) + } + if sorted[1].ID != "mid-score" { + t.Errorf("second peer should be mid-score, got %s", sorted[1].ID) + } + if sorted[2].ID != "low-score" { + t.Errorf("third peer should be low-score, got %s", sorted[2].ID) + } +} diff --git a/ui/src/app/api-config.service.ts b/ui/src/app/api-config.service.ts new file mode 100644 index 0000000..5027d4a --- /dev/null +++ b/ui/src/app/api-config.service.ts @@ -0,0 +1,117 @@ +import { Injectable, InjectionToken, inject } from '@angular/core'; + +/** + * API Configuration interface for dependency injection + */ +export interface ApiConfig { + /** Base URL for HTTP API (e.g., 'http://localhost:9090') */ + apiHost?: string; + /** API namespace (e.g., '/api/v1/mining') */ + apiNamespace?: string; + /** Force HTTPS/WSS even on localhost */ + forceSecure?: boolean; +} + +/** + * Injection token for providing custom API configuration + */ +export const API_CONFIG = new InjectionToken('API_CONFIG'); + +/** + * Service to provide consistent API URL configuration across the application. + * + * By default, it auto-detects the protocol and host from the current page location, + * which allows the app to work correctly whether served over HTTP or HTTPS. + * + * The configuration can be customized by providing API_CONFIG in the app module: + * + * @example + * // In app.config.ts + * providers: [ + * { provide: API_CONFIG, useValue: { apiHost: 'https://api.example.com' } } + * ] + */ +@Injectable({ + providedIn: 'root' +}) +export class ApiConfigService { + private readonly config = inject(API_CONFIG, { optional: true }); + + /** Default API namespace */ + private readonly defaultNamespace = '/api/v1/mining'; + + /** + * Get the base URL for HTTP API requests + * @returns Full HTTP base URL (e.g., 'https://localhost:9090/api/v1/mining') + */ + get apiBaseUrl(): string { + const host = this.getApiHost(); + const namespace = this.config?.apiNamespace ?? this.defaultNamespace; + return `${host}${namespace}`; + } + + /** + * Get the WebSocket URL for event streaming + * @returns Full WebSocket URL (e.g., 'wss://localhost:9090/api/v1/mining/ws/events') + */ + get wsUrl(): string { + const host = this.getApiHost(); + const namespace = this.config?.apiNamespace ?? this.defaultNamespace; + const protocol = host.startsWith('https') ? 'wss' : 'ws'; + // Replace http(s):// with ws(s):// + const wsHost = host.replace(/^https?:\/\//, `${protocol}://`); + return `${wsHost}${namespace}/ws/events`; + } + + /** + * Get the API host (protocol + hostname + port) + */ + private getApiHost(): string { + // If custom host is configured, use it + if (this.config?.apiHost) { + return this.config.apiHost; + } + + // Auto-detect from current page location + if (typeof window !== 'undefined' && window.location) { + const { protocol, hostname, port } = window.location; + + // Determine if we should use secure protocol + const isSecure = protocol === 'https:' || this.config?.forceSecure; + const httpProtocol = isSecure ? 
'https' : 'http'; + + // Default to port 9090 if we're on a dev server (4200) or no port specified + const apiPort = this.getApiPort(port); + + return `${httpProtocol}://${hostname}:${apiPort}`; + } + + // Fallback for SSR or non-browser environments + return 'http://localhost:9090'; + } + + /** + * Determine the API port based on the current page port + */ + private getApiPort(currentPort: string): string { + // If we're on the Angular dev server (4200), use the API default port + if (currentPort === '4200' || currentPort === '') { + return '9090'; + } + // Otherwise, assume we're served from the same port as the API + return currentPort; + } + + /** + * Check if the connection is secure (HTTPS/WSS) + */ + get isSecure(): boolean { + if (this.config?.forceSecure) { + return true; + } + if (typeof window !== 'undefined') { + return window.location.protocol === 'https:'; + } + return false; + } +} diff --git a/ui/src/app/components/sidebar/icon.component.ts b/ui/src/app/components/sidebar/icon.component.ts new file mode 100644 index 0000000..4ea59ef --- /dev/null +++ b/ui/src/app/components/sidebar/icon.component.ts @@ -0,0 +1,82 @@ +import { Component, input } from '@angular/core'; +import { CommonModule } from '@angular/common'; + +export type IconName = + | 'dashboard' + | 'workers' + | 'console' + | 'pools' + | 'profiles' + | 'miners' + | 'nodes'; + +/** + * Icon component that renders SVG icons without using innerHTML. + * This avoids the need for bypassSecurityTrustHtml and provides + * a safer, more maintainable approach to icon rendering. + */ +@Component({ + selector: 'app-icon', + standalone: true, + imports: [CommonModule], + template: ` + @switch (name()) { + @case ('dashboard') { + + + + } + @case ('workers') { + + + + } + @case ('console') { + + + + } + @case ('pools') { + + + + } + @case ('profiles') { + + + + } + @case ('miners') { + + + + } + @case ('nodes') { + + + + } + } + `, + styles: [` + :host { + display: flex; + align-items: center; + justify-content: center; + } + svg { + width: 20px; + height: 20px; + } + `] +}) +export class IconComponent { + name = input.required(); +} diff --git a/ui/src/app/components/sidebar/sidebar.component.ts b/ui/src/app/components/sidebar/sidebar.component.ts index c74db8e..1bcc30e 100644 --- a/ui/src/app/components/sidebar/sidebar.component.ts +++ b/ui/src/app/components/sidebar/sidebar.component.ts @@ -1,18 +1,18 @@ -import { Component, signal, output, input, inject, HostListener } from '@angular/core'; +import { Component, signal, output, input, HostListener } from '@angular/core'; import { CommonModule } from '@angular/common'; -import { DomSanitizer, SafeHtml } from '@angular/platform-browser'; +import { IconComponent, IconName } from './icon.component'; interface NavItem { id: string; label: string; - icon: SafeHtml; + icon: IconName; route: string; } @Component({ selector: 'app-sidebar', standalone: true, - imports: [CommonModule], + imports: [CommonModule, IconComponent], template: `