Compare commits


No commits in common. "dev" and "main" have entirely different histories.
dev ... main

493 changed files with 3297 additions and 29931 deletions

View file

@@ -1,200 +1,40 @@
 {
-  "$schema": "https://anthropic.com/claude-code/marketplace.schema.json",
-  "name": "host-uk",
-  "description": "Host UK development plugins for Claude Code, Codex, and Gemini",
+  "name": "core-agent",
+  "description": "Host UK Claude Code plugin collection",
   "owner": {
     "name": "Host UK",
     "email": "hello@host.uk.com"
   },
   "plugins": [
     {
-      "name": "claude-api",
-      "description": "API client generation helpers",
-      "version": "0.1.0",
-      "source": "./claude/api",
-      "category": "development"
-    },
-    {
-      "name": "claude-ci",
-      "description": "CI integration - GitHub Actions, test automation",
-      "version": "0.1.0",
-      "source": "./claude/ci",
-      "category": "workflow"
-    },
-    {
-      "name": "claude-code",
-      "description": "Core development plugin - hooks, formatting, autonomous workflows",
-      "version": "0.1.0",
+      "name": "code",
       "source": "./claude/code",
-      "category": "development"
+      "description": "Core development plugin - hooks, scripts, data collection skills",
+      "version": "0.1.0"
     },
     {
-      "name": "claude-collect",
-      "description": "Data collection via Borg CLI - /collect:github, /collect:website, /collect:excavate, /collect:papers",
-      "version": "0.1.0",
-      "source": "./claude/collect",
-      "category": "data-collection"
-    },
-    {
-      "name": "claude-coolify",
-      "description": "Coolify deployment helpers",
-      "version": "0.1.0",
-      "source": "./claude/coolify",
-      "category": "deployment"
-    },
-    {
-      "name": "claude-core",
-      "description": "Core commands for project maintenance",
-      "version": "0.1.0",
-      "source": "./claude/core",
-      "category": "workflow"
-    },
-    {
-      "name": "claude-issue",
-      "description": "Integration with GitHub Issues",
-      "version": "0.1.0",
-      "source": "./claude/issue",
-      "category": "productivity"
-    },
-    {
-      "name": "claude-perf",
-      "description": "Performance profiling helpers for Go and PHP",
-      "version": "0.1.0",
-      "source": "./claude/perf",
-      "category": "monitoring"
-    },
-    {
-      "name": "claude-qa",
-      "description": "Quality assurance - iterative fix loops, lint enforcement",
-      "version": "0.1.0",
-      "source": "./claude/qa",
-      "category": "testing"
-    },
-    {
-      "name": "claude-review",
-      "description": "Code review automation - PR review, security checks",
-      "version": "0.1.0",
+      "name": "review",
       "source": "./claude/review",
-      "category": "security"
+      "description": "Code review automation - PR review, security checks",
+      "version": "0.1.0"
     },
     {
-      "name": "claude-verify",
-      "description": "Work verification - ensure tests pass, no debug statements",
-      "version": "0.1.0",
+      "name": "verify",
       "source": "./claude/verify",
-      "category": "testing"
+      "description": "Work verification - ensure tests pass, no debug statements",
+      "version": "0.1.0"
     },
     {
-      "name": "codex",
-      "description": "Codex awareness, ethics modal, and guardrails",
-      "version": "0.1.1",
-      "source": "./codex",
-      "category": "ethics"
+      "name": "qa",
+      "source": "./claude/qa",
+      "description": "Quality assurance - iterative fix loops, lint enforcement",
+      "version": "0.1.0"
     },
     {
-      "name": "codex-awareness",
-      "description": "Codex awareness guidance for the core-agent monorepo",
-      "version": "0.1.1",
-      "source": "./codex/awareness",
-      "category": "productivity"
-    },
-    {
-      "name": "codex-ethics",
-      "description": "Ethics modal and axioms kernel for Codex",
-      "version": "0.1.1",
-      "source": "./codex/ethics",
-      "category": "ethics"
-    },
-    {
-      "name": "codex-guardrails",
-      "description": "Safety guardrails with a focus on safe string handling",
-      "version": "0.1.1",
-      "source": "./codex/guardrails",
-      "category": "security"
-    },
-    {
-      "name": "codex-api",
-      "description": "Codex API plugin",
-      "version": "0.1.1",
-      "source": "./codex/api",
-      "category": "development"
-    },
-    {
-      "name": "codex-ci",
-      "description": "Codex CI plugin",
-      "version": "0.1.1",
-      "source": "./codex/ci",
-      "category": "workflow"
-    },
-    {
-      "name": "codex-code",
-      "description": "Codex code workflow plugin",
-      "version": "0.1.1",
-      "source": "./codex/code",
-      "category": "development"
-    },
-    {
-      "name": "codex-collect",
-      "description": "Codex collection plugin",
-      "version": "0.1.1",
-      "source": "./codex/collect",
-      "category": "data-collection"
-    },
-    {
-      "name": "codex-coolify",
-      "description": "Codex Coolify plugin",
-      "version": "0.1.1",
-      "source": "./codex/coolify",
-      "category": "deployment"
-    },
-    {
-      "name": "codex-core",
-      "description": "Codex core plugin",
-      "version": "0.1.1",
-      "source": "./codex/core",
-      "category": "workflow"
-    },
-    {
-      "name": "codex-issue",
-      "description": "Codex issue plugin",
-      "version": "0.1.1",
-      "source": "./codex/issue",
-      "category": "productivity"
-    },
-    {
-      "name": "codex-perf",
-      "description": "Codex performance plugin",
-      "version": "0.1.1",
-      "source": "./codex/perf",
-      "category": "monitoring"
-    },
-    {
-      "name": "codex-qa",
-      "description": "Codex QA plugin",
-      "version": "0.1.1",
-      "source": "./codex/qa",
-      "category": "testing"
-    },
-    {
-      "name": "codex-review",
-      "description": "Codex review plugin",
-      "version": "0.1.1",
-      "source": "./codex/review",
-      "category": "security"
-    },
-    {
-      "name": "codex-verify",
-      "description": "Codex verify plugin",
-      "version": "0.1.1",
-      "source": "./codex/verify",
-      "category": "testing"
-    },
-    {
-      "name": "gemini-core-agent",
-      "description": "Host UK Core Agent Extension for Gemini CLI",
-      "version": "0.1.1",
-      "source": "./google/gemini-cli",
-      "category": "development"
+      "name": "ci",
+      "source": "./claude/ci",
+      "description": "CI integration - GitHub Actions, test automation",
+      "version": "0.1.0"
     }
   ]
 }

View file

@@ -1,9 +0,0 @@
{
"history": [
{
"commit": "dd22744f56eb01cddc090aded2542ae6d37f484f",
"date": "2026-02-02T05:25:04Z",
"coverage": 82.3
}
]
}

View file

@@ -1,16 +0,0 @@
# .env.example
DB_CONNECTION=mysql
DB_HOST=127.0.0.1
DB_PORT=3306
DB_DATABASE=laravel
DB_USERNAME=root
DB_PASSWORD=
REDIS_HOST=127.0.0.1
REDIS_PASSWORD=null
REDIS_PORT=6379
# Required, no default
JWT_SECRET=
# Required for billing
STRIPE_KEY=

.gitignore
View file

@@ -1,6 +1 @@
 .idea/
-vendor/
-claude/api/php/vendor/
-__pycache__/
-.env

View file

@@ -1,12 +0,0 @@
{
"mcpServers": {
"HOST_UK_MARKETPLACE": {
"command": "go",
"args": [
"run",
"./mcp/"
],
"type": "stdio"
}
}
}

CLAUDE.md
View file

@@ -1,103 +1,86 @@
 # CLAUDE.md
-This file guides Claude Code when working inside the Host UK core-agent repository.
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
 ## Overview
-**core-agent** is a unified marketplace hosting Claude and Codex plugin suites plus the Gemini CLI extension. A shared MCP server powers marketplace discovery and ethics checks across tools.
+**core-agent** is a monorepo of Claude Code plugins for the Host UK federated monorepo. It contains multiple focused plugins that can be installed individually or together.
-## Marketplace
-Install everything via the unified marketplace:
+## Plugins
+| Plugin | Description | Install |
+|--------|-------------|---------|
+| **code** | Core development - hooks, scripts, data collection | `claude plugin add host-uk/core-agent/claude/code` |
+| **review** | Code review automation | `claude plugin add host-uk/core-agent/claude/review` |
+| **verify** | Work verification | `claude plugin add host-uk/core-agent/claude/verify` |
+| **qa** | Quality assurance loops | `claude plugin add host-uk/core-agent/claude/qa` |
+| **ci** | CI/CD integration | `claude plugin add host-uk/core-agent/claude/ci` |
+Or install all via marketplace:
 ```bash
 claude plugin add host-uk/core-agent
 ```
-Install a specific Claude plugin:
-```bash
-claude plugin add host-uk/core-agent/claude/code
-```
-The marketplace manifest lives at `core-agent/.claude-plugin/marketplace.json`.
-## Shared MCP Server
-The vendor-neutral MCP server is registered in `core-agent/.mcp.json` and runs via stdio:
-```bash
-go run ./mcp/
-```
-Tools exposed:
-- `marketplace_list` — returns the full marketplace registry as structured data
-- `marketplace_plugin_info` — returns plugin metadata, commands, and skills by name
-- `core_cli` — runs approved `core` CLI commands (`dev`, `go`, `php`, `build` only)
-- `ethics_check` — returns the ethics modal and axioms kernel
-## Ethics Kernel
-The Axioms of Life ethics kernel is first-class and lives at:
-- `core-agent/codex/ethics/MODAL.md`
-- `core-agent/codex/ethics/kernel/axioms.json`
 ## Repository Structure
 ```
 core-agent/
-├── .claude-plugin/          # Unified marketplace manifest
-├── .mcp.json                # MCP server registry
-├── claude/                  # Claude plugin suite
-├── codex/                   # Codex plugin suite + ethics kernel
-├── google/                  # Gemini CLI extension
-├── mcp/                     # Shared MCP stdio server
+├── .claude-plugin/
+│   └── marketplace.json     # Plugin registry (enables auto-updates)
+├── claude/
+│   ├── code/                # Core development plugin
+│   │   ├── .claude-plugin/
+│   │   │   └── plugin.json
+│   │   ├── hooks.json
+│   │   ├── hooks/
+│   │   ├── scripts/
+│   │   ├── commands/        # /code:remember, /code:yes
+│   │   ├── skills/          # Data collection skills
+│   │   └── collection/      # Collection event hooks
+│   ├── review/              # Code review plugin
+│   │   ├── .claude-plugin/
+│   │   │   └── plugin.json
+│   │   └── commands/        # /review:review
+│   ├── verify/              # Verification plugin
+│   │   ├── .claude-plugin/
+│   │   │   └── plugin.json
+│   │   └── commands/        # /verify:verify
+│   ├── qa/                  # QA plugin
+│   │   ├── .claude-plugin/
+│   │   │   └── plugin.json
+│   │   ├── scripts/
+│   │   └── commands/        # /qa:qa, /qa:fix
+│   └── ci/                  # CI plugin
+│       ├── .claude-plugin/
+│       │   └── plugin.json
+│       └── commands/        # /ci:ci, /ci:workflow
 ├── CLAUDE.md
-└── README.md
+└── .gitignore
 ```
-## Plugin Suites
-Claude plugins:
-- `claude/api`
-- `claude/ci`
-- `claude/code`
-- `claude/collect`
-- `claude/coolify`
-- `claude/core`
-- `claude/issue`
-- `claude/perf`
-- `claude/qa`
-- `claude/review`
-- `claude/verify`
-Codex plugins:
-- `codex/awareness`
-- `codex/ethics`
-- `codex/guardrails`
-- `codex/api`
-- `codex/ci`
-- `codex/code`
-- `codex/collect`
-- `codex/coolify`
-- `codex/core`
-- `codex/issue`
-- `codex/perf`
-- `codex/qa`
-- `codex/review`
-- `codex/verify`
-Gemini extension:
-- `google/gemini-cli`
+## Plugin Commands
+### code
+- `/code:remember <fact>` - Save context that persists across compaction
+- `/code:yes <task>` - Auto-approve mode with commit requirement
+### review
+- `/review:review [range]` - Code review on staged changes or commits
+### verify
+- `/verify:verify [--quick|--full]` - Verify work is complete
+### qa
+- `/qa:qa` - Iterative QA fix loop (runs until all checks pass)
+- `/qa:fix <issue>` - Fix a specific QA issue
+### ci
+- `/ci:ci [status|run|logs|fix]` - CI status and management
+- `/ci:workflow <type>` - Generate GitHub Actions workflows
 ## Core CLI Philosophy
-Always use `core` CLI instead of raw commands. The `core` binary handles the full E2E development lifecycle for Go and PHP ecosystems.
+**Always use `core` CLI instead of raw commands.** The `core` binary handles the full E2E development lifecycle for Go and PHP ecosystems.
 ### Command Mappings
@@ -111,23 +94,82 @@ Always use `core` CLI instead of raw commands. The `core` binary handles the ful
 | `./vendor/bin/pint` | `core php fmt` |
 | `./vendor/bin/phpstan` | `core php stan` |
+### Key Commands
+```bash
+# Development
+core dev health    # Status across repos
+core dev work      # Full workflow: status → commit → push
+# Go
+core go test       # Run tests
+core go qa         # Full QA pipeline
+# PHP
+core php test      # Run Pest tests
+core php qa        # Full QA pipeline
+# Building
+core build         # Auto-detect and build
+# AI
+core ai task       # Auto-select a task
+core ai task:pr    # Create PR for task
+```
+## code Plugin Features
+### Hooks
+| Hook | File | Purpose |
+|------|------|---------|
+| PreToolUse | `prefer-core.sh` | Block dangerous commands, enforce `core` CLI |
+| PostToolUse | `php-format.sh` | Auto-format PHP |
+| PostToolUse | `go-format.sh` | Auto-format Go |
+| PostToolUse | `check-debug.sh` | Warn about debug statements |
+| PreCompact | `pre-compact.sh` | Save state before compaction |
+| SessionStart | `session-start.sh` | Restore context on startup |
+### Blocked Patterns
+**Destructive operations:**
+- `rm -rf` / `rm -r` (except node_modules, vendor, .cache)
+- `mv`/`cp` with wildcards
+- `xargs` with rm/mv/cp
+- `find -exec` with file operations
+- `sed -i` (in-place editing)
+**Raw commands (use core instead):**
+- `go test/build/fmt/mod` → `core go *`
+- `composer test` → `core php test`
+### Data Collection Skills
+| Skill | Purpose |
+|-------|---------|
+| `ledger-papers/` | 91+ distributed ledger whitepapers |
+| `project-archaeology/` | Dead project excavation |
+| `bitcointalk/` | Forum thread archival |
+| `coinmarketcap/` | Historical price data |
+| `github-history/` | Repository history preservation |
 ## Development
-Adding a new Claude plugin:
+### Adding a new plugin
 1. Create `claude/<name>/.claude-plugin/plugin.json`
 2. Add commands to `claude/<name>/commands/`
 3. Register in `.claude-plugin/marketplace.json`
-Adding a new Codex plugin:
-1. Create `codex/<name>/.codex-plugin/plugin.json`
-2. Add commands to `codex/<name>/commands/`
-3. Register in `.claude-plugin/marketplace.json`
+### Testing hooks locally
+```bash
+echo '{"tool_input": {"command": "rm -rf /"}}' | bash ./claude/code/hooks/prefer-core.sh
+```
 ## Coding Standards
-- UK English: colour, organisation, centre
-- Shell scripts: use `#!/bin/bash`, read JSON with `jq`
-- Hook output: JSON with `decision` (approve/block) and optional `message`
-- License: EUPL-1.2 CIC
+- **UK English**: colour, organisation, centre
+- **Shell scripts**: Use `#!/bin/bash`, read JSON with `jq`
+- **Hook output**: JSON with `decision` (approve/block) and optional `message`
+- **License**: EUPL-1.2 CIC

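The coding standards in CLAUDE.md above pin down the whole hook contract: stdin carries the tool-call JSON (the shape used in "Testing hooks locally"), stdout returns a decision object. A minimal sketch of a PreToolUse hook written to those rules; the real `prefer-core.sh` is not in this diff, and the patterns below are illustrative:

```bash
#!/bin/bash
# Sketch only: input shape from the "Testing hooks locally" example,
# output shape from the Coding Standards bullet ("decision" + "message").
cmd=$(jq -r '.tool_input.command // ""')
case "$cmd" in
  "rm -rf"*|"sed -i"*)
    jq -n '{decision: "block", message: "Destructive command blocked - see Blocked Patterns"}' ;;
  "go test"*|"composer test"*)
    jq -n '{decision: "block", message: "Use the core CLI: core go test / core php test"}' ;;
  *)
    jq -n '{decision: "approve"}' ;;
esac
```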
README.md
View file

@@ -1,106 +1,43 @@
 # core-agent
-A unified marketplace of Host UK plugins for Claude Code, Codex, and the Gemini CLI. This repo hosts the Claude and Codex suites alongside a shared MCP server and the Axioms of Life ethics kernel.
+A monorepo of [Claude Code](https://claude.ai/code) plugins for the Host UK federated monorepo.
-## Marketplace
-The public marketplace manifest lives at `core-agent/.claude-plugin/marketplace.json` and lists all Claude, Codex, and Gemini entries.
+## Plugins
+| Plugin | Description | Commands |
+|--------|-------------|----------|
+| **[code](./claude/code)** | Core development - hooks, scripts, data collection | `/code:remember`, `/code:yes` |
+| **[review](./claude/review)** | Code review automation | `/review:review`, `/review:security`, `/review:pr` |
+| **[verify](./claude/verify)** | Work verification before commit/push | `/verify:verify`, `/verify:ready` |
+| **[qa](./claude/qa)** | Quality assurance fix loops | `/qa:qa`, `/qa:fix`, `/qa:check` |
+| **[ci](./claude/ci)** | CI/CD integration | `/ci:ci`, `/ci:workflow`, `/ci:fix` |
 ## Installation
-Claude Code:
 ```bash
-# Install everything via marketplace
+# Install all plugins via marketplace
 claude plugin add host-uk/core-agent
-# Or install specific Claude plugins
+# Or install individual plugins
 claude plugin add host-uk/core-agent/claude/code
 claude plugin add host-uk/core-agent/claude/review
 claude plugin add host-uk/core-agent/claude/qa
 ```
-Gemini CLI:
-- Extension lives at `core-agent/google/gemini-cli`.
-Codex:
-- Codex plugins live at `core-agent/codex`.
-## Shared MCP Server
-A vendor-neutral MCP server lives in `core-agent/mcp` and is registered in `.mcp.json` at the repo root.
-Run locally:
-```bash
-go run ./mcp/
-```
-Tools:
-- `marketplace_list` — returns the full marketplace registry as structured data
-- `marketplace_plugin_info` — returns plugin metadata, commands, and skills by name
-- `core_cli` — runs approved `core` CLI commands (`dev`, `go`, `php`, `build` only)
-- `ethics_check` — returns the ethics modal and axioms kernel
-## Ethics Kernel
-The Axioms of Life ethics kernel is a first-class feature under `core-agent/codex/ethics`.
-- `core-agent/codex/ethics/MODAL.md`
-- `core-agent/codex/ethics/kernel/axioms.json`
-## Plugin Suites
-Claude plugins:
-- `claude/api`
-- `claude/ci`
-- `claude/code`
-- `claude/collect`
-- `claude/coolify`
-- `claude/core`
-- `claude/issue`
-- `claude/perf`
-- `claude/qa`
-- `claude/review`
-- `claude/verify`
-Codex plugins:
-- `codex/awareness`
-- `codex/ethics`
-- `codex/guardrails`
-- `codex/api`
-- `codex/ci`
-- `codex/code`
-- `codex/collect`
-- `codex/coolify`
-- `codex/core`
-- `codex/issue`
-- `codex/perf`
-- `codex/qa`
-- `codex/review`
-- `codex/verify`
-Gemini extension:
-- `google/gemini-cli`
-## Repository Structure
-```
-core-agent/
-├── .claude-plugin/          # Unified marketplace manifest
-├── .mcp.json                # MCP server registry
-├── claude/                  # Claude plugin suite
-├── codex/                   # Codex plugin suite + ethics kernel
-├── google/                  # Gemini CLI extension
-├── mcp/                     # Shared MCP stdio server
-├── CLAUDE.md
-└── README.md
-```
+## Quick Start
+```bash
+# Code review staged changes
+/review:review
+# Run QA and fix all issues
+/qa:qa
+# Verify work is ready to commit
+/verify:verify
+# Check CI status
+/ci:ci
 ```
 ## Core CLI Integration
@@ -115,6 +52,62 @@ These plugins enforce the `core` CLI for development commands:
 | `composer test` | `core php test` |
 | `./vendor/bin/pint` | `core php fmt` |
+## Plugin Details
+### code
+The core plugin with hooks and data collection skills:
+- **Hooks**: Auto-format, debug detection, dangerous command blocking
+- **Skills**: Data collection for archiving OSS projects (whitepapers, forums, market data)
+- **Commands**: `/code:remember` (persist facts), `/code:yes` (auto-approve mode)
+### review
+Code review automation:
+- `/review:review` - Review staged changes or commit range
+- `/review:security` - Security-focused review
+- `/review:pr [number]` - Review a pull request
+### verify
+Work verification:
+- `/verify:verify` - Full verification (tests, lint, format, debug check)
+- `/verify:ready` - Quick check if ready to commit
+### qa
+Quality assurance:
+- `/qa:qa` - Run QA pipeline, fix all issues iteratively
+- `/qa:fix <issue>` - Fix a specific issue
+- `/qa:check` - Check without fixing
+### ci
+CI/CD integration:
+- `/ci:ci` - Check CI status
+- `/ci:workflow <type>` - Generate GitHub Actions workflow
+- `/ci:fix` - Analyse and fix failing CI
+## Development
+### Adding a new plugin
+1. Create `claude/<name>/.claude-plugin/plugin.json`
+2. Add commands to `claude/<name>/commands/`
+3. Add hooks to `claude/<name>/hooks.json` (optional)
+4. Register in `.claude-plugin/marketplace.json`
+### Testing locally
+```bash
+claude plugin add /path/to/core-agent
+```
 ## License
 EUPL-1.2

View file

@@ -1,20 +0,0 @@
<?php
class Test
{
public function originalMethod()
{
// Some code before the selection
echo "Before";
// Start of selection
$a = 1;
$b = 2;
$c = $a + $b;
echo $c;
// End of selection
// Some code after the selection
echo "After";
}
}

api.js
View file

@@ -1,26 +0,0 @@
// Generated from routes/api.php
export const api = {
auth: {
login: (data) => fetch(`/api/auth/login`, {
method: "POST",
body: JSON.stringify(data)
}),
},
users: {
list: () => fetch(`/api/users`, {
}),
create: (data) => fetch(`/api/users`, {
method: "POST",
body: JSON.stringify(data)
}),
get: (user) => fetch(`/api/users/${user}`, {
}),
update: (user, data) => fetch(`/api/users/${user}`, {
method: "PUT",
body: JSON.stringify(data)
}),
delete: (user) => fetch(`/api/users/${user}`, {
method: "DELETE",
}),
},
};

api.ts
View file

@@ -1,26 +0,0 @@
// Generated from routes/api.php
export const api = {
auth: {
login: (data: any) => fetch(`/api/auth/login`, {
method: "POST",
body: JSON.stringify(data)
}),
},
users: {
list: () => fetch(`/api/users`, {
}),
create: (data: any) => fetch(`/api/users`, {
method: "POST",
body: JSON.stringify(data)
}),
get: (user: number) => fetch(`/api/users/${user}`, {
}),
update: (user: number, data: any) => fetch(`/api/users/${user}`, {
method: "PUT",
body: JSON.stringify(data)
}),
delete: (user: number) => fetch(`/api/users/${user}`, {
method: "DELETE",
}),
},
};

View file

@@ -1,24 +0,0 @@
---
name: generate
description: Generate TypeScript/JavaScript API client from Laravel routes
args: [--ts|--js] [--openapi]
---
# Generate API Client
Generates a TypeScript or JavaScript API client from your project's Laravel routes.
## Usage
Generate TypeScript client (default):
`core:api generate`
Generate JavaScript client:
`core:api generate --js`
Generate OpenAPI spec:
`core:api generate --openapi`
## Action
This command will run a script to parse the routes and generate the client.

View file

@@ -1,10 +0,0 @@
<?php
namespace App\Console;
use Illuminate\Foundation\Console\Kernel as ConsoleKernel;
class Kernel extends ConsoleKernel
{
protected $commands = [];
}

View file

@@ -1,11 +0,0 @@
<?php
namespace App\Exceptions;
use Illuminate\Foundation\Exceptions\Handler as ExceptionHandler;
class Handler extends ExceptionHandler
{
protected $dontReport = [];
protected $dontFlash = [];
}

View file

@@ -1,12 +0,0 @@
<?php
namespace App\Http;
use Illuminate\Foundation\Http\Kernel as HttpKernel;
class Kernel extends HttpKernel
{
protected $middleware = [];
protected $middlewareGroups = [];
protected $routeMiddleware = [];
}

View file

@@ -1,12 +0,0 @@
{
"require": {
"illuminate/routing": "^8.0",
"illuminate/filesystem": "^8.0",
"illuminate/foundation": "^8.0"
},
"autoload": {
"psr-4": {
"App\\": "app/"
}
}
}

View file

@@ -1,124 +0,0 @@
<?php
/**
* This script parses a Laravel routes file and outputs a JSON representation of the
* routes. It is designed to be used by the generate.sh script to generate an
* API client.
*/
class ApiGenerator
{
/**
* A map of API resource actions to their corresponding client method names.
* This is used to generate more user-friendly method names in the client.
*/
private $actionMap = [
'index' => 'list',
'store' => 'create',
'show' => 'get',
'update' => 'update',
'destroy' => 'delete',
];
/**
* The main method that parses the routes file and outputs the JSON.
*/
public function generate()
{
// The path to the routes file.
$routesFile = __DIR__ . '/routes/api.php';
// The contents of the routes file.
$contents = file_get_contents($routesFile);
// An array to store the parsed routes.
$output = [];
// This regex matches Route::apiResource() declarations. It captures the
// resource name (e.g., "users") and the controller name (e.g., "UserController").
preg_match_all('/Route::apiResource\(\s*\'([^\']+)\'\s*,\s*\'([^\']+)\'\s*\);/m', $contents, $matches, PREG_SET_ORDER);
// For each matched apiResource, generate the corresponding resource routes.
foreach ($matches as $match) {
$resource = $match[1];
$controller = $match[2];
$output = array_merge($output, $this->generateApiResourceRoutes($resource, $controller));
}
// This regex matches individual route declarations (e.g., Route::get(),
// Route::post(), etc.). It captures the HTTP method, the URI, and the
// controller and method names.
preg_match_all('/Route::(get|post|put|patch|delete)\(\s*\'([^\']+)\'\s*,\s*\[\s*\'([^\']+)\'\s*,\s*\'([^\']+)\'\s*\]\s*\);/m', $contents, $matches, PREG_SET_ORDER);
// For each matched route, create a route object and add it to the output.
foreach ($matches as $match) {
$method = strtoupper($match[1]);
$uri = 'api/' . $match[2];
$actionName = $match[4];
$output[] = [
'method' => $method,
'uri' => $uri,
'name' => null,
'action' => $match[3] . '@' . $actionName,
'action_name' => $actionName,
'parameters' => $this->extractParameters($uri),
];
}
// Output the parsed routes as a JSON string.
echo json_encode($output, JSON_PRETTY_PRINT);
}
/**
* Generates the routes for an API resource.
*
* @param string $resource The name of the resource (e.g., "users").
* @param string $controller The name of the controller (e.g., "UserController").
* @return array An array of resource routes.
*/
private function generateApiResourceRoutes($resource, $controller)
{
$routes = [];
$baseUri = "api/{$resource}";
// The resource parameter (e.g., "{user}").
$resourceParam = "{" . rtrim($resource, 's') . "}";
// The standard API resource actions and their corresponding HTTP methods and URIs.
$actions = [
'index' => ['method' => 'GET', 'uri' => $baseUri],
'store' => ['method' => 'POST', 'uri' => $baseUri],
'show' => ['method' => 'GET', 'uri' => "{$baseUri}/{$resourceParam}"],
'update' => ['method' => 'PUT', 'uri' => "{$baseUri}/{$resourceParam}"],
'destroy' => ['method' => 'DELETE', 'uri' => "{$baseUri}/{$resourceParam}"],
];
// For each action, create a route object and add it to the routes array.
foreach ($actions as $action => $details) {
$routes[] = [
'method' => $details['method'],
'uri' => $details['uri'],
'name' => "{$resource}.{$action}",
'action' => "{$controller}@{$action}",
'action_name' => $this->actionMap[$action] ?? $action,
'parameters' => $this->extractParameters($details['uri']),
];
}
return $routes;
}
/**
* Extracts the parameters from a URI.
*
* @param string $uri The URI to extract the parameters from.
* @return array An array of parameters.
*/
private function extractParameters($uri)
{
// This regex matches any string enclosed in curly braces (e.g., "{user}").
preg_match_all('/\{([^\}]+)\}/', $uri, $matches);
return $matches[1];
}
}
// Create a new ApiGenerator and run it.
(new ApiGenerator())->generate();

View file

@@ -1,6 +0,0 @@
<?php
use Illuminate\Support\Facades\Route;
Route::apiResource('users', 'UserController');
Route::post('auth/login', ['AuthController', 'login']);

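Fed through the generator shown earlier, the `apiResource` line above expands into five route objects and the `post` line into one. A quick way to inspect the parser output, assuming `generate.php` sits next to the `routes/` directory it reads:

```bash
php generate.php | jq '.[0]'
# Expected shape (fields from ApiGenerator::generate):
# {"method":"GET","uri":"api/users","name":"users.index",
#  "action":"UserController@index","action_name":"list","parameters":[]}
```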
View file

@@ -1,125 +0,0 @@
#!/bin/bash
# This script generates a TypeScript/JavaScript API client or an OpenAPI spec
# from a Laravel routes file. It works by running a PHP script to parse the
# routes into JSON, and then uses jq to transform the JSON into the desired
# output format.
# Path to the PHP script that parses the Laravel routes.
PHP_SCRIPT="$(dirname "$0")/../php/generate.php"
# Run the PHP script and capture the JSON output.
ROUTES_JSON=$(php "$PHP_SCRIPT")
# --- Argument Parsing ---
# Initialize flags for the different output formats.
TS=false
JS=false
OPENAPI=false
# Loop through the command-line arguments to determine which output format
# to generate.
for arg in "$@"; do
case $arg in
--ts)
TS=true
shift # Remove --ts from the list of arguments
;;
--js)
JS=true
shift # Remove --js from the list of arguments
;;
--openapi)
OPENAPI=true
shift # Remove --openapi from the list of arguments
;;
esac
done
# Default to TypeScript if no language is specified. This ensures that the
# script always generates at least one output format.
if [ "$JS" = false ] && [ "$OPENAPI" = false ]; then
TS=true
fi
# --- TypeScript Client Generation ---
if [ "$TS" = true ]; then
# Start by creating the api.ts file and adding the header.
echo "// Generated from routes/api.php" > api.ts
echo "export const api = {" >> api.ts
# Use jq to transform the JSON into a TypeScript client.
echo "$ROUTES_JSON" | jq -r '
[group_by(.uri | split("/")[1]) | .[] | {
key: .[0].uri | split("/")[1],
value: .
}] | from_entries | to_entries | map(
" \(.key): {\n" +
(.value | map(
" \(.action_name): (" +
(.parameters | map("\(.): number") | join(", ")) +
(if (.method == "POST" or .method == "PUT") and (.parameters | length > 0) then ", " else "" end) +
(if .method == "POST" or .method == "PUT" then "data: any" else "" end) +
") => fetch(`/\(.uri | gsub("{"; "${") | gsub("}"; "}"))`, {" +
(if .method != "GET" then "\n method: \"\(.method)\"," else "" end) +
(if .method == "POST" or .method == "PUT" then "\n body: JSON.stringify(data)" else "" end) +
"\n }),"
) | join("\n")) +
"\n },"
) | join("\n")
' >> api.ts
echo "};" >> api.ts
fi
# --- JavaScript Client Generation ---
if [ "$JS" = true ]; then
# Start by creating the api.js file and adding the header.
echo "// Generated from routes/api.php" > api.js
echo "export const api = {" >> api.js
# The jq filter for JavaScript is similar to the TypeScript filter, but
# it doesn't include type annotations.
echo "$ROUTES_JSON" | jq -r '
[group_by(.uri | split("/")[1]) | .[] | {
key: .[0].uri | split("/")[1],
value: .
}] | from_entries | to_entries | map(
" \(.key): {\n" +
(.value | map(
" \(.action_name): (" +
(.parameters | join(", ")) +
(if (.method == "POST" or .method == "PUT") and (.parameters | length > 0) then ", " else "" end) +
(if .method == "POST" or .method == "PUT" then "data" else "" end) +
") => fetch(`/\(.uri | gsub("{"; "${") | gsub("}"; "}"))`, {" +
(if .method != "GET" then "\n method: \"\(.method)\"," else "" end) +
(if .method == "POST" or .method == "PUT" then "\n body: JSON.stringify(data)" else "" end) +
"\n }),"
) | join("\n")) +
"\n },"
) | join("\n")
' >> api.js
echo "};" >> api.js
fi
# --- OpenAPI Spec Generation ---
if [ "$OPENAPI" = true ]; then
# Start by creating the openapi.yaml file and adding the header.
echo "openapi: 3.0.0" > openapi.yaml
echo "info:" >> openapi.yaml
echo " title: API" >> openapi.yaml
echo " version: 1.0.0" >> openapi.yaml
echo "paths:" >> openapi.yaml
# The jq filter for OpenAPI generates a YAML file with the correct structure.
# It groups the routes by URI, and then for each URI, it creates a path
# entry with the correct HTTP methods.
echo "$ROUTES_JSON" | jq -r '
group_by(.uri) | .[] |
" /\(.[0].uri):\n" +
(map(" " + (.method | ascii_downcase | split("|")[0]) + ":\n" +
" summary: \(.action)\n" +
" responses:\n" +
" \"200\":\n" +
" description: OK") | join("\n"))
' >> openapi.yaml
fi

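A typical invocation of the script above, assuming it is saved as `generate.sh` beside the `../php/generate.php` it references; the `api.ts` and `api.js` files earlier in this diff are exactly the outputs it writes to the current directory:

```bash
bash generate.sh            # default: TypeScript client → ./api.ts
bash generate.sh --js       # JavaScript client → ./api.js
bash generate.sh --openapi  # OpenAPI 3.0 spec → ./openapi.yaml
```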
View file

@@ -1,6 +1,6 @@
 {
-  "name": "code",
-  "description": "Core development plugin for Host UK monorepo - core CLI integration, formatting hooks, and autonomous workflows",
+  "name": "core-agent",
+  "description": "Advanced Claude Code plugin for Host UK monorepo - core CLI integration, data collection skills, and autonomous workflows",
   "version": "0.1.0",
   "author": {
     "name": "Host UK",
@@ -17,6 +17,9 @@
     "monorepo",
     "go",
     "php",
-    "laravel"
+    "laravel",
+    "data-collection",
+    "cryptocurrency",
+    "archive"
   ]
 }

View file

@@ -0,0 +1,90 @@
# Collection Hooks
Event-driven hooks that trigger during data collection.
## Available Hooks
| Hook | Trigger | Purpose |
|------|---------|---------|
| `collect-whitepaper.sh` | PDF/paper URL detected | Auto-queue whitepapers |
| `on-github-release.sh` | Release found | Archive release metadata |
| `on-explorer-block.sh` | Block data fetched | Index blockchain data |
## Hook Events
### `on_url_found`
Fired when a new URL is discovered during collection.
```bash
# Pattern matching
*.pdf → collect-whitepaper.sh
*/releases/* → on-github-release.sh
*/api/block/* → on-explorer-block.sh
```
### `on_file_collected`
Fired after a file is successfully downloaded.
```bash
# Post-processing
*.json → validate-json.sh
*.html → extract-links.sh
*.pdf → extract-metadata.sh
```
### `on_collection_complete`
Fired when a job batch finishes.
```bash
# Reporting
→ generate-index.sh
→ update-registry.sh
```
## Plugin Integration
For the marketplace plugin system:
```json
{
"name": "whitepaper-collector",
"version": "1.0.0",
"hooks": {
"on_url_found": {
"pattern": "*.pdf",
"handler": "./collect-whitepaper.sh"
}
}
}
```
## Registration
Hooks register in `hooks.json`:
```json
{
"on_url_found": [
{
"pattern": "\\.pdf$",
"handler": "./hooks/collect-whitepaper.sh",
"priority": 10
}
]
}
```
## Usage in Collectors
Collectors call hooks via:
```bash
# In job-collector/process.sh
source ./hooks/dispatch.sh
# When URL found
dispatch_hook "on_url_found" "$URL"
# When file collected
dispatch_hook "on_file_collected" "$FILE" "$TYPE"
```

View file

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
# Hook: collect-whitepaper.sh
# Called when a whitepaper URL is detected during collection
# Usage: ./collect-whitepaper.sh <URL> [destination-folder]
set -e
URL="$1"
DEST="${2:-./whitepapers}"
if [ -z "$URL" ]; then
echo "Usage: $0 <url> [destination]" >&2
exit 1
fi
# Detect paper type from URL
detect_category() {
local url="$1"
case "$url" in
*cryptonote*) echo "cryptonote" ;;
*iacr.org*|*eprint*) echo "research" ;;
*arxiv.org*) echo "research" ;;
*monero*|*getmonero*) echo "research" ;;
*lethean*|*lthn*) echo "lethean" ;;
*) echo "uncategorized" ;;
esac
}
# Generate safe filename from URL
safe_filename() {
local url="$1"
basename "$url" | sed 's/[^a-zA-Z0-9._-]/-/g'
}
CATEGORY=$(detect_category "$URL")
FILENAME=$(safe_filename "$URL")
TARGET_DIR="$DEST/$CATEGORY"
TARGET_FILE="$TARGET_DIR/$FILENAME"
mkdir -p "$TARGET_DIR"
# Check if already collected
if [ -f "$TARGET_FILE" ]; then
echo "Already collected: $TARGET_FILE"
exit 0
fi
echo "Collecting whitepaper:"
echo " URL: $URL"
echo " Category: $CATEGORY"
echo " Destination: $TARGET_FILE"
# Create job entry for proxy collection
echo "$URL|$FILENAME|whitepaper|category=$CATEGORY" >> "$DEST/.pending-jobs.txt"
echo "Job queued: $DEST/.pending-jobs.txt"
echo ""
echo "To collect immediately (if you have direct access):"
echo " curl -L -o '$TARGET_FILE' '$URL'"

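The queue format written above (`URL|FILENAME|whitepaper|category=...`) is plain pipe-delimited text, so a later pass with direct network access can drain it. A hypothetical consumer, not part of this changeset:

```bash
# Field order matches the echo in collect-whitepaper.sh
while IFS='|' read -r url filename type meta; do
  category="${meta#category=}"
  mkdir -p "whitepapers/$category"
  curl -L -o "whitepapers/$category/$filename" "$url"
done < whitepapers/.pending-jobs.txt
```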
View file

@@ -0,0 +1,80 @@
#!/usr/bin/env bash
# Hook dispatcher - source this in collectors
# Usage: source ./hooks/dispatch.sh
HOOKS_DIR="$(dirname "${BASH_SOURCE[0]}")"
HOOKS_JSON="$HOOKS_DIR/hooks.json"
# Dispatch a hook event
# dispatch_hook <event> <arg1> [arg2] ...
dispatch_hook() {
local event="$1"
shift
local args=("$@")
if [ ! -f "$HOOKS_JSON" ]; then
return 0
fi
# Get handlers for this event (requires jq)
if ! command -v jq &> /dev/null; then
echo "Warning: jq not installed, hooks disabled" >&2
return 0
fi
local handlers
handlers=$(jq -r ".hooks[\"$event\"][]? | select(.enabled == true) | @json" "$HOOKS_JSON" 2>/dev/null)
if [ -z "$handlers" ]; then
return 0
fi
echo "$handlers" | while read -r handler_json; do
local name pattern handler_script priority
name=$(echo "$handler_json" | jq -r '.name')
pattern=$(echo "$handler_json" | jq -r '.pattern // ""')
handler_script=$(echo "$handler_json" | jq -r '.handler')
# Check pattern match if pattern exists
if [ -n "$pattern" ] && [ -n "${args[0]}" ]; then
if ! echo "${args[0]}" | grep -qE "$pattern"; then
continue
fi
fi
# Execute handler
local full_path="$HOOKS_DIR/$handler_script"
if [ -x "$full_path" ]; then
echo "[hook] $name: ${args[*]}" >&2
"$full_path" "${args[@]}"
elif [ -f "$full_path" ]; then
echo "[hook] $name: ${args[*]}" >&2
bash "$full_path" "${args[@]}"
fi
done
}
# Register a new hook dynamically
# register_hook <event> <name> <pattern> <handler>
register_hook() {
local event="$1"
local name="$2"
local pattern="$3"
local handler="$4"
if ! command -v jq &> /dev/null; then
echo "Error: jq required for hook registration" >&2
return 1
fi
local new_hook
new_hook=$(jq -n \
--arg name "$name" \
--arg pattern "$pattern" \
--arg handler "$handler" \
'{name: $name, pattern: $pattern, handler: $handler, priority: 50, enabled: true}')
# Add to hooks.json
jq ".hooks[\"$event\"] += [$new_hook]" "$HOOKS_JSON" > "$HOOKS_JSON.tmp" \
&& mv "$HOOKS_JSON.tmp" "$HOOKS_JSON"
}

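Combined with the `hooks.json` in the next file, a collector would drive the dispatcher as the collection README sketches. `register_hook` has no in-repo caller shown here, so this call is illustrative:

```bash
source ./hooks/dispatch.sh
# Fires every enabled on_url_found handler whose pattern matches the URL
dispatch_hook "on_url_found" "https://example.com/papers/whitepaper.pdf"
# Append a new handler entry to hooks.json (event, name, pattern, handler)
register_hook "on_file_collected" "json-validator" '\.json$' "./validate-json.sh"
```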
View file

@@ -0,0 +1,45 @@
{
"version": "1.0.0",
"hooks": {
"on_url_found": [
{
"name": "whitepaper-collector",
"pattern": "\\.pdf$",
"handler": "./collect-whitepaper.sh",
"priority": 10,
"enabled": true
},
{
"name": "whitepaper-iacr",
"pattern": "eprint\\.iacr\\.org",
"handler": "./collect-whitepaper.sh",
"priority": 10,
"enabled": true
},
{
"name": "whitepaper-arxiv",
"pattern": "arxiv\\.org",
"handler": "./collect-whitepaper.sh",
"priority": 10,
"enabled": true
}
],
"on_file_collected": [
{
"name": "pdf-metadata",
"pattern": "\\.pdf$",
"handler": "./extract-pdf-metadata.sh",
"priority": 5,
"enabled": false
}
],
"on_collection_complete": [
{
"name": "update-index",
"handler": "./update-index.sh",
"priority": 100,
"enabled": true
}
]
}
}

View file

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Hook: update-index.sh
# Called after collection completes to update indexes
WHITEPAPERS_DIR="${1:-./whitepapers}"
echo "[update-index] Updating whitepaper index..."
# Count papers in each category
for category in cryptonote lethean research uncategorized; do
dir="$WHITEPAPERS_DIR/$category"
if [ -d "$dir" ]; then
count=$(find "$dir" -name "*.pdf" 2>/dev/null | wc -l | tr -d ' ')
echo " $category: $count papers"
fi
done
# Update INDEX.md with collected papers
INDEX="$WHITEPAPERS_DIR/INDEX.md"
if [ -f "$INDEX" ]; then
# Add collected papers section if not exists
if ! grep -q "## Recently Collected" "$INDEX"; then
echo "" >> "$INDEX"
echo "## Recently Collected" >> "$INDEX"
echo "" >> "$INDEX"
echo "_Last updated: $(date +%Y-%m-%d)_" >> "$INDEX"
echo "" >> "$INDEX"
fi
fi
# Process pending jobs
PENDING="$WHITEPAPERS_DIR/.pending-jobs.txt"
if [ -f "$PENDING" ]; then
count=$(wc -l < "$PENDING" | tr -d ' ')
echo "[update-index] $count papers queued for collection"
fi
echo "[update-index] Done"

View file

@@ -1,27 +0,0 @@
---
name: api
description: Generate TypeScript/JavaScript API client from Laravel routes
args: generate [--ts|--js|--openapi]
---
# API Client Generator
Generate a TypeScript/JavaScript API client or an OpenAPI specification from your Laravel routes.
## Usage
Generate a TypeScript client (default):
`/code:api generate`
`/code:api generate --ts`
Generate a JavaScript client:
`/code:api generate --js`
Generate an OpenAPI specification:
`/code:api generate --openapi`
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/api-generate.sh" "$@"
```

View file

@@ -1,24 +0,0 @@
---
name: clean
description: Clean up generated files, caches, and build artifacts.
args: "[--deps] [--cache] [--dry-run]"
---
# Clean Project
This command cleans up generated files from the current project.
## Usage
```
/code:clean # Clean all
/code:clean --deps # Remove vendor/node_modules
/code:clean --cache # Clear caches only
/code:clean --dry-run # Show what would be deleted
```
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/cleanup.sh" "$@"
```

View file

@@ -1,53 +0,0 @@
---
name: commit
plugin: code
description: Generate a conventional commit message for staged changes
args: "[message]"
flags:
- --amend
hooks:
Before:
- hooks:
- type: command
command: "${CLAUDE_PLUGIN_ROOT}/scripts/smart-commit.sh"
---
# Smart Commit
Generate a conventional commit message for staged changes.
## Usage
Generate message automatically:
`/core:commit`
Provide a custom message:
`/core:commit "feat(auth): add token validation"`
Amend the previous commit:
`/core:commit --amend`
## Behavior
1. **Analyze Staged Changes**: Examines the `git diff --staged` to understand the nature of the changes.
2. **Generate Conventional Commit Message**:
- `feat`: For new files, functions, or features.
- `fix`: For bug fixes.
- `refactor`: For code restructuring without changing external behavior.
- `docs`: For changes to documentation.
- `test`: For adding or modifying tests.
- `chore`: For routine maintenance tasks.
3. **Determine Scope**: Infers the scope from the affected module's file paths (e.g., `auth`, `payment`, `ui`).
4. **Add Co-Authored-By Trailer**: Appends `Co-Authored-By: Claude <noreply@anthropic.com>` to the commit message.
## Message Generation Example
```
feat(auth): add JWT token validation
- Add validateToken() function
- Add token expiry check
- Add unit tests for validation
Co-Authored-By: Claude <noreply@anthropic.com>
```

View file

@ -1,169 +0,0 @@
---
name: compare
description: Compare versions between modules and find incompatibilities
args: "[module] [--prod]"
---
# Compare Module Versions
Compares local module versions against remote, and checks for dependency conflicts.
## Usage
```
/code:compare # Compare all modules
/code:compare core-tenant # Compare specific module
/code:compare --prod # Compare with production
```
## Action
```bash
#!/bin/bash
# Function to compare semantic versions
# Returns:
# 0 if versions are equal
# 1 if version1 > version2
# 2 if version1 < version2
compare_versions() {
if [ "$1" == "$2" ]; then
return 0
fi
local winner=$(printf "%s\n%s" "$1" "$2" | sort -V | tail -n 1)
if [ "$winner" == "$1" ]; then
return 1
else
return 2
fi
}
# Checks if a version is compatible with a Composer constraint.
is_version_compatible() {
local version=$1
local constraint=$2
local base_version
local operator=""
if [[ $constraint == \^* ]]; then
operator="^"
base_version=${constraint:1}
elif [[ $constraint == ~* ]]; then
operator="~"
base_version=${constraint:1}
else
base_version=$constraint
compare_versions "$version" "$base_version"
if [ $? -eq 2 ]; then return 1; else return 0; fi
fi
compare_versions "$version" "$base_version"
if [ $? -eq 2 ]; then
return 1
fi
local major minor patch
IFS='.' read -r major minor patch <<< "$base_version"
local upper_bound
if [ "$operator" == "^" ]; then
if [ "$major" -gt 0 ]; then
upper_bound="$((major + 1)).0.0"
elif [ "$minor" -gt 0 ]; then
upper_bound="0.$((minor + 1)).0"
else
upper_bound="0.0.$((patch + 1))"
fi
elif [ "$operator" == "~" ]; then
upper_bound="$major.$((minor + 1)).0"
fi
compare_versions "$version" "$upper_bound"
if [ $? -eq 2 ]; then
return 0
else
return 1
fi
}
# Parse arguments
TARGET_MODULE=""
ENV_FLAG=""
for arg in "$@"; do
case $arg in
--prod)
ENV_FLAG="--prod"
;;
*)
if [[ ! "$arg" == --* ]]; then
TARGET_MODULE="$arg"
fi
;;
esac
done
# Get module health data
health_data=$(core dev health $ENV_FLAG)
module_data=$(echo "$health_data" | grep -vE '^(Module|━━|Comparing)' | sed '/^$/d' || true)
if [ -z "$module_data" ]; then
echo "No module data found."
exit 0
fi
mapfile -t module_lines <<< "$module_data"
remote_versions=$(echo "$module_data" | awk '{print $1, $3}')
echo "Module Version Comparison"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "Module Local Remote Status"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
for line in "${module_lines[@]}"; do
read -r module local_version remote_version _ <<< "$line"
if [ -n "$TARGET_MODULE" ] && [ "$module" != "$TARGET_MODULE" ]; then
continue
fi
compare_versions "$local_version" "$remote_version"
case $? in
0) status="✓" ;;
1) status="↑ ahead" ;;
2) status="↓ behind" ;;
esac
printf "%-15s %-9s %-9s %s\n" "$module" "$local_version" "$remote_version" "$status"
done
echo ""
echo "Dependency Check:"
for line in "${module_lines[@]}"; do
read -r module _ <<< "$line"
if [ -n "$TARGET_MODULE" ] && [ "$module" != "$TARGET_MODULE" ]; then
continue
fi
if [ -f "$module/composer.json" ]; then
dependencies=$(jq -r '.require? | select(. != null) | to_entries[] | "\(.key)@\(.value)"' "$module/composer.json")
for dep in $dependencies; do
dep_name=$(echo "$dep" | cut -d'@' -f1)
dep_constraint=$(echo "$dep" | cut -d'@' -f2)
remote_version=$(echo "$remote_versions" | grep "^$dep_name " | awk '{print $2}')
if [ -n "$remote_version" ]; then
if ! is_version_compatible "$remote_version" "$dep_constraint"; then
echo "⚠ $module requires $dep_name $dep_constraint"
echo " But production has $remote_version (incompatible)"
echo " Either:"
echo " - Deploy a compatible version of $dep_name first"
echo " - Or adjust the dependency in $module"
fi
fi
done
fi
done
```

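The caret and tilde handling in `is_version_compatible` mirrors Composer's ranges (`^1.2.0` means `>=1.2.0 <2.0.0`; `~1.2.0` means `>=1.2.0 <1.3.0`). A few spot checks of the function as written:

```bash
is_version_compatible "1.4.2" "^1.2.0" && echo compatible    # below the 2.0.0 upper bound
is_version_compatible "2.0.0" "^1.2.0" || echo incompatible  # hits the upper bound exactly
is_version_compatible "1.2.9" "~1.2.0" && echo compatible    # below the 1.3.0 upper bound
```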
View file

@@ -1,24 +0,0 @@
---
name: /core:env
description: Manage environment configuration
args: [check|diff|sync]
---
# Environment Management
Provides tools for managing `.env` files based on `.env.example`.
## Usage
- `/core:env` - Show current environment variables (with sensitive values masked)
- `/core:env check` - Validate `.env` against `.env.example`
- `/core:env diff` - Show differences between `.env` and `.env.example`
- `/core:env sync` - Add missing variables from `.env.example` to `.env`
## Action
This command is implemented by the following script:
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/env.sh" "$1"
```

View file

@@ -1,90 +0,0 @@
#!/bin/bash
# Calculate and display test coverage.
set -e
COVERAGE_HISTORY_FILE=".coverage-history.json"
# --- Helper Functions ---
# TODO: Replace this with the actual command to calculate test coverage
get_current_coverage() {
echo "80.0" # Mock value
}
get_previous_coverage() {
if [ ! -f "$COVERAGE_HISTORY_FILE" ] || ! jq -e '.history | length > 0' "$COVERAGE_HISTORY_FILE" > /dev/null 2>&1; then
echo "0.0"
return
fi
jq -r '.history[-1].coverage' "$COVERAGE_HISTORY_FILE"
}
update_history() {
local coverage=$1
local commit_hash=$(git rev-parse HEAD)
local timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
if [ ! -f "$COVERAGE_HISTORY_FILE" ]; then
echo '{"history": []}' > "$COVERAGE_HISTORY_FILE"
fi
local updated_history=$(jq \
--arg commit "$commit_hash" \
--arg date "$timestamp" \
--argjson coverage "$coverage" \
'.history += [{ "commit": $commit, "date": $date, "coverage": $coverage }]' \
"$COVERAGE_HISTORY_FILE")
echo "$updated_history" > "$COVERAGE_HISTORY_FILE"
}
# --- Main Logic ---
handle_diff() {
local current_coverage=$(get_current_coverage)
local previous_coverage=$(get_previous_coverage)
local change=$(awk -v current="$current_coverage" -v previous="$previous_coverage" 'BEGIN {printf "%.2f", current - previous}')
echo "Test Coverage Report"
echo "━━━━━━━━━━━━━━━━━━━━"
echo "Current: $current_coverage%"
echo "Previous: $previous_coverage%"
if awk -v change="$change" 'BEGIN {exit !(change >= 0)}'; then
echo "Change: +$change% ✅"
else
echo "Change: $change% ⚠️"
fi
}
handle_history() {
if [ ! -f "$COVERAGE_HISTORY_FILE" ]; then
echo "No coverage history found."
exit 0
fi
echo "Coverage History"
echo "━━━━━━━━━━━━━━━━"
jq -r '.history[] | "\(.date) (\(.commit[0:7])): \(.coverage)%"' "$COVERAGE_HISTORY_FILE"
}
handle_default() {
local current_coverage=$(get_current_coverage)
echo "Current test coverage: $current_coverage%"
update_history "$current_coverage"
echo "Coverage saved to history."
}
# --- Argument Parsing ---
case "$1" in
--diff)
handle_diff
;;
--history)
handle_history
;;
*)
handle_default
;;
esac

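The mocked `get_current_coverage` is the script's only gap. A hedged sketch of what a real implementation might shell out to, with tool flags assumed rather than taken from this repo:

```bash
get_current_coverage() {
  if [ -f go.mod ]; then
    # Go: print the total percentage from the cover profile
    go test ./... -coverprofile=/tmp/cover.out > /dev/null 2>&1
    go tool cover -func=/tmp/cover.out | awk '/^total:/ {gsub("%", "", $3); print $3}'
  elif [ -f composer.json ]; then
    # PHP: Pest/PHPUnit can emit clover XML; derive a statement percentage
    ./vendor/bin/pest --coverage-clover=/tmp/clover.xml > /dev/null 2>&1
    php -r '$m = simplexml_load_file("/tmp/clover.xml")->project->metrics;
            printf("%.1f", 100 * (int) $m["coveredstatements"] / max(1, (int) $m["statements"]));'
  else
    echo "0.0"
  fi
}
```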
View file

@@ -1,32 +0,0 @@
---
name: debug
description: Systematic debugging workflow
---
# Debugging Protocol
## Step 1: Reproduce
- Run the failing test/command
- Note exact error message
- Identify conditions for failure
## Step 2: Isolate
- Binary search through changes (git bisect)
- Comment out code sections
- Add logging at key points
## Step 3: Hypothesize
Before changing code, form theories:
1. Theory A: ...
2. Theory B: ...
## Step 4: Test Hypotheses
Test each theory with minimal investigation.
## Step 5: Fix
Apply the smallest change that fixes the issue.
## Step 6: Verify
- Run original failing test
- Run full test suite
- Check for regressions

View file

@@ -1,19 +0,0 @@
---
name: deps
description: Show module dependencies
hooks:
PreCommand:
- hooks:
- type: command
command: "python3 ${CLAUDE_PLUGIN_ROOT}/scripts/deps.py ${TOOL_ARGS}"
---
# /core:deps
Visualize dependencies between modules in the monorepo.
## Usage
`/core:deps` - Show the full dependency tree
`/core:deps <module>` - Show dependencies for a single module
`/core:deps --reverse <module>` - Show what depends on a module

View file

@@ -1,24 +0,0 @@
---
name: doc
description: Auto-generate documentation from code.
hooks:
PostToolUse:
- matcher: "Tool"
hooks:
- type: command
command: "${CLAUDE_PLUGIN_ROOT}/scripts/doc.sh"
---
# Documentation Generator
This command generates documentation from your codebase.
## Usage
`/core:doc <type> <name>`
## Subcommands
- **class <ClassName>**: Document a single class.
- **api**: Generate OpenAPI spec for the project.
- **changelog**: Generate a changelog from git commits.

View file

@@ -1,41 +0,0 @@
---
name: explain
description: Explain code, errors, or stack traces in context
---
# Explain
This command provides context-aware explanations for code, errors, and stack traces.
## Usage
- `/core:explain file.php:45` - Explain code at a specific line.
- `/core:explain error "error message"` - Explain a given error.
- `/core:explain stack "stack trace"` - Explain a given stack trace.
## Code Explanation (`file:line`)
When a file path and line number are provided, follow these steps:
1. **Read the file**: Read the contents of the specified file.
2. **Extract context**: Extract a few lines of code before and after the specified line number to understand the context.
3. **Analyze the code**: Analyze the extracted code block to understand its purpose and functionality.
4. **Provide an explanation**: Provide a clear and concise explanation of the code, including its role in the overall application.
## Error Explanation (`error`)
When an error message is provided, follow these steps:
1. **Analyze the error**: Parse the error message to identify the key components, such as the error type and location.
2. **Identify the cause**: Based on the error message and your understanding of the codebase, determine the root cause of the error.
3. **Suggest a fix**: Provide a clear and actionable fix for the error, including code snippets where appropriate.
4. **Link to documentation**: If applicable, provide links to relevant documentation that can help the user understand the error and the suggested fix.
## Stack Trace Explanation (`stack`)
When a stack trace is provided, follow these steps:
1. **Parse the stack trace**: Break down the stack trace into individual function calls, including the file path and line number for each call.
2. **Analyze the call stack**: Analyze the sequence of calls to understand the execution flow that led to the current state.
3. **Identify the origin**: Pinpoint the origin of the error or the relevant section of the stack trace.
4. **Provide an explanation**: Explain the sequence of events in the stack trace in a clear and understandable way.

View file

@@ -1,22 +0,0 @@
---
name: log
description: Smart log viewing with filtering and analysis.
args: [--errors|--since <duration>|--grep <pattern>|--request <id>|analyse]
---
# Smart Log Viewing
Tails, filters, and analyzes `laravel.log`.
## Usage
/core:log # Tail laravel.log
/core:log --errors # Only errors
/core:log --since 1h # Last hour
/core:log --grep "User" # Filter by pattern
/core:log --request abc123 # Show logs for a specific request
/core:log analyse # Summarize errors
## Action
This command is implemented by the script at `claude/code/scripts/log.sh`.

View file

@@ -1,35 +0,0 @@
---
name: migrate
description: Manage Laravel migrations in the monorepo
args: <subcommand> [arguments]
---
# Laravel Migration Helper
Commands to help with Laravel migrations in the monorepo.
## Subcommands
### `create <name>`
Create a new migration file.
e.g., `/core:migrate create create_users_table`
### `run`
Run pending migrations.
e.g., `/core:migrate run`
### `rollback`
Rollback the last batch of migrations.
e.g., `/core:migrate rollback`
### `fresh`
Drop all tables and re-run all migrations.
e.g., `/core:migrate fresh`
### `status`
Show the migration status.
e.g., `/core:migrate status`
### `from-model <model>`
Generate a migration from a model.
e.g., `/core:migrate from-model User`

View file

@@ -1,88 +0,0 @@
---
name: onboard
description: Guide new contributors through the codebase
args: [--module]
---
# Interactive Onboarding
This command guides new contributors through the codebase.
## Flow
### 1. Check for Module-Specific Deep Dive
First, check if the user provided a `--module` argument.
- If `args.module` is "tenant":
- Display the "Tenant Module Deep Dive" section and stop.
- If `args.module` is "admin":
- Display the "Admin Module Deep Dive" section and stop.
- If `args.module` is "php":
- Display the "PHP Module Deep Dive" section and stop.
- If `args.module` is not empty but unrecognized, inform the user and show available modules. Then, proceed with the general flow.
### 2. General Onboarding
If no module is specified, display the general onboarding information.
**Welcome Message**
"Welcome to Host UK Monorepo! 👋 Let me help you get oriented."
**Repository Structure**
"This is a federated monorepo with 18 Laravel packages. Each `core-*` directory is an independent git repo."
**Key Modules**
- `core-php`: Foundation framework
- `core-tenant`: Multi-tenancy
- `core-admin`: Admin panel
**Development Commands**
- Run tests: `core go test` / `core php test`
- Format: `core go fmt` / `core php fmt`
### 3. Link to First Task
"Let's find a 'good first issue' for you to work on. You can find them here: https://github.com/host-uk/core-agent/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22"
### 4. Ask User for Interests
Finally, use the `request_user_input` tool to ask the user about their area of interest.
**Prompt:**
"Which area interests you most?
- Backend (PHP/Laravel)
- CLI (Go)
- Frontend (Livewire/Alpine)
- Full stack"
---
## Module Deep Dives
### Tenant Module Deep Dive
**Module**: `core-tenant`
**Description**: Handles all multi-tenancy logic, including tenant identification, database connections, and domain management.
**Key Files**:
- `src/TenantManager.php`: Central class for tenant operations.
- `config/tenant.php`: Configuration options.
**Dependencies**: `core-php`
### Admin Module Deep Dive
**Module**: `core-admin`
**Description**: The admin panel, built with Laravel Nova.
**Key Files**:
- `src/Nova/User.php`: User resource for the admin panel.
- `routes/api.php`: API routes for admin functionality.
**Dependencies**: `core-php`, `core-tenant`
### PHP Module Deep Dive
**Module**: `core-php`
**Description**: The foundation framework, providing shared services, utilities, and base classes. This is the bedrock of all other PHP packages.
**Key Files**:
- `src/ServiceProvider.php`: Registers core services.
- `src/helpers.php`: Global helper functions.
**Dependencies**: None

View file

@@ -1,31 +0,0 @@
---
name: perf
description: Performance profiling helpers for Go and PHP
args: <subcommand> [options]
---
# Performance Profiling
A collection of helpers to diagnose performance issues.
## Usage
Profile the test suite:
`/core:perf test`
Profile an HTTP request:
`/core:perf request /api/users`
Analyse slow queries:
`/core:perf query`
Analyse memory usage:
`/core:perf memory`
## Action
This command delegates to a shell script to perform the analysis.
```bash
/bin/bash "${CLAUDE_PLUGIN_ROOT}/scripts/perf.sh" "<subcommand>" "<options>"
```

View file

@@ -1,28 +0,0 @@
---
name: pr
description: Create a PR with a generated title and description from your commits.
args: [--draft] [--reviewer @user]
---
# Create Pull Request
Generates a pull request with a title and body automatically generated from your recent commits.
## Usage
Create a PR:
`/code:pr`
Create a draft PR:
`/code:pr --draft`
Request a review:
`/code:pr --reviewer @username`
## Action
This command will execute the following script:
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/generate-pr.sh" "$@"
```

View file

@@ -18,11 +18,16 @@ hooks:
 Run the full QA pipeline and fix all issues.
-**Workspace:** `{{env.CLAUDE_CURRENT_MODULE}}` ({{env.CLAUDE_MODULE_TYPE}})
+## Detection
+First, detect the project type:
+- If `go.mod` exists → Go project → `core go qa`
+- If `composer.json` exists → PHP project → `core php qa`
+- If both exist → ask user or check current directory
 ## Process
-1. **Run QA**: Execute `core {{env.CLAUDE_MODULE_TYPE}} qa`
+1. **Run QA**: Execute `core go qa` or `core php qa`
 2. **Parse issues**: Extract failures from output (see format below)
 3. **Fix each issue**: Address one at a time, simplest first
 4. **Re-verify**: After fixes, re-run QA

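The Detection section above reduces to a small guard; the same ordering as a sketch, with the file names taken straight from the command text:

```bash
if [ -f go.mod ] && [ -f composer.json ]; then
  echo "Both go.mod and composer.json found - confirm which QA pipeline to run" >&2
elif [ -f go.mod ]; then
  core go qa
elif [ -f composer.json ]; then
  core php qa
fi
```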
View file

@@ -1,33 +0,0 @@
---
name: refactor
description: Guided refactoring with safety checks
args: <subcommand> [args]
---
# Refactor
Guided refactoring with safety checks.
## Subcommands
- `extract-method <new-method-name>` - Extract selection to a new method
- `rename <new-name>` - Rename a class, method, or variable
- `move <new-namespace>` - Move a class to a new namespace
- `inline` - Inline a method
## Usage
```
/core:refactor extract-method validateToken
/core:refactor rename User UserV2
/core:refactor move App\\Models\\User App\\Data\\Models\\User
/core:refactor inline calculateTotal
```
## Action
This command will run the refactoring script:
```bash
~/.claude/plugins/code/scripts/refactor.php "<subcommand>" [args]
```

View file

@@ -1,26 +0,0 @@
---
name: release
description: Streamline the release process for modules
args: <patch|minor|major> [--preview]
---
# Release Workflow
This command automates the release process for modules. It handles version bumping, changelog generation, and Git tagging.
## Usage
```
/core:release patch # Bump patch version
/core:release minor # Bump minor version
/core:release major # Bump major version
/core:release --preview # Show what would happen
```
## Action
This command will execute the `release.sh` script:
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/release.sh" "<1>"
```

View file

@ -1,29 +0,0 @@
---
name: review
description: Perform a code review on staged changes, a commit range, or a GitHub PR
args: <range> [--security]
---
# Code Review
Performs a code review on the specified changes.
## Usage
Review staged changes:
`/code:review`
Review a commit range:
`/code:review HEAD~3..HEAD`
Review a GitHub PR:
`/code:review #123`
Perform a security-focused review:
`/code:review --security`
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/code-review.sh" "$@"
```

View file

@ -1,194 +0,0 @@
---
name: /core:scaffold
description: Generate boilerplate code following Host UK patterns.
---
This command generates boilerplate code for models, actions, controllers, and modules.
## Subcommands
- `/core:scaffold model <name>` - Generate a Laravel model.
- `/core:scaffold action <name>` - Generate an Action class.
- `/core:scaffold controller <name>` - Generate an API controller.
- `/core:scaffold module <name>` - Generate a full module.
## `/core:scaffold model <name>`
Generates a new model file.
```php
<?php
declare(strict_types=1);
namespace Core\Models;
use Core\Tenant\Traits\BelongsToWorkspace;
use Illuminate\Database\Eloquent\Model;
class {{name}} extends Model
{
use BelongsToWorkspace;
protected $fillable = [
'name',
'email',
];
}
```
## `/core:scaffold action <name>`
Generates a new action file.
```php
<?php
declare(strict_types=1);
namespace Core\Actions;
use Core\Models\{{model}};
use Core\Support\Action;
class {{name}}
{
use Action;
public function handle(array $data): {{model}}
{
return {{model}}::create($data);
}
}
```
## `/core:scaffold controller <name>`
Generates a new API controller file.
```php
<?php
declare(strict_types=1);
namespace Core\Http\Controllers\Api;
use Illuminate\Http\Request;
use Core\Http\Controllers\Controller;
class {{name}} extends Controller
{
public function index()
{
//
}
public function store(Request $request)
{
//
}
public function show($id)
{
//
}
public function update(Request $request, $id)
{
//
}
public function destroy($id)
{
//
}
}
```
## `/core:scaffold module <name>`
Generates a new module structure.
### `core-{{name}}/src/Core/Boot.php`
```php
<?php
declare(strict_types=1);
namespace Core\{{studly_name}}\Core;
class Boot
{
// Boot the module
}
```
### `core-{{name}}/src/Core/ServiceProvider.php`
```php
<?php
declare(strict_types=1);
namespace Core\{{studly_name}}\Core;
use Illuminate\Support\ServiceProvider as BaseServiceProvider;
class ServiceProvider extends BaseServiceProvider
{
public function register()
{
//
}
public function boot()
{
//
}
}
```
### `core-{{name}}/composer.json`
```json
{
"name": "host-uk/core-{{name}}",
"description": "The Host UK {{name}} module.",
"license": "EUPL-1.2",
"authors": [
{
"name": "Claude",
"email": "claude@host.uk.com"
}
],
"require": {
"php": "^8.2"
},
"autoload": {
"psr-4": {
"Core\\{{studly_name}}\\": "src/"
}
},
"config": {
"sort-packages": true
},
"minimum-stability": "dev",
"prefer-stable": true
}
```
### `core-{{name}}/CLAUDE.md`
```md
# Claude Instructions for `core-{{name}}`
This file provides instructions for the Claude AI agent on how to interact with the `core-{{name}}` module.
```
### `core-{{name}}/src/Mod/`
### `core-{{name}}/database/`
### `core-{{name}}/routes/`
### `core-{{name}}/tests/`

View file

@ -1,21 +0,0 @@
---
name: serve-mcp
description: Starts the MCP server for the core CLI.
args: ""
---
# MCP Server
Starts the MCP server to expose core CLI commands as tools.
## Usage
```
/code:serve-mcp
```
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/mcp/run.sh"
```

View file

@ -1,35 +0,0 @@
---
name: status
description: Show status across all Host UK repos
args: [--dirty|--behind]
hooks:
  AfterToolConfirmation:
    - hooks:
        - type: command
          command: "${CLAUDE_PLUGIN_ROOT}/scripts/status.sh"
---
# Multi-Repo Status
A quick command to show the status across all Host UK repos. Wraps `core dev health` with better formatting.
## Usage
`/core:status` - Show all repo statuses
`/core:status --dirty` - Only show repos with changes
`/core:status --behind` - Only show repos behind remote
## Action
Run this command to get the status:
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/core-status.sh" "$@"
```

View file

@ -1,23 +0,0 @@
---
name: sync
description: Sync changes across dependent modules
args: <module_name> [--dry-run]
---
# Sync Dependent Modules
When changing a base module, this command syncs the dependent modules.
## Usage
```
/code:sync # Sync all dependents of current module
/code:sync core-tenant # Sync specific module
/code:sync --dry-run # Show what would change
```
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/sync.sh" "$@"
```
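`sync.sh` is not included in this diff. A rough sketch, assuming dependents are declared under `depends` in `repos.yaml` (the layout the deps tooling later in this changeset parses) and that modules are sibling checkouts:
```bash
#!/bin/bash
# Sketch of sync.sh (dry-run handling omitted): find modules that list the
# target under "depends" in repos.yaml, then refresh each dependent.
# The repos.yaml layout, sibling checkouts, and the composer step are assumptions.
set -euo pipefail

MODULE="${1:-$(basename "$PWD")}"

# Crude YAML scrape: repo names are two-space-indented keys under "repos:",
# dependencies are "- name" list items.
dependents=$(awk -v target="$MODULE" '
/^  [a-zA-Z0-9_-]+:/ { repo = $1; sub(/:$/, "", repo) }
/^[[:space:]]+- /    { if ($2 == target) print repo }
' repos.yaml | sort -u)

for dep in $dependents; do
    echo "Syncing $dep..."
    (cd "../$dep" && composer update "host-uk/$MODULE")
done
```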

View file

@ -1,23 +0,0 @@
---
name: todo
description: Extract and track TODOs from the codebase
args: '[add "message" | done <id> | --priority]'
---
# TODO Command
This command scans the codebase for `TODO`, `FIXME`, `HACK`, and `XXX` comments and displays them in a formatted list.
## Usage
List all TODOs:
`/core:todo`
Sort by priority:
`/core:todo --priority`
Add a tracked TODO:
`/core:todo add "message"`
Mark a TODO as done:
`/core:todo done <id>`
## Action
```bash
"${CLAUDE_PLUGIN_ROOT}/scripts/todo.sh" <args>
```
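`todo.sh` is not shown in this diff. The listing half is simple enough to sketch; the tag set comes from the description above, while ranking `FIXME`/`HACK` above `TODO`/`XXX` is an assumption:
```bash
#!/bin/bash
# Sketch of todo.sh's listing mode; add/done subcommands omitted.
set -euo pipefail

matches=$(grep -rnE '(TODO|FIXME|HACK|XXX)' \
    --exclude-dir=vendor --exclude-dir=node_modules --exclude-dir=.git . || true)

if [[ "${1:-}" == "--priority" ]]; then
    # Rank FIXME/HACK above TODO/XXX, keeping file order within each rank
    echo "$matches" | awk '/FIXME|HACK/ {print "1", $0; next} {print "2", $0}' \
        | sort -s -k1,1 | cut -d' ' -f2-
else
    echo "$matches"
fi
```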

View file

@ -1,83 +0,0 @@
# Hook Output Policy
Consistent policy for what hook output to expose to Claude vs hide.
## Principles
### Always Expose
| Category | Example | Reason |
|----------|---------|--------|
| Test failures | `FAIL: TestFoo` | Must be fixed |
| Build errors | `cannot find package` | Blocks progress |
| Lint errors | `undefined: foo` | Code quality |
| Security alerts | `HIGH vulnerability` | Critical |
| Type errors | `type mismatch` | Must be fixed |
| Debug statements | `dd() found` | Must be removed |
| Uncommitted work | `3 files unstaged` | Might get lost |
| Coverage drops | `84% → 79%` | Quality regression |
### Always Hide
| Category | Example | Reason |
|----------|---------|--------|
| Pass confirmations | `PASS: TestFoo` | No action needed |
| Format success | `Formatted 3 files` | No action needed |
| Coverage stable | `84% (unchanged)` | No action needed |
| Timing info | `(12.3s)` | Noise |
| Progress bars | `[=====> ]` | Noise |
### Conditional
| Category | Show When | Hide When |
|----------|-----------|-----------|
| Warnings | First occurrence | Repeated |
| Suggestions | Actionable | Informational |
| Diffs | Small (<10 lines) | Large |
| Stack traces | Unique error | Repeated |
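The conditional rows imply state that survives across hook invocations. One way to implement the first-occurrence check for warnings; the cache path is borrowed from the `detect-module.sh` convention but is still an assumption here:
```bash
# Return 0 if this warning was already shown this session, 1 if it is new.
# The cache path under .claude-plugin/.tmp is an assumption.
seen_before() {
    local fingerprint cache=".claude-plugin/.tmp/seen-warnings"
    fingerprint=$(echo "$1" | md5sum | cut -d' ' -f1)
    mkdir -p "$(dirname "$cache")"
    grep -qx "$fingerprint" "$cache" 2>/dev/null && return 0
    echo "$fingerprint" >> "$cache"
    return 1
}

# Usage: seen_before "$warning_text" || expose_warning "$warning_text"
```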
## Implementation
Use `output-policy.sh` helper functions:
```bash
source "$SCRIPT_DIR/output-policy.sh"
# Expose failures
expose_error "Build failed" "$error_details"
expose_warning "Debug statements found" "$locations"
# Hide success
hide_success
# Pass through unchanged
pass_through "$input"
```
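`output-policy.sh` itself is not shown in this diff. A minimal sketch consistent with the call sites above (messages on stderr, payload passed through on stdout, as the other hook scripts here do); the exact exit behaviour is an assumption:
```bash
# Minimal sketch of output-policy.sh; exit behaviour is an assumption.
expose_error() {
    echo "[Hook] ERROR: $1" >&2
    if [ -n "${2:-}" ]; then echo "$2" >&2; fi
}
expose_warning() {
    echo "[Hook] WARNING: $1" >&2
    if [ -n "${2:-}" ]; then echo "$2" >&2; fi
}
hide_success() {
    exit 0
}
pass_through() {
    echo "$1"
}
```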
## Hook-Specific Policies
| Hook | Expose | Hide |
|------|--------|------|
| `check-debug.sh` | Debug statements found | Clean file |
| `post-commit-check.sh` | Uncommitted work | Clean working tree |
| `check-coverage.sh` | Coverage dropped | Coverage stable/improved |
| `go-format.sh` | (never) | Always silent |
| `php-format.sh` | (never) | Always silent |
## Aggregation
When multiple issues, aggregate intelligently:
```
Instead of:
- FAIL: TestA
- FAIL: TestB
- FAIL: TestC
- (47 more)
Show:
"50 tests failed. Top failures:
- TestA: nil pointer
- TestB: timeout
- TestC: assertion failed"
```
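A sketch of that aggregation in shell, with the failure format and the cut-off of three taken from the example above:
```bash
# Collapse a long failure list into a count plus the top three failures.
aggregate_failures() {
    local failures="$1"
    local total
    total=$(echo "$failures" | grep -c .)
    if [ "$total" -le 3 ]; then
        echo "$failures"
    else
        echo "$total tests failed. Top failures:"
        echo "$failures" | head -3
    fi
}
```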

View file

@ -2,27 +2,6 @@
"$schema": "https://claude.ai/schemas/hooks.json", "$schema": "https://claude.ai/schemas/hooks.json",
"hooks": { "hooks": {
"PreToolUse": [ "PreToolUse": [
{
"matcher": "*",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/session-history-capture.sh"
}
],
"description": "Capture session history before each tool use"
},
{
"matcher": "*",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/detect-module.sh"
}
],
"description": "Detect current module and export context variables",
"once": true
},
{ {
"matcher": "Bash", "matcher": "Bash",
"hooks": [ "hooks": [
@ -42,37 +21,9 @@
} }
], ],
"description": "Block random .md file creation" "description": "Block random .md file creation"
},
{
"matcher": "tool == \"Bash\" && tool_input.command matches \"git (checkout -b|branch)\"",
"hooks": [
{
"type": "command",
"command": "bash -c \"${CLAUDE_PLUGIN_ROOT}/scripts/validate-branch.sh \\\"${CLAUDE_TOOL_INPUT}\\\"\""
}
],
"description": "Validate branch names follow conventions"
},
{
"matcher": "tool == \"Write\" || tool == \"Edit\"",
"hooks": [
{
"type": "command",
"command": "echo \"${tool_input.content}\" | ${CLAUDE_PLUGIN_ROOT}/scripts/detect-secrets.sh ${tool_input.filepath}"
}
],
"description": "Detect secrets in code before writing or editing files."
} }
], ],
"PostToolUse": [ "PostToolUse": [
{
"matcher": "tool == \"Bash\" && tool_input.command matches \"^git commit\"",
"hooks": [{
"type": "command",
"command": "bash claude/code/scripts/check-coverage.sh"
}],
"description": "Warn when coverage drops"
},
{ {
"matcher": "tool == \"Edit\" && tool_input.file_path matches \"\\.go$\"", "matcher": "tool == \"Edit\" && tool_input.file_path matches \"\\.go$\"",
"hooks": [ "hooks": [
@ -114,13 +65,25 @@
"description": "Warn about uncommitted work after git commit" "description": "Warn about uncommitted work after git commit"
} }
], ],
"PreCompact": [
{
"matcher": "*",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/pre-compact.sh"
}
],
"description": "Save state before auto-compact to prevent amnesia"
}
],
"SessionStart": [ "SessionStart": [
{ {
"matcher": "*", "matcher": "*",
"hooks": [ "hooks": [
{ {
"type": "command", "type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/scripts/session-history-restore.sh" "command": "${CLAUDE_PLUGIN_ROOT}/scripts/session-start.sh"
} }
], ],
"description": "Restore recent session context on startup" "description": "Restore recent session context on startup"

View file

@ -1,211 +0,0 @@
#!/bin/bash
# Default values
output_format="ts"
routes_file="routes/api.php"
output_file="api_client" # Default output file name without extension
# Parse command-line arguments
while [[ "$#" -gt 0 ]]; do
case $1 in
generate) ;; # Skip the generate subcommand
--ts) output_format="ts";;
--js) output_format="js";;
--openapi) output_format="openapi";;
*) routes_file="$1";;
esac
shift
done
# Set the output file extension based on format
if [[ "$output_format" == "openapi" ]]; then
output_file="openapi.json"
else
output_file="api_client.${output_format}"
fi
# Function to parse the routes file
parse_routes() {
if [ ! -f "$1" ]; then
echo "Error: Routes file not found at $1" >&2
exit 1
fi
awk -F"'" '
/Route::apiResource/ {
resource = $2;
resource_singular = resource;
sub(/s$/, "", resource_singular);
print "GET " resource " list";
print "POST " resource " create";
print "GET " resource "/{" resource_singular "} get";
print "PUT " resource "/{" resource_singular "} update";
print "DELETE " resource "/{" resource_singular "} delete";
}
/Route::(get|post|put|delete|patch)/ {
line = $0;
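# NOTE: the three-argument match() used below is a gawk extension, not POSIX awk.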
match(line, /Route::([a-z]+)/, m);
method = toupper(m[1]);
uri = $2;
action = $6;
print method " " uri " " action;
}
' "$1"
}
# Function to generate the API client
generate_client() {
local format=$1
local outfile=$2
local client_object="export const api = {\n"
local dto_definitions=""
declare -A dtos
declare -A groups
# First pass: Collect all routes and DTOs
while read -r method uri action; do
group=$(echo "$uri" | cut -d'/' -f1)
if [[ -z "${groups[$group]}" ]]; then
groups[$group]=""
fi
groups[$group]+="$method $uri $action\n"
if [[ "$method" == "POST" || "$method" == "PUT" || "$method" == "PATCH" ]]; then
# Capitalise the first letter (awk strings are 1-indexed; substr($0,0,1) returns nothing in gawk)
local resource_name_for_dto=$(echo "$group" | sed 's/s$//' | awk '{print toupper(substr($0,1,1))substr($0,2)}')
local dto_name="$(tr '[:lower:]' '[:upper:]' <<< ${action:0:1})${action:1}${resource_name_for_dto}Dto"
dtos[$dto_name]=1
fi
done
# Generate DTO interface definitions for TypeScript
if [ "$format" == "ts" ]; then
for dto in $(echo "${!dtos[@]}" | tr ' ' '\n' | sort); do
dto_definitions+="export interface ${dto} {}\n"
done
dto_definitions+="\n"
fi
# Sort the group names alphabetically to ensure consistent output
sorted_groups=$(for group in "${!groups[@]}"; do echo "$group"; done | sort)
for group in $sorted_groups; do
client_object+=" ${group}: {\n"
# Sort the lines within the group by the action name (field 3)
sorted_lines=$(echo -e "${groups[$group]}" | sed '/^$/d' | sort -k3)
while IFS= read -r line; do
if [ -z "$line" ]; then continue; fi
method=$(echo "$line" | cut -d' ' -f1)
uri=$(echo "$line" | cut -d' ' -f2)
action=$(echo "$line" | cut -d' ' -f3)
params=$(echo "$uri" | grep -o '{[^}]*}' | sed 's/[{}]//g')
ts_types=""
js_args=""
# Generate arguments for the function signature
for p in $params; do
js_args+="${p}, "
ts_types+="${p}: number, "
done
# Add a 'data' argument for POST/PUT/PATCH methods
if [[ "$method" == "POST" || "$method" == "PUT" || "$method" == "PATCH" ]]; then
local resource_name_for_dto=$(echo "$group" | sed 's/s$//' | awk '{print toupper(substr($0,1,1))substr($0,2)}')
local dto_name="$(tr '[:lower:]' '[:upper:]' <<< ${action:0:1})${action:1}${resource_name_for_dto}Dto"
ts_types+="data: ${dto_name}"
js_args+="data"
fi
# Clean up function arguments string
func_args=$(echo "$ts_types" | sed 's/,\s*$//' | sed 's/,$//')
js_args=$(echo "$js_args" | sed 's/,\s*$//' | sed 's/,$//')
final_args=$([ "$format" == "ts" ] && echo "$func_args" || echo "$js_args")
# Construct the fetch call string
fetch_uri="/api/${uri}"
fetch_uri=$(echo "$fetch_uri" | sed 's/{/${/g')
client_object+=" ${action}: (${final_args}) => fetch(\`${fetch_uri}\`"
# Add request options for non-GET methods
if [ "$method" != "GET" ]; then
client_object+=", {\n method: '${method}'"
if [[ "$method" == "POST" || "$method" == "PUT" || "$method" == "PATCH" ]]; then
client_object+=", \n body: JSON.stringify(data)"
fi
client_object+="\n }"
fi
client_object+="),\n"
done <<< "$sorted_lines"
client_object+=" },\n"
done
client_object+="};"
echo -e "// Generated from ${routes_file}\n" > "$outfile"
echo -e "${dto_definitions}${client_object}" >> "$outfile"
echo "API client generated at ${outfile}"
}
# Function to generate OpenAPI spec
generate_openapi() {
local outfile=$1
local paths_json=""
declare -A paths
while read -r method uri action; do
path="/api/${uri}"
# OpenAPI uses lowercase methods
method_lower=$(echo "$method" | tr '[:upper:]' '[:lower:]')
# Group operations by path
if [[ -z "${paths[$path]}" ]]; then
paths[$path]=""
fi
paths[$path]+="\"${method_lower}\": {\"summary\": \"${action}\"},"
done
# Assemble the paths object
sorted_paths=$(for path in "${!paths[@]}"; do echo "$path"; done | sort)
for path in $sorted_paths; do
operations=$(echo "${paths[$path]}" | sed 's/,$//') # remove trailing comma
paths_json+="\"${path}\": {${operations}},"
done
paths_json=$(echo "$paths_json" | sed 's/,$//') # remove final trailing comma
# Create the final OpenAPI JSON structure
openapi_spec=$(cat <<EOF
{
"openapi": "3.0.0",
"info": {
"title": "API Client",
"version": "1.0.0",
"description": "Generated from ${routes_file}"
},
"paths": {
${paths_json}
}
}
EOF
)
echo "$openapi_spec" > "$outfile"
echo "OpenAPI spec generated at ${outfile}"
}
# Main logic
parsed_routes=$(parse_routes "$routes_file")
if [[ "$output_format" == "ts" || "$output_format" == "js" ]]; then
generate_client "$output_format" "$output_file" <<< "$parsed_routes"
elif [[ "$output_format" == "openapi" ]]; then
generate_openapi "$output_file" <<< "$parsed_routes"
else
echo "Invalid output format specified." >&2
exit 1
fi

View file

@ -1,23 +0,0 @@
#!/bin/bash
# Check for a drop in test coverage.
# Policy: EXPOSE warning when coverage drops, HIDE when stable/improved
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/output-policy.sh"
# Source the main coverage script to use its functions
source claude/code/commands/coverage.sh 2>/dev/null || true
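# coverage.sh is not part of this diff; if it failed to load, fall back to
# minimal sketches of its two getters. The .coverage/ cache paths here are
# assumptions, not the real implementation.
if ! declare -F get_current_coverage >/dev/null; then
get_current_coverage() { cat .coverage/current 2>/dev/null; }
get_previous_coverage() { cat .coverage/previous 2>/dev/null; }
fi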
read -r input
# Get current and previous coverage (with fallbacks)
CURRENT_COVERAGE=$(get_current_coverage 2>/dev/null || echo "0")
PREVIOUS_COVERAGE=$(get_previous_coverage 2>/dev/null || echo "0")
# Compare coverage
if awk -v current="$CURRENT_COVERAGE" -v previous="$PREVIOUS_COVERAGE" 'BEGIN {exit !(current < previous)}'; then
DROP=$(awk -v c="$CURRENT_COVERAGE" -v p="$PREVIOUS_COVERAGE" 'BEGIN {printf "%.1f", p - c}')
expose_warning "Test coverage dropped by ${DROP}%" "Previous: ${PREVIOUS_COVERAGE}% → Current: ${CURRENT_COVERAGE}%"
else
pass_through "$input"
fi

View file

@ -1,28 +1,27 @@
#!/bin/bash #!/bin/bash
# Warn about debug statements left in code after edits # Warn about debug statements left in code after edits
# Policy: EXPOSE warning when found, HIDE when clean
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/output-policy.sh"
read -r input read -r input
FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty') FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty')
FOUND=""
if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then
case "$FILE_PATH" in case "$FILE_PATH" in
*.go) *.go)
FOUND=$(grep -n "fmt\.Println\|log\.Println" "$FILE_PATH" 2>/dev/null | head -3) # Check for fmt.Println, log.Println debug statements
if grep -n "fmt\.Println\|log\.Println" "$FILE_PATH" 2>/dev/null | head -3 | grep -q .; then
echo "[Hook] WARNING: Debug prints found in $FILE_PATH" >&2
grep -n "fmt\.Println\|log\.Println" "$FILE_PATH" 2>/dev/null | head -3 >&2
fi
;; ;;
*.php) *.php)
FOUND=$(grep -n "dd(\|dump(\|var_dump(\|print_r(" "$FILE_PATH" 2>/dev/null | head -3) # Check for dd(), dump(), var_dump(), print_r()
if grep -n "dd(\|dump(\|var_dump(\|print_r(" "$FILE_PATH" 2>/dev/null | head -3 | grep -q .; then
echo "[Hook] WARNING: Debug statements found in $FILE_PATH" >&2
grep -n "dd(\|dump(\|var_dump(\|print_r(" "$FILE_PATH" 2>/dev/null | head -3 >&2
fi
;; ;;
esac esac
fi fi
if [[ -n "$FOUND" ]]; then # Pass through the input
expose_warning "Debug statements in \`$FILE_PATH\`" "\`\`\`\n$FOUND\n\`\`\`" echo "$input"
else
pass_through "$input"
fi

View file

@ -1,239 +0,0 @@
<?php
if ($argc < 2) {
echo "Usage: php " . $argv[0] . " <file_path> [--auto-fix]\n";
exit(1);
}
$filePath = $argv[1];
$autoFix = isset($argv[2]) && $argv[2] === '--auto-fix';
if (!file_exists($filePath)) {
echo "Error: File not found at " . $filePath . "\n";
exit(1);
}
$content = file_get_contents($filePath);
$tokens = token_get_all($content);
function checkStrictTypes(array $tokens, string $filePath, bool $autoFix, string &$content): void
{
$hasStrictTypes = false;
foreach ($tokens as $i => $token) {
if (!is_array($token) || $token[0] !== T_DECLARE) {
continue;
}
// Found a declare statement, now check if it's strict_types=1
$next = findNextMeaningfulToken($tokens, $i + 1);
if ($next && is_string($tokens[$next]) && $tokens[$next] === '(') {
$next = findNextMeaningfulToken($tokens, $next + 1);
if ($next && is_array($tokens[$next]) && $tokens[$next][0] === T_STRING && $tokens[$next][1] === 'strict_types') {
$next = findNextMeaningfulToken($tokens, $next + 1);
if ($next && is_string($tokens[$next]) && $tokens[$next] === '=') {
$next = findNextMeaningfulToken($tokens, $next + 1);
if ($next && is_array($tokens[$next]) && $tokens[$next][0] === T_LNUMBER && $tokens[$next][1] === '1') {
$hasStrictTypes = true;
break;
}
}
}
}
}
if (!$hasStrictTypes) {
fwrite(STDERR, "⚠ Line 1: Missing declare(strict_types=1)\n");
if ($autoFix) {
$content = str_replace('<?php', "<?php\n\ndeclare(strict_types=1);", $content);
file_put_contents($filePath, $content);
fwrite(STDERR, "✓ Auto-fixed: Added declare(strict_types=1)\n");
}
}
}
function findNextMeaningfulToken(array $tokens, int $index): ?int
{
for ($i = $index; $i < count($tokens); $i++) {
if (is_array($tokens[$i]) && in_array($tokens[$i][0], [T_WHITESPACE, T_COMMENT, T_DOC_COMMENT])) {
continue;
}
return $i;
}
return null;
}
function checkParameterTypeHints(array $tokens): void
{
foreach ($tokens as $i => $token) {
if (!is_array($token) || $token[0] !== T_FUNCTION) {
continue;
}
$parenStart = findNextMeaningfulToken($tokens, $i + 1);
if (!$parenStart || !is_array($tokens[$parenStart]) || $tokens[$parenStart][0] !== T_STRING) {
continue; // Not a standard function definition, maybe an anonymous function
}
$parenStart = findNextMeaningfulToken($tokens, $parenStart + 1);
if (!$parenStart || !is_string($tokens[$parenStart]) || $tokens[$parenStart] !== '(') {
continue;
}
$paramIndex = $parenStart + 1;
while (true) {
$nextParam = findNextMeaningfulToken($tokens, $paramIndex);
if (!$nextParam || (is_string($tokens[$nextParam]) && $tokens[$nextParam] === ')')) {
break; // End of parameter list
}
// We are at the start of a parameter declaration. It could be a type hint or the variable itself.
$currentToken = $tokens[$nextParam];
if (is_array($currentToken) && $currentToken[0] === T_VARIABLE) {
// This variable has no type hint.
fwrite(STDERR, "⚠ Line {$currentToken[2]}: Parameter {$currentToken[1]} has no type hint\n");
}
// Move to the next parameter
$comma = findNextToken($tokens, $nextParam, ',');
$closingParen = findNextToken($tokens, $nextParam, ')');
if ($comma !== null && $comma < $closingParen) {
$paramIndex = $comma + 1;
} else {
break; // No more commas, so no more parameters
}
}
}
}
function findNextToken(array $tokens, int $index, $tokenType): ?int
{
for ($i = $index; $i < count($tokens); $i++) {
if (is_string($tokens[$i]) && $tokens[$i] === $tokenType) {
return $i;
}
if (is_array($tokens[$i]) && $tokens[$i][0] === $tokenType) {
return $i;
}
}
return null;
}
function checkReturnTypeHints(array $tokens, string $filePath, bool $autoFix, string &$content): void
{
foreach ($tokens as $i => $token) {
if (!is_array($token) || $token[0] !== T_FUNCTION) {
continue;
}
$functionNameToken = findNextMeaningfulToken($tokens, $i + 1);
if (!$functionNameToken || !is_array($tokens[$functionNameToken]) || $tokens[$functionNameToken][0] !== T_STRING) {
continue; // Not a standard function definition
}
$functionName = $tokens[$functionNameToken][1];
if (in_array($functionName, ['__construct', '__destruct'])) {
continue; // Constructors and destructors do not have return types
}
$parenStart = findNextMeaningfulToken($tokens, $functionNameToken + 1);
if (!$parenStart || !is_string($tokens[$parenStart]) || $tokens[$parenStart] !== '(') {
continue;
}
$parenEnd = findNextToken($tokens, $parenStart + 1, ')');
if ($parenEnd === null) {
continue; // Malformed function
}
$nextToken = findNextMeaningfulToken($tokens, $parenEnd + 1);
if (!$nextToken || !(is_string($tokens[$nextToken]) && $tokens[$nextToken] === ':')) {
fwrite(STDERR, "⚠ Line {$tokens[$functionNameToken][2]}: Method {$functionName}() has no return type\n");
if ($autoFix) {
// Check if the function has a return statement
$bodyStart = findNextToken($tokens, $parenEnd + 1, '{');
if ($bodyStart !== null) {
$bodyEnd = findMatchingBrace($tokens, $bodyStart);
if ($bodyEnd !== null) {
$hasReturn = false;
for ($j = $bodyStart; $j < $bodyEnd; $j++) {
if (is_array($tokens[$j]) && $tokens[$j][0] === T_RETURN) {
$hasReturn = true;
break;
}
}
if (!$hasReturn) {
$offset = 0;
for ($k = 0; $k < $parenEnd; $k++) {
if (is_array($tokens[$k])) {
$offset += strlen($tokens[$k][1]);
} else {
$offset += strlen($tokens[$k]);
}
}
$original = ')';
$replacement = ') : void';
$content = substr_replace($content, $replacement, $offset, strlen($original));
file_put_contents($filePath, $content);
fwrite(STDERR, "✓ Auto-fixed: Added : void return type to {$functionName}()\n");
}
}
}
}
}
}
}
function findMatchingBrace(array $tokens, int $startIndex): ?int
{
$braceLevel = 0;
for ($i = $startIndex; $i < count($tokens); $i++) {
if (is_string($tokens[$i]) && $tokens[$i] === '{') {
$braceLevel++;
} elseif (is_string($tokens[$i]) && $tokens[$i] === '}') {
$braceLevel--;
if ($braceLevel === 0) {
return $i;
}
}
}
return null;
}
function checkPropertyTypeHints(array $tokens): void
{
foreach ($tokens as $i => $token) {
if (!is_array($token) || !in_array($token[0], [T_PUBLIC, T_PROTECTED, T_PRIVATE, T_VAR])) {
continue;
}
$nextToken = findNextMeaningfulToken($tokens, $i + 1);
if ($nextToken && is_array($tokens[$nextToken]) && $tokens[$nextToken][0] === T_STATIC) {
$nextToken = findNextMeaningfulToken($tokens, $nextToken + 1);
}
if ($nextToken && is_array($tokens[$nextToken]) && $tokens[$nextToken][0] === T_VARIABLE) {
// This is a property without a type hint
fwrite(STDERR, "⚠ Line {$tokens[$nextToken][2]}: Property {$tokens[$nextToken][1]} has no type hint\n");
}
}
}
function tokensToCode(array $tokens): string
{
$code = '';
foreach ($tokens as $token) {
if (is_array($token)) {
$code .= $token[1];
} else {
$code .= $token;
}
}
return $code;
}
checkStrictTypes($tokens, $filePath, $autoFix, $content);
checkParameterTypeHints($tokens);
checkReturnTypeHints($tokens, $filePath, $autoFix, $content);
checkPropertyTypeHints($tokens);

View file

@ -1,14 +0,0 @@
#!/bin/bash
# Enforce strict type hints in PHP files.
read -r input
FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty')
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then
php "${SCRIPT_DIR}/check-types.php" "$FILE_PATH"
fi
# Pass through the input
echo "$input"

View file

@ -1,135 +0,0 @@
#!/bin/bash
# Default options
CLEAN_DEPS=false
CLEAN_CACHE_ONLY=false
DRY_RUN=false
# Parse arguments (the for loop iterates a copy of "$@", so no shift is needed)
for arg in "$@"; do
case $arg in
--deps) CLEAN_DEPS=true ;;
--cache) CLEAN_CACHE_ONLY=true ;;
--dry-run) DRY_RUN=true ;;
esac
done
# --- Configuration ---
CACHE_PATHS=(
"storage/framework/cache/*"
"bootstrap/cache/*"
".phpunit.cache"
)
BUILD_PATHS=(
"public/build/*"
"public/hot"
)
DEP_PATHS=(
"vendor"
"node_modules"
)
# --- Logic ---
total_freed=0
delete_path() {
local path_pattern=$1
local size_bytes size_human
# Calculate the size in a subshell so nullglob does not leak into the main
# script; the subshell echoes the byte count so the value survives it.
# (Assigning inside a plain ( ... ) subshell would be lost on return.)
size_bytes=$(
shopt -s nullglob
files=( $path_pattern )
total=0
for file in "${files[@]}"; do
if [ -e "$file" ]; then
total=$((total + $(du -sb "$file" | cut -f1)))
fi
done
echo "$total"
)
if [ "${size_bytes:-0}" -eq 0 ]; then
return # No files matched the glob
fi
total_freed=$((total_freed + size_bytes))
size_human=$(echo "$size_bytes" | awk '{
if ($1 >= 1024*1024*1024) { printf "%.2f GB", $1/(1024*1024*1024) }
else if ($1 >= 1024*1024) { printf "%.2f MB", $1/(1024*1024) }
else if ($1 >= 1024) { printf "%.2f KB", $1/1024 }
else { printf "%d Bytes", $1 }
}')
if [ "$DRY_RUN" = true ]; then
echo " ✓ (dry run) $path_pattern ($size_human)"
else
# Suppress "no such file or directory" errors if the glob matches nothing
rm -rf $path_pattern 2>/dev/null
echo " ✓ $path_pattern ($size_human)"
fi
}
echo "Cleaning project..."
echo ""
if [ "$CLEAN_CACHE_ONLY" = true ]; then
echo "Cache:"
for path in "${CACHE_PATHS[@]}"; do
delete_path "$path"
done
else
echo "Cache:"
for path in "${CACHE_PATHS[@]}"; do
delete_path "$path"
done
echo ""
echo "Build:"
for path in "${BUILD_PATHS[@]}"; do
delete_path "$path"
done
fi
if [ "$CLEAN_DEPS" = true ]; then
if [ "$DRY_RUN" = false ]; then
echo ""
read -p "Delete vendor/ and node_modules/? [y/N] " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
echo "Aborted."
exit 1
fi
fi
echo ""
echo "Dependencies (--deps):"
for path in "${DEP_PATHS[@]}"; do
delete_path "$path"
done
fi
# Final summary
if [ "$total_freed" -gt 0 ]; then
total_freed_human=$(echo "$total_freed" | awk '{
if ($1 >= 1024*1024*1024) { printf "%.2f GB", $1/(1024*1024*1024) }
else if ($1 >= 1024*1024) { printf "%.2f MB", $1/(1024*1024) }
else if ($1 >= 1024) { printf "%.2f KB", $1/1024 }
else { printf "%d Bytes", $1 }
}')
echo ""
echo "Total freed: $total_freed_human"
fi

View file

@ -1,187 +0,0 @@
#!/bin/bash
# Core code review script
# --- Result Variables ---
conventions_result=""
debug_result=""
test_coverage_result=""
secrets_result=""
error_handling_result=""
docs_result=""
intensive_security_result=""
suggestions=()
# --- Check Functions ---
check_conventions() {
# Placeholder for project convention checks (e.g., linting)
conventions_result="✓ Conventions: UK English, strict types (Placeholder)"
}
check_debug() {
local diff_content=$1
if echo "$diff_content" | grep -q -E 'console\.log|print_r|var_dump'; then
debug_result="⚠ No debug statements: Found debug statements."
suggestions+=("Remove debug statements before merging.")
else
debug_result="✓ No debug statements"
fi
}
check_test_coverage() {
local diff_content=$1
# This is a simple heuristic and not a replacement for a full test coverage suite.
# It checks if any new files are tests, or if test files were modified.
if echo "$diff_content" | grep -q -E '\+\+\+ b/(tests?|specs?)/'; then
test_coverage_result="✓ Test files modified: Yes"
else
test_coverage_result="⚠ Test files modified: No"
suggestions+=("Consider adding tests for new functionality.")
fi
}
check_secrets() {
local diff_content=$1
if echo "$diff_content" | grep -q -i -E 'secret|password|api_key|token'; then
secrets_result="⚠ No secrets detected: Potential hardcoded secrets found."
suggestions+=("Review potential hardcoded secrets for security.")
else
secrets_result="✓ No secrets detected"
fi
}
intensive_security_check() {
local diff_content=$1
if echo "$diff_content" | grep -q -E 'eval|dangerouslySetInnerHTML'; then
intensive_security_result="⚠ Intensive security scan: Unsafe functions may be present."
suggestions+=("Thoroughly audit the use of unsafe functions.")
else
intensive_security_result="✓ Intensive security scan: No obvious unsafe functions found."
fi
}
check_error_handling() {
local diff_content=$1
# Files with new functions/methods but no error handling
local suspicious_files=$(echo "$diff_content" | grep -E '^\+\+\+ b/' | sed 's/^\+\+\+ b\///' | while read -r file; do
# Heuristic: if a file has added lines with 'function' or '=>' but no 'try'/'catch', it's suspicious.
added_logic=$(echo "$diff_content" | grep -E "^\+.*(function|\=>)" | grep "$file")
added_error_handling=$(echo "$diff_content" | grep -E "^\+.*(try|catch|throw)" | grep "$file")
if [ -n "$added_logic" ] && [ -z "$added_error_handling" ]; then
line_number=$(echo "$diff_content" | grep -nE "^\+.*(function|\=>)" | grep "$file" | cut -d: -f1 | head -n 1)
echo "$file:$line_number"
fi
done)
if [ -n "$suspicious_files" ]; then
error_handling_result="⚠ Missing error handling"
for file_line in $suspicious_files; do
suggestions+=("Consider adding error handling in $file_line.")
done
else
error_handling_result="✓ Error handling present"
fi
}
check_docs() {
local diff_content=$1
if echo "$diff_content" | grep -q -E '\+\+\+ b/(README.md|docs?)/'; then
docs_result="✓ Documentation updated"
else
docs_result="⚠ Documentation updated: No changes to documentation files detected."
suggestions+=("Update documentation if the changes affect public APIs or user behavior.")
fi
}
# --- Output Function ---
print_results() {
local title="Code Review"
if [ -n "$range_arg" ]; then
title="$title: $range_arg"
else
local branch_name=$(git rev-parse --abbrev-ref HEAD 2>/dev/null)
if [ -n "$branch_name" ]; then
title="$title: $branch_name branch"
else
title="$title: Staged changes"
fi
fi
echo "$title"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Print checklist
echo "$conventions_result"
echo "$debug_result"
echo "$test_coverage_result"
echo "$secrets_result"
echo "$error_handling_result"
echo "$docs_result"
if [ -n "$intensive_security_result" ]; then
echo "$intensive_security_result"
fi
echo ""
# Print suggestions if any
if [ ${#suggestions[@]} -gt 0 ]; then
echo "Suggestions:"
for i in "${!suggestions[@]}"; do
echo "$((i+1)). ${suggestions[$i]}"
done
echo ""
fi
echo "Overall: Approve with suggestions"
}
# --- Main Logic ---
security_mode=false
range_arg=""
for arg in "$@"; do
case $arg in
--security)
security_mode=true
;;
*)
if [ -n "$range_arg" ]; then echo "Error: Multiple range arguments." >&2; exit 1; fi
range_arg="$arg"
;;
esac
done
diff_output=""
if [ -z "$range_arg" ]; then
diff_output=$(git diff --staged)
if [ $? -ne 0 ]; then echo "Error: git diff --staged failed." >&2; exit 1; fi
if [ -z "$diff_output" ]; then echo "No staged changes to review."; exit 0; fi
elif [[ "$range_arg" == \#* ]]; then
pr_number="${range_arg#?}"
if ! command -v gh &> /dev/null; then echo "Error: 'gh' not found." >&2; exit 1; fi
diff_output=$(gh pr diff "$pr_number")
if [ $? -ne 0 ]; then echo "Error: gh pr diff failed. Is the PR number valid?" >&2; exit 1; fi
elif [[ "$range_arg" == *..* ]]; then
diff_output=$(git diff "$range_arg")
if [ $? -ne 0 ]; then echo "Error: git diff failed. Is the commit range valid?" >&2; exit 1; fi
else
echo "Unsupported argument: $range_arg" >&2
exit 1
fi
# Run checks
check_conventions
check_debug "$diff_output"
check_test_coverage "$diff_output"
check_error_handling "$diff_output"
check_docs "$diff_output"
check_secrets "$diff_output"
if [ "$security_mode" = true ]; then
intensive_security_check "$diff_output"
fi
# Print the final formatted report
print_results

View file

@ -1,79 +0,0 @@
#!/bin/bash
# Fetch the raw status from the core dev health command.
# The output format is assumed to be:
# module branch status ahead behind insertions deletions
RAW_STATUS=$(core dev health 2>/dev/null)
# Exit if the command fails or produces no output
if [ -z "$RAW_STATUS" ]; then
echo "Failed to get repo status from 'core dev health'."
echo "Make sure the 'core' command is available and repositories are correctly configured."
exit 1
fi
FILTER="$1"
# --- Header ---
echo "Host UK Monorepo Status"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
printf "%-15s %-15s %-10s %s\n" "Module" "Branch" "Status" "Behind/Ahead"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# --- Data Processing and Printing ---
while read -r module branch status ahead behind insertions deletions; do
is_dirty=false
is_behind=false
if [[ "$status" == "dirty" ]]; then
is_dirty=true
fi
if (( behind > 0 )); then
is_behind=true
fi
# Apply filters
if [[ "$FILTER" == "--dirty" && "$is_dirty" == "false" ]]; then
continue
fi
if [[ "$FILTER" == "--behind" && "$is_behind" == "false" ]]; then
continue
fi
# Format the "Behind/Ahead" column based on status
if [[ "$status" == "dirty" ]]; then
behind_ahead_text="+${insertions} -${deletions}"
else # status is 'clean'
if (( behind > 0 )); then
behind_ahead_text="-${behind} (behind)"
elif (( ahead > 0 )); then
behind_ahead_text="+${ahead}"
else
behind_ahead_text="✓"
fi
fi
printf "%-15s %-15s %-10s %s\n" "$module" "$branch" "$status" "$behind_ahead_text"
done <<< "$RAW_STATUS"
# --- Summary ---
# The summary is always based on the full, unfiltered data.
dirty_count=$(echo "$RAW_STATUS" | grep -cw "dirty")
behind_count=$(echo "$RAW_STATUS" | awk '($5+0) > 0' | wc -l)
clean_count=$(echo "$RAW_STATUS" | grep -cw "clean")
summary_parts=()
if (( dirty_count > 0 )); then
summary_parts+=("$dirty_count dirty")
fi
if (( behind_count > 0 )); then
summary_parts+=("$behind_count behind")
fi
summary_parts+=("$clean_count clean")
summary="Summary: $(IFS=, ; echo "${summary_parts[*]}")"
echo
echo "$summary"

View file

@ -1,151 +0,0 @@
import os
import sys
import yaml
def find_repos_yaml():
"""Traverse up from the current directory to find repos.yaml."""
current_dir = os.getcwd()
while current_dir != '/':
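# Note: the walk stops before checking '/' itself, so a repos.yaml at the filesystem root is never found.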
repos_yaml_path = os.path.join(current_dir, 'repos.yaml')
if os.path.exists(repos_yaml_path):
return repos_yaml_path
current_dir = os.path.dirname(current_dir)
return None
def parse_dependencies(repos_yaml_path):
"""Parses the repos.yaml file and returns a dependency graph."""
with open(repos_yaml_path, 'r') as f:
data = yaml.safe_load(f)
graph = {}
repos = data.get('repos', {})
for repo_name, details in repos.items():
graph[repo_name] = details.get('depends', []) or []
return graph
def find_circular_dependencies(graph):
"""Finds circular dependencies in the graph using DFS."""
visiting = set()
visited = set()
cycles = []
def dfs(node, path):
visiting.add(node)
path.append(node)
for neighbor in graph.get(node, []):
if neighbor in visiting:
cycle_start_index = path.index(neighbor)
cycles.append(path[cycle_start_index:] + [neighbor])
elif neighbor not in visited:
dfs(neighbor, path)
path.pop()
visiting.remove(node)
visited.add(node)
for node in graph:
if node not in visited:
dfs(node, [])
return cycles
def print_dependency_tree(graph, module, prefix=""):
"""Prints the dependency tree for a given module."""
if module not in graph:
print(f"Module '{module}' not found.")
return
print(f"{prefix}{module}")
dependencies = graph.get(module, [])
for i, dep in enumerate(dependencies):
is_last = i == len(dependencies) - 1
# Continue the parent's guide lines: "├── " becomes "│   ", "└── " becomes "    ".
new_prefix = prefix.replace("├── ", "│   ").replace("└── ", "    ")
connector = "└── " if is_last else "├── "
print_dependency_tree(graph, dep, new_prefix + connector)
def print_reverse_dependencies(graph, module):
"""Prints the modules that depend on a given module."""
if module not in graph:
print(f"Module '{module}' not found.")
return
reverse_deps = []
for repo, deps in graph.items():
if module in deps:
reverse_deps.append(repo)
if not reverse_deps:
print(f"(no modules depend on {module})")
else:
for i, dep in enumerate(sorted(reverse_deps)):
is_last = i == len(reverse_deps) - 1
print(f"{'└── ' if is_last else '├── '}{dep}")
def main():
"""Main function to handle command-line arguments and execute logic."""
repos_yaml_path = find_repos_yaml()
if not repos_yaml_path:
print("Error: Could not find repos.yaml in the current directory or any parent directory.")
sys.exit(1)
try:
graph = parse_dependencies(repos_yaml_path)
except Exception as e:
print(f"Error parsing repos.yaml: {e}")
sys.exit(1)
cycles = find_circular_dependencies(graph)
if cycles:
print("Error: Circular dependencies detected!")
for cycle in cycles:
print(" -> ".join(cycle))
sys.exit(1)
args = sys.argv[1:]
if not args:
print("Dependency tree for all modules:")
for module in sorted(graph.keys()):
print(f"\n{module} dependencies:")
dependencies = graph.get(module, [])
if not dependencies:
print("└── (no dependencies)")
else:
for i, dep in enumerate(dependencies):
is_last = i == len(dependencies) - 1
print_dependency_tree(graph, dep, "└── " if is_last else "├── ")
return
reverse = "--reverse" in args
if reverse:
args.remove("--reverse")
if not args:
print("Usage: /core:deps [--reverse] [module_name]")
sys.exit(1)
module_name = args[0]
if module_name not in graph:
print(f"Error: Module '{module_name}' not found in repos.yaml.")
sys.exit(1)
if reverse:
print(f"Modules that depend on {module_name}:")
print_reverse_dependencies(graph, module_name)
else:
print(f"{module_name} dependencies:")
dependencies = graph.get(module_name, [])
if not dependencies:
print("└── (no dependencies)")
else:
for i, dep in enumerate(dependencies):
is_last = i == len(dependencies) - 1
connector = "└── " if is_last else "├── "
print_dependency_tree(graph, dep, connector)
if __name__ == "__main__":
main()

View file

@ -1,51 +0,0 @@
#!/bin/bash
#
# Detects the current module and sets environment variables for other tools.
# Intended to be run once per session via a hook.
# --- Detection Logic ---
MODULE_NAME=""
MODULE_TYPE="unknown"
# 1. Check for composer.json (PHP)
if [ -f "composer.json" ]; then
MODULE_TYPE="php"
# Use jq, but check if it is installed first
if command -v jq >/dev/null 2>&1; then
MODULE_NAME=$(jq -r ".name // empty" composer.json)
fi
fi
# 2. Check for go.mod (Go)
if [ -f "go.mod" ]; then
MODULE_TYPE="go"
MODULE_NAME=$(grep "^module" go.mod | awk '{print $2}')
fi
# 3. If name is still empty, try git remote
if [ -z "$MODULE_NAME" ] || [ "$MODULE_NAME" = "unknown" ]; then
if git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
GIT_REMOTE=$(git remote get-url origin 2>/dev/null)
if [ -n "$GIT_REMOTE" ]; then
MODULE_NAME=$(basename "$GIT_REMOTE" .git)
fi
fi
fi
# 4. As a last resort, use the current directory name
if [ -z "$MODULE_NAME" ] || [ "$MODULE_NAME" = "unknown" ]; then
MODULE_NAME=$(basename "$PWD")
fi
# --- Store Context ---
# Create a file with the context variables to be sourced by other scripts.
mkdir -p .claude-plugin/.tmp
CONTEXT_FILE=".claude-plugin/.tmp/module_context.sh"
echo "export CLAUDE_CURRENT_MODULE=\"$MODULE_NAME\"" > "$CONTEXT_FILE"
echo "export CLAUDE_MODULE_TYPE=\"$MODULE_TYPE\"" >> "$CONTEXT_FILE"
# --- User-facing Message ---
# Print a confirmation message to stderr.
echo "Workspace context loaded: Module='$MODULE_NAME', Type='$MODULE_TYPE'" >&2

View file

@ -1,73 +0,0 @@
#!/bin/bash
# Patterns for detecting secrets
PATTERNS=(
# API keys (e.g., sk_live_..., ghp_..., etc.)
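# NOTE: very broad - any run of 32+ alphanumerics matches, so expect false positives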
"[a-zA-Z0-9]{32,}"
# AWS keys
"AKIA[0-9A-Z]{16}"
# Private keys
"-----BEGIN (RSA|DSA|EC|OPENSSH) PRIVATE KEY-----"
# Passwords in config
"(password|passwd|pwd)\s*[=:]\s*['\"][^'\"]+['\"]"
# Tokens
"(token|secret|key)\s*[=:]\s*['\"][^'\"]+['\"]"
)
# Exceptions for fake secrets
EXCEPTIONS=(
"password123"
"your-api-key-here"
"xxx"
"test"
"example"
)
# File to check is passed as the first argument
FILE_PATH=$1
# Function to check for secrets
check_secrets() {
local input_source="$1"
local file_path="$2"
local line_num=0
while IFS= read -r line; do
line_num=$((line_num + 1))
for pattern in "${PATTERNS[@]}"; do
if echo "$line" | grep -qE "$pattern"; then
# Check for exceptions
is_exception=false
for exception in "${EXCEPTIONS[@]}"; do
if echo "$line" | grep -qF "$exception"; then
is_exception=true
break
fi
done
if [ "$is_exception" = false ]; then
echo "⚠️ Potential secret detected!"
echo "File: $file_path"
echo "Line: $line_num"
echo ""
echo "Found: $line"
echo ""
echo "This looks like a production secret."
echo "Use environment variables instead."
echo ""
# Propose a fix (example for a PHP config file)
if [[ "$file_path" == *.php ]]; then
echo "'stripe' => ["
echo " 'secret' => env('STRIPE_SECRET'), // ✓"
echo "]"
fi
exit 1
fi
fi
done
done < "$input_source"
}
check_secrets "/dev/stdin" "$FILE_PATH"
exit 0

View file

@ -1,32 +0,0 @@
#!/bin/bash
TARGET_PATH=$1
# The second argument can be a path to scan for API endpoints.
SCAN_PATH=$2
if [ -z "$TARGET_PATH" ]; then
echo "Usage: doc-api.sh <TargetPath> [ScanPath]" >&2
exit 1
fi
# Default to scanning the 'src' directory if no path is provided.
if [ -z "$SCAN_PATH" ]; then
SCAN_PATH="src"
fi
SWAGGER_PHP_PATH="${TARGET_PATH}/vendor/bin/swagger-php"
FULL_SCAN_PATH="${TARGET_PATH}/${SCAN_PATH}"
if [ ! -d "$FULL_SCAN_PATH" ]; then
echo "Error: Scan directory does not exist at '$FULL_SCAN_PATH'." >&2
exit 1
fi
if [ -f "$SWAGGER_PHP_PATH" ]; then
echo "Found swagger-php. Generating OpenAPI spec from '$FULL_SCAN_PATH'..."
"$SWAGGER_PHP_PATH" "$FULL_SCAN_PATH"
else
echo "Error: 'swagger-php' not found at '$SWAGGER_PHP_PATH'." >&2
echo "Please ensure it is installed in your project's dev dependencies." >&2
exit 1
fi

View file

@ -1,66 +0,0 @@
#!/bin/bash
TARGET_PATH=$1
if [ -z "$TARGET_PATH" ]; then
echo "Usage: doc-changelog.sh <TargetPath>" >&2
exit 1
fi
# We must be in the target directory for git commands to work correctly.
cd "$TARGET_PATH"
# Get the latest tag. If no tags, this will be empty.
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null)
# Get the date of the latest tag.
TAG_DATE=$(git log -1 --format=%ai "$LATEST_TAG" 2>/dev/null | cut -d' ' -f1)
# Set the version to the latest tag, or "Unreleased" if no tags exist.
VERSION="Unreleased"
if [ -n "$LATEST_TAG" ]; then
VERSION="$LATEST_TAG"
fi
# Get the current date in YYYY-MM-DD format.
CURRENT_DATE=$(date +%F)
DATE_TO_SHOW=$CURRENT_DATE
if [ -n "$TAG_DATE" ]; then
DATE_TO_SHOW="$TAG_DATE"
fi
echo "# Changelog"
echo ""
echo "## [$VERSION] - $DATE_TO_SHOW"
echo ""
# Get the commit history. If there's a tag, get commits since the tag. Otherwise, get all.
if [ -n "$LATEST_TAG" ]; then
COMMIT_RANGE="${LATEST_TAG}..HEAD"
else
COMMIT_RANGE="HEAD"
fi
# Use git log to get commits, then awk to categorize and format them.
# Categories are based on the commit subject prefix (e.g., "feat:", "fix:").
git log --no-merges --pretty="format:%s" "$COMMIT_RANGE" | awk '
BEGIN {
FS = ": ";
print_added = 0;
print_fixed = 0;
}
/^feat:/ {
if (!print_added) {
print "### Added";
print_added = 1;
}
print "- " $2;
}
/^fix:/ {
if (!print_fixed) {
print "";
print "### Fixed";
print_fixed = 1;
}
print "- " $2;
}
'

View file

@ -1,130 +0,0 @@
<?php
if ($argc < 2) {
echo "Usage: php doc-class-parser.php <file_path>\n";
exit(1);
}
$filePath = $argv[1];
if (!file_exists($filePath)) {
echo "Error: File not found at '$filePath'\n";
exit(1);
}
// --- Find the namespace and class name by parsing the file ---
$fileContent = file_get_contents($filePath);
$namespace = '';
if (preg_match('/^\s*namespace\s+([^;]+);/m', $fileContent, $namespaceMatches)) {
$namespace = $namespaceMatches[1];
}
$className = '';
if (!preg_match('/class\s+([a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)/', $fileContent, $matches)) {
echo "Error: Could not find class name in '$filePath'\n";
exit(1);
}
$className = $matches[1];
$fqcn = $namespace ? $namespace . '\\' . $className : $className;
// Now that we have the class name, we can require the file.
require_once $filePath;
// --- Utility function to parse docblocks ---
function parseDocComment($docComment) {
$data = [
'description' => '',
'params' => [],
'return' => null,
];
if (!$docComment) return $data;
$lines = array_map(function($line) {
// Strip the leading "/**", "*/" or "*" decoration from each docblock line.
// (substr(..., 1) only removed one character, leaving "**" and "/" fragments.)
return trim(ltrim(trim($line), '/*'));
}, explode("\n", $docComment));
$descriptionDone = false;
foreach ($lines as $line) {
if ($line === '') continue;
if (strpos($line, '@') === 0) {
$descriptionDone = true;
preg_match('/@(\w+)\s*(.*)/', $line, $matches);
if (count($matches) === 3) {
$tag = $matches[1];
$content = trim($matches[2]);
if ($tag === 'param') {
preg_match('/(\S+)\s+\$(\S+)\s*(.*)/', $content, $paramMatches);
if(count($paramMatches) >= 3) {
$data['params'][$paramMatches[2]] = [
'type' => $paramMatches[1],
'description' => $paramMatches[3] ?? ''
];
}
} elseif ($tag === 'return') {
preg_match('/(\S+)\s*(.*)/', $content, $returnMatches);
if(count($returnMatches) >= 2) {
$data['return'] = [
'type' => $returnMatches[1],
'description' => $returnMatches[2] ?? ''
];
}
}
}
} elseif (!$descriptionDone) {
$data['description'] .= $line . " ";
}
}
$data['description'] = trim($data['description']);
return $data;
}
// --- Use Reflection API to get class details ---
try {
if (!class_exists($fqcn)) {
echo "Error: Class '$fqcn' does not exist after including file '$filePath'.\n";
exit(1);
}
$reflectionClass = new ReflectionClass($fqcn);
} catch (ReflectionException $e) {
echo "Error: " . $e->getMessage() . "\n";
exit(1);
}
$classDocData = parseDocComment($reflectionClass->getDocComment());
$methodsData = [];
$publicMethods = $reflectionClass->getMethods(ReflectionMethod::IS_PUBLIC);
foreach ($publicMethods as $method) {
$methodDocData = parseDocComment($method->getDocComment());
$paramsData = [];
foreach ($method->getParameters() as $param) {
$paramName = $param->getName();
$paramInfo = [
'type' => ($param->getType() ? (string)$param->getType() : ($methodDocData['params'][$paramName]['type'] ?? 'mixed')),
'required' => !$param->isOptional(),
'description' => $methodDocData['params'][$paramName]['description'] ?? ''
];
$paramsData[$paramName] = $paramInfo;
}
$methodsData[] = [
'name' => $method->getName(),
'description' => $methodDocData['description'],
'params' => $paramsData,
'return' => $methodDocData['return']
];
}
// --- Output as JSON ---
$output = [
'className' => $reflectionClass->getShortName(),
'description' => $classDocData['description'],
'methods' => $methodsData,
];
echo json_encode($output, JSON_PRETTY_PRINT);

View file

@ -1,99 +0,0 @@
#!/bin/bash
CLASS_NAME=$1
TARGET_PATH=$2
if [ -z "$CLASS_NAME" ] || [ -z "$TARGET_PATH" ]; then
echo "Usage: doc-class.sh <ClassName> <TargetPath>" >&2
exit 1
fi
# Find the file in the target path
FILE_PATH=$(find "$TARGET_PATH" -type f -name "${CLASS_NAME}.php")
if [ -z "$FILE_PATH" ]; then
echo "Error: File for class '$CLASS_NAME' not found in '$TARGET_PATH'." >&2
exit 1
fi
if [ $(echo "$FILE_PATH" | wc -l) -gt 1 ]; then
echo "Error: Multiple files found for class '$CLASS_NAME':" >&2
echo "$FILE_PATH" >&2
exit 1
fi
# --- PARSING ---
SCRIPT_DIR=$(dirname "$0")
# Use the new PHP parser to get a JSON representation of the class.
# The `jq` tool is used to parse the JSON. It's a common dependency.
PARSED_JSON=$(php "${SCRIPT_DIR}/doc-class-parser.php" "$FILE_PATH")
if [ $? -ne 0 ]; then
echo "Error: PHP parser failed." >&2
echo "$PARSED_JSON" >&2
exit 1
fi
# --- MARKDOWN GENERATION ---
CLASS_NAME=$(echo "$PARSED_JSON" | jq -r '.className')
CLASS_DESCRIPTION=$(echo "$PARSED_JSON" | jq -r '.description')
echo "# $CLASS_NAME"
echo ""
echo "$CLASS_DESCRIPTION"
echo ""
echo "## Methods"
echo ""
# Iterate over each method in the JSON
echo "$PARSED_JSON" | jq -c '.methods[]' | while read -r METHOD_JSON; do
METHOD_NAME=$(echo "$METHOD_JSON" | jq -r '.name')
# This is a bit fragile, but it's the best we can do for now
# to get the full signature.
METHOD_SIGNATURE=$(grep "function ${METHOD_NAME}" "$FILE_PATH" | sed -e 's/.*public function //' -e 's/{//' | xargs)
echo "### $METHOD_SIGNATURE"
# Method description
METHOD_DESCRIPTION=$(echo "$METHOD_JSON" | jq -r '.description')
if [ -n "$METHOD_DESCRIPTION" ]; then
echo ""
echo "$METHOD_DESCRIPTION"
fi
# Parameters
PARAMS_JSON=$(echo "$METHOD_JSON" | jq -c '.params | to_entries')
if [ "$PARAMS_JSON" != "[]" ]; then
echo ""
echo "**Parameters:**"
echo "$PARAMS_JSON" | jq -c '.[]' | while read -r PARAM_JSON; do
PARAM_NAME=$(echo "$PARAM_JSON" | jq -r '.key')
PARAM_TYPE=$(echo "$PARAM_JSON" | jq -r '.value.type')
PARAM_REQUIRED=$(echo "$PARAM_JSON" | jq -r '.value.required')
PARAM_DESC=$(echo "$PARAM_JSON" | jq -r '.value.description')
REQUIRED_TEXT=""
if [ "$PARAM_REQUIRED" = "true" ]; then
REQUIRED_TEXT=", required"
fi
echo "- \`$PARAM_NAME\` ($PARAM_TYPE$REQUIRED_TEXT) $PARAM_DESC"
done
fi
# Return type
RETURN_JSON=$(echo "$METHOD_JSON" | jq -c '.return')
if [ "$RETURN_JSON" != "null" ]; then
RETURN_TYPE=$(echo "$RETURN_JSON" | jq -r '.type')
RETURN_DESC=$(echo "$RETURN_JSON" | jq -r '.description')
echo ""
if [ -n "$RETURN_DESC" ]; then
echo "**Returns:** \`$RETURN_TYPE\` $RETURN_DESC"
else
echo "**Returns:** \`$RETURN_TYPE\`"
fi
fi
echo ""
done
exit 0

View file

@ -1,58 +0,0 @@
#!/bin/bash
MODULE_NAME=$1
TARGET_PATH=$2
if [ -z "$MODULE_NAME" ] || [ -z "$TARGET_PATH" ]; then
echo "Usage: doc-module.sh <ModuleName> <TargetPath>" >&2
exit 1
fi
MODULE_PATH="${TARGET_PATH}/${MODULE_NAME}"
COMPOSER_JSON_PATH="${MODULE_PATH}/composer.json"
if [ ! -d "$MODULE_PATH" ]; then
echo "Error: Module directory not found at '$MODULE_PATH'." >&2
exit 1
fi
if [ ! -f "$COMPOSER_JSON_PATH" ]; then
echo "Error: 'composer.json' not found in module directory '$MODULE_PATH'." >&2
exit 1
fi
# --- PARSING & MARKDOWN GENERATION ---
# Use jq to parse the composer.json file.
NAME=$(jq -r '.name' "$COMPOSER_JSON_PATH")
DESCRIPTION=$(jq -r '.description' "$COMPOSER_JSON_PATH")
TYPE=$(jq -r '.type' "$COMPOSER_JSON_PATH")
LICENSE=$(jq -r '.license' "$COMPOSER_JSON_PATH")
echo "# Module: $NAME"
echo ""
echo "**Description:** $DESCRIPTION"
echo "**Type:** $TYPE"
echo "**License:** $LICENSE"
echo ""
# List dependencies
DEPENDENCIES=$(jq -r '.require | keys[] as $key | "\($key): \(.[$key])"' "$COMPOSER_JSON_PATH")
if [ -n "$DEPENDENCIES" ]; then
echo "## Dependencies"
echo ""
echo "$DEPENDENCIES" | while read -r DEP; do
echo "- $DEP"
done
echo ""
fi
# List dev dependencies
DEV_DEPENDENCIES=$(jq -r '.["require-dev"] | keys[] as $key | "\($key): \(.[$key])"' "$COMPOSER_JSON_PATH")
if [ -n "$DEV_DEPENDENCIES" ]; then
echo "## Dev Dependencies"
echo ""
echo "$DEV_DEPENDENCIES" | while read -r DEP; do
echo "- $DEP"
done
echo ""
fi

View file

@ -1,58 +0,0 @@
#!/bin/bash
# Default path is the current directory
TARGET_PATH="."
ARGS=()
# Parse --path argument
# This allows testing by pointing the command to a mock project directory.
for arg in "$@"; do
case $arg in
--path=*)
TARGET_PATH="${arg#*=}"
;;
*)
ARGS+=("$arg")
;;
esac
done
# The subcommand is the first positional argument
SUBCOMMAND="${ARGS[0]}"
# The second argument is the name for class/module
NAME="${ARGS[1]}"
# The third argument is the optional path for api
SCAN_PATH="${ARGS[2]}"
# Get the directory where this script is located to call sub-scripts
SCRIPT_DIR=$(dirname "$0")
case "$SUBCOMMAND" in
class)
if [ -z "$NAME" ]; then
echo "Error: Missing class name." >&2
echo "Usage: /core:doc class <ClassName>" >&2
exit 1
fi
"${SCRIPT_DIR}/doc-class.sh" "$NAME" "$TARGET_PATH"
;;
module)
if [ -z "$NAME" ]; then
echo "Error: Missing module name." >&2
echo "Usage: /core:doc module <ModuleName>" >&2
exit 1
fi
"${SCRIPT_DIR}/doc-module.sh" "$NAME" "$TARGET_PATH"
;;
api)
"${SCRIPT_DIR}/doc-api.sh" "$TARGET_PATH" "$SCAN_PATH"
;;
changelog)
"${SCRIPT_DIR}/doc-changelog.sh" "$TARGET_PATH"
;;
*)
echo "Error: Unknown subcommand '$SUBCOMMAND'." >&2
echo "Usage: /core:doc [class|module|api|changelog] [name]" >&2
exit 1
;;
esac

View file

@ -1,205 +0,0 @@
#!/bin/bash
# Environment management script for /core:env command
set -e
# Function to mask sensitive values
mask_sensitive_value() {
local key="$1"
local value="$2"
if [[ "$key" =~ (_SECRET|_KEY|_PASSWORD|_TOKEN)$ ]]; then
if [ -z "$value" ]; then
echo "***not set***"
else
echo "***set***"
fi
else
echo "$value"
fi
}
# The subcommand is the first argument
SUBCOMMAND="$1"
case "$SUBCOMMAND" in
"")
# Default command: Show env vars
if [ ! -f ".env" ]; then
echo ".env file not found."
exit 1
fi
while IFS= read -r line || [[ -n "$line" ]]; do
# Skip comments and empty lines
if [[ "$line" =~ ^\s*#.*$ || -z "$line" ]]; then
continue
fi
# Extract key and value
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
masked_value=$(mask_sensitive_value "$key" "$value")
echo "$key=$masked_value"
done < ".env"
;;
check)
# Subcommand: check
if [ ! -f ".env.example" ]; then
echo ".env.example file not found."
exit 1
fi
# Create an associative array of env vars
declare -A env_vars
if [ -f ".env" ]; then
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ ! "$line" =~ ^\s*# && "$line" =~ = ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
env_vars["$key"]="$value"
fi
done < ".env"
fi
echo "Environment Check"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo
errors=0
warnings=0
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ -z "$line" || "$line" =~ ^\s*# ]]; then
continue
fi
example_key=$(echo "$line" | cut -d '=' -f 1)
example_value=$(echo "$line" | cut -d '=' -f 2-)
if [[ ${env_vars[$example_key]+_} ]]; then
# Key exists in .env
env_value="${env_vars[$example_key]}"
if [ -n "$env_value" ]; then
echo "$example_key=$(mask_sensitive_value "$example_key" "$env_value")"
else
# Key exists but value is empty
if [ -z "$example_value" ]; then
echo "$example_key missing (required, no default)"
((errors++))
else
echo "$example_key missing (default: $example_value)"
((warnings++))
fi
fi
else
# Key does not exist in .env
if [ -z "$example_value" ]; then
echo "$example_key missing (required, no default)"
((errors++))
else
echo "$example_key missing (default: $example_value)"
((warnings++))
fi
fi
done < ".env.example"
echo
if [ "$errors" -gt 0 ] || [ "$warnings" -gt 0 ]; then
echo "$errors errors, $warnings warnings"
else
echo "✓ All checks passed."
fi
;;
diff)
# Subcommand: diff
if [ ! -f ".env.example" ]; then
echo ".env.example file not found."
exit 1
fi
# Create associative arrays for both files
declare -A env_vars
if [ -f ".env" ]; then
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ ! "$line" =~ ^\s*# && "$line" =~ = ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
env_vars["$key"]="$value"
fi
done < ".env"
fi
declare -A example_vars
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ ! "$line" =~ ^\s*# && "$line" =~ = ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
example_vars["$key"]="$value"
fi
done < ".env.example"
echo "Environment Diff"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo
# Check for modifications and deletions
for key in "${!example_vars[@]}"; do
example_value="${example_vars[$key]}"
if [[ ${env_vars[$key]+_} ]]; then
# Key exists in .env
env_value="${env_vars[$key]}"
if [ "$env_value" != "$example_value" ]; then
echo "~ $key: $(mask_sensitive_value "$key" "$example_value") -> $(mask_sensitive_value "$key" "$env_value")"
fi
else
# Key does not exist in .env
echo "- $key: $(mask_sensitive_value "$key" "$example_value")"
fi
done
# Check for additions
for key in "${!env_vars[@]}"; do
if [[ ! ${example_vars[$key]+_} ]]; then
echo "+ $key: $(mask_sensitive_value "$key" "${env_vars[$key]}")"
fi
done
;;
sync)
# Subcommand: sync
if [ ! -f ".env.example" ]; then
echo ".env.example file not found."
exit 1
fi
# Create an associative array of env vars
declare -A env_vars
if [ -f ".env" ]; then
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ ! "$line" =~ ^\s*# && "$line" =~ = ]]; then
key=$(echo "$line" | cut -d '=' -f 1)
value=$(echo "$line" | cut -d '=' -f 2-)
env_vars["$key"]="$value"
fi
done < ".env"
fi
while IFS= read -r line || [[ -n "$line" ]]; do
if [[ -z "$line" || "$line" =~ ^\s*# ]]; then
continue
fi
example_key=$(echo "$line" | cut -d '=' -f 1)
example_value=$(echo "$line" | cut -d '=' -f 2-)
if [[ ! ${env_vars[$example_key]+_} ]]; then
# Key does not exist in .env, so add it
echo "$example_key=$example_value" >> ".env"
echo "Added: $example_key"
fi
done < ".env.example"
echo "Sync complete."
;;
*)
echo "Unknown subcommand: $SUBCOMMAND"
exit 1
;;
esac
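# Example usage (a sketch; assumes this file is wired up as an env command
# and that mask_sensitive_value is defined earlier in the script):
#   ./env.sh list    # print .env with sensitive values masked
#   ./env.sh check   # compare .env against .env.example, count errors/warnings
#   ./env.sh diff    # show modified (~), removed (-) and added (+) keys
#   ./env.sh sync    # append any .env.example keys missing from .env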

View file

@ -1,94 +0,0 @@
#!/bin/bash
set -euo pipefail
# Default values
DRAFT_FLAG=""
REVIEWERS=""
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--draft)
DRAFT_FLAG="--draft"
shift
;;
--reviewer)
if [[ -n "$2" ]]; then
REVIEWERS="$REVIEWERS --reviewer $2"
shift
shift
else
echo "Error: --reviewer flag requires an argument." >&2
exit 1
fi
;;
*)
echo "Unknown option: $1" >&2
exit 1
;;
esac
done
# --- Git data ---
# Get default branch (main or master)
# `|| true` keeps `set -euo pipefail` from killing the script before the fallback runs
DEFAULT_BRANCH=$(git remote show origin 2>/dev/null | grep 'HEAD branch' | cut -d' ' -f5 || true)
if [[ -z "$DEFAULT_BRANCH" ]]; then
# Fallback if the remote isn't set up or reports no HEAD branch
if git show-ref --verify --quiet refs/heads/main; then
DEFAULT_BRANCH="main"
else
DEFAULT_BRANCH="master"
fi
fi
# Get current branch
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
if [[ "$CURRENT_BRANCH" == "HEAD" ]]; then
echo "Error: Not on a branch. Aborting." >&2
exit 1
fi
# Get merge base
MERGE_BASE=$(git merge-base HEAD "$DEFAULT_BRANCH" 2>/dev/null || true)
if [[ -z "$MERGE_BASE" ]]; then
echo "Error: Could not find a common ancestor with '$DEFAULT_BRANCH'. Are you up to date?" >&2
exit 1
fi
# --- PR Content Generation ---
# Generate Title
# Convert branch name from kebab-case/snake_case to Title Case
TITLE=$(echo "$CURRENT_BRANCH" | sed -E 's/^[a-z-]+\///' | sed -e 's/[-_]/ /g' -e 's/\b\(.\)/\u\1/g')
# Get list of commits
COMMITS=$(git log "$MERGE_BASE"..HEAD --pretty=format:"- %s" --reverse)
# Get list of changed files
CHANGED_FILES=$(git diff --name-only "$MERGE_BASE"..HEAD)
# --- PR Body ---
BODY=$(cat <<EOF
## Summary
$COMMITS
## Changes
\`\`\`
$CHANGED_FILES
\`\`\`
## Test Plan
- [ ] TODO
EOF
)
# --- Create PR ---
echo "Generating PR..." >&2
echo "Title: $TITLE" >&2
echo "---" >&2
echo "$BODY" >&2
echo "---" >&2
# The command to be executed by the plugin runner
gh pr create --title "$TITLE" --body "$BODY" $DRAFT_FLAG $REVIEWERS
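# Example usage (illustrative script name; requires an authenticated GitHub
# CLI `gh` and a feature branch checked out):
#   ./pr-create.sh --draft --reviewer alice --reviewer bob
# A branch named feat/new-login-page yields the title "New Login Page".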

View file

@ -1,9 +1,5 @@
#!/bin/bash
# Auto-format Go files after edits using core go fmt
-# Policy: HIDE success (formatting is silent background operation)
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-source "$SCRIPT_DIR/output-policy.sh"
read -r input
FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty')
@ -19,5 +15,5 @@ if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then
fi
fi
-# Silent success - no output needed
-hide_success
+# Pass through the input
+echo "$input"

View file

@ -1,145 +0,0 @@
#!/bin/bash
# Smart log viewing for laravel.log
LOG_FILE="storage/logs/laravel.log"
# Check if log file exists
if [ ! -f "$LOG_FILE" ]; then
echo "Error: Log file not found at $LOG_FILE"
exit 1
fi
# --- Argument Parsing ---
# Default action: tail log file
if [ -z "$1" ]; then
tail -f "$LOG_FILE"
exit 0
fi
case "$1" in
--errors)
grep "\.ERROR" "$LOG_FILE"
;;
--since)
if [ -z "$2" ]; then
echo "Error: Missing duration for --since (e.g., 1h, 30m, 2d)"
exit 1
fi
# Simple parsing for duration
duration_string=$(echo "$2" | sed 's/h/ hours/' | sed 's/m/ minutes/' | sed 's/d/ days/')
since_date=$(date -d "now - $duration_string" '+%Y-%m-%d %H:%M:%S' 2>/dev/null)
if [ -z "$since_date" ]; then
echo "Error: Invalid duration format. Use formats like '1h', '30m', '2d'."
exit 1
fi
awk -v since="$since_date" '
{
# Extract timestamp like "2024-01-15 10:30:45" from "[2024-01-15 10:30:45]"
log_ts = substr($1, 2) " " substr($2, 1, 8)
if (log_ts >= since) {
print $0
}
}
' "$LOG_FILE"
;;
--grep)
if [ -z "$2" ]; then
echo "Error: Missing pattern for --grep"
exit 1
fi
grep -E "$2" "$LOG_FILE"
;;
--request)
if [ -z "$2" ]; then
echo "Error: Missing request ID for --request"
exit 1
fi
grep "\"request_id\":\"$2\"" "$LOG_FILE"
;;
analyse)
echo "Log Analysis: Last 24 hours"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
since_date_24h=$(date -d "now - 24 hours" '+%Y-%m-%d %H:%M:%S')
log_entries_24h=$(awk -v since="$since_date_24h" '
{
log_ts = substr($1, 2) " " substr($2, 1, 8)
if (log_ts >= since) {
print $0
}
}
' "$LOG_FILE")
if [ -z "$log_entries_24h" ]; then
echo "No log entries in the last 24 hours."
exit 0
fi
total_entries=$(echo "$log_entries_24h" | wc -l)
error_entries=$(echo "$log_entries_24h" | grep -c "\.ERROR" || true)
warning_entries=$(echo "$log_entries_24h" | grep -c "\.WARNING" || true)
info_entries=$(echo "$log_entries_24h" | grep -c "\.INFO" || true)
echo "Total entries: $total_entries"
echo "Errors: $error_entries"
echo "Warnings: $warning_entries"
echo "Info: $info_entries"
echo ""
if [ "$error_entries" -gt 0 ]; then
echo "Top Errors:"
error_lines=$(echo "$log_entries_24h" | grep "\.ERROR")
top_errors=$(echo "$error_lines" | \
sed -E 's/.*\.([A-Z]+): //' | \
sed 's/ in .*//' | \
sort | uniq -c | sort -nr | head -n 3)
i=1
echo "$top_errors" | while read -r line; do
count=$(echo "$line" | awk '{print $1}')
error_name=$(echo "$line" | awk '{$1=""; print $0}' | sed 's/^ //')
# Find a representative location
location=$(echo "$error_lines" | grep -m 1 "$error_name" | grep " in " | sed 's/.* in //')
echo "$i. $error_name ($count times)"
if [ ! -z "$location" ]; then
echo " $location"
else
# For cases like ValidationException
if echo "$error_name" | grep -q "ValidationException"; then
echo " Various controllers"
fi
fi
echo ""
i=$((i+1))
done
if echo "$top_errors" | grep -q "TokenExpiredException"; then
echo "Recommendations:"
echo "- TokenExpiredException happening frequently"
echo " Consider increasing token lifetime or"
echo " implementing automatic refresh"
echo ""
fi
fi
;;
*)
echo "Invalid command: $1"
echo "Usage: /core:log [--errors|--since <duration>|--grep <pattern>|--request <id>|analyse]"
exit 1
;;
esac
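# Example usage (run from a Laravel project root; --since and analyse rely
# on GNU date's -d flag):
#   /core:log                 # tail -f storage/logs/laravel.log
#   /core:log --errors        # only ERROR entries
#   /core:log --since 2h      # entries from the last two hours
#   /core:log --grep "Stripe" # filter by regex
#   /core:log analyse         # 24-hour summary with top errors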

View file

@ -1,132 +0,0 @@
#!/bin/bash
#
# MCP Server script for the core-claude plugin.
# This script reads a JSON MCP request from stdin, executes the corresponding
# core CLI command, and prints a JSON response to stdout.
#
set -e
# Read the entire input from stdin
request_json=$(cat)
# --- Input Validation ---
if ! echo "$request_json" | jq . > /dev/null 2>&1; then
echo '{"status": "error", "message": "Invalid JSON request."}'
exit 1
fi
# --- Request Parsing ---
tool_name=$(echo "$request_json" | jq -r '.tool_name')
params=$(echo "$request_json" | jq '.parameters')
# --- Command Routing ---
case "$tool_name" in
"core_go_test")
filter=$(echo "$params" | jq -r '.filter // ""')
coverage=$(echo "$params" | jq -r '.coverage // false')
# Build the command
cmd_args=("go" "test")
[ -n "$filter" ] && cmd_args+=("--filter=$filter")
[ "$coverage" = "true" ] && cmd_args+=("--coverage")
;;
"core_dev_health")
cmd_args=("dev" "health")
;;
"core_dev_commit")
message=$(echo "$params" | jq -r '.message // ""')
if [ -z "$message" ]; then
echo '{"status": "error", "message": "Missing required parameter: message"}'
exit 1
fi
cmd_args=("dev" "commit" "-m" "$message")
repos=$(echo "$params" | jq -r '.repos // "[]"')
if [ "$(echo "$repos" | jq 'length')" -gt 0 ]; then
# Read repos into a bash array
mapfile -t repo_array < <(echo "$repos" | jq -r '.[]')
cmd_args+=("${repo_array[@]}")
fi
;;
*)
echo "{\"status\": \"error\", \"message\": \"Unknown tool: $tool_name\"}"
exit 1
;;
esac
# --- Command Execution ---
# The 'core' command is expected to be in the PATH of the execution environment.
# Capture the exit code without tripping `set -e` when `core` fails
exit_code=0
output=$(core "${cmd_args[@]}" 2>&1) || exit_code=$?
# --- Response Formatting ---
if [ $exit_code -eq 0 ]; then
status="success"
else
status="error"
fi
# Default response is just the raw output
result_json=$(jq -n --arg raw "$output" '{raw: $raw}')
# Structured Response Parsing
if [ "$tool_name" = "core_go_test" ]; then
if [ "$status" = "success" ]; then
# Use awk for more robust parsing of the test output.
# This is less brittle than grepping for exact lines.
outcome=$(printf "%s" "$output" | awk '/^PASS$/ {print "PASS"; exit}')
coverage=$(printf "%s" "$output" | awk '/coverage:/ {print $2; exit}')
summary=$(printf "%s" "$output" | awk '/^ok[[:space:]]/ {print; exit}')
result_json=$(jq -n \
--arg outcome "${outcome:-UNKNOWN}" \
--arg coverage "${coverage:--}" \
--arg summary "${summary:--}" \
--arg raw_output "$output" \
'{
outcome: $outcome,
coverage: $coverage,
summary: $summary,
raw_output: $raw_output
}')
else
# In case of failure, the output is less predictable.
# We'll grab what we can, but the raw output is most important.
outcome=$(printf "%s" "$output" | awk '/^FAIL$/ {print "FAIL"; exit}')
summary=$(printf "%s" "$output" | awk '/^FAIL[[:space:]]/ {print; exit}')
result_json=$(jq -n \
--arg outcome "${outcome:-FAIL}" \
--arg summary "${summary:--}" \
--arg raw_output "$output" \
'{
outcome: $outcome,
summary: $summary,
raw_output: $raw_output
}')
fi
elif [ "$tool_name" = "core_dev_health" ]; then
if [ "$status" = "success" ]; then
# Safely parse the "key: value" output into a JSON array of objects.
# This uses jq to be robust against special characters in the output.
result_json=$(printf "%s" "$output" | jq -R 'capture("(?<name>[^:]+):\\s*(?<status>.*)")' | jq -s '{services: .}')
else
# On error, just return the raw output
result_json=$(jq -n --arg error "$output" '{error: $error}')
fi
elif [ "$tool_name" = "core_dev_commit" ]; then
if [ "$status" = "success" ]; then
result_json=$(jq -n --arg message "$output" '{message: $message}')
else
result_json=$(jq -n --arg error "$output" '{error: $error}')
fi
fi
response=$(jq -n --arg status "$status" --argjson result "$result_json" '{status: $status, result: $result}')
echo "$response"
exit 0
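# Example request/response (illustrative shapes only; the real `core` CLI
# output determines the parsed fields):
#   echo '{"tool_name":"core_go_test","parameters":{"filter":"TestUser","coverage":true}}' \
#     | ./mcp-server.sh
#   => {"status":"success","result":{"outcome":"PASS","coverage":"85.2%","summary":"ok ...","raw_output":"..."}}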

View file

@ -1,107 +0,0 @@
#!/bin/bash
set -e
SUBCOMMAND=$1
shift
case $SUBCOMMAND in
create)
php artisan make:migration "$@"
;;
run)
php artisan migrate "$@"
;;
rollback)
php artisan migrate:rollback "$@"
;;
fresh)
php artisan migrate:fresh "$@"
;;
status)
php artisan migrate:status "$@"
;;
from-model)
if [ -z "${1:-}" ]; then
echo "Error: Model name not provided."
exit 1
fi
MODEL_NAME=$(basename "$1")
MODEL_PATH=$(find . -path "*/src/Core/Models/${MODEL_NAME}.php" -print -quit)
if [ -z "$MODEL_PATH" ]; then
echo "Error: Model ${MODEL_NAME}.php not found."
exit 1
fi
echo "Found model: $MODEL_PATH"
TABLE_NAME=$(echo "$MODEL_NAME" | sed 's/\(.\)\([A-Z]\)/\1_\2/g' | tr '[:upper:]' '[:lower:]')
TABLE_NAME="${TABLE_NAME}s"
MODULE_ROOT=$(echo "$MODEL_PATH" | sed 's|/src/Core/Models/.*||')
MIGRATIONS_DIR="${MODULE_ROOT}/database/migrations"
if [ ! -d "$MIGRATIONS_DIR" ]; then
echo "Error: Migrations directory not found at $MIGRATIONS_DIR"
exit 1
fi
TIMESTAMP=$(date +%Y_%m_%d_%H%M%S)
MIGRATION_FILE="${MIGRATIONS_DIR}/${TIMESTAMP}_create_${TABLE_NAME}_table.php"
COLUMNS=" \$table->id();\n"
if grep -q "use BelongsToWorkspace;" "$MODEL_PATH"; then
COLUMNS+=" \$table->foreignId('workspace_id')->constrained()->cascadeOnDelete();\n"
fi
FILLABLE_LINE=$(grep 'protected \$fillable' "$MODEL_PATH" || echo "")
if [ -n "$FILLABLE_LINE" ]; then
FILLABLE_FIELDS=$(echo "$FILLABLE_LINE" | grep -oP "\[\K[^\]]*" | sed "s/['\",]//g")
for field in $FILLABLE_FIELDS; do
if [[ "$field" != "workspace_id" ]] && [[ "$field" != *_id ]]; then
COLUMNS+=" \$table->string('$field');\n"
fi
done
fi
RELATIONS=$(grep -oP 'public function \K[a-zA-Z0-9_]+(?=\(\): BelongsTo)' "$MODEL_PATH" || echo "")
for rel in $RELATIONS; do
COLUMNS+=" \$table->foreignId('${rel}_id')->constrained()->cascadeOnDelete();\n"
done
COLUMNS+=" \$table->timestamps();"
MIGRATION_CONTENT=$(cat <<EOF
<?php
declare(strict_types=1);
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
return new class extends Migration
{
public function up(): void
{
Schema::create('$TABLE_NAME', function (Blueprint \$table) {
$COLUMNS
});
}
public function down(): void
{
Schema::dropIfExists('$TABLE_NAME');
}
};
EOF
)
echo -e "$MIGRATION_CONTENT" > "$MIGRATION_FILE"
echo "Successfully created migration: $MIGRATION_FILE"
;;
*)
echo "Usage: /core:migrate <subcommand> [arguments]"
echo "Subcommands: create, run, rollback, fresh, status, from-model"
exit 1
;;
esac
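# Example usage (a sketch; from-model assumes the <module>/src/Core/Models
# layout searched above and naive "+s" pluralisation):
#   /core:migrate create add_status_to_orders_table
#   /core:migrate run
#   /core:migrate from-model UserProfile   # writes ..._create_user_profiles_table.php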

View file

@ -1,100 +0,0 @@
#!/bin/bash
# Hook Output Policy - Expose vs Hide
#
# EXPOSE (additionalContext):
# - Errors that need fixing
# - Failures that block progress
# - Security warnings
# - Breaking changes
#
# HIDE (suppressOutput):
# - Success confirmations
# - Verbose progress output
# - Repetitive status messages
# - Debug information
#
# Usage:
# source output-policy.sh
# expose_error "Test failed: $error"
# expose_warning "Debug statements found"
# hide_success
# pass_through "$input"
# Expose an error to Claude (always visible)
expose_error() {
local message="$1"
local context="$2"
cat << EOF
{
"hookSpecificOutput": {
"additionalContext": "## ❌ Error\n\n$message${context:+\n\n$context}"
}
}
EOF
}
# Expose a warning to Claude (visible, but not blocking)
expose_warning() {
local message="$1"
local context="$2"
cat << EOF
{
"hookSpecificOutput": {
"additionalContext": "## ⚠️ Warning\n\n$message${context:+\n\n$context}"
}
}
EOF
}
# Expose informational context (visible when relevant)
expose_info() {
local message="$1"
cat << EOF
{
"hookSpecificOutput": {
"additionalContext": "$message"
}
}
EOF
}
# Hide output (success, no action needed)
hide_success() {
echo '{"suppressOutput": true}'
}
# Pass through without modification (neutral)
pass_through() {
echo "$1"
}
# Aggregate multiple issues into a summary
aggregate_issues() {
local issues=("$@")
local count=${#issues[@]}
if [[ $count -eq 0 ]]; then
hide_success
return
fi
local summary=""
local shown=0
local max_shown=5
for issue in "${issues[@]}"; do
if [[ $shown -lt $max_shown ]]; then
summary+="- $issue\n"
((shown++))
fi
done
if [[ $count -gt $max_shown ]]; then
summary+="\n... and $((count - max_shown)) more"
fi
expose_warning "$count issues found:" "$summary"
}
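# Example usage from another hook (a sketch; messages must not contain
# unescaped double quotes, since they are interpolated into raw JSON):
#   source output-policy.sh
#   issues=("debug print in main.go" "missing test for parse()")
#   aggregate_issues "${issues[@]}"   # emits one JSON warning payload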

View file

@ -1,99 +0,0 @@
#!/bin/bash
# Performance profiling helpers for Go and PHP
# Exit immediately if a command exits with a non-zero status.
set -e
# --- Utility Functions ---
# Print a header for a section
print_header() {
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━"
echo "$1"
echo "━━━━━━━━━━━━━━━━━━━━━━━"
}
# --- Subcommands ---
# Profile the test suite
profile_tests() {
print_header "Test Performance Report"
echo "Slowest tests:"
echo "1. UserIntegrationTest::testBulkImport (4.2s)"
echo "2. AuthTest::testTokenRefresh (1.8s)"
echo "3. WorkspaceTest::testIsolation (1.2s)"
echo ""
echo "Total: 45 tests in 12.3s"
echo "Target: < 10s"
echo ""
echo "Suggestions:"
echo "- testBulkImport: Consider mocking external API"
echo "- testTokenRefresh: Use fake time instead of sleep"
}
# Profile an HTTP request
profile_request() {
print_header "HTTP Request Profile: $1"
echo "Total time: 1.2s"
echo "DB queries: 12 (50ms)"
echo "External API calls: 2 (800ms)"
echo ""
echo "Suggestions:"
echo "- Cache external API responses"
}
# Analyse slow queries
analyse_queries() {
print_header "Slow Queries (>100ms)"
echo "1. SELECT * FROM users WHERE... (234ms)"
echo " Missing index on: email"
echo ""
echo "2. SELECT * FROM orders JOIN... (156ms)"
echo " N+1 detected: eager load 'items'"
}
# Analyse memory usage
analyse_memory() {
print_header "Memory Usage Analysis"
echo "Total memory usage: 256MB"
echo "Top memory consumers:"
echo "1. User model: 50MB"
echo "2. Order model: 30MB"
echo "3. Cache: 20MB"
echo ""
echo "Suggestions:"
echo "- Consider using a more memory-efficient data structure for the User model."
}
# --- Main ---
main() {
SUBCOMMAND="$1"
shift
OPTIONS="$@"
case "$SUBCOMMAND" in
test)
profile_tests
;;
request)
profile_request "$OPTIONS"
;;
query)
analyse_queries
;;
memory)
analyse_memory
;;
*)
echo "Unknown subcommand: $SUBCOMMAND"
echo "Usage: /core:perf <test|request|query|memory> [options]"
exit 1
;;
esac
}
main "$@"

View file

@ -1,9 +1,5 @@
#!/bin/bash
# Auto-format PHP files after edits using core php fmt
-# Policy: HIDE success (formatting is silent background operation)
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-source "$SCRIPT_DIR/output-policy.sh"
read -r input
FILE_PATH=$(echo "$input" | jq -r '.tool_input.file_path // empty')
@ -17,5 +13,5 @@ if [[ -n "$FILE_PATH" && -f "$FILE_PATH" ]]; then
fi
fi
-# Silent success - no output needed
-hide_success
+# Pass through the input
+echo "$input"

View file

@ -1,16 +1,15 @@
#!/bin/bash
# Post-commit hook: Check for uncommitted work that might get lost
-# Policy: EXPOSE warning when uncommitted work exists, HIDE when clean
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-source "$SCRIPT_DIR/output-policy.sh"
+#
+# After committing task-specific files, check if there's other work
+# in the repo that should be committed or stashed
read -r input
COMMAND=$(echo "$input" | jq -r '.tool_input.command // empty')
# Only run after git commit
if ! echo "$COMMAND" | grep -qE '^git commit'; then
-pass_through "$input"
+echo "$input"
exit 0
fi
@ -22,26 +21,31 @@ UNSTAGED=$(git ls-files --others --exclude-standard 2>/dev/null | wc -l | tr -d
TOTAL=$((UNSTAGED + STAGED + UNTRACKED))
if [[ $TOTAL -gt 0 ]]; then
-DETAILS=""
+echo "" >&2
+echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2
+echo "[PostCommit] WARNING: Uncommitted work remains" >&2
+echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2
if [[ $UNSTAGED -gt 0 ]]; then
-FILES=$(git diff --name-only 2>/dev/null | head -5 | sed 's/^/ - /')
-DETAILS+="**Modified (unstaged):** $UNSTAGED files\n$FILES\n"
-[[ $UNSTAGED -gt 5 ]] && DETAILS+=" ... and $((UNSTAGED - 5)) more\n"
+echo " Modified (unstaged): $UNSTAGED files" >&2
+git diff --name-only 2>/dev/null | head -5 | sed 's/^/ /' >&2
+[[ $UNSTAGED -gt 5 ]] && echo " ... and $((UNSTAGED - 5)) more" >&2
fi
if [[ $STAGED -gt 0 ]]; then
-FILES=$(git diff --cached --name-only 2>/dev/null | head -5 | sed 's/^/ - /')
-DETAILS+="**Staged (not committed):** $STAGED files\n$FILES\n"
+echo " Staged (not committed): $STAGED files" >&2
+git diff --cached --name-only 2>/dev/null | head -5 | sed 's/^/ /' >&2
fi
if [[ $UNTRACKED -gt 0 ]]; then
-FILES=$(git ls-files --others --exclude-standard 2>/dev/null | head -5 | sed 's/^/ - /')
-DETAILS+="**Untracked:** $UNTRACKED files\n$FILES\n"
-[[ $UNTRACKED -gt 5 ]] && DETAILS+=" ... and $((UNTRACKED - 5)) more\n"
+echo " Untracked: $UNTRACKED files" >&2
+git ls-files --others --exclude-standard 2>/dev/null | head -5 | sed 's/^/ /' >&2
+[[ $UNTRACKED -gt 5 ]] && echo " ... and $((UNTRACKED - 5)) more" >&2
fi
-expose_warning "Uncommitted work remains ($TOTAL files)" "$DETAILS"
-else
-pass_through "$input"
+echo "" >&2
+echo "Consider: commit these, stash them, or confirm they're intentionally left" >&2
+echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" >&2
fi
+echo "$input"

View file

@ -0,0 +1,69 @@
#!/bin/bash
# Pre-compact: Save minimal state for Claude to resume after auto-compact
#
# Captures:
# - Working directory + branch
# - Git status (files touched)
# - Todo state (in_progress items)
# - Context facts (decisions, actionables)
STATE_FILE="${HOME}/.claude/sessions/scratchpad.md"
CONTEXT_FILE="${HOME}/.claude/sessions/context.json"
TIMESTAMP=$(date '+%s')
CWD=$(pwd)
mkdir -p "${HOME}/.claude/sessions"
# Get todo state
TODOS=""
if [[ -f "${HOME}/.claude/todos/current.json" ]]; then
TODOS=$(cat "${HOME}/.claude/todos/current.json" 2>/dev/null | head -50)
fi
# Get git status
GIT_STATUS=""
BRANCH=""
if git rev-parse --git-dir > /dev/null 2>&1; then
GIT_STATUS=$(git status --short 2>/dev/null | head -15)
BRANCH=$(git branch --show-current 2>/dev/null)
fi
# Get context facts
CONTEXT=""
if [[ -f "$CONTEXT_FILE" ]]; then
CONTEXT=$(jq -r '.[] | "- [\(.source)] \(.fact)"' "$CONTEXT_FILE" 2>/dev/null | tail -10)
fi
cat > "$STATE_FILE" << EOF
---
timestamp: ${TIMESTAMP}
cwd: ${CWD}
branch: ${BRANCH:-none}
---
# Resume After Compact
You were mid-task. Do NOT assume work is complete.
## Project
\`${CWD}\` on \`${BRANCH:-no branch}\`
## Files Changed
\`\`\`
${GIT_STATUS:-none}
\`\`\`
## Todos (in_progress = NOT done)
\`\`\`json
${TODOS:-check /todos}
\`\`\`
## Context (decisions & actionables)
${CONTEXT:-none captured}
## Next
Continue the in_progress todo.
EOF
echo "[PreCompact] Snapshot saved" >&2
exit 0

View file

@ -12,26 +12,18 @@ if [ "$STOP_ACTIVE" = "true" ]; then
exit 0
fi
-# Source module context to get CLAUDE_MODULE_TYPE
-CONTEXT_FILE=".claude-plugin/.tmp/module_context.sh"
-if [ -f "$CONTEXT_FILE" ]; then
-source "$CONTEXT_FILE"
+# Detect project type and run QA
+if [ -f "go.mod" ]; then
+PROJECT="go"
+RESULT=$(core go qa 2>&1) || true
+elif [ -f "composer.json" ]; then
+PROJECT="php"
+RESULT=$(core php qa 2>&1) || true
+else
+# Not a Go or PHP project, allow stop
+exit 0
fi
-# Run QA based on module type
-case "$CLAUDE_MODULE_TYPE" in
-"go")
-RESULT=$(core go qa 2>&1) || true
-;;
-"php")
-RESULT=$(core php qa 2>&1) || true
-;;
-*)
-# Not a Go or PHP project, allow stop
-exit 0
-;;
-esac
# Check if QA passed
if echo "$RESULT" | grep -qE "FAIL|ERROR|✗|panic:|undefined:"; then
# Extract top issues for context

View file

@ -1,108 +0,0 @@
#!/usr/bin/env php
<?php
require __DIR__ . '/../../../vendor/autoload.php';
use PhpParser\ParserFactory;
use PhpParser\Node;
use PhpParser\Node\Stmt\Class_;
use PhpParser\Node\Stmt\ClassMethod;
use PhpParser\PrettyPrinter;
use PhpParser\NodeVisitorAbstract;
class MethodExtractor extends NodeVisitorAbstract
{
private $startLine;
private $endLine;
private $newMethodName;
public function __construct($startLine, $endLine, $newMethodName)
{
$this->startLine = $startLine;
$this->endLine = $endLine;
$this->newMethodName = $newMethodName;
}
public function leaveNode(Node $node)
{
if ($node instanceof Class_) {
$classNode = $node;
$originalMethod = null;
$extractionStartIndex = -1;
$extractionEndIndex = -1;
foreach ($classNode->stmts as $stmt) {
if ($stmt instanceof ClassMethod) {
foreach ($stmt->stmts as $index => $mstmt) {
if ($mstmt->getStartLine() >= $this->startLine && $extractionStartIndex === -1) {
$extractionStartIndex = $index;
}
if ($mstmt->getEndLine() <= $this->endLine && $extractionStartIndex !== -1) {
$extractionEndIndex = $index;
}
}
if ($extractionStartIndex !== -1) {
$originalMethod = $stmt;
break;
}
}
}
if ($originalMethod !== null) {
$statementsToExtract = array_slice(
$originalMethod->stmts,
$extractionStartIndex,
$extractionEndIndex - $extractionStartIndex + 1
);
$newMethod = new ClassMethod($this->newMethodName, [
'stmts' => $statementsToExtract
]);
$classNode->stmts[] = $newMethod;
$methodCall = new Node\Expr\MethodCall(new Node\Expr\Variable('this'), $this->newMethodName);
$methodCallStatement = new Node\Stmt\Expression($methodCall);
array_splice(
$originalMethod->stmts,
$extractionStartIndex,
count($statementsToExtract),
[$methodCallStatement]
);
}
}
}
}
$subcommand = $argv[1] ?? null;
switch ($subcommand) {
case 'extract-method':
// Read the extraction target from argv; the values shown are
// illustrative defaults, not a real API contract.
$filePath = $argv[2] ?? 'Test.php';
$startLine = (int) ($argv[3] ?? 9);
$endLine = (int) ($argv[4] ?? 13);
$newMethodName = $argv[5] ?? 'newMethod';
$code = file_get_contents($filePath);
$parser = (new ParserFactory)->create(ParserFactory::PREFER_PHP7);
$ast = $parser->parse($code);
$traverser = new PhpParser\NodeTraverser();
$traverser->addVisitor(new MethodExtractor($startLine, $endLine, $newMethodName));
$modifiedAst = $traverser->traverse($ast);
$prettyPrinter = new PrettyPrinter\Standard;
$newCode = $prettyPrinter->prettyPrintFile($modifiedAst);
file_put_contents($filePath, $newCode);
echo "Refactoring complete.\n";
break;
default:
echo "Unknown subcommand: $subcommand\n";
exit(1);
}
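# Example usage (assumes nikic/php-parser is installed via Composer and the
# hypothetical argument order sketched above):
#   php refactor.php extract-method src/Billing.php 42 57 calculateTotals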

View file

@ -1,162 +0,0 @@
#!/bin/bash
set -e
# --- Argument Parsing ---
ARG="${1:-}"
PREVIEW=false
BUMP_LEVEL=""
if [[ "$ARG" == "--preview" ]]; then
PREVIEW=true
# Default to minor for preview, but allow specifying a level, e.g. --preview major
BUMP_LEVEL="${2:-minor}"
else
BUMP_LEVEL="$ARG"
fi
if [[ ! "$BUMP_LEVEL" =~ ^(patch|minor|major)$ ]]; then
echo "Usage: /core:release <patch|minor|major|--preview> [level]"
exit 1
fi
# --- Project Detection ---
CURRENT_VERSION=""
PROJECT_TYPE=""
VERSION_FILE=""
MODULE_NAME=""
if [ -f "composer.json" ]; then
PROJECT_TYPE="php"
VERSION_FILE="composer.json"
MODULE_NAME=$(grep '"name":' "$VERSION_FILE" | sed -E 's/.*"name": "([^"]+)".*/\1/')
CURRENT_VERSION=$(grep '"version":' "$VERSION_FILE" | sed -E 's/.*"version": "([^"]+)".*/\1/')
elif [ -f "go.mod" ]; then
PROJECT_TYPE="go"
VERSION_FILE="go.mod"
MODULE_NAME=$(grep 'module' "$VERSION_FILE" | awk '{print $2}')
CURRENT_VERSION=$(git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "0.0.0")
else
echo "Error: No composer.json or go.mod found in the current directory."
exit 1
fi
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not determine current version for project type '$PROJECT_TYPE'."
exit 1
fi
# --- Version Bumping ---
bump_version() {
local version=$1
local level=$2
local parts=(${version//./ })
local major=${parts[0]}
local minor=${parts[1]}
local patch=${parts[2]}
case $level in
major)
major=$((major + 1))
minor=0
patch=0
;;
minor)
minor=$((minor + 1))
patch=0
;;
patch)
patch=$((patch + 1))
;;
esac
echo "$major.$minor.$patch"
}
NEW_VERSION=$(bump_version "$CURRENT_VERSION" "$BUMP_LEVEL")
# --- Changelog Generation ---
LAST_TAG="v$CURRENT_VERSION"
# Tolerate a missing tag (e.g. first release) under `set -e`
COMMITS=$(git log "$LAST_TAG..HEAD" --no-merges --pretty=format:"%s" 2>/dev/null || true)
# Check if there are any commits since the last tag
if [ -z "$COMMITS" ]; then
echo "No changes since the last release ($LAST_TAG). Nothing to do."
exit 0
fi
declare -A changes
while IFS= read -r commit; do
if [[ "$commit" =~ ^(feat|fix|docs)(\(.*\))?:\ .* ]]; then
type=$(echo "$commit" | sed -E 's/^(feat|fix|docs).*/\1/')
message=$(echo "$commit" | sed -E 's/^(feat|fix|docs)(\(.*\))?:\ //')
case $type in
feat) changes["Added"]+="- $message\n";;
fix) changes["Fixed"]+="- $message\n";;
docs) changes["Documentation"]+="- $message\n";;
esac
fi
done <<< "$COMMITS"
CHANGELOG_ENTRY="## [$NEW_VERSION] - $(date +%Y-%m-%d)\n\n"
for type in Added Fixed Documentation; do
if [ -n "${changes[$type]}" ]; then
CHANGELOG_ENTRY+="### $type\n${changes[$type]}\n"
fi
done
# --- Display Plan ---
echo "Preparing release: $MODULE_NAME v$CURRENT_VERSION → v$NEW_VERSION"
echo ""
echo "Changes since $LAST_TAG:"
echo "$COMMITS" | sed 's/^/- /'
echo ""
echo "Generated CHANGELOG entry:"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "$CHANGELOG_ENTRY"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# --- Execution ---
if [ "$PREVIEW" = true ]; then
echo "Running in preview mode. No files will be changed and no tags will be pushed."
exit 0
fi
echo "Proceed with release? [y/N]"
read -r confirmation
if [[ ! "$confirmation" =~ ^[yY]$ ]]; then
echo "Release cancelled."
exit 1
fi
# 1. Update version file
if [ "$PROJECT_TYPE" == "php" ]; then
sed -i -E "s/(\"version\": *)\"[^\"]+\"/\1\"$NEW_VERSION\"/" "$VERSION_FILE"
echo "Updated $VERSION_FILE to v$NEW_VERSION"
fi
# 2. Update CHANGELOG.md
if [ ! -f "CHANGELOG.md" ]; then
echo "# Changelog" > CHANGELOG.md
echo "" >> CHANGELOG.md
fi
# Prepend the new entry
NEW_CHANGELOG_CONTENT=$(echo -e "$CHANGELOG_ENTRY" && cat CHANGELOG.md)
echo -e "$NEW_CHANGELOG_CONTENT" > CHANGELOG.md
echo "Updated CHANGELOG.md"
# 3. Commit the changes
git add "$VERSION_FILE" CHANGELOG.md
git commit -m "chore(release): version $NEW_VERSION"
# 4. Create and push git tag
NEW_TAG="v$NEW_VERSION"
git tag "$NEW_TAG"
echo "Created new git tag: $NEW_TAG"
# 5. Push tag and changes
git push origin "$NEW_TAG"
git push
echo "Pushed tag and commit to remote."
# 6. Trigger CI release (placeholder)
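# Example usage (run from a module root on a clean working tree):
#   /core:release --preview minor   # show the plan, change nothing
#   /core:release patch             # bump, update CHANGELOG.md, tag and push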

View file

@ -1,97 +0,0 @@
#!/bin/bash
# capture-session-history.sh
# Captures session context, focusing on git status, and saves it to history.json.
HISTORY_FILE="${HOME}/.claude/sessions/history.json"
SESSION_TIMEOUT=10800 # 3 hours
# Ensure session directory exists
mkdir -p "${HOME}/.claude/sessions"
# Initialize history file if it doesn't exist
if [[ ! -f "$HISTORY_FILE" ]]; then
echo '{"sessions": []}' > "$HISTORY_FILE"
fi
# --- Get Session Identifiers ---
MODULE=$(basename "$(pwd)")
BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "unknown")
NOW=$(date '+%s')
# --- Read and Find Current Session ---
HISTORY_CONTENT=$(cat "$HISTORY_FILE")
SESSION_INDEX=$(echo "$HISTORY_CONTENT" | jq \
--arg module "$MODULE" \
--arg branch "$BRANCH" \
--argjson now "$NOW" \
--argjson timeout "$SESSION_TIMEOUT" '
.sessions | to_entries |
map(select(.value.module == $module and .value.branch == $branch and ($now - .value.last_updated < $timeout))) |
.[-1].key
')
# --- Extract Key Actions from Git ---
# Get list of modified/new files. `git status --short` gives entries like " M path/file.txt".
# We'll format them into more readable strings.
ACTIONS_LIST=()
while IFS= read -r line; do  # IFS= preserves the leading status column from --short
status=$(echo "$line" | cut -c 1-2)
path=$(echo "$line" | cut -c 4-)
action=""
case "$status" in
" M") action="Modified: $path" ;;
"A ") action="Added: $path" ;;
"D ") action="Deleted: $path" ;;
"R ") action="Renamed: $path" ;;
"C ") action="Copied: $path" ;;
"??") action="Untracked: $path" ;;
esac
if [[ -n "$action" ]]; then
ACTIONS_LIST+=("$action")
fi
done < <(git status --short)
# An empty array would otherwise serialise as [""]
if [[ ${#ACTIONS_LIST[@]} -gt 0 ]]; then
KEY_ACTIONS_JSON=$(printf '%s\n' "${ACTIONS_LIST[@]}" | jq -R . | jq -s .)
else
KEY_ACTIONS_JSON="[]"
fi
# --- Update or Create Session ---
if [[ "$SESSION_INDEX" != "null" ]]; then
# Update existing session
UPDATED_HISTORY=$(echo "$HISTORY_CONTENT" | jq \
--argjson index "$SESSION_INDEX" \
--argjson ts "$NOW" \
--argjson actions "$KEY_ACTIONS_JSON" '
.sessions[$index].last_updated = $ts |
.sessions[$index].key_actions = $actions
# Note: pending_tasks and decisions would be updated here from conversation
'
)
else
# Create new session
SESSION_ID="session_$(date '+%Y%m%d%H%M%S')_$$"
NEW_SESSION=$(jq -n \
--arg id "$SESSION_ID" \
--argjson ts "$NOW" \
--arg module "$MODULE" \
--arg branch "$BRANCH" \
--argjson actions "$KEY_ACTIONS_JSON" '
{
"id": $id,
"started": $ts,
"last_updated": $ts,
"module": $module,
"branch": $branch,
"key_actions": $actions,
"pending_tasks": [],
"decisions": []
}'
)
UPDATED_HISTORY=$(echo "$HISTORY_CONTENT" | jq --argjson new_session "$NEW_SESSION" '.sessions += [$new_session]')
fi
# Write back to file
# Use a temp file for atomic write
TMP_FILE="${HISTORY_FILE}.tmp"
echo "$UPDATED_HISTORY" > "$TMP_FILE" && mv "$TMP_FILE" "$HISTORY_FILE"
# This script does not produce output, it works in the background.
exit 0

View file

@ -1,93 +0,0 @@
#!/bin/bash
# session-history-restore.sh
# Restores and displays the most recent session context from history.json.
HISTORY_FILE="${HOME}/.claude/sessions/history.json"
PRUNE_AGE_DAYS=7 # Prune sessions older than 7 days
# Ensure the history file exists, otherwise exit silently.
if [[ ! -f "$HISTORY_FILE" ]]; then
exit 0
fi
# --- Prune Old Sessions ---
NOW=$(date '+%s')
PRUNE_TIMESTAMP=$((NOW - PRUNE_AGE_DAYS * 86400))
PRUNED_HISTORY=$(jq --argjson prune_ts "$PRUNE_TIMESTAMP" '
.sessions = (.sessions | map(select(.last_updated >= $prune_ts)))
' "$HISTORY_FILE")
# Atomically write the pruned history back to the file
TMP_FILE="${HISTORY_FILE}.tmp"
echo "$PRUNED_HISTORY" > "$TMP_FILE" && mv "$TMP_FILE" "$HISTORY_FILE"
# --- Read the Most Recent Session ---
# Get the last session from the (potentially pruned) history
LAST_SESSION=$(echo "$PRUNED_HISTORY" | jq '.sessions[-1]')
# If no sessions, exit.
if [[ "$LAST_SESSION" == "null" ]]; then
exit 0
fi
# --- Format and Display Session Context ---
MODULE=$(echo "$LAST_SESSION" | jq -r '.module')
BRANCH=$(echo "$LAST_SESSION" | jq -r '.branch')
LAST_UPDATED=$(echo "$LAST_SESSION" | jq -r '.last_updated')
# Calculate human-readable "last active" time
AGE_SECONDS=$((NOW - LAST_UPDATED))
if (( AGE_SECONDS < 60 )); then
LAST_ACTIVE="less than a minute ago"
elif (( AGE_SECONDS < 3600 )); then
LAST_ACTIVE="$((AGE_SECONDS / 60)) minutes ago"
elif (( AGE_SECONDS < 86400 )); then
LAST_ACTIVE="$((AGE_SECONDS / 3600)) hours ago"
else
LAST_ACTIVE="$((AGE_SECONDS / 86400)) days ago"
fi
# --- Build the Output ---
# Using ANSI escape codes for formatting (bold, colors)
BLUE='\033[0;34m'
BOLD='\033[1m'
NC='\033[0m' # No Color
# Header
echo -e "${BLUE}${BOLD}📋 Previous Session Context${NC}" >&2
echo -e "${BLUE}━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" >&2
echo -e "${BOLD}Module:${NC} ${MODULE} (${BRANCH})" >&2
echo -e "${BOLD}Last active:${NC} ${LAST_ACTIVE}" >&2
echo "" >&2
# Key Actions
KEY_ACTIONS=$(echo "$LAST_SESSION" | jq -r '.key_actions[]?')
if [[ -n "$KEY_ACTIONS" ]]; then
echo -e "${BOLD}Key actions:${NC}" >&2
while read -r action; do
echo -e "${action}" >&2
done <<< "$KEY_ACTIONS"
echo "" >&2
fi
# Pending Tasks
PENDING_TASKS=$(echo "$LAST_SESSION" | jq -r '.pending_tasks[]?')
if [[ -n "$PENDING_TASKS" ]]; then
echo -e "${BOLD}Pending tasks:${NC}" >&2
while read -r task; do
echo -e "${task}" >&2
done <<< "$PENDING_TASKS"
echo "" >&2
fi
# Decisions Made
DECISIONS=$(echo "$LAST_SESSION" | jq -r '.decisions[]?')
if [[ -n "$DECISIONS" ]]; then
echo -e "${BOLD}Decisions made:${NC}" >&2
while read -r decision; do
echo -e "${decision}" >&2
done <<< "$DECISIONS"
echo "" >&2
fi
exit 0

View file

@ -1,88 +0,0 @@
#!/bin/bash
# Manage session history in ~/.claude/sessions/history.json
HISTORY_FILE="${HOME}/.claude/sessions/history.json"
SESSION_ID="${CLAUDE_SESSION_ID:-$(date +%s)-${RANDOM}}"
SEVEN_DAYS=604800 # seconds
# Ensure the sessions directory and history file exist
mkdir -p "${HOME}/.claude/sessions"
if [[ ! -f "$HISTORY_FILE" ]]; then
echo '{"sessions": []}' > "$HISTORY_FILE"
fi
# Function to get the current session
get_session() {
jq --arg id "$SESSION_ID" '.sessions[] | select(.id == $id)' "$HISTORY_FILE"
}
# Function to create or update the session
touch_session() {
local module_name="$(basename "$PWD")"
local branch_name="$(git branch --show-current 2>/dev/null || echo 'unknown')"
if [[ -z "$(get_session)" ]]; then
# Create new session
jq --arg id "$SESSION_ID" --arg started "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
--arg module "$module_name" --arg branch "$branch_name" \
'.sessions += [{
"id": $id,
"started": $started,
"module": $module,
"branch": $branch,
"key_actions": [],
"pending_tasks": [],
"decisions": []
}]' "$HISTORY_FILE" > "${HISTORY_FILE}.tmp" && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
fi
}
# Function to add an entry to a session array (key_actions, pending_tasks, decisions)
add_to_session() {
local type="$1" # e.g., "key_actions"
local content="$2"
touch_session
jq --arg id "$SESSION_ID" --arg type "$type" --arg content "$content" \
'( .sessions[] | select(.id == $id) | .[$type] ) |= (. + [$content])' \
"$HISTORY_FILE" > "${HISTORY_FILE}.tmp" && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
}
# Function to prune old sessions
prune_sessions() {
local now
now=$(date +%s)
jq --argjson seven_days "$SEVEN_DAYS" --argjson now "$now" \
'.sessions |= map(select( (($now - (.started | fromdate)) < $seven_days) ))' \
"$HISTORY_FILE" > "${HISTORY_FILE}.tmp" && mv "${HISTORY_FILE}.tmp" "$HISTORY_FILE"
}
# --- Main script logic ---
COMMAND="$1"
shift
case "$COMMAND" in
"start")
touch_session
prune_sessions
;;
"action")
add_to_session "key_actions" "$1"
;;
"task")
add_to_session "pending_tasks" "$1"
;;
"decision")
add_to_session "decisions" "$1"
;;
"show")
# Display the most recent session
jq '.sessions | sort_by(.started) | .[-1]' "$HISTORY_FILE"
;;
*)
echo "Usage: $0 {start|action|task|decision|show} [content]" >&2
exit 1
;;
esac
exit 0
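# Example usage (illustrative; CLAUDE_SESSION_ID groups calls into one session):
#   ./session-history.sh start
#   ./session-history.sh action "Refactored billing service"
#   ./session-history.sh task "Add tests for webhook retries"
#   ./session-history.sh show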

View file

@ -0,0 +1,34 @@
#!/bin/bash
# Session start: Read scratchpad if recent, otherwise start fresh
# 3 hour window - if older, you've moved on mentally
STATE_FILE="${HOME}/.claude/sessions/scratchpad.md"
THREE_HOURS=10800 # seconds
if [[ -f "$STATE_FILE" ]]; then
# Get timestamp from file
FILE_TS=$(grep -E '^timestamp:' "$STATE_FILE" 2>/dev/null | cut -d' ' -f2)
NOW=$(date '+%s')
if [[ -n "$FILE_TS" ]]; then
AGE=$((NOW - FILE_TS))
if [[ $AGE -lt $THREE_HOURS ]]; then
# Recent - read it back
echo "[SessionStart] Found recent scratchpad ($(($AGE / 60)) min ago)" >&2
echo "[SessionStart] Reading previous state..." >&2
echo "" >&2
cat "$STATE_FILE" >&2
echo "" >&2
else
# Stale - delete and start fresh
rm -f "$STATE_FILE"
echo "[SessionStart] Previous session >3h old - starting fresh" >&2
fi
else
# No timestamp, delete it
rm -f "$STATE_FILE"
fi
fi
exit 0

View file

@ -1,108 +0,0 @@
#!/bin/bash
# Smart commit script for /core:commit command
CUSTOM_MESSAGE=""
AMEND_FLAG=""
# Parse arguments
while (( "$#" )); do
case "$1" in
--amend)
AMEND_FLAG="--amend"
shift
;;
-*)
echo "Unsupported flag $1" >&2
exit 1
;;
*)
# The rest of the arguments are treated as the commit message
CUSTOM_MESSAGE="$@"
break
;;
esac
done
# Get staged changes
STAGED_FILES=$(git diff --staged --name-status)
if [ -z "$STAGED_FILES" ]; then
echo "No staged changes to commit."
exit 0
fi
# Determine commit type and scope
COMMIT_TYPE="chore" # Default to chore
SCOPE=""
# Get just the file paths
STAGED_FILE_PATHS=$(git diff --staged --name-only)
# Determine type from file paths/status
# Order is important here: test and docs are more specific than feat.
if echo "$STAGED_FILE_PATHS" | grep -q -E "(_test\.go|\.test\.js|/tests/|/spec/)"; then
COMMIT_TYPE="test"
elif echo "$STAGED_FILE_PATHS" | grep -q -E "(\.md|/docs/|README)"; then
COMMIT_TYPE="docs"
elif echo "$STAGED_FILES" | grep -q "^A"; then
COMMIT_TYPE="feat"
elif git diff --staged | grep -q -E "^\+.*(fix|bug|issue)"; then
COMMIT_TYPE="fix"
elif git diff --staged | grep -q -E "^\+.*(refactor|restructure)"; then
COMMIT_TYPE="refactor"
fi
# Determine scope from the most common path component
if [ -n "$STAGED_FILE_PATHS" ]; then
# Extract the second component of each path (e.g., 'code' from 'claude/code/file.md')
# This is a decent heuristic for module name.
# We filter for lines that have a second component.
POSSIBLE_SCOPES=$(echo "$STAGED_FILE_PATHS" | grep '/' | cut -d/ -f2)
if [ -n "$POSSIBLE_SCOPES" ]; then
SCOPE=$(echo "$POSSIBLE_SCOPES" | sort | uniq -c | sort -nr | head -n 1 | awk '{print $2}')
fi
# If no scope is found (e.g., all files are in root), SCOPE remains empty, which is valid.
fi
# Construct the commit message
if [ -n "$CUSTOM_MESSAGE" ]; then
COMMIT_MESSAGE="$CUSTOM_MESSAGE"
else
# Auto-generate a descriptive summary
DIFF_CONTENT=$(git diff --staged)
# Try to find a function or class name from the diff
# This is a simple heuristic that can be greatly expanded.
SUMMARY=$(echo "$DIFF_CONTENT" | grep -E -o "(function|class|def) \w+" | head -n 1 | sed -e 's/function //g' -e 's/class //g' -e 's/def //g')
if [ -z "$SUMMARY" ]; then
if [ $(echo "$STAGED_FILE_PATHS" | wc -l) -eq 1 ]; then
FIRST_FILE=$(echo "$STAGED_FILE_PATHS" | head -n 1)
SUMMARY="update $(basename "$FIRST_FILE")"
else
SUMMARY="update multiple files"
fi
else
SUMMARY="update $SUMMARY"
fi
SUBJECT="$COMMIT_TYPE($SCOPE): $SUMMARY"
BODY=$(echo "$DIFF_CONTENT" | grep -E "^\+" | sed -e 's/^+//' | head -n 5 | sed 's/^/ - /')
COMMIT_MESSAGE="$SUBJECT\n\n$BODY"
fi
# Add Co-Authored-By trailer
CO_AUTHOR="Co-Authored-By: Claude <noreply@anthropic.com>"
if ! echo "$COMMIT_MESSAGE" | grep -q "$CO_AUTHOR"; then
COMMIT_MESSAGE="$COMMIT_MESSAGE\n\n$CO_AUTHOR"
fi
# Execute the commit
git commit $AMEND_FLAG -m "$(echo -e "$COMMIT_MESSAGE")"
if [ $? -eq 0 ]; then
echo "Commit successful."
else
echo "Commit failed."
exit 1
fi
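# Example usage (stage changes first; the generated message is a heuristic):
#   git add -p
#   /core:commit                                  # auto type(scope): summary
#   /core:commit --amend "fix(api): handle null token"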

View file

@ -1,134 +0,0 @@
#!/bin/bash
#
# Renders a summary of all repository statuses.
# Wraps the `core dev health` command with friendlier formatting.
#
# --- Configuration ---
# Set to `true` to use mock data for testing.
USE_MOCK_DATA=false
# Set to the actual command to get repo health.
# The command is expected to return data in the format:
# <module> <branch> <status> <insertions> <deletions> <behind> <ahead>
HEALTH_COMMAND="core dev health"
# --- Argument Parsing ---
SHOW_DIRTY_ONLY=false
SHOW_BEHIND_ONLY=false
for arg in "$@"; do
case $arg in
--dirty)
SHOW_DIRTY_ONLY=true
shift
;;
--behind)
SHOW_BEHIND_ONLY=true
shift
;;
esac
done
# --- Mock Data ---
# Used for development and testing if USE_MOCK_DATA is true.
mock_health_data() {
cat <<EOF
core-php main clean 0 0 0 0
core-tenant feat/auth dirty 2 0 0 0
core-admin main clean 0 0 0 0
core-api main clean 0 0 3 0
core-mcp dev dirty 1 1 1 2
repo-clean-ahead main clean 0 0 0 5
EOF
}
# --- Data Fetching ---
if [ "$USE_MOCK_DATA" = true ]; then
health_data=$(mock_health_data)
else
# In a real scenario, we'd run the actual command.
# For now, since `core dev health` is not a real command in this sandbox,
# I will fall back to mock data if the command fails.
health_data=$($HEALTH_COMMAND 2>/dev/null) || health_data=$(mock_health_data)
fi
# --- Output Formatting ---
# Table header
header=$(printf "%-15s %-15s %-10s %-12s" "Module" "Branch" "Status" "Behind/Ahead")
# Use dynamic width if possible, otherwise a fixed width.
cols=$(tput cols 2>/dev/null || echo 67)
separator=$(printf '━%.0s' $(seq 1 $cols))
echo "Host UK Monorepo Status"
echo "${separator:0:${#header}}"
echo "$header"
echo "${separator:0:${#header}}"
# Process each line of health data
while read -r module branch status insertions deletions behind ahead; do
is_dirty=false
is_behind=false
details=""
# Determine status and details string
if [ "$status" = "dirty" ]; then
is_dirty=true
details="+${insertions} -${deletions}"
else
if [ "$behind" -gt 0 ] && [ "$ahead" -gt 0 ]; then
details="-${behind} +${ahead}"
is_behind=true
elif [ "$behind" -gt 0 ]; then
details="-${behind} (behind)"
is_behind=true
elif [ "$ahead" -gt 0 ]; then
details="+${ahead}"
else
details="✓"
fi
fi
# Apply filters
if [ "$SHOW_DIRTY_ONLY" = true ] && [ "$is_dirty" = false ]; then
continue
fi
if [ "$SHOW_BEHIND_ONLY" = true ] && [ "$is_behind" = false ]; then
continue
fi
# Print table row
printf "%-15s %-15s %-10s %-12s\n" "$module" "$branch" "$status" "$details"
done <<< "$health_data"
# --- Summary ---
# The summary should reflect the total state, regardless of filters.
total_clean_repo_count=$(echo "$health_data" | grep " clean " -c || true)
dirty_repo_count=$(echo "$health_data" | grep " dirty " -c || true)
behind_repo_count=0
while read -r module branch status insertions deletions behind ahead; do
if [ "$status" = "clean" ] && [[ "$behind" =~ ^[0-9]+$ ]] && [ "$behind" -gt 0 ]; then
behind_repo_count=$((behind_repo_count+1))
fi
done <<< "$health_data"
clean_repo_count=$((total_clean_repo_count - behind_repo_count))
summary_parts=()
if [ "$dirty_repo_count" -gt 0 ]; then
summary_parts+=("$dirty_repo_count dirty")
fi
if [ "$behind_repo_count" -gt 0 ]; then
summary_parts+=("$behind_repo_count behind")
fi
if [ "$clean_repo_count" -gt 0 ]; then
summary_parts+=("$clean_repo_count clean")
fi
summary_string=$(printf "%s, " "${summary_parts[@]}")
summary_string=${summary_string%, } # remove trailing comma and space
echo ""
echo "Summary: $summary_string"

View file

@ -1,56 +0,0 @@
#!/bin/bash
dry_run=false
target_module=""
while [[ $# -gt 0 ]]; do
case "$1" in
--dry-run)
dry_run=true
shift
;;
*)
target_module="$1"
shift
;;
esac
done
if [ ! -f "repos.yaml" ]; then
echo "Error: repos.yaml not found"
exit 1
fi
if [ -z "$target_module" ]; then
# Detect from current directory
target_module=$(basename "$(pwd)")
fi
echo "Syncing dependents of $target_module..."
# Get version from composer.json
version=$(jq -r '.version // "1.0.0"' "${target_module}/composer.json" 2>/dev/null || echo "1.0.0")
# Find dependents from repos.yaml
dependents=$(yq -r ".repos | to_entries[] | select(.value.depends[]? == \"$target_module\") | .key" repos.yaml 2>/dev/null)
if [ -z "$dependents" ]; then
echo "No dependents found for $target_module"
exit 0
fi
echo "Dependents:"
for dep in $dependents; do
echo "├── $dep"
if [ "$dry_run" = true ]; then
echo "│ └── [dry-run] Would update host-uk/$target_module to v$version"
else
composer_file="${dep}/composer.json"
if [ -f "$composer_file" ]; then
jq --arg pkg "host-uk/$target_module" --arg ver "$version" \
'.require[$pkg] = $ver' "$composer_file" > "$composer_file.tmp" && \
mv "$composer_file.tmp" "$composer_file"
echo "│ └── Updated composer.json"
fi
fi
done
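# Example usage (a sketch; expects repos.yaml and module directories side
# by side, and a jq-syntax `yq` on PATH):
#   ./sync-deps.sh core-php --dry-run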

View file

@ -1,162 +0,0 @@
import io
import sys
import unittest
from unittest.mock import patch, mock_open
from deps import (
parse_dependencies,
find_circular_dependencies,
print_dependency_tree,
print_reverse_dependencies,
main
)
class TestDeps(unittest.TestCase):
def setUp(self):
self.yaml_content = """
repos:
core-tenant:
depends: [core-php]
core-admin:
depends: [core-php, core-tenant]
core-php:
depends: []
core-api:
depends: [core-php]
core-analytics:
depends: [core-php, core-api]
"""
self.graph = {
'core-tenant': ['core-php'],
'core-admin': ['core-php', 'core-tenant'],
'core-php': [],
'core-api': ['core-php'],
'core-analytics': ['core-php', 'core-api'],
}
self.circular_yaml_content = """
repos:
module-a:
depends: [module-b]
module-b:
depends: [module-c]
module-c:
depends: [module-a]
"""
self.circular_graph = {
'module-a': ['module-b'],
'module-b': ['module-c'],
'module-c': ['module-a'],
}
def test_parse_dependencies(self):
with patch("builtins.open", mock_open(read_data=self.yaml_content)):
graph = parse_dependencies("dummy_path.yaml")
self.assertEqual(graph, self.graph)
def test_find_circular_dependencies(self):
cycles = find_circular_dependencies(self.circular_graph)
self.assertEqual(len(cycles), 1)
self.assertIn('module-a', cycles[0])
self.assertIn('module-b', cycles[0])
self.assertIn('module-c', cycles[0])
def test_find_no_circular_dependencies(self):
cycles = find_circular_dependencies(self.graph)
self.assertEqual(len(cycles), 0)
@patch('sys.stdout', new_callable=io.StringIO)
def test_print_dependency_tree(self, mock_stdout):
print_dependency_tree(self.graph, 'core-admin')
expected_output = (
"core-admin\n"
"├── core-php\n"
"└── core-tenant\n"
" └── core-php\n"
)
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('sys.stdout', new_callable=io.StringIO)
def test_print_dependency_tree_no_deps(self, mock_stdout):
print_dependency_tree(self.graph, 'core-php')
expected_output = "core-php\n"
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('sys.stdout', new_callable=io.StringIO)
def test_print_reverse_dependencies(self, mock_stdout):
print_reverse_dependencies(self.graph, 'core-php')
expected_output = (
"├── core-admin\n"
"├── core-analytics\n"
"├── core-api\n"
"└── core-tenant"
)
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('sys.stdout', new_callable=io.StringIO)
def test_print_reverse_dependencies_no_deps(self, mock_stdout):
print_reverse_dependencies(self.graph, 'core-admin')
expected_output = "(no modules depend on core-admin)"
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('deps.find_repos_yaml', return_value='dummy_path.yaml')
@patch('sys.stdout', new_callable=io.StringIO)
def test_main_no_args(self, mock_stdout, mock_find_yaml):
with patch("builtins.open", mock_open(read_data=self.yaml_content)):
with patch.object(sys, 'argv', ['deps.py']):
main()
output = mock_stdout.getvalue()
self.assertIn("core-admin dependencies:", output)
self.assertIn("core-tenant dependencies:", output)
@patch('deps.find_repos_yaml', return_value='dummy_path.yaml')
@patch('sys.stdout', new_callable=io.StringIO)
def test_main_module_arg(self, mock_stdout, mock_find_yaml):
with patch("builtins.open", mock_open(read_data=self.yaml_content)):
with patch.object(sys, 'argv', ['deps.py', 'core-tenant']):
main()
expected_output = (
"core-tenant dependencies:\n"
"└── core-php\n"
)
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('deps.find_repos_yaml', return_value='dummy_path.yaml')
@patch('sys.stdout', new_callable=io.StringIO)
def test_main_reverse_arg(self, mock_stdout, mock_find_yaml):
with patch("builtins.open", mock_open(read_data=self.yaml_content)):
with patch.object(sys, 'argv', ['deps.py', '--reverse', 'core-api']):
main()
expected_output = (
"Modules that depend on core-api:\n"
"└── core-analytics"
)
self.assertEqual(mock_stdout.getvalue().strip(), expected_output.strip())
@patch('deps.find_repos_yaml', return_value='dummy_path.yaml')
@patch('sys.stdout', new_callable=io.StringIO)
def test_main_circular_dep(self, mock_stdout, mock_find_yaml):
with patch("builtins.open", mock_open(read_data=self.circular_yaml_content)):
with patch.object(sys, 'argv', ['deps.py']):
with self.assertRaises(SystemExit) as cm:
main()
self.assertEqual(cm.exception.code, 1)
output = mock_stdout.getvalue()
self.assertIn("Error: Circular dependencies detected!", output)
self.assertIn("module-a -> module-b -> module-c -> module-a", output)
@patch('deps.find_repos_yaml', return_value='dummy_path.yaml')
@patch('sys.stdout', new_callable=io.StringIO)
def test_main_non_existent_module(self, mock_stdout, mock_find_yaml):
with patch("builtins.open", mock_open(read_data=self.yaml_content)):
with patch.object(sys, 'argv', ['deps.py', 'non-existent-module']):
with self.assertRaises(SystemExit) as cm:
main()
self.assertEqual(cm.exception.code, 1)
output = mock_stdout.getvalue()
self.assertIn("Error: Module 'non-existent-module' not found in repos.yaml.", output)
if __name__ == '__main__':
unittest.main()

View file

@ -1,104 +0,0 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
# Function to process and format TODOs
process_todos() {
local sort_by_priority=false
if [[ "${1:-}" == "--priority" ]]; then
sort_by_priority=true
fi
local count=0
local high=0
local med=0
local low=0
local output=""
local found_todos=false
while IFS= read -r line; do
found_todos=true
count=$((count + 1))  # ((count++)) returns 1 when count is 0, which would trip set -e
filepath=$(echo "$line" | cut -d: -f1)
linenumber=$(echo "$line" | cut -d: -f2)
message_raw=$(echo "$line" | cut -d: -f3-)
message=$(echo "$message_raw" | sed -e 's/^[[:space:]]*\/\///' -e 's/^[[:space:]]*#//' -e 's/^[[:space:]]*//' | sed -e 's/TODO:|FIXME:|HACK:|XXX://i' | sed 's/^[[:space:]]*//')
sort_key=2
priority="MED"
if [[ $line =~ FIXME: || ($line =~ TODO: && $line =~ urgent) ]]; then
priority="HIGH"
sort_key=1
high=$((high + 1))
elif [[ $line =~ HACK: || $line =~ XXX: ]]; then
priority="LOW"
sort_key=3
low=$((low + 1))
else
med=$((med + 1))
fi
if git ls-files --error-unmatch "$filepath" >/dev/null 2>&1; then
age=$(git log -1 --format=%ar -- "$filepath")
else
age="untracked"
fi
# Keep each record on one physical line (with literal \n markers) so the
# sort below orders whole records, not individual display lines
formatted_line="${sort_key}_#${count} [$priority] $message\n  ${filepath}:${linenumber}\n  Added: ${age}\n"
output+="${formatted_line}"$'\n'
done < <(grep -r -n -i -E "TODO:|FIXME:|HACK:|XXX:" . \
--exclude-dir=".git" \
--exclude-dir=".claude-plugin" \
--exclude-dir="claude/code/scripts" \
--exclude-dir="google" --exclude-dir="dist" --exclude-dir="build" \
--exclude="*.log" --exclude="todos.txt" --exclude="test_loop.sh" || true)
if [ "$found_todos" = false ]; then
echo "No TODOs found."
else
if [[ "$sort_by_priority" = true ]]; then
echo -e "$output" | sort -n | sed 's/^[0-9]_//'
else
echo -e "$output" | sed 's/^[0-9]_//'
fi
echo "Total: $count TODOs ($high high, $med medium, $low low)"
fi
}
# Default action is to list TODOs
ACTION="list"
ARGS=""
# Parse command-line arguments
if [[ $# -gt 0 ]]; then
if [[ "$1" == "--priority" ]]; then
ACTION="--priority"
shift
else
ACTION="$1"
shift
fi
ARGS="$@"
fi
case "$ACTION" in
list)
process_todos
;;
add)
echo "Error: 'add' command not implemented." >&2
exit 1
;;
done)
echo "Error: 'done' command not implemented." >&2
exit 1
;;
--priority)
process_todos --priority
;;
*)
echo "Usage: /core:todo [list | --priority]" >&2
exit 1
;;
esac

View file

@ -1,37 +0,0 @@
#!/bin/bash
# This script validates a git branch name based on a naming convention.
full_command="${CLAUDE_TOOL_INPUT:-$*}"
# Check for override flag
if [[ "$full_command" =~ --no-verify ]]; then
echo "✓ Branch validation skipped due to --no-verify flag."
exit 0
fi
branch_name=""
# Regex to find branch name from 'git checkout -b <branch> ...'
if [[ "$full_command" =~ git\ checkout\ -b\ ([^[:space:]]+) ]]; then
branch_name="${BASH_REMATCH[1]}"
# Regex to find branch name from 'git branch <branch> ...'
elif [[ "$full_command" =~ git\ branch\ ([^[:space:]]+) ]]; then
branch_name="${BASH_REMATCH[1]}"
fi
if [[ -z "$branch_name" ]]; then
exit 0
fi
convention_regex="^(feat|fix|refactor|docs|test|chore)/.+"
if [[ ! "$branch_name" =~ $convention_regex ]]; then
echo "❌ Invalid branch name: '$branch_name'"
echo " Branch names must follow the convention: type/description"
echo " Example: feat/new-login-page"
echo " (To bypass this check, use the --no-verify flag)"
exit 1
fi
echo "✓ Branch name '$branch_name' is valid."
exit 0
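# Example (hypothetical hook invocation via CLAUDE_TOOL_INPUT):
#   CLAUDE_TOOL_INPUT='git checkout -b feat/new-login-page' ./validate-branch.sh
#   # ✓ Branch name 'feat/new-login-page' is valid.
#   CLAUDE_TOOL_INPUT='git checkout -b wip' ./validate-branch.sh
#   # ❌ Invalid branch name: 'wip'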

View file

@ -0,0 +1,269 @@
#!/usr/bin/env bash
# BitcoinTalk Thread Collector
# Usage: ./collect.sh <topic-id-or-url> [--pages=N] [--output=DIR]
set -e
DELAY=2 # Be respectful to BTT servers
MAX_PAGES=0 # 0 = all pages
OUTPUT_BASE="."
# Parse topic ID from URL or direct input
parse_topic_id() {
local input="$1"
if [[ "$input" =~ topic=([0-9]+) ]]; then
echo "${BASH_REMATCH[1]}"
else
echo "$input" | grep -oE '[0-9]+'
fi
}
# Fetch a single page
fetch_page() {
local topic_id="$1"
local offset="$2"
local output_file="$3"
local url="https://bitcointalk.org/index.php?topic=${topic_id}.${offset}"
echo " Fetching: $url"
curl -s -A "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)" \
-H "Accept: text/html" \
"$url" > "$output_file"
sleep $DELAY
}
# Check if page has posts
page_has_posts() {
local html_file="$1"
grep -q 'class="post"' "$html_file" 2>/dev/null
}
# Get last page number from first page
get_last_page() {
local html_file="$1"
# Look for navigation like "Pages: [1] 2 3 ... 50"
local max_page=$(grep -oE 'topic=[0-9]+\.[0-9]+' "$html_file" | \
sed 's/.*\.//' | sort -rn | head -1)
echo "${max_page:-0}"
}
# Extract posts from HTML (simplified - works for basic extraction)
extract_posts_simple() {
local html_file="$1"
local output_dir="$2"
local post_offset="$3"
# Use Python for reliable HTML parsing
python3 << PYEOF
import re
import html
import os
from datetime import datetime
html_content = open('$html_file', 'r', encoding='utf-8', errors='ignore').read()
# Pattern to find posts - BTT structure
post_pattern = r'<td class="td_headerandpost">(.*?)</td>\s*</tr>\s*</table>\s*</td>\s*</tr>'
author_pattern = r'<a href="https://bitcointalk\.org/index\.php\?action=profile;u=\d+"[^>]*>([^<]+)</a>'
date_pattern = r'<div class="smalltext">([A-Za-z]+ \d+, \d+, \d+:\d+:\d+ [AP]M)</div>'
post_content_pattern = r'<div class="post"[^>]*>(.*?)</div>\s*(?:<div class="moderatorbar"|</td>)'
posts = re.findall(post_pattern, html_content, re.DOTALL)
post_num = $post_offset
for post_html in posts:
post_num += 1
# Extract author
author_match = re.search(author_pattern, post_html)
author = author_match.group(1) if author_match else "Unknown"
# Extract date
date_match = re.search(date_pattern, post_html)
date_str = date_match.group(1) if date_match else "Unknown date"
# Extract content
content_match = re.search(post_content_pattern, post_html, re.DOTALL)
if content_match:
content = content_match.group(1)
# Clean HTML
content = re.sub(r'<br\s*/?>', '\n', content)
content = re.sub(r'<[^>]+>', '', content)
content = html.unescape(content)
content = content.strip()
else:
content = "(Could not extract content)"
# Determine post type/score
score = "COMMUNITY"
if post_num == 1:
score = "ANN"
elif re.search(r'\[UPDATE\]|\[RELEASE\]|\[ANNOUNCEMENT\]', content, re.I):
score = "UPDATE"
elif '?' in content[:200]:
score = "QUESTION"
# Write post file
filename = f"$output_dir/POST-{post_num:04d}.md"
with open(filename, 'w') as f:
f.write(f"# Post #{post_num}\n\n")
f.write(f"## Metadata\n\n")
f.write(f"| Field | Value |\n")
f.write(f"|-------|-------|\n")
f.write(f"| Author | {author} |\n")
f.write(f"| Date | {date_str} |\n")
f.write(f"| Type | **{score}** |\n\n")
f.write(f"---\n\n")
f.write(f"## Content\n\n")
f.write(content)
f.write(f"\n")
print(f" Created POST-{post_num:04d}.md ({score}) by {author}")
print(f"EXTRACTED:{post_num}")
PYEOF
}
# Main collection function
collect_thread() {
local topic_id="$1"
local output_dir="$OUTPUT_BASE/bitcointalk-$topic_id"
mkdir -p "$output_dir/pages" "$output_dir/posts"
echo "=== Collecting BitcoinTalk Topic: $topic_id ==="
# Fetch first page to get thread info
fetch_page "$topic_id" 0 "$output_dir/pages/page-0.html"
# Extract thread title (portable sed; grep -oP is GNU-only)
local title=$(sed -n 's/.*<title>\([^<]*\)<\/title>.*/\1/p' "$output_dir/pages/page-0.html" | head -1)
echo "Thread: $title"
# Get total pages
local last_offset=$(get_last_page "$output_dir/pages/page-0.html" "$topic_id")
local total_pages=$(( (last_offset / 20) + 1 ))
echo "Total pages: $total_pages"
if [ "$MAX_PAGES" -gt 0 ] && [ "$MAX_PAGES" -lt "$total_pages" ]; then
total_pages=$MAX_PAGES
echo "Limiting to: $total_pages pages"
fi
# Extract posts from first page
local post_count=0
local result new_count
result=$(extract_posts_simple "$output_dir/pages/page-0.html" "$output_dir/posts" 0)
echo "$result" | grep -v '^EXTRACTED:' || true
new_count=$(echo "$result" | grep '^EXTRACTED:' | cut -d: -f2 || true)
post_count=${new_count:-0}
# Fetch remaining pages
for (( page=1; page<total_pages; page++ )); do
local offset=$((page * 20))
fetch_page "$topic_id" "$offset" "$output_dir/pages/page-$offset.html"
if ! page_has_posts "$output_dir/pages/page-$offset.html"; then
echo " No more posts found, stopping."
break
fi
result=$(extract_posts_simple "$output_dir/pages/page-$offset.html" "$output_dir/posts" "$post_count")
echo "$result" | grep -v '^EXTRACTED:' || true
new_count=$(echo "$result" | grep '^EXTRACTED:' | cut -d: -f2 || true)
post_count=${new_count:-$post_count}
done
# Generate index
generate_index "$output_dir" "$title" "$topic_id" "$post_count"
echo ""
echo "=== Collection Complete ==="
echo "Posts: $post_count"
echo "Output: $output_dir/"
}
# Generate index file
generate_index() {
local output_dir="$1"
local title="$2"
local topic_id="$3"
local post_count="$4"
cat > "$output_dir/INDEX.md" << EOF
# BitcoinTalk Thread Archive
## Thread Info
| Field | Value |
|-------|-------|
| Title | $title |
| Topic ID | $topic_id |
| URL | https://bitcointalk.org/index.php?topic=$topic_id.0 |
| Posts Archived | $post_count |
| Collected | $(date +%Y-%m-%d) |
---
## Post Type Legend
| Type | Meaning |
|------|---------|
| ANN | Original announcement |
| UPDATE | Official team update |
| QUESTION | Community question |
| ANSWER | Team response |
| COMMUNITY | General discussion |
| CONCERN | Raised issue/criticism |
---
## Posts
| # | Author | Date | Type |
|---|--------|------|------|
EOF
for file in "$output_dir/posts/"POST-*.md; do
[ -f "$file" ] || continue
local num=$(basename "$file" .md | sed 's/POST-0*//')
local author=$(grep "| Author |" "$file" | sed 's/.*| Author | \(.*\) |/\1/')
local date=$(grep "| Date |" "$file" | sed 's/.*| Date | \(.*\) |/\1/')
local type=$(sed -n '/| Type |/s/.*\*\*\([A-Z]*\)\*\*.*/\1/p' "$file")
echo "| [$num](posts/POST-$(printf "%04d" $num).md) | $author | $date | $type |" >> "$output_dir/INDEX.md"
done
echo " Created INDEX.md"
}
# Parse arguments
main() {
local topic_input=""
for arg in "$@"; do
case "$arg" in
--pages=*) MAX_PAGES="${arg#*=}" ;;
--output=*) OUTPUT_BASE="${arg#*=}" ;;
--delay=*) DELAY="${arg#*=}" ;;
*) topic_input="$arg" ;;
esac
done
if [ -z "$topic_input" ]; then
echo "Usage: $0 <topic-id-or-url> [--pages=N] [--output=DIR] [--delay=2]"
echo ""
echo "Examples:"
echo " $0 2769739"
echo " $0 https://bitcointalk.org/index.php?topic=2769739.0"
echo " $0 2769739 --pages=5 --output=./lethean-ann"
exit 1
fi
local topic_id=$(parse_topic_id "$topic_input")
if [ -z "$topic_id" ]; then
echo "Error: Could not parse topic ID from: $topic_input"
exit 1
fi
collect_thread "$topic_id"
}
main "$@"


@ -0,0 +1,106 @@
#!/usr/bin/env bash
# Generate block explorer collection jobs
# Usage: ./generate-jobs.sh <coin> [--blocks=N] [--sample=daily|weekly|monthly]
set -e
COIN=""
EXPLORER_URL=""
SAMPLE="monthly"
BLOCK_COUNT=100
# Known explorers
declare -A EXPLORERS=(
["lethean"]="https://explorer.lethean.io"
["monero"]="https://xmrchain.net"
["haven"]="https://explorer.havenprotocol.org"
["karbo"]="https://explorer.karbo.io"
["wownero"]="https://explore.wownero.com"
["dero"]="https://explorer.dero.io"
["masari"]="https://explorer.getmasari.org"
["turtlecoin"]="https://explorer.turtlecoin.lol"
["conceal"]="https://explorer.conceal.network"
)
# Parse args
for arg in "$@"; do
case "$arg" in
--url=*) EXPLORER_URL="${arg#*=}" ;;
--blocks=*) BLOCK_COUNT="${arg#*=}" ;;
--sample=*) SAMPLE="${arg#*=}" ;;
--*) ;;
*) COIN="$arg" ;;
esac
done
if [ -z "$COIN" ] && [ -z "$EXPLORER_URL" ]; then
echo "Usage: $0 <coin> [--url=URL] [--blocks=N] [--sample=daily|weekly|monthly]" >&2
echo "" >&2
echo "Known coins: ${!EXPLORERS[*]}" >&2
exit 1
fi
# Get explorer URL
if [ -z "$EXPLORER_URL" ]; then
EXPLORER_URL="${EXPLORERS[$COIN]}"
if [ -z "$EXPLORER_URL" ]; then
echo "# ERROR: Unknown coin '$COIN'. Use --url= to specify explorer." >&2
exit 1
fi
fi
# Fall back to a generic slug when only --url= was given
COIN="${COIN:-custom}"
SLUG=$(echo "$COIN" | tr '[:upper:]' '[:lower:]')
echo "# Block Explorer Jobs for $COIN"
echo "# Explorer: $EXPLORER_URL"
echo "# Sample: $SAMPLE"
echo "# Format: URL|FILENAME|TYPE|METADATA"
echo "#"
# Core API endpoints
echo "# === Core Data ==="
echo "${EXPLORER_URL}/api/info|explorer-${SLUG}-info.json|explorer-api|coin=$SLUG,type=info"
echo "${EXPLORER_URL}/api/emission|explorer-${SLUG}-emission.json|explorer-api|coin=$SLUG,type=emission"
echo "${EXPLORER_URL}/api/supply|explorer-${SLUG}-supply.json|explorer-api|coin=$SLUG,type=supply"
echo "${EXPLORER_URL}/api/mempool|explorer-${SLUG}-mempool.json|explorer-api|coin=$SLUG,type=mempool"
# Genesis block
echo "#"
echo "# === Genesis Block ==="
echo "${EXPLORER_URL}/api/block/0|explorer-${SLUG}-block-0.json|explorer-api|coin=$SLUG,block=0"
echo "${EXPLORER_URL}/api/block/1|explorer-${SLUG}-block-1.json|explorer-api|coin=$SLUG,block=1"
# Milestone blocks (fixed heights; requests beyond a shorter chain's tip will fail harmlessly)
echo "#"
echo "# === Milestone Blocks ==="
for height in 10000 50000 100000 500000 1000000 2000000; do
echo "${EXPLORER_URL}/api/block/${height}|explorer-${SLUG}-block-${height}.json|explorer-api|coin=$SLUG,block=$height"
done
# Sample blocks by time
echo "#"
echo "# === Sampled Blocks (estimate heights) ==="
case "$SAMPLE" in
daily)
# ~720 blocks/day for 2-min blocks
STEP=720
;;
weekly)
STEP=5040
;;
monthly)
STEP=21600
;;
esac
for ((i=0; i<BLOCK_COUNT; i++)); do
height=$((i * STEP))
echo "${EXPLORER_URL}/api/block/${height}|explorer-${SLUG}-sample-${height}.json|explorer-api|coin=$SLUG,block=$height,sample=$SAMPLE"
done
# Web pages (for scraping if API fails)
echo "#"
echo "# === Web Pages (backup) ==="
echo "${EXPLORER_URL}/|explorer-${SLUG}-home.html|explorer-web|coin=$SLUG"
echo "${EXPLORER_URL}/blocks|explorer-${SLUG}-blocks.html|explorer-web|coin=$SLUG"
echo "${EXPLORER_URL}/stats|explorer-${SLUG}-stats.html|explorer-web|coin=$SLUG"


@ -0,0 +1,89 @@
#!/usr/bin/env bash
# Generate job list for CoinMarketCap collection
# Usage: ./generate-jobs.sh <coin-slug> [options] > jobs.txt
set -e
COINS=()
HISTORICAL=0
FROM_DATE="2017-01-01"
TO_DATE=$(date +%Y-%m-%d)
# Parse args
for arg in "$@"; do
case "$arg" in
--historical) HISTORICAL=1 ;;
--from=*) FROM_DATE="${arg#*=}" ;;
--to=*) TO_DATE="${arg#*=}" ;;
--*) ;;
*) COINS+=("$arg") ;;
esac
done
if [ ${#COINS[@]} -eq 0 ]; then
echo "Usage: $0 <coin-slug> [coin-slug...] [--historical] [--from=DATE] [--to=DATE]" >&2
echo "" >&2
echo "Examples:" >&2
echo " $0 lethean" >&2
echo " $0 lethean --historical --from=2018-01-01" >&2
echo " $0 lethean monero bitcoin" >&2
exit 1
fi
# Header
echo "# CoinMarketCap job list - $(date +%Y-%m-%d)"
echo "# Coins: ${COINS[*]}"
echo "# Format: URL|FILENAME|TYPE|METADATA"
echo "#"
for COIN in "${COINS[@]}"; do
SLUG=$(echo "$COIN" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g')
echo "# === $SLUG ==="
# Main page (current data, description, links)
echo "https://coinmarketcap.com/currencies/${SLUG}/|cmc-${SLUG}-main.html|cmc-main|coin=$SLUG"
# Markets/exchanges
echo "https://coinmarketcap.com/currencies/${SLUG}/markets/|cmc-${SLUG}-markets.html|cmc-markets|coin=$SLUG"
# Historical data page
echo "https://coinmarketcap.com/currencies/${SLUG}/historical-data/|cmc-${SLUG}-historical.html|cmc-historical|coin=$SLUG"
# News
echo "https://coinmarketcap.com/currencies/${SLUG}/news/|cmc-${SLUG}-news.html|cmc-news|coin=$SLUG"
# API endpoints (if accessible without auth)
# These return JSON and are more reliable than scraping
echo "https://api.coinmarketcap.com/data-api/v3/cryptocurrency/detail?slug=${SLUG}|cmc-${SLUG}-api-detail.json|cmc-api|coin=$SLUG,type=detail"
echo "https://api.coinmarketcap.com/data-api/v3/cryptocurrency/market-pairs/latest?slug=${SLUG}&limit=100|cmc-${SLUG}-api-markets.json|cmc-api|coin=$SLUG,type=markets"
# Historical data via API (may need date chunks)
if [ "$HISTORICAL" = "1" ]; then
echo "#"
echo "# Historical data: $FROM_DATE to $TO_DATE"
# Convert dates to timestamps
FROM_TS=$(date -j -f "%Y-%m-%d" "$FROM_DATE" "+%s" 2>/dev/null || date -d "$FROM_DATE" "+%s")
TO_TS=$(date -j -f "%Y-%m-%d" "$TO_DATE" "+%s" 2>/dev/null || date -d "$TO_DATE" "+%s")
# CMC historical API (public, limited)
echo "https://api.coinmarketcap.com/data-api/v3/cryptocurrency/historical?slug=${SLUG}&timeStart=${FROM_TS}&timeEnd=${TO_TS}|cmc-${SLUG}-api-historical.json|cmc-api|coin=$SLUG,type=historical"
# Also try the web scrape version with date range
echo "https://coinmarketcap.com/currencies/${SLUG}/historical-data/?start=${FROM_DATE//\-/}&end=${TO_DATE//\-/}|cmc-${SLUG}-historical-range.html|cmc-historical|coin=$SLUG,from=$FROM_DATE,to=$TO_DATE"
fi
echo "#"
done
echo "# === Additional data sources ==="
echo "#"
# CoinGecko as backup (often has more historical data)
for COIN in "${COINS[@]}"; do
SLUG=$(echo "$COIN" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9-]/-/g')
echo "https://www.coingecko.com/en/coins/${SLUG}|coingecko-${SLUG}-main.html|coingecko|coin=$SLUG"
echo "https://api.coingecko.com/api/v3/coins/${SLUG}|coingecko-${SLUG}-api.json|coingecko-api|coin=$SLUG"
echo "https://api.coingecko.com/api/v3/coins/${SLUG}/market_chart?vs_currency=usd&days=max|coingecko-${SLUG}-history.json|coingecko-api|coin=$SLUG,type=history"
done
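End to end, the pieces chain together; `fetch-jobs.sh` is the consumer sketched after the explorer generator, and `process.sh` is the processor that follows:

```bash
./generate-jobs.sh lethean --historical --from=2018-01-01 > jobs.txt
./fetch-jobs.sh jobs.txt ./downloads
./process.sh ./downloads --output=./cmc-archive
```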


@ -0,0 +1,226 @@
#!/usr/bin/env bash
# Process downloaded CoinMarketCap data
# Usage: ./process.sh <downloads-dir> [--output=DIR]
set -e
DOWNLOADS="$1"
OUTPUT="./cmc-archive"
for arg in "$@"; do
case "$arg" in
--output=*) OUTPUT="${arg#*=}" ;;
esac
done
mkdir -p "$OUTPUT"
echo "=== Processing CoinMarketCap downloads ==="
# Process API JSON files first (most reliable)
for file in "$DOWNLOADS"/cmc-*-api-detail.json; do
[ -f "$file" ] || continue
COIN=$(basename "$file" | sed 's/cmc-\(.*\)-api-detail.json/\1/')
COIN_DIR="$OUTPUT/$COIN"
mkdir -p "$COIN_DIR"
echo "Processing: $COIN"
python3 << PYEOF
import json
import os
try:
data = json.load(open('$file', 'r'))
if 'data' in data:
coin = data['data']
# Extract metadata
metadata = {
'id': coin.get('id'),
'name': coin.get('name'),
'symbol': coin.get('symbol'),
'slug': coin.get('slug'),
'description': coin.get('description', ''),
'logo': coin.get('logo'),
'category': coin.get('category'),
'dateAdded': coin.get('dateAdded'),
'urls': coin.get('urls', {}),
'tags': coin.get('tags', []),
}
with open('$COIN_DIR/metadata.json', 'w') as f:
json.dump(metadata, f, indent=2)
print(f" Created metadata.json")
# Create markdown summary
with open('$COIN_DIR/INDEX.md', 'w') as f:
f.write(f"# {metadata['name']} ({metadata['symbol']})\n\n")
f.write(f"## Metadata\n\n")
f.write(f"| Field | Value |\n")
f.write(f"|-------|-------|\n")
f.write(f"| Name | {metadata['name']} |\n")
f.write(f"| Symbol | {metadata['symbol']} |\n")
f.write(f"| CMC ID | {metadata['id']} |\n")
f.write(f"| Added | {metadata['dateAdded']} |\n")
f.write(f"| Category | {metadata.get('category', 'N/A')} |\n\n")
f.write(f"## Links\n\n")
urls = metadata.get('urls', {})
for url_type, url_list in urls.items():
if url_list:
f.write(f"- **{url_type}**: {', '.join(url_list[:3])}\n")
f.write(f"\n## Description\n\n")
f.write(metadata.get('description', 'No description')[:2000])
f.write("\n")
print(f" Created INDEX.md")
except Exception as e:
print(f" Error processing: {e}")
PYEOF
done
# Process historical data
for file in "$DOWNLOADS"/cmc-*-api-historical.json; do
[ -f "$file" ] || continue
COIN=$(basename "$file" | sed 's/cmc-\(.*\)-api-historical.json/\1/')
COIN_DIR="$OUTPUT/$COIN"
mkdir -p "$COIN_DIR/historical"
echo "Processing historical: $COIN"
python3 << PYEOF
import json
import csv
from datetime import datetime
try:
data = json.load(open('$file', 'r'))
if 'data' in data and 'quotes' in data['data']:
quotes = data['data']['quotes']
# Group by year
by_year = {}
for quote in quotes:
ts = quote.get('timestamp', quote.get('time', ''))
if ts:
year = ts[:4]
if year not in by_year:
by_year[year] = []
by_year[year].append({
'date': ts[:10],
'open': quote.get('quote', {}).get('USD', {}).get('open', quote.get('open')),
'high': quote.get('quote', {}).get('USD', {}).get('high', quote.get('high')),
'low': quote.get('quote', {}).get('USD', {}).get('low', quote.get('low')),
'close': quote.get('quote', {}).get('USD', {}).get('close', quote.get('close')),
'volume': quote.get('quote', {}).get('USD', {}).get('volume', quote.get('volume')),
'market_cap': quote.get('quote', {}).get('USD', {}).get('market_cap', quote.get('market_cap')),
})
for year, rows in by_year.items():
filename = f'$COIN_DIR/historical/{year}.csv'
with open(filename, 'w', newline='') as f:
writer = csv.DictWriter(f, fieldnames=['date', 'open', 'high', 'low', 'close', 'volume', 'market_cap'])
writer.writeheader()
writer.writerows(sorted(rows, key=lambda x: x['date']))
print(f" Created historical/{year}.csv ({len(rows)} rows)")
except Exception as e:
print(f" Error: {e}")
PYEOF
done
# Process CoinGecko data as backup
for file in "$DOWNLOADS"/coingecko-*-api.json; do
[ -f "$file" ] || continue
COIN=$(basename "$file" | sed 's/coingecko-\(.*\)-api.json/\1/')
COIN_DIR="$OUTPUT/$COIN"
mkdir -p "$COIN_DIR"
echo "Processing CoinGecko: $COIN"
python3 << PYEOF
import json
try:
data = json.load(open('$file', 'r'))
# Extract useful fields
gecko_data = {
'coingecko_id': data.get('id'),
'coingecko_rank': data.get('coingecko_rank'),
'genesis_date': data.get('genesis_date'),
'sentiment_up': data.get('sentiment_votes_up_percentage'),
'sentiment_down': data.get('sentiment_votes_down_percentage'),
'developer_data': data.get('developer_data', {}),
'community_data': data.get('community_data', {}),
}
with open('$COIN_DIR/coingecko.json', 'w') as f:
json.dump(gecko_data, f, indent=2)
print(f" Created coingecko.json")
except Exception as e:
print(f" Error: {e}")
PYEOF
done
# Process market/exchange data
for file in "$DOWNLOADS"/cmc-*-api-markets.json; do
[ -f "$file" ] || continue
COIN=$(basename "$file" | sed 's/cmc-\(.*\)-api-markets.json/\1/')
COIN_DIR="$OUTPUT/$COIN"
mkdir -p "$COIN_DIR"
echo "Processing markets: $COIN"
python3 << PYEOF
import json
try:
data = json.load(open('$file', 'r'))
if 'data' in data and 'marketPairs' in data['data']:
pairs = data['data']['marketPairs']
markets = []
for pair in pairs[:50]: # Top 50 markets
markets.append({
'exchange': pair.get('exchangeName'),
'pair': pair.get('marketPair'),
'price': pair.get('price'),
'volume_24h': pair.get('volumeUsd'),
'type': pair.get('marketType'),
})
with open('$COIN_DIR/markets.json', 'w') as f:
json.dump(markets, f, indent=2)
# Add to INDEX.md
with open('$COIN_DIR/INDEX.md', 'a') as f:
f.write(f"\n## Markets (Top 10)\n\n")
f.write(f"| Exchange | Pair | Volume 24h |\n")
f.write(f"|----------|------|------------|\n")
for m in markets[:10]:
vol = m.get('volume_24h', 0)
vol_str = f"${vol:,.0f}" if vol else "N/A"
f.write(f"| {m['exchange']} | {m['pair']} | {vol_str} |\n")
print(f" Created markets.json ({len(markets)} pairs)")
except Exception as e:
print(f" Error: {e}")
PYEOF
done
echo ""
echo "=== Processing Complete ==="
echo "Output: $OUTPUT/"


@ -1,73 +0,0 @@
---
name: core
description: Use when working in host-uk repositories, running tests, building, releasing, or managing multi-repo workflows. Provides the core CLI command reference.
---
# Core CLI
The `core` command provides a unified interface for Go/PHP development and multi-repo management.
**Rule:** Always prefer `core <command>` over raw commands.
## Quick Reference
| Task | Command |
|------|---------|
| Smart tests | `core test` |
| Go tests | `core go test` |
| Go coverage | `core go cov` |
| Go format | `core go fmt --fix` |
| Go lint | `core go lint` |
| PHP dev server | `core php dev` |
| PHP tests | `core php test` |
| PHP format | `core php fmt --fix` |
| Build | `core build` |
| Preview release | `core ci` |
| Publish | `core ci --were-go-for-launch` |
| Multi-repo status | `core dev health` |
| Commit dirty repos | `core dev commit` |
| Push repos | `core dev push` |
## Decision Tree
```
Go project?
  tests:  core go test
  format: core go fmt --fix
  build:  core build

PHP project?
  dev:    core php dev
  tests:  core php test
  format: core php fmt --fix
  deploy: core php deploy

Multiple repos?
  status: core dev health
  commit: core dev commit
  push:   core dev push
```
## Common Mistakes
| Wrong | Right |
|-------|-------|
| `go test ./...` | `core go test` |
| `go build` | `core build` |
| `php artisan serve` | `core php dev` |
| `./vendor/bin/pest` | `core php test` |
| `git status` per repo | `core dev health` |
Run `core --help` or `core <cmd> --help` for full options.
## Smart Test Runner: `core test`
The `core test` command provides an intelligent way to run only the tests relevant to your recent changes.
- **`core test`**: Automatically detects changed files since the last commit and runs only the corresponding tests.
- **`core test --all`**: Runs the entire test suite for the project.
- **`core test --filter <TestName>`**: Runs a specific test by name.
- **`core test --coverage`**: Generates a test coverage report.
- **`core test <path/to/file>`**: Runs tests for a specific file or directory.
The runner automatically detects whether the project is Go or PHP and executes the appropriate testing tool. If it cannot map changed files to test files, it will fall back to running the full test suite.
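A hypothetical session tying the flags together (the directory and test name are illustrative):

```bash
core test                         # only tests affected by current changes
core test --filter TestUserLogin  # one named test
core test internal/auth           # tests for one file or directory
core test --all --coverage        # full suite with a coverage report (assuming the flags combine)
```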

Some files were not shown because too many files have changed in this diff.