diff --git a/.claude-plugin/marketplace.json b/.claude-plugin/marketplace.json
index a684368..b26375a 100644
--- a/.claude-plugin/marketplace.json
+++ b/.claude-plugin/marketplace.json
@@ -49,6 +49,12 @@
       "version": "0.1.0"
     },
     {
+      "name": "perf",
+      "source": "./claude/perf",
+      "description": "Performance profiling helpers for Go and PHP.",
+      "version": "0.1.0"
+    },
+    {
       "name": "core",
       "source": "./claude/core",
       "description": "Core functionality - release management",
diff --git a/claude/perf/.claude-plugin/plugin.json b/claude/perf/.claude-plugin/plugin.json
new file mode 100644
index 0000000..d48f4b7
--- /dev/null
+++ b/claude/perf/.claude-plugin/plugin.json
@@ -0,0 +1,7 @@
+{
+  "name": "perf",
+  "description": "Performance profiling helpers for Go and PHP.",
+  "version": "0.1.0",
+  "author": "Jules",
+  "license": "EUPL-1.2"
+}
diff --git a/claude/perf/commands/perf.md b/claude/perf/commands/perf.md
new file mode 100644
index 0000000..9f65087
--- /dev/null
+++ b/claude/perf/commands/perf.md
@@ -0,0 +1,59 @@
+---
+name: perf
+description: Performance profiling helpers for Go and PHP.
+args: [options]
+---
+
+# Performance Profiling
+
+Profile test suite, HTTP requests, and analyze slow queries and memory usage.
+
+## Subcommands
+
+- `test`: Profile the test suite.
+- `request <url>`: Profile an HTTP request.
+- `query <query>`: Analyze a slow query (requires MySQL client and credentials).
+- `memory [script_path]`: Analyze memory usage.
+
+## Usage
+
+```
+/perf:perf test
+/perf:perf request /api/users
+/perf:perf query "SELECT * FROM users WHERE email = 'test@example.com'"
+/perf:perf memory
+```
+
+## Actions
+
+### Test Profiling
+
+Run this command to profile the test suite:
+
+```bash
+"${CLAUDE_PLUGIN_ROOT}/scripts/perf-test.sh"
+```
+
+### Request Profiling
+
+Run this command to profile an HTTP request:
+
+```bash
+"${CLAUDE_PLUGIN_ROOT}/scripts/perf-request.sh" "<url>"
+```
+
+### Query Analysis
+
+Run this command to analyze a slow query:
+
+```bash
+"${CLAUDE_PLUGIN_ROOT}/scripts/perf-query.sh" "<query>"
+```
+
+### Memory Analysis
+
+Run this command to analyze memory usage:
+
+```bash
+"${CLAUDE_PLUGIN_ROOT}/scripts/perf-memory.sh" "<script_path>"
+```
diff --git a/claude/perf/scripts/perf-memory.sh b/claude/perf/scripts/perf-memory.sh
new file mode 100644
index 0000000..01dd7c0
--- /dev/null
+++ b/claude/perf/scripts/perf-memory.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+
+set -e
+
+if [ -f "go.mod" ]; then
+  PROJECT_TYPE="go"
+elif [ -f "composer.json" ]; then
+  PROJECT_TYPE="php"
+else
+  echo "Error: Unable to determine project type. No go.mod or composer.json found."
+  exit 1
+fi
+
+echo "Detected project type: $PROJECT_TYPE"
+
+case $PROJECT_TYPE in
+  "go")
+    if [ ! -f "mem.prof" ]; then
+      echo "Error: Memory profile 'mem.prof' not found."
+      echo "Please run '/perf:perf test' on your Go project first to generate the memory profile."
+      exit 1
+    fi
+    echo "Analyzing Go memory profile..."
+    go tool pprof -top mem.prof
+    ;;
+  "php")
+    if ! php -m | grep -q 'Xdebug'; then
+      echo "Xdebug is not installed. Please install it to use the PHP memory profiler."
+      exit 1
+    fi
+
+    if [ -z "$1" ]; then
+      echo "Usage for PHP: $0 <script_path>"
+      exit 1
+    fi
+
+    PHP_SCRIPT=$1
+    if [ ! -f "$PHP_SCRIPT" ]; then
+      echo "Error: File not found: $PHP_SCRIPT"
+      exit 1
+    fi
+
+    echo "Generating memory profile for $PHP_SCRIPT..."
+    # Generate a unique filename for the profile output
+    PROFILE_OUTPUT="cachegrind.out.$$"
+    XDEBUG_MODE=profile php -d xdebug.profiler_output_name="$PROFILE_OUTPUT" "$PHP_SCRIPT" > /dev/null 2>&1
+
+    if [ ! -f "$PROFILE_OUTPUT" ]; then
+      echo "Error: Memory profile could not be generated. Check your Xdebug configuration."
+      exit 1
+    fi
+
+    echo "Analyzing memory profile..."
+    # Parse the cachegrind file to find functions with high memory usage.
+    # NB: 'func' is a reserved word in gawk, so the loop variable is 'fn'.
+    awk '
+      /^fn=/ {
+        current_function = substr($0, 4)
+      }
+      /^[0-9]/ {
+        # Column 2 is self-inclusive memory cost
+        memory_cost = $2
+        functions[current_function] += memory_cost
+      }
+      END {
+        for (fn in functions) {
+          printf "%12d %s\n", functions[fn], fn
+        }
+      }
+    ' "$PROFILE_OUTPUT" | sort -nr | head -n 10 > top_memory_functions.log
+
+    echo "--- Top 10 Memory-Consuming Functions (in bytes) ---"
+    cat top_memory_functions.log
+    echo "----------------------------------------------------"
+
+    echo "Actionable Suggestions:"
+    echo " - Review the functions listed above. High memory usage may indicate large arrays, unreleased resources, or inefficient data structures."
+    echo " - For processing large files or database results, consider using generators to reduce memory footprint."
+
+    # Clean up the generated files
+    rm "$PROFILE_OUTPUT" top_memory_functions.log
+    ;;
+esac
diff --git a/claude/perf/scripts/perf-query.sh b/claude/perf/scripts/perf-query.sh
new file mode 100644
index 0000000..25a7566
--- /dev/null
+++ b/claude/perf/scripts/perf-query.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+set -e
+
+if [ -z "$1" ]; then
+  echo "Usage: $0 \"<query>\""
+  echo "Required environment variables: DB_HOST, DB_USER, DB_PASS, DB_NAME"
+  exit 1
+fi
+
+if ! command -v mysql &> /dev/null; then
+  echo "mysql command could not be found. Please install the MySQL client."
+  exit 1
+fi
+
+if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASS" ] || [ -z "$DB_NAME" ]; then
+  echo "Error: Missing required database environment variables."
+  echo "Please set DB_HOST, DB_USER, DB_PASS, and DB_NAME."
+  exit 1
+fi
+
+QUERY=$1
+
+echo "Analyzing query: $QUERY"
+
+# Run EXPLAIN; the '!' guard keeps 'set -e' from exiting the script
+# before the failure can be reported (a bare $? check after the fact is dead code).
+if ! EXPLAIN_OUTPUT=$(mysql -h"$DB_HOST" -u"$DB_USER" -p"$DB_PASS" "$DB_NAME" -e "EXPLAIN $QUERY" --batch 2>/dev/null); then
+  echo "Error executing EXPLAIN on the query. Please check your query and database credentials."
+  exit 1
+fi
+
+echo "--- EXPLAIN Output ---"
+echo "$EXPLAIN_OUTPUT"
+echo "----------------------"
+
+SUGGESTIONS=""
+
+# suggestion 1: Full table scan
+if echo "$EXPLAIN_OUTPUT" | awk 'NR > 1' | awk '{print $5}' | grep -q "ALL"; then
+  TABLE=$(echo "$EXPLAIN_OUTPUT" | awk 'NR > 1 && $5 == "ALL" {print $3}')
+  SUGGESTIONS+=" - Consider adding an index to the join condition or WHERE clause for table '$TABLE' to avoid a full table scan.\n"
+fi
+
+# suggestion 2: Using filesort (Extra column contains spaces, so match the whole row)
+if echo "$EXPLAIN_OUTPUT" | awk 'NR > 1' | grep -q "filesort"; then
+  SUGGESTIONS+=" - 'Using filesort' indicates an inefficient sort. Consider adding an index on the columns used in the ORDER BY clause.\n"
+fi
+
+# suggestion 3: Using temporary
+if echo "$EXPLAIN_OUTPUT" | awk 'NR > 1' | grep -q "temporary"; then
+  SUGGESTIONS+=" - 'Using temporary' indicates the creation of a temporary table, which can be slow. This might be improved by adding an index.\n"
+fi
+
+
+if [ -z "$SUGGESTIONS" ]; then
+  echo "No obvious performance issues found."
+else
+  echo "Actionable Suggestions:"
+  echo -e "$SUGGESTIONS"
+fi
diff --git a/claude/perf/scripts/perf-request.sh b/claude/perf/scripts/perf-request.sh
new file mode 100644
index 0000000..01a3775
--- /dev/null
+++ b/claude/perf/scripts/perf-request.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+set -e
+
+if [ -z "$1" ]; then
+  echo "Usage: $0 <url>"
+  exit 1
+fi
+
+URL=$1
+
+echo "Profiling request to: $URL"
+
+OUTPUT=$(curl -w "time_namelookup=%{time_namelookup}\ntime_connect=%{time_connect}\ntime_appconnect=%{time_appconnect}\ntime_pretransfer=%{time_pretransfer}\ntime_redirect=%{time_redirect}\ntime_starttransfer=%{time_starttransfer}\ntime_total=%{time_total}" -o /dev/null -s "$URL")
+
+# Extract values (anchored so e.g. 'time_connect' cannot match 'time_appconnect')
+get_value() {
+  echo "$OUTPUT" | grep "^$1=" | cut -d'=' -f2
+}
+
+TIME_NAMELOOKUP=$(get_value time_namelookup)
+TIME_CONNECT=$(get_value time_connect)
+TIME_STARTTRANSFER=$(get_value time_starttransfer)
+
+echo "--- Timing Metrics ---"
+echo "DNS Lookup: ${TIME_NAMELOOKUP}s"
+echo "Connect: ${TIME_CONNECT}s"
+echo "Start Transfer: ${TIME_STARTTRANSFER}s"
+echo "----------------------"
+
+SUGGESTIONS=""
+
+# Suggestion 1: High DNS lookup time
+if (( $(echo "$TIME_NAMELOOKUP > 0.1" | bc -l) )); then
+  SUGGESTIONS+=" - DNS lookup took over 100ms. Consider using a faster DNS provider or checking your network configuration.\n"
+fi
+
+# Suggestion 2: High connect time
+if (( $(echo "$TIME_CONNECT > 0.2" | bc -l) )); then
+  SUGGESTIONS+=" - Connection time is over 200ms. If this is a remote server, consider using a CDN. If it's local, check for network latency or server load.\n"
+fi
+
+# Suggestion 3: High start transfer time (Time To First Byte)
+if (( $(echo "$TIME_STARTTRANSFER > 0.5" | bc -l) )); then
+  SUGGESTIONS+=" - Time To First Byte (TTFB) is over 500ms. This indicates a slow backend. Profile your application code to identify and optimize bottlenecks.\n"
+fi
+
+if [ -z "$SUGGESTIONS" ]; then
+  echo "No obvious performance issues found."
+else
+  echo "Actionable Suggestions:"
+  echo -e "$SUGGESTIONS"
+fi
diff --git a/claude/perf/scripts/perf-test.sh b/claude/perf/scripts/perf-test.sh
new file mode 100644
index 0000000..f8f54c0
--- /dev/null
+++ b/claude/perf/scripts/perf-test.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+set -e
+
+if [ -f "go.mod" ]; then
+  PROJECT_TYPE="go"
+elif [ -f "composer.json" ]; then
+  PROJECT_TYPE="php"
+else
+  echo "Error: Unable to determine project type. No go.mod or composer.json found."
+  exit 1
+fi
+
+echo "Detected project type: $PROJECT_TYPE"
+
+case $PROJECT_TYPE in
+  "go")
+    echo "Running Go test profiling..."
+    go test -v -cpuprofile=cpu.prof -memprofile=mem.prof -bench=. 2>&1 | tee test_output.log
+
+    echo "Analyzing test performance..."
+    # '--' stops grep parsing '--- PASS' as an option; gsub strips '(', ')' and 's'
+    # from the duration so 'sort -nr' and the numeric compare below work.
+    grep -- "--- PASS" test_output.log | awk '{gsub(/[()s]/, "", $4); print $4, $3}' | sort -nr | head -n 10 > slowest_tests.log
+
+    echo "Slowest tests:"
+    cat slowest_tests.log
+
+    echo ""
+    echo "Actionable Suggestions:"
+    awk '$1 > 2.0 {print " - The test \""$2"\" took " $1 "s to run. Consider using mocks for external dependencies to speed it up."}' slowest_tests.log
+    ;;
+  "php")
+    if ! php -m | grep -q 'Xdebug'; then
+      echo "Xdebug is not installed. Please install it to use the PHP test profiler."
+      exit 1
+    fi
+
+    echo "Running PHP test profiling..."
+    if [ -f "vendor/bin/pest" ]; then
+      vendor/bin/pest --log-junit report.xml
+    elif [ -f "vendor/bin/phpunit" ]; then
+      vendor/bin/phpunit --log-junit report.xml
+    else
+      echo "Error: No pest or phpunit executable found."
+      exit 1
+    fi
+
+    if ! command -v xmlstarlet &> /dev/null; then
+      echo "xmlstarlet could not be found. Please install it to use this feature."
+      echo "On Debian/Ubuntu: sudo apt-get install xmlstarlet"
+      echo "On macOS (Homebrew): brew install xmlstarlet"
+      exit 1
+    fi
+
+    echo "Analyzing test performance..."
+ xmlstarlet sel -t -m "//testcase" -v "@time" -o " " -v "@name" -n report.xml | sort -nr | head -n 10 > slowest_tests.log + + echo "Slowest tests:" + cat slowest_tests.log + + echo "" + echo "Actionable Suggestions:" + awk '$1 > 2.0 {print " - The test \""$2"\" took " $1 "s to run. Consider using mocks for external dependencies to speed it up."}' slowest_tests.log + ;; +esac