From 3fbfd7f7ff910d89f36475d73b657b7d3e680d66 Mon Sep 17 00:00:00 2001 From: xingzihai <1315258019@qq.com> Date: Wed, 25 Mar 2026 17:05:02 +0000 Subject: [PATCH 1/4] feat: Add git-workflow skill Add comprehensive Git workflow skill covering: - Branching strategies (GitHub Flow, Trunk-Based, GitFlow) - Conventional commits format and best practices - Merge vs rebase with clear guidance - Pull request workflow and templates - Conflict resolution strategies - Branch management and naming conventions - Release management with semantic versioning - Git configuration and useful aliases - Common workflows and anti-patterns This skill helps developers and teams establish consistent Git practices for collaborative development. --- skills/git-workflow/SKILL.md | 716 +++++++++++++++++++++++++++++++++++ 1 file changed, 716 insertions(+) create mode 100644 skills/git-workflow/SKILL.md diff --git a/skills/git-workflow/SKILL.md b/skills/git-workflow/SKILL.md new file mode 100644 index 00000000..d57f51d3 --- /dev/null +++ b/skills/git-workflow/SKILL.md @@ -0,0 +1,716 @@ +--- +name: git-workflow +description: Git workflow patterns including branching strategies, commit conventions, merge vs rebase, conflict resolution, and collaborative development best practices for teams of all sizes. +origin: ECC +--- + +# Git Workflow Patterns + +Best practices for Git version control, branching strategies, and collaborative development. + +## When to Activate + +- Setting up Git workflow for a new project +- Deciding on branching strategy (GitFlow, trunk-based, GitHub flow) +- Writing commit messages and PR descriptions +- Resolving merge conflicts +- Managing releases and version tags +- Onboarding new team members to Git practices + +## Branching Strategies + +### GitHub Flow (Simple, Recommended for Most) + +Best for continuous deployment and small-to-medium teams. 
+ +``` +main (protected, always deployable) + │ + ├── feature/user-auth → PR → merge to main + ├── feature/payment-flow → PR → merge to main + └── fix/login-bug → PR → merge to main +``` + +**Rules:** +- `main` is always deployable +- Create feature branches from `main` +- Open Pull Request when ready for review +- After approval and CI passes, merge to `main` +- Deploy immediately after merge + +### Trunk-Based Development (High-Velocity Teams) + +Best for teams with strong CI/CD and feature flags. + +``` +main (trunk) + │ + ├── short-lived feature (1-2 days max) + ├── short-lived feature + └── short-lived feature +``` + +**Rules:** +- Everyone commits to `main` or very short-lived branches +- Feature flags hide incomplete work +- CI must pass before merge +- Deploy multiple times per day + +### GitFlow (Complex, Release-Cycle Driven) + +Best for scheduled releases and enterprise projects. + +``` +main (production releases) + │ + └── develop (integration branch) + │ + ├── feature/user-auth + ├── feature/payment + │ + ├── release/1.0.0 → merge to main and develop + │ + └── hotfix/critical → merge to main and develop +``` + +**Rules:** +- `main` contains production-ready code only +- `develop` is the integration branch +- Feature branches from `develop`, merge back to `develop` +- Release branches from `develop`, merge to `main` and `develop` +- Hotfix branches from `main`, merge to both `main` and `develop` + +### When to Use Which + +| Strategy | Team Size | Release Cadence | Best For | +|----------|-----------|-----------------|----------| +| GitHub Flow | Any | Continuous | SaaS, web apps, startups | +| Trunk-Based | 5+ experienced | Multiple/day | High-velocity teams, feature flags | +| GitFlow | 10+ | Scheduled | Enterprise, regulated industries | + +## Commit Messages + +### Conventional Commits Format + +``` +(): + +[optional body] + +[optional footer(s)] +``` + +### Types + +| Type | Use For | Example | +|------|---------|---------| +| `feat` | New feature 
| `feat(auth): add OAuth2 login` | +| `fix` | Bug fix | `fix(api): handle null response in user endpoint` | +| `docs` | Documentation | `docs(readme): update installation instructions` | +| `style` | Formatting, no code change | `style: fix indentation in login component` | +| `refactor` | Code refactoring | `refactor(db): extract connection pool to module` | +| `test` | Adding/updating tests | `test(auth): add unit tests for token validation` | +| `chore` | Maintenance tasks | `chore(deps): update dependencies` | +| `perf` | Performance improvement | `perf(query): add index to users table` | +| `ci` | CI/CD changes | `ci: add PostgreSQL service to test workflow` | +| `revert` | Revert previous commit | `revert: revert "feat(auth): add OAuth2 login"` | + +### Good vs Bad Examples + +``` +# BAD: Vague, no context +git commit -m "fixed stuff" +git commit -m "updates" +git commit -m "WIP" + +# GOOD: Clear, specific, explains why +git commit -m "fix(api): retry requests on 503 Service Unavailable + +The external API occasionally returns 503 errors during peak hours. +Added exponential backoff retry logic with max 3 attempts. + +Closes #123" +``` + +### Commit Message Template + +Create `.gitmessage` in repo root: + +``` +# (): +# +# Types: feat, fix, docs, style, refactor, test, chore, perf, ci, revert +# Scope: api, ui, db, auth, etc. 
+# Subject: imperative mood, no period, max 50 chars +# +# [optional body] - explain why, not what +# [optional footer] - Breaking changes, closes #issue +``` + +Enable with: `git config commit.template .gitmessage` + +## Merge vs Rebase + +### Merge (Preserves History) + +```bash +# Creates a merge commit +git checkout main +git merge feature/user-auth + +# Result: +# * merge commit +# |\ +# | * feature commits +# |/ +# * main commits +``` + +**Use when:** +- Merging feature branches into `main` +- You want to preserve exact history +- Multiple people worked on the branch +- The branch has been pushed and others may have based work on it + +### Rebase (Linear History) + +```bash +# Rewrites feature commits onto target branch +git checkout feature/user-auth +git rebase main + +# Result: +# * feature commits (rewritten) +# * main commits +``` + +**Use when:** +- Updating your local feature branch with latest `main` +- You want a linear, clean history +- The branch is local-only (not pushed) +- You're the only one working on the branch + +### Rebase Workflow + +```bash +# Update feature branch with latest main (before PR) +git checkout feature/user-auth +git fetch origin +git rebase origin/main + +# Fix any conflicts +# Tests should still pass + +# Force push (only if you're the only contributor) +git push --force-with-lease origin feature/user-auth +``` + +### When NOT to Rebase + +``` +# NEVER rebase branches that: +- Have been pushed to a shared repository +- Other people have based work on +- Are protected branches (main, develop) +- Are already merged + +# Why: Rebase rewrites history, breaking others' work +``` + +## Pull Request Workflow + +### PR Title Format + +``` +(): + +Examples: +feat(auth): add SSO support for enterprise users +fix(api): resolve race condition in order processing +docs(api): add OpenAPI specification for v2 endpoints +``` + +### PR Description Template + +```markdown +## What + +Brief description of what this PR does. 
+ +## Why + +Explain the motivation and context. + +## How + +Key implementation details worth highlighting. + +## Testing + +- [ ] Unit tests added/updated +- [ ] Integration tests added/updated +- [ ] Manual testing performed + +## Screenshots (if applicable) + +Before/after screenshots for UI changes. + +## Checklist + +- [ ] Code follows project style guidelines +- [ ] Self-review completed +- [ ] Comments added for complex logic +- [ ] Documentation updated +- [ ] No new warnings introduced +- [ ] Tests pass locally +- [ ] Related issues linked + +Closes #123 +``` + +### Code Review Checklist + +**For Reviewers:** + +- [ ] Does the code solve the stated problem? +- [ ] Are there any edge cases not handled? +- [ ] Is the code readable and maintainable? +- [ ] Are there sufficient tests? +- [ ] Are there security concerns? +- [ ] Is the commit history clean (squashed if needed)? + +**For Authors:** + +- [ ] Self-review completed before requesting review +- [ ] CI passes (tests, lint, typecheck) +- [ ] PR size is reasonable (<500 lines ideal) +- [ ] Related to a single feature/fix +- [ ] Description clearly explains the change + +## Conflict Resolution + +### Identify Conflicts + +```bash +# Check for conflicts before merge +git checkout main +git merge feature/user-auth --no-commit --no-ff + +# If conflicts, Git will show: +# CONFLICT (content): Merge conflict in src/auth/login.ts +# Automatic merge failed; fix conflicts and then commit the result. 
+``` + +### Resolve Conflicts + +```bash +# See conflicted files +git status + +# View conflict markers in file +# <<<<<<< HEAD +# content from main +# ======= +# content from feature branch +# >>>>>>> feature/user-auth + +# Option 1: Manual resolution +# Edit file, remove markers, keep correct content + +# Option 2: Use merge tool +git mergetool + +# Option 3: Accept one side +git checkout --ours src/auth/login.ts # Keep main version +git checkout --theirs src/auth/login.ts # Keep feature version + +# After resolving, stage and commit +git add src/auth/login.ts +git commit +``` + +### Conflict Prevention Strategies + +```bash +# 1. Keep feature branches small and short-lived +# 2. Rebase frequently onto main +git checkout feature/user-auth +git fetch origin +git rebase origin/main + +# 3. Communicate with team about touching shared files +# 4. Use feature flags instead of long-lived branches +# 5. Review and merge PRs promptly +``` + +## Branch Management + +### Naming Conventions + +``` +# Feature branches +feature/user-authentication +feature/JIRA-123-payment-integration + +# Bug fixes +fix/login-redirect-loop +fix/456-null-pointer-exception + +# Hotfixes (production issues) +hotfix/critical-security-patch +hotfix/database-connection-leak + +# Releases +release/1.2.0 +release/2024-01-hotfix + +# Experiments/POCs +experiment/new-caching-strategy +poc/graphql-migration +``` + +### Branch Cleanup + +```bash +# Delete local branches that are merged +git branch --merged main | grep -v "^\*\|main" | xargs -n 1 git branch -d + +# Delete remote-tracking references for deleted remote branches +git fetch -p + +# Delete local branch +git branch -d feature/user-auth # Safe delete (only if merged) +git branch -D feature/user-auth # Force delete + +# Delete remote branch +git push origin --delete feature/user-auth +``` + +### Stash Workflow + +```bash +# Save work in progress +git stash push -m "WIP: user authentication" + +# List stashes +git stash list + +# Apply most 
recent stash +git stash pop + +# Apply specific stash +git stash apply stash@{2} + +# Drop stash +git stash drop stash@{0} +``` + +## Release Management + +### Semantic Versioning + +``` +MAJOR.MINOR.PATCH + +MAJOR: Breaking changes +MINOR: New features, backward compatible +PATCH: Bug fixes, backward compatible + +Examples: +1.0.0 → 1.0.1 (patch: bug fix) +1.0.1 → 1.1.0 (minor: new feature) +1.1.0 → 2.0.0 (major: breaking change) +``` + +### Creating Releases + +```bash +# Create annotated tag +git tag -a v1.2.0 -m "Release v1.2.0 + +Features: +- Add user authentication +- Implement password reset + +Fixes: +- Resolve login redirect issue + +Breaking Changes: +- None" + +# Push tag to remote +git push origin v1.2.0 + +# List tags +git tag -l + +# Delete tag +git tag -d v1.2.0 +git push origin --delete v1.2.0 +``` + +### Changelog Generation + +```bash +# Generate changelog from commits +git log v1.1.0..v1.2.0 --oneline --no-merges + +# Or use conventional-changelog +npx conventional-changelog -i CHANGELOG.md -s +``` + +## Git Configuration + +### Essential Configs + +```bash +# User identity +git config --global user.name "Your Name" +git config --global user.email "your@email.com" + +# Default branch name +git config --global init.defaultBranch main + +# Pull behavior (rebase instead of merge) +git config --global pull.rebase true + +# Push behavior (push current branch only) +git config --global push.default current + +# Auto-correct typos +git config --global help.autocorrect 1 + +# Better diff algorithm +git config --global diff.algorithm histogram + +# Color output +git config --global color.ui auto +``` + +### Useful Aliases + +```bash +# Add to ~/.gitconfig +[alias] + co = checkout + br = branch + ci = commit + st = status + unstage = reset HEAD -- + last = log -1 HEAD + visual = log --oneline --graph --all + amend = commit --amend --no-edit + wip = commit -m "WIP" + undo = reset --soft HEAD~1 + contributors = shortlog -sn +``` + +### Gitignore Patterns + 
+```gitignore +# Dependencies +node_modules/ +vendor/ + +# Build outputs +dist/ +build/ +*.o +*.exe + +# Environment files +.env +.env.local +.env.*.local + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# OS files +.DS_Store +Thumbs.db + +# Logs +*.log +logs/ + +# Test coverage +coverage/ + +# Cache +.cache/ +*.tsbuildinfo +``` + +## Common Workflows + +### Starting a New Feature + +```bash +# 1. Update main branch +git checkout main +git pull origin main + +# 2. Create feature branch +git checkout -b feature/user-auth + +# 3. Make changes and commit +git add . +git commit -m "feat(auth): implement OAuth2 login" + +# 4. Push to remote +git push -u origin feature/user-auth + +# 5. Create Pull Request on GitHub/GitLab +``` + +### Updating a PR with New Changes + +```bash +# 1. Make additional changes +git add . +git commit -m "feat(auth): add error handling" + +# 2. Push updates +git push origin feature/user-auth +``` + +### Syncing Fork with Upstream + +```bash +# 1. Add upstream remote (once) +git remote add upstream https://github.com/original/repo.git + +# 2. Fetch upstream +git fetch upstream + +# 3. Merge upstream/main into your main +git checkout main +git merge upstream/main + +# 4. 
Push to your fork +git push origin main +``` + +### Undoing Mistakes + +```bash +# Undo last commit (keep changes) +git reset --soft HEAD~1 + +# Undo last commit (discard changes) +git reset --hard HEAD~1 + +# Undo last commit pushed to remote +git revert HEAD +git push origin main + +# Undo specific file changes +git checkout HEAD -- path/to/file + +# Fix last commit message +git commit --amend -m "New message" + +# Add forgotten file to last commit +git add forgotten-file +git commit --amend --no-edit +``` + +## Git Hooks + +### Pre-Commit Hook + +```bash +#!/bin/bash +# .git/hooks/pre-commit + +# Run linting +npm run lint || exit 1 + +# Run tests +npm test || exit 1 + +# Check for secrets +if git diff --cached | grep -E '(password|api_key|secret)'; then + echo "Possible secret detected. Commit aborted." + exit 1 +fi +``` + +### Pre-Push Hook + +```bash +#!/bin/bash +# .git/hooks/pre-push + +# Run full test suite +npm run test:all || exit 1 + +# Check for console.log statements +if git diff origin/main | grep -E 'console\.log'; then + echo "Remove console.log statements before pushing." 
+ exit 1 +fi +``` + +## Anti-Patterns + +``` +# BAD: Committing directly to main +git checkout main +git commit -m "fix bug" + +# GOOD: Use feature branches and PRs + +# BAD: Committing secrets +git add .env # Contains API keys + +# GOOD: Add to .gitignore, use environment variables + +# BAD: Giant PRs (1000+ lines) +# GOOD: Break into smaller, focused PRs + +# BAD: "Update" commit messages +git commit -m "update" +git commit -m "fix" + +# GOOD: Descriptive messages +git commit -m "fix(auth): resolve redirect loop after login" + +# BAD: Rewriting public history +git push --force origin main + +# GOOD: Use revert for public branches +git revert HEAD + +# BAD: Long-lived feature branches (weeks/months) +# GOOD: Keep branches short (days), rebase frequently + +# BAD: Committing generated files +git add dist/ +git add node_modules/ + +# GOOD: Add to .gitignore +``` + +## Quick Reference + +| Task | Command | +|------|---------| +| Create branch | `git checkout -b feature/name` | +| Switch branch | `git checkout branch-name` | +| Delete branch | `git branch -d branch-name` | +| Merge branch | `git merge branch-name` | +| Rebase branch | `git rebase main` | +| View history | `git log --oneline --graph` | +| View changes | `git diff` | +| Stage changes | `git add .` or `git add -p` | +| Commit | `git commit -m "message"` | +| Push | `git push origin branch-name` | +| Pull | `git pull origin branch-name` | +| Stash | `git stash push -m "message"` | +| Undo last commit | `git reset --soft HEAD~1` | +| Revert commit | `git revert HEAD` | \ No newline at end of file From dc92b5c62b2a691e9c5b97ebe6f104696bea5083 Mon Sep 17 00:00:00 2001 From: xingzihai <1315258019@qq.com> Date: Wed, 25 Mar 2026 17:24:31 +0000 Subject: [PATCH 2/4] feat: Add performance-optimizer agent for code performance analysis and optimization --- agents/performance-optimizer.md | 445 ++++++++++++++++++++++++++++++++ 1 file changed, 445 insertions(+) create mode 100644 agents/performance-optimizer.md diff 
--git a/agents/performance-optimizer.md b/agents/performance-optimizer.md new file mode 100644 index 00000000..663a1891 --- /dev/null +++ b/agents/performance-optimizer.md @@ -0,0 +1,445 @@ +--- +name: performance-optimizer +description: Performance analysis and optimization specialist. Use PROACTIVELY for identifying bottlenecks, optimizing slow code, reducing bundle sizes, and improving runtime performance. Profiling, memory leaks, render optimization, and algorithmic improvements. +tools: ["Read", "Write", "Edit", "Bash", "Grep", "Glob"] +model: sonnet +--- + +# Performance Optimizer + +You are an expert performance specialist focused on identifying bottlenecks and optimizing application speed, memory usage, and efficiency. Your mission is to make code faster, lighter, and more responsive. + +## Core Responsibilities + +1. **Performance Profiling** — Identify slow code paths, memory leaks, and bottlenecks +2. **Bundle Optimization** — Reduce JavaScript bundle sizes, lazy loading, code splitting +3. **Runtime Optimization** — Improve algorithmic efficiency, reduce unnecessary computations +4. **React/Rendering Optimization** — Prevent unnecessary re-renders, optimize component trees +5. **Database & Network** — Optimize queries, reduce API calls, implement caching +6. **Memory Management** — Detect leaks, optimize memory usage, cleanup resources + +## Analysis Commands + +```bash +# Bundle analysis +npx bundle-analyzer +npx source-map-explorer build/static/js/*.js + +# Lighthouse performance audit +npx lighthouse https://your-app.com --view + +# Node.js profiling +node --prof your-app.js +node --prof-process isolate-*.log + +# Memory analysis +node --inspect your-app.js # Then use Chrome DevTools + +# React profiling (in browser) +# React DevTools > Profiler tab + +# Network analysis +npx webpack-bundle-analyzer +``` + +## Performance Review Workflow + +### 1. 
Identify Performance Issues + +**Critical Performance Indicators:** + +| Metric | Target | Action if Exceeded | +|--------|--------|-------------------| +| First Contentful Paint | < 1.8s | Optimize critical path, inline critical CSS | +| Largest Contentful Paint | < 2.5s | Lazy load images, optimize server response | +| Time to Interactive | < 3.8s | Code splitting, reduce JavaScript | +| Cumulative Layout Shift | < 0.1 | Reserve space for images, avoid layout thrashing | +| Total Blocking Time | < 200ms | Break up long tasks, use web workers | +| Bundle Size (gzipped) | < 200KB | Tree shaking, lazy loading, code splitting | + +### 2. Algorithmic Analysis + +Check for inefficient algorithms: + +| Pattern | Complexity | Better Alternative | +|---------|------------|-------------------| +| Nested loops on same data | O(n²) | Use Map/Set for O(1) lookups | +| Repeated array searches | O(n) per search | Convert to Map for O(1) | +| Sorting inside loop | O(n² log n) | Sort once outside loop | +| String concatenation in loop | O(n²) | Use array.join() | +| Deep cloning large objects | O(n) each time | Use shallow copy or immer | +| Recursion without memoization | O(2^n) | Add memoization | + +```typescript +// BAD: O(n²) - searching array in loop +for (const user of users) { + const posts = allPosts.filter(p => p.userId === user.id); // O(n) per user +} + +// GOOD: O(n) - group once with Map +const postsByUser = new Map(); +for (const post of allPosts) { + const userPosts = postsByUser.get(post.userId) || []; + userPosts.push(post); + postsByUser.set(post.userId, userPosts); +} +// Now O(1) lookup per user +``` + +### 3. 
React Performance Optimization + +**Common React Anti-patterns:** + +```tsx +// BAD: Inline function creation in render + + +// GOOD: Stable callback with useCallback +const handleButtonClick = useCallback(() => handleClick(id), [id]); + + +// BAD: Object creation in render + + +// GOOD: Stable object reference +const style = useMemo(() => ({ color: 'red' }), []); + + +// BAD: Expensive computation on every render +const sortedItems = items.sort((a, b) => a.name.localeCompare(b.name)); + +// GOOD: Memoize expensive computations +const sortedItems = useMemo( + () => items.sort((a, b) => a.name.localeCompare(b.name)), + [items] +); + +// BAD: List without keys or with index +{items.map((item, index) => )} + +// GOOD: Stable unique keys +{items.map(item => )} +``` + +**React Performance Checklist:** + +- [ ] `useMemo` for expensive computations +- [ ] `useCallback` for functions passed to children +- [ ] `React.memo` for frequently re-rendered components +- [ ] Proper dependency arrays in hooks +- [ ] Virtualization for long lists (react-window, react-virtualized) +- [ ] Lazy loading for heavy components (`React.lazy`) +- [ ] Code splitting at route level + +### 4. 
Bundle Size Optimization + +**Bundle Analysis Checklist:** + +```bash +# Analyze bundle composition +npx webpack-bundle-analyzer build/static/js/*.js + +# Check for duplicate dependencies +npx duplicate-package-checker-analyzer + +# Find largest files +du -sh node_modules/* | sort -hr | head -20 +``` + +**Optimization Strategies:** + +| Issue | Solution | +|-------|----------| +| Large vendor bundle | Tree shaking, smaller alternatives | +| Duplicate code | Extract to shared module | +| Unused exports | Remove dead code with knip | +| Moment.js | Use date-fns or dayjs (smaller) | +| Lodash | Use lodash-es or native methods | +| Large icons library | Import only needed icons | + +```javascript +// BAD: Import entire library +import _ from 'lodash'; +import moment from 'moment'; + +// GOOD: Import only what you need +import debounce from 'lodash/debounce'; +import { format, addDays } from 'date-fns'; + +// Or use lodash-es with tree shaking +import { debounce, throttle } from 'lodash-es'; +``` + +### 5. 
Database & Query Optimization + +**Query Optimization Patterns:** + +```sql +-- BAD: Select all columns +SELECT * FROM users WHERE active = true; + +-- GOOD: Select only needed columns +SELECT id, name, email FROM users WHERE active = true; + +-- BAD: N+1 queries (in application loop) +-- 1 query for users, then N queries for each user's orders + +-- GOOD: Single query with JOIN or batch fetch +SELECT u.*, o.id as order_id, o.total +FROM users u +LEFT JOIN orders o ON u.id = o.user_id +WHERE u.active = true; + +-- Add index for frequently queried columns +CREATE INDEX idx_users_active ON users(active); +CREATE INDEX idx_orders_user_id ON orders(user_id); +``` + +**Database Performance Checklist:** + +- [ ] Indexes on frequently queried columns +- [ ] Composite indexes for multi-column queries +- [ ] Avoid SELECT * in production code +- [ ] Use connection pooling +- [ ] Implement query result caching +- [ ] Use pagination for large result sets +- [ ] Monitor slow query logs + +### 6. Network & API Optimization + +**Network Optimization Strategies:** + +```typescript +// BAD: Multiple sequential requests +const user = await fetchUser(id); +const posts = await fetchPosts(user.id); +const comments = await fetchComments(posts[0].id); + +// GOOD: Parallel requests when independent +const [user, posts] = await Promise.all([ + fetchUser(id), + fetchPosts(id) +]); + +// GOOD: Batch requests when possible +const results = await batchFetch(['user1', 'user2', 'user3']); + +// Implement request caching +const fetchWithCache = async (url: string, ttl = 300000) => { + const cached = cache.get(url); + if (cached) return cached; + + const data = await fetch(url).then(r => r.json()); + cache.set(url, data, ttl); + return data; +}; + +// Debounce rapid API calls +const debouncedSearch = debounce(async (query: string) => { + const results = await searchAPI(query); + setResults(results); +}, 300); +``` + +**Network Optimization Checklist:** + +- [ ] Parallel independent requests with 
`Promise.all` +- [ ] Implement request caching +- [ ] Debounce rapid-fire requests +- [ ] Use streaming for large responses +- [ ] Implement pagination for large datasets +- [ ] Use GraphQL or API batching to reduce requests +- [ ] Enable compression (gzip/brotli) on server + +### 7. Memory Leak Detection + +**Common Memory Leak Patterns:** + +```typescript +// BAD: Event listener without cleanup +useEffect(() => { + window.addEventListener('resize', handleResize); + // Missing cleanup! +}, []); + +// GOOD: Clean up event listeners +useEffect(() => { + window.addEventListener('resize', handleResize); + return () => window.removeEventListener('resize', handleResize); +}, []); + +// BAD: Timer without cleanup +useEffect(() => { + setInterval(() => pollData(), 1000); + // Missing cleanup! +}, []); + +// GOOD: Clean up timers +useEffect(() => { + const interval = setInterval(() => pollData(), 1000); + return () => clearInterval(interval); +}, []); + +// BAD: Holding references in closures +const Component = () => { + const largeData = useLargeData(); + useEffect(() => { + eventEmitter.on('update', () => { + console.log(largeData); // Closure keeps reference + }); + }, [largeData]); +}; + +// GOOD: Use refs or proper dependencies +const largeDataRef = useRef(largeData); +useEffect(() => { + largeDataRef.current = largeData; +}, [largeData]); + +useEffect(() => { + eventEmitter.on('update', () => { + console.log(largeDataRef.current); + }); + return () => eventEmitter.off('update'); +}, []); +``` + +**Memory Leak Detection:** + +```bash +# Chrome DevTools Memory tab: +# 1. Take heap snapshot +# 2. Perform action +# 3. Take another snapshot +# 4. Compare to find objects that shouldn't exist +# 5. 
Look for detached DOM nodes, event listeners, closures + +# Node.js memory debugging +node --inspect app.js +# Open chrome://inspect +# Take heap snapshots and compare +``` + +## Performance Testing + +### Lighthouse Audits + +```bash +# Run full lighthouse audit +npx lighthouse https://your-app.com --view --preset=desktop + +# CI mode for automated checks +npx lighthouse https://your-app.com --output=json --output-path=./lighthouse.json + +# Check specific metrics +npx lighthouse https://your-app.com --only-categories=performance +``` + +### Performance Budgets + +```json +// package.json +{ + "bundlesize": [ + { + "path": "./build/static/js/*.js", + "maxSize": "200 kB" + } + ] +} +``` + +### Web Vitals Monitoring + +```typescript +// Track Core Web Vitals +import { getCLS, getFID, getLCP, getFCP, getTTFB } from 'web-vitals'; + +getCLS(console.log); // Cumulative Layout Shift +getFID(console.log); // First Input Delay +getLCP(console.log); // Largest Contentful Paint +getFCP(console.log); // First Contentful Paint +getTTFB(console.log); // Time to First Byte +``` + +## Performance Report Template + +```markdown +# Performance Audit Report + +## Executive Summary +- **Overall Score**: X/100 +- **Critical Issues**: X +- **Recommendations**: X + +## Bundle Analysis +| Metric | Current | Target | Status | +|--------|---------|--------|--------| +| Total Size (gzip) | XXX KB | < 200 KB | ⚠️ | +| Main Bundle | XXX KB | < 100 KB | ✅ | +| Vendor Bundle | XXX KB | < 150 KB | ⚠️ | + +## Web Vitals +| Metric | Current | Target | Status | +|--------|---------|--------|--------| +| LCP | X.Xs | < 2.5s | ✅ | +| FID | XXms | < 100ms | ✅ | +| CLS | X.XX | < 0.1 | ⚠️ | + +## Critical Issues + +### 1. [Issue Title] +**File**: path/to/file.ts:42 +**Impact**: High - Causes XXXms delay +**Fix**: [Description of fix] + +```typescript +// Before (slow) +const slowCode = ...; + +// After (optimized) +const fastCode = ...; +``` + +### 2. [Issue Title] +... + +## Recommendations +1. 
[Priority recommendation] +2. [Priority recommendation] +3. [Priority recommendation] + +## Estimated Impact +- Bundle size reduction: XX KB (XX%) +- LCP improvement: XXms +- Time to Interactive improvement: XXms +``` + +## When to Run + +**ALWAYS:** Before major releases, after adding new features, when users report slowness, during performance regression testing. + +**IMMEDIATELY:** Lighthouse score drops, bundle size increases >10%, memory usage grows, slow page loads. + +## Red Flags - Act Immediately + +| Issue | Action | +|-------|--------| +| Bundle > 500KB gzip | Code split, lazy load, tree shake | +| LCP > 4s | Optimize critical path, preload resources | +| Memory usage growing | Check for leaks, review useEffect cleanup | +| CPU spikes | Profile with Chrome DevTools | +| Database query > 1s | Add index, optimize query, cache results | + +## Success Metrics + +- Lighthouse performance score > 90 +- All Core Web Vitals in "good" range +- Bundle size under budget +- No memory leaks detected +- Test suite still passing +- No performance regressions + +--- + +**Remember**: Performance is a feature. Users notice speed. Every 100ms of improvement matters. Optimize for the 90th percentile, not the average. 
\ No newline at end of file From b44ba7096ffbe45569050ed9feb21f547a40d1e2 Mon Sep 17 00:00:00 2001 From: xingzihai <1315258019@qq.com> Date: Thu, 26 Mar 2026 00:28:26 +0000 Subject: [PATCH 3/4] feat(hooks): add pre-commit quality check hook - Add pre-bash-commit-quality.js hook script - Runs quality checks before git commit commands: - Lints staged files (ESLint, Pylint, golint) - Validates commit message format (conventional commits) - Detects console.log/debugger statements - Warns about TODO/FIXME without issue references - Detects potential hardcoded secrets - Updates hooks.json with new hook configuration - Updates README.md with hook documentation Cross-platform (Windows, macOS, Linux) --- hooks/README.md | 1 + hooks/hooks.json | 10 + scripts/hooks/pre-bash-commit-quality.js | 374 +++++++++++++++++++++++ 3 files changed, 385 insertions(+) create mode 100644 scripts/hooks/pre-bash-commit-quality.js diff --git a/hooks/README.md b/hooks/README.md index e3d50e51..0355b4d7 100644 --- a/hooks/README.md +++ b/hooks/README.md @@ -23,6 +23,7 @@ User request → Claude picks a tool → PreToolUse hook runs → Tool executes | **Dev server blocker** | `Bash` | Blocks `npm run dev` etc. 
outside tmux — ensures log access | 2 (blocks) | | **Tmux reminder** | `Bash` | Suggests tmux for long-running commands (npm test, cargo build, docker) | 0 (warns) | | **Git push reminder** | `Bash` | Reminds to review changes before `git push` | 0 (warns) | +| **Pre-commit quality check** | `Bash` | Runs quality checks before `git commit`: lints staged files, validates commit message format, detects console.log/debugger/secrets | 2 (blocks critical) / 0 (warns) | | **Doc file warning** | `Write` | Warns about non-standard `.md`/`.txt` files (allows README, CLAUDE, CONTRIBUTING, CHANGELOG, LICENSE, SKILL, docs/, skills/); cross-platform path handling | 0 (warns) | | **Strategic compact** | `Edit\|Write` | Suggests manual `/compact` at logical intervals (every ~50 tool calls) | 0 (warns) | | **InsAIts security monitor (opt-in)** | `Bash\|Write\|Edit\|MultiEdit` | Optional security scan for high-signal tool inputs. Disabled unless `ECC_ENABLE_INSAITS=1`. Blocks on critical findings, warns on non-critical, and writes audit log to `.insaits_audit_session.jsonl`. Requires `pip install insa-its`. 
[Details](../scripts/hooks/insaits-security-monitor.py) | 2 (blocks critical) / 0 (warns) | diff --git a/hooks/hooks.json b/hooks/hooks.json index 8610b512..64abd73b 100644 --- a/hooks/hooks.json +++ b/hooks/hooks.json @@ -42,6 +42,16 @@ ], "description": "Reminder before git push to review changes" }, + { + "matcher": "Bash", + "hooks": [ + { + "type": "command", + "command": "node \"${CLAUDE_PLUGIN_ROOT}/scripts/hooks/run-with-flags.js\" \"pre:bash:commit-quality\" \"scripts/hooks/pre-bash-commit-quality.js\" \"strict\"" + } + ], + "description": "Pre-commit quality check: lint staged files, validate commit message format, detect console.log/debugger/secrets before committing" + }, { "matcher": "Write", "hooks": [ diff --git a/scripts/hooks/pre-bash-commit-quality.js b/scripts/hooks/pre-bash-commit-quality.js new file mode 100644 index 00000000..4d48e510 --- /dev/null +++ b/scripts/hooks/pre-bash-commit-quality.js @@ -0,0 +1,374 @@ +#!/usr/bin/env node +/** + * PreToolUse Hook: Pre-commit Quality Check + * + * Runs quality checks before git commit commands: + * - Detects staged files + * - Runs linter on staged files (if available) + * - Checks for common issues (console.log, TODO, etc.) 
/**
 * PreToolUse Hook: Pre-commit Quality Check
 *
 * Runs quality checks before `git commit` commands:
 * - Detects staged files and inspects the STAGED snapshot (`git show :<path>`),
 *   not the working tree, so unstaged edits never affect the verdict
 * - Runs a linter on staged files (if one is installed locally)
 * - Flags console.log, debugger statements, unreferenced TODO/FIXME, and
 *   likely hardcoded secrets
 * - Validates commit message format when provided via -m/--message
 *
 * Cross-platform (Windows, macOS, Linux).
 *
 * Exit codes:
 *   0 - Success (allow commit; warnings may still be printed)
 *   2 - Block commit (critical quality issues found)
 */

const { spawnSync } = require('child_process');
const path = require('path');
const fs = require('fs');

// Defensive cap on how much stdin we will buffer.
const MAX_STDIN = 1024 * 1024; // 1MB limit

/**
 * Detect files staged for commit (Added/Copied/Modified/Renamed only).
 *
 * Uses argv-style spawnSync (no shell string) for safety and portability.
 * @returns {string[]} Staged file paths, or [] when git fails or nothing is staged.
 */
function getStagedFiles() {
  const result = spawnSync('git', ['diff', '--cached', '--name-only', '--diff-filter=ACMR'], {
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe']
  });
  // status is null when git itself is unavailable; treat as "nothing staged".
  if (result.status !== 0) {
    return [];
  }
  return result.stdout.trim().split('\n').filter(f => f.length > 0);
}

/**
 * Read a file's content exactly as staged in the index (`git show :<path>`).
 * @param {string} filePath - Path relative to the repository root.
 * @returns {string|null} Staged content, or null when it cannot be read.
 */
function getStagedFileContent(filePath) {
  const result = spawnSync('git', ['show', `:${filePath}`], {
    encoding: 'utf8',
    stdio: ['pipe', 'pipe', 'pipe']
  });
  if (result.status !== 0) {
    return null;
  }
  return result.stdout;
}

/**
 * Check if a file should be quality-checked (source extensions only).
 * @param {string} filePath
 * @returns {boolean}
 */
function shouldCheckFile(filePath) {
  const checkableExtensions = ['.js', '.jsx', '.ts', '.tsx', '.py', '.go', '.rs'];
  return checkableExtensions.some(ext => filePath.endsWith(ext));
}

/**
 * Find issues in a staged file's content.
 * @param {string} filePath
 * @returns {object[]} Issues of shape { type, message, line, severity }.
 */
function findFileIssues(filePath) {
  const issues = [];

  try {
    // Inspect the staged snapshot, never the (possibly diverged) working tree.
    const content = getStagedFileContent(filePath);
    if (content == null) {
      return issues;
    }
    const lines = content.split('\n');

    lines.forEach((line, index) => {
      const lineNum = index + 1;

      // console.log left behind (skip obvious comment lines) — warning only.
      if (line.includes('console.log') && !line.trim().startsWith('//') && !line.trim().startsWith('*')) {
        issues.push({
          type: 'console.log',
          message: `console.log found at line ${lineNum}`,
          line: lineNum,
          severity: 'warning'
        });
      }

      // debugger statements always block the commit.
      if (/\bdebugger\b/.test(line) && !line.trim().startsWith('//')) {
        issues.push({
          type: 'debugger',
          message: `debugger statement at line ${lineNum}`,
          line: lineNum,
          severity: 'error'
        });
      }

      // TODO/FIXME without an issue reference (#123 or the word "issue") — info.
      const todoMatch = line.match(/\/\/\s*(TODO|FIXME):?\s*(.+)/);
      if (todoMatch && !todoMatch[2].match(/#\d+|issue/i)) {
        issues.push({
          type: 'todo',
          message: `TODO/FIXME without issue reference at line ${lineNum}: "${todoMatch[2].trim()}"`,
          line: lineNum,
          severity: 'info'
        });
      }

      // Basic high-signal secret patterns; intentionally not exhaustive.
      const secretPatterns = [
        { pattern: /sk-[a-zA-Z0-9]{20,}/, name: 'OpenAI API key' },
        { pattern: /ghp_[a-zA-Z0-9]{36}/, name: 'GitHub PAT' },
        { pattern: /AKIA[A-Z0-9]{16}/, name: 'AWS Access Key' },
        { pattern: /api[_-]?key\s*[=:]\s*['"][^'"]+['"]/i, name: 'API key' }
      ];

      for (const { pattern, name } of secretPatterns) {
        if (pattern.test(line)) {
          issues.push({
            type: 'secret',
            message: `Potential ${name} exposed at line ${lineNum}`,
            line: lineNum,
            severity: 'error'
          });
        }
      }
    });
  } catch {
    // Unreadable content — skip rather than fail the whole hook.
  }

  return issues;
}

/**
 * Validate commit message format embedded in the command line.
 * @param {string} command - The full shell command being run.
 * @returns {object|null} { message, issues } or null when no -m/--message found.
 */
function validateCommitMessage(command) {
  // Naive extraction: covers the common `-m "msg"` / `--message=msg` forms
  // only — not -F files or heredocs. TODO confirm whether more forms matter.
  const messageMatch = command.match(/(?:-m|--message)[=\s]+["']?([^"']+)["']?/);
  if (!messageMatch) return null;

  const message = messageMatch[1];
  const issues = [];

  // Conventional commit: type(scope): subject
  const conventionalCommit = /^(feat|fix|docs|style|refactor|test|chore|build|ci|perf|revert)(\(.+\))?:\s*.+/;
  if (!conventionalCommit.test(message)) {
    issues.push({
      type: 'format',
      message: 'Commit message does not follow conventional commit format',
      suggestion: 'Use format: type(scope): description (e.g., "feat(auth): add login flow")'
    });
  }

  // First line should stay within the conventional 72-character limit.
  if (message.length > 72) {
    issues.push({
      type: 'length',
      message: `Commit message too long (${message.length} chars, max 72)`,
      suggestion: 'Keep the first line under 72 characters'
    });
  }

  // Subject after "type:" should start lowercase (conventional style).
  if (conventionalCommit.test(message)) {
    const afterColon = message.split(':')[1];
    if (afterColon && /^[A-Z]/.test(afterColon.trim())) {
      issues.push({
        type: 'capitalization',
        message: 'Subject should start with lowercase after type',
        suggestion: 'Use lowercase for the first letter of the subject'
      });
    }
  }

  // No trailing period on the subject line.
  if (message.endsWith('.')) {
    issues.push({
      type: 'punctuation',
      message: 'Commit message should not end with a period',
      suggestion: 'Remove the trailing period'
    });
  }

  return { message, issues };
}

/**
 * Run available linters on staged files.
 *
 * spawnSync never throws on a missing binary — it reports `result.error`
 * with code ENOENT — so absence is detected explicitly, not via try/catch.
 * @param {string[]} files
 * @returns {object} { eslint, pylint, golint } — each { success, output } or null.
 */
function runLinter(files) {
  const jsFiles = files.filter(f => /\.(js|jsx|ts|tsx)$/.test(f));
  const pyFiles = files.filter(f => f.endsWith('.py'));
  const goFiles = files.filter(f => f.endsWith('.go'));

  const results = {
    eslint: null,
    pylint: null,
    golint: null
  };

  // ESLint: only when installed locally; use the platform-correct shim name.
  if (jsFiles.length > 0) {
    const eslintBin = process.platform === 'win32' ? 'eslint.cmd' : 'eslint';
    const eslintPath = path.join(process.cwd(), 'node_modules', '.bin', eslintBin);
    if (fs.existsSync(eslintPath)) {
      const result = spawnSync(eslintPath, ['--format', 'compact', ...jsFiles], {
        encoding: 'utf8',
        stdio: ['pipe', 'pipe', 'pipe'],
        timeout: 30000
      });
      results.eslint = {
        success: result.status === 0,
        output: result.stdout || result.stderr
      };
    }
  }

  // Pylint: treat ENOENT (not installed) as "no result", not a failure.
  if (pyFiles.length > 0) {
    const result = spawnSync('pylint', ['--output-format=text', ...pyFiles], {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      timeout: 30000
    });
    if (!(result.error && result.error.code === 'ENOENT')) {
      results.pylint = {
        success: result.status === 0,
        output: result.stdout || result.stderr
      };
    }
  }

  // golint: prints issues to stdout and exits 0, so success = empty stdout.
  if (goFiles.length > 0) {
    const result = spawnSync('golint', goFiles, {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      timeout: 30000
    });
    if (!(result.error && result.error.code === 'ENOENT')) {
      results.golint = {
        success: !result.stdout || result.stdout.trim() === '',
        output: result.stdout
      };
    }
  }

  return results;
}

/**
 * Core logic — exported for direct invocation and tests.
 *
 * Never throws and never calls process.exit itself; the stdin entry point
 * applies the exit code, which keeps this function unit-testable.
 * @param {string} rawInput - Raw JSON string from stdin.
 * @returns {{output: string, exitCode: number}} Pass-through output and exit code.
 */
function evaluate(rawInput) {
  try {
    const input = JSON.parse(rawInput);
    const command = input.tool_input?.command || '';

    // Only run for git commit commands.
    if (!command.includes('git commit')) {
      return { output: rawInput, exitCode: 0 };
    }

    // Skip amends to avoid blocking fixups of already-reviewed work.
    if (command.includes('--amend')) {
      return { output: rawInput, exitCode: 0 };
    }

    const stagedFiles = getStagedFiles();

    if (stagedFiles.length === 0) {
      console.error('[Hook] No staged files found. Use "git add" to stage files first.');
      return { output: rawInput, exitCode: 0 };
    }

    console.error(`[Hook] Checking ${stagedFiles.length} staged file(s)...`);

    const filesToCheck = stagedFiles.filter(shouldCheckFile);
    let totalIssues = 0;
    let errorCount = 0;
    let warningCount = 0;
    let infoCount = 0;

    // Per-file content checks against the staged snapshot.
    for (const file of filesToCheck) {
      const fileIssues = findFileIssues(file);
      if (fileIssues.length > 0) {
        console.error(`\n📁 ${file}`);
        for (const issue of fileIssues) {
          const icon = issue.severity === 'error' ? '❌' : issue.severity === 'warning' ? '⚠️' : 'ℹ️';
          console.error(`  ${icon} Line ${issue.line}: ${issue.message}`);
          totalIssues++;
          if (issue.severity === 'error') errorCount++;
          if (issue.severity === 'warning') warningCount++;
          if (issue.severity === 'info') infoCount++;
        }
      }
    }

    // Commit message checks are warnings — they never block on their own.
    const messageValidation = validateCommitMessage(command);
    if (messageValidation && messageValidation.issues.length > 0) {
      console.error('\n📝 Commit Message Issues:');
      for (const issue of messageValidation.issues) {
        console.error(`  ⚠️ ${issue.message}`);
        if (issue.suggestion) {
          console.error(`     💡 ${issue.suggestion}`);
        }
        totalIssues++;
        warningCount++;
      }
    }

    // Lint failures count as errors and block the commit.
    const lintResults = runLinter(filesToCheck);

    if (lintResults.eslint && !lintResults.eslint.success) {
      console.error('\n🔍 ESLint Issues:');
      console.error(lintResults.eslint.output);
      totalIssues++;
      errorCount++;
    }

    if (lintResults.pylint && !lintResults.pylint.success) {
      console.error('\n🔍 Pylint Issues:');
      console.error(lintResults.pylint.output);
      totalIssues++;
      errorCount++;
    }

    if (lintResults.golint && !lintResults.golint.success) {
      console.error('\n🔍 golint Issues:');
      console.error(lintResults.golint.output);
      totalIssues++;
      errorCount++;
    }

    // Summary and verdict.
    if (totalIssues > 0) {
      console.error(`\n📊 Summary: ${totalIssues} issue(s) found (${errorCount} error(s), ${warningCount} warning(s), ${infoCount} info)`);

      if (errorCount > 0) {
        console.error('\n[Hook] ❌ Commit blocked due to critical issues. Fix them before committing.');
        return { output: rawInput, exitCode: 2 };
      }
      console.error('\n[Hook] ⚠️ Warnings found. Consider fixing them, but commit is allowed.');
      console.error('[Hook] To bypass these checks, use: git commit --no-verify');
    } else {
      console.error('\n[Hook] ✅ All checks passed!');
    }
  } catch (error) {
    // The hook must never break a commit because of its own failure.
    console.error(`[Hook] Error: ${error.message}`);
  }

  return { output: rawInput, exitCode: 0 };
}

/**
 * Backward-compatible wrapper returning only the pass-through output.
 * @param {string} rawInput
 * @returns {string}
 */
function run(rawInput) {
  return evaluate(rawInput).output;
}

// ── stdin entry point ────────────────────────────────────────────
if (require.main === module) {
  let data = '';
  process.stdin.setEncoding('utf8');

  process.stdin.on('data', chunk => {
    // Enforce the MAX_STDIN cap; excess input is silently truncated.
    if (data.length < MAX_STDIN) {
      const remaining = MAX_STDIN - data.length;
      data += chunk.substring(0, remaining);
    }
  });

  process.stdin.on('end', () => {
    const result = evaluate(data);
    process.stdout.write(result.output);
    process.exit(result.exitCode);
  });
}

module.exports = { run, evaluate };
= useMemo( - () => items.sort((a, b) => a.name.localeCompare(b.name)), + () => [...items].sort((a, b) => a.name.localeCompare(b.name)), [items] ); @@ -297,10 +297,11 @@ useEffect(() => { }, [largeData]); useEffect(() => { - eventEmitter.on('update', () => { + const handleUpdate = () => { console.log(largeDataRef.current); - }); - return () => eventEmitter.off('update'); + }; + eventEmitter.on('update', handleUpdate); + return () => eventEmitter.off('update', handleUpdate); }, []); ``` @@ -364,7 +365,7 @@ getTTFB(console.log); // Time to First Byte ## Performance Report Template -```markdown +````markdown # Performance Audit Report ## Executive Summary @@ -413,7 +414,7 @@ const fastCode = ...; - Bundle size reduction: XX KB (XX%) - LCP improvement: XXms - Time to Interactive improvement: XXms -``` +```` ## When to Run @@ -442,4 +443,4 @@ const fastCode = ...; --- -**Remember**: Performance is a feature. Users notice speed. Every 100ms of improvement matters. Optimize for the 90th percentile, not the average. \ No newline at end of file +**Remember**: Performance is a feature. Users notice speed. Every 100ms of improvement matters. Optimize for the 90th percentile, not the average. diff --git a/hooks/README.md b/hooks/README.md index 0355b4d7..27fae0a5 100644 --- a/hooks/README.md +++ b/hooks/README.md @@ -23,7 +23,7 @@ User request → Claude picks a tool → PreToolUse hook runs → Tool executes | **Dev server blocker** | `Bash` | Blocks `npm run dev` etc. 
outside tmux — ensures log access | 2 (blocks) | | **Tmux reminder** | `Bash` | Suggests tmux for long-running commands (npm test, cargo build, docker) | 0 (warns) | | **Git push reminder** | `Bash` | Reminds to review changes before `git push` | 0 (warns) | -| **Pre-commit quality check** | `Bash` | Runs quality checks before `git commit`: lints staged files, validates commit message format, detects console.log/debugger/secrets | 2 (blocks critical) / 0 (warns) | +| **Pre-commit quality check** | `Bash` | Runs quality checks before `git commit`: lints staged files, validates commit message format when provided via `-m/--message`, detects console.log/debugger/secrets | 2 (blocks critical) / 0 (warns) | | **Doc file warning** | `Write` | Warns about non-standard `.md`/`.txt` files (allows README, CLAUDE, CONTRIBUTING, CHANGELOG, LICENSE, SKILL, docs/, skills/); cross-platform path handling | 0 (warns) | | **Strategic compact** | `Edit\|Write` | Suggests manual `/compact` at logical intervals (every ~50 tool calls) | 0 (warns) | | **InsAIts security monitor (opt-in)** | `Bash\|Write\|Edit\|MultiEdit` | Optional security scan for high-signal tool inputs. Disabled unless `ECC_ENABLE_INSAITS=1`. Blocks on critical findings, warns on non-critical, and writes audit log to `.insaits_audit_session.jsonl`. Requires `pip install insa-its`. 
[Details](../scripts/hooks/insaits-security-monitor.py) | 2 (blocks critical) / 0 (warns) | diff --git a/scripts/hooks/pre-bash-commit-quality.js b/scripts/hooks/pre-bash-commit-quality.js index 4d48e510..10e2d589 100644 --- a/scripts/hooks/pre-bash-commit-quality.js +++ b/scripts/hooks/pre-bash-commit-quality.js @@ -15,7 +15,7 @@ * 2 - Block commit (quality issues found) */ -const { execSync, spawnSync } = require('child_process'); +const { spawnSync } = require('child_process'); const path = require('path'); const fs = require('fs'); @@ -26,15 +26,25 @@ const MAX_STDIN = 1024 * 1024; // 1MB limit * @returns {string[]} Array of staged file paths */ function getStagedFiles() { - try { - const output = execSync('git diff --cached --name-only --diff-filter=ACMR', { - encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] - }); - return output.trim().split('\n').filter(f => f.length > 0); - } catch { + const result = spawnSync('git', ['diff', '--cached', '--name-only', '--diff-filter=ACMR'], { + encoding: 'utf8', + stdio: ['pipe', 'pipe', 'pipe'] + }); + if (result.status !== 0) { return []; } + return result.stdout.trim().split('\n').filter(f => f.length > 0); +} + +function getStagedFileContent(filePath) { + const result = spawnSync('git', ['show', `:${filePath}`], { + encoding: 'utf8', + stdio: ['pipe', 'pipe', 'pipe'] + }); + if (result.status !== 0) { + return null; + } + return result.stdout; } /** @@ -56,7 +66,10 @@ function findFileIssues(filePath) { const issues = []; try { - const content = fs.readFileSync(filePath, 'utf8'); + const content = getStagedFileContent(filePath); + if (content == null) { + return issues; + } const lines = content.split('\n'); lines.forEach((line, index) => { @@ -152,9 +165,9 @@ function validateCommitMessage(command) { } // Check for lowercase first letter (conventional) - if (message.charAt(0) === message.charAt(0).toUpperCase() && conventionalCommit.test(message)) { + if (conventionalCommit.test(message)) { const afterColon = 
message.split(':')[1]; - if (afterColon && afterColon.trim().charAt(0) === afterColon.trim().charAt(0).toUpperCase()) { + if (afterColon && /^[A-Z]/.test(afterColon.trim())) { issues.push({ type: 'capitalization', message: 'Subject should start with lowercase after type', @@ -193,21 +206,18 @@ function runLinter(files) { // Run ESLint if available if (jsFiles.length > 0) { - try { - const eslintPath = path.join(process.cwd(), 'node_modules', '.bin', 'eslint'); - if (fs.existsSync(eslintPath)) { - const result = spawnSync(eslintPath, ['--format', 'compact', ...jsFiles], { - encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'], - timeout: 30000 - }); - results.eslint = { - success: result.status === 0, - output: result.stdout || result.stderr - }; - } - } catch { - // ESLint not available + const eslintBin = process.platform === 'win32' ? 'eslint.cmd' : 'eslint'; + const eslintPath = path.join(process.cwd(), 'node_modules', '.bin', eslintBin); + if (fs.existsSync(eslintPath)) { + const result = spawnSync(eslintPath, ['--format', 'compact', ...jsFiles], { + encoding: 'utf8', + stdio: ['pipe', 'pipe', 'pipe'], + timeout: 30000 + }); + results.eslint = { + success: result.status === 0, + output: result.stdout || result.stderr + }; } } @@ -219,10 +229,14 @@ function runLinter(files) { stdio: ['pipe', 'pipe', 'pipe'], timeout: 30000 }); - results.pylint = { - success: result.status === 0, - output: result.stdout || result.stderr - }; + if (result.error && result.error.code === 'ENOENT') { + results.pylint = null; + } else { + results.pylint = { + success: result.status === 0, + output: result.stdout || result.stderr + }; + } } catch { // Pylint not available } @@ -236,10 +250,14 @@ function runLinter(files) { stdio: ['pipe', 'pipe', 'pipe'], timeout: 30000 }); - results.golint = { - success: !result.stdout || result.stdout.trim() === '', - output: result.stdout - }; + if (result.error && result.error.code === 'ENOENT') { + results.golint = null; + } else { + results.golint 
= { + success: !result.stdout || result.stdout.trim() === '', + output: result.stdout + }; + } } catch { // golint not available } @@ -251,32 +269,29 @@ function runLinter(files) { /** * Core logic — exported for direct invocation * @param {string} rawInput - Raw JSON string from stdin - * @returns {string} The original input (pass-through) + * @returns {{output:string, exitCode:number}} Pass-through output and exit code */ -function run(rawInput) { +function evaluate(rawInput) { try { const input = JSON.parse(rawInput); const command = input.tool_input?.command || ''; // Only run for git commit commands if (!command.includes('git commit')) { - return rawInput; + return { output: rawInput, exitCode: 0 }; } // Check if this is an amend (skip checks for amends to avoid blocking) if (command.includes('--amend')) { - return rawInput; + return { output: rawInput, exitCode: 0 }; } - const issues = []; - const warnings = []; - // Get staged files const stagedFiles = getStagedFiles(); if (stagedFiles.length === 0) { console.error('[Hook] No staged files found. 
Use "git add" to stage files first.'); - return rawInput; + return { output: rawInput, exitCode: 0 }; } console.error(`[Hook] Checking ${stagedFiles.length} staged file(s)...`); @@ -285,6 +300,8 @@ function run(rawInput) { const filesToCheck = stagedFiles.filter(shouldCheckFile); let totalIssues = 0; let errorCount = 0; + let warningCount = 0; + let infoCount = 0; for (const file of filesToCheck) { const fileIssues = findFileIssues(file); @@ -295,6 +312,8 @@ function run(rawInput) { console.error(` ${icon} Line ${issue.line}: ${issue.message}`); totalIssues++; if (issue.severity === 'error') errorCount++; + if (issue.severity === 'warning') warningCount++; + if (issue.severity === 'info') infoCount++; } } } @@ -308,6 +327,8 @@ function run(rawInput) { if (issue.suggestion) { console.error(` 💡 ${issue.suggestion}`); } + totalIssues++; + warningCount++; } } @@ -317,25 +338,31 @@ function run(rawInput) { if (lintResults.eslint && !lintResults.eslint.success) { console.error('\n🔍 ESLint Issues:'); console.error(lintResults.eslint.output); + totalIssues++; + errorCount++; } if (lintResults.pylint && !lintResults.pylint.success) { console.error('\n🔍 Pylint Issues:'); console.error(lintResults.pylint.output); + totalIssues++; + errorCount++; } if (lintResults.golint && !lintResults.golint.success) { console.error('\n🔍 golint Issues:'); console.error(lintResults.golint.output); + totalIssues++; + errorCount++; } // Summary if (totalIssues > 0) { - console.error(`\n📊 Summary: ${totalIssues} issue(s) found (${errorCount} error(s), ${totalIssues - errorCount} warning(s))`); + console.error(`\n📊 Summary: ${totalIssues} issue(s) found (${errorCount} error(s), ${warningCount} warning(s), ${infoCount} info)`); if (errorCount > 0) { console.error('\n[Hook] ❌ Commit blocked due to critical issues. Fix them before committing.'); - process.exit(2); + return { output: rawInput, exitCode: 2 }; } else { console.error('\n[Hook] ⚠️ Warnings found. 
/**
 * Tests for scripts/hooks/pre-bash-commit-quality.js
 *
 * Run with: node tests/hooks/pre-bash-commit-quality.test.js
 */

const assert = require('assert');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawnSync } = require('child_process');

const hook = require('../../scripts/hooks/pre-bash-commit-quality');

/**
 * Run one test case; report pass/fail without throwing.
 * @param {string} name
 * @param {Function} fn - Throws on failure.
 * @returns {boolean} true when the case passed.
 */
function test(name, fn) {
  try {
    fn();
    console.log(`  ✓ ${name}`);
    return true;
  } catch (err) {
    console.log(`  ✗ ${name}`);
    console.log(`    Error: ${err.message}`);
    return false;
  }
}

/**
 * Run a git command in a directory and fail loudly if it errors,
 * instead of letting a missing/misconfigured git surface later as a
 * confusing assertion failure.
 * @param {string} cwd
 * @param {string[]} args
 * @returns {object} spawnSync result.
 */
function git(cwd, args) {
  const result = spawnSync('git', args, { cwd, stdio: 'pipe', encoding: 'utf8' });
  if (result.error || result.status !== 0) {
    throw new Error(`git ${args.join(' ')} failed: ${result.stderr || result.error}`);
  }
  return result;
}

/**
 * Execute fn inside a throwaway initialized git repository; always
 * restores the previous cwd and deletes the repo afterwards.
 * @param {Function} fn - Receives the repo directory path.
 */
function inTempRepo(fn) {
  const prevCwd = process.cwd();
  const repoDir = fs.mkdtempSync(path.join(os.tmpdir(), 'pre-bash-commit-quality-'));

  try {
    git(repoDir, ['init']);
    git(repoDir, ['config', 'user.name', 'ECC Test']);
    git(repoDir, ['config', 'user.email', 'ecc@example.com']);
    process.chdir(repoDir);
    return fn(repoDir);
  } finally {
    process.chdir(prevCwd);
    fs.rmSync(repoDir, { recursive: true, force: true });
  }
}

let passed = 0;
let failed = 0;

console.log('\nPre-Bash Commit Quality Hook Tests');
console.log('==================================\n');

if (test('evaluate blocks commits when staged snapshot contains debugger', () => {
  inTempRepo(repoDir => {
    const filePath = path.join(repoDir, 'index.js');
    fs.writeFileSync(filePath, 'function main() {\n  debugger;\n}\n', 'utf8');
    git(repoDir, ['add', 'index.js']);

    const input = JSON.stringify({ tool_input: { command: 'git commit -m "fix: test debugger hook"' } });
    const result = hook.evaluate(input);

    assert.strictEqual(result.output, input, 'should preserve stdin payload');
    assert.strictEqual(result.exitCode, 2, 'should block commit when staged snapshot has debugger');
  });
})) passed++; else failed++;

if (test('evaluate inspects staged snapshot instead of newer working tree content', () => {
  inTempRepo(repoDir => {
    const filePath = path.join(repoDir, 'index.js');
    fs.writeFileSync(filePath, 'function main() {\n  return 1;\n}\n', 'utf8');
    git(repoDir, ['add', 'index.js']);

    // Working tree diverges after staging; hook should still inspect staged content.
    fs.writeFileSync(filePath, 'function main() {\n  debugger;\n  return 1;\n}\n', 'utf8');

    const input = JSON.stringify({ tool_input: { command: 'git commit -m "fix: staged snapshot only"' } });
    const result = hook.evaluate(input);

    assert.strictEqual(result.output, input, 'should preserve stdin payload');
    assert.strictEqual(result.exitCode, 0, 'should ignore unstaged debugger in working tree');
  });
})) passed++; else failed++;

console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
process.exit(failed > 0 ? 1 : 0);