From 369a7d9a1af6f6e6cad6128f33e177df34f6f99a Mon Sep 17 00:00:00 2001 From: SmartFlow Developer Date: Wed, 7 Jan 2026 23:40:34 +0100 Subject: [PATCH] feat(dev-tools): add comprehensive developer experience tooling This contribution adds several developer experience improvements to help contributors be more productive and maintain code quality: ## Pre-commit Hook Script (ops/scripts/pre-commit.sh) - Automated code quality checks before commits - Go formatting (gofmt) and imports organization (goimports) - Go vet static analysis for common mistakes - Debug statement detection (prevents accidental commits) - TODO format validation (matches existing todo-checker patterns) - TypeScript type checking for packages - Solidity formatting via forge fmt - Install mode (--install) to set up as git hook - Quick mode (--quick) for fast checks without tests ## Dev Environment Check Script (ops/scripts/check-dev-environment.sh) - Validates all required development tools are installed - Checks tool versions match project requirements - Core tools: git, go, node, pnpm, make, jq - Blockchain tools: foundry (forge/cast/anvil) with version pinning - Optional tools: docker, direnv, golangci-lint, slither - Environment validation: .nvmrc, go modules, npm deps, submodules - Disk space validation - JSON output mode (--json) for CI integration - Auto-fix mode (--fix) for common issues ## Performance Profiling Utility (op-service/perf) - Timer with configurable thresholds and structured logging - Aggregated metrics collection (count, min, max, avg, total) - Thread-safe concurrent access - Generic helper functions for measuring any function - Context-aware timing with cancellation support - Comprehensive test coverage with benchmarks ## Makefile Targets - make dev-check: Validate development environment - make dev-check-ci: JSON output for CI pipelines - make dev-check-fix: Auto-fix common issues - make install-hooks: Install pre-commit hook - make pre-commit: Run pre-commit checks manually - make pre-commit-quick: Fast checks only - make dev-setup: Full onboarding (check + install hooks) --- Makefile | 41 +++ op-service/perf/perf.go | 328 +++++++++++++++++++++ op-service/perf/perf_test.go | 292 +++++++++++++++++++ ops/scripts/check-dev-environment.sh | 417 +++++++++++++++++++++++++++ ops/scripts/pre-commit.sh | 336 +++++++++++++++++++++ 5 files changed, 1414 insertions(+) create mode 100644 op-service/perf/perf.go create mode 100644 op-service/perf/perf_test.go create mode 100755 ops/scripts/check-dev-environment.sh create mode 100755 ops/scripts/pre-commit.sh diff --git a/Makefile b/Makefile index 1f7f916674086..60b19025a7a73 100644 --- a/Makefile +++ b/Makefile @@ -242,3 +242,44 @@ install-geth: go install -v github.com/ethereum/go-ethereum/cmd/geth@$(shell jq -r .geth < versions.json); \ echo "Installed geth!"; true) .PHONY: install-geth + +# Development Environment Tools +# ============================= + +# Check development environment for required tools and configurations +dev-check: + @echo "Checking development environment..." 
+ @./ops/scripts/check-dev-environment.sh +.PHONY: dev-check + +# Check development environment and output JSON for CI +dev-check-ci: + @./ops/scripts/check-dev-environment.sh --json +.PHONY: dev-check-ci + +# Check development environment and attempt to fix issues +dev-check-fix: + @./ops/scripts/check-dev-environment.sh --fix +.PHONY: dev-check-fix + +# Install pre-commit hook +install-hooks: + @./ops/scripts/pre-commit.sh --install +.PHONY: install-hooks + +# Run pre-commit checks manually +pre-commit: + @./ops/scripts/pre-commit.sh +.PHONY: pre-commit + +# Run quick pre-commit checks (no tests) +pre-commit-quick: + @./ops/scripts/pre-commit.sh --quick +.PHONY: pre-commit-quick + +# Developer onboarding: check environment and install hooks +dev-setup: dev-check install-hooks + @echo "" + @echo "Development environment is ready!" + @echo "Pre-commit hooks have been installed." +.PHONY: dev-setup diff --git a/op-service/perf/perf.go b/op-service/perf/perf.go new file mode 100644 index 0000000000000..64ba819912eea --- /dev/null +++ b/op-service/perf/perf.go @@ -0,0 +1,328 @@ +// Package perf provides utilities for measuring and reporting execution time +// of operations with configurable thresholds and structured logging. +// +// This package is designed for development and debugging purposes, helping +// developers identify performance bottlenecks and track execution times +// across the codebase. +// +// Example usage: +// +// timer := perf.NewTimer("database_query") +// defer timer.Done() +// // ... perform operation +// +// Or with thresholds: +// +// timer := perf.NewTimerWithThreshold("rpc_call", 100*time.Millisecond) +// defer timer.Done() // Will log warning if exceeds 100ms +package perf + +import ( + "context" + "fmt" + "sync" + "sync/atomic" + "time" + + "github.com/ethereum/go-ethereum/log" +) + +// Timer measures the execution time of an operation +type Timer struct { + name string + start time.Time + threshold time.Duration + logger log.Logger + tags map[string]string + stopped atomic.Bool +} + +// TimerOption configures a Timer +type TimerOption func(*Timer) + +// WithThreshold sets a warning threshold for the timer. +// If the operation takes longer than the threshold, a warning is logged. +func WithThreshold(d time.Duration) TimerOption { + return func(t *Timer) { + t.threshold = d + } +} + +// WithLogger sets a custom logger for the timer +func WithLogger(l log.Logger) TimerOption { + return func(t *Timer) { + t.logger = l + } +} + +// WithTag adds a tag to the timer for additional context in logs +func WithTag(key, value string) TimerOption { + return func(t *Timer) { + if t.tags == nil { + t.tags = make(map[string]string) + } + t.tags[key] = value + } +} + +// NewTimer creates a new timer that starts immediately +func NewTimer(name string, opts ...TimerOption) *Timer { + t := &Timer{ + name: name, + start: time.Now(), + logger: log.Root(), + } + for _, opt := range opts { + opt(t) + } + return t +} + +// NewTimerWithThreshold creates a new timer with a warning threshold +func NewTimerWithThreshold(name string, threshold time.Duration) *Timer { + return NewTimer(name, WithThreshold(threshold)) +} + +// Elapsed returns the duration since the timer started +func (t *Timer) Elapsed() time.Duration { + return time.Since(t.start) +} + +// Done stops the timer and logs the elapsed time. +// It is safe to call Done multiple times; only the first call has effect. 
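+// The elapsed duration is returned; calls after the first return zero.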
+func (t *Timer) Done() time.Duration { + if t.stopped.Swap(true) { + return 0 // Already stopped + } + + elapsed := t.Elapsed() + + // Build log context + ctx := []interface{}{ + "operation", t.name, + "duration_ms", elapsed.Milliseconds(), + "duration", elapsed.String(), + } + + // Add tags to context + for k, v := range t.tags { + ctx = append(ctx, k, v) + } + + // Log based on threshold + if t.threshold > 0 && elapsed > t.threshold { + t.logger.Warn("Operation exceeded threshold", + append(ctx, "threshold_ms", t.threshold.Milliseconds())...) + } else { + t.logger.Debug("Operation completed", ctx...) + } + + return elapsed +} + +// DoneWithResult stops the timer and returns both duration and any provided result +func (t *Timer) DoneWithResult(result interface{}) (time.Duration, interface{}) { + return t.Done(), result +} + +// Metrics provides aggregated timing statistics for operations +type Metrics struct { + mu sync.RWMutex + entries map[string]*MetricEntry + logger log.Logger +} + +// MetricEntry holds aggregated statistics for a single operation type +type MetricEntry struct { + Name string + Count int64 + TotalTime time.Duration + MinTime time.Duration + MaxTime time.Duration + LastTime time.Duration + Threshold time.Duration + Violations int64 // Number of times threshold was exceeded +} + +// NewMetrics creates a new metrics collector +func NewMetrics() *Metrics { + return &Metrics{ + entries: make(map[string]*MetricEntry), + logger: log.Root(), + } +} + +// SetLogger sets the logger for the metrics collector +func (m *Metrics) SetLogger(l log.Logger) { + m.logger = l +} + +// Record adds a timing measurement for the given operation +func (m *Metrics) Record(name string, duration time.Duration) { + m.RecordWithThreshold(name, duration, 0) +} + +// RecordWithThreshold adds a timing measurement with threshold checking +func (m *Metrics) RecordWithThreshold(name string, duration time.Duration, threshold time.Duration) { + m.mu.Lock() + defer m.mu.Unlock() + + entry, exists := m.entries[name] + if !exists { + entry = &MetricEntry{ + Name: name, + MinTime: duration, + MaxTime: duration, + Threshold: threshold, + } + m.entries[name] = entry + } + + entry.Count++ + entry.TotalTime += duration + entry.LastTime = duration + + if duration < entry.MinTime { + entry.MinTime = duration + } + if duration > entry.MaxTime { + entry.MaxTime = duration + } + if threshold > 0 && duration > threshold { + entry.Violations++ + } +} + +// Get returns the metrics entry for the given operation name +func (m *Metrics) Get(name string) *MetricEntry { + m.mu.RLock() + defer m.mu.RUnlock() + return m.entries[name] +} + +// All returns all recorded metrics entries +func (m *Metrics) All() map[string]*MetricEntry { + m.mu.RLock() + defer m.mu.RUnlock() + + result := make(map[string]*MetricEntry, len(m.entries)) + for k, v := range m.entries { + // Return a copy to prevent races + entryCopy := *v + result[k] = &entryCopy + } + return result +} + +// Reset clears all recorded metrics +func (m *Metrics) Reset() { + m.mu.Lock() + defer m.mu.Unlock() + m.entries = make(map[string]*MetricEntry) +} + +// Summary returns a human-readable summary of all metrics +func (m *Metrics) Summary() string { + m.mu.RLock() + defer m.mu.RUnlock() + + if len(m.entries) == 0 { + return "No metrics recorded" + } + + var result string + result += fmt.Sprintf("Performance Metrics Summary (%d operations)\n", len(m.entries)) + result += "============================================\n" + + for name, entry := range m.entries { + avgTime 
:= time.Duration(0) + if entry.Count > 0 { + avgTime = entry.TotalTime / time.Duration(entry.Count) + } + + result += fmt.Sprintf("\n%s:\n", name) + result += fmt.Sprintf(" Count: %d\n", entry.Count) + result += fmt.Sprintf(" Avg: %v\n", avgTime) + result += fmt.Sprintf(" Min: %v\n", entry.MinTime) + result += fmt.Sprintf(" Max: %v\n", entry.MaxTime) + result += fmt.Sprintf(" Total: %v\n", entry.TotalTime) + + if entry.Threshold > 0 { + result += fmt.Sprintf(" Threshold: %v (violated %d times)\n", + entry.Threshold, entry.Violations) + } + } + + return result +} + +// LogSummary logs the metrics summary +func (m *Metrics) LogSummary() { + for name, entry := range m.All() { + avgTime := time.Duration(0) + if entry.Count > 0 { + avgTime = entry.TotalTime / time.Duration(entry.Count) + } + + m.logger.Info("Performance metric", + "operation", name, + "count", entry.Count, + "avg_ms", avgTime.Milliseconds(), + "min_ms", entry.MinTime.Milliseconds(), + "max_ms", entry.MaxTime.Milliseconds(), + "total_ms", entry.TotalTime.Milliseconds(), + "violations", entry.Violations, + ) + } +} + +// Average returns the average duration for the given operation +func (e *MetricEntry) Average() time.Duration { + if e.Count == 0 { + return 0 + } + return e.TotalTime / time.Duration(e.Count) +} + +// Global metrics instance for convenience +var globalMetrics = NewMetrics() + +// GlobalMetrics returns the global metrics instance +func GlobalMetrics() *Metrics { + return globalMetrics +} + +// RecordGlobal records a timing to the global metrics instance +func RecordGlobal(name string, duration time.Duration) { + globalMetrics.Record(name, duration) +} + +// TrackContext creates a timer that can be used with context cancellation +func TrackContext(ctx context.Context, name string, opts ...TimerOption) (*Timer, context.CancelFunc) { + ctx, cancel := context.WithCancel(ctx) + timer := NewTimer(name, opts...) + + // Create a wrapper cancel that also stops the timer + wrappedCancel := func() { + timer.Done() + cancel() + } + + return timer, wrappedCancel +} + +// MeasureFunc wraps a function and returns its execution time +func MeasureFunc[T any](name string, fn func() T, opts ...TimerOption) (T, time.Duration) { + timer := NewTimer(name, opts...) + result := fn() + elapsed := timer.Done() + return result, elapsed +} + +// MeasureFuncErr wraps a function that returns an error and measures its execution time +func MeasureFuncErr[T any](name string, fn func() (T, error), opts ...TimerOption) (T, error, time.Duration) { + timer := NewTimer(name, opts...) 
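+	// Run the wrapped function; the timer is stopped (and logged) even if fn returns an error.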
+ result, err := fn() + elapsed := timer.Done() + return result, err, elapsed +} diff --git a/op-service/perf/perf_test.go b/op-service/perf/perf_test.go new file mode 100644 index 0000000000000..2ec80a9f09b7a --- /dev/null +++ b/op-service/perf/perf_test.go @@ -0,0 +1,292 @@ +package perf + +import ( + "context" + "sync" + "testing" + "time" + + "github.com/ethereum/go-ethereum/log" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewTimer(t *testing.T) { + timer := NewTimer("test_operation") + require.NotNil(t, timer) + assert.Equal(t, "test_operation", timer.name) + assert.False(t, timer.stopped.Load()) +} + +func TestTimerElapsed(t *testing.T) { + timer := NewTimer("test_operation") + time.Sleep(10 * time.Millisecond) + elapsed := timer.Elapsed() + assert.GreaterOrEqual(t, elapsed, 10*time.Millisecond) +} + +func TestTimerDone(t *testing.T) { + timer := NewTimer("test_operation") + time.Sleep(5 * time.Millisecond) + elapsed := timer.Done() + + assert.GreaterOrEqual(t, elapsed, 5*time.Millisecond) + assert.True(t, timer.stopped.Load()) +} + +func TestTimerDoneIdempotent(t *testing.T) { + timer := NewTimer("test_operation") + first := timer.Done() + second := timer.Done() + third := timer.Done() + + assert.NotZero(t, first) + assert.Zero(t, second, "Second Done() should return zero") + assert.Zero(t, third, "Third Done() should return zero") +} + +func TestTimerWithThreshold(t *testing.T) { + threshold := 50 * time.Millisecond + timer := NewTimerWithThreshold("test_operation", threshold) + + assert.Equal(t, threshold, timer.threshold) +} + +func TestTimerWithOptions(t *testing.T) { + logger := log.New() + timer := NewTimer("test_operation", + WithThreshold(100*time.Millisecond), + WithLogger(logger), + WithTag("key1", "value1"), + WithTag("key2", "value2"), + ) + + assert.Equal(t, 100*time.Millisecond, timer.threshold) + assert.Equal(t, logger, timer.logger) + assert.Equal(t, "value1", timer.tags["key1"]) + assert.Equal(t, "value2", timer.tags["key2"]) +} + +func TestDoneWithResult(t *testing.T) { + timer := NewTimer("test_operation") + result := "test_result" + + duration, res := timer.DoneWithResult(result) + + assert.NotZero(t, duration) + assert.Equal(t, "test_result", res) +} + +func TestMetrics(t *testing.T) { + m := NewMetrics() + + // Record some timings + m.Record("operation_a", 10*time.Millisecond) + m.Record("operation_a", 20*time.Millisecond) + m.Record("operation_a", 30*time.Millisecond) + m.Record("operation_b", 5*time.Millisecond) + + // Check operation_a metrics + entryA := m.Get("operation_a") + require.NotNil(t, entryA) + assert.Equal(t, int64(3), entryA.Count) + assert.Equal(t, 60*time.Millisecond, entryA.TotalTime) + assert.Equal(t, 10*time.Millisecond, entryA.MinTime) + assert.Equal(t, 30*time.Millisecond, entryA.MaxTime) + assert.Equal(t, 30*time.Millisecond, entryA.LastTime) + assert.Equal(t, 20*time.Millisecond, entryA.Average()) + + // Check operation_b metrics + entryB := m.Get("operation_b") + require.NotNil(t, entryB) + assert.Equal(t, int64(1), entryB.Count) + + // Check non-existent operation + entryC := m.Get("operation_c") + assert.Nil(t, entryC) +} + +func TestMetricsWithThreshold(t *testing.T) { + m := NewMetrics() + + threshold := 15 * time.Millisecond + + // Record some timings with threshold + m.RecordWithThreshold("operation", 10*time.Millisecond, threshold) // Under + m.RecordWithThreshold("operation", 20*time.Millisecond, threshold) // Over + m.RecordWithThreshold("operation", 12*time.Millisecond, 
threshold) // Under + m.RecordWithThreshold("operation", 25*time.Millisecond, threshold) // Over + + entry := m.Get("operation") + require.NotNil(t, entry) + assert.Equal(t, int64(4), entry.Count) + assert.Equal(t, int64(2), entry.Violations) +} + +func TestMetricsAll(t *testing.T) { + m := NewMetrics() + + m.Record("op1", 10*time.Millisecond) + m.Record("op2", 20*time.Millisecond) + m.Record("op3", 30*time.Millisecond) + + all := m.All() + + assert.Len(t, all, 3) + assert.NotNil(t, all["op1"]) + assert.NotNil(t, all["op2"]) + assert.NotNil(t, all["op3"]) +} + +func TestMetricsReset(t *testing.T) { + m := NewMetrics() + + m.Record("operation", 10*time.Millisecond) + assert.NotNil(t, m.Get("operation")) + + m.Reset() + assert.Nil(t, m.Get("operation")) + assert.Len(t, m.All(), 0) +} + +func TestMetricsSummary(t *testing.T) { + m := NewMetrics() + + // Empty metrics + summary := m.Summary() + assert.Equal(t, "No metrics recorded", summary) + + // With some data + m.Record("test_op", 100*time.Millisecond) + summary = m.Summary() + assert.Contains(t, summary, "Performance Metrics Summary") + assert.Contains(t, summary, "test_op") + assert.Contains(t, summary, "Count:") +} + +func TestMetricsConcurrency(t *testing.T) { + m := NewMetrics() + var wg sync.WaitGroup + + // Concurrent writes + for i := 0; i < 100; i++ { + wg.Add(1) + go func(i int) { + defer wg.Done() + m.Record("operation", time.Duration(i)*time.Millisecond) + }(i) + } + + // Concurrent reads + for i := 0; i < 50; i++ { + wg.Add(1) + go func() { + defer wg.Done() + _ = m.Get("operation") + _ = m.All() + }() + } + + wg.Wait() + + entry := m.Get("operation") + require.NotNil(t, entry) + assert.Equal(t, int64(100), entry.Count) +} + +func TestMetricEntryAverage(t *testing.T) { + // Test zero count + entry := &MetricEntry{Count: 0, TotalTime: 0} + assert.Zero(t, entry.Average()) + + // Test with data + entry = &MetricEntry{Count: 4, TotalTime: 100 * time.Millisecond} + assert.Equal(t, 25*time.Millisecond, entry.Average()) +} + +func TestGlobalMetrics(t *testing.T) { + // Reset global metrics first + GlobalMetrics().Reset() + + RecordGlobal("global_op", 50*time.Millisecond) + RecordGlobal("global_op", 100*time.Millisecond) + + entry := GlobalMetrics().Get("global_op") + require.NotNil(t, entry) + assert.Equal(t, int64(2), entry.Count) + assert.Equal(t, 150*time.Millisecond, entry.TotalTime) +} + +func TestTrackContext(t *testing.T) { + ctx := context.Background() + timer, cancel := TrackContext(ctx, "context_op") + + require.NotNil(t, timer) + require.NotNil(t, cancel) + + // Simulate some work + time.Sleep(5 * time.Millisecond) + + // Cancel should stop the timer + cancel() + assert.True(t, timer.stopped.Load()) +} + +func TestMeasureFunc(t *testing.T) { + result, duration := MeasureFunc("test_func", func() string { + time.Sleep(5 * time.Millisecond) + return "result" + }) + + assert.Equal(t, "result", result) + assert.GreaterOrEqual(t, duration, 5*time.Millisecond) +} + +func TestMeasureFuncErr(t *testing.T) { + // Test successful function + result, err, duration := MeasureFuncErr("test_func", func() (int, error) { + time.Sleep(5 * time.Millisecond) + return 42, nil + }) + + assert.Equal(t, 42, result) + assert.NoError(t, err) + assert.GreaterOrEqual(t, duration, 5*time.Millisecond) + + // Test function with error + _, errResult, _ := MeasureFuncErr("test_func_err", func() (int, error) { + return 0, assert.AnError + }) + + assert.Error(t, errResult) +} + +// Benchmark tests +func BenchmarkTimerCreation(b *testing.B) { + for i := 0; i 
< b.N; i++ { + timer := NewTimer("benchmark_op") + timer.Done() + } +} + +func BenchmarkMetricsRecord(b *testing.B) { + m := NewMetrics() + duration := 10 * time.Millisecond + + b.ResetTimer() + for i := 0; i < b.N; i++ { + m.Record("benchmark_op", duration) + } +} + +func BenchmarkMetricsRecordConcurrent(b *testing.B) { + m := NewMetrics() + duration := 10 * time.Millisecond + + b.ResetTimer() + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + m.Record("benchmark_op", duration) + } + }) +} diff --git a/ops/scripts/check-dev-environment.sh b/ops/scripts/check-dev-environment.sh new file mode 100755 index 0000000000000..9f3cd668d1c4a --- /dev/null +++ b/ops/scripts/check-dev-environment.sh @@ -0,0 +1,417 @@ +#!/usr/bin/env bash +# Development Environment Health Check for the Optimism Monorepo +# +# This script verifies that all required development dependencies are properly +# installed and configured. Run this after cloning the repository or when +# experiencing build/test issues. +# +# Usage: +# ./ops/scripts/check-dev-environment.sh [--fix] [--verbose] +# +# Options: +# --fix Attempt to fix common issues automatically +# --verbose Show detailed version information +# --json Output results as JSON for CI integration + +set -uo pipefail + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +MAGENTA='\033[0;35m' +NC='\033[0m' # No Color +BOLD='\033[1m' + +# Configuration +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" +VERSIONS_FILE="$REPO_ROOT/versions.json" + +# Flags +FIX_MODE=false +VERBOSE=false +JSON_OUTPUT=false + +# Results tracking +declare -A RESULTS +WARNINGS=0 +ERRORS=0 + +# Parse arguments +for arg in "$@"; do + case $arg in + --fix) + FIX_MODE=true + shift + ;; + --verbose) + VERBOSE=true + shift + ;; + --json) + JSON_OUTPUT=true + shift + ;; + --help|-h) + echo "Usage: $0 [--fix] [--verbose] [--json]" + echo "" + echo "Options:" + echo " --fix Attempt to fix common issues automatically" + echo " --verbose Show detailed version information" + echo " --json Output results as JSON for CI integration" + exit 0 + ;; + esac +done + +# Utility functions +print_header() { + if [[ "$JSON_OUTPUT" == "false" ]]; then + echo "" + echo -e "${BLUE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo -e "${CYAN}${BOLD} $1${NC}" + echo -e "${BLUE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + fi +} + +print_check() { + if [[ "$JSON_OUTPUT" == "false" ]]; then + echo -e "${YELLOW}▶${NC} Checking: $1" + fi +} + +print_ok() { + if [[ "$JSON_OUTPUT" == "false" ]]; then + echo -e " ${GREEN}✓${NC} $1" + fi +} + +print_warn() { + if [[ "$JSON_OUTPUT" == "false" ]]; then + echo -e " ${YELLOW}!${NC} $1" + fi + ((WARNINGS++)) +} + +print_err() { + if [[ "$JSON_OUTPUT" == "false" ]]; then + echo -e " ${RED}✗${NC} $1" + fi + ((ERRORS++)) +} + +print_info() { + if [[ "$JSON_OUTPUT" == "false" ]] && [[ "$VERBOSE" == "true" ]]; then + echo -e " ${MAGENTA}ℹ${NC} $1" + fi +} + +# Version comparison function +version_gte() { + # Returns 0 if $1 >= $2 + printf '%s\n%s' "$2" "$1" | sort -V -C +} + +version_extract() { + # Extract version number from version string + echo "$1" | grep -oE '[0-9]+\.[0-9]+(\.[0-9]+)?' 
| head -1 +} + +# Check if command exists +check_command() { + local cmd="$1" + local name="$2" + local min_version="${3:-}" + local version_cmd="${4:-$cmd --version}" + local install_hint="${5:-}" + + print_check "$name" + + if ! command -v "$cmd" &> /dev/null; then + print_err "$name is not installed" + RESULTS["$name"]="missing" + if [[ -n "$install_hint" ]]; then + echo -e " ${YELLOW}Install: $install_hint${NC}" + fi + return 1 + fi + + # Get version + local version_output + version_output=$(eval "$version_cmd" 2>&1 | head -1) + local version + version=$(version_extract "$version_output") + + if [[ -n "$min_version" ]] && [[ -n "$version" ]]; then + if version_gte "$version" "$min_version"; then + print_ok "$name v$version (>= $min_version required)" + RESULTS["$name"]="ok" + else + print_warn "$name v$version (>= $min_version recommended)" + RESULTS["$name"]="outdated" + fi + else + print_ok "$name installed ($version_output)" + RESULTS["$name"]="ok" + fi + + print_info "Path: $(which "$cmd")" + + return 0 +} + +# ============================================================================ +# Main Checks +# ============================================================================ + +cd "$REPO_ROOT" + +if [[ "$JSON_OUTPUT" == "false" ]]; then + echo -e "${BOLD}Optimism Development Environment Health Check${NC}" + echo -e "Repository: ${CYAN}$REPO_ROOT${NC}" + echo "" +fi + +# ============================================================================ +# Core Development Tools +# ============================================================================ +print_header "Core Development Tools" + +check_command "git" "Git" "2.0" "git --version" "https://git-scm.com/" + +check_command "go" "Go" "1.21" "go version" "https://go.dev/doc/install" + +check_command "node" "Node.js" "20.0" "node --version" "https://nodejs.org/" + +check_command "pnpm" "pnpm" "8.0" "pnpm --version" "npm install -g pnpm" + +check_command "make" "Make" "3.0" "make --version" "Install via system package manager" + +check_command "jq" "jq" "1.6" "jq --version" "Install via system package manager" + +# ============================================================================ +# Blockchain Development Tools +# ============================================================================ +print_header "Blockchain Development Tools" + +# Check for foundry with pinned version +print_check "Foundry (forge)" +if command -v forge &> /dev/null; then + FORGE_VERSION=$(forge --version 2>&1 | head -1) + + # Check if versions.json exists and has foundry version + if [[ -f "$VERSIONS_FILE" ]]; then + EXPECTED_FOUNDRY=$(jq -r '.foundry // empty' "$VERSIONS_FILE" 2>/dev/null) + if [[ -n "$EXPECTED_FOUNDRY" ]]; then + if echo "$FORGE_VERSION" | grep -q "$EXPECTED_FOUNDRY"; then + print_ok "Foundry version matches pinned version" + RESULTS["Foundry"]="ok" + else + print_warn "Foundry version mismatch" + echo -e " ${YELLOW}Expected: $EXPECTED_FOUNDRY${NC}" + echo -e " ${YELLOW}Found: $FORGE_VERSION${NC}" + echo -e " ${YELLOW}Run: pnpm update:foundry${NC}" + RESULTS["Foundry"]="mismatch" + fi + else + print_ok "Foundry installed ($FORGE_VERSION)" + RESULTS["Foundry"]="ok" + fi + else + print_ok "Foundry installed ($FORGE_VERSION)" + RESULTS["Foundry"]="ok" + fi + print_info "Path: $(which forge)" +else + print_err "Foundry is not installed" + RESULTS["Foundry"]="missing" + echo -e " ${YELLOW}Install: curl -L https://foundry.paradigm.xyz | bash${NC}" +fi + +check_command "cast" "Cast (Foundry)" "" "cast --version" + +check_command 
"anvil" "Anvil (Foundry)" "" "anvil --version" + +# ============================================================================ +# Optional Tools +# ============================================================================ +print_header "Optional Tools" + +check_command "docker" "Docker" "24.0" "docker --version" "https://docs.docker.com/get-docker/" + +print_check "Docker Compose" +if docker compose version &> /dev/null; then + COMPOSE_VERSION=$(docker compose version 2>&1) + print_ok "Docker Compose installed ($COMPOSE_VERSION)" + RESULTS["Docker Compose"]="ok" +else + print_warn "Docker Compose not available" + RESULTS["Docker Compose"]="missing" +fi + +check_command "direnv" "direnv" "2.0" "direnv --version" "https://direnv.net" + +check_command "golangci-lint" "golangci-lint" "" "golangci-lint --version" "go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest" + +check_command "slither" "Slither" "" "slither --version" "pip3 install slither-analyzer" + +# ============================================================================ +# Environment Configuration +# ============================================================================ +print_header "Environment Configuration" + +# Check .nvmrc +print_check "Node version configuration" +if [[ -f "$REPO_ROOT/.nvmrc" ]]; then + NVMRC_VERSION=$(cat "$REPO_ROOT/.nvmrc") + CURRENT_NODE=$(node --version 2>/dev/null | tr -d 'v') + + if [[ "$CURRENT_NODE" == "$NVMRC_VERSION"* ]]; then + print_ok "Node version matches .nvmrc (v$NVMRC_VERSION)" + RESULTS["Node version"]="ok" + else + print_warn "Node version mismatch" + echo -e " ${YELLOW}Expected: v$NVMRC_VERSION, Found: v$CURRENT_NODE${NC}" + echo -e " ${YELLOW}Run: nvm use${NC}" + RESULTS["Node version"]="mismatch" + fi +else + print_info ".nvmrc not found" + RESULTS["Node version"]="unknown" +fi + +# Check Go modules +print_check "Go modules" +if [[ -f "$REPO_ROOT/go.mod" ]]; then + if go mod verify &> /dev/null; then + print_ok "Go modules verified" + RESULTS["Go modules"]="ok" + else + print_warn "Go module verification failed" + echo -e " ${YELLOW}Run: go mod download${NC}" + RESULTS["Go modules"]="error" + + if [[ "$FIX_MODE" == "true" ]]; then + echo -e " ${CYAN}Attempting to fix...${NC}" + go mod download + fi + fi +else + print_info "go.mod not found" + RESULTS["Go modules"]="unknown" +fi + +# Check npm dependencies +print_check "npm dependencies" +if [[ -f "$REPO_ROOT/pnpm-lock.yaml" ]]; then + if [[ -d "$REPO_ROOT/node_modules" ]]; then + print_ok "node_modules exists" + RESULTS["npm deps"]="ok" + else + print_warn "node_modules not found" + echo -e " ${YELLOW}Run: pnpm install${NC}" + RESULTS["npm deps"]="missing" + + if [[ "$FIX_MODE" == "true" ]]; then + echo -e " ${CYAN}Attempting to fix...${NC}" + pnpm install + fi + fi +else + print_info "pnpm-lock.yaml not found" + RESULTS["npm deps"]="unknown" +fi + +# Check git submodules +print_check "Git submodules" +if [[ -f "$REPO_ROOT/.gitmodules" ]]; then + UNINIT_SUBMODULES=$(git submodule status | grep '^-' | wc -l | tr -d ' ') + if [[ "$UNINIT_SUBMODULES" -eq 0 ]]; then + print_ok "All git submodules initialized" + RESULTS["Submodules"]="ok" + else + print_warn "$UNINIT_SUBMODULES submodule(s) not initialized" + echo -e " ${YELLOW}Run: git submodule update --init --recursive${NC}" + RESULTS["Submodules"]="incomplete" + + if [[ "$FIX_MODE" == "true" ]]; then + echo -e " ${CYAN}Attempting to fix...${NC}" + git submodule update --init --recursive + fi + fi +else + print_info "No .gitmodules file found" + 
RESULTS["Submodules"]="none" +fi + +# ============================================================================ +# Disk Space Check +# ============================================================================ +print_header "System Resources" + +print_check "Available disk space" +AVAILABLE_SPACE=$(df -h "$REPO_ROOT" | awk 'NR==2 {print $4}') +AVAILABLE_BYTES=$(df "$REPO_ROOT" | awk 'NR==2 {print $4}') + +# Require at least 10GB (10485760 KB) +if [[ "$AVAILABLE_BYTES" -gt 10485760 ]]; then + print_ok "Disk space: $AVAILABLE_SPACE available" + RESULTS["Disk space"]="ok" +else + print_warn "Low disk space: $AVAILABLE_SPACE available" + echo -e " ${YELLOW}At least 10GB recommended for builds${NC}" + RESULTS["Disk space"]="low" +fi + +# ============================================================================ +# Summary +# ============================================================================ +print_header "Summary" + +if [[ "$JSON_OUTPUT" == "true" ]]; then + # Output JSON for CI integration + echo "{" + echo " \"errors\": $ERRORS," + echo " \"warnings\": $WARNINGS," + echo " \"checks\": {" + first=true + for key in "${!RESULTS[@]}"; do + if [[ "$first" == "true" ]]; then + first=false + else + echo "," + fi + echo -n " \"$key\": \"${RESULTS[$key]}\"" + done + echo "" + echo " }" + echo "}" +else + if [[ $ERRORS -eq 0 ]] && [[ $WARNINGS -eq 0 ]]; then + echo -e "${GREEN}${BOLD}✓ All checks passed! Your development environment is ready.${NC}" + elif [[ $ERRORS -eq 0 ]]; then + echo -e "${YELLOW}${BOLD}! Environment is usable with $WARNINGS warning(s).${NC}" + echo -e " Review warnings above for optimal setup." + else + echo -e "${RED}${BOLD}✗ Found $ERRORS error(s) and $WARNINGS warning(s).${NC}" + echo -e " Please fix errors before proceeding." + echo "" + echo -e " ${CYAN}Tip: Run with --fix to attempt automatic fixes${NC}" + fi + + echo "" + echo -e "${BLUE}Quick fixes:${NC}" + echo -e " pnpm install - Install npm dependencies" + echo -e " go mod download - Download Go modules" + echo -e " pnpm update:foundry - Update Foundry to pinned version" + echo -e " git submodule update --init --recursive - Initialize submodules" +fi + +# Exit with appropriate code +if [[ $ERRORS -gt 0 ]]; then + exit 1 +fi +exit 0 diff --git a/ops/scripts/pre-commit.sh b/ops/scripts/pre-commit.sh new file mode 100755 index 0000000000000..ca88913dee9c8 --- /dev/null +++ b/ops/scripts/pre-commit.sh @@ -0,0 +1,336 @@ +#!/usr/bin/env bash +# Pre-commit hook for the Optimism monorepo +# This script runs code quality checks before allowing commits +# +# Usage: +# ./ops/scripts/pre-commit.sh [--install] [--quick] +# +# Options: +# --install Install this script as a git pre-commit hook +# --quick Run only fast checks (formatting, no tests) +# +# To install as a git hook: +# ./ops/scripts/pre-commit.sh --install + +set -euo pipefail + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color +BOLD='\033[1m' + +# Configuration +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." 
&& pwd)" +QUICK_MODE=false +INSTALL_MODE=false + +# Parse arguments +for arg in "$@"; do + case $arg in + --quick) + QUICK_MODE=true + shift + ;; + --install) + INSTALL_MODE=true + shift + ;; + --help|-h) + echo "Usage: $0 [--install] [--quick]" + echo "" + echo "Options:" + echo " --install Install this script as a git pre-commit hook" + echo " --quick Run only fast checks (formatting, no tests)" + exit 0 + ;; + esac +done + +# Install as git hook if requested +if [[ "$INSTALL_MODE" == "true" ]]; then + HOOK_PATH="$REPO_ROOT/.git/hooks/pre-commit" + + cat > "$HOOK_PATH" << 'EOF' +#!/usr/bin/env bash +# Auto-generated pre-commit hook - do not edit directly +# To update, run: ./ops/scripts/pre-commit.sh --install + +exec "$(git rev-parse --show-toplevel)/ops/scripts/pre-commit.sh" --quick +EOF + + chmod +x "$HOOK_PATH" + echo -e "${GREEN}✓${NC} Pre-commit hook installed at $HOOK_PATH" + exit 0 +fi + +# Utility functions +print_header() { + echo "" + echo -e "${BLUE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo -e "${CYAN}${BOLD} $1${NC}" + echo -e "${BLUE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +} + +print_step() { + echo -e "${YELLOW}▶${NC} $1" +} + +print_success() { + echo -e "${GREEN}✓${NC} $1" +} + +print_error() { + echo -e "${RED}✗${NC} $1" +} + +print_warning() { + echo -e "${YELLOW}!${NC} $1" +} + +# Track failed checks +FAILED_CHECKS=() + +run_check() { + local name="$1" + local cmd="$2" + + print_step "Running: $name" + + if eval "$cmd" > /dev/null 2>&1; then + print_success "$name passed" + return 0 + else + print_error "$name failed" + FAILED_CHECKS+=("$name") + return 1 + fi +} + +# Change to repo root +cd "$REPO_ROOT" + +print_header "Optimism Pre-Commit Checks" +echo -e "Mode: ${CYAN}$(if $QUICK_MODE; then echo "Quick"; else echo "Full"; fi)${NC}" +echo -e "Repository: ${CYAN}$REPO_ROOT${NC}" + +# Get list of staged Go files +STAGED_GO_FILES=$(git diff --cached --name-only --diff-filter=ACM | grep '\.go$' || true) +STAGED_SOL_FILES=$(git diff --cached --name-only --diff-filter=ACM | grep '\.sol$' || true) +STAGED_TS_FILES=$(git diff --cached --name-only --diff-filter=ACM | grep -E '\.(ts|tsx|js|jsx)$' || true) + +# ============================================================================ +# Check 1: Go Formatting +# ============================================================================ +if [[ -n "$STAGED_GO_FILES" ]]; then + print_header "Go Formatting Check" + + UNFORMATTED_FILES="" + for file in $STAGED_GO_FILES; do + if [[ -f "$file" ]]; then + if ! gofmt -l "$file" | grep -q '^'; then + : # File is formatted + else + UNFORMATTED_FILES="$UNFORMATTED_FILES $file" + fi + fi + done + + if [[ -z "$UNFORMATTED_FILES" ]]; then + print_success "All Go files are properly formatted" + else + print_error "The following Go files need formatting:" + for f in $UNFORMATTED_FILES; do + echo " - $f" + done + echo "" + echo -e "${YELLOW}Run 'gofmt -w ' to fix formatting${NC}" + FAILED_CHECKS+=("Go formatting") + fi +fi + +# ============================================================================ +# Check 2: Go Imports +# ============================================================================ +if [[ -n "$STAGED_GO_FILES" ]] && command -v goimports &> /dev/null; then + print_header "Go Imports Check" + + UNIMPORTED_FILES="" + for file in $STAGED_GO_FILES; do + if [[ -f "$file" ]]; then + if ! 
goimports -l "$file" | grep -q '^'; then + : # File imports are sorted + else + UNIMPORTED_FILES="$UNIMPORTED_FILES $file" + fi + fi + done + + if [[ -z "$UNIMPORTED_FILES" ]]; then + print_success "All Go imports are properly organized" + else + print_warning "The following Go files may have unorganized imports:" + for f in $UNIMPORTED_FILES; do + echo " - $f" + done + echo "" + echo -e "${YELLOW}Run 'goimports -w ' to fix imports${NC}" + fi +fi + +# ============================================================================ +# Check 3: Go Vet (quick static analysis) +# ============================================================================ +if [[ -n "$STAGED_GO_FILES" ]]; then + print_header "Go Vet Check" + + # Get unique directories containing changed Go files + CHANGED_DIRS=$(echo "$STAGED_GO_FILES" | xargs -I{} dirname {} | sort -u) + + VET_FAILED=false + for dir in $CHANGED_DIRS; do + if [[ -d "$dir" ]]; then + if ! go vet "./$dir/..." 2>/dev/null; then + VET_FAILED=true + fi + fi + done + + if [[ "$VET_FAILED" == "false" ]]; then + print_success "Go vet passed" + else + print_error "Go vet found issues" + FAILED_CHECKS+=("Go vet") + fi +fi + +# ============================================================================ +# Check 4: Check for common issues +# ============================================================================ +print_header "Common Issues Check" + +# Check for debug statements +DEBUG_PATTERNS="fmt\.Println|log\.Print|console\.log|debugger" +DEBUG_FILES="" + +for file in $STAGED_GO_FILES $STAGED_TS_FILES; do + if [[ -f "$file" ]]; then + if grep -E "$DEBUG_PATTERNS" "$file" > /dev/null 2>&1; then + DEBUG_FILES="$DEBUG_FILES $file" + fi + fi +done + +if [[ -z "$DEBUG_FILES" ]]; then + print_success "No debug statements found" +else + print_warning "Possible debug statements found in:" + for f in $DEBUG_FILES; do + echo " - $f" + done + echo -e "${YELLOW}Please review and remove if unintended${NC}" +fi + +# Check for TODO without issue reference +if [[ -n "$STAGED_GO_FILES" ]]; then + INVALID_TODOS="" + for file in $STAGED_GO_FILES; do + if [[ -f "$file" ]]; then + # Check for TODOs that don't follow the format TODO(issue#) or TODO(username) + if grep -n "TODO[^(]" "$file" > /dev/null 2>&1; then + INVALID_TODOS="$INVALID_TODOS $file" + fi + fi + done + + if [[ -n "$INVALID_TODOS" ]]; then + print_warning "TODOs without issue reference found in:" + for f in $INVALID_TODOS; do + echo " - $f" + done + echo -e "${YELLOW}Consider using format: TODO(): description${NC}" + fi +fi + +# ============================================================================ +# Check 5: Run Go tests (only in full mode) +# ============================================================================ +if [[ "$QUICK_MODE" == "false" ]] && [[ -n "$STAGED_GO_FILES" ]]; then + print_header "Go Tests" + + # Get unique modules containing changed Go files + CHANGED_DIRS=$(echo "$STAGED_GO_FILES" | xargs -I{} dirname {} | sort -u) + + for dir in $CHANGED_DIRS; do + if [[ -d "$dir" ]] && [[ -f "$dir/go.mod" || -f "$(dirname "$dir")/go.mod" ]]; then + print_step "Testing $dir..." + if go test -short "./$dir/..." 
2>/dev/null; then + print_success "Tests passed for $dir" + else + print_error "Tests failed for $dir" + FAILED_CHECKS+=("Go tests: $dir") + fi + fi + done +fi + +# ============================================================================ +# Check 6: TypeScript/JavaScript checks +# ============================================================================ +if [[ -n "$STAGED_TS_FILES" ]] && command -v pnpm &> /dev/null; then + print_header "TypeScript/JavaScript Checks" + + # Check if there are any relevant package.json files + TS_PACKAGES=$(echo "$STAGED_TS_FILES" | xargs -I{} dirname {} | sort -u) + + for pkg in $TS_PACKAGES; do + if [[ -f "$pkg/package.json" ]]; then + print_step "Checking TypeScript in $pkg..." + if (cd "$pkg" && pnpm typecheck 2>/dev/null); then + print_success "TypeScript check passed for $pkg" + else + print_warning "TypeScript check skipped or failed for $pkg" + fi + fi + done +fi + +# ============================================================================ +# Check 7: Solidity formatting (if forge is available) +# ============================================================================ +if [[ -n "$STAGED_SOL_FILES" ]] && command -v forge &> /dev/null; then + print_header "Solidity Checks" + + print_step "Checking Solidity formatting..." + if forge fmt --check 2>/dev/null; then + print_success "Solidity files are properly formatted" + else + print_error "Solidity files need formatting" + echo -e "${YELLOW}Run 'forge fmt' to fix formatting${NC}" + FAILED_CHECKS+=("Solidity formatting") + fi +fi + +# ============================================================================ +# Summary +# ============================================================================ +print_header "Summary" + +if [[ ${#FAILED_CHECKS[@]} -eq 0 ]]; then + echo -e "${GREEN}${BOLD}All checks passed! ✓${NC}" + echo "" + exit 0 +else + echo -e "${RED}${BOLD}The following checks failed:${NC}" + for check in "${FAILED_CHECKS[@]}"; do + echo -e " ${RED}✗${NC} $check" + done + echo "" + echo -e "${YELLOW}Please fix these issues before committing.${NC}" + echo -e "${YELLOW}Use --quick flag to run only fast checks.${NC}" + exit 1 +fi
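
Usage sketch (reviewer addition, not part of the diff above): the example below shows how a call site elsewhere in the monorepo could adopt the timer together with the global metrics aggregate. The `batcher` package, `processBatch` function, 200ms threshold, and `batch_bytes` tag are hypothetical, and the import path assumes the monorepo's usual `github.com/ethereum-optimism/optimism/op-service` module path.

    package batcher // hypothetical call site, for illustration only

    import (
        "context"
        "strconv"
        "time"

        "github.com/ethereum-optimism/optimism/op-service/perf"
    )

    func processBatch(ctx context.Context, batch []byte) error {
        // Warn (rather than debug-log) if this call exceeds 200ms, and tag the log line with the batch size.
        timer := perf.NewTimer("process_batch",
            perf.WithThreshold(200*time.Millisecond),
            perf.WithTag("batch_bytes", strconv.Itoa(len(batch))),
        )
        defer func() {
            // Done returns the elapsed duration; feed it into the global aggregate
            // so a periodic GlobalMetrics().LogSummary() can report it.
            perf.RecordGlobal("process_batch", timer.Done())
        }()

        // ... actual batch processing ...
        return nil
    }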