--- description: Standards for performance benchmarks and optimization strategies globs: **/*.{ts,tsx,js,jsx,go,css,scss} --- filters: - type: file_extension pattern: "\\.(ts|tsx|js|jsx|go|css|scss)$" - type: content pattern: "(?s)performance|optimization|render|benchmark|profil|memory|cpu|network|latency|throughput" actions: - type: suggest message: | # Performance Optimization Standards ## Frontend Performance 1. Core Web Vitals Targets: - Largest Contentful Paint (LCP): < 2.5s - First Input Delay (FID): < 100ms - Cumulative Layout Shift (CLS): < 0.1 - First Contentful Paint (FCP): < 1.5s 2. Bundle Size Targets: - Initial JS bundle: < 170KB compressed - Initial CSS: < 50KB compressed - Total page size: < 1MB - Use code splitting for routes 3. Rendering Optimization: - React memo for pure components - Virtual lists for long scrollable content - Debounced/throttled event handlers - useCallback/useMemo for expensive operations 4. React Component Guidelines: - Move state up to appropriate level - Use context API judiciously - Avoid prop drilling - Implement shouldComponentUpdate or memo 5. Asset Optimization: - Optimize images (WebP/AVIF formats) - Lazy load images and non-critical components - Use font-display: swap - Compress SVGs ## Backend Performance 1. API Response Time Targets: - P50 (median): < 200ms - P95: < 500ms - P99: < 1000ms 2. Database Optimization: - Use indexes for frequently queried fields - Optimize queries with EXPLAIN - Use database connection pooling - Implement query caching for repeated requests 3. Go Performance: - Use goroutines appropriately - Avoid unnecessary allocations - Profile with pprof - Consider sync.Pool for frequent allocations 4. API Design: - GraphQL for flexible data fetching - Pagination for large result sets - Partial responses - Batch operations 5. Caching Strategy: - Cache calculation results - HTTP caching headers - In-memory caching for frequent reads - Distributed cache for shared data ## Network Optimization 1. 
HTTP/2 or HTTP/3 Support: - Enable multiplexing - Server push for critical resources - Header compression 2. API Compression: - Enable gzip/Brotli compression - Compress responses > 1KB - Skip compression for already compressed formats 3. CDN Usage: - Static assets via CDN - Edge caching for API responses - Regional deployments for global users ## Monitoring and Benchmarking 1. Tools: - Lighthouse for frontend performance - New Relic/Datadog for backend monitoring - Custom traces for critical paths 2. Load Testing: - Benchmark on specification-matched environments - Test at 2x expected peak load - Identify bottlenecks - Establish baseline and regression tests 3. Regular Performance Reviews: - Weekly performance dashboard review - Monthly deep-dive analysis - Continuous monitoring alerts ## Performance Budgets 1. Regression Prevention: - No more than 5% performance degradation between releases - Alert on exceeding performance budgets - Block deployment on critical performance failures 2. Optimization Targets: - Identify top 3 performance issues each sprint - Continuously improve critical user journeys - Set specific targets for identified bottlenecks ## Implementation Guidelines 1. Performance First: - Consider performance implications during design - Profile before and after significant changes - Document performance considerations in PRs 2. Known Patterns: - Use established patterns for common performance issues - Document performance tricks and techniques - Share learnings across teams 3. Testing Environment: - Test on low-end devices - Test on slower network connections - Test with representative datasets metadata: priority: high version: 1.0 examples: - input: | // Bad: Inefficient React component function UserList({ users }) { const [filter, setFilter] = useState(''); // Expensive operation on every render const filteredUsers = users.filter(user => user.name.toLowerCase().includes(filter.toLowerCase()) ); return (
<div>
  <input value={filter} onChange={e => setFilter(e.target.value)} />
  <ul>
    {filteredUsers.map(user => (
      <li key={user.id}>{user.name}</li>
    ))}
  </ul>
</div>
); } output: | // Good: Optimized React component function UserList({ users }) { const [filter, setFilter] = useState(''); // Memoized expensive operation const filteredUsers = useMemo(() => users.filter(user => user.name.toLowerCase().includes(filter.toLowerCase()) ), [users, filter] ); // Debounced filter change const handleFilterChange = useCallback( debounce(value => setFilter(value), 300), [] ); return (
<div>
  <input defaultValue={filter} onChange={e => handleFilterChange(e.target.value)} />
  {filteredUsers.length > 100 ? (
    <FixedSizeList
      height={500}
      itemCount={filteredUsers.length}
      itemSize={35}
      width="100%"
    >
      {({ index, style }) => {
        const user = filteredUsers[index];
        return (
          <div style={style} key={user.id}>
            <img src={user.avatarUrl} alt={`${user.name}'s avatar`} loading="lazy" />
            {user.name}
          </div>
        );
      }}
    </FixedSizeList>
  ) : (
    filteredUsers.map(user => (
      <div key={user.id}>
        <img src={user.avatarUrl} alt={`${user.name}'s avatar`} loading="lazy" />
        {user.name}
      </div>
    ))
  )}
</div>
); } - input: | // Bad: Inefficient database query func GetUserPosts(db *sql.DB, userID int) ([]Post, error) { var posts []Post rows, err := db.Query("SELECT * FROM posts WHERE user_id = ?", userID) if err != nil { return nil, err } defer rows.Close() for rows.Next() { var post Post err := rows.Scan(&post.ID, &post.Title, &post.Content, &post.UserID, &post.CreatedAt) if err != nil { return nil, err } // N+1 query problem commentRows, err := db.Query("SELECT * FROM comments WHERE post_id = ?", post.ID) if err != nil { return nil, err } defer commentRows.Close() for commentRows.Next() { var comment Comment err := commentRows.Scan(&comment.ID, &comment.Content, &comment.PostID, &comment.UserID) if err != nil { return nil, err } post.Comments = append(post.Comments, comment) } posts = append(posts, post) } return posts, nil } output: | // Good: Optimized database query (uses sqlx for IN-clause expansion and Rebind) func GetUserPosts(db *sqlx.DB, userID int) ([]Post, error) { // First, fetch all posts in one query var posts []Post postRows, err := db.Query(` SELECT id, title, content, user_id, created_at FROM posts WHERE user_id = ? ORDER BY created_at DESC`, userID) if err != nil { return nil, fmt.Errorf("error querying posts: %w", err) } defer postRows.Close() postIDs := []int{} // Map post ID to slice index; storing *Post here would go stale when append reallocates the backing array postIdx := make(map[int]int) for postRows.Next() { var post Post err := postRows.Scan(&post.ID, &post.Title, &post.Content, &post.UserID, &post.CreatedAt) if err != nil { return nil, fmt.Errorf("error scanning post: %w", err) } posts = append(posts, post) postIDs = append(postIDs, post.ID) postIdx[post.ID] = len(posts) - 1 } if len(postIDs) == 0 { return posts, nil } // Use a single query with IN clause to fetch all comments for all posts query, args, err := sqlx.In(` SELECT id, content, post_id, user_id FROM comments WHERE post_id IN (?) ORDER BY created_at ASC`, postIDs) if err != nil { return nil, fmt.Errorf("error preparing IN query: %w", err) } query = db.Rebind(query) commentRows, err := db.Query(query, args...) 
if err != nil { return nil, fmt.Errorf("error querying comments: %w", err) } defer commentRows.Close() // Populate the comments for each post for commentRows.Next() { var comment Comment var postID int err := commentRows.Scan(&comment.ID, &comment.Content, &postID, &comment.UserID) if err != nil { return nil, fmt.Errorf("error scanning comment: %w", err) } if i, ok := postIdx[postID]; ok { posts[i].Comments = append(posts[i].Comments, comment) } } return posts, nil }