diff --git a/.cursor/rules/rust/complex/workspace.mdc b/.cursor/rules/rust/complex/workspace.mdc
index 5540e16..a7d443a 100644
--- a/.cursor/rules/rust/complex/workspace.mdc
+++ b/.cursor/rules/rust/complex/workspace.mdc
@@ -911,7 +911,7 @@
 chrono = { workspace = true }
 uuid = { workspace = true }
 
 # Domain-specific dependencies
-bcrypt = "0.15"
+argon2 = "0.5"
 validator = { version = "0.18", features = ["derive"] }
 regex = { workspace = true }
diff --git a/.cursor/rules/rust/core/api-design.mdc b/.cursor/rules/rust/core/api-design.mdc
new file mode 100644
index 0000000..361fa91
--- /dev/null
+++ b/.cursor/rules/rust/core/api-design.mdc
@@ -0,0 +1,798 @@
+---
+description:
+globs:
+alwaysApply: false
+---
+# 🎨 RUST API DESIGN BEST PRACTICES
+
+> **TL;DR:** Comprehensive API design guidelines for creating ergonomic, maintainable, and idiomatic Rust libraries and services.
+
+## 🔍 API DESIGN STRATEGY
+
+```mermaid
+graph TD
+    Start["API Design"] --> Purpose{"API<br>
Purpose?"} + + Purpose -->|Library| LibAPI["Library API"] + Purpose -->|Service| ServiceAPI["Service API"] + Purpose -->|CLI| CLIAPI["CLI API"] + + LibAPI --> Ergonomics["Ergonomic Design"] + ServiceAPI --> RESTDesign["REST/gRPC Design"] + CLIAPI --> CLIDesign["Command Interface"] + + Ergonomics --> FlexibleInputs["Flexible Input Types"] + Ergonomics --> BuilderPattern["Builder Pattern"] + Ergonomics --> ErrorDesign["Error Design"] + + RESTDesign --> OpenAPI["OpenAPI Documentation"] + RESTDesign --> Validation["Input Validation"] + RESTDesign --> Authentication["Authentication"] + + CLIDesign --> Subcommands["Subcommand Structure"] + CLIDesign --> Configuration["Configuration Management"] + CLIDesign --> HelpSystem["Help System"] + + FlexibleInputs --> TraitDesign["Trait Design"] + BuilderPattern --> TraitDesign + ErrorDesign --> TraitDesign + + OpenAPI --> AsyncAPI["Async API Patterns"] + Validation --> AsyncAPI + Authentication --> AsyncAPI + + Subcommands --> Testing["Testing Strategy"] + Configuration --> Testing + HelpSystem --> Testing + + TraitDesign --> Documentation["Documentation"] + AsyncAPI --> Documentation + Testing --> Documentation + + Documentation --> APIComplete["API Complete"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style Ergonomics fill:#4dbb5f,stroke:#36873f,color:white + style RESTDesign fill:#ffa64d,stroke:#cc7a30,color:white + style CLIDesign fill:#d94dbb,stroke:#a3378a,color:white +``` + +## ๐ŸŽฏ API DESIGN PRINCIPLES + +### Ergonomic Function Signatures +```rust +use std::path::Path; + +// โœ… Accept flexible input types +pub fn read_config>(path: P) -> Result { + let path = path.as_ref(); + // Implementation +} + +// โœ… Use Into for string-like parameters +pub fn create_user>(name: S, email: S) -> Result { + let name = name.into(); + let email = email.into(); + // Implementation +} + +// โœ… Prefer borrowing over ownership when possible +pub fn validate_email(email: &str) -> Result<(), ValidationError> { + // Implementation - doesn't need to own the string +} + +// โœ… Return owned data when caller needs ownership +pub fn generate_token() -> String { + // Implementation returns owned String +} + +// โŒ Avoid overly generic signatures without clear benefit +// pub fn process(input: T, func: F) -> U where F: Fn(T) -> U +``` + +### Builder Pattern Implementation +```rust +use typed_builder::TypedBuilder; + +// โœ… Use TypedBuilder for complex configuration +#[derive(Debug, TypedBuilder)] +pub struct HttpClient { + #[builder(setter(into))] + base_url: String, + + #[builder(default = Duration::from_secs(30))] + timeout: Duration, + + #[builder(default)] + headers: HashMap, + + #[builder(default, setter(strip_option))] + proxy: Option, + + #[builder(default = false)] + verify_ssl: bool, +} + +impl HttpClient { + // โœ… Provide a simple constructor for common cases + pub fn new>(base_url: S) -> Self { + Self::builder() + .base_url(base_url) + .build() + } + + // โœ… Provide convenient factory methods + pub fn with_auth>(base_url: S, token: S) -> Self { + let mut headers = HashMap::new(); + headers.insert("Authorization".to_string(), format!("Bearer {}", token.into())); + + Self::builder() + .base_url(base_url) + .headers(headers) + .build() + } +} + +// โœ… Usage examples +let client = HttpClient::new("https://api.example.com"); + +let authenticated_client = HttpClient::builder() + .base_url("https://api.example.com") + .timeout(Duration::from_secs(60)) + .verify_ssl(true) + .build(); +``` + +### Error Handling Design +```rust +use thiserror::Error; 
+ +// โœ… Well-structured error hierarchy +#[derive(Error, Debug)] +pub enum ApiError { + #[error("Network error: {source}")] + Network { + #[from] + source: reqwest::Error, + }, + + #[error("Invalid request: {message}")] + InvalidRequest { message: String }, + + #[error("Authentication failed")] + Authentication, + + #[error("Resource not found: {resource_type} with id {id}")] + NotFound { + resource_type: String, + id: String, + }, + + #[error("Rate limit exceeded: retry after {retry_after} seconds")] + RateLimit { retry_after: u64 }, + + #[error("Server error: {status_code}")] + Server { status_code: u16 }, +} + +impl ApiError { + // โœ… Provide utility methods for error classification + pub fn is_retryable(&self) -> bool { + matches!( + self, + ApiError::Network { .. } | ApiError::RateLimit { .. } | ApiError::Server { status_code } if *status_code >= 500 + ) + } + + pub fn retry_after(&self) -> Option { + match self { + ApiError::RateLimit { retry_after } => Some(Duration::from_secs(*retry_after)), + _ => None, + } + } +} + +// โœ… Domain-specific result type +pub type ApiResult = Result; +``` + +## ๐Ÿ”„ TRAIT DESIGN PATTERNS + +### Cohesive Trait Design +```rust +// โœ… Single responsibility traits +pub trait Serializable { + fn serialize(&self) -> Result, SerializationError>; + fn deserialize(data: &[u8]) -> Result + where + Self: Sized; +} + +pub trait Cacheable { + type Key; + fn cache_key(&self) -> Self::Key; + fn cache_ttl(&self) -> Option; +} + +// โœ… Composable traits +pub trait Repository { + type Error; + type Id; + + async fn find_by_id(&self, id: Self::Id) -> Result, Self::Error>; + async fn save(&self, entity: &T) -> Result; + async fn delete(&self, id: Self::Id) -> Result; +} + +pub trait Queryable: Repository { + type Query; + type Page; + + async fn find_by_query(&self, query: Self::Query) -> Result, Self::Error>; + async fn find_paginated(&self, query: Self::Query, page: Self::Page) -> Result<(Vec, bool), Self::Error>; +} + +// โœ… Default implementations for common patterns +pub trait Timestamped { + fn created_at(&self) -> DateTime; + fn updated_at(&self) -> DateTime; + + // Default implementation for age calculation + fn age(&self) -> Duration { + Utc::now().signed_duration_since(self.created_at()).to_std().unwrap_or_default() + } +} +``` + +### Extension Traits +```rust +// โœ… Extension traits for external types +pub trait StringExtensions { + fn is_valid_email(&self) -> bool; + fn to_snake_case(&self) -> String; + fn truncate_with_ellipsis(&self, max_len: usize) -> String; +} + +impl StringExtensions for str { + fn is_valid_email(&self) -> bool { + // Email validation logic + self.contains('@') && self.contains('.') + } + + fn to_snake_case(&self) -> String { + // Snake case conversion + self.chars() + .map(|c| if c.is_uppercase() { format!("_{}", c.to_lowercase()) } else { c.to_string() }) + .collect::() + .trim_start_matches('_') + .to_string() + } + + fn truncate_with_ellipsis(&self, max_len: usize) -> String { + if self.len() <= max_len { + self.to_string() + } else { + format!("{}...", &self[..max_len.saturating_sub(3)]) + } + } +} + +// โœ… Extension traits for Result types +pub trait ResultExtensions { + fn log_error(self) -> Self; + fn with_context(self, f: F) -> Result> + where + F: FnOnce() -> String; +} + +impl ResultExtensions for Result { + fn log_error(self) -> Self { + if let Err(ref e) = self { + tracing::error!("Operation failed: {:?}", e); + } + self + } + + fn with_context(self, f: F) -> Result> + where + F: FnOnce() -> String, + { + 
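+        // `ContextError` is assumed to be a caller-defined wrapper that pairs the
+        // lazily built context message with the original error.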
self.map_err(|e| ContextError { + context: f(), + source: e, + }) + } +} +``` + +## ๐Ÿ“ฆ MODULE ORGANIZATION + +### Public API Structure +```rust +// lib.rs - Main library entry point +//! # MyLibrary +//! +//! A comprehensive library for handling X, Y, and Z. +//! +//! ## Quick Start +//! +//! ```rust +//! use my_library::Client; +//! +//! let client = Client::new("api-key"); +//! let result = client.fetch_data().await?; +//! ``` +//! +//! ## Features +//! +//! - Feature A: Enable with `features = ["feature-a"]` +//! - Feature B: Enable with `features = ["feature-b"]` + +// Re-export main public API +pub use client::Client; +pub use config::Config; +pub use error::{Error, Result}; + +// Re-export important types +pub use types::{User, Product, Order}; + +// Module declarations +mod client; +mod config; +mod error; +mod types; + +// Internal modules (not re-exported) +mod internal { + pub mod auth; + pub mod http; + pub mod serialization; +} + +// Prelude module for convenient imports +pub mod prelude { + pub use crate::{Client, Config, Error, Result}; + pub use crate::types::*; +} + +// Feature-gated modules +#[cfg(feature = "async")] +pub mod async_client; + +#[cfg(feature = "blocking")] +pub mod blocking_client; +``` + +### Documentation Standards +```rust +/// A client for interacting with the Example API. +/// +/// The `Client` provides methods for authentication, data retrieval, +/// and resource management. It handles rate limiting, retries, and +/// error handling automatically. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ```rust +/// use my_library::Client; +/// +/// # tokio_test::block_on(async { +/// let client = Client::new("your-api-key"); +/// let users = client.list_users().await?; +/// # Ok::<(), Box>(()) +/// # }); +/// ``` +/// +/// With custom configuration: +/// +/// ```rust +/// use my_library::{Client, Config}; +/// use std::time::Duration; +/// +/// let config = Config::builder() +/// .timeout(Duration::from_secs(30)) +/// .retry_attempts(3) +/// .build(); +/// +/// let client = Client::with_config("your-api-key", config); +/// ``` +pub struct Client { + api_key: String, + config: Config, + http_client: reqwest::Client, +} + +impl Client { + /// Creates a new client with the given API key. + /// + /// Uses default configuration with reasonable timeouts and retry settings. + /// + /// # Arguments + /// + /// * `api_key` - Your API key for authentication + /// + /// # Examples + /// + /// ```rust + /// use my_library::Client; + /// + /// let client = Client::new("sk-1234567890abcdef"); + /// ``` + pub fn new>(api_key: S) -> Self { + Self::with_config(api_key, Config::default()) + } + + /// Creates a new client with custom configuration. + /// + /// # Arguments + /// + /// * `api_key` - Your API key for authentication + /// * `config` - Custom configuration settings + /// + /// # Examples + /// + /// ```rust + /// use my_library::{Client, Config}; + /// use std::time::Duration; + /// + /// let config = Config::builder() + /// .timeout(Duration::from_secs(60)) + /// .build(); + /// + /// let client = Client::with_config("api-key", config); + /// ``` + pub fn with_config>(api_key: S, config: Config) -> Self { + // Implementation + } + + /// Retrieves a list of users. + /// + /// # Returns + /// + /// A `Result` containing a vector of `User` objects on success, + /// or an `Error` on failure. 
+ /// + /// # Errors + /// + /// This function will return an error if: + /// + /// * The API key is invalid (`Error::Authentication`) + /// * The request times out (`Error::Network`) + /// * The server returns an error (`Error::Server`) + /// + /// # Examples + /// + /// ```rust + /// # use my_library::{Client, Error}; + /// # tokio_test::block_on(async { + /// let client = Client::new("api-key"); + /// + /// match client.list_users().await { + /// Ok(users) => println!("Found {} users", users.len()), + /// Err(Error::Authentication) => eprintln!("Invalid API key"), + /// Err(e) => eprintln!("Request failed: {}", e), + /// } + /// # }); + /// ``` + pub async fn list_users(&self) -> Result, Error> { + // Implementation + } +} +``` + +## ๐Ÿ”ง CONFIGURATION PATTERNS + +### Layered Configuration +```rust +use serde::{Deserialize, Serialize}; +use std::path::Path; + +// โœ… Configuration with multiple sources +#[derive(Debug, Clone, Serialize, Deserialize, TypedBuilder)] +pub struct Config { + // Server settings + #[builder(default = "127.0.0.1".to_string(), setter(into))] + pub host: String, + + #[builder(default = 8080)] + pub port: u16, + + // API settings + #[builder(default = Duration::from_secs(30))] + pub timeout: Duration, + + #[builder(default = 3)] + pub retry_attempts: u32, + + // Feature flags + #[builder(default = true)] + pub enable_metrics: bool, + + #[builder(default = false)] + pub debug_mode: bool, +} + +impl Config { + /// Load configuration from multiple sources with precedence: + /// 1. Environment variables (highest priority) + /// 2. Configuration file + /// 3. Defaults (lowest priority) + pub fn load() -> Result { + let mut config = Self::default(); + + // Load from file if it exists + if let Ok(file_config) = Self::from_file("config.toml") { + config = config.merge(file_config); + } + + // Override with environment variables + config = config.merge(Self::from_env()?); + + Ok(config) + } + + pub fn from_file>(path: P) -> Result { + let content = std::fs::read_to_string(path) + .map_err(ConfigError::FileRead)?; + + toml::from_str(&content) + .map_err(ConfigError::ParseError) + } + + pub fn from_env() -> Result { + let mut builder = Self::builder(); + + if let Ok(host) = std::env::var("HOST") { + builder = builder.host(host); + } + + if let Ok(port) = std::env::var("PORT") { + let port = port.parse() + .map_err(|_| ConfigError::InvalidPort)?; + builder = builder.port(port); + } + + if let Ok(timeout) = std::env::var("TIMEOUT_SECONDS") { + let seconds = timeout.parse() + .map_err(|_| ConfigError::InvalidTimeout)?; + builder = builder.timeout(Duration::from_secs(seconds)); + } + + Ok(builder.build()) + } + + fn merge(self, other: Self) -> Self { + // Merge logic - other takes precedence + Self { + host: if other.host != "127.0.0.1" { other.host } else { self.host }, + port: if other.port != 8080 { other.port } else { self.port }, + timeout: if other.timeout != Duration::from_secs(30) { other.timeout } else { self.timeout }, + retry_attempts: if other.retry_attempts != 3 { other.retry_attempts } else { self.retry_attempts }, + enable_metrics: other.enable_metrics, // Boolean fields always take the other value + debug_mode: other.debug_mode, + } + } +} + +impl Default for Config { + fn default() -> Self { + Self::builder().build() + } +} +``` + +## ๐ŸŽญ ASYNC API PATTERNS + +### Async Iterator and Stream Design +```rust +use futures::Stream; +use std::pin::Pin; + +// โœ… Async iterator for paginated results +pub struct PaginatedStream { + client: Arc, + query: Query, + 
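+    // Cursor for the next page to fetch; starts as `None` (see `new()` below).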
current_page: Option, + buffer: VecDeque, + exhausted: bool, +} + +impl PaginatedStream { + pub fn new(client: Arc, query: Query) -> Self { + Self { + client, + query, + current_page: None, + buffer: VecDeque::new(), + exhausted: false, + } + } +} + +impl Stream for PaginatedStream +where + T: for<'de> Deserialize<'de> + Send + 'static, +{ + type Item = Result; + + fn poll_next( + mut self: Pin<&mut Self>, + cx: &mut Context<'_>, + ) -> Poll> { + if let Some(item) = self.buffer.pop_front() { + return Poll::Ready(Some(Ok(item))); + } + + if self.exhausted { + return Poll::Ready(None); + } + + // Fetch next page + let client = self.client.clone(); + let query = self.query.clone(); + let page = self.current_page.clone(); + + let future = async move { + client.fetch_page(query, page).await + }; + + // Poll the future and handle the result + // Implementation depends on your async runtime + todo!("Implement polling logic") + } +} + +// โœ… Cancellation-aware async operations +pub struct CancellableOperation { + inner: Pin> + Send>>, + cancel_token: CancelToken, +} + +impl CancellableOperation { + pub fn new(future: F, cancel_token: CancelToken) -> Self + where + F: Future> + Send + 'static, + { + Self { + inner: Box::pin(future), + cancel_token, + } + } +} + +impl Future for CancellableOperation { + type Output = Result; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + if self.cancel_token.is_cancelled() { + return Poll::Ready(Err(ApiError::Cancelled)); + } + + self.inner.as_mut().poll(cx) + } +} +``` + +## ๐Ÿ” TESTING API DESIGN + +### Testable API Structure +```rust +// โœ… Dependency injection for testability +pub trait HttpClientTrait: Send + Sync { + async fn get(&self, url: &str) -> Result; + async fn post(&self, url: &str, body: Vec) -> Result; +} + +pub struct Client { + http_client: H, + config: Config, +} + +impl Client { + pub fn new(http_client: H, config: Config) -> Self { + Self { http_client, config } + } + + pub async fn fetch_user(&self, id: &str) -> Result { + let url = format!("{}/users/{}", self.config.base_url, id); + let response = self.http_client.get(&url).await?; + // Parse response + todo!() + } +} + +// โœ… Production implementation +impl HttpClientTrait for reqwest::Client { + async fn get(&self, url: &str) -> Result { + // Implementation + } + + async fn post(&self, url: &str, body: Vec) -> Result { + // Implementation + } +} + +// โœ… Mock implementation for testing +#[cfg(test)] +pub struct MockHttpClient { + responses: HashMap>, +} + +#[cfg(test)] +impl MockHttpClient { + pub fn new() -> Self { + Self { + responses: HashMap::new(), + } + } + + pub fn expect_get(&mut self, url: &str, response: Result) { + self.responses.insert(format!("GET {}", url), response); + } +} + +#[cfg(test)] +impl HttpClientTrait for MockHttpClient { + async fn get(&self, url: &str) -> Result { + self.responses + .get(&format!("GET {}", url)) + .cloned() + .unwrap_or(Err(HttpError::NotFound)) + } + + async fn post(&self, url: &str, _body: Vec) -> Result { + self.responses + .get(&format!("POST {}", url)) + .cloned() + .unwrap_or(Err(HttpError::NotFound)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_fetch_user_success() { + let mut mock_client = MockHttpClient::new(); + mock_client.expect_get( + "https://api.example.com/users/123", + Ok(Response { + status: 200, + body: r#"{"id": "123", "name": "John"}"#.to_string(), + }), + ); + + let client = Client::new(mock_client, Config::default()); + let user = 
client.fetch_user("123").await.unwrap(); + + assert_eq!(user.id, "123"); + assert_eq!(user.name, "John"); + } +} +``` + +## โœ… API DESIGN CHECKLIST + +```markdown +### API Design Implementation Verification +- [ ] Function signatures accept flexible input types (AsRef, Into) +- [ ] Error types are well-structured with proper context +- [ ] Builder pattern used for complex configuration +- [ ] Traits have single responsibility and clear contracts +- [ ] Public API is well-documented with examples +- [ ] Configuration supports multiple sources with precedence +- [ ] Async APIs handle cancellation and backpressure +- [ ] Dependencies are injected for testability +- [ ] Extension traits enhance existing types ergonomically +- [ ] Module organization follows convention +- [ ] Feature gates are used appropriately +- [ ] Error handling provides actionable information +- [ ] API follows Rust naming conventions +- [ ] Generic parameters have appropriate bounds +- [ ] Public API surface is minimal but complete +``` + +This API design guide ensures consistent, ergonomic, and maintainable interfaces across Rust projects. diff --git a/.cursor/rules/rust/core/code-quality.mdc b/.cursor/rules/rust/core/code-quality.mdc index c32f477..65ea9d9 100644 --- a/.cursor/rules/rust/core/code-quality.mdc +++ b/.cursor/rules/rust/core/code-quality.mdc @@ -22,124 +22,6 @@ alwaysApply: false - **Production-ready code**: All code must be deployable and maintainable - **No `unwrap()` or `expect()`** in production code - use proper error handling -## ๐Ÿ“ฆ DEPENDENCY MANAGEMENT - -### Workspace Dependencies Priority -```toml -# Always prefer workspace dependencies first -[dependencies] -tokio = { workspace = true } -serde = { workspace = true, features = ["derive"] } - -# Only add new dependencies if not available in workspace -# Request permission before modifying Cargo.toml -``` - -### Standard Crate Recommendations -When adding new dependencies, prefer these battle-tested crates: - -```toml -# Core utilities -anyhow = "1.0" # Error handling -thiserror = "2.0" # Error type definitions -derive_more = { version = "2", features = ["full"] } # Extended derive macros -typed-builder = "0.21" # Builder pattern - -# Async/Concurrency -tokio = { version = "1.45", features = [ - "macros", - "rt-multi-thread", - "signal", - "sync" -] } -async-trait = "0.1" # Async traits -futures = "0.3" # Async utilities -dashmap = { version = "6", features = ["serde"] } # Concurrent HashMap - -# Serialization -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -serde_yaml = "0.9" -base64 = "0.22" - -# Web/HTTP -axum = { version = "0.8", features = ["macros", "http2"] } -reqwest = { version = "0.12", default-features = false, features = [ - "charset", - "rustls-tls-webpki-roots", - "http2", - "json", - "cookies", - "gzip", - "brotli", - "zstd", - "deflate" -] } -tower = { version = "0.5", features = ["util"] } -tower-http = { version = "0.6", features = ["cors", "trace"] } -http = "1" - -# Database -sqlx = { version = "0.8", features = [ - "chrono", - "postgres", - "runtime-tokio-rustls", - "sqlite", - "time", - "uuid" -] } - -# Documentation/API -utoipa = { version = "5", features = ["axum_extras"] } -utoipa-axum = { version = "0.2" } -utoipa-swagger-ui = { version = "9", features = [ - "axum", - "vendored" -], default-features = false } -schemars = { version = "0.8", features = ["chrono", "url"] } - -# Time/Date -chrono = { version = "0.4", features = ["serde"] } -time = { version = "0.3", features = ["serde"] } - -# 
Templating/Text Processing -minijinja = { version = "2", features = [ - "json", - "loader", - "loop_controls", - "speedups" -] } -regex = "1" -htmd = "0.2" # HTML to Markdown - -# Authentication/Security -jsonwebtoken = "9.0" -uuid = { version = "1.17", features = ["v4", "serde"] } - -# Data Processing -jsonpath-rust = "1" -url = "2.5" - -# CLI (when needed) -clap = { version = "4.0", features = ["derive"] } - -# Utilities -rand = "0.8" -getrandom = "0.3" -atomic_enum = "0.3" # Atomic enumerations - -# Logging/Observability -tracing = "0.1" -tracing-subscriber = { version = "0.3", features = ["env-filter"] } -``` - -### Version Strategy -- **Always use latest versions** when adding new dependencies -- **Request permission** before modifying `Cargo.toml` -- **Check workspace first** - never duplicate dependencies unnecessarily -- **Use specific feature flags** to minimize compilation time and binary size -- **Prefer rustls over openssl** for TLS (better for cross-compilation) - ## ๐Ÿ—๏ธ CODE STRUCTURE PATTERNS ### Data Structure Organization @@ -332,19 +214,6 @@ pub struct DatabaseManagerImpl; // Any file > 500 lines (excluding tests) needs refactoring ``` -### Dependency Anti-Patterns -```rust -// โŒ Don't duplicate workspace dependencies -[dependencies] -tokio = "1.0" # Already in workspace - -// โŒ Don't modify Cargo.toml without permission -# Always ask before adding new dependencies - -// โŒ Don't use outdated versions -serde = "0.9" # Use latest stable -``` - ## โœ… QUALITY CHECKLIST ```markdown @@ -362,7 +231,6 @@ serde = "0.9" # Use latest stable - [ ] `cargo test` passes - [ ] `cargo clippy` passes with no warnings - [ ] Public APIs documented with examples -- [ ] Workspace dependencies used when available ``` This code quality standard ensures consistent, maintainable, and production-ready Rust code across all projects. diff --git a/.cursor/rules/rust/core/dependencies.mdc b/.cursor/rules/rust/core/dependencies.mdc new file mode 100644 index 0000000..f723648 --- /dev/null +++ b/.cursor/rules/rust/core/dependencies.mdc @@ -0,0 +1,325 @@ +--- +description: +globs: +alwaysApply: false +--- +# ๐Ÿ“ฆ RUST DEPENDENCY MANAGEMENT + +> **TL;DR:** Centralized dependency management guidelines for consistent, secure, and maintainable Rust projects. + +## ๐Ÿ” DEPENDENCY MANAGEMENT STRATEGY + +```mermaid +graph TD + Start["Project Setup"] --> WorkspaceCheck{"Workspace
Project?"} + + WorkspaceCheck -->|Yes| WorkspaceRoot["Use Workspace Dependencies"] + WorkspaceCheck -->|No| SingleCrate["Single Crate Dependencies"] + + WorkspaceRoot --> WorkspaceTable["[workspace.dependencies]
Define versions centrally"] + SingleCrate --> DirectDeps["[dependencies]
Direct version specification"] + + WorkspaceTable --> CrateUsage["[dependencies]
+
+    CrateUsage --> SecurityCheck["Security Assessment"]
+    DirectDeps --> SecurityCheck
+
+    SecurityCheck --> Audit["cargo audit"]
+    Audit --> Outdated["cargo outdated"]
+    Outdated --> VersionPin["Pin Critical Versions"]
+
+    VersionPin --> FeatureGates["Feature Gate Optional Deps"]
+    FeatureGates --> Testing["Testing Dependencies"]
+    Testing --> Documentation["Document Choices"]
+
+    style Start fill:#4da6ff,stroke:#0066cc,color:white
+    style WorkspaceRoot fill:#4dbb5f,stroke:#36873f,color:white
+    style SingleCrate fill:#ffa64d,stroke:#cc7a30,color:white
+    style SecurityCheck fill:#d94dbb,stroke:#a3378a,color:white
+```
+
+## 🎯 DEPENDENCY STRATEGY
+
+### Workspace Dependencies Priority
+```toml
+# Always prefer workspace dependencies first
+[dependencies]
+tokio = { workspace = true }
+serde = { workspace = true, features = ["derive"] }
+
+# Only add new dependencies if not available in workspace
+# Request permission before modifying Cargo.toml
+```
+
+## 📋 STANDARD CRATE RECOMMENDATIONS
+
+### Core Utilities
+```toml
+# Error handling
+anyhow = "1.0"      # Simple error handling
+thiserror = "2.0"   # Structured error types
+derive_more = { version = "2", features = ["full"] } # Extended derive macros
+
+# Data structures
+typed-builder = "0.21" # Builder pattern
+uuid = { version = "1.17", features = ["v4", "v7", "serde"] }
+chrono = { version = "0.4", features = ["serde"] }
+time = { version = "0.3", features = ["serde"] }
+```
+
+### Async/Concurrency
+```toml
+tokio = { version = "1.45", features = [
+    "macros",
+    "rt-multi-thread",
+    "signal",
+    "sync",
+    "fs",
+    "net",
+    "time"
+] }
+async-trait = "0.1" # Async traits
+futures = "0.3"     # Async utilities
+dashmap = { version = "6", features = ["serde"] } # Concurrent HashMap
+```
+
+### Serialization
+```toml
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+serde_yaml = "0.9"
+base64 = "0.22"
+```
+
+### Web/HTTP
+```toml
+axum = { version = "0.8", features = ["macros", "http2", "multipart"] }
+reqwest = { version = "0.12", default-features = false, features = [
+    "charset",
+    "rustls-tls-webpki-roots",
+    "http2",
+    "json",
+    "cookies",
+    "gzip",
+    "brotli",
+    "zstd",
+    "deflate"
+] }
+tower = { version = "0.5", features = ["util", "timeout", "load-shed"] }
+tower-http = { version = "0.6", features = ["cors", "trace", "compression"] }
+http = "1.0"
+```
+
+### Database
+```toml
+sqlx = { version = "0.8", features = [
+    "chrono",
+    "postgres",
+    "runtime-tokio-rustls",
+    "sqlite",
+    "time",
+    "uuid",
+    "json"
+] }
+```
+
+### Documentation/API
+```toml
+utoipa = { version = "5", features = ["axum_extras", "chrono", "uuid"] }
+utoipa-axum = "0.2"
+utoipa-swagger-ui = { version = "9", features = [
+    "axum",
+    "vendored"
+], default-features = false }
+schemars = { version = "0.8", features = ["chrono", "url"] }
+```
+
+### CLI Applications
+```toml
+clap = { version = "4.0", features = ["derive", "env", "unicode"] }
+dialoguer = "0.11" # Interactive prompts
+indicatif = "0.17" # Progress bars
+colored = "2.0"    # Terminal colors
+console = "0.15"   # Terminal utilities
+```
+
+### gRPC/Protobuf
+```toml
+tonic = { version = "0.13", features = ["transport", "codegen", "prost"] }
+prost = "0.13"
+prost-types = "0.13"
+tonic-build = "0.13"
+prost-build = "0.13"
+tonic-health = "0.13"
+tonic-reflection = "0.13"
+```
+
+### Development/Testing
+```toml
+[dev-dependencies]
+tempfile = "3.0"    # Temporary files
+wiremock = "0.6"    # HTTP mocking
+assert_cmd = "2.0"  # CLI testing
+predicates = "3.0"  # Test assertions
+axum-test = "15.0"  # Axum testing
+tokio-test = "0.4"  # Tokio testing utilities
+```
+
+## 🔧 FEATURE FLAG STRATEGY
+
+### Minimal Feature Sets
+```toml
+# ✅ Good: Only enable needed features
+reqwest = { version = "0.12", default-features = false, features = [
+    "rustls-tls-webpki-roots", # TLS support
+    "json",                    # JSON serialization
+    "gzip"                     # Compression
+] }
+
+# ❌ Bad: Enabling all features
+# reqwest = { version = "0.12", features = ["full"] }
+```
+
+### Feature Documentation
+```toml
+# Document why each feature is needed
+tokio = { version = "1.45", features = [
+    "macros",          # #[tokio::main] and #[tokio::test]
+    "rt-multi-thread", # Multi-threaded runtime
+    "signal",          # Signal handling for graceful shutdown
+    "net",             # Network primitives
+    "fs",              # File system operations
+    "time"             # Time utilities
+] }
+```
+
+## 🔒 SECURITY CONSIDERATIONS
+
+### TLS Configuration
+```toml
+# ✅ Prefer rustls over openssl
+reqwest = { version = "0.12", default-features = false, features = [
+    "rustls-tls-webpki-roots" # Use rustls with web PKI roots
+] }
+
+# ❌ Avoid native-tls when possible
+# reqwest = { version = "0.12", features = ["native-tls"] }
+```
+
+### Crypto Dependencies
+```toml
+# Use well-established crypto crates
+rand = { version = "0.8", features = ["std_rng"] }
+getrandom = { version = "0.3", features = ["std"] }
+jsonwebtoken = "9.0"
+argon2 = "0.5"
+```
+
+## 📊 VERSION STRATEGY
+
+### Version Selection Rules
+1. **Always use latest stable versions** for new dependencies
+2. **Use semantic versioning** - prefer `"1.0"` over `"=1.0.0"`
+3. **Check workspace first** - never duplicate dependencies
+4. **Document breaking changes** when updating major versions
+
+### Workspace Version Management
+```toml
+# workspace Cargo.toml
+[workspace.dependencies]
+tokio = { version = "1.45", features = ["macros", "rt-multi-thread"] }
+serde = { version = "1.0", features = ["derive"] }
+anyhow = "1.0"
+thiserror = "2.0"
+uuid = { version = "1.17", features = ["v4", "serde"] }
+
+# Individual crate Cargo.toml
+[dependencies]
+tokio = { workspace = true, features = ["signal"] } # Add extra features as needed
+serde = { workspace = true }
+anyhow = { workspace = true }
+```
+
+## 🚨 DEPENDENCY ANTI-PATTERNS
+
+### What to Avoid
+```toml
+# ❌ Don't duplicate workspace dependencies
+[dependencies]
+tokio = "1.0" # Already in workspace
+
+# ❌ Don't enable unnecessary features
+tokio = { version = "1.45", features = ["full"] } # Too broad
+
+# ❌ Don't use outdated versions
+serde = "0.9" # Use latest stable
+
+# ❌ Don't mix TLS implementations
+reqwest = { version = "0.12", features = ["native-tls", "rustls-tls"] }
+
+# ❌ Don't use git dependencies in production
+my-crate = { git = "https://github.com/user/repo" }
+```
+
+### Common Mistakes
+```rust
+// ❌ Don't import with wildcard
+use serde::*;
+
+// ✅ Import specific items
+use serde::{Deserialize, Serialize};
+
+// ❌ Don't use deprecated APIs
+use std::sync::ONCE_INIT; // Deprecated
+
+// ✅ Use modern alternatives
+use std::sync::Once;
+```
+
+## 📝 DEPENDENCY AUDIT
+
+### Regular Maintenance
+```bash
+# Check for outdated dependencies
+cargo outdated
+
+# Audit for security vulnerabilities
+cargo audit
+
+# Check for unused dependencies
+cargo machete
+
+# Update dependencies
+cargo update
+```
+
+### Security Best Practices
+```toml
+# Pin security-critical dependencies
+openssl = "=0.10.64" # Pin exact version for security
+
+# Use cargo-deny for policy enforcement
+[advisories]
+db-path = "~/.cargo/advisory-db"
"~/.cargo/advisory-db" +db-urls = ["https://github.com/rustsec/advisory-db"] +vulnerability = "deny" +unmaintained = "warn" +``` + +## โœ… DEPENDENCY CHECKLIST + +```markdown +### Dependency Management Verification +- [ ] Uses workspace dependencies when available +- [ ] Features flags are minimal and documented +- [ ] Prefers rustls over native-tls +- [ ] Uses latest stable versions +- [ ] Security-critical deps are audited +- [ ] No duplicate dependencies across workspace +- [ ] Dev dependencies separated from runtime deps +- [ ] Feature documentation explains necessity +- [ ] Regular dependency updates scheduled +- [ ] Vulnerability scanning enabled +``` + +This dependency management guide ensures consistent, secure, and maintainable dependency choices across all Rust projects. diff --git a/.cursor/rules/rust/core/design-patterns.mdc b/.cursor/rules/rust/core/design-patterns.mdc new file mode 100644 index 0000000..cf42abf --- /dev/null +++ b/.cursor/rules/rust/core/design-patterns.mdc @@ -0,0 +1,873 @@ +--- +description: +globs: +alwaysApply: false +--- +# ๐ŸŽญ RUST DESIGN PATTERNS + +> **TL;DR:** Essential design patterns for Rust applications, focusing on idiomatic solutions that leverage Rust's ownership system and zero-cost abstractions. + +## ๐Ÿ” DESIGN PATTERN SELECTION STRATEGY + +```mermaid +graph TD + Start["Design Challenge"] --> ProblemType{"Problem
Category?"} + + ProblemType -->|Object Creation| Creational["Creational Patterns"] + ProblemType -->|Object Behavior| Behavioral["Behavioral Patterns"] + ProblemType -->|Object Structure| Structural["Structural Patterns"] + ProblemType -->|Concurrency| ConcurrencyP["Concurrency Patterns"] + + Creational --> BuilderCheck{"Complex
Configuration?"} + Creational --> FactoryCheck{"Multiple
Implementations?"} + + BuilderCheck -->|Yes| TypeStateBuilder["Type-State Builder"] + FactoryCheck -->|Yes| AbstractFactory["Abstract Factory"] + + Behavioral --> StrategyCheck{"Runtime Algorithm
Selection?"} + Behavioral --> CommandCheck{"Undo/Redo
Required?"} + Behavioral --> ObserverCheck{"Event-Driven
Architecture?"} + + StrategyCheck -->|Yes| StrategyPattern["Strategy Pattern"] + CommandCheck -->|Yes| CommandPattern["Command Pattern"] + ObserverCheck -->|Yes| ObserverPattern["Observer Pattern"] + + Structural --> AdapterCheck{"External API
Integration?"} + Structural --> DecoratorCheck{"Cross-Cutting
Concerns?"} + + AdapterCheck -->|Yes| AdapterPattern["Adapter Pattern"] + DecoratorCheck -->|Yes| DecoratorPattern["Decorator Pattern"] + + ConcurrencyP --> ActorCheck{"Isolated State
Management?"} + ConcurrencyP --> PipelineCheck{"Data Pipeline
Processing?"} + + ActorCheck -->|Yes| ActorPattern["Actor Pattern"] + PipelineCheck -->|Yes| PipelinePattern["Pipeline Pattern"] + + TypeStateBuilder --> Implementation["Implementation"] + AbstractFactory --> Implementation + StrategyPattern --> Implementation + CommandPattern --> Implementation + ObserverPattern --> Implementation + AdapterPattern --> Implementation + DecoratorPattern --> Implementation + ActorPattern --> Implementation + PipelinePattern --> Implementation + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style Creational fill:#4dbb5f,stroke:#36873f,color:white + style Behavioral fill:#ffa64d,stroke:#cc7a30,color:white + style Structural fill:#d94dbb,stroke:#a3378a,color:white + style ConcurrencyP fill:#9d4dbb,stroke:#7a3a8a,color:white +``` + +## ๐Ÿ—๏ธ CREATIONAL PATTERNS + +### Builder Pattern with Type State +```rust +use std::marker::PhantomData; + +// โœ… Type-safe builder preventing invalid configurations +pub struct DatabaseConfigBuilder { + host: Option, + port: Option, + database: Option, + username: Option, + password: Option, + _marker: PhantomData<(HasHost, HasPort, HasDatabase)>, +} + +pub struct Missing; +pub struct Present; + +impl DatabaseConfigBuilder { + pub fn new() -> Self { + Self { + host: None, + port: None, + database: None, + username: None, + password: None, + _marker: PhantomData, + } + } +} + +impl DatabaseConfigBuilder { + pub fn host(self, host: impl Into) -> DatabaseConfigBuilder { + DatabaseConfigBuilder { + host: Some(host.into()), + port: self.port, + database: self.database, + username: self.username, + password: self.password, + _marker: PhantomData, + } + } +} + +impl DatabaseConfigBuilder { + pub fn port(self, port: u16) -> DatabaseConfigBuilder { + DatabaseConfigBuilder { + host: self.host, + port: Some(port), + database: self.database, + username: self.username, + password: self.password, + _marker: PhantomData, + } + } +} + +impl DatabaseConfigBuilder { + pub fn database(self, database: impl Into) -> DatabaseConfigBuilder { + DatabaseConfigBuilder { + host: self.host, + port: self.port, + database: Some(database.into()), + username: self.username, + password: self.password, + _marker: PhantomData, + } + } +} + +impl DatabaseConfigBuilder { + pub fn username(mut self, username: impl Into) -> Self { + self.username = Some(username.into()); + self + } + + pub fn password(mut self, password: impl Into) -> Self { + self.password = Some(password.into()); + self + } +} + +// Only allow building when all required fields are present +impl DatabaseConfigBuilder { + pub fn build(self) -> DatabaseConfig { + DatabaseConfig { + host: self.host.unwrap(), + port: self.port.unwrap(), + database: self.database.unwrap(), + username: self.username, + password: self.password, + } + } +} + +// โœ… Usage - compiler enforces required fields +let config = DatabaseConfigBuilder::new() + .host("localhost") + .port(5432) + .database("myapp") + .username("admin") + .build(); +``` + +### Factory Pattern with Associated Types +```rust +// โœ… Factory pattern for creating different database connections +pub trait ConnectionFactory { + type Connection; + type Config; + type Error; + + fn create_connection(config: Self::Config) -> Result; + fn connection_type() -> &'static str; +} + +pub struct PostgresFactory; +pub struct SqliteFactory; + +impl ConnectionFactory for PostgresFactory { + type Connection = sqlx::PgPool; + type Config = PostgresConfig; + type Error = sqlx::Error; + + fn create_connection(config: Self::Config) -> Result { + // Implementation 
+ } + + fn connection_type() -> &'static str { + "PostgreSQL" + } +} + +impl ConnectionFactory for SqliteFactory { + type Connection = sqlx::SqlitePool; + type Config = SqliteConfig; + type Error = sqlx::Error; + + fn create_connection(config: Self::Config) -> Result { + // Implementation + } + + fn connection_type() -> &'static str { + "SQLite" + } +} + +// โœ… Generic database service using factory +pub struct DatabaseService { + connection: F::Connection, + _factory: PhantomData, +} + +impl DatabaseService { + pub fn new(config: F::Config) -> Result { + let connection = F::create_connection(config)?; + Ok(Self { + connection, + _factory: PhantomData, + }) + } + + pub fn connection_info(&self) -> &'static str { + F::connection_type() + } +} +``` + +## ๐Ÿ”„ BEHAVIORAL PATTERNS + +### Strategy Pattern with Enums +```rust +// โœ… Strategy pattern for different authentication methods +#[derive(Debug, Clone)] +pub enum AuthStrategy { + Bearer { token: String }, + ApiKey { key: String, header: String }, + Basic { username: String, password: String }, + OAuth2 { client_id: String, client_secret: String }, +} + +impl AuthStrategy { + pub fn apply_to_request(&self, request: &mut Request) -> Result<(), AuthError> { + match self { + AuthStrategy::Bearer { token } => { + request.headers_mut().insert( + "Authorization", + format!("Bearer {}", token).parse().unwrap(), + ); + } + AuthStrategy::ApiKey { key, header } => { + request.headers_mut().insert( + header.as_str(), + key.parse().unwrap(), + ); + } + AuthStrategy::Basic { username, password } => { + let encoded = base64::encode(format!("{}:{}", username, password)); + request.headers_mut().insert( + "Authorization", + format!("Basic {}", encoded).parse().unwrap(), + ); + } + AuthStrategy::OAuth2 { client_id, client_secret } => { + // OAuth2 implementation + self.handle_oauth2(request, client_id, client_secret)?; + } + } + Ok(()) + } + + fn handle_oauth2(&self, request: &mut Request, client_id: &str, client_secret: &str) -> Result<(), AuthError> { + // OAuth2 token exchange logic + todo!() + } +} + +// โœ… Context that uses the strategy +pub struct HttpClient { + client: reqwest::Client, + auth_strategy: Option, +} + +impl HttpClient { + pub fn new() -> Self { + Self { + client: reqwest::Client::new(), + auth_strategy: None, + } + } + + pub fn with_auth(mut self, strategy: AuthStrategy) -> Self { + self.auth_strategy = Some(strategy); + self + } + + pub async fn request(&self, url: &str) -> Result { + let mut request = self.client.get(url).build()?; + + if let Some(ref auth) = self.auth_strategy { + auth.apply_to_request(&mut request)?; + } + + let response = self.client.execute(request).await?; + Ok(response) + } +} +``` + +### Command Pattern with Undo +```rust +// โœ… Command pattern for operations with undo capability +pub trait Command { + type Error; + + fn execute(&mut self) -> Result<(), Self::Error>; + fn undo(&mut self) -> Result<(), Self::Error>; + fn description(&self) -> &str; +} + +#[derive(Debug)] +pub struct CreateUserCommand { + user_service: Arc, + user_data: User, + created_user_id: Option, +} + +impl CreateUserCommand { + pub fn new(user_service: Arc, user_data: User) -> Self { + Self { + user_service, + user_data, + created_user_id: None, + } + } +} + +impl Command for CreateUserCommand { + type Error = UserServiceError; + + fn execute(&mut self) -> Result<(), Self::Error> { + let user = self.user_service.create_user(&self.user_data)?; + self.created_user_id = Some(user.id); + Ok(()) + } + + fn undo(&mut self) -> Result<(), 
Self::Error> { + if let Some(user_id) = self.created_user_id.take() { + self.user_service.delete_user(user_id)?; + } + Ok(()) + } + + fn description(&self) -> &str { + "Create user" + } +} + +// โœ… Command invoker with history +pub struct CommandHistory { + executed_commands: Vec>>>, + current_position: usize, +} + +impl CommandHistory { + pub fn new() -> Self { + Self { + executed_commands: Vec::new(), + current_position: 0, + } + } + + pub fn execute(&mut self, mut command: C) -> Result<(), C::Error> + where + C: Command + 'static, + C::Error: Into>, + { + command.execute()?; + + // Remove any commands after current position (when redoing after undo) + self.executed_commands.truncate(self.current_position); + + // Add the new command + self.executed_commands.push(Box::new(command)); + self.current_position += 1; + + Ok(()) + } + + pub fn undo(&mut self) -> Result<(), Box> { + if self.current_position > 0 { + self.current_position -= 1; + self.executed_commands[self.current_position].undo()?; + } + Ok(()) + } + + pub fn redo(&mut self) -> Result<(), Box> { + if self.current_position < self.executed_commands.len() { + self.executed_commands[self.current_position].execute()?; + self.current_position += 1; + } + Ok(()) + } +} +``` + +### Observer Pattern with Async +```rust +use tokio::sync::broadcast; + +// โœ… Event-driven observer pattern with async support +#[derive(Debug, Clone)] +pub enum DomainEvent { + UserCreated { user_id: UserId, email: String }, + UserUpdated { user_id: UserId, changes: Vec }, + UserDeleted { user_id: UserId }, + OrderPlaced { order_id: OrderId, user_id: UserId, amount: Decimal }, +} + +#[async_trait::async_trait] +pub trait EventHandler { + async fn handle(&self, event: &DomainEvent) -> Result<(), EventError>; + fn interested_in(&self) -> Vec>; +} + +pub struct EmailNotificationHandler { + email_service: Arc, +} + +#[async_trait::async_trait] +impl EventHandler for EmailNotificationHandler { + async fn handle(&self, event: &DomainEvent) -> Result<(), EventError> { + match event { + DomainEvent::UserCreated { email, .. } => { + self.email_service.send_welcome_email(email).await?; + } + DomainEvent::OrderPlaced { user_id, amount, .. 
} => { + let user = self.get_user(*user_id).await?; + self.email_service.send_order_confirmation(&user.email, *amount).await?; + } + _ => {} + } + Ok(()) + } + + fn interested_in(&self) -> Vec> { + vec![ + std::mem::discriminant(&DomainEvent::UserCreated { user_id: UserId::new(), email: String::new() }), + std::mem::discriminant(&DomainEvent::OrderPlaced { + order_id: OrderId::new(), + user_id: UserId::new(), + amount: Decimal::ZERO + }), + ] + } +} + +// โœ… Event bus for managing observers +pub struct EventBus { + sender: broadcast::Sender, + handlers: Vec>, +} + +impl EventBus { + pub fn new() -> Self { + let (sender, _) = broadcast::channel(1000); + Self { + sender, + handlers: Vec::new(), + } + } + + pub fn subscribe(&mut self, handler: Arc) { + self.handlers.push(handler); + } + + pub async fn publish(&self, event: DomainEvent) -> Result<(), EventError> { + // Send to broadcast channel for other subscribers + let _ = self.sender.send(event.clone()); + + // Handle with registered handlers + for handler in &self.handlers { + let event_discriminant = std::mem::discriminant(&event); + if handler.interested_in().contains(&event_discriminant) { + if let Err(e) = handler.handle(&event).await { + tracing::error!("Event handler failed: {:?}", e); + // Continue with other handlers + } + } + } + + Ok(()) + } + + pub fn subscribe_to_stream(&self) -> broadcast::Receiver { + self.sender.subscribe() + } +} +``` + +## ๐Ÿ›๏ธ STRUCTURAL PATTERNS + +### Adapter Pattern for External APIs +```rust +// โœ… Adapter pattern for integrating different payment providers +#[async_trait::async_trait] +pub trait PaymentProcessor { + async fn process_payment(&self, payment: &Payment) -> Result; + async fn refund_payment(&self, payment_id: &str, amount: Option) -> Result; +} + +// External Stripe API (different interface) +pub struct StripeClient { + // Stripe-specific implementation +} + +impl StripeClient { + pub async fn charge(&self, amount_cents: u64, token: &str) -> Result { + // Stripe-specific charge logic + } + + pub async fn create_refund(&self, charge_id: &str, amount_cents: Option) -> Result { + // Stripe-specific refund logic + } +} + +// โœ… Adapter to make Stripe compatible with our interface +pub struct StripeAdapter { + client: StripeClient, +} + +impl StripeAdapter { + pub fn new(client: StripeClient) -> Self { + Self { client } + } +} + +#[async_trait::async_trait] +impl PaymentProcessor for StripeAdapter { + async fn process_payment(&self, payment: &Payment) -> Result { + let amount_cents = (payment.amount * 100).to_u64().ok_or(PaymentError::InvalidAmount)?; + + let charge = self.client + .charge(amount_cents, &payment.token) + .await + .map_err(|e| PaymentError::ProviderError(e.to_string()))?; + + Ok(PaymentResult { + id: charge.id, + status: match charge.status.as_str() { + "succeeded" => PaymentStatus::Completed, + "pending" => PaymentStatus::Pending, + "failed" => PaymentStatus::Failed, + _ => PaymentStatus::Unknown, + }, + amount: payment.amount, + fees: charge.fees.map(|f| Decimal::from(f) / 100), + }) + } + + async fn refund_payment(&self, payment_id: &str, amount: Option) -> Result { + let amount_cents = amount.map(|a| (a * 100).to_u64().unwrap()); + + let refund = self.client + .create_refund(payment_id, amount_cents) + .await + .map_err(|e| PaymentError::ProviderError(e.to_string()))?; + + Ok(RefundResult { + id: refund.id, + amount: Decimal::from(refund.amount) / 100, + status: RefundStatus::Completed, + }) + } +} + +// โœ… Similar adapter for PayPal +pub struct PayPalAdapter { + 
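+    // PayPal counterpart to the `StripeClient` above; its definition is not shown here.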
client: PayPalClient, +} + +#[async_trait::async_trait] +impl PaymentProcessor for PayPalAdapter { + async fn process_payment(&self, payment: &Payment) -> Result { + // PayPal-specific implementation + } + + async fn refund_payment(&self, payment_id: &str, amount: Option) -> Result { + // PayPal-specific implementation + } +} + +// โœ… Payment service using any adapter +pub struct PaymentService { + processor: Arc, +} + +impl PaymentService { + pub fn new(processor: Arc) -> Self { + Self { processor } + } + + pub async fn charge_customer(&self, payment: Payment) -> Result { + self.processor.process_payment(&payment).await + } +} +``` + +### Decorator Pattern with Middleware +```rust +// โœ… Decorator pattern for HTTP middleware +#[async_trait::async_trait] +pub trait HttpHandler { + async fn handle(&self, request: Request) -> Result; +} + +// Base handler +pub struct BaseHandler; + +#[async_trait::async_trait] +impl HttpHandler for BaseHandler { + async fn handle(&self, request: Request) -> Result { + // Basic request handling + Ok(Response::new("Hello World".into())) + } +} + +// โœ… Logging decorator +pub struct LoggingDecorator { + inner: H, +} + +impl LoggingDecorator { + pub fn new(inner: H) -> Self { + Self { inner } + } +} + +#[async_trait::async_trait] +impl HttpHandler for LoggingDecorator { + async fn handle(&self, request: Request) -> Result { + let start = std::time::Instant::now(); + let method = request.method().clone(); + let uri = request.uri().clone(); + + tracing::info!("Incoming request: {} {}", method, uri); + + let result = self.inner.handle(request).await; + + let duration = start.elapsed(); + match &result { + Ok(response) => { + tracing::info!( + "Request completed: {} {} -> {} in {:?}", + method, uri, response.status(), duration + ); + } + Err(e) => { + tracing::error!( + "Request failed: {} {} -> {:?} in {:?}", + method, uri, e, duration + ); + } + } + + result + } +} + +// โœ… Rate limiting decorator +pub struct RateLimitDecorator { + inner: H, + rate_limiter: Arc, +} + +impl RateLimitDecorator { + pub fn new(inner: H, rate_limiter: Arc) -> Self { + Self { inner, rate_limiter } + } +} + +#[async_trait::async_trait] +impl HttpHandler for RateLimitDecorator { + async fn handle(&self, request: Request) -> Result { + let client_ip = extract_client_ip(&request)?; + + self.rate_limiter.check_rate_limit(&client_ip).await + .map_err(|_| HttpError::RateLimited)?; + + self.inner.handle(request).await + } +} + +// โœ… Composition of decorators +let handler = RateLimitDecorator::new( + LoggingDecorator::new( + BaseHandler + ), + rate_limiter +); +``` + +## ๐Ÿงต CONCURRENCY PATTERNS + +### Actor Pattern with Tokio +```rust +use tokio::sync::{mpsc, oneshot}; + +// โœ… Actor pattern for managing state with message passing +#[derive(Debug)] +pub enum UserActorMessage { + GetUser { + user_id: UserId, + respond_to: oneshot::Sender>, + }, + UpdateUser { + user_id: UserId, + updates: UserUpdates, + respond_to: oneshot::Sender>, + }, + DeleteUser { + user_id: UserId, + respond_to: oneshot::Sender>, + }, +} + +pub struct UserActor { + receiver: mpsc::Receiver, + repository: Arc, + cache: DashMap, +} + +impl UserActor { + pub fn new(repository: Arc) -> (Self, UserActorHandle) { + let (sender, receiver) = mpsc::channel(100); + let actor = Self { + receiver, + repository, + cache: DashMap::new(), + }; + let handle = UserActorHandle { sender }; + (actor, handle) + } + + pub async fn run(mut self) { + while let Some(msg) = self.receiver.recv().await { + self.handle_message(msg).await; + } 
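+        // `recv()` returns `None` once every `UserActorHandle` has been dropped,
+        // so the loop above ends here and the actor task shuts down cleanly.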
+ } + + async fn handle_message(&mut self, msg: UserActorMessage) { + match msg { + UserActorMessage::GetUser { user_id, respond_to } => { + let result = self.get_user_internal(user_id).await; + let _ = respond_to.send(result); + } + UserActorMessage::UpdateUser { user_id, updates, respond_to } => { + let result = self.update_user_internal(user_id, updates).await; + let _ = respond_to.send(result); + } + UserActorMessage::DeleteUser { user_id, respond_to } => { + let result = self.delete_user_internal(user_id).await; + let _ = respond_to.send(result); + } + } + } + + async fn get_user_internal(&self, user_id: UserId) -> Result { + // Check cache first + if let Some(user) = self.cache.get(&user_id) { + return Ok(user.clone()); + } + + // Fetch from repository + let user = self.repository.find_by_id(user_id).await? + .ok_or(UserError::NotFound { user_id })?; + + // Cache the result + self.cache.insert(user_id, user.clone()); + + Ok(user) + } + + async fn update_user_internal(&mut self, user_id: UserId, updates: UserUpdates) -> Result { + let updated_user = self.repository.update(user_id, updates).await?; + + // Update cache + self.cache.insert(user_id, updated_user.clone()); + + Ok(updated_user) + } + + async fn delete_user_internal(&mut self, user_id: UserId) -> Result<(), UserError> { + self.repository.delete(user_id).await?; + + // Remove from cache + self.cache.remove(&user_id); + + Ok(()) + } +} + +// โœ… Handle for communicating with the actor +#[derive(Clone)] +pub struct UserActorHandle { + sender: mpsc::Sender, +} + +impl UserActorHandle { + pub async fn get_user(&self, user_id: UserId) -> Result { + let (respond_to, response) = oneshot::channel(); + + self.sender + .send(UserActorMessage::GetUser { user_id, respond_to }) + .await + .map_err(|_| UserError::ActorUnavailable)?; + + response.await + .map_err(|_| UserError::ActorUnavailable)? + } + + pub async fn update_user(&self, user_id: UserId, updates: UserUpdates) -> Result { + let (respond_to, response) = oneshot::channel(); + + self.sender + .send(UserActorMessage::UpdateUser { user_id, updates, respond_to }) + .await + .map_err(|_| UserError::ActorUnavailable)?; + + response.await + .map_err(|_| UserError::ActorUnavailable)? + } +} + +// โœ… Starting the actor system +pub async fn start_user_actor(repository: Arc) -> UserActorHandle { + let (actor, handle) = UserActor::new(repository); + + tokio::spawn(async move { + actor.run().await; + }); + + handle +} +``` + +## โœ… DESIGN PATTERNS CHECKLIST + +```markdown +### Design Patterns Implementation Verification +- [ ] Builder pattern used for complex configuration objects +- [ ] Factory pattern for creating related object families +- [ ] Strategy pattern for runtime algorithm selection +- [ ] Command pattern for operations requiring undo/redo +- [ ] Observer pattern for event-driven architecture +- [ ] Adapter pattern for external API integration +- [ ] Decorator pattern for cross-cutting concerns +- [ ] Actor pattern for concurrent state management +- [ ] Type-state pattern for compile-time validation +- [ ] Repository pattern for data access abstraction +- [ ] Dependency injection for testability +- [ ] Event sourcing for audit trails (when applicable) +- [ ] CQRS separation for read/write operations (when applicable) +- [ ] Circuit breaker for resilience patterns +- [ ] Retry pattern with exponential backoff +``` + +This design patterns guide provides battle-tested solutions for common architectural challenges in Rust applications. 
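+
+As a follow-up to the checklist item on retries: the guide names "Retry pattern with exponential backoff" but does not show one. Below is a minimal sketch, assuming `tokio` for sleeping; every error is retried here, while a real version would likely consult something like the `is_retryable()` idea from the API design guide. Treat it as a starting point rather than a definitive implementation.
+
+```rust
+use std::time::Duration;
+
+/// Retry an async operation, doubling the delay after each failed attempt.
+pub async fn retry_with_backoff<T, E, F, Fut>(
+    mut operation: F,
+    max_attempts: u32,
+    base_delay: Duration,
+) -> Result<T, E>
+where
+    F: FnMut() -> Fut,
+    Fut: std::future::Future<Output = Result<T, E>>,
+{
+    let mut attempt = 0;
+    loop {
+        match operation().await {
+            Ok(value) => return Ok(value),
+            // Back off and retry while attempts remain.
+            Err(_) if attempt + 1 < max_attempts => {
+                let delay = base_delay * 2u32.pow(attempt);
+                tokio::time::sleep(delay).await;
+                attempt += 1;
+            }
+            // Out of attempts: surface the last error.
+            Err(err) => return Err(err),
+        }
+    }
+}
+```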
diff --git a/.cursor/rules/rust/core/performance.mdc b/.cursor/rules/rust/core/performance.mdc new file mode 100644 index 0000000..36ff86b --- /dev/null +++ b/.cursor/rules/rust/core/performance.mdc @@ -0,0 +1,638 @@ +--- +description: +globs: +alwaysApply: false +--- +# โšก RUST PERFORMANCE OPTIMIZATION + +> **TL;DR:** Performance optimization strategies for Rust applications, focusing on zero-cost abstractions, memory management, and profiling-driven optimization. + +## ๐Ÿ” PERFORMANCE OPTIMIZATION STRATEGY + +```mermaid +graph TD + Start["Performance Issue"] --> Measure["Profile & Measure"] + + Measure --> Bottleneck{"Bottleneck
Type?"} + + Bottleneck -->|CPU| CPUOpt["CPU Optimization"] + Bottleneck -->|Memory| MemOpt["Memory Optimization"] + Bottleneck -->|I/O| IOOpt["I/O Optimization"] + Bottleneck -->|Concurrency| ConcOpt["Concurrency Optimization"] + + CPUOpt --> SIMD["SIMD Vectorization"] + CPUOpt --> Algorithms["Algorithm Optimization"] + CPUOpt --> CompileTime["Compile-Time Optimization"] + + MemOpt --> Allocation["Allocation Strategy"] + MemOpt --> DataStructure["Data Structure Choice"] + MemOpt --> Caching["Caching Patterns"] + + IOOpt --> Buffering["Buffering Strategy"] + IOOpt --> AsyncIO["Async I/O Patterns"] + IOOpt --> Batching["Request Batching"] + + ConcOpt --> Parallelism["Parallel Processing"] + ConcOpt --> Channels["Channel Optimization"] + ConcOpt --> LockFree["Lock-Free Structures"] + + SIMD --> Verify["Benchmark & Verify"] + Algorithms --> Verify + CompileTime --> Verify + Allocation --> Verify + DataStructure --> Verify + Caching --> Verify + Buffering --> Verify + AsyncIO --> Verify + Batching --> Verify + Parallelism --> Verify + Channels --> Verify + LockFree --> Verify + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style Measure fill:#ffa64d,stroke:#cc7a30,color:white + style CPUOpt fill:#4dbb5f,stroke:#36873f,color:white + style MemOpt fill:#d94dbb,stroke:#a3378a,color:white +``` + +## ๐ŸŽฏ PERFORMANCE PRINCIPLES + +### Measure First, Optimize Second +```rust +// โœ… Always profile before optimizing +use std::time::Instant; + +#[cfg(feature = "profiling")] +macro_rules! time_it { + ($name:expr, $block:block) => {{ + let start = Instant::now(); + let result = $block; + let duration = start.elapsed(); + tracing::info!("{} took {:?}", $name, duration); + result + }}; +} + +#[cfg(not(feature = "profiling"))] +macro_rules! time_it { + ($name:expr, $block:block) => { + $block + }; +} + +// Usage +fn process_data(data: &[u8]) -> Vec { + time_it!("process_data", { + // Expensive computation here + data.iter().map(|&b| b.wrapping_mul(2)).collect() + }) +} +``` + +## ๐Ÿ—๏ธ MEMORY OPTIMIZATION + +### String and Allocation Management +```rust +use std::borrow::Cow; + +// โœ… Use Cow for flexible string handling +pub fn process_text<'a>(input: &'a str) -> Cow<'a, str> { + if input.contains("old") { + Cow::Owned(input.replace("old", "new")) + } else { + Cow::Borrowed(input) + } +} + +// โœ… Pre-allocate with known capacity +pub fn build_large_string(items: &[&str]) -> String { + let total_len = items.iter().map(|s| s.len()).sum::(); + let mut result = String::with_capacity(total_len + items.len() - 1); + + for (i, item) in items.iter().enumerate() { + if i > 0 { + result.push(' '); + } + result.push_str(item); + } + result +} + +// โœ… Use Vec::with_capacity for known sizes +pub fn process_numbers(count: usize) -> Vec { + let mut result = Vec::with_capacity(count); + for i in 0..count { + result.push(i as i32 * 2); + } + result +} + +// โŒ Avoid repeated allocations +// fn bad_string_building(items: &[&str]) -> String { +// let mut result = String::new(); +// for item in items { +// result = result + item + " "; // New allocation each time +// } +// result +// } +``` + +### Smart Pointer Optimization +```rust +use std::rc::Rc; +use std::sync::Arc; + +// โœ… Use Rc for single-threaded shared ownership +#[derive(Debug, Clone)] +pub struct ConfigManager { + config: Rc, +} + +impl ConfigManager { + pub fn new(config: Config) -> Self { + Self { + config: Rc::new(config), + } + } + + // Cheap to clone - only increments reference count + pub fn get_config(&self) -> Rc { + self.config.clone() + 
} +} + +// โœ… Use Arc for multi-threaded scenarios +#[derive(Debug, Clone)] +pub struct ThreadSafeCache { + data: Arc>>, +} + +// โœ… Pool expensive objects +pub struct ConnectionPool { + connections: Vec, + available: std::collections::VecDeque, +} + +impl ConnectionPool { + pub async fn get_connection(&mut self) -> Option { + if let Some(index) = self.available.pop_front() { + Some(PooledConnection { + connection: &mut self.connections[index], + pool_index: index, + }) + } else { + None + } + } +} +``` + +## ๐Ÿ”„ ITERATION OPTIMIZATION + +### Iterator Patterns +```rust +// โœ… Chain iterators for efficiency +pub fn process_and_filter(data: &[i32]) -> Vec { + data.iter() + .filter(|&&x| x > 0) + .map(|&x| x * 2) + .filter(|&x| x < 1000) + .collect() +} + +// โœ… Use fold for accumulation +pub fn sum_of_squares(numbers: &[i32]) -> i64 { + numbers + .iter() + .map(|&x| x as i64) + .map(|x| x * x) + .fold(0, |acc, x| acc + x) +} + +// โœ… Parallel iteration with rayon +use rayon::prelude::*; + +pub fn parallel_process(data: &[f64]) -> Vec { + data.par_iter() + .map(|&x| expensive_computation(x)) + .collect() +} + +fn expensive_computation(x: f64) -> f64 { + // CPU-intensive operation + x.powi(3) + x.powi(2) + x + 1.0 +} + +// โŒ Avoid collecting intermediate results +// fn inefficient_processing(data: &[i32]) -> Vec { +// let filtered: Vec<_> = data.iter().filter(|&&x| x > 0).collect(); +// let mapped: Vec<_> = filtered.iter().map(|&x| x * 2).collect(); +// mapped.into_iter().filter(|&x| x < 1000).collect() +// } +``` + +### Custom Iterator Implementation +```rust +// โœ… Implement efficient custom iterators +pub struct ChunkIterator<'a, T> { + data: &'a [T], + chunk_size: usize, + position: usize, +} + +impl<'a, T> ChunkIterator<'a, T> { + pub fn new(data: &'a [T], chunk_size: usize) -> Self { + Self { + data, + chunk_size, + position: 0, + } + } +} + +impl<'a, T> Iterator for ChunkIterator<'a, T> { + type Item = &'a [T]; + + fn next(&mut self) -> Option { + if self.position >= self.data.len() { + return None; + } + + let end = std::cmp::min(self.position + self.chunk_size, self.data.len()); + let chunk = &self.data[self.position..end]; + self.position = end; + Some(chunk) + } + + fn size_hint(&self) -> (usize, Option) { + let remaining = (self.data.len() - self.position + self.chunk_size - 1) / self.chunk_size; + (remaining, Some(remaining)) + } +} + +impl<'a, T> ExactSizeIterator for ChunkIterator<'a, T> {} +``` + +## ๐Ÿงฎ COMPUTATIONAL OPTIMIZATION + +### Vectorization and SIMD +```rust +// โœ… Use SIMD when available +#[cfg(target_arch = "x86_64")] +use std::arch::x86_64::*; + +pub fn sum_f32_slice(values: &[f32]) -> f32 { + if is_x86_feature_detected!("avx2") { + unsafe { sum_f32_avx2(values) } + } else { + values.iter().sum() + } +} + +#[cfg(target_arch = "x86_64")] +#[target_feature(enable = "avx2")] +unsafe fn sum_f32_avx2(values: &[f32]) -> f32 { + let mut sum = _mm256_setzero_ps(); + let chunks = values.chunks_exact(8); + let remainder = chunks.remainder(); + + for chunk in chunks { + let v = _mm256_loadu_ps(chunk.as_ptr()); + sum = _mm256_add_ps(sum, v); + } + + // Extract the sum from the vector + let mut result = [0.0f32; 8]; + _mm256_storeu_ps(result.as_mut_ptr(), sum); + let vector_sum: f32 = result.iter().sum(); + + // Add remainder + vector_sum + remainder.iter().sum::() +} +``` + +### Lookup Tables and Memoization +```rust +use std::collections::HashMap; + +// โœ… Use lookup tables for expensive computations +pub struct FibonacciCalculator { + cache: HashMap, +} + +impl 
FibonacciCalculator { + pub fn new() -> Self { + let mut cache = HashMap::new(); + cache.insert(0, 0); + cache.insert(1, 1); + Self { cache } + } + + pub fn fibonacci(&mut self, n: u64) -> u64 { + if let Some(&result) = self.cache.get(&n) { + return result; + } + + let result = self.fibonacci(n - 1) + self.fibonacci(n - 2); + self.cache.insert(n, result); + result + } +} + +// โœ… Pre-computed lookup tables +pub struct SinTable { + table: Vec, + resolution: f64, +} + +impl SinTable { + pub fn new(resolution: usize) -> Self { + let table: Vec = (0..resolution) + .map(|i| { + let angle = (i as f64) * 2.0 * std::f64::consts::PI / (resolution as f64); + angle.sin() + }) + .collect(); + + Self { + table, + resolution: resolution as f64, + } + } + + pub fn sin_approx(&self, angle: f64) -> f64 { + let normalized = angle % (2.0 * std::f64::consts::PI); + let index = (normalized * self.resolution / (2.0 * std::f64::consts::PI)) as usize; + self.table[index.min(self.table.len() - 1)] + } +} +``` + +## ๐Ÿ”ง ASYNC PERFORMANCE + +### Async Optimization Patterns +```rust +use tokio::task::JoinSet; +use futures::future::{join_all, try_join_all}; + +// โœ… Batch async operations +pub async fn fetch_user_data_batch(user_ids: &[UserId]) -> Result, ServiceError> { + const BATCH_SIZE: usize = 50; + + let mut results = Vec::with_capacity(user_ids.len()); + + for chunk in user_ids.chunks(BATCH_SIZE) { + let futures = chunk.iter().map(|&id| fetch_user_data(id)); + let batch_results = try_join_all(futures).await?; + results.extend(batch_results); + } + + Ok(results) +} + +// โœ… Use bounded channels to prevent memory issues +pub async fn process_stream_with_backpressure() -> Result<(), ProcessingError> { + let (tx, mut rx) = tokio::sync::mpsc::channel(100); // Bounded channel + + // Producer task + tokio::spawn(async move { + for i in 0..1000 { + if tx.send(i).await.is_err() { + break; + } + // Producer will block when channel is full + } + }); + + // Consumer task + while let Some(item) = rx.recv().await { + process_item(item).await?; + } + + Ok(()) +} + +// โœ… Optimize async task spawning +pub async fn parallel_processing_optimized(items: Vec) -> Vec { + let mut join_set = JoinSet::new(); + let concurrency_limit = num_cpus::get(); + + for chunk in items.chunks(items.len() / concurrency_limit + 1) { + let chunk = chunk.to_vec(); + join_set.spawn(async move { + chunk.into_iter().map(process_item_sync).collect::>() + }); + } + + let mut results = Vec::new(); + while let Some(result) = join_set.join_next().await { + if let Ok(chunk_results) = result { + results.extend(chunk_results); + } + } + + results +} +``` + +## ๐Ÿ“Š PROFILING AND BENCHMARKING + +### Benchmarking with Criterion +```rust +// Cargo.toml +// [dev-dependencies] +// criterion = { version = "0.5", features = ["html_reports"] } + +#[cfg(test)] +mod benches { + use super::*; + use criterion::{black_box, criterion_group, criterion_main, Criterion}; + + fn bench_string_concatenation(c: &mut Criterion) { + let data = vec!["hello"; 1000]; + + c.bench_function("string_concat_push", |b| { + b.iter(|| { + let mut result = String::new(); + for s in &data { + result.push_str(black_box(s)); + } + result + }) + }); + + c.bench_function("string_concat_join", |b| { + b.iter(|| data.join("")) + }); + + c.bench_function("string_concat_capacity", |b| { + b.iter(|| { + let mut result = String::with_capacity(data.len() * 5); + for s in &data { + result.push_str(black_box(s)); + } + result + }) + }); + } + + criterion_group!(benches, bench_string_concatenation); + 
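    // Note: these Criterion benches are shown inline for brevity; in a real crate they
    // conventionally live in `benches/*.rs` with a `[[bench]]` entry (`harness = false`)
    // declared in Cargo.toml. Run them with `cargo bench`; with the `html_reports`
    // feature enabled, reports are written under `target/criterion/`.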
criterion_main!(benches); +} +``` + +### Memory Profiling +```rust +// Use instruments on macOS or valgrind on Linux +#[cfg(feature = "profiling")] +pub fn memory_intensive_operation() { + // Add memory tracking + let start_memory = get_memory_usage(); + + // Your operation here + let result = expensive_operation(); + + let end_memory = get_memory_usage(); + println!("Memory used: {} bytes", end_memory - start_memory); +} + +#[cfg(feature = "profiling")] +fn get_memory_usage() -> usize { + // Platform-specific memory usage detection + #[cfg(target_os = "linux")] + { + use std::fs; + if let Ok(contents) = fs::read_to_string("/proc/self/status") { + for line in contents.lines() { + if line.starts_with("VmRSS:") { + if let Some(kb) = line.split_whitespace().nth(1) { + return kb.parse::().unwrap_or(0) * 1024; + } + } + } + } + } + 0 +} +``` + +## ๐Ÿšจ PERFORMANCE ANTI-PATTERNS + +### What to Avoid +```rust +// โŒ Don't clone unnecessarily +// fn bad_function(data: Vec) -> Vec { +// data.clone() // Unnecessary clone +// } + +// โœ… Take ownership or borrow +fn good_function(data: Vec) -> Vec { + data // Move ownership +} + +// โŒ Don't use Vec when you need Set operations +// fn slow_contains(vec: &Vec, item: &str) -> bool { +// vec.iter().any(|s| s == item) // O(n) lookup +// } + +// โœ… Use appropriate data structures +use std::collections::HashSet; +fn fast_contains(set: &HashSet, item: &str) -> bool { + set.contains(item) // O(1) lookup +} + +// โŒ Don't collect unnecessarily +// fn wasteful_processing(data: &[i32]) -> i32 { +// data.iter() +// .filter(|&&x| x > 0) +// .collect::>() // Unnecessary allocation +// .iter() +// .sum() +// } + +// โœ… Chain operations +fn efficient_processing(data: &[i32]) -> i32 { + data.iter() + .filter(|&&x| x > 0) + .sum() +} +``` + +## ๐ŸŽฏ COMPILE-TIME OPTIMIZATION + +### Cargo.toml Optimizations +```toml +[profile.release] +lto = true # Link-time optimization +codegen-units = 1 # Better optimization at cost of compile time +panic = "abort" # Smaller binary size +strip = true # Remove debug symbols + +[profile.release-with-debug] +inherits = "release" +debug = true # Keep debug info for profiling + +# CPU-specific optimizations +[profile.release] +rustflags = ["-C", "target-cpu=native"] +``` + +### Feature Gates for Performance +```rust +// Cargo.toml +// [features] +// simd = [] +// parallel = ["rayon"] + +#[cfg(feature = "simd")] +pub fn fast_sum(data: &[f32]) -> f32 { + sum_f32_slice(data) +} + +#[cfg(not(feature = "simd"))] +pub fn fast_sum(data: &[f32]) -> f32 { + data.iter().sum() +} + +#[cfg(feature = "parallel")] +pub fn parallel_map(data: &[T], f: F) -> Vec +where + T: Sync, + U: Send, + F: Fn(&T) -> U + Sync, +{ + use rayon::prelude::*; + data.par_iter().map(f).collect() +} + +#[cfg(not(feature = "parallel"))] +pub fn parallel_map(data: &[T], f: F) -> Vec +where + F: Fn(&T) -> U, +{ + data.iter().map(f).collect() +} +``` + +## โœ… PERFORMANCE CHECKLIST + +```markdown +### Performance Implementation Verification +- [ ] Profile before optimizing (use criterion for benchmarks) +- [ ] Pre-allocate collections with known capacity +- [ ] Use appropriate data structures (HashMap vs Vec for lookups) +- [ ] Leverage iterator chains instead of intermediate collections +- [ ] Consider parallel processing for CPU-intensive tasks +- [ ] Use Cow for flexible string handling +- [ ] Implement object pooling for expensive resources +- [ ] Use SIMD when appropriate and available +- [ ] Optimize async task spawning and batching +- [ ] Enable LTO and appropriate 
optimization flags +- [ ] Use bounded channels to prevent memory issues +- [ ] Implement memoization for expensive computations +- [ ] Choose between Arc/Rc based on threading needs +- [ ] Avoid unnecessary clones and allocations +- [ ] Use const generics for compile-time optimizations +``` + +This performance guide provides practical optimization strategies while maintaining Rust's safety guarantees and zero-cost abstraction principles. diff --git a/.cursor/rules/rust/core/security.mdc b/.cursor/rules/rust/core/security.mdc new file mode 100644 index 0000000..0a1dd03 --- /dev/null +++ b/.cursor/rules/rust/core/security.mdc @@ -0,0 +1,744 @@ +--- +description: +globs: +alwaysApply: false +--- +# ๐Ÿ” RUST SECURITY BEST PRACTICES + +> **TL;DR:** Security-focused programming patterns for Rust applications, covering input validation, cryptography, secrets management, and secure coding practices. + +## ๐Ÿ” SECURITY IMPLEMENTATION STRATEGY + +```mermaid +graph TD + Start["Security Assessment"] --> ThreatModel["Threat Modeling"] + + ThreatModel --> InputSecurity{"Input
Validation?"} + ThreatModel --> AuthSecurity{"Authentication
Required?"} + ThreatModel --> DataSecurity{"Data
Protection?"} + ThreatModel --> AccessSecurity{"Access
Control?"} + + InputSecurity -->|Yes| Validation["Input Validation"] + InputSecurity -->|No| InputDone["โœ“"] + + AuthSecurity -->|Yes| PasswordHash["Password Hashing"] + AuthSecurity -->|No| AuthDone["โœ“"] + + DataSecurity -->|Yes| Encryption["Data Encryption"] + DataSecurity -->|No| DataDone["โœ“"] + + AccessSecurity -->|Yes| RBAC["Role-Based Access Control"] + AccessSecurity -->|No| AccessDone["โœ“"] + + Validation --> PathTraversal["Path Traversal Prevention"] + PathTraversal --> SQLInjection["SQL Injection Prevention"] + + PasswordHash --> Argon2["Argon2 Implementation"] + Argon2 --> JWT["JWT Token Security"] + + Encryption --> SecretsManagement["Secrets Management"] + SecretsManagement --> AESGCMEncryption["AES-GCM Encryption"] + + RBAC --> RateLimiting["Rate Limiting"] + RateLimiting --> Audit["Security Audit Logging"] + + SQLInjection --> SecurityDone["Security Verified"] + JWT --> SecurityDone + AESGCMEncryption --> SecurityDone + Audit --> SecurityDone + InputDone --> SecurityDone + AuthDone --> SecurityDone + DataDone --> SecurityDone + AccessDone --> SecurityDone + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style ThreatModel fill:#ffa64d,stroke:#cc7a30,color:white + style Argon2 fill:#4dbb5f,stroke:#36873f,color:white + style SecurityDone fill:#d94dbb,stroke:#a3378a,color:white +``` + +## ๐ŸŽฏ SECURITY PRINCIPLES + +### Input Validation and Sanitization +```rust +use validator::{Validate, ValidationError}; +use regex::Regex; +use std::collections::HashSet; + +// โœ… Always validate and sanitize user input +#[derive(Debug, Clone, Validate)] +pub struct UserRegistration { + #[validate(email, message = "Invalid email format")] + pub email: String, + + #[validate(length(min = 8, max = 128, message = "Password must be 8-128 characters"))] + #[validate(custom = "validate_password_strength")] + pub password: String, + + #[validate(length(min = 2, max = 50, message = "Username must be 2-50 characters"))] + #[validate(regex = "USERNAME_REGEX", message = "Username contains invalid characters")] + pub username: String, +} + +lazy_static::lazy_static! 
{ + static ref USERNAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap(); + static ref FORBIDDEN_PASSWORDS: HashSet<&'static str> = { + let mut set = HashSet::new(); + set.insert("password"); + set.insert("123456"); + set.insert("admin"); + set.insert("qwerty"); + set + }; +} + +fn validate_password_strength(password: &str) -> Result<(), ValidationError> { + // Check for forbidden passwords + if FORBIDDEN_PASSWORDS.contains(&password.to_lowercase().as_str()) { + return Err(ValidationError::new("forbidden_password")); + } + + // Require at least one uppercase, lowercase, digit, and special character + let has_upper = password.chars().any(|c| c.is_uppercase()); + let has_lower = password.chars().any(|c| c.is_lowercase()); + let has_digit = password.chars().any(|c| c.is_numeric()); + let has_special = password.chars().any(|c| "!@#$%^&*()_+-=[]{}|;:,.<>?".contains(c)); + + if !(has_upper && has_lower && has_digit && has_special) { + return Err(ValidationError::new("weak_password")); + } + + Ok(()) +} + +// โœ… SQL injection prevention with parameterized queries +pub async fn find_user_by_email( + pool: &sqlx::PgPool, + email: &str, +) -> Result, sqlx::Error> { + // โœ… Safe: Uses parameterized query + sqlx::query_as::<_, User>( + "SELECT id, email, username FROM users WHERE email = $1" + ) + .bind(email) + .fetch_optional(pool) + .await +} + +// โŒ NEVER: String interpolation vulnerable to SQL injection +// let query = format!("SELECT * FROM users WHERE email = '{}'", email); +``` + +### Path Traversal Prevention +```rust +use std::path::{Path, PathBuf}; + +// โœ… Safe file path handling +pub fn safe_file_access(base_dir: &Path, user_path: &str) -> Result { + // Normalize and resolve the path + let requested_path = base_dir.join(user_path); + let canonical_path = requested_path.canonicalize() + .map_err(|_| SecurityError::InvalidPath)?; + + // Ensure the canonical path is within the base directory + if !canonical_path.starts_with(base_dir) { + return Err(SecurityError::PathTraversal); + } + + Ok(canonical_path) +} + +// โœ… File upload with validation +pub async fn upload_file( + file_data: &[u8], + filename: &str, + upload_dir: &Path, +) -> Result { + // Validate filename + if filename.contains("..") || filename.contains('/') || filename.contains('\\') { + return Err(SecurityError::InvalidFilename); + } + + // Check file size + const MAX_FILE_SIZE: usize = 10 * 1024 * 1024; // 10MB + if file_data.len() > MAX_FILE_SIZE { + return Err(SecurityError::FileTooLarge); + } + + // Validate file type by magic bytes + let file_type = detect_file_type(file_data)?; + if !is_allowed_file_type(&file_type) { + return Err(SecurityError::DisallowedFileType); + } + + // Generate safe filename + let safe_filename = sanitize_filename(filename); + let file_path = upload_dir.join(safe_filename); + + tokio::fs::write(&file_path, file_data).await + .map_err(|_| SecurityError::FileWriteError)?; + + Ok(file_path) +} + +fn sanitize_filename(filename: &str) -> String { + filename + .chars() + .filter(|c| c.is_alphanumeric() || *c == '.' 
|| *c == '-' || *c == '_') + .collect() +} +``` + +## ๐Ÿ”‘ CRYPTOGRAPHY AND HASHING + +### Password Hashing with Argon2 +```rust +use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier}; +use argon2::password_hash::{rand_core::OsRng, SaltString}; + +// โœ… Secure password hashing with Argon2 (recommended) +pub struct PasswordService; + +impl PasswordService { + pub fn hash_password(password: &str) -> Result { + let salt = SaltString::generate(&mut OsRng); + + // Use Argon2id (default) with recommended parameters + let argon2 = Argon2::default(); + + let password_hash = argon2 + .hash_password(password.as_bytes(), &salt) + .map_err(|_| SecurityError::HashingError)?; + + Ok(password_hash.to_string()) + } + + pub fn verify_password(password: &str, hash: &str) -> Result { + let parsed_hash = PasswordHash::new(hash) + .map_err(|_| SecurityError::InvalidHash)?; + + let argon2 = Argon2::default(); + Ok(argon2.verify_password(password.as_bytes(), &parsed_hash).is_ok()) + } + + // โœ… Custom Argon2 configuration for high-security applications + pub fn hash_password_high_security(password: &str) -> Result { + use argon2::{Algorithm, Params, Version}; + + let salt = SaltString::generate(&mut OsRng); + + // Custom parameters for higher security (adjust based on performance requirements) + let params = Params::new( + 65536, // m_cost (memory cost) - 64 MB + 3, // t_cost (time cost) - 3 iterations + 4, // p_cost (parallelism) - 4 threads + Some(32) // output length + ).map_err(|_| SecurityError::HashingError)?; + + let argon2 = Argon2::new(Algorithm::Argon2id, Version::V0x13, params); + + let password_hash = argon2 + .hash_password(password.as_bytes(), &salt) + .map_err(|_| SecurityError::HashingError)?; + + Ok(password_hash.to_string()) + } +} +``` + +### JWT Token Security +```rust +use jsonwebtoken::{encode, decode, Header, Algorithm, Validation, EncodingKey, DecodingKey}; +use serde::{Deserialize, Serialize}; +use chrono::{DateTime, Utc, Duration}; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Claims { + pub sub: String, // Subject (user ID) + pub exp: i64, // Expiration time + pub iat: i64, // Issued at + pub jti: String, // JWT ID for revocation + pub scope: Vec, // User permissions +} + +pub struct JwtService { + encoding_key: EncodingKey, + decoding_key: DecodingKey, + algorithm: Algorithm, +} + +impl JwtService { + pub fn new(secret: &[u8]) -> Self { + Self { + encoding_key: EncodingKey::from_secret(secret), + decoding_key: DecodingKey::from_secret(secret), + algorithm: Algorithm::HS256, + } + } + + pub fn create_token(&self, user_id: &str, scopes: Vec) -> Result { + let now = Utc::now(); + let expiration = now + Duration::hours(24); + + let claims = Claims { + sub: user_id.to_string(), + exp: expiration.timestamp(), + iat: now.timestamp(), + jti: uuid::Uuid::new_v4().to_string(), + scope: scopes, + }; + + let mut header = Header::new(self.algorithm); + header.kid = Some("1".to_string()); // Key ID for key rotation + + encode(&header, &claims, &self.encoding_key) + .map_err(|_| SecurityError::TokenCreationError) + } + + pub fn verify_token(&self, token: &str) -> Result { + let mut validation = Validation::new(self.algorithm); + validation.validate_exp = true; + validation.validate_nbf = true; + + let token_data = decode::(token, &self.decoding_key, &validation) + .map_err(|_| SecurityError::InvalidToken)?; + + Ok(token_data.claims) + } +} +``` + +## ๐Ÿ”’ SECRETS MANAGEMENT + +### Environment Variable Security +```rust +use std::env; +use zeroize::Zeroize; + +// โœ… Secure 
secret handling +#[derive(Zeroize)] +#[zeroize(drop)] +pub struct Secret { + value: String, +} + +impl Secret { + pub fn from_env(key: &str) -> Result { + let value = env::var(key) + .map_err(|_| SecurityError::MissingSecret)?; + + if value.is_empty() { + return Err(SecurityError::EmptySecret); + } + + Ok(Self { value }) + } + + pub fn as_bytes(&self) -> &[u8] { + self.value.as_bytes() + } + + // โŒ Never implement Display or Debug for secrets + // This prevents accidental logging +} + +// โœ… Configuration with secure defaults +#[derive(Debug)] +pub struct SecurityConfig { + pub jwt_secret: Secret, + pub database_url: Secret, + pub encryption_key: Secret, + pub session_timeout: Duration, + pub max_login_attempts: u32, + pub rate_limit_per_minute: u32, +} + +impl SecurityConfig { + pub fn from_env() -> Result { + Ok(Self { + jwt_secret: Secret::from_env("JWT_SECRET")?, + database_url: Secret::from_env("DATABASE_URL")?, + encryption_key: Secret::from_env("ENCRYPTION_KEY")?, + session_timeout: Duration::minutes( + env::var("SESSION_TIMEOUT_MINUTES") + .unwrap_or_else(|_| "30".to_string()) + .parse() + .unwrap_or(30) + ), + max_login_attempts: env::var("MAX_LOGIN_ATTEMPTS") + .unwrap_or_else(|_| "5".to_string()) + .parse() + .unwrap_or(5), + rate_limit_per_minute: env::var("RATE_LIMIT_PER_MINUTE") + .unwrap_or_else(|_| "60".to_string()) + .parse() + .unwrap_or(60), + }) + } +} +``` + +### Data Encryption +```rust +use aes_gcm::{Aes256Gcm, Key, Nonce, aead::{Aead, NewAead}}; +use rand::{RngCore, rngs::OsRng}; + +pub struct EncryptionService { + cipher: Aes256Gcm, +} + +impl EncryptionService { + pub fn new(key: &[u8]) -> Result { + if key.len() != 32 { + return Err(SecurityError::InvalidKeyLength); + } + + let key = Key::from_slice(key); + let cipher = Aes256Gcm::new(key); + + Ok(Self { cipher }) + } + + pub fn encrypt(&self, plaintext: &[u8]) -> Result, SecurityError> { + let mut nonce_bytes = [0u8; 12]; + OsRng.fill_bytes(&mut nonce_bytes); + let nonce = Nonce::from_slice(&nonce_bytes); + + let mut ciphertext = self.cipher + .encrypt(nonce, plaintext) + .map_err(|_| SecurityError::EncryptionError)?; + + // Prepend nonce to ciphertext + let mut result = nonce_bytes.to_vec(); + result.append(&mut ciphertext); + + Ok(result) + } + + pub fn decrypt(&self, encrypted_data: &[u8]) -> Result, SecurityError> { + if encrypted_data.len() < 12 { + return Err(SecurityError::InvalidCiphertext); + } + + let (nonce_bytes, ciphertext) = encrypted_data.split_at(12); + let nonce = Nonce::from_slice(nonce_bytes); + + self.cipher + .decrypt(nonce, ciphertext) + .map_err(|_| SecurityError::DecryptionError) + } +} + +// โœ… Secure data structure for sensitive information +#[derive(Zeroize)] +#[zeroize(drop)] +pub struct SensitiveData { + data: Vec, +} + +impl SensitiveData { + pub fn new(data: Vec) -> Self { + Self { data } + } + + pub fn as_slice(&self) -> &[u8] { + &self.data + } +} +``` + +## ๐Ÿ›ก๏ธ ACCESS CONTROL AND AUTHORIZATION + +### Role-Based Access Control (RBAC) +```rust +use std::collections::{HashMap, HashSet}; + +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub enum Permission { + UserRead, + UserWrite, + UserDelete, + AdminAccess, + SystemConfig, +} + +#[derive(Debug, Clone)] +pub struct Role { + pub name: String, + pub permissions: HashSet, +} + +#[derive(Debug, Clone)] +pub struct User { + pub id: String, + pub roles: HashSet, +} + +pub struct AuthorizationService { + roles: HashMap, + user_sessions: HashMap, +} + +impl AuthorizationService { + pub fn new() -> Self { + let mut roles = 
HashMap::new(); + + // Define standard roles + roles.insert("user".to_string(), Role { + name: "user".to_string(), + permissions: [Permission::UserRead].into_iter().collect(), + }); + + roles.insert("admin".to_string(), Role { + name: "admin".to_string(), + permissions: [ + Permission::UserRead, + Permission::UserWrite, + Permission::UserDelete, + Permission::AdminAccess, + ].into_iter().collect(), + }); + + roles.insert("super_admin".to_string(), Role { + name: "super_admin".to_string(), + permissions: [ + Permission::UserRead, + Permission::UserWrite, + Permission::UserDelete, + Permission::AdminAccess, + Permission::SystemConfig, + ].into_iter().collect(), + }); + + Self { + roles, + user_sessions: HashMap::new(), + } + } + + pub fn check_permission(&self, user_id: &str, permission: &Permission) -> bool { + if let Some(user) = self.user_sessions.get(user_id) { + for role_name in &user.roles { + if let Some(role) = self.roles.get(role_name) { + if role.permissions.contains(permission) { + return true; + } + } + } + } + false + } + + pub fn require_permission(&self, user_id: &str, permission: Permission) -> Result<(), SecurityError> { + if self.check_permission(user_id, &permission) { + Ok(()) + } else { + Err(SecurityError::InsufficientPermissions) + } + } +} + +// โœ… Authorization middleware for web frameworks +pub async fn require_auth_middleware( + auth_service: &AuthorizationService, + user_id: &str, + required_permission: Permission, +) -> Result<(), SecurityError> { + auth_service.require_permission(user_id, required_permission) +} +``` + +## ๐Ÿšจ RATE LIMITING AND DDOS PROTECTION + +### Rate Limiting Implementation +```rust +use std::collections::HashMap; +use std::time::{Duration, Instant}; +use tokio::sync::RwLock; + +pub struct RateLimiter { + limits: RwLock>, + max_requests: u32, + window_duration: Duration, +} + +#[derive(Debug)] +struct RateLimit { + requests: Vec, + last_cleanup: Instant, +} + +impl RateLimiter { + pub fn new(max_requests: u32, window_duration: Duration) -> Self { + Self { + limits: RwLock::new(HashMap::new()), + max_requests, + window_duration, + } + } + + pub async fn check_rate_limit(&self, identifier: &str) -> Result<(), SecurityError> { + let now = Instant::now(); + let mut limits = self.limits.write().await; + + let rate_limit = limits.entry(identifier.to_string()).or_insert(RateLimit { + requests: Vec::new(), + last_cleanup: now, + }); + + // Cleanup old requests + if now.duration_since(rate_limit.last_cleanup) > self.window_duration { + rate_limit.requests.retain(|&request_time| { + now.duration_since(request_time) <= self.window_duration + }); + rate_limit.last_cleanup = now; + } + + // Check if limit exceeded + if rate_limit.requests.len() >= self.max_requests as usize { + return Err(SecurityError::RateLimitExceeded); + } + + // Add current request + rate_limit.requests.push(now); + Ok(()) + } +} + +// โœ… IP-based rate limiting for web endpoints +pub async fn rate_limit_by_ip( + rate_limiter: &RateLimiter, + ip_address: &str, +) -> Result<(), SecurityError> { + rate_limiter.check_rate_limit(ip_address).await +} +``` + +## ๐Ÿšจ SECURITY ERROR TYPES + +### Comprehensive Security Errors +```rust +#[derive(thiserror::Error, Debug)] +pub enum SecurityError { + #[error("Invalid email format")] + InvalidEmail, + + #[error("Weak password")] + WeakPassword, + + #[error("Path traversal attempt detected")] + PathTraversal, + + #[error("Invalid file path")] + InvalidPath, + + #[error("Invalid filename")] + InvalidFilename, + + #[error("File too large")] + 
FileTooLarge, + + #[error("Disallowed file type")] + DisallowedFileType, + + #[error("File write error")] + FileWriteError, + + #[error("Hashing error")] + HashingError, + + #[error("Password verification error")] + VerificationError, + + #[error("Invalid hash format")] + InvalidHash, + + #[error("Token creation error")] + TokenCreationError, + + #[error("Invalid token")] + InvalidToken, + + #[error("Missing secret configuration")] + MissingSecret, + + #[error("Empty secret value")] + EmptySecret, + + #[error("Invalid encryption key length")] + InvalidKeyLength, + + #[error("Encryption failed")] + EncryptionError, + + #[error("Decryption failed")] + DecryptionError, + + #[error("Invalid ciphertext")] + InvalidCiphertext, + + #[error("Insufficient permissions")] + InsufficientPermissions, + + #[error("Rate limit exceeded")] + RateLimitExceeded, + + #[error("Authentication required")] + AuthenticationRequired, + + #[error("Session expired")] + SessionExpired, + + #[error("Account locked")] + AccountLocked, +} + +impl SecurityError { + pub fn is_client_error(&self) -> bool { + matches!( + self, + Self::InvalidEmail + | Self::WeakPassword + | Self::PathTraversal + | Self::InvalidPath + | Self::InvalidFilename + | Self::FileTooLarge + | Self::DisallowedFileType + | Self::InvalidToken + | Self::InsufficientPermissions + | Self::RateLimitExceeded + | Self::AuthenticationRequired + ) + } + + pub fn should_log_details(&self) -> bool { + !self.is_client_error() + } +} +``` + +## โœ… SECURITY CHECKLIST + +```markdown +### Security Implementation Verification +- [ ] All user inputs are validated and sanitized +- [ ] SQL queries use parameterized statements +- [ ] File paths are validated against traversal attacks +- [ ] File uploads are validated by type and size +- [ ] Passwords are hashed with Argon2 +- [ ] JWT tokens include expiration and proper validation +- [ ] Secrets are loaded from environment variables +- [ ] Sensitive data structures implement Zeroize +- [ ] Encryption uses authenticated encryption (AES-GCM) +- [ ] Role-based access control is implemented +- [ ] Rate limiting protects against abuse +- [ ] Error messages don't leak sensitive information +- [ ] Security headers are set in HTTP responses +- [ ] Input validation happens on both client and server +- [ ] Audit logging tracks security-relevant events +- [ ] Regular security updates are applied +- [ ] Cryptographic randomness uses secure sources +- [ ] Session management includes timeout and rotation +``` + +This security guide provides comprehensive protection patterns while maintaining usability and performance in Rust applications. diff --git a/.cursor/rules/rust/core/type-system.mdc b/.cursor/rules/rust/core/type-system.mdc new file mode 100644 index 0000000..182e8da --- /dev/null +++ b/.cursor/rules/rust/core/type-system.mdc @@ -0,0 +1,590 @@ +--- +description: +globs: +alwaysApply: false +--- +# ๐Ÿ” RUST TYPE SYSTEM BEST PRACTICES + +> **TL;DR:** Leverage Rust's powerful type system for safety, performance, and expressiveness through newtype patterns, phantom types, and zero-cost abstractions. + +## ๐Ÿ” TYPE SYSTEM DESIGN STRATEGY + +```mermaid +graph TD + Start["Type Design"] --> DomainCheck{"Domain-Specific
Types Needed?"} + + DomainCheck -->|Yes| NewtypePattern["Newtype Pattern"] + DomainCheck -->|No| PrimitiveCheck{"Primitive
Obsession?"} + + NewtypePattern --> StateTracking{"State Tracking
Required?"} + PrimitiveCheck -->|Yes| NewtypePattern + PrimitiveCheck -->|No| TraitDesign["Trait Design"] + + StateTracking -->|Yes| PhantomTypes["Phantom Types"] + StateTracking -->|No| ValidatedTypes["Validated Types"] + + PhantomTypes --> CompileTimeCheck["Compile-Time Validation"] + ValidatedTypes --> RuntimeCheck["Runtime Validation"] + + CompileTimeCheck --> ZeroCost["Zero-Cost Abstractions"] + RuntimeCheck --> ZeroCost + TraitDesign --> ZeroCost + + ZeroCost --> ErrorModeling["Error Modeling"] + ErrorModeling --> SafetyPatterns["Safety Patterns"] + SafetyPatterns --> Performance["Performance Optimization"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style NewtypePattern fill:#4dbb5f,stroke:#36873f,color:white + style PhantomTypes fill:#ffa64d,stroke:#cc7a30,color:white + style ZeroCost fill:#d94dbb,stroke:#a3378a,color:white +``` + +## ๐ŸŽฏ TYPE SAFETY PRINCIPLES + +### Newtype Pattern for Domain Modeling +```rust +use derive_more::{Constructor, Display, From, Into}; +use serde::{Deserialize, Serialize}; +use std::fmt; + +// โœ… Strong typing for domain concepts +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Constructor, Display, From, Into)] +pub struct UserId(uuid::Uuid); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Constructor, Display, From, Into)] +pub struct ProductId(uuid::Uuid); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Constructor, Display, From, Into)] +pub struct OrderId(uuid::Uuid); + +// โœ… Prevents mixing up IDs at compile time +fn process_order(user_id: UserId, product_id: ProductId) -> OrderId { + // Compiler prevents: process_order(product_id, user_id) + OrderId(uuid::Uuid::new_v4()) +} + +// โŒ Weak typing - prone to errors +// fn process_order(user_id: String, product_id: String) -> String +``` + +### Validated Types with Builder Pattern +```rust +use typed_builder::TypedBuilder; +use validator::Validate; + +#[derive(Debug, Clone, Serialize, Deserialize, TypedBuilder, Validate)] +#[serde(rename_all = "camelCase")] +pub struct Email { + #[validate(email)] + #[builder(setter(into))] + value: String, +} + +impl Email { + pub fn new(value: impl Into) -> Result { + let email = Self { value: value.into() }; + email.validate()?; + Ok(email) + } + + pub fn as_str(&self) -> &str { + &self.value + } +} + +impl fmt::Display for Email { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.value) + } +} + +// โœ… Usage - compile-time guarantee of valid email +let email = Email::new("user@example.com")?; +``` + +### Phantom Types for Compile-Time State +```rust +use std::marker::PhantomData; + +// State types +pub struct Draft; +pub struct Published; +pub struct Archived; + +// Document with compile-time state tracking +#[derive(Debug, Clone)] +pub struct Document { + id: DocumentId, + title: String, + content: String, + _state: PhantomData, +} + +impl Document { + pub fn id(&self) -> DocumentId { + self.id + } + + pub fn title(&self) -> &str { + &self.title + } +} + +impl Document { + pub fn new(title: String, content: String) -> Self { + Self { + id: DocumentId::new(), + title, + content, + _state: PhantomData, + } + } + + pub fn publish(self) -> Document { + Document { + id: self.id, + title: self.title, + content: self.content, + _state: PhantomData, + } + } +} + +impl Document { + pub fn archive(self) -> Document { + Document { + id: self.id, + title: self.title, + content: self.content, + _state: PhantomData, + } + } + + pub fn 
content(&self) -> &str { + &self.content + } +} + +impl Document { + pub fn restore(self) -> Document { + Document { + id: self.id, + title: self.title, + content: self.content, + _state: PhantomData, + } + } +} + +// โœ… Usage - compiler prevents invalid state transitions +let draft = Document::::new("Title".to_string(), "Content".to_string()); +let published = draft.publish(); +let archived = published.archive(); +// Compiler error: draft.archive() - can't archive a draft +``` + +## ๐Ÿ”„ TRAIT DESIGN PATTERNS + +### Trait Objects vs Generic Bounds +```rust +// โœ… Use generics for known types at compile time +pub fn process_items(items: &[T]) -> Vec { + items.iter().map(|item| item.process()).collect() +} + +// โœ… Use trait objects for runtime polymorphism +pub struct EventBus { + handlers: Vec>, +} + +impl EventBus { + pub fn register_handler(&mut self, handler: Box) { + self.handlers.push(handler); + } + + pub fn dispatch(&self, event: &Event) { + for handler in &self.handlers { + handler.handle(event); + } + } +} + +// โœ… Async trait pattern +#[async_trait::async_trait] +pub trait AsyncProcessor { + type Error; + type Output; + + async fn process(&self, input: &[u8]) -> Result; +} +``` + +### Associated Types vs Generic Parameters +```rust +// โœ… Use associated types for tight coupling +pub trait Iterator { + type Item; // One Item type per Iterator implementation + fn next(&mut self) -> Option; +} + +// โœ… Use generic parameters for flexibility +pub trait Convert { + fn convert(&self, input: T) -> U; +} + +// Example: A single type can implement multiple conversions +impl Convert for NumberParser { + fn convert(&self, input: String) -> i32 { /* ... */ } +} + +impl Convert for NumberParser { + fn convert(&self, input: String) -> f64 { /* ... */ } +} +``` + +## ๐Ÿ“Š ENUM DESIGN PATTERNS + +### Comprehensive Error Modeling +```rust +#[derive(thiserror::Error, Debug)] +pub enum UserServiceError { + #[error("User not found: {user_id}")] + NotFound { user_id: UserId }, + + #[error("Email already exists: {email}")] + EmailExists { email: Email }, + + #[error("Database error: {source}")] + Database { + #[from] + source: sqlx::Error, + }, + + #[error("Validation error: {message}")] + Validation { message: String }, + + #[error("Permission denied: {action} requires {permission}")] + PermissionDenied { + action: String, + permission: String, + }, +} + +// โœ… Structured error handling with context +impl UserServiceError { + pub fn is_retryable(&self) -> bool { + matches!(self, Self::Database { .. }) + } + + pub fn error_code(&self) -> &'static str { + match self { + Self::NotFound { .. } => "USER_NOT_FOUND", + Self::EmailExists { .. } => "EMAIL_EXISTS", + Self::Database { .. } => "DATABASE_ERROR", + Self::Validation { .. } => "VALIDATION_ERROR", + Self::PermissionDenied { .. } => "PERMISSION_DENIED", + } + } +} +``` + +### State Machine with Enums +```rust +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "status", content = "data", rename_all = "camelCase")] +pub enum OrderStatus { + Pending { items: Vec }, + Processing { estimated_completion: DateTime }, + Shipped { tracking_number: String, carrier: String }, + Delivered { delivery_time: DateTime }, + Cancelled { reason: String, refund_issued: bool }, +} + +impl OrderStatus { + pub fn can_cancel(&self) -> bool { + matches!(self, Self::Pending { .. } | Self::Processing { .. }) + } + + pub fn can_ship(&self) -> bool { + matches!(self, Self::Processing { .. 
}) + } + + pub fn is_final(&self) -> bool { + matches!(self, Self::Delivered { .. } | Self::Cancelled { .. }) + } +} + +// โœ… Type-safe state transitions +impl Order { + pub fn ship(mut self, tracking_number: String, carrier: String) -> Result { + match self.status { + OrderStatus::Processing { .. } => { + self.status = OrderStatus::Shipped { tracking_number, carrier }; + Ok(self) + } + _ => Err(OrderError::InvalidStateTransition { + from: self.status.clone(), + to: "Shipped".to_string(), + }), + } + } +} +``` + +## ๐Ÿ›ก๏ธ SAFETY PATTERNS + +### Option and Result Combinators +```rust +// โœ… Chain operations safely +fn process_user_data(user_id: UserId) -> Result { + find_user(user_id)? + .and_then(|user| user.profile.as_ref().ok_or(ServiceError::MissingProfile)) + .and_then(|profile| validate_profile(profile)) + .map(|profile| process_profile(profile)) +} + +// โœ… Use combinators for cleaner code +fn get_user_email(user_id: UserId) -> Option { + find_user(user_id) + .ok() + .and_then(|user| user.email) + .filter(|email| email.is_verified()) +} + +// โœ… Error conversion with context +fn create_user(request: CreateUserRequest) -> Result { + validate_email(&request.email) + .map_err(|e| UserServiceError::Validation { message: e.to_string() })?; + + repository + .create_user(request) + .await + .map_err(UserServiceError::from) +} +``` + +### Custom Smart Pointers +```rust +use std::ops::{Deref, DerefMut}; + +// โœ… Validated wrapper that maintains invariants +#[derive(Debug)] +pub struct NonEmptyVec { + inner: Vec, +} + +impl NonEmptyVec { + pub fn new(first: T) -> Self { + Self { + inner: vec![first], + } + } + + pub fn try_from_vec(vec: Vec) -> Result { + if vec.is_empty() { + Err(EmptyVecError) + } else { + Ok(Self { inner: vec }) + } + } + + pub fn push(&mut self, item: T) { + self.inner.push(item); + } + + pub fn first(&self) -> &T { + // Safe to unwrap because we maintain the non-empty invariant + self.inner.first().unwrap() + } +} + +impl Deref for NonEmptyVec { + type Target = [T]; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} +``` + +## ๐ŸŽจ ZERO-COST ABSTRACTIONS + +### Compile-Time Constants +```rust +// โœ… Use const generics for compile-time validation +#[derive(Debug, Clone)] +pub struct FixedArray { + data: [T; N], +} + +impl FixedArray { + pub fn new() -> Self { + Self { + data: [T::default(); N], + } + } +} + +// โœ… Type-level programming with const generics +pub struct Matrix { + data: [[T; COLS]; ROWS], +} + +impl Matrix { + pub fn multiply( + self, + other: Matrix, + ) -> Matrix + where + T: Default + Copy + std::ops::Add + std::ops::Mul, + { + // Matrix multiplication with compile-time dimension checking + todo!() + } +} +``` + +### Builder with Type State +```rust +// โœ… Builder pattern with compile-time validation +pub struct ConfigBuilder { + host: Option, + port: Option, + timeout: Option, + _marker: PhantomData<(HasHost, HasPort)>, +} + +pub struct Missing; +pub struct Present; + +impl ConfigBuilder { + pub fn new() -> Self { + Self { + host: None, + port: None, + timeout: None, + _marker: PhantomData, + } + } +} + +impl ConfigBuilder { + pub fn host(self, host: String) -> ConfigBuilder { + ConfigBuilder { + host: Some(host), + port: self.port, + timeout: self.timeout, + _marker: PhantomData, + } + } +} + +impl ConfigBuilder { + pub fn port(self, port: u16) -> ConfigBuilder { + ConfigBuilder { + host: self.host, + port: Some(port), + timeout: self.timeout, + _marker: PhantomData, + } + } +} + +impl ConfigBuilder { + pub fn timeout(mut self, 
timeout: Duration) -> Self { + self.timeout = Some(timeout); + self + } +} + +// Only allow build when both host and port are set +impl ConfigBuilder { + pub fn build(self) -> Config { + Config { + host: self.host.unwrap(), + port: self.port.unwrap(), + timeout: self.timeout.unwrap_or(Duration::from_secs(30)), + } + } +} + +// โœ… Usage - compiler ensures required fields +let config = ConfigBuilder::new() + .host("localhost".to_string()) + .port(8080) + .timeout(Duration::from_secs(60)) + .build(); +``` + +## ๐Ÿšจ TYPE SYSTEM ANTI-PATTERNS + +### What to Avoid +```rust +// โŒ Weak typing - error prone +fn calculate_discount(price: f64, percentage: f64) -> f64 { + // Could accidentally pass percentage as price + price * (percentage / 100.0) +} + +// โœ… Strong typing prevents errors +#[derive(Debug, Clone, Copy)] +pub struct Price(f64); + +#[derive(Debug, Clone, Copy)] +pub struct Percentage(f64); + +fn calculate_discount(price: Price, percentage: Percentage) -> Price { + Price(price.0 * (percentage.0 / 100.0)) +} + +// โŒ Overuse of String for everything +// struct User { +// id: String, +// email: String, +// status: String, +// } + +// โœ… Proper typing +struct User { + id: UserId, + email: Email, + status: UserStatus, +} + +// โŒ Large enums with mixed concerns +// enum AppState { +// Loading, +// UserData(User), +// Error(String), +// DatabaseConnection(Database), +// HttpRequest(Request), +// } + +// โœ… Focused enums +enum LoadingState { + Loading, + Loaded(User), + Failed(LoadError), +} +``` + +## โœ… TYPE SYSTEM CHECKLIST + +```markdown +### Type System Implementation Verification +- [ ] Uses newtype pattern for domain concepts +- [ ] Phantom types for compile-time state tracking +- [ ] Associated types vs generics chosen appropriately +- [ ] Enums model state machines correctly +- [ ] Option/Result combinators used over unwrap +- [ ] Zero-cost abstractions leverage compile-time checks +- [ ] Builder patterns enforce required fields +- [ ] Error types are structured and informative +- [ ] No primitive obsession (avoid String/i32 for everything) +- [ ] Type safety prevents common runtime errors +- [ ] Const generics used for compile-time validation +- [ ] Trait objects vs generics chosen appropriately +``` + +This type system guide leverages Rust's powerful type system to catch errors at compile time and create more maintainable, expressive code. 
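> **Note:** The `Matrix::multiply` example above is left as `todo!()`. Below is a minimal, self-contained sketch of one way to complete it; the trait bounds and the naive triple loop are illustrative choices rather than a prescribed implementation.

```rust
// Sketch: const-generic matrix multiplication with compile-time dimension checking.
// The inner dimension must match between the operands, so mismatched shapes are
// rejected by the compiler instead of panicking at runtime.
#[derive(Debug)]
pub struct Matrix<T, const ROWS: usize, const COLS: usize> {
    data: [[T; COLS]; ROWS],
}

impl<T, const ROWS: usize, const COLS: usize> Matrix<T, ROWS, COLS>
where
    T: Default + Copy + std::ops::Add<Output = T> + std::ops::Mul<Output = T>,
{
    pub fn multiply<const K: usize>(self, other: Matrix<T, COLS, K>) -> Matrix<T, ROWS, K> {
        let mut data = [[T::default(); K]; ROWS];
        for r in 0..ROWS {
            for c in 0..K {
                let mut acc = T::default();
                for i in 0..COLS {
                    acc = acc + self.data[r][i] * other.data[i][c];
                }
                data[r][c] = acc;
            }
        }
        Matrix { data }
    }
}

// A 2x3 matrix times a 3x2 matrix yields a 2x2 matrix; passing a 2x2 matrix as the
// right-hand side would fail to compile because the inner dimensions disagree.
```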
diff --git a/.cursor/rules/rust/features/axum.mdc b/.cursor/rules/rust/features/axum.mdc index 22bb867..82b6102 100644 --- a/.cursor/rules/rust/features/axum.mdc +++ b/.cursor/rules/rust/features/axum.mdc @@ -135,7 +135,9 @@ pub struct DatabaseConfig { pub struct AuthConfig { pub jwt_secret: String, pub token_expiry_hours: u64, - pub bcrypt_cost: u32, + pub argon2_mem_cost: u32, + pub argon2_time_cost: u32, + pub argon2_parallelism: u32, } #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] @@ -180,8 +182,14 @@ impl AppConfig { token_expiry_hours: std::env::var("TOKEN_EXPIRY_HOURS") .unwrap_or_else(|_| "24".to_string()) .parse()?, - bcrypt_cost: std::env::var("BCRYPT_COST") - .unwrap_or_else(|_| "12".to_string()) + argon2_mem_cost: std::env::var("ARGON2_MEM_COST") + .unwrap_or_else(|_| "65536".to_string()) + .parse()?, + argon2_time_cost: std::env::var("ARGON2_TIME_COST") + .unwrap_or_else(|_| "3".to_string()) + .parse()?, + argon2_parallelism: std::env::var("ARGON2_PARALLELISM") + .unwrap_or_else(|_| "4".to_string()) .parse()?, }, features: FeatureFlags { @@ -218,7 +226,9 @@ impl Default for AppConfig { auth: AuthConfig { jwt_secret: "development-secret".to_string(), token_expiry_hours: 24, - bcrypt_cost: 12, + argon2_mem_cost: 65536, + argon2_time_cost: 3, + argon2_parallelism: 4, }, features: FeatureFlags { enable_registration: true, diff --git a/.cursor/rules/rust/features/cli.mdc b/.cursor/rules/rust/features/cli.mdc index 0c7c928..b2cb820 100644 --- a/.cursor/rules/rust/features/cli.mdc +++ b/.cursor/rules/rust/features/cli.mdc @@ -7,6 +7,40 @@ alwaysApply: false > **TL;DR:** Modern CLI application patterns using clap 4.0+ with derive features, subcommands, enum_dispatch, and production-ready command execution architecture. +## ๐Ÿ” CLI APPLICATION DESIGN STRATEGY + +```mermaid +graph TD + Start["CLI Application"] --> CLIType{"CLI
Complexity?"} + + CLIType -->|Simple| SimpleCLI["Single Command CLI"] + CLIType -->|Complex| ComplexCLI["Multi-Command CLI"] + + SimpleCLI --> DirectExecution["Direct Execution"] + ComplexCLI --> SubcommandArch["Subcommand Architecture"] + + SubcommandArch --> EnumDispatch["enum_dispatch Pattern"] + EnumDispatch --> TraitExecution["CommandExecutor Trait"] + + DirectExecution --> ErrorHandling["Error Handling"] + TraitExecution --> ErrorHandling + + ErrorHandling --> UserFeedback["User Feedback"] + UserFeedback --> ProgressIndicators["Progress Indicators"] + ProgressIndicators --> Configuration["Configuration Management"] + + Configuration --> Testing["CLI Testing"] + Testing --> Documentation["Help & Documentation"] + Documentation --> Completion["Shell Completion"] + + Completion --> Production["Production CLI"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style SimpleCLI fill:#4dbb5f,stroke:#36873f,color:white + style ComplexCLI fill:#ffa64d,stroke:#cc7a30,color:white + style EnumDispatch fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐ŸŽฏ CLI FRAMEWORK REQUIREMENTS ### Clap 4.0+ Configuration diff --git a/.cursor/rules/rust/features/concurrency.mdc b/.cursor/rules/rust/features/concurrency.mdc index 08efc36..fa32d99 100644 --- a/.cursor/rules/rust/features/concurrency.mdc +++ b/.cursor/rules/rust/features/concurrency.mdc @@ -7,6 +7,42 @@ alwaysApply: false > **TL;DR:** Modern async/await patterns and thread-safe data structures for high-performance Rust applications. +## ๐Ÿ” CONCURRENCY ARCHITECTURE STRATEGY + +```mermaid +graph TD + Start["Concurrency Requirements"] --> ConcurrencyType{"Concurrency
Pattern?"} + + ConcurrencyType -->|Async I/O| AsyncPattern["Async/Await Pattern"] + ConcurrencyType -->|CPU Intensive| ParallelPattern["Parallel Processing"] + ConcurrencyType -->|Shared State| SharedStatePattern["Shared State Management"] + ConcurrencyType -->|Message Passing| MessagePattern["Message Passing"] + + AsyncPattern --> TokioRuntime["Tokio Runtime"] + ParallelPattern --> Rayon["Rayon Parallel Iterators"] + SharedStatePattern --> DashMap["DashMap Collections"] + MessagePattern --> Channels["Channel Communication"] + + TokioRuntime --> AsyncPrimitives["Async Sync Primitives"] + Rayon --> ThreadPool["Thread Pool Management"] + DashMap --> LockFree["Lock-Free Data Structures"] + Channels --> ChannelTypes["Channel Type Selection"] + + AsyncPrimitives --> ErrorHandling["Error Handling"] + ThreadPool --> ErrorHandling + LockFree --> ErrorHandling + ChannelTypes --> ErrorHandling + + ErrorHandling --> Testing["Concurrency Testing"] + Testing --> Performance["Performance Monitoring"] + Performance --> Production["Production Concurrency"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style AsyncPattern fill:#4dbb5f,stroke:#36873f,color:white + style SharedStatePattern fill:#ffa64d,stroke:#cc7a30,color:white + style MessagePattern fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐ŸŽฏ ASYNC RUNTIME SELECTION ### Tokio as the Standard diff --git a/.cursor/rules/rust/features/database.mdc b/.cursor/rules/rust/features/database.mdc index 74b8118..38e6a0f 100644 --- a/.cursor/rules/rust/features/database.mdc +++ b/.cursor/rules/rust/features/database.mdc @@ -7,6 +7,41 @@ alwaysApply: false > **TL;DR:** Comprehensive guidelines for database access in Rust using SQLx, focusing on type safety, async patterns, and testing strategies. +## ๐Ÿ” DATABASE ARCHITECTURE STRATEGY + +```mermaid +graph TD + Start["Database Integration"] --> DBChoice{"Database
Type?"} + + DBChoice -->|PostgreSQL| Postgres["PostgreSQL with SQLx"] + DBChoice -->|SQLite| SQLite["SQLite with SQLx"] + DBChoice -->|Multiple| MultiDB["Multi-Database Support"] + + Postgres --> ConnectionPool["Connection Pool Setup"] + SQLite --> ConnectionPool + MultiDB --> ConnectionPool + + ConnectionPool --> Migrations["Database Migrations"] + Migrations --> EntityDesign["Entity Design"] + + EntityDesign --> RepositoryPattern["Repository Pattern"] + RepositoryPattern --> QueryPatterns["Query Patterns"] + + QueryPatterns --> TypeSafety["Type Safety"] + TypeSafety --> ErrorHandling["Error Handling"] + + ErrorHandling --> Testing["Database Testing"] + Testing --> Transactions["Transaction Management"] + Transactions --> Performance["Performance Optimization"] + + Performance --> Production["Production Database"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style ConnectionPool fill:#4dbb5f,stroke:#36873f,color:white + style RepositoryPattern fill:#ffa64d,stroke:#cc7a30,color:white + style Testing fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐ŸŽฏ DATABASE LIBRARY SELECTION ### SQLx as the Standard diff --git a/.cursor/rules/rust/features/http-client.mdc b/.cursor/rules/rust/features/http-client.mdc index 707c386..6787364 100644 --- a/.cursor/rules/rust/features/http-client.mdc +++ b/.cursor/rules/rust/features/http-client.mdc @@ -7,6 +7,37 @@ alwaysApply: false > **TL;DR:** Modern HTTP client patterns using reqwest with proper error handling, timeouts, and security configurations. +## ๐Ÿ” HTTP CLIENT ARCHITECTURE STRATEGY + +```mermaid +graph TD + Start["HTTP Client Requirements"] --> ClientType{"Client
Usage Pattern?"} + + ClientType -->|Simple Requests| SimpleClient["Simple Request Pattern"] + ClientType -->|Complex Integration| AdvancedClient["Advanced Client Pattern"] + ClientType -->|Service Integration| ServiceClient["Service Client Pattern"] + + SimpleClient --> BasicConfig["Basic Configuration"] + AdvancedClient --> BuilderPattern["Builder Pattern"] + ServiceClient --> TypedClient["Typed Client"] + + BasicConfig --> ErrorHandling["Error Handling"] + BuilderPattern --> ErrorHandling + TypedClient --> ErrorHandling + + ErrorHandling --> RetryLogic["Retry Logic"] + RetryLogic --> Authentication["Authentication"] + Authentication --> Monitoring["Request Monitoring"] + + Monitoring --> Testing["Client Testing"] + Testing --> Production["Production HTTP Client"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style SimpleClient fill:#4dbb5f,stroke:#36873f,color:white + style AdvancedClient fill:#ffa64d,stroke:#cc7a30,color:white + style ServiceClient fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐Ÿ”ง REQWEST CONFIGURATION ### Standard Dependencies diff --git a/.cursor/rules/rust/features/protobuf-grpc.mdc b/.cursor/rules/rust/features/protobuf-grpc.mdc index 3313599..7bb4595 100644 --- a/.cursor/rules/rust/features/protobuf-grpc.mdc +++ b/.cursor/rules/rust/features/protobuf-grpc.mdc @@ -7,6 +7,37 @@ alwaysApply: false > **TL;DR:** Modern protobuf and gRPC patterns using prost/tonic 0.13+ with clean code generation, Inner data structures, MessageSanitizer trait, gRPC reflection, and simplified service implementations. +## ๐Ÿ” PROTOBUF & GRPC DESIGN STRATEGY + +```mermaid +graph TD + Start["gRPC Service Design"] --> ProtoDesign["Protocol Buffer Design"] + + ProtoDesign --> CodeGen["Code Generation"] + CodeGen --> DataStructures["Data Structure Design"] + + DataStructures --> InnerTypes["Inner Types Pattern"] + DataStructures --> Sanitization["Message Sanitization"] + + InnerTypes --> BusinessLogic["Business Logic Separation"] + Sanitization --> BusinessLogic + + BusinessLogic --> ServiceImpl["Service Implementation"] + ServiceImpl --> ErrorHandling["Error Handling"] + + ErrorHandling --> Testing["Service Testing"] + Testing --> Reflection["gRPC Reflection"] + Reflection --> Deployment["Service Deployment"] + + Deployment --> Monitoring["Monitoring & Observability"] + Monitoring --> Production["Production gRPC Service"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style InnerTypes fill:#4dbb5f,stroke:#36873f,color:white + style Sanitization fill:#ffa64d,stroke:#cc7a30,color:white + style ServiceImpl fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐ŸŽฏ PROTOBUF & GRPC FRAMEWORK REQUIREMENTS ### Prost/Tonic Configuration diff --git a/.cursor/rules/rust/features/tools-and-config.mdc b/.cursor/rules/rust/features/tools-and-config.mdc index 363180c..d727ce6 100644 --- a/.cursor/rules/rust/features/tools-and-config.mdc +++ b/.cursor/rules/rust/features/tools-and-config.mdc @@ -7,6 +7,39 @@ alwaysApply: false > **TL;DR:** Essential tools and configuration patterns for modern Rust applications, focusing on logging, configuration management, and templating. +## ๐Ÿ” TOOLS & CONFIGURATION STRATEGY + +```mermaid +graph TD + Start["Application Setup"] --> ConfigType{"Configuration
Complexity?"} + + ConfigType -->|Simple| EnvVars["Environment Variables"] + ConfigType -->|Complex| YAMLConfig["YAML Configuration"] + + EnvVars --> Logging["Logging Setup"] + YAMLConfig --> ConfigValidation["Configuration Validation"] + ConfigValidation --> Logging + + Logging --> StructuredLogging["Structured Logging"] + StructuredLogging --> LogRotation["Log Rotation"] + + LogRotation --> Templating{"Template
Engine Needed?"} + + Templating -->|Yes| MiniJinja["MiniJinja Templates"] + Templating -->|No| DataProcessing["Data Processing"] + + MiniJinja --> DataProcessing + DataProcessing --> JSONPath["JSON Path Extraction"] + + JSONPath --> Monitoring["Application Monitoring"] + Monitoring --> Production["Production Tools"] + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style YAMLConfig fill:#4dbb5f,stroke:#36873f,color:white + style StructuredLogging fill:#ffa64d,stroke:#cc7a30,color:white + style MiniJinja fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐Ÿ“Š LOGGING AND OBSERVABILITY ### Tracing Ecosystem (Not env_logger) diff --git a/.cursor/rules/rust/features/utilities.mdc b/.cursor/rules/rust/features/utilities.mdc index e80c63b..11b42a9 100644 --- a/.cursor/rules/rust/features/utilities.mdc +++ b/.cursor/rules/rust/features/utilities.mdc @@ -7,6 +7,49 @@ alwaysApply: false > **TL;DR:** Essential utility patterns for authentication, CLI tools, data structures, and common development tasks. +## ๐Ÿ” UTILITY LIBRARY SELECTION STRATEGY + +```mermaid +graph TD + Start["Utility Requirements"] --> UtilityType{"Utility
Category?"} + + UtilityType -->|Authentication| AuthUtils["Authentication Utilities"] + UtilityType -->|CLI Tools| CLIUtils["CLI Utilities"] + UtilityType -->|Data Structures| DataUtils["Data Structure Utilities"] + UtilityType -->|Validation| ValidationUtils["Validation Utilities"] + + AuthUtils --> JWT["JWT Token Management"] + AuthUtils --> PasswordHash["Password Hashing"] + + CLIUtils --> ClapCLI["Clap CLI Framework"] + CLIUtils --> ProgressBars["Progress Indicators"] + + DataUtils --> TypedBuilder["TypedBuilder Pattern"] + DataUtils --> EnumDispatch["enum_dispatch"] + + ValidationUtils --> SerdeValidation["Serde Validation"] + ValidationUtils --> CustomValidation["Custom Validators"] + + JWT --> Security["Security Implementation"] + PasswordHash --> Security + ClapCLI --> UserInterface["User Interface"] + ProgressBars --> UserInterface + TypedBuilder --> CodeGeneration["Code Generation"] + EnumDispatch --> CodeGeneration + SerdeValidation --> DataIntegrity["Data Integrity"] + CustomValidation --> DataIntegrity + + Security --> Production["Production Utilities"] + UserInterface --> Production + CodeGeneration --> Production + DataIntegrity --> Production + + style Start fill:#4da6ff,stroke:#0066cc,color:white + style AuthUtils fill:#4dbb5f,stroke:#36873f,color:white + style CLIUtils fill:#ffa64d,stroke:#cc7a30,color:white + style DataUtils fill:#d94dbb,stroke:#a3378a,color:white +``` + ## ๐Ÿ” AUTHENTICATION AND SECURITY ### JWT with jsonwebtoken diff --git a/.cursor/rules/rust/main.mdc b/.cursor/rules/rust/main.mdc index 60b17b0..538f497 100644 --- a/.cursor/rules/rust/main.mdc +++ b/.cursor/rules/rust/main.mdc @@ -77,7 +77,12 @@ graph TD Main --> Complexity{"Project
## ๐Ÿš€ PROJECT INITIALIZATION WORKFLOW
@@ -251,6 +261,12 @@ Based on project analysis, load specific rule sets:
| Module | File | Description |
|--------|------|-------------|
| **Core** | `core/code-quality.mdc` | Rust 2024, no unsafe, production-ready code |
+| **Core** | `core/dependencies.mdc` | Centralized dependency management and workspace patterns |
+| **Core** | `core/type-system.mdc` | Type system mastery, newtype patterns, phantom types |
+| **Core** | `core/performance.mdc` | Performance optimization, SIMD, memory management |
+| **Core** | `core/security.mdc` | Security patterns, Argon2 hashing, encryption |
+| **Core** | `core/api-design.mdc` | Ergonomic API design, builder patterns, trait design |
+| **Core** | `core/design-patterns.mdc` | Essential design patterns, actor model, strategy |
| **Simple** | `simple/single-crate.mdc` | Single crate project structure |
| **Complex** | `complex/workspace.mdc` | Multi-crate workspace management |
| **Web** | `features/axum.mdc` | Axum 0.8 patterns, OpenAPI with utoipa |
diff --git a/.cursor/rules/rust/simple/single-crate.mdc b/.cursor/rules/rust/simple/single-crate.mdc
index dfa21d1..8439e2a 100644
--- a/.cursor/rules/rust/simple/single-crate.mdc
+++ b/.cursor/rules/rust/simple/single-crate.mdc
@@ -7,6 +7,45 @@ alwaysApply: false
> **TL;DR:** Guidelines for organizing simple Rust projects using a single crate structure with clean separation of concerns and maintainable file organization.
+
+## ๐Ÿ” SINGLE CRATE DESIGN STRATEGY
+
+```mermaid
+graph TD
+    Start["Single Crate Project"] --> CrateType{"Crate<br>Type?"}
+
+    CrateType -->|Binary| BinaryStructure["Binary Crate Structure"]
+    CrateType -->|Library| LibraryStructure["Library Crate Structure"]
+    CrateType -->|Mixed| MixedStructure["Mixed Crate Structure"]
+
+    BinaryStructure --> MinimalMain["Minimal main.rs"]
+    BinaryStructure --> CoreLib["Core Logic in lib.rs"]
+
+    LibraryStructure --> PublicAPI["Public API Design"]
+    LibraryStructure --> ModuleOrg["Module Organization"]
+
+    MixedStructure --> BinaryEntry["Binary Entry Point"]
+    MixedStructure --> LibraryAPI["Library API"]
+
+    MinimalMain --> FeatureModules["Feature-Based Modules"]
+    CoreLib --> FeatureModules
+    PublicAPI --> FeatureModules
+    ModuleOrg --> FeatureModules
+    BinaryEntry --> FeatureModules
+    LibraryAPI --> FeatureModules
+
+    FeatureModules --> ErrorHandling["Centralized Error Handling"]
+    ErrorHandling --> Configuration["Configuration Management"]
+    Configuration --> Testing["Testing Strategy"]
+    Testing --> Documentation["Documentation"]
+
+    Documentation --> Production["Production Single Crate"]
+
+    style Start fill:#4da6ff,stroke:#0066cc,color:white
+    style BinaryStructure fill:#4dbb5f,stroke:#36873f,color:white
+    style LibraryStructure fill:#ffa64d,stroke:#cc7a30,color:white
+    style MixedStructure fill:#d94dbb,stroke:#a3378a,color:white
+```
+
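+A minimal sketch of the "minimal main.rs / core logic in lib.rs" split shown above (the crate name `my_app` and the use of `anyhow` are placeholder assumptions, not prescribed here):
+
+```rust
+// src/main.rs — keep the binary entry point thin.
+fn main() -> anyhow::Result<()> {
+    let config = my_app::Config::from_env()?;
+    my_app::run(config)
+}
+```
+
+```rust
+// src/lib.rs — testable logic lives behind the library API.
+pub struct Config {
+    pub listen_addr: String,
+}
+
+impl Config {
+    pub fn from_env() -> anyhow::Result<Self> {
+        Ok(Self {
+            listen_addr: std::env::var("LISTEN_ADDR").unwrap_or_else(|_| "127.0.0.1:8080".into()),
+        })
+    }
+}
+
+pub fn run(config: Config) -> anyhow::Result<()> {
+    println!("starting on {}", config.listen_addr);
+    Ok(())
+}
+```
+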
## ๐Ÿ—๏ธ PROJECT STRUCTURE OVERVIEW
```mermaid
diff --git a/specs/instructions.md b/specs/instructions.md
index 146106d..3e78c3f 100644
--- a/specs/instructions.md
+++ b/specs/instructions.md
@@ -167,3 +167,5 @@ async pub fn say_hello(
4. Do not add protoc_arg.
5. Primitive types do not need sanitize_optional_xxx.
6. TypedBuilder usage: for each field, introduce default, default_code, setter(strip_option), setter(into), or setter(strip_option, into) as the situation requires. For example, an Option field should use `#[builder(default, setter(strip_option, into))]`. Do not overuse default.
+
+Please carefully review the @/rust rule set: check whether the content of the different parts is duplicated and whether the parts are orthogonal, and modify and refactor them accordingly. In addition, if there are Rust best practices and design patterns that have not yet been written in, please add them.
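+
+For example, the field-attribute rules in item 6 above map to a struct like the following sketch (field names are illustrative only):
+
+```rust
+use typed_builder::TypedBuilder;
+
+#[derive(Debug, TypedBuilder)]
+pub struct CreateUserRequest {
+    // Required field: no default; accept anything convertible into String.
+    #[builder(setter(into))]
+    name: String,
+
+    // Optional field: callers pass a &str/String or skip it entirely.
+    #[builder(default, setter(strip_option, into))]
+    nickname: Option<String>,
+
+    // Use default only where a sensible default actually exists.
+    #[builder(default = 3)]
+    max_retries: u32,
+}
+
+fn demo() {
+    let req = CreateUserRequest::builder()
+        .name("alice")
+        .nickname("al")
+        .build();
+    assert_eq!(req.max_retries, 3);
+}
+```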