diff --git a/.cursor/rules/rust/complex/workspace.mdc b/.cursor/rules/rust/complex/workspace.mdc index b93c988..5540e16 100644 --- a/.cursor/rules/rust/complex/workspace.mdc +++ b/.cursor/rules/rust/complex/workspace.mdc @@ -3,3 +3,1254 @@ description: globs: alwaysApply: false --- +# 🏢 MULTI-CRATE WORKSPACE STRUCTURE + +> **TL;DR:** Guidelines for organizing complex Rust projects using workspace architecture with multiple crates, clear subsystem boundaries, and domain-driven design principles. + +## 🏗️ SUBSYSTEM-BASED WORKSPACE ARCHITECTURE + +```mermaid +graph TD + Workspace["E-Commerce Platform Workspace"] --> Shared["Shared Infrastructure"] + Workspace --> UserMgmt["User Management
Subsystem"] + Workspace --> ProductCatalog["Product Catalog
Subsystem"] + Workspace --> OrderMgmt["Order Management
Subsystem"] + Workspace --> Payment["Payment
Subsystem"] + Workspace --> Notification["Notification
Subsystem"] + Workspace --> Analytics["Analytics
Subsystem"] + Workspace --> Gateway["API Gateway
& Services"] + + Shared --> SharedTypes["shared-types
(domain primitives)"] + Shared --> SharedDb["shared-db
(database infrastructure)"] + Shared --> SharedConfig["shared-config
(configuration)"] + Shared --> SharedEvents["shared-events
(event definitions)"] + + UserMgmt --> UserCore["user-core
(business logic)"] + UserMgmt --> UserApi["user-api
(HTTP handlers)"] + UserMgmt --> UserWorker["user-worker
(background jobs)"] + + ProductCatalog --> ProductCore["product-core
(business logic)"] + ProductCatalog --> ProductApi["product-api
(HTTP handlers)"] + ProductCatalog --> ProductSearch["product-search
(search service)"] + + OrderMgmt --> OrderCore["order-core
(business logic)"] + OrderMgmt --> OrderApi["order-api
(HTTP handlers)"] + OrderMgmt --> OrderProcessor["order-processor
(workflow engine)"] + + Payment --> PaymentCore["payment-core
(business logic)"] + Payment --> PaymentApi["payment-api
(HTTP handlers)"] + Payment --> PaymentGateway["payment-gateway
(external integrations)"] + + Notification --> NotificationCore["notification-core
(business logic)"] + Notification --> NotificationService["notification-service
(delivery engine)"] + + Analytics --> AnalyticsCore["analytics-core
(business logic)"] + Analytics --> AnalyticsCollector["analytics-collector
(data collection)"] + Analytics --> AnalyticsReporter["analytics-reporter
(reporting service)"] + + Gateway --> ApiGateway["api-gateway
(unified API)"] + Gateway --> AdminCli["admin-cli
(administration tool)"] + Gateway --> MigrationTool["migration-tool
(database migrations)"] + + style Workspace fill:#4da6ff,stroke:#0066cc,color:white + style Shared fill:#4dbb5f,stroke:#36873f,color:white + style UserMgmt fill:#ffa64d,stroke:#cc7a30,color:white + style ProductCatalog fill:#d94dbb,stroke:#a3378a,color:white + style OrderMgmt fill:#4dbbbb,stroke:#368787,color:white + style Payment fill:#ff6b6b,stroke:#cc5555,color:white + style Notification fill:#9b59b6,stroke:#7d4796,color:white + style Analytics fill:#f39c12,stroke:#d68910,color:white + style Gateway fill:#2ecc71,stroke:#27ae60,color:white +``` + +## 📁 SUBSYSTEM-BASED DIRECTORY STRUCTURE + +``` +ecommerce-platform/ +├── Cargo.toml # Workspace configuration +├── Cargo.lock # Locked dependencies +├── README.md # Platform overview +├── docker-compose.yml # Development environment +├── .env.example # Environment template +├── +├── shared/ # Cross-cutting infrastructure +│ ├── shared-types/ # Domain primitives & common types +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── ids.rs # UserId, ProductId, OrderId etc. +│ │ ├── money.rs # Money, Currency types +│ │ ├── address.rs # Address, Location types +│ │ ├── time.rs # Timestamp, DateRange types +│ │ └── pagination.rs # Pagination, Sorting types +│ │ +│ ├── shared-db/ # Database infrastructure +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── connection.rs # Connection pooling +│ │ ├── transactions.rs # Transaction management +│ │ ├── migrations.rs # Migration framework +│ │ └── repositories/ # Base repository traits +│ │ +│ ├── shared-config/ # Configuration management +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── database.rs # Database configuration +│ │ ├── redis.rs # Redis configuration +│ │ ├── auth.rs # Auth configuration +│ │ └── services.rs # Service discovery +│ │ +│ └── shared-events/ # Event definitions & messaging +│ ├── Cargo.toml +│ └── src/ +│ ├── lib.rs +│ ├── user_events.rs # UserRegistered, UserUpdated +│ ├── order_events.rs # OrderPlaced, OrderFulfilled +│ ├── payment_events.rs # PaymentProcessed, PaymentFailed +│ └── event_bus.rs # Event publishing infrastructure +│ +├── subsystems/ # Business subsystems +│ ├── user-management/ # User & Authentication subsystem +│ │ ├── user-core/ # Business logic +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── entities/ # User, Profile, Session +│ │ │ ├── services/ # AuthService, ProfileService +│ │ │ ├── repositories/ # UserRepository trait +│ │ │ └── errors.rs +│ │ │ +│ │ ├── user-api/ # HTTP API handlers +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── handlers/ # Auth, profile endpoints +│ │ │ ├── middleware/ # Auth middleware +│ │ │ ├── dto/ # Request/response DTOs +│ │ │ └── validation.rs +│ │ │ +│ │ └── user-worker/ # Background jobs +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── main.rs +│ │ ├── jobs/ # Email verification, cleanup +│ │ └── handlers.rs +│ │ +│ ├── product-catalog/ # Product management subsystem +│ │ ├── product-core/ # Business logic +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── entities/ # Product, Category, Inventory +│ │ │ ├── services/ # ProductService, InventoryService +│ │ │ ├── repositories/ +│ │ │ └── errors.rs +│ │ │ +│ │ ├── product-api/ # HTTP API handlers +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── handlers/ # Product CRUD, search +│ │ │ ├── dto/ +│ │ │ └── filters.rs +│ │ │ +│ │ └── product-search/ # Search engine service +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── indexing/ # ElasticSearch integration +│ │ ├── queries/ # Search 
query builders +│ │ └── ranking.rs # Relevance scoring +│ │ +│ ├── order-management/ # Order processing subsystem +│ │ ├── order-core/ # Business logic +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── entities/ # Order, OrderItem, Cart +│ │ │ ├── services/ # OrderService, CartService +│ │ │ ├── state_machine/ # Order state transitions +│ │ │ ├── repositories/ +│ │ │ └── errors.rs +│ │ │ +│ │ ├── order-api/ # HTTP API handlers +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── handlers/ # Cart, checkout, order status +│ │ │ ├── dto/ +│ │ │ └── validation.rs +│ │ │ +│ │ └── order-processor/ # Order workflow engine +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── main.rs +│ │ ├── workflows/ # Order fulfillment workflow +│ │ ├── tasks/ # Individual processing tasks +│ │ └── scheduler.rs +│ │ +│ ├── payment/ # Payment processing subsystem +│ │ ├── payment-core/ # Business logic +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── entities/ # Payment, Transaction, Refund +│ │ │ ├── services/ # PaymentService, RefundService +│ │ │ ├── state_machine/ # Payment state transitions +│ │ │ ├── repositories/ +│ │ │ └── errors.rs +│ │ │ +│ │ ├── payment-api/ # HTTP API handlers +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── handlers/ # Payment endpoints +│ │ │ ├── webhooks/ # Payment gateway webhooks +│ │ │ ├── dto/ +│ │ │ └── security.rs # Payment security +│ │ │ +│ │ └── payment-gateway/ # External payment integrations +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── stripe/ # Stripe integration +│ │ ├── paypal/ # PayPal integration +│ │ ├── providers/ # Payment provider traits +│ │ └── encryption.rs +│ │ +│ ├── notification/ # Notification subsystem +│ │ ├── notification-core/ # Business logic +│ │ │ ├── Cargo.toml +│ │ │ └── src/ +│ │ │ ├── lib.rs +│ │ │ ├── entities/ # Notification, Template +│ │ │ ├── services/ # NotificationService +│ │ │ ├── templates/ # Email/SMS templates +│ │ │ ├── repositories/ +│ │ │ └── errors.rs +│ │ │ +│ │ └── notification-service/ # Notification delivery +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── main.rs +│ │ ├── channels/ # Email, SMS, Push delivery +│ │ ├── queue/ # Message queue integration +│ │ ├── templates/ # Template rendering +│ │ └── retry.rs # Delivery retry logic +│ │ +│ └── analytics/ # Analytics & reporting subsystem +│ ├── analytics-core/ # Business logic +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── entities/ # Metric, Report, Dashboard +│ │ ├── services/ # AnalyticsService, ReportService +│ │ ├── aggregators/ # Data aggregation logic +│ │ ├── repositories/ +│ │ └── errors.rs +│ │ +│ ├── analytics-collector/ # Data collection service +│ │ ├── Cargo.toml +│ │ └── src/ +│ │ ├── main.rs +│ │ ├── collectors/ # Event collectors +│ │ ├── processors/ # Data processing pipelines +│ │ └── storage.rs # Time-series data storage +│ │ +│ └── analytics-reporter/ # Reporting service +│ ├── Cargo.toml +│ └── src/ +│ ├── main.rs +│ ├── generators/ # Report generators +│ ├── schedulers/ # Scheduled reporting +│ └── exporters/ # Report export formats +│ +└── services/ # Infrastructure & gateway services + ├── api-gateway/ # Unified API gateway + │ ├── Cargo.toml + │ └── src/ + │ ├── main.rs + │ ├── routing/ # Request routing + │ ├── middleware/ # CORS, rate limiting, auth + │ ├── load_balancer/ # Load balancing logic + │ └── monitoring.rs # Health checks, metrics + │ + ├── admin-cli/ # Administrative CLI tool + │ ├── Cargo.toml + │ └── src/ + │ ├── main.rs + │ ├── commands/ # Admin commands 
+ │ ├── scripts/ # Automation scripts + │ └── monitoring.rs # System monitoring + │ + └── migration-tool/ # Database migration tool + ├── Cargo.toml + └── src/ + ├── main.rs + ├── migrations/ # SQL migration files + ├── schemas/ # Database schemas + └── validation.rs # Migration validation +``` + +## 🔧 SUBSYSTEM-BASED WORKSPACE CARGO.TOML + +```toml +[workspace] +resolver = "2" +members = [ + # Shared infrastructure + "shared/shared-types", + "shared/shared-db", + "shared/shared-config", + "shared/shared-events", + + # User Management Subsystem + "subsystems/user-management/user-core", + "subsystems/user-management/user-api", + "subsystems/user-management/user-worker", + + # Product Catalog Subsystem + "subsystems/product-catalog/product-core", + "subsystems/product-catalog/product-api", + "subsystems/product-catalog/product-search", + + # Order Management Subsystem + "subsystems/order-management/order-core", + "subsystems/order-management/order-api", + "subsystems/order-management/order-processor", + + # Payment Subsystem + "subsystems/payment/payment-core", + "subsystems/payment/payment-api", + "subsystems/payment/payment-gateway", + + # Notification Subsystem + "subsystems/notification/notification-core", + "subsystems/notification/notification-service", + + # Analytics Subsystem + "subsystems/analytics/analytics-core", + "subsystems/analytics/analytics-collector", + "subsystems/analytics/analytics-reporter", + + # Infrastructure Services + "services/api-gateway", + "services/admin-cli", + "services/migration-tool", +] + +# Shared dependencies across workspace +[workspace.dependencies] +# Core dependencies +anyhow = "1.0" +thiserror = "2.0" +derive_more = { version = "2.0", features = ["constructor", "display", "error", "from"] } +typed-builder = "0.21" + +# Serialization +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +serde_yaml = "0.9" + +# Async runtime +tokio = { version = "1.45", features = ["macros", "rt-multi-thread", "net", "fs", "time", "sync", "signal"] } +async-trait = "0.1" +futures = "0.3" + +# Web framework +axum = { version = "0.8", features = ["macros", "multipart"] } +tower = { version = "0.5", features = ["full"] } +tower-http = { version = "0.6", features = ["cors", "trace", "compression", "auth", "limit"] } +http = "1.0" + +# API documentation +utoipa = { version = "5.0", features = ["axum_extras", "chrono", "uuid"] } +utoipa-axum = "0.2" +utoipa-swagger-ui = { version = "9.0", features = ["axum"] } + +# Database +sqlx = { version = "0.8", features = [ + "runtime-tokio-rustls", + "postgres", + "sqlite", + "chrono", + "uuid", + "time", + "json" +] } + +# HTTP client +reqwest = { version = "0.12", features = ["rustls-tls", "json", "stream", "multipart"] } + +# Time handling +chrono = { version = "0.4", features = ["serde"] } +time = { version = "0.3", features = ["serde"] } + +# Unique identifiers +uuid = { version = "1.17", features = ["v4", "v7", "serde"] } + +# Concurrent data structures +dashmap = "6.0" + +# Configuration +arc-swap = "1.0" + +# Templating +minijinja = { version = "2.0", features = ["loader", "json", "custom_syntax"] } + +# Authentication +jsonwebtoken = "9.0" + +# Utilities +base64 = "0.22" +regex = "1.0" +rand = { version = "0.8", features = ["std_rng"] } +getrandom = { version = "0.3", features = ["std"] } + +# CLI +clap = { version = "4.0", features = ["derive", "env"] } + +# Logging and tracing +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "chrono"] } + +# Testing +tempfile 
= "3.0" +wiremock = "0.6" + +[workspace.package] +version = "0.1.0" +edition = "2021" +authors = ["Platform Team "] +license = "MIT OR Apache-2.0" +repository = "https://github.com/company/ecommerce-platform" + +# Performance optimizations +[profile.release] +lto = true +codegen-units = 1 +panic = "abort" + +[profile.dev] +debug = true +opt-level = 0 + +# Shared linting configuration +[workspace.lints.rust] +unused_must_use = "warn" +unsafe_code = "forbid" +missing_docs = "warn" + +[workspace.lints.clippy] +all = "warn" +pedantic = "warn" +nursery = "warn" +cargo = "warn" +# Allow some pedantic lints that are too strict +module_name_repetitions = "allow" +must_use_candidate = "allow" +``` + +## 🏗️ SUBSYSTEM DEPENDENCY ARCHITECTURE + +```mermaid +graph TD + subgraph "Gateway Layer" + ApiGateway["api-gateway"] + AdminCli["admin-cli"] + MigrationTool["migration-tool"] + end + + subgraph "Application Layer" + UserApi["user-api"] + ProductApi["product-api"] + OrderApi["order-api"] + PaymentApi["payment-api"] + + UserWorker["user-worker"] + OrderProcessor["order-processor"] + NotificationService["notification-service"] + AnalyticsCollector["analytics-collector"] + AnalyticsReporter["analytics-reporter"] + end + + subgraph "Domain Layer" + UserCore["user-core"] + ProductCore["product-core"] + OrderCore["order-core"] + PaymentCore["payment-core"] + NotificationCore["notification-core"] + AnalyticsCore["analytics-core"] + + ProductSearch["product-search"] + PaymentGateway["payment-gateway"] + end + + subgraph "Infrastructure Layer" + SharedTypes["shared-types"] + SharedDb["shared-db"] + SharedConfig["shared-config"] + SharedEvents["shared-events"] + end + + %% Gateway dependencies + ApiGateway --> UserApi + ApiGateway --> ProductApi + ApiGateway --> OrderApi + ApiGateway --> PaymentApi + + AdminCli --> UserCore + AdminCli --> ProductCore + AdminCli --> OrderCore + AdminCli --> SharedDb + + MigrationTool --> SharedDb + + %% Application layer dependencies + UserApi --> UserCore + ProductApi --> ProductCore + ProductApi --> ProductSearch + OrderApi --> OrderCore + PaymentApi --> PaymentCore + + UserWorker --> UserCore + OrderProcessor --> OrderCore + OrderProcessor --> PaymentCore + NotificationService --> NotificationCore + AnalyticsCollector --> AnalyticsCore + AnalyticsReporter --> AnalyticsCore + + %% Domain layer dependencies + UserCore --> SharedTypes + UserCore --> SharedDb + UserCore --> SharedEvents + + ProductCore --> SharedTypes + ProductCore --> SharedDb + ProductCore --> SharedEvents + + OrderCore --> SharedTypes + OrderCore --> SharedDb + OrderCore --> SharedEvents + OrderCore --> UserCore + OrderCore --> ProductCore + + PaymentCore --> SharedTypes + PaymentCore --> SharedDb + PaymentCore --> SharedEvents + PaymentCore --> OrderCore + + NotificationCore --> SharedTypes + NotificationCore --> SharedDb + NotificationCore --> SharedEvents + + AnalyticsCore --> SharedTypes + AnalyticsCore --> SharedDb + + ProductSearch --> ProductCore + PaymentGateway --> PaymentCore + + %% Infrastructure dependencies + SharedDb --> SharedConfig + SharedEvents --> SharedTypes + + style ApiGateway fill:#2ecc71,stroke:#27ae60,color:white + style AdminCli fill:#2ecc71,stroke:#27ae60,color:white + style MigrationTool fill:#2ecc71,stroke:#27ae60,color:white + + style UserApi fill:#ffa64d,stroke:#cc7a30,color:white + style ProductApi fill:#d94dbb,stroke:#a3378a,color:white + style OrderApi fill:#4dbbbb,stroke:#368787,color:white + style PaymentApi fill:#ff6b6b,stroke:#cc5555,color:white + style UserWorker 
fill:#ffa64d,stroke:#cc7a30,color:white + style OrderProcessor fill:#4dbbbb,stroke:#368787,color:white + style NotificationService fill:#9b59b6,stroke:#7d4796,color:white + style AnalyticsCollector fill:#f39c12,stroke:#d68910,color:white + style AnalyticsReporter fill:#f39c12,stroke:#d68910,color:white + + style UserCore fill:#ffa64d,stroke:#cc7a30,color:white + style ProductCore fill:#d94dbb,stroke:#a3378a,color:white + style OrderCore fill:#4dbbbb,stroke:#368787,color:white + style PaymentCore fill:#ff6b6b,stroke:#cc5555,color:white + style NotificationCore fill:#9b59b6,stroke:#7d4796,color:white + style AnalyticsCore fill:#f39c12,stroke:#d68910,color:white + style ProductSearch fill:#d94dbb,stroke:#a3378a,color:white + style PaymentGateway fill:#ff6b6b,stroke:#cc5555,color:white + + style SharedTypes fill:#4dbb5f,stroke:#36873f,color:white + style SharedDb fill:#4dbb5f,stroke:#36873f,color:white + style SharedConfig fill:#4dbb5f,stroke:#36873f,color:white + style SharedEvents fill:#4dbb5f,stroke:#36873f,color:white +``` + +## 📚 SUBSYSTEM CRATE TEMPLATES + +### Shared Types Crate (Domain Primitives) + +```toml +# shared/shared-types/Cargo.toml +[package] +name = "shared-types" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true + +[dependencies] +thiserror = { workspace = true } +serde = { workspace = true } +uuid = { workspace = true } +chrono = { workspace = true } +derive_more = { workspace = true } +typed-builder = { workspace = true } +``` + +```rust +// shared/shared-types/src/lib.rs +//! Shared domain primitives and types used across subsystems + +pub mod ids; +pub mod money; +pub mod address; +pub mod time; +pub mod pagination; +pub mod events; + +// Re-export commonly used items +pub use ids::*; +pub use money::*; +pub use address::*; +pub use time::*; +pub use pagination::*; +pub use events::*; + +/// Library version +pub const VERSION: &str = env!("CARGO_PKG_VERSION"); +``` + +```rust +// shared/shared-types/src/ids.rs +use derive_more::{Constructor, Display, From}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +/// User identifier +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Hash, + Serialize, Deserialize, Display, From, Constructor +)] +pub struct UserId(pub Uuid); + +impl UserId { + pub fn new() -> Self { + Self(Uuid::new_v4()) + } +} + +impl Default for UserId { + fn default() -> Self { + Self::new() + } +} + +/// Product identifier +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Hash, + Serialize, Deserialize, Display, From, Constructor +)] +pub struct ProductId(pub Uuid); + +impl ProductId { + pub fn new() -> Self { + Self(Uuid::new_v4()) + } +} + +impl Default for ProductId { + fn default() -> Self { + Self::new() + } +} + +/// Order identifier +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Hash, + Serialize, Deserialize, Display, From, Constructor +)] +pub struct OrderId(pub Uuid); + +impl OrderId { + pub fn new() -> Self { + Self(Uuid::new_v4()) + } +} + +impl Default for OrderId { + fn default() -> Self { + Self::new() + } +} + +/// Payment identifier +#[derive( + Debug, Clone, Copy, PartialEq, Eq, Hash, + Serialize, Deserialize, Display, From, Constructor +)] +pub struct PaymentId(pub Uuid); + +impl PaymentId { + pub fn new() -> Self { + Self(Uuid::new_v4()) + } +} + +impl Default for PaymentId { + fn default() -> Self { + Self::new() + } +} +``` + +```rust +// shared/shared-types/src/money.rs +use derive_more::{Add, Constructor, Display, From, Sub}; +use serde::{Deserialize, Serialize}; +use 
std::cmp::Ordering; + +/// Currency code (ISO 4217) +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum Currency { + #[serde(rename = "USD")] + Usd, + #[serde(rename = "EUR")] + Eur, + #[serde(rename = "GBP")] + Gbp, + #[serde(rename = "CNY")] + Cny, +} + +impl Currency { + pub fn code(&self) -> &'static str { + match self { + Currency::Usd => "USD", + Currency::Eur => "EUR", + Currency::Gbp => "GBP", + Currency::Cny => "CNY", + } + } + + pub fn decimal_places(&self) -> u8 { + match self { + Currency::Usd | Currency::Eur | Currency::Gbp | Currency::Cny => 2, + } + } +} + +/// Money amount with currency +#[derive( + Debug, Clone, Copy, PartialEq, Eq, + Serialize, Deserialize, Constructor, Display, From +)] +#[display(fmt = "{} {}", amount, currency.code())] +pub struct Money { + /// Amount in smallest currency unit (cents for USD) + pub amount: i64, + pub currency: Currency, +} + +impl Money { + pub fn new(amount: i64, currency: Currency) -> Self { + Self { amount, currency } + } + + pub fn zero(currency: Currency) -> Self { + Self { amount: 0, currency } + } + + pub fn from_major_units(major: i64, currency: Currency) -> Self { + let multiplier = 10_i64.pow(currency.decimal_places() as u32); + Self { + amount: major * multiplier, + currency + } + } + + pub fn to_major_units(&self) -> f64 { + let divisor = 10_f64.powi(self.currency.decimal_places() as i32); + self.amount as f64 / divisor + } + + pub fn is_positive(&self) -> bool { + self.amount > 0 + } + + pub fn is_negative(&self) -> bool { + self.amount < 0 + } + + pub fn is_zero(&self) -> bool { + self.amount == 0 + } +} + +impl PartialOrd for Money { + fn partial_cmp(&self, other: &Self) -> Option { + if self.currency == other.currency { + Some(self.amount.cmp(&other.amount)) + } else { + None + } + } +} + +impl Add for Money { + type Output = Result; + + fn add(self, other: Self) -> Self::Output { + if self.currency != other.currency { + return Err(MoneyError::CurrencyMismatch { + left: self.currency, + right: other.currency, + }); + } + + Ok(Money { + amount: self.amount + other.amount, + currency: self.currency, + }) + } +} + +impl Sub for Money { + type Output = Result; + + fn sub(self, other: Self) -> Self::Output { + if self.currency != other.currency { + return Err(MoneyError::CurrencyMismatch { + left: self.currency, + right: other.currency, + }); + } + + Ok(Money { + amount: self.amount - other.amount, + currency: self.currency, + }) + } +} + +/// Money-related errors +#[derive(Debug, Clone, thiserror::Error)] +pub enum MoneyError { + #[error("Currency mismatch: {left:?} vs {right:?}")] + CurrencyMismatch { left: Currency, right: Currency }, + + #[error("Invalid amount: {amount}")] + InvalidAmount { amount: i64 }, + + #[error("Arithmetic overflow")] + Overflow, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_money_creation() { + let usd_100 = Money::from_major_units(100, Currency::Usd); + assert_eq!(usd_100.amount, 10000); + assert_eq!(usd_100.to_major_units(), 100.0); + } + + #[test] + fn test_money_addition() { + let a = Money::new(1000, Currency::Usd); + let b = Money::new(2000, Currency::Usd); + let result = (a + b).unwrap(); + assert_eq!(result.amount, 3000); + } + + #[test] + fn test_currency_mismatch() { + let usd = Money::new(1000, Currency::Usd); + let eur = Money::new(1000, Currency::Eur); + assert!(matches!(usd + eur, Err(MoneyError::CurrencyMismatch { .. 
}))); + } +} +``` + +### Domain Core Crate (User Management) + +```toml +# subsystems/user-management/user-core/Cargo.toml +[package] +name = "user-core" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true + +[dependencies] +# Workspace dependencies +shared-types = { path = "../../../shared/shared-types" } +shared-db = { path = "../../../shared/shared-db" } +shared-events = { path = "../../../shared/shared-events" } + +# External dependencies +thiserror = { workspace = true } +derive_more = { workspace = true } +typed-builder = { workspace = true } +serde = { workspace = true } +tokio = { workspace = true } +async-trait = { workspace = true } +chrono = { workspace = true } +uuid = { workspace = true } + +# Domain-specific dependencies +bcrypt = "0.15" +validator = { version = "0.18", features = ["derive"] } +regex = { workspace = true } + +[dev-dependencies] +tempfile = { workspace = true } +tokio-test = "0.4" +``` + +```rust +// subsystems/user-management/user-core/src/lib.rs +//! User management domain logic + +pub mod entities; +pub mod services; +pub mod repositories; +pub mod errors; +pub mod events; + +pub use entities::*; +pub use services::*; +pub use repositories::*; +pub use errors::*; +pub use events::*; + +/// User management result type +pub type Result = std::result::Result; +``` + +```rust +// subsystems/user-management/user-core/src/entities/user.rs +use shared_types::{UserId, Timestamp}; +use serde::{Deserialize, Serialize}; +use typed_builder::TypedBuilder; +use validator::Validate; + +/// User entity +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, TypedBuilder)] +#[serde(rename_all = "camelCase")] +pub struct User { + pub id: UserId, + + #[validate(email)] + #[builder(setter(into))] + pub email: String, + + #[serde(skip_serializing)] + #[builder(setter(into))] + pub password_hash: String, + + #[builder(setter(into))] + pub display_name: String, + + pub email_verified: bool, + pub is_active: bool, + + pub created_at: Timestamp, + pub updated_at: Timestamp, + + #[builder(default)] + pub last_login_at: Option, +} + +impl User { + pub fn new(email: impl Into, password_hash: impl Into, display_name: impl Into) -> Self { + let now = Timestamp::now(); + + Self::builder() + .id(UserId::new()) + .email(email) + .password_hash(password_hash) + .display_name(display_name) + .email_verified(false) + .is_active(true) + .created_at(now) + .updated_at(now) + .build() + } + + pub fn verify_email(&mut self) { + self.email_verified = true; + self.updated_at = Timestamp::now(); + } + + pub fn deactivate(&mut self) { + self.is_active = false; + self.updated_at = Timestamp::now(); + } + + pub fn record_login(&mut self) { + self.last_login_at = Some(Timestamp::now()); + self.updated_at = Timestamp::now(); + } + + pub fn update_profile(&mut self, display_name: impl Into) { + self.display_name = display_name.into(); + self.updated_at = Timestamp::now(); + } +} + +/// User profile for public display +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UserProfile { + pub id: UserId, + pub email: String, + pub display_name: String, + pub email_verified: bool, + pub created_at: Timestamp, + pub last_login_at: Option, +} + +impl From for UserProfile { + fn from(user: User) -> Self { + Self { + id: user.id, + email: user.email, + display_name: user.display_name, + email_verified: user.email_verified, + created_at: user.created_at, + last_login_at: user.last_login_at, + } + } +} + +/// User 
registration data +#[derive(Debug, Clone, Serialize, Deserialize, Validate, TypedBuilder)] +#[serde(rename_all = "camelCase")] +pub struct UserRegistration { + #[validate(email)] + #[builder(setter(into))] + pub email: String, + + #[validate(length(min = 8))] + #[builder(setter(into))] + pub password: String, + + #[validate(length(min = 1, max = 100))] + #[builder(setter(into))] + pub display_name: String, +} + +/// User login credentials +#[derive(Debug, Clone, Serialize, Deserialize, Validate, TypedBuilder)] +#[serde(rename_all = "camelCase")] +pub struct UserCredentials { + #[validate(email)] + #[builder(setter(into))] + pub email: String, + + #[builder(setter(into))] + pub password: String, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_user_creation() { + let user = User::new( + "test@example.com", + "hashed_password", + "Test User" + ); + + assert_eq!(user.email, "test@example.com"); + assert_eq!(user.display_name, "Test User"); + assert!(!user.email_verified); + assert!(user.is_active); + } + + #[test] + fn test_email_verification() { + let mut user = User::new( + "test@example.com", + "hashed_password", + "Test User" + ); + + user.verify_email(); + assert!(user.email_verified); + } + + #[test] + fn test_user_profile_conversion() { + let user = User::new( + "test@example.com", + "hashed_password", + "Test User" + ); + + let profile: UserProfile = user.into(); + assert_eq!(profile.email, "test@example.com"); + assert_eq!(profile.display_name, "Test User"); + } +} +``` + +## 🔄 SUBSYSTEM INTEGRATION WORKFLOW + +```mermaid +sequenceDiagram + participant Client as Client + participant Gateway as API Gateway + participant UserAPI as User API + participant OrderAPI as Order API + participant PaymentAPI as Payment API + participant EventBus as Event Bus + participant NotificationSvc as Notification Service + + Client->>Gateway: POST /api/orders + Gateway->>UserAPI: Validate user token + UserAPI->>Gateway: User authenticated + Gateway->>OrderAPI: Create order request + + OrderAPI->>OrderAPI: Validate order data + OrderAPI->>PaymentAPI: Process payment + PaymentAPI->>PaymentAPI: Handle payment + PaymentAPI->>EventBus: Publish PaymentProcessed + PaymentAPI->>OrderAPI: Payment result + + OrderAPI->>OrderAPI: Update order status + OrderAPI->>EventBus: Publish OrderPlaced + OrderAPI->>Gateway: Order created + Gateway->>Client: Order response + + EventBus->>NotificationSvc: OrderPlaced event + NotificationSvc->>NotificationSvc: Send confirmation email +``` + +## 📝 SUBSYSTEM DEVELOPMENT SCRIPTS + +### Build Script (scripts/build-subsystem.sh) + +```bash +#!/bin/bash +set -e + +SUBSYSTEM=$1 + +if [ -z "$SUBSYSTEM" ]; then + echo "Usage: $0 " + echo "Available subsystems: user-management, product-catalog, order-management, payment, notification, analytics" + exit 1 +fi + +echo "Building $SUBSYSTEM subsystem..." + +# Build core crate +if [ -d "subsystems/$SUBSYSTEM" ]; then + cd "subsystems/$SUBSYSTEM" + + # Find and build all crates in subsystem + for crate_dir in */; do + if [ -f "$crate_dir/Cargo.toml" ]; then + echo "Building $crate_dir..." + cd "$crate_dir" + cargo build --all-features + cargo test --all-features + cargo clippy -- -D warnings + cd .. + fi + done + + cd ../.. +else + echo "Subsystem '$SUBSYSTEM' not found" + exit 1 +fi + +echo "$SUBSYSTEM subsystem build completed successfully!" +``` + +### Integration Test Script (scripts/test-integration.sh) + +```bash +#!/bin/bash +set -e + +echo "Running integration tests..." 
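+
+# Optional hardening (sketch): with `set -e`, a failing test below would skip the
+# cleanup step at the end of this script and leave the docker-compose test stack
+# running. An EXIT trap tears it down on success and failure alike; it reuses the
+# same docker-compose.test.yml referenced below.
+cleanup() {
+    docker-compose -f docker-compose.test.yml down
+}
+trap cleanup EXIT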
+ +# Start test dependencies +docker-compose -f docker-compose.test.yml up -d postgres redis + +# Wait for services to be ready +echo "Waiting for test services..." +sleep 10 + +# Run database migrations +cargo run --bin migration-tool -- --env test migrate up + +# Run integration tests +cargo test --test integration_tests --all-features + +# Run end-to-end tests +cargo test --test e2e_tests --all-features + +# Cleanup +docker-compose -f docker-compose.test.yml down + +echo "Integration tests completed successfully!" +``` + +## 📋 SUBSYSTEM DESIGN CHECKLIST + +```markdown +## Subsystem Architecture Verification + +### Subsystem Boundaries +- [ ] Each subsystem has clear business domain responsibility +- [ ] Subsystems communicate only through well-defined interfaces +- [ ] Shared infrastructure is properly abstracted +- [ ] Domain events are used for cross-subsystem communication +- [ ] No direct database access between subsystems + +### Code Organization +- [ ] Core business logic is separated from infrastructure +- [ ] API layers are thin and focused on HTTP concerns +- [ ] Worker/processor crates handle background operations +- [ ] Shared types define domain primitives consistently +- [ ] Error types are subsystem-specific but composable + +### Dependency Management +- [ ] Subsystem dependencies follow layer architecture +- [ ] No circular dependencies between subsystems +- [ ] Shared crates are only for cross-cutting concerns +- [ ] External dependencies are centralized in workspace +- [ ] Version consistency maintained across subsystems + +### Testing Strategy +- [ ] Unit tests cover domain logic in core crates +- [ ] Integration tests verify subsystem boundaries +- [ ] End-to-end tests cover business workflows +- [ ] Performance tests validate scalability +- [ ] Contract tests ensure API compatibility + +### Documentation +- [ ] Subsystem README explains business purpose +- [ ] API documentation is auto-generated and current +- [ ] Architecture decisions are documented +- [ ] Deployment guides are subsystem-specific +- [ ] Monitoring and alerting guidelines exist diff --git a/.cursor/rules/rust/core/code-quality.mdc b/.cursor/rules/rust/core/code-quality.mdc index e63beea..c32f477 100644 --- a/.cursor/rules/rust/core/code-quality.mdc +++ b/.cursor/rules/rust/core/code-quality.mdc @@ -10,8 +10,8 @@ alwaysApply: false ## 🎯 FUNDAMENTAL PRINCIPLES ### Code Organization -- **Functionality-based files**: Use meaningful file names like `node.rs`, `workflow.rs`, `execution.rs` instead of generic `models.rs`, `traits.rs`, `types.rs` -- **Meaningful naming**: Avoid names like `WorkflowValidatorImpl` - use descriptive, specific names +- **Functionality-based files**: Use meaningful file names like `user.rs`, `product.rs`, `auth.rs` instead of generic `models.rs`, `traits.rs`, `types.rs` +- **Meaningful naming**: Avoid names like `UserServiceImpl` - use descriptive, specific names - **File size limits**: Maximum 500 lines per file (excluding tests) - **Function size**: Maximum 150 lines per function - **Single Responsibility**: Each module should have one clear purpose @@ -35,34 +35,135 @@ serde = { workspace = true, features = ["derive"] } # Request permission before modifying Cargo.toml ``` +### Standard Crate Recommendations +When adding new dependencies, prefer these battle-tested crates: + +```toml +# Core utilities +anyhow = "1.0" # Error handling +thiserror = "2.0" # Error type definitions +derive_more = { version = "2", features = ["full"] } # Extended derive macros +typed-builder = 
"0.21" # Builder pattern + +# Async/Concurrency +tokio = { version = "1.45", features = [ + "macros", + "rt-multi-thread", + "signal", + "sync" +] } +async-trait = "0.1" # Async traits +futures = "0.3" # Async utilities +dashmap = { version = "6", features = ["serde"] } # Concurrent HashMap + +# Serialization +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +serde_yaml = "0.9" +base64 = "0.22" + +# Web/HTTP +axum = { version = "0.8", features = ["macros", "http2"] } +reqwest = { version = "0.12", default-features = false, features = [ + "charset", + "rustls-tls-webpki-roots", + "http2", + "json", + "cookies", + "gzip", + "brotli", + "zstd", + "deflate" +] } +tower = { version = "0.5", features = ["util"] } +tower-http = { version = "0.6", features = ["cors", "trace"] } +http = "1" + +# Database +sqlx = { version = "0.8", features = [ + "chrono", + "postgres", + "runtime-tokio-rustls", + "sqlite", + "time", + "uuid" +] } + +# Documentation/API +utoipa = { version = "5", features = ["axum_extras"] } +utoipa-axum = { version = "0.2" } +utoipa-swagger-ui = { version = "9", features = [ + "axum", + "vendored" +], default-features = false } +schemars = { version = "0.8", features = ["chrono", "url"] } + +# Time/Date +chrono = { version = "0.4", features = ["serde"] } +time = { version = "0.3", features = ["serde"] } + +# Templating/Text Processing +minijinja = { version = "2", features = [ + "json", + "loader", + "loop_controls", + "speedups" +] } +regex = "1" +htmd = "0.2" # HTML to Markdown + +# Authentication/Security +jsonwebtoken = "9.0" +uuid = { version = "1.17", features = ["v4", "serde"] } + +# Data Processing +jsonpath-rust = "1" +url = "2.5" + +# CLI (when needed) +clap = { version = "4.0", features = ["derive"] } + +# Utilities +rand = "0.8" +getrandom = "0.3" +atomic_enum = "0.3" # Atomic enumerations + +# Logging/Observability +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } +``` + ### Version Strategy - **Always use latest versions** when adding new dependencies - **Request permission** before modifying `Cargo.toml` - **Check workspace first** - never duplicate dependencies unnecessarily +- **Use specific feature flags** to minimize compilation time and binary size +- **Prefer rustls over openssl** for TLS (better for cross-compilation) ## 🏗️ CODE STRUCTURE PATTERNS ### Data Structure Organization ```rust // ✅ Good: Functionality-based organization -// src/workflow.rs - All workflow-related types and logic +// src/user.rs - All user-related types and logic #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] // Always use camelCase for JSON serialization -pub struct WorkflowDefinition { - pub workflow_id: String, +pub struct User { + pub user_id: String, pub display_name: String, + pub email: String, pub created_at: DateTime, } // ✅ Good: Meaningful trait names -pub trait WorkflowValidator { - fn validate(&self, workflow: &WorkflowDefinition) -> Result<(), ValidationError>; +pub trait UserValidator { + fn validate(&self, user: &User) -> Result<(), ValidationError>; } // ❌ Bad: Generic file organization // src/models.rs, src/traits.rs, src/types.rs // ❌ Bad: Poor naming -// struct WorkflowValidatorImpl +// struct UserValidatorImpl ``` ### Serde Configuration @@ -71,13 +172,13 @@ pub trait WorkflowValidator { #[derive(Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ApiResponse { - pub workflow_id: String, + pub user_id: String, pub created_at: DateTime, pub is_active: 
bool, } // This serializes to: -// {"workflowId": "...", "createdAt": "...", "isActive": true} +// {"userId": "...", "createdAt": "...", "isActive": true} ``` ## 🔧 BUILD AND QUALITY CHECKS @@ -115,30 +216,30 @@ expect_used = "deny" ```rust // ✅ Good: Feature-based modules src/ -├── workflow/ +├── user/ │ ├── mod.rs -│ ├── validator.rs // WorkflowValidator trait and implementations -│ ├── executor.rs // WorkflowExecutor logic -│ └── definition.rs // WorkflowDefinition types -├── node/ +│ ├── service.rs // UserService logic +│ ├── repository.rs // User data access +│ └── validator.rs // User validation +├── product/ │ ├── mod.rs -│ ├── registry.rs // NodeRegistry (not NodeTypeRegistry) -│ └── executor.rs // Node execution logic -└── storage/ +│ ├── catalog.rs // Product catalog logic +│ └── pricing.rs // Product pricing logic +└── auth/ ├── mod.rs - ├── entities.rs // Database entities - └── repositories.rs // Data access patterns + ├── token.rs // Token management + └── session.rs // Session handling ``` ### Naming Best Practices ```rust // ✅ Good naming examples -pub struct WorkflowValidator; // Clear, specific -pub struct NodeExecutor; // Action-oriented +pub struct UserService; // Clear, specific +pub struct ProductCatalog; // Action-oriented pub struct DatabaseConnection; // Descriptive // ❌ Bad naming examples -pub struct WorkflowValidatorImpl; // Unnecessary "Impl" suffix +pub struct UserServiceImpl; // Unnecessary "Impl" suffix pub struct Helper; // Too generic pub struct Manager; // Vague responsibility ``` @@ -153,15 +254,15 @@ mod tests { use super::*; #[test] - fn test_workflow_validation() { - let validator = WorkflowValidator::new(); - let workflow = WorkflowDefinition::default(); - assert!(validator.validate(&workflow).is_ok()); + fn test_user_validation() { + let validator = UserValidator::new(); + let user = User::default(); + assert!(validator.validate(&user).is_ok()); } } // ❌ Don't create separate test files for unit tests -// tests/workflow_test.rs (this is for integration tests only) +// tests/user_test.rs (this is for integration tests only) ``` ### Test Naming @@ -171,12 +272,12 @@ mod tests { use super::*; #[test] - fn test_valid_workflow_passes_validation() { + fn test_valid_email_passes_validation() { // Test name clearly describes the scenario } #[test] - fn test_empty_workflow_id_returns_error() { + fn test_empty_email_returns_error() { // Specific about what's being tested } } @@ -186,26 +287,27 @@ mod tests { ### Code Documentation ```rust -/// Validates workflow definitions according to business rules. +/// Validates user data according to business rules. 
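+/// Covers email format, display-name length, and required-field checks (see the
+/// error cases documented below).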
/// /// # Examples /// /// ```rust -/// let validator = WorkflowValidator::new(); -/// let workflow = WorkflowDefinition::builder() -/// .workflow_id("test-workflow") +/// let validator = UserValidator::new(); +/// let user = User::builder() +/// .email("user@example.com") +/// .display_name("John Doe") /// .build(); /// -/// assert!(validator.validate(&workflow).is_ok()); +/// assert!(validator.validate(&user).is_ok()); /// ``` /// /// # Errors /// /// Returns `ValidationError` if: -/// - Workflow ID is empty or invalid +/// - Email is empty or invalid format +/// - Display name is too long /// - Required fields are missing -/// - Business rules are violated -pub struct WorkflowValidator { +pub struct UserValidator { rules: Vec, } ``` @@ -220,7 +322,7 @@ pub struct WorkflowValidator { // src/helpers.rs - unclear responsibility // ❌ Don't use implementation suffixes -pub struct WorkflowValidatorImpl; +pub struct UserValidatorImpl; pub struct DatabaseManagerImpl; // ❌ Don't mix concerns in single files diff --git a/.cursor/rules/rust/features/axum.mdc b/.cursor/rules/rust/features/axum.mdc index e4ef820..22bb867 100644 --- a/.cursor/rules/rust/features/axum.mdc +++ b/.cursor/rules/rust/features/axum.mdc @@ -3,14 +3,476 @@ description: globs: alwaysApply: false --- -# 🌐 AXUM WEB FRAMEWORK BEST PRACTICES +# 🌐 AXUM WEB FRAMEWORK STANDARDS -> **TL;DR:** Modern async web development with Axum, focusing on structured APIs, proper error handling, and OpenAPI documentation. +> **TL;DR:** Comprehensive guidelines for building production-ready web applications with Axum, including AppConfig/AppState patterns, OpenAPI integration with utoipa, and scalable server architecture. -## 🎯 AXUM PROJECT STRUCTURE +## 🏗️ AXUM APPLICATION ARCHITECTURE + +```mermaid +graph TD + HTTP["HTTP Requests"] --> Router["Axum Router"] + Router --> Middleware["Middleware Stack"] + Middleware --> Handlers["Route Handlers"] + + Handlers --> State["Application State"] + State --> Config["AppConfig
(via arc-swap)"] + State --> Database["Database Pool"] + State --> Services["Service Layer"] + + Config --> Settings["Configuration Settings"] + Database --> Repositories["Repository Layer"] + Services --> BusinessLogic["Business Logic"] + + Handlers --> Response["HTTP Response"] + Response --> OpenAPI["OpenAPI Docs
(utoipa)"] + Response --> Swagger["Swagger UI"] + + style HTTP fill:#4da6ff,stroke:#0066cc,color:white + style Router fill:#4dbb5f,stroke:#36873f,color:white + style State fill:#ffa64d,stroke:#cc7a30,color:white + style Config fill:#d94dbb,stroke:#a3378a,color:white + style OpenAPI fill:#4dbbbb,stroke:#368787,color:white +``` + +## 🎯 AXUM VERSION AND SETUP + +### Axum 0.8+ Requirements +- **Use Axum 0.8 or later** - leverages latest async patterns +- **Path parameters with `{param}` syntax** - more intuitive than `:param` +- **Structured router organization** - group related endpoints +- **OpenAPI integration** with utoipa for documentation +- **AppState pattern** with arc-swap for hot-reloadable configuration + +## 📦 AXUM DEPENDENCIES + +```toml +# Cargo.toml - Required dependencies for Axum applications +[dependencies] +# Core Axum +axum = { version = "0.8", features = ["macros", "multipart"] } +tokio = { version = "1.45", features = ["macros", "rt-multi-thread", "net", "fs", "time", "sync", "signal"] } +tower = { version = "0.5", features = ["full"] } +tower-http = { version = "0.6", features = ["cors", "trace", "compression", "auth", "limit"] } +http = "1.0" + +# Configuration management with hot-reload +arc-swap = "1.0" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" + +# OpenAPI and documentation +utoipa = { version = "5.0", features = ["axum_extras", "chrono", "uuid"] } +utoipa-axum = "0.2" +utoipa-swagger-ui = { version = "9.0", features = ["axum"] } + +# Error handling and validation +anyhow = "1.0" +thiserror = "2.0" +validator = { version = "0.18", features = ["derive"] } + +# Utilities +uuid = { version = "1.17", features = ["v4", "v7", "serde"] } +chrono = { version = "0.4", features = ["serde"] } + +# Logging and tracing +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter", "json", "chrono"] } + +# Database (if needed) +sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono", "time", "json"] } + +# HTTP client +reqwest = { version = "0.12", features = ["rustls-tls", "json", "stream", "multipart"] } + +[dev-dependencies] +axum-test = "15.0" +tower-test = "0.4" +tempfile = "3.0" +wiremock = "0.6" +``` + +## 🔧 APPLICATION STATE PATTERN + +### AppConfig with arc-swap -### Router Organization ```rust +// src/config.rs +use arc_swap::ArcSwap; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use utoipa::ToSchema; + +/// Application configuration that can be hot-reloaded +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct AppConfig { + pub server: ServerConfig, + pub database: DatabaseConfig, + pub auth: AuthConfig, + pub features: FeatureFlags, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ServerConfig { + pub host: String, + pub port: u16, + pub cors_origins: Vec, + pub request_timeout_seconds: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct DatabaseConfig { + pub url: String, + pub max_connections: u32, + pub connection_timeout_seconds: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct AuthConfig { + pub jwt_secret: String, + pub token_expiry_hours: u64, + pub bcrypt_cost: u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct FeatureFlags { + pub enable_registration: 
bool, + pub enable_swagger: bool, + pub enable_metrics: bool, +} + +impl AppConfig { + pub fn load() -> anyhow::Result { + // Load from environment variables, config files, etc. + let config = Self { + server: ServerConfig { + host: std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string()), + port: std::env::var("PORT") + .unwrap_or_else(|_| "8080".to_string()) + .parse()?, + cors_origins: std::env::var("CORS_ORIGINS") + .unwrap_or_else(|_| "*".to_string()) + .split(',') + .map(|s| s.trim().to_string()) + .collect(), + request_timeout_seconds: std::env::var("REQUEST_TIMEOUT") + .unwrap_or_else(|_| "30".to_string()) + .parse()?, + }, + database: DatabaseConfig { + url: std::env::var("DATABASE_URL") + .expect("DATABASE_URL must be set"), + max_connections: std::env::var("DB_MAX_CONNECTIONS") + .unwrap_or_else(|_| "20".to_string()) + .parse()?, + connection_timeout_seconds: std::env::var("DB_TIMEOUT") + .unwrap_or_else(|_| "30".to_string()) + .parse()?, + }, + auth: AuthConfig { + jwt_secret: std::env::var("JWT_SECRET") + .expect("JWT_SECRET must be set"), + token_expiry_hours: std::env::var("TOKEN_EXPIRY_HOURS") + .unwrap_or_else(|_| "24".to_string()) + .parse()?, + bcrypt_cost: std::env::var("BCRYPT_COST") + .unwrap_or_else(|_| "12".to_string()) + .parse()?, + }, + features: FeatureFlags { + enable_registration: std::env::var("ENABLE_REGISTRATION") + .map(|v| v.parse().unwrap_or(true)) + .unwrap_or(true), + enable_swagger: std::env::var("ENABLE_SWAGGER") + .map(|v| v.parse().unwrap_or(false)) + .unwrap_or(false), + enable_metrics: std::env::var("ENABLE_METRICS") + .map(|v| v.parse().unwrap_or(false)) + .unwrap_or(false), + }, + }; + + Ok(config) + } +} + +impl Default for AppConfig { + fn default() -> Self { + Self { + server: ServerConfig { + host: "0.0.0.0".to_string(), + port: 8080, + cors_origins: vec!["*".to_string()], + request_timeout_seconds: 30, + }, + database: DatabaseConfig { + url: "postgres://localhost/test".to_string(), + max_connections: 20, + connection_timeout_seconds: 30, + }, + auth: AuthConfig { + jwt_secret: "development-secret".to_string(), + token_expiry_hours: 24, + bcrypt_cost: 12, + }, + features: FeatureFlags { + enable_registration: true, + enable_swagger: true, + enable_metrics: false, + }, + } + } +} +``` + +### AppState Implementation + +```rust +// src/state.rs +use crate::config::AppConfig; +use arc_swap::ArcSwap; +use sqlx::PgPool; +use std::sync::Arc; +use anyhow::Result; + +/// Application state shared across all handlers +#[derive(Clone)] +pub struct AppState { + /// Configuration that can be hot-reloaded + pub config: Arc>, + /// Database connection pool + pub db: PgPool, + /// HTTP client for external APIs + pub http_client: reqwest::Client, +} + +impl AppState { + pub async fn new(config: AppConfig) -> Result { + // Initialize database pool + let db = PgPool::connect(&config.database.url).await?; + + // Run migrations + sqlx::migrate!("./migrations").run(&db).await?; + + // Initialize HTTP client with timeout + let http_client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build()?; + + Ok(Self { + config: Arc::new(ArcSwap::from_pointee(config)), + db, + http_client, + }) + } + + /// Get current configuration + pub fn config(&self) -> Arc { + self.config.load_full() + } + + /// Update configuration (hot reload) + pub fn update_config(&self, new_config: AppConfig) { + self.config.store(Arc::new(new_config)); + } +} +``` + +## 🛣️ ROUTER AND ROUTES STRUCTURE + +### Main Router Setup + +```rust +// src/lib.rs +use 
axum::{ + routing::{get, post}, + Router, +}; +use tower_http::{ + cors::CorsLayer, + trace::TraceLayer, + compression::CompressionLayer, +}; +use utoipa::OpenApi; +use utoipa_swagger_ui::SwaggerUi; + +mod config; +mod state; +mod routes; +mod middleware; +mod errors; + +use state::AppState; + +#[derive(OpenApi)] +#[openapi( + paths( + routes::health::health_check, + routes::auth::login, + routes::auth::register, + routes::users::get_user, + routes::users::list_users, + routes::products::list_products, + routes::orders::create_order, + ), + components( + schemas( + config::AppConfig, + config::ServerConfig, + routes::auth::LoginRequest, + routes::auth::RegisterRequest, + routes::auth::TokenResponse, + routes::users::User, + routes::users::UserResponse, + routes::products::Product, + routes::orders::Order, + errors::ApiError, + ) + ), + tags( + (name = "health", description = "Health check endpoints"), + (name = "auth", description = "Authentication endpoints"), + (name = "users", description = "User management endpoints"), + (name = "products", description = "Product catalog endpoints"), + (name = "orders", description = "Order processing endpoints"), + ), + info( + title = "My API", + version = "1.0.0", + description = "A well-documented API built with Axum", + contact( + name = "API Support", + email = "support@example.com" + ) + ) +)] +struct ApiDoc; + +pub async fn create_app(state: AppState) -> Router { + let config = state.config(); + + let mut router = Router::new() + // Health endpoints + .route("/health", get(routes::health::health_check)) + .route("/ready", get(routes::health::readiness_check)) + + // API routes + .nest("/api/v1", api_routes()) + + // Add middleware + .layer(TraceLayer::new_for_http()) + .layer(CompressionLayer::new()) + .layer(cors_layer(&config)) + + // Add state + .with_state(state); + + // Conditionally add Swagger UI + if config.features.enable_swagger { + router = router.merge( + SwaggerUi::new("/swagger-ui") + .url("/api-docs/openapi.json", ApiDoc::openapi()) + ); + } + + router +} + +fn api_routes() -> Router { + Router::new() + .nest("/auth", routes::auth::routes()) + .nest("/users", routes::users::routes()) + .nest("/products", routes::products::routes()) + .nest("/orders", routes::orders::routes()) + .layer(middleware::auth::auth_middleware()) +} + +fn cors_layer(config: &AppConfig) -> CorsLayer { + let origins = config.server.cors_origins + .iter() + .filter_map(|origin| origin.parse().ok()) + .collect::>(); + + CorsLayer::new() + .allow_origin(origins) + .allow_methods([ + axum::http::Method::GET, + axum::http::Method::POST, + axum::http::Method::PUT, + axum::http::Method::DELETE, + ]) + .allow_headers([ + axum::http::header::AUTHORIZATION, + axum::http::header::CONTENT_TYPE, + ]) +} +``` + +### Structured Router Pattern +```rust +use axum::{Router, routing::get, routing::post, extract::State}; +use utoipa::OpenApi; +use std::sync::Arc; + +#[derive(OpenApi)] +#[openapi( + paths( + users::list_users, + users::get_user_by_id, + users::create_user, + users::update_user, + users::delete_user, + products::list_products, + products::get_product_by_id, + orders::create_order, + orders::get_order_status, + ), + components( + schemas(User, Product, Order, CreateUserRequest, UpdateUserRequest, ApiError) + ), + tags( + (name = "users", description = "User management endpoints"), + (name = "products", description = "Product catalog endpoints"), + (name = "orders", description = "Order processing endpoints") + ) +)] +pub struct ApiDoc; + +pub fn 
create_app(app_state: Arc) -> Router { + Router::new() + .merge(create_api_router()) + .merge(create_docs_router()) + .with_state(app_state) + .layer(create_middleware_stack()) +} + +fn create_api_router() -> Router> { + Router::new() + .nest("/api/v1/users", users::create_router()) + .nest("/api/v1/products", products::create_router()) + .nest("/api/v1/orders", orders::create_router()) + .nest("/api/v1/health", health::create_router()) +} + +fn create_docs_router() -> Router> { + use utoipa_swagger_ui::SwaggerUi; + + Router::new() + .merge(SwaggerUi::new("/docs").url("/api-docs/openapi.json", ApiDoc::openapi())) + .route("/api-docs/openapi.json", get(|| async { + Json(ApiDoc::openapi()) + })) +} +``` + +### Module-based Endpoint Organization +```rust +// users.rs - User management endpoints use axum::{ extract::{Path, Query, State}, http::StatusCode, @@ -18,216 +480,344 @@ use axum::{ routing::{get, post, put, delete}, Router, }; -use utoipa::{OpenApi, ToSchema}; -use utoipa_axum::{router::OpenApiRouter, routes}; - -// ✅ Good: Modular router structure -pub fn create_app(state: AppState) -> Router { - let (router, api) = OpenApiRouter::with_openapi(ApiDoc::openapi()) - .routes(routes!(health_check)) - .nest("/api/v1/workflows", workflows::router()) - .nest("/api/v1/nodes", nodes::router()) - .nest("/api/v1/executions", executions::router()) - .with_state(state) - .split_for_parts(); - - router.merge(utoipa_swagger_ui::SwaggerUi::new("/swagger-ui") - .url("/apidoc/openapi.json", api)) -} - -#[derive(OpenApi)] -#[openapi( - tags( - (name = "workflows", description = "Workflow management API"), - (name = "nodes", description = "Node management API"), - (name = "executions", description = "Execution management API") - ) -)] -struct ApiDoc; -``` - -### Application State Pattern -```rust -use sqlx::PgPool; -use std::sync::Arc; - -#[derive(Clone)] -pub struct AppState { - pub db: PgPool, - pub config: Arc, - pub node_registry: Arc, - pub event_bus: Arc, -} - -impl AppState { - pub async fn new(config: AppConfig) -> Result { - let db = create_connection_pool(&config.database.url).await?; - - // Run migrations - sqlx::migrate!("./migrations").run(&db).await?; - - let node_registry = Arc::new(NodeRegistry::new()); - let event_bus = Arc::new(EventBus::new(1000)); - - Ok(Self { - db, - config: Arc::new(config), - node_registry, - event_bus, - }) - } -} -``` - -## 🔧 REQUEST/RESPONSE PATTERNS - -### Path Parameters with Validation -```rust -use axum::extract::Path; -use serde::Deserialize; +use utoipa::{IntoParams, ToSchema}; use uuid::Uuid; -use utoipa::ToSchema; -// ✅ Good: Axum 0.8 path parameter syntax +pub fn create_router() -> Router> { + Router::new() + .route("/", get(list_users).post(create_user)) + .route("/{user_id}", get(get_user_by_id).put(update_user).delete(delete_user)) + .route("/{user_id}/orders", get(get_user_orders)) +} + +#[derive(serde::Deserialize, IntoParams)] +#[serde(rename_all = "camelCase")] +pub struct ListUsersParams { + #[param(minimum = 1, maximum = 100, default = 20)] + pub limit: Option, + #[param(minimum = 0, default = 0)] + pub offset: Option, + #[param(example = "john")] + pub search: Option, + #[param(example = true)] + pub active_only: Option, +} + #[utoipa::path( get, - path = "/api/v1/workflows/{workflow_id}", + path = "/api/v1/users", + params(ListUsersParams), + responses( + (status = 200, description = "List of users", body = [User]), + (status = 500, description = "Internal server error", body = ApiError) + ), + tag = "users" +)] +pub async fn list_users( + 
State(state): State>, + Query(params): Query, +) -> Result>, ApiError> { + let limit = params.limit.unwrap_or(20); + let offset = params.offset.unwrap_or(0); + + let mut query_builder = state.user_repository.query_builder(); + + if let Some(search) = params.search { + query_builder = query_builder.search(&search); + } + + if params.active_only.unwrap_or(false) { + query_builder = query_builder.active_only(); + } + + let users = query_builder + .limit(limit) + .offset(offset) + .execute() + .await + .map_err(ApiError::from)?; + + Ok(Json(users)) +} + +#[utoipa::path( + get, + path = "/api/v1/users/{user_id}", params( - ("workflow_id" = Uuid, Path, description = "Workflow unique identifier") + ("user_id" = Uuid, Path, description = "User ID") ), responses( - (status = 200, description = "Workflow found", body = WorkflowResponse), - (status = 404, description = "Workflow not found", body = ErrorResponse) + (status = 200, description = "User details", body = User), + (status = 404, description = "User not found", body = ApiError), + (status = 500, description = "Internal server error", body = ApiError) ), - tag = "workflows" + tag = "users" )] -pub async fn get_workflow( - State(state): State, - Path(workflow_id): Path, // Note: {workflow_id} in route, not :workflow_id -) -> Result, AppError> { - let workflow = state - .workflow_service - .get_by_id(workflow_id) - .await? - .ok_or(AppError::NotFound("Workflow not found".to_string()))?; +pub async fn get_user_by_id( + State(state): State>, + Path(user_id): Path, +) -> Result, ApiError> { + let user = state + .user_repository + .find_by_id(user_id) + .await + .map_err(ApiError::from)? + .ok_or(ApiError::NotFound("User not found".to_string()))?; - Ok(Json(WorkflowResponse::from(workflow))) -} - -// ❌ Wrong: Axum 0.7 syntax (outdated) -// Path("/api/v1/workflows/:workflow_id") // Don't use :workflow_id -``` - -### Query Parameters with Defaults -```rust -use serde::Deserialize; - -#[derive(Debug, Deserialize, ToSchema)] -#[serde(rename_all = "camelCase")] -pub struct ListQuery { - #[serde(default = "default_limit")] - pub limit: i64, - #[serde(default)] - pub offset: i64, - #[serde(default)] - pub sort_by: Option, - #[serde(default)] - pub sort_order: Option, -} - -fn default_limit() -> i64 { - 20 -} - -#[derive(Debug, Deserialize, ToSchema)] -#[serde(rename_all = "lowercase")] -pub enum SortOrder { - Asc, - Desc, -} - -#[utoipa::path( - get, - path = "/api/v1/workflows", - params(ListQuery), - responses( - (status = 200, description = "List of workflows", body = WorkflowListResponse) - ), - tag = "workflows" -)] -pub async fn list_workflows( - State(state): State, - Query(query): Query, -) -> Result, AppError> { - let workflows = state - .workflow_service - .list(query.limit, query.offset, query.sort_by, query.sort_order) - .await?; - - Ok(Json(WorkflowListResponse { workflows })) -} -``` - -### JSON Request/Response with Validation -```rust -use axum::Json; -use serde::{Deserialize, Serialize}; -use utoipa::ToSchema; -use validator::Validate; - -#[derive(Debug, Deserialize, Validate, ToSchema)] -#[serde(rename_all = "camelCase")] -pub struct CreateWorkflowRequest { - #[validate(length(min = 1, max = 100))] - pub name: String, - - #[validate(length(max = 500))] - pub description: Option, - - #[serde(default)] - pub workflow_data: serde_json::Value, - - #[serde(default)] - pub is_active: bool, -} - -#[derive(Debug, Serialize, ToSchema)] -#[serde(rename_all = "camelCase")] -pub struct WorkflowResponse { - pub id: Uuid, - pub name: String, - pub 
description: Option, - pub workflow_data: serde_json::Value, - pub is_active: bool, - pub created_at: DateTime, - pub updated_at: DateTime, + Ok(Json(user)) } #[utoipa::path( post, - path = "/api/v1/workflows", - request_body = CreateWorkflowRequest, + path = "/api/v1/users", + request_body = CreateUserRequest, responses( - (status = 201, description = "Workflow created", body = WorkflowResponse), - (status = 400, description = "Invalid request", body = ErrorResponse), - (status = 422, description = "Validation error", body = ValidationErrorResponse) + (status = 201, description = "User created successfully", body = User), + (status = 400, description = "Invalid request", body = ApiError), + (status = 409, description = "User already exists", body = ApiError), + (status = 500, description = "Internal server error", body = ApiError) ), - tag = "workflows" + tag = "users" )] -pub async fn create_workflow( - State(state): State, - Json(request): Json, -) -> Result<(StatusCode, Json), AppError> { +pub async fn create_user( + State(state): State>, + Json(request): Json, +) -> Result<(StatusCode, Json), ApiError> { // Validate request - request.validate() - .map_err(|e| AppError::Validation(e.to_string()))?; + state.validator.validate_create_user(&request).await?; - let workflow = state - .workflow_service + // Check if user already exists + if let Some(_) = state.user_repository.find_by_email(&request.email).await.map_err(ApiError::from)? { + return Err(ApiError::Conflict("User with this email already exists".to_string())); + } + + let user = state + .user_repository .create(request) - .await?; + .await + .map_err(ApiError::from)?; - Ok((StatusCode::CREATED, Json(WorkflowResponse::from(workflow)))) + Ok((StatusCode::CREATED, Json(user))) +} +``` + +## 🔍 PATH AND QUERY PARAMETERS + +### Path Parameter Patterns +```rust +// ✅ Good: Use {param} syntax (Axum 0.8+) +#[utoipa::path( + get, + path = "/api/v1/products/{product_id}/reviews/{review_id}", + params( + ("product_id" = Uuid, Path, description = "Product ID"), + ("review_id" = Uuid, Path, description = "Review ID") + ), + responses( + (status = 200, description = "Review details", body = ProductReview) + ), + tag = "products" +)] +pub async fn get_product_review( + State(state): State>, + Path((product_id, review_id)): Path<(Uuid, Uuid)>, +) -> Result, ApiError> { + let review = state + .review_repository + .find_by_product_and_id(product_id, review_id) + .await + .map_err(ApiError::from)? 
+ .ok_or(ApiError::NotFound("Review not found".to_string()))?; + + Ok(Json(review)) +} + +// ❌ Avoid: Old :param syntax +// .route("/products/:product_id/reviews/:review_id", get(get_product_review)) +``` + +### Query Parameter Validation +```rust +#[derive(serde::Deserialize, IntoParams)] +#[serde(rename_all = "camelCase")] +pub struct ProductFilters { + #[param(minimum = 0.01, example = 10.99)] + pub min_price: Option, + #[param(minimum = 0.01, example = 99.99)] + pub max_price: Option, + #[param(example = "electronics")] + pub category: Option, + #[param(example = true)] + pub in_stock: Option, + #[param(inline, style = "form", explode = true)] + pub tags: Option>, +} + +#[utoipa::path( + get, + path = "/api/v1/products", + params(ProductFilters), + responses( + (status = 200, description = "Filtered products", body = [Product]) + ), + tag = "products" +)] +pub async fn list_products_with_filters( + State(state): State>, + Query(filters): Query, +) -> Result>, ApiError> { + let products = state + .product_repository + .find_with_filters(&filters) + .await + .map_err(ApiError::from)?; + + Ok(Json(products)) +} +``` + +## 📝 REQUEST/RESPONSE HANDLING + +### JSON Request Validation +```rust +use validator::{Validate, ValidationError}; + +#[derive(serde::Deserialize, Validate, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct CreateOrderRequest { + #[validate(length(min = 1, message = "Customer ID is required"))] + pub customer_id: String, + + #[validate(length(min = 1, message = "At least one item is required"))] + pub items: Vec, + + #[validate(range(min = 0.01, message = "Total amount must be positive"))] + pub total_amount: f64, + + #[validate(email(message = "Invalid email format"))] + pub contact_email: String, + + pub shipping_address: ShippingAddress, + + #[serde(default)] + pub notes: Option, +} + +#[derive(serde::Deserialize, Validate, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct OrderItem { + #[validate(length(min = 1, message = "Product ID is required"))] + pub product_id: String, + + #[validate(range(min = 1, message = "Quantity must be at least 1"))] + pub quantity: i32, + + #[validate(range(min = 0.01, message = "Price must be positive"))] + pub unit_price: f64, +} + +#[utoipa::path( + post, + path = "/api/v1/orders", + request_body = CreateOrderRequest, + responses( + (status = 201, description = "Order created successfully", body = Order), + (status = 400, description = "Validation error", body = ApiError), + (status = 500, description = "Internal server error", body = ApiError) + ), + tag = "orders" +)] +pub async fn create_order( + State(state): State>, + Json(request): Json, +) -> Result<(StatusCode, Json), ApiError> { + // Validate the request + request.validate().map_err(ApiError::ValidationError)?; + + // Additional business validation + state.order_validator.validate_order_request(&request).await?; + + // Create the order + let order = state + .order_repository + .create(request) + .await + .map_err(ApiError::from)?; + + // Send confirmation email (async) + tokio::spawn({ + let email_service = state.email_service.clone(); + let order_clone = order.clone(); + async move { + if let Err(e) = email_service.send_order_confirmation(&order_clone).await { + tracing::error!("Failed to send order confirmation: {}", e); + } + } + }); + + Ok((StatusCode::CREATED, Json(order))) +} +``` + +## 🔒 MIDDLEWARE IMPLEMENTATION + +```rust +// src/middleware/auth.rs +use axum::{ + extract::{Request, State}, + http::{header::AUTHORIZATION, StatusCode}, + 
middleware::Next, + response::Response, +}; +use crate::{state::AppState, errors::ApiError}; + +pub fn auth_middleware() -> axum::middleware::FromFnLayer< + fn(State, Request, Next) -> impl std::future::Future>, + AppState, +> { + axum::middleware::from_fn_with_state(auth_handler) +} + +async fn auth_handler( + State(state): State, + mut request: Request, + next: Next, +) -> Result { + // Skip auth for health endpoints + if request.uri().path().starts_with("/health") || + request.uri().path().starts_with("/ready") || + request.uri().path().starts_with("/swagger-ui") { + return Ok(next.run(request).await); + } + + // Extract authorization header + let auth_header = request + .headers() + .get(AUTHORIZATION) + .and_then(|header| header.to_str().ok()) + .ok_or_else(|| ApiError::unauthorized("Missing authorization header"))?; + + // Validate bearer token format + let token = auth_header + .strip_prefix("Bearer ") + .ok_or_else(|| ApiError::unauthorized("Invalid authorization format"))?; + + // Validate JWT token + let user_id = validate_jwt_token(token, &state)?; + + // Add user ID to request extensions + request.extensions_mut().insert(user_id); + + Ok(next.run(request).await) +} + +fn validate_jwt_token(token: &str, state: &AppState) -> Result { + // TODO: Implement actual JWT validation + if token.starts_with("token_for_") { + Ok("user123".to_string()) + } else { + Err(ApiError::unauthorized("Invalid token")) + } } ``` @@ -235,372 +825,599 @@ pub async fn create_workflow( ### Centralized Error Types ```rust +// src/errors.rs use axum::{ http::StatusCode, response::{IntoResponse, Response}, Json, }; -use serde::Serialize; -use thiserror::Error; +use serde::{Deserialize, Serialize}; use utoipa::ToSchema; -#[derive(Error, Debug)] -pub enum AppError { +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ApiError { + pub code: String, + pub message: String, + pub details: Option, +} + +impl ApiError { + pub fn bad_request(message: impl Into) -> Self { + Self { + code: "BAD_REQUEST".to_string(), + message: message.into(), + details: None, + } + } + + pub fn unauthorized(message: impl Into) -> Self { + Self { + code: "UNAUTHORIZED".to_string(), + message: message.into(), + details: None, + } + } + + pub fn forbidden(message: impl Into) -> Self { + Self { + code: "FORBIDDEN".to_string(), + message: message.into(), + details: None, + } + } + + pub fn not_found(message: impl Into) -> Self { + Self { + code: "NOT_FOUND".to_string(), + message: message.into(), + details: None, + } + } + + pub fn internal_error(message: impl Into) -> Self { + Self { + code: "INTERNAL_ERROR".to_string(), + message: message.into(), + details: None, + } + } + + pub fn validation_error(message: impl Into) -> Self { + Self { + code: "VALIDATION_ERROR".to_string(), + message: message.into(), + details: None, + } + } +} + +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + let status = match self.code.as_str() { + "BAD_REQUEST" | "VALIDATION_ERROR" => StatusCode::BAD_REQUEST, + "UNAUTHORIZED" => StatusCode::UNAUTHORIZED, + "FORBIDDEN" => StatusCode::FORBIDDEN, + "NOT_FOUND" => StatusCode::NOT_FOUND, + _ => StatusCode::INTERNAL_SERVER_ERROR, + }; + + (status, Json(self)).into_response() + } +} + +pub type Result = std::result::Result; + +// Legacy error handling support +#[derive(thiserror::Error, Debug)] +pub enum LegacyApiError { + #[error("Validation error: {0}")] + ValidationError(#[from] validator::ValidationErrors), + #[error("Database error: {0}")] 
Database(#[from] sqlx::Error), - #[error("Validation error: {0}")] - Validation(String), - #[error("Not found: {0}")] NotFound(String), - #[error("Unauthorized")] - Unauthorized, + #[error("Conflict: {0}")] + Conflict(String), - #[error("Forbidden")] - Forbidden, + #[error("Unauthorized: {0}")] + Unauthorized(String), - #[error("Internal server error: {0}")] - Internal(String), + #[error("Forbidden: {0}")] + Forbidden(String), #[error("Bad request: {0}")] BadRequest(String), + + #[error("Internal server error: {0}")] + Internal(#[from] anyhow::Error), } -#[derive(Serialize, ToSchema)] -#[serde(rename_all = "camelCase")] -pub struct ErrorResponse { - pub error: String, - pub message: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option, +impl From for ApiError { + fn from(error: LegacyApiError) -> Self { + match error { + LegacyApiError::ValidationError(validation_errors) => { + let details = serde_json::to_value(validation_errors).ok(); + Self { + code: "VALIDATION_ERROR".to_string(), + message: error.to_string(), + details, + } + } + LegacyApiError::Database(db_error) => { + tracing::error!("Database error: {}", db_error); + Self::internal_error("Database error occurred") + } + LegacyApiError::NotFound(msg) => Self::not_found(msg), + LegacyApiError::Conflict(msg) => Self { + code: "CONFLICT".to_string(), + message: msg, + details: None, + }, + LegacyApiError::Unauthorized(msg) => Self::unauthorized(msg), + LegacyApiError::Forbidden(msg) => Self::forbidden(msg), + LegacyApiError::BadRequest(msg) => Self::bad_request(msg), + LegacyApiError::Internal(internal_error) => { + tracing::error!("Internal error: {}", internal_error); + Self::internal_error("Internal server error") + } + } + } } -impl IntoResponse for AppError { +impl IntoResponse for LegacyApiError { fn into_response(self) -> Response { - let (status, error_message) = match &self { - AppError::Database(_) => (StatusCode::INTERNAL_SERVER_ERROR, "Database error"), - AppError::Validation(_) => (StatusCode::UNPROCESSABLE_ENTITY, "Validation error"), - AppError::NotFound(_) => (StatusCode::NOT_FOUND, "Not found"), - AppError::Unauthorized => (StatusCode::UNAUTHORIZED, "Unauthorized"), - AppError::Forbidden => (StatusCode::FORBIDDEN, "Forbidden"), - AppError::Internal(_) => (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error"), - AppError::BadRequest(_) => (StatusCode::BAD_REQUEST, "Bad request"), - }; + let api_error: ApiError = self.into(); + api_error.into_response() + } +} + error: error_type.to_string(), + message, + details, + }); - let error_response = ErrorResponse { - error: error_message.to_string(), - message: self.to_string(), - details: None, - }; + (status, body).into_response() + } +} +``` - (status, Json(error_response)).into_response() +## 🔑 AUTHENTICATION ROUTE HANDLERS + +### Route Handlers with OpenAPI + +```rust +// src/routes/auth.rs +use axum::{ + extract::State, + http::StatusCode, + response::Json, + routing::{post, Router}, +}; +use serde::{Deserialize, Serialize}; +use utoipa::{ToSchema, IntoParams}; +use validator::Validate; + +use crate::{ + state::AppState, + errors::{ApiError, Result}, +}; + +pub fn routes() -> Router { + Router::new() + .route("/login", post(login)) + .route("/register", post(register)) +} + +#[derive(Debug, Serialize, Deserialize, ToSchema, Validate)] +#[serde(rename_all = "camelCase")] +pub struct LoginRequest { + #[validate(email(message = "Invalid email format"))] + pub email: String, + + #[validate(length(min = 8, message = "Password must be at 
least 8 characters"))] + pub password: String, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema, Validate)] +#[serde(rename_all = "camelCase")] +pub struct RegisterRequest { + #[validate(email(message = "Invalid email format"))] + pub email: String, + + #[validate(length(min = 8, message = "Password must be at least 8 characters"))] + pub password: String, + + #[validate(length(min = 2, message = "Name must be at least 2 characters"))] + pub name: String, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct TokenResponse { + pub access_token: String, + pub token_type: String, + pub expires_in: u64, +} + +/// User login endpoint +#[utoipa::path( + post, + path = "/api/v1/auth/login", + request_body = LoginRequest, + responses( + (status = 200, description = "Login successful", body = TokenResponse), + (status = 400, description = "Invalid request", body = ApiError), + (status = 401, description = "Invalid credentials", body = ApiError), + ), + tag = "auth" +)] +pub async fn login( + State(state): State, + Json(request): Json, +) -> Result> { + // Validate request + request.validate() + .map_err(|e| ApiError::validation_error(e.to_string()))?; + + // TODO: Implement actual authentication logic + let token = generate_jwt_token(&request.email, &state)?; + + Ok(Json(TokenResponse { + access_token: token, + token_type: "Bearer".to_string(), + expires_in: state.config().auth.token_expiry_hours * 3600, + })) +} + +/// User registration endpoint +#[utoipa::path( + post, + path = "/api/v1/auth/register", + request_body = RegisterRequest, + responses( + (status = 201, description = "Registration successful", body = TokenResponse), + (status = 400, description = "Invalid request", body = ApiError), + (status = 409, description = "Email already exists", body = ApiError), + ), + tag = "auth" +)] +pub async fn register( + State(state): State, + Json(request): Json, +) -> Result<(StatusCode, Json)> { + // Check if registration is enabled + if !state.config().features.enable_registration { + return Err(ApiError::forbidden("Registration is disabled")); + } + + // Validate request + request.validate() + .map_err(|e| ApiError::validation_error(e.to_string()))?; + + // TODO: Implement actual registration logic + let token = generate_jwt_token(&request.email, &state)?; + + Ok(( + StatusCode::CREATED, + Json(TokenResponse { + access_token: token, + token_type: "Bearer".to_string(), + expires_in: state.config().auth.token_expiry_hours * 3600, + }), + )) +} + +fn generate_jwt_token(email: &str, state: &AppState) -> Result { + // TODO: Implement JWT token generation + Ok(format!("token_for_{}", email)) +} + +#[cfg(test)] +mod tests { + use super::*; + use axum::http::StatusCode; + use axum_test::TestServer; + use crate::config::AppConfig; + + #[tokio::test] + async fn test_login_validation() { + let state = AppState::new(AppConfig::default()).await.unwrap(); + let app = crate::create_app(state).await; + let server = TestServer::new(app).unwrap(); + + let response = server + .post("/api/v1/auth/login") + .json(&serde_json::json!({ + "email": "invalid-email", + "password": "short" + })) + .await; + + assert_eq!(response.status_code(), StatusCode::BAD_REQUEST); + } + + #[tokio::test] + async fn test_valid_login() { + let state = AppState::new(AppConfig::default()).await.unwrap(); + let app = crate::create_app(state).await; + let server = TestServer::new(app).unwrap(); + + let response = server + .post("/api/v1/auth/login") + .json(&LoginRequest { + email: 
"test@example.com".to_string(), + password: "validpassword".to_string(), + }) + .await; + + assert_eq!(response.status_code(), StatusCode::OK); + + let token_response: TokenResponse = response.json(); + assert!(!token_response.access_token.is_empty()); + assert_eq!(token_response.token_type, "Bearer"); } } ``` -### Request Validation Middleware -```rust -use axum::{ - extract::Request, - middleware::Next, - response::Response, -}; -use tower::ServiceBuilder; +## 🧪 INTEGRATION TESTING -pub fn create_middleware_stack() -> ServiceBuilder< - tower::ServiceBuilder< - tower::ServiceBuilder< - tower::ServiceBuilder - > - > -> { - ServiceBuilder::new() - .layer(tower_http::cors::CorsLayer::permissive()) - .layer(tower_http::trace::TraceLayer::new_for_http()) - .layer(axum::middleware::from_fn(request_id_middleware)) - .layer(tower_http::timeout::TimeoutLayer::new( - std::time::Duration::from_secs(30) - )) -} - -pub async fn request_id_middleware( - mut request: Request, - next: Next, -) -> Result { - let request_id = uuid::Uuid::new_v4().to_string(); - request.headers_mut().insert( - "X-Request-ID", - request_id.parse().unwrap(), - ); - - Ok(next.run(request).await) -} -``` - -## 📚 OPENAPI DOCUMENTATION - -### Complete API Documentation -```rust -use utoipa::{OpenApi, ToSchema}; -use utoipa_axum::router::OpenApiRouter; - -#[derive(OpenApi)] -#[openapi( - info( - title = "Workflow Engine API", - version = "1.0.0", - description = "A comprehensive workflow management system", - contact( - name = "API Support", - email = "support@example.com" - ) - ), - paths( - // Workflow endpoints - workflows::get_workflow, - workflows::list_workflows, - workflows::create_workflow, - workflows::update_workflow, - workflows::delete_workflow, - workflows::execute_workflow, - - // Node endpoints - nodes::list_node_types, - nodes::get_node_schema, - - // Execution endpoints - executions::get_execution, - executions::list_executions, - ), - components( - schemas( - WorkflowResponse, - CreateWorkflowRequest, - UpdateWorkflowRequest, - WorkflowListResponse, - NodeTypeInfo, - ExecutionResponse, - ErrorResponse, - ) - ), - tags( - (name = "workflows", description = "Workflow management operations"), - (name = "nodes", description = "Node type information and schemas"), - (name = "executions", description = "Workflow execution tracking") - ) -)] -pub struct ApiDoc; - -// ✅ Good: Detailed endpoint documentation -#[utoipa::path( - put, - path = "/api/v1/workflows/{workflow_id}", - params( - ("workflow_id" = Uuid, Path, description = "Workflow unique identifier") - ), - request_body( - content = UpdateWorkflowRequest, - description = "Workflow update data", - content_type = "application/json" - ), - responses( - (status = 200, description = "Workflow updated successfully", body = WorkflowResponse), - (status = 400, description = "Invalid request data", body = ErrorResponse), - (status = 404, description = "Workflow not found", body = ErrorResponse), - (status = 422, description = "Validation failed", body = ErrorResponse) - ), - tag = "workflows", - summary = "Update workflow", - description = "Updates an existing workflow with new data. Only provided fields will be updated." 
-)] -pub async fn update_workflow( - State(state): State, - Path(workflow_id): Path, - Json(request): Json, -) -> Result, AppError> { - // Implementation - todo!() -} -``` - -## 🧪 TESTING AXUM HANDLERS - -### Integration Testing +### Axum Integration Tests ```rust #[cfg(test)] mod tests { use super::*; - use axum::{ - body::Body, - http::{Request, StatusCode}, - }; + use axum_test::TestServer; use serde_json::json; - use tower::ServiceExt; - async fn setup_test_app() -> Router { - let state = AppState::new_test().await; - create_app(state) + async fn create_test_app() -> TestServer { + let app_state = Arc::new(create_test_app_state().await); + let app = create_app(app_state); + TestServer::new(app).unwrap() } #[tokio::test] - async fn test_create_workflow_success() { - let app = setup_test_app().await; + async fn test_create_user_success() { + let server = create_test_app().await; let request_body = json!({ - "name": "Test Workflow", - "description": "A test workflow", - "workflowData": {}, - "isActive": true + "username": "testuser", + "email": "test@example.com", + "fullName": "Test User" }); - let request = Request::builder() - .method("POST") - .uri("/api/v1/workflows") - .header("Content-Type", "application/json") - .body(Body::from(request_body.to_string())) - .unwrap(); + let response = server + .post("/api/v1/users") + .json(&request_body) + .await; - let response = app.oneshot(request).await.unwrap(); - assert_eq!(response.status(), StatusCode::CREATED); + response.assert_status_created(); - let body = hyper::body::to_bytes(response.into_body()).await.unwrap(); - let workflow: WorkflowResponse = serde_json::from_slice(&body).unwrap(); - assert_eq!(workflow.name, "Test Workflow"); + let user: User = response.json(); + assert_eq!(user.username, "testuser"); + assert_eq!(user.email, "test@example.com"); } #[tokio::test] - async fn test_get_workflow_not_found() { - let app = setup_test_app().await; - let non_existent_id = Uuid::new_v4(); + async fn test_create_user_validation_error() { + let server = create_test_app().await; - let request = Request::builder() - .method("GET") - .uri(&format!("/api/v1/workflows/{}", non_existent_id)) - .body(Body::empty()) - .unwrap(); + let request_body = json!({ + "username": "", // Invalid: empty username + "email": "invalid-email", // Invalid: bad email format + }); - let response = app.oneshot(request).await.unwrap(); - assert_eq!(response.status(), StatusCode::NOT_FOUND); + let response = server + .post("/api/v1/users") + .json(&request_body) + .await; + + response.assert_status_bad_request(); + + let error: ErrorResponse = response.json(); + assert_eq!(error.error, "VALIDATION_ERROR"); } #[tokio::test] - async fn test_validation_error() { - let app = setup_test_app().await; + async fn test_get_user_by_id() { + let server = create_test_app().await; - let invalid_request = json!({ - "name": "", // Empty name should fail validation - "description": "Valid description" + // Create a user first + let create_response = server + .post("/api/v1/users") + .json(&json!({ + "username": "getuser", + "email": "get@example.com", + "fullName": "Get User" + })) + .await; + + let created_user: User = create_response.json(); + + // Get the user by ID + let get_response = server + .get(&format!("/api/v1/users/{}", created_user.id)) + .await; + + get_response.assert_status_ok(); + + let retrieved_user: User = get_response.json(); + assert_eq!(retrieved_user.id, created_user.id); + assert_eq!(retrieved_user.username, "getuser"); + } + + #[tokio::test] + async fn 
test_list_products_with_filters() { + let server = create_test_app().await; + + let response = server + .get("/api/v1/products") + .add_query_param("minPrice", "10.00") + .add_query_param("maxPrice", "50.00") + .add_query_param("category", "electronics") + .add_query_param("inStock", "true") + .await; + + response.assert_status_ok(); + + let products: Vec = response.json(); + for product in products { + assert!(product.price >= 10.0 && product.price <= 50.0); + assert_eq!(product.category, "electronics"); + assert!(product.in_stock); + } + } + + #[tokio::test] + async fn test_create_order_flow() { + let server = create_test_app().await; + + let request_body = json!({ + "customerId": "customer-123", + "items": [ + { + "productId": "product-1", + "quantity": 2, + "unitPrice": 29.99 + }, + { + "productId": "product-2", + "quantity": 1, + "unitPrice": 15.50 + } + ], + "totalAmount": 75.48, + "contactEmail": "customer@example.com", + "shippingAddress": { + "street": "123 Main St", + "city": "Anytown", + "state": "CA", + "zipCode": "12345", + "country": "US" + }, + "notes": "Please handle with care" }); - let request = Request::builder() - .method("POST") - .uri("/api/v1/workflows") - .header("Content-Type", "application/json") - .body(Body::from(invalid_request.to_string())) - .unwrap(); + let response = server + .post("/api/v1/orders") + .json(&request_body) + .await; - let response = app.oneshot(request).await.unwrap(); - assert_eq!(response.status(), StatusCode::UNPROCESSABLE_ENTITY); + response.assert_status_created(); + + let order: Order = response.json(); + assert_eq!(order.customer_id, "customer-123"); + assert_eq!(order.items.len(), 2); + assert_eq!(order.total_amount, 75.48); + assert_eq!(order.status, OrderStatus::Pending); } } ``` -## 🔧 SERVER CONFIGURATION - -### Production Server Setup -```rust -use axum::serve; -use std::net::SocketAddr; -use tokio::net::TcpListener; -use tower_http::trace::TraceLayer; -use tracing::{info, error}; - -pub async fn start_server(config: AppConfig) -> Result<(), Box> { - let state = AppState::new(config.clone()).await?; - let app = create_app(state) - .layer(create_middleware_stack()); - - let addr = SocketAddr::from(([0, 0, 0, 0], config.server.port)); - let listener = TcpListener::bind(addr).await?; - - info!("Server starting on {}", addr); - - serve(listener, app) - .await - .map_err(|e| { - error!("Server error: {}", e); - e.into() - }) -} - -// Graceful shutdown handling -pub async fn start_server_with_shutdown( - config: AppConfig, - shutdown_signal: impl std::future::Future + Send + 'static, -) -> Result<(), Box> { - let state = AppState::new(config.clone()).await?; - let app = create_app(state) - .layer(create_middleware_stack()); - - let addr = SocketAddr::from(([0, 0, 0, 0], config.server.port)); - let listener = TcpListener::bind(addr).await?; - - info!("Server starting on {}", addr); - - serve(listener, app) - .with_graceful_shutdown(shutdown_signal) - .await - .map_err(|e| { - error!("Server error: {}", e); - e.into() - }) -} -``` - ## 🚨 AXUM ANTI-PATTERNS ### What to Avoid ```rust -// ❌ Don't use old Axum 0.7 path syntax -// #[utoipa::path(get, path = "/workflows/:id")] // Use {id} instead +// ❌ Don't use old path parameter syntax +// .route("/users/:id", get(get_user)) // Use {id} instead -// ❌ Don't forget Content-Type header for JSON endpoints -// Missing: .header("Content-Type", "application/json") - -// ❌ Don't ignore validation -// pub async fn create_user(Json(request): Json) { -// // Missing: request.validate()? 
+// ❌ Don't forget error handling +// pub async fn get_user(Path(id): Path) -> Json { +// let user = repository.find(id).await.unwrap(); // No error handling +// Json(user) // } -// ❌ Don't use std::sync in Axum handlers -// pub async fn bad_handler(State(state): State>>) { -// let data = state.lock().unwrap(); // Blocks async runtime +// ❌ Don't skip request validation +// pub async fn create_user(Json(request): Json) -> Json { +// // No validation of the request // } -// ❌ Don't return bare strings for JSON APIs -// pub async fn bad_endpoint() -> String { -// "success".to_string() // Should return Json +// ❌ Don't use blocking operations in handlers +// pub async fn bad_handler() -> String { +// std::thread::sleep(Duration::from_secs(5)); // Blocks entire runtime +// "response".to_string() +// } + +// ❌ Don't forget OpenAPI documentation +// pub async fn undocumented_endpoint() -> String { +// // Missing #[utoipa::path] annotation +// "response".to_string() // } ``` -## ✅ AXUM CHECKLIST +## 📝 AXUM BEST PRACTICES CHECKLIST ```markdown -### Axum Implementation Verification -- [ ] Uses Axum 0.8 path syntax with {param} not :param -- [ ] All JSON structs use #[serde(rename_all = "camelCase")] -- [ ] Comprehensive OpenAPI documentation with utoipa -- [ ] Proper error handling with IntoResponse trait -- [ ] Request validation with validator crate -- [ ] Structured application state with Clone trait -- [ ] Integration tests for all endpoints -- [ ] Middleware stack includes CORS, tracing, timeouts -- [ ] Graceful shutdown implementation +## Axum Implementation Verification + +### Configuration Management +- [ ] AppConfig struct with proper serialization +- [ ] arc-swap used for hot-reloadable config +- [ ] Environment variable loading implemented +- [ ] Default configuration provided + +### Application State +- [ ] AppState contains all shared resources +- [ ] Database pool properly initialized +- [ ] HTTP client configured with timeouts +- [ ] State passed to all route handlers + +### OpenAPI Integration +- [ ] utoipa derives added to all request/response types +- [ ] API paths documented with #[utoipa::path] +- [ ] OpenAPI struct defined with proper metadata +- [ ] Swagger UI conditionally enabled + +### Route Organization +- [ ] Routes organized by feature modules +- [ ] Proper HTTP methods used (GET, POST, PUT, DELETE) +- [ ] Uses Axum 0.8+ with {param} path syntax +- [ ] Request/response types properly defined +- [ ] Path and query parameter validation + +### Middleware +- [ ] Authentication middleware implemented +- [ ] CORS layer configured appropriately +- [ ] Tracing layer for request logging +- [ ] Compression middleware for responses +- [ ] Rate limiting configured if needed + +### Error Handling +- [ ] Structured error types with proper HTTP status codes +- [ ] IntoResponse trait implemented for errors +- [ ] Consistent error format across API +- [ ] Proper error logging implemented +- [ ] Validation errors properly handled + +### Request/Response Handling +- [ ] All data structures use #[serde(rename_all = "camelCase")] +- [ ] Request validation using validator crate +- [ ] Proper content-type handling +- [ ] Response status codes follow HTTP standards +- [ ] JSON serialization configured correctly + +### Testing +- [ ] Unit tests for handlers +- [ ] Integration tests with TestServer +- [ ] Request/response validation tests +- [ ] Authentication middleware tests +- [ ] Error handling tests + +### Security +- [ ] JWT token validation implemented +- [ ] Bearer token format enforced 
+- [ ] Authorization headers required +- [ ] CORS origins properly configured +- [ ] Input validation on all endpoints + +### Performance - [ ] No blocking operations in async handlers -- [ ] Content-Type headers properly set -- [ ] HTTP status codes correctly used +- [ ] Database connection pooling +- [ ] HTTP client connection reuse +- [ ] Response compression enabled +- [ ] Appropriate timeout configurations ``` -This Axum standard ensures robust, well-documented, and maintainable web APIs following modern Rust async patterns. +This comprehensive Axum standard ensures production-ready, secure, and maintainable web APIs with modern Rust practices. diff --git a/.cursor/rules/rust/features/concurrency.mdc b/.cursor/rules/rust/features/concurrency.mdc index 56fb3ac..08efc36 100644 --- a/.cursor/rules/rust/features/concurrency.mdc +++ b/.cursor/rules/rust/features/concurrency.mdc @@ -18,7 +18,15 @@ alwaysApply: false ```toml # Cargo.toml - Tokio configuration [dependencies] -tokio = { workspace = true, features = ["rt-multi-thread", "macros", "sync", "time", "fs"] } +tokio = { version = "1.45", features = [ + "macros", + "rt-multi-thread", + "signal", + "sync" +] } +dashmap = { version = "6", features = ["serde"] } +async-trait = "0.1" +futures = "0.3" ``` ## 🔒 SYNCHRONIZATION PRIMITIVES @@ -30,28 +38,28 @@ use tokio::sync::{RwLock, Mutex, broadcast, mpsc, oneshot}; use std::sync::Arc; // ✅ Good: Async-friendly RwLock -pub struct WorkflowCache { - data: Arc>>, +pub struct UserCache { + data: Arc>>, } -impl WorkflowCache { +impl UserCache { pub fn new() -> Self { Self { data: Arc::new(RwLock::new(HashMap::new())), } } - pub async fn get(&self, id: &str) -> Option { + pub async fn get(&self, id: &str) -> Option { let data = self.data.read().await; data.get(id).cloned() } - pub async fn insert(&self, id: String, workflow: WorkflowDefinition) { + pub async fn insert(&self, id: String, user: User) { let mut data = self.data.write().await; - data.insert(id, workflow); + data.insert(id, user); } - pub async fn remove(&self, id: &str) -> Option { + pub async fn remove(&self, id: &str) -> Option { let mut data = self.data.write().await; data.remove(id) } @@ -68,24 +76,24 @@ use dashmap::DashMap; use std::sync::Arc; // ✅ Preferred: DashMap for concurrent hash maps -pub struct NodeRegistry { - nodes: Arc>>, +pub struct ServiceRegistry { + services: Arc>>, categories: Arc>>, } -impl NodeRegistry { +impl ServiceRegistry { pub fn new() -> Self { Self { - nodes: Arc::new(DashMap::new()), + services: Arc::new(DashMap::new()), categories: Arc::new(DashMap::new()), } } - pub fn register_node(&self, id: String, node: Box) { - let category = node.category().to_string(); + pub fn register_service(&self, id: String, service: Box) { + let category = service.category().to_string(); - // Insert the node - self.nodes.insert(id.clone(), node); + // Insert the service + self.services.insert(id.clone(), service); // Update category index self.categories @@ -94,8 +102,8 @@ impl NodeRegistry { .push(id); } - pub fn get_node(&self, id: &str) -> Option>> { - self.nodes.get(id) + pub fn get_service(&self, id: &str) -> Option>> { + self.services.get(id) } pub fn list_by_category(&self, category: &str) -> Vec { @@ -105,14 +113,14 @@ impl NodeRegistry { .unwrap_or_default() } - pub fn list_all_nodes(&self) -> Vec { - self.nodes.iter().map(|entry| entry.key().clone()).collect() + pub fn list_all_services(&self) -> Vec { + self.services.iter().map(|entry| entry.key().clone()).collect() } } // ❌ Avoid: Mutex for concurrent access 
-// pub struct BadNodeRegistry { -// nodes: Arc>>> +// pub struct BadServiceRegistry { +// services: Arc>>> // } ``` @@ -124,7 +132,7 @@ use tokio::sync::mpsc; use tracing::{info, error}; pub struct EventProcessor { - sender: mpsc::UnboundedSender, + sender: mpsc::UnboundedSender, } impl EventProcessor { @@ -137,17 +145,17 @@ impl EventProcessor { (processor, handle) } - pub fn send_event(&self, event: WorkflowEvent) -> Result<(), mpsc::error::SendError> { + pub fn send_event(&self, event: SystemEvent) -> Result<(), mpsc::error::SendError> { self.sender.send(event) } } pub struct EventProcessorHandle { - receiver: mpsc::UnboundedReceiver, + receiver: mpsc::UnboundedReceiver, } impl EventProcessorHandle { - fn new(receiver: mpsc::UnboundedReceiver) -> Self { + fn new(receiver: mpsc::UnboundedReceiver) -> Self { Self { receiver } } @@ -160,19 +168,19 @@ impl EventProcessorHandle { info!("Event processor stopped"); } - async fn process_event(&self, event: WorkflowEvent) -> Result<(), ProcessingError> { + async fn process_event(&self, event: SystemEvent) -> Result<(), ProcessingError> { match event { - WorkflowEvent::Started { workflow_id, .. } => { - info!("Workflow {} started", workflow_id); - // Process workflow start + SystemEvent::UserRegistered { user_id, .. } => { + info!("User {} registered", user_id); + // Process user registration } - WorkflowEvent::Completed { workflow_id, .. } => { - info!("Workflow {} completed", workflow_id); - // Process workflow completion + SystemEvent::OrderCompleted { order_id, .. } => { + info!("Order {} completed", order_id); + // Process order completion } - WorkflowEvent::Failed { workflow_id, error, .. } => { - error!("Workflow {} failed: {}", workflow_id, error); - // Process workflow failure + SystemEvent::PaymentFailed { payment_id, error, .. } => { + error!("Payment {} failed: {}", payment_id, error); + // Process payment failure } } Ok(()) @@ -210,11 +218,11 @@ pub async fn start_event_monitoring(event_bus: Arc) { tokio::spawn(async move { while let Ok(event) = receiver.recv().await { match event { - SystemEvent::NodeExecutionStarted { node_id, .. } => { - info!("Node {} started execution", node_id); + SystemEvent::UserRegistered { user_id, .. } => { + info!("User {} registered", user_id); } - SystemEvent::NodeExecutionCompleted { node_id, .. } => { - info!("Node {} completed execution", node_id); + SystemEvent::OrderCompleted { order_id, .. 
} => { + info!("Order {} completed", order_id); } SystemEvent::SystemShutdown => { info!("System shutdown requested"); @@ -235,13 +243,13 @@ pub struct AsyncValidator { } impl AsyncValidator { - pub async fn validate_workflow(&self, workflow: WorkflowDefinition) -> Result { + pub async fn validate_user(&self, user: User) -> Result { let (tx, rx) = oneshot::channel(); // Spawn validation task - let workflow_clone = workflow.clone(); + let user_clone = user.clone(); tokio::spawn(async move { - let result = perform_validation(workflow_clone).await; + let result = perform_validation(user_clone).await; let _ = tx.send(result); }); @@ -251,12 +259,12 @@ impl AsyncValidator { } } -async fn perform_validation(workflow: WorkflowDefinition) -> Result { +async fn perform_validation(user: User) -> Result { // Expensive validation logic tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - if workflow.nodes.is_empty() { - return Err(ValidationError::EmptyWorkflow); + if user.email.is_empty() { + return Err(ValidationError::EmptyEmail); } Ok(ValidationResult::Valid) @@ -270,24 +278,24 @@ async fn perform_validation(workflow: WorkflowDefinition) -> Result Result { +impl BatchProcessor { + pub async fn process_batch_parallel(&self, items: &[ProcessingItem]) -> Result { let mut join_set = JoinSet::new(); let mut results = HashMap::new(); - // Execute nodes in parallel where possible - for node in &workflow.nodes { - if self.can_execute_parallel(node, &results) { - let node_clone = node.clone(); - let executor = self.clone(); + // Process items in parallel where possible + for item in items { + if self.can_process_parallel(item, &results) { + let item_clone = item.clone(); + let processor = self.clone(); join_set.spawn(async move { - let result = executor.execute_node(&node_clone).await; - (node_clone.id.clone(), result) + let result = processor.process_item(&item_clone).await; + (item_clone.id.clone(), result) }); } } @@ -295,21 +303,21 @@ impl WorkflowExecutor { // Collect results while let Some(result) = join_set.join_next().await { match result { - Ok((node_id, execution_result)) => { - results.insert(node_id, execution_result?); + Ok((item_id, processing_result)) => { + results.insert(item_id, processing_result?); } Err(join_error) => { - return Err(ExecutionError::TaskFailed(join_error.to_string())); + return Err(ProcessingError::TaskFailed(join_error.to_string())); } } } - Ok(ExecutionResult { node_results: results }) + Ok(BatchResult { item_results: results }) } - fn can_execute_parallel(&self, node: &NodeDefinition, completed_results: &HashMap) -> bool { + fn can_process_parallel(&self, item: &ProcessingItem, completed_results: &HashMap) -> bool { // Check if all dependencies are satisfied - node.dependencies.iter().all(|dep| completed_results.contains_key(dep)) + item.dependencies.iter().all(|dep| completed_results.contains_key(dep)) } } ``` @@ -334,7 +342,7 @@ impl Application { pub async fn start(&mut self) -> Result<(), ApplicationError> { // Start background services - self.start_workflow_executor().await?; + self.start_user_service().await?; self.start_event_processor().await?; self.start_health_monitor().await?; @@ -345,18 +353,18 @@ impl Application { self.shutdown_gracefully().await } - async fn start_workflow_executor(&mut self) -> Result<(), ApplicationError> { + async fn start_user_service(&mut self) -> Result<(), ApplicationError> { let token = self.shutdown_token.clone(); let handle = tokio::spawn(async move { loop { tokio::select! 
{ _ = token.cancelled() => { - info!("Workflow executor shutdown requested"); + info!("User service shutdown requested"); break; } _ = tokio::time::sleep(tokio::time::Duration::from_secs(1)) => { - // Process workflows + // Process user operations } } } @@ -409,19 +417,19 @@ mod tests { use tokio::time::{timeout, Duration}; #[tokio::test] - async fn test_workflow_cache_concurrent_access() { - let cache = WorkflowCache::new(); - let workflow = WorkflowDefinition::default(); + async fn test_user_cache_concurrent_access() { + let cache = UserCache::new(); + let user = User::default(); // Test concurrent insertions let mut handles = Vec::new(); for i in 0..10 { let cache_clone = cache.clone(); - let workflow_clone = workflow.clone(); + let user_clone = user.clone(); handles.push(tokio::spawn(async move { - cache_clone.insert(format!("workflow_{}", i), workflow_clone).await; + cache_clone.insert(format!("user_{}", i), user_clone).await; })); } @@ -430,9 +438,9 @@ mod tests { handle.await.unwrap(); } - // Verify all workflows were inserted + // Verify all users were inserted for i in 0..10 { - let result = cache.get(&format!("workflow_{}", i)).await; + let result = cache.get(&format!("user_{}", i)).await; assert!(result.is_some()); } } @@ -445,8 +453,8 @@ mod tests { let processor_task = tokio::spawn(handle.run()); // Send test events - let event = WorkflowEvent::Started { - workflow_id: "test-workflow".to_string(), + let event = SystemEvent::UserRegistered { + user_id: "test-user".to_string(), timestamp: Utc::now(), }; diff --git a/.cursor/rules/rust/features/database.mdc b/.cursor/rules/rust/features/database.mdc index ed02956..74b8118 100644 --- a/.cursor/rules/rust/features/database.mdc +++ b/.cursor/rules/rust/features/database.mdc @@ -18,7 +18,13 @@ alwaysApply: false ```toml # Cargo.toml - SQLx configuration [dependencies] -sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "json"] } +sqlx = { version = "0.8", features = [ + "chrono", + "postgres", + "runtime-tokio-rustls", + "sqlite", + "uuid" +] } ``` ## 🔧 QUERY PATTERNS @@ -28,23 +34,23 @@ sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "sqli // ✅ Preferred: Use sqlx::query_as with custom types #[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct WorkflowExecution { +pub struct User { pub id: Uuid, - pub workflow_id: String, - pub status: ExecutionStatus, + pub username: String, + pub email: String, pub created_at: DateTime, pub updated_at: DateTime, - pub metadata: Option, + pub is_active: bool, } -impl WorkflowExecution { +impl User { pub async fn find_by_id( pool: &PgPool, id: Uuid ) -> Result, sqlx::Error> { - sqlx::query_as::<_, WorkflowExecution>( - "SELECT id, workflow_id, status, created_at, updated_at, metadata - FROM workflow_executions + sqlx::query_as::<_, User>( + "SELECT id, username, email, created_at, updated_at, is_active + FROM users WHERE id = $1" ) .bind(id) @@ -52,20 +58,18 @@ impl WorkflowExecution { .await } - pub async fn list_by_workflow( + pub async fn list_active_users( pool: &PgPool, - workflow_id: &str, limit: i64, offset: i64, ) -> Result, sqlx::Error> { - sqlx::query_as::<_, WorkflowExecution>( - "SELECT id, workflow_id, status, created_at, updated_at, metadata - FROM workflow_executions - WHERE workflow_id = $1 + sqlx::query_as::<_, User>( + "SELECT id, username, email, created_at, updated_at, is_active + FROM users + WHERE is_active = true ORDER BY created_at DESC - 
LIMIT $2 OFFSET $3" + LIMIT $1 OFFSET $2" ) - .bind(workflow_id) .bind(limit) .bind(offset) .fetch_all(pool) @@ -88,28 +92,33 @@ use chrono::{DateTime, Utc}; #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct User { +pub struct Product { pub id: Uuid, - pub username: String, - pub email: String, + pub name: String, + pub description: Option, + pub price: rust_decimal::Decimal, + pub category_id: Uuid, pub created_at: DateTime, pub updated_at: DateTime, - pub is_active: bool, + pub is_available: bool, } #[derive(Debug, Clone, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct CreateUserRequest { - pub username: String, - pub email: String, +pub struct CreateProductRequest { + pub name: String, + pub description: Option, + pub price: rust_decimal::Decimal, + pub category_id: Uuid, } #[derive(Debug, Clone, Deserialize)] #[serde(rename_all = "camelCase")] -pub struct UpdateUserRequest { - pub username: Option, - pub email: Option, - pub is_active: Option, +pub struct UpdateProductRequest { + pub name: Option, + pub description: Option, + pub price: Option, + pub is_available: Option, } ``` @@ -344,6 +353,37 @@ CREATE TRIGGER update_users_updated_at EXECUTE FUNCTION update_updated_at_column(); ``` +### Product Table Example +```sql +-- migrations/20240501000002_create_products_table.sql +CREATE TABLE categories ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(255) NOT NULL UNIQUE, + description TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +CREATE TABLE products ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR(255) NOT NULL, + description TEXT, + price DECIMAL(10,2) NOT NULL, + category_id UUID NOT NULL REFERENCES categories(id), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + is_available BOOLEAN NOT NULL DEFAULT true +); + +CREATE INDEX idx_products_category ON products(category_id); +CREATE INDEX idx_products_price ON products(price); +CREATE INDEX idx_products_name ON products(name); + +CREATE TRIGGER update_products_updated_at + BEFORE UPDATE ON products + FOR EACH ROW + EXECUTE FUNCTION update_updated_at_column(); +``` + ## 🔧 CONNECTION MANAGEMENT ### Database Pool Configuration diff --git a/.cursor/rules/rust/features/http-client.mdc b/.cursor/rules/rust/features/http-client.mdc new file mode 100644 index 0000000..707c386 --- /dev/null +++ b/.cursor/rules/rust/features/http-client.mdc @@ -0,0 +1,225 @@ +--- +description: +globs: +alwaysApply: false +--- +# 🌐 HTTP CLIENT BEST PRACTICES + +> **TL;DR:** Modern HTTP client patterns using reqwest with proper error handling, timeouts, and security configurations. 
+ +## 🔧 REQWEST CONFIGURATION + +### Standard Dependencies +```toml +# Cargo.toml - HTTP client configuration +[dependencies] +reqwest = { version = "0.12", default-features = false, features = [ + "charset", + "rustls-tls-webpki-roots", + "http2", + "json", + "cookies", + "gzip", + "brotli", + "zstd", + "deflate" +] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tokio = { version = "1.45", features = ["macros", "rt-multi-thread"] } +anyhow = "1.0" +thiserror = "2.0" +url = "2.5" +``` + +## 🏗️ CLIENT BUILDER PATTERN + +### Configurable HTTP Client +```rust +use reqwest::{Client, ClientBuilder, Response}; +use serde::{Deserialize, Serialize}; +use std::time::Duration; +use url::Url; + +pub struct HttpClient { + client: Client, + base_url: Url, + default_timeout: Duration, +} + +impl HttpClient { + pub fn builder() -> HttpClientBuilder { + HttpClientBuilder::new() + } + + pub async fn get(&self, path: &str) -> Result + where + T: for<'de> Deserialize<'de>, + { + let url = self.base_url.join(path)?; + + let response = self + .client + .get(url) + .timeout(self.default_timeout) + .send() + .await?; + + self.handle_response(response).await + } + + pub async fn post(&self, path: &str, body: &B) -> Result + where + T: for<'de> Deserialize<'de>, + B: Serialize, + { + let url = self.base_url.join(path)?; + + let response = self + .client + .post(url) + .json(body) + .timeout(self.default_timeout) + .send() + .await?; + + self.handle_response(response).await + } + + async fn handle_response(&self, response: Response) -> Result + where + T: for<'de> Deserialize<'de>, + { + let status = response.status(); + + if status.is_success() { + let text = response.text().await?; + serde_json::from_str(&text).map_err(|e| HttpError::Deserialization { + error: e.to_string(), + body: text, + }) + } else { + let body = response.text().await.unwrap_or_default(); + Err(HttpError::UnexpectedStatus { + status: status.as_u16(), + body, + }) + } + } +} + +pub struct HttpClientBuilder { + base_url: Option, + timeout: Option, + user_agent: Option, + headers: Vec<(String, String)>, + accept_invalid_certs: bool, +} + +impl HttpClientBuilder { + pub fn new() -> Self { + Self { + base_url: None, + timeout: Some(Duration::from_secs(30)), + user_agent: Some("rust-http-client/1.0".to_string()), + headers: Vec::new(), + accept_invalid_certs: false, + } + } + + pub fn base_url(mut self, url: &str) -> Self { + self.base_url = Some(url.to_string()); + self + } + + pub fn timeout(mut self, timeout: Duration) -> Self { + self.timeout = Some(timeout); + self + } + + pub fn build(self) -> Result { + let base_url = self.base_url + .ok_or_else(|| HttpError::Configuration("Base URL is required".to_string()))?; + + let mut client_builder = ClientBuilder::new() + .danger_accept_invalid_certs(self.accept_invalid_certs); + + if let Some(timeout) = self.timeout { + client_builder = client_builder.timeout(timeout); + } + + if let Some(user_agent) = &self.user_agent { + client_builder = client_builder.user_agent(user_agent); + } + + let client = client_builder.build()?; + let parsed_url = Url::parse(&base_url)?; + + Ok(HttpClient { + client, + base_url: parsed_url, + default_timeout: self.timeout.unwrap_or(Duration::from_secs(30)), + }) + } +} +``` + +## 🚨 ERROR HANDLING + +### Comprehensive Error Types +```rust +#[derive(thiserror::Error, Debug)] +pub enum HttpError { + #[error("HTTP request error: {0}")] + Request(#[from] reqwest::Error), + + #[error("URL parsing error: {0}")] + UrlParse(#[from] url::ParseError), + + 
#[error("JSON serialization error: {0}")] + Serialization(#[from] serde_json::Error), + + #[error("Deserialization error: {error}, body: {body}")] + Deserialization { error: String, body: String }, + + #[error("Unexpected HTTP status {status}: {body}")] + UnexpectedStatus { status: u16, body: String }, + + #[error("Configuration error: {0}")] + Configuration(String), + + #[error("Timeout occurred")] + Timeout, + + #[error("Authentication failed")] + Authentication, +} + +impl HttpError { + pub fn is_retryable(&self) -> bool { + matches!( + self, + HttpError::Timeout + | HttpError::UnexpectedStatus { status: 502..=504, .. } + ) + } +} +``` + +## ✅ HTTP CLIENT CHECKLIST + +```markdown +### HTTP Client Implementation Verification +- [ ] Uses reqwest with rustls-tls (not native-tls) +- [ ] Compression features enabled (gzip, brotli, deflate) +- [ ] Proper timeout configuration +- [ ] User-Agent header configured +- [ ] Structured error handling with retryable errors +- [ ] Authentication patterns implemented +- [ ] Response type definitions with camelCase +- [ ] Base URL configuration pattern +- [ ] JSON serialization/deserialization +- [ ] Proper status code handling +``` + +This HTTP client standard ensures robust, secure, and maintainable HTTP communication in Rust applications. diff --git a/.cursor/rules/rust/features/tools-and-config.mdc b/.cursor/rules/rust/features/tools-and-config.mdc index 8f2a6e9..363180c 100644 --- a/.cursor/rules/rust/features/tools-and-config.mdc +++ b/.cursor/rules/rust/features/tools-and-config.mdc @@ -3,156 +3,202 @@ description: globs: alwaysApply: false --- -# 🛠️ RUST TOOLS & CONFIGURATION BEST PRACTICES +# 🛠️ TOOLS AND CONFIGURATION BEST PRACTICES -> **TL;DR:** Modern tooling choices and configuration patterns for Rust applications, focusing on maintainable and production-ready setups. +> **TL;DR:** Essential tools and configuration patterns for modern Rust applications, focusing on logging, configuration management, and templating. 
## 📊 LOGGING AND OBSERVABILITY -### Use Tracing Instead of env_logger +### Tracing Ecosystem (Not env_logger) +- **Always use `tracing`** - modern structured logging +- **Combine with `tracing-subscriber`** for output formatting +- **File rotation with `tracing-appender`** for production +- **Structured logging** with spans and events + +```toml +# Cargo.toml - Tracing configuration +[dependencies] +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } +chrono = { version = "0.4", features = ["serde"] } +``` + +### Structured Logging Setup ```rust -// ✅ Preferred: Use tracing ecosystem -use tracing::{info, warn, error, debug, instrument}; +use tracing::{info, error, warn, debug, span, Level}; use tracing_subscriber::{ + fmt::{self, time::ChronoUtc}, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry, }; +use tracing_appender::{non_blocking, rolling}; + +pub fn init_logging(config: &LogConfig) -> Result<(), Box> { + // Create file appender with rotation + let file_appender = rolling::daily(&config.log_dir, "app.log"); + let (file_writer, _guard) = non_blocking(file_appender); + + // Console formatting + let console_layer = fmt::layer() + .with_target(true) + .with_timer(ChronoUtc::rfc_3339()) + .with_level(true) + .with_thread_ids(true) + .with_thread_names(true); + + // File formatting (JSON for structured logs) + let file_layer = fmt::layer() + .json() + .with_timer(ChronoUtc::rfc_3339()) + .with_writer(file_writer); + + // Environment filter + let filter = EnvFilter::try_from_default_env() + .unwrap_or_else(|_| EnvFilter::new(&config.level)); -pub fn init_tracing() { Registry::default() - .with( - tracing_subscriber::fmt::layer() - .with_target(false) - .compact() - ) - .with(EnvFilter::from_default_env()) - .init(); -} - -// Structured logging with context -#[instrument(skip(sensitive_data))] -pub async fn process_workflow( - workflow_id: &str, - user_id: &str, - sensitive_data: &[u8] -) -> Result<(), ProcessingError> { - info!("Starting workflow processing"); - - // Processing logic - debug!("Processing step 1 completed"); - - match perform_operation().await { - Ok(result) => { - info!(result_count = result.len(), "Processing completed successfully"); - Ok(()) - } - Err(e) => { - error!(error = %e, "Processing failed"); - Err(e) - } - } -} - -// ❌ Avoid: env_logger (deprecated pattern) -// use env_logger; -// env_logger::init(); -``` - -### Tracing Configuration -```rust -use tracing_appender::rolling::{RollingFileAppender, Rotation}; -use tracing_subscriber::{ - fmt::writer::MakeWriterExt, - layer::SubscriberExt, - util::SubscriberInitExt, -}; - -pub fn init_production_tracing(log_dir: &str) -> Result<(), Box> { - // File appender with rotation - let file_appender = RollingFileAppender::new( - Rotation::daily(), - log_dir, - "application.log" - ); - - // Console output for development - let console_layer = tracing_subscriber::fmt::layer() - .with_target(false) - .compact(); - - // File output for production - let file_layer = tracing_subscriber::fmt::layer() - .with_writer(file_appender) - .with_ansi(false) - .json(); - - tracing_subscriber::registry() + .with(filter) .with(console_layer) .with(file_layer) - .with(EnvFilter::from_default_env()) .init(); Ok(()) } + +// Usage in application code +#[tracing::instrument(skip(service), fields(user_id = %user_id))] +pub async fn process_user_registration( + user_id: &str, + service: &UserService, +) -> Result { + let span = span!(Level::INFO, "user_registration", user_id = %user_id); + let 
_enter = span.enter(); + + info!("Starting user registration process"); + + let user = service.create_user(user_id).await.map_err(|e| { + error!("Failed to create user: {}", e); + e + })?; + + info!( + user_id = %user.id, + email = %user.email, + "User registration completed successfully" + ); + + Ok(user) +} + +// Contextual logging with structured fields +pub async fn handle_payment_processing( + order_id: &str, + amount: f64, + payment_method: &str, +) -> Result { + let span = span!( + Level::INFO, + "payment_processing", + order_id = %order_id, + amount = %amount, + payment_method = %payment_method + ); + let _enter = span.enter(); + + info!("Processing payment"); + + match process_payment(order_id, amount, payment_method).await { + Ok(result) => { + info!( + transaction_id = %result.transaction_id, + status = %result.status, + "Payment processed successfully" + ); + Ok(result) + } + Err(e) => { + error!( + error = %e, + "Payment processing failed" + ); + Err(e) + } + } +} ``` -## 📄 CONFIGURATION MANAGEMENT +## ⚙️ CONFIGURATION MANAGEMENT + +### YAML Over TOML for Complex Configuration +- **Use YAML** for application configuration (not TOML) +- **Environment-specific configs** (dev, staging, prod) +- **Sensitive data via environment variables** +- **Configuration validation** using serde and custom validation -### Use YAML Instead of TOML ```yaml -# config.yaml - Preferred configuration format +# config/development.yaml server: - host: "0.0.0.0" + host: "127.0.0.1" port: 8080 - workers: 4 + workers: 1 database: - url: "postgresql://user:pass@localhost/db" - maxConnections: 20 - minConnections: 5 - timeoutSecs: 30 + url: "postgresql://user:pass@localhost/app_dev" + maxConnections: 10 + minConnections: 2 + connectTimeout: 30s + idleTimeout: 600s logging: - level: "info" - format: "json" - directory: "./logs" + level: "debug" + format: "pretty" + logDir: "./logs" + +email: + provider: "smtp" + smtpHost: "localhost" + smtpPort: 1025 + fromAddress: "noreply@example.com" features: - enableMetrics: true - enableTracing: true - debugMode: false + enableRegistration: true + enablePasswordReset: true + enableEmailVerification: false + maintenanceMode: false -nodes: - ai: - defaultModel: "gpt-4o" - maxTokens: 4096 - temperature: 0.7 +cache: + redis: + url: "redis://localhost:6379" + maxConnections: 10 + defaultTtl: 3600 - crawler: - userAgent: "CellaBot/1.0" - timeout: 30 - maxRetries: 3 +security: + jwtSecret: "${JWT_SECRET}" # From environment + sessionTimeout: 3600 + rateLimitRequests: 100 + rateLimitWindow: 60 ``` ### Configuration Loading Pattern ```rust use serde::{Deserialize, Serialize}; -use std::fs; -use anyhow::{Context, Result}; +use std::time::Duration; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct AppConfig { pub server: ServerConfig, pub database: DatabaseConfig, - pub logging: LoggingConfig, - pub features: FeatureConfig, - pub nodes: NodeConfig, + pub logging: LogConfig, + pub email: EmailConfig, + pub features: FeatureFlags, + pub cache: CacheConfig, + pub security: SecurityConfig, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ServerConfig { pub host: String, @@ -160,424 +206,483 @@ pub struct ServerConfig { pub workers: usize, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct 
DatabaseConfig { pub url: String, pub max_connections: u32, pub min_connections: u32, - pub timeout_secs: u64, + #[serde(with = "duration_serde")] + pub connect_timeout: Duration, + #[serde(with = "duration_serde")] + pub idle_timeout: Duration, } +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FeatureFlags { + pub enable_registration: bool, + pub enable_password_reset: bool, + pub enable_email_verification: bool, + pub maintenance_mode: bool, +} + +// Configuration loading with environment override impl AppConfig { - pub fn load() -> Result { - // Try multiple config sources in order - Self::from_file("config.yaml") - .or_else(|_| Self::from_file("config.yml")) - .or_else(|_| Self::from_env()) - .context("Failed to load configuration from any source") + pub fn load() -> Result { + let env = std::env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()); + let config_path = format!("config/{}.yaml", env); + + let mut settings = config::Config::builder() + .add_source(config::File::with_name(&config_path)) + .add_source(config::Environment::with_prefix("APP")) + .build()?; + + // Expand environment variables + settings.try_deserialize() } - pub fn from_file(path: &str) -> Result { - let content = fs::read_to_string(path) - .with_context(|| format!("Failed to read config file: {}", path))?; - - serde_yaml::from_str(&content) - .with_context(|| format!("Failed to parse config file: {}", path)) - } - - pub fn from_env() -> Result { - // Load from environment variables with fallbacks - Ok(Self { - server: ServerConfig { - host: std::env::var("SERVER_HOST") - .unwrap_or_else(|_| "0.0.0.0".to_string()), - port: std::env::var("SERVER_PORT") - .unwrap_or_else(|_| "8080".to_string()) - .parse() - .context("Invalid SERVER_PORT")?, - workers: std::env::var("SERVER_WORKERS") - .unwrap_or_else(|_| "4".to_string()) - .parse() - .context("Invalid SERVER_WORKERS")?, - }, - database: DatabaseConfig { - url: std::env::var("DATABASE_URL") - .context("DATABASE_URL environment variable required")?, - max_connections: std::env::var("DB_MAX_CONNECTIONS") - .unwrap_or_else(|_| "20".to_string()) - .parse() - .context("Invalid DB_MAX_CONNECTIONS")?, - min_connections: std::env::var("DB_MIN_CONNECTIONS") - .unwrap_or_else(|_| "5".to_string()) - .parse() - .context("Invalid DB_MIN_CONNECTIONS")?, - timeout_secs: std::env::var("DB_TIMEOUT_SECS") - .unwrap_or_else(|_| "30".to_string()) - .parse() - .context("Invalid DB_TIMEOUT_SECS")?, - }, - // ... 
other config sections - }) - } - - pub fn validate(&self) -> Result<()> { + pub fn validate(&self) -> Result<(), ConfigError> { if self.server.port == 0 { - anyhow::bail!("Server port cannot be 0"); + return Err(ConfigError::InvalidValue("Server port cannot be 0".to_string())); } - if self.database.url.is_empty() { - anyhow::bail!("Database URL cannot be empty"); + if self.database.max_connections < self.database.min_connections { + return Err(ConfigError::InvalidValue( + "max_connections must be >= min_connections".to_string() + )); } - if self.server.workers == 0 { - anyhow::bail!("Server workers must be greater than 0"); + if self.security.session_timeout == 0 { + return Err(ConfigError::InvalidValue("Session timeout must be > 0".to_string())); } Ok(()) } } -impl Default for AppConfig { - fn default() -> Self { - Self { - server: ServerConfig { - host: "127.0.0.1".to_string(), - port: 8080, - workers: 4, - }, - database: DatabaseConfig { - url: "sqlite::memory:".to_string(), - max_connections: 20, - min_connections: 5, - timeout_secs: 30, - }, - // ... other default values +// Custom duration serialization for human-readable durations +mod duration_serde { + use serde::{self, Deserialize, Deserializer, Serializer}; + use std::time::Duration; + + pub fn serialize(duration: &Duration, serializer: S) -> Result + where + S: Serializer, + { + let secs = duration.as_secs(); + serializer.serialize_str(&format!("{}s", secs)) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + if s.ends_with("s") { + let num: u64 = s[..s.len()-1].parse().map_err(serde::de::Error::custom)?; + Ok(Duration::from_secs(num)) + } else { + Err(serde::de::Error::custom("Duration must end with 's'")) } } } ``` -## 🔧 TEMPLATING WITH MINIJINJA +## 📄 TEMPLATING WITH MINIJINJA -### Use MiniJinja Instead of Handlebars +### MiniJinja Over Handlebars +- **Use MiniJinja** for templating (not Handlebars) +- **Custom filters and functions** for application-specific logic +- **Template inheritance** for reusable layouts +- **Auto-escaping** for security + +```toml +# Cargo.toml - MiniJinja configuration +[dependencies] +minijinja = { version = "2", features = [ + "json", + "loader", + "loop_controls", + "speedups" +] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +``` + +### Template Engine Setup ```rust -// ✅ Preferred: Use minijinja for templating -use minijinja::{Environment, context}; -use serde_json::Value; -use anyhow::Result; +use minijinja::{Environment, Error as TemplateError}; +use std::collections::HashMap; pub struct TemplateEngine { env: Environment<'static>, } impl TemplateEngine { - pub fn new() -> Self { + pub fn new() -> Result { let mut env = Environment::new(); + // Load templates from directory + env.set_loader(minijinja::path_loader("templates")); + // Add custom filters - env.add_filter("format_date", format_date_filter); + env.add_filter("currency", currency_filter); + env.add_filter("date_format", date_format_filter); env.add_filter("truncate", truncate_filter); - env.add_filter("json_path", json_path_filter); // Add custom functions - env.add_function("now", now_function); - env.add_function("uuid", uuid_function); + env.add_function("asset_url", asset_url_function); + env.add_function("config", config_function); - Self { env } + Ok(Self { env }) } - pub fn render_template(&self, template: &str, data: &Value) -> Result { - let tmpl = self.env.template_from_str(template)?; - let result = 
tmpl.render(context! { data => data })?; - Ok(result) + pub fn render_email_template( + &self, + template_name: &str, + context: &EmailContext, + ) -> Result { + let template = self.env.get_template(template_name)?; + template.render(context) } - pub fn add_template(&mut self, name: &str, source: &str) -> Result<()> { - self.env.add_template(name, source)?; - Ok(()) - } - - pub fn render_named(&self, name: &str, data: &Value) -> Result { - let tmpl = self.env.get_template(name)?; - let result = tmpl.render(context! { data => data })?; - Ok(result) + pub fn render_notification( + &self, + template_name: &str, + context: &NotificationContext, + ) -> Result { + let template = self.env.get_template(template_name)?; + template.render(context) } } // Custom filters -fn format_date_filter(value: Value, format: Option) -> Result { - // Implementation for date formatting - todo!() +fn currency_filter(value: f64, _args: &[minijinja::Value]) -> Result { + Ok(format!("${:.2}", value)) } -fn truncate_filter(value: Value, length: Option) -> Result { - let text = value.as_str().unwrap_or(""); - let len = length.unwrap_or(100); +fn date_format_filter( + value: chrono::DateTime, + args: &[minijinja::Value], +) -> Result { + let format = args + .get(0) + .and_then(|v| v.as_str()) + .unwrap_or("%Y-%m-%d %H:%M:%S"); + Ok(value.format(format).to_string()) +} - if text.len() <= len { - Ok(text.to_string()) +fn truncate_filter( + value: String, + args: &[minijinja::Value], +) -> Result { + let length = args + .get(0) + .and_then(|v| v.as_i64()) + .unwrap_or(100) as usize; + + if value.len() <= length { + Ok(value) } else { - Ok(format!("{}...", &text[..len])) + Ok(format!("{}...", &value[..length])) } } -fn json_path_filter(value: Value, path: String) -> Result { - // Use jsonpath-rust for extraction - use jsonpath_rust::JsonPathFinder; - - let finder = JsonPathFinder::from_str(&value.to_string(), &path) - .map_err(|e| minijinja::Error::new(minijinja::ErrorKind::InvalidOperation, e.to_string()))?; - - let result = finder.find(); - Ok(serde_json::to_value(result).unwrap_or(Value::Null)) -} - // Custom functions -fn now_function(_args: Vec) -> Result { - use chrono::Utc; - Ok(Value::String(Utc::now().to_rfc3339())) +fn asset_url_function(args: &[minijinja::Value]) -> Result { + let path = args + .get(0) + .and_then(|v| v.as_str()) + .ok_or_else(|| TemplateError::new("asset_url requires a path argument"))?; + + Ok(format!("/assets/{}", path)) } -fn uuid_function(_args: Vec) -> Result { - use uuid::Uuid; - Ok(Value::String(Uuid::new_v4().to_string())) -} +fn config_function(args: &[minijinja::Value]) -> Result { + let key = args + .get(0) + .and_then(|v| v.as_str()) + .ok_or_else(|| TemplateError::new("config requires a key argument"))?; -// ❌ Avoid: handlebars (less modern) -// use handlebars::Handlebars; + // Access application configuration + match key { + "app.name" => Ok(minijinja::Value::from("My Application")), + "app.version" => Ok(minijinja::Value::from("1.0.0")), + _ => Ok(minijinja::Value::UNDEFINED), + } +} ``` ### Template Usage Examples -```rust -use serde_json::json; +```jinja2 +{# templates/emails/welcome.html #} +{% extends "emails/base.html" %} -#[cfg(test)] -mod tests { - use super::*; +{% block title %}Welcome to {{ config('app.name') }}!{% endblock %} - #[test] - fn test_template_rendering() { - let engine = TemplateEngine::new(); +{% block content %} +

+<h1>Welcome, {{ user.firstName }}!</h1>
+
+<p>Thank you for registering with {{ config('app.name') }}. Your account has been created successfully.</p>
+
+<h2>Your Account Details:</h2>
+<ul>
+  <li>Username: {{ user.username }}</li>
+  <li>Email: {{ user.email }}</li>
+  <li>Registration Date: {{ user.createdAt | date_format("%B %d, %Y") }}</li>
+</ul>
+
+<h2>Next Steps:</h2>
+<ol>
+  <li>Verify your email address by clicking the link below</li>
+  <li>Complete your profile</li>
+  <li>Explore our features</li>
+</ol>
+{% endblock %}
+```
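+
+A minimal rendering sketch (not part of the original rule set): it shows how the welcome template above could be fed through the `TemplateEngine::render_email_template` API and `EmailContext` struct defined elsewhere in this document. The `User` struct, template path, and URLs below are illustrative assumptions.
+
+```rust
+use serde::Serialize;
+
+// Hypothetical user model; camelCase serialization matches the template's
+// `user.firstName` / `user.createdAt` lookups.
+#[derive(Debug, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct User {
+    pub first_name: String,
+    pub username: String,
+    pub email: String,
+    pub created_at: chrono::DateTime<chrono::Utc>,
+}
+
+pub fn render_welcome_email(
+    engine: &TemplateEngine,
+    user: User,
+) -> Result<String, TemplateError> {
+    let context = EmailContext {
+        user,
+        subject: "Welcome!".to_string(),
+        verification_url: Some("https://app.example.com/verify".to_string()),
+        unsubscribe_url: "https://app.example.com/unsubscribe".to_string(),
+    };
+    // Template names resolve against the `templates/` path loader configured above.
+    engine.render_email_template("emails/welcome.html", &context)
+}
+```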
+
+```jinja2
+{# templates/notifications/order_status.html #}
+{% extends "notifications/base.html" %}
+
+{% block content %}
+<h1>Order Update</h1>
+
+<h2>Order #{{ order.orderNumber }}</h2>
+<p>Status: {{ order.status | title }}</p>
+<p>Total: {{ order.totalAmount | currency }}</p>
+<p>Order Date: {{ order.createdAt | date_format("%B %d, %Y") }}</p>
+
+<h2>Items Ordered:</h2>
+<ul>
+  {% for item in order.items %}
+  <li>{{ item.productName }} - Quantity: {{ item.quantity }} - Price: {{ item.unitPrice | currency }}</li>
+  {% endfor %}
+</ul>
+
+{% if order.trackingNumber %}
+<h2>Tracking Information:</h2>
+<p>Your order is being shipped. Track your package: {{ order.trackingNumber }}</p>
+{% endif %}
+
+<h2>Shipping Address:</h2>
+<p>
+  {{ order.shippingAddress.street }}<br>
+  {{ order.shippingAddress.city }}, {{ order.shippingAddress.state }} {{ order.shippingAddress.zipCode }}<br>
+  {{ order.shippingAddress.country }}
+</p>
+
+{% endblock %} ``` ## 🔍 DATA TRANSFORMATION WITH JSONPATH -### JsonPath Integration +### JSONPath for Data Extraction ```rust use jsonpath_rust::{JsonPathFinder, JsonPathQuery}; -use serde_json::Value; -use anyhow::{Result, Context}; +use serde_json::{Value, json}; pub struct DataTransformer { - template_engine: TemplateEngine, + // Internal state } impl DataTransformer { - pub fn new() -> Self { - Self { - template_engine: TemplateEngine::new(), + pub fn extract_user_info(&self, api_response: &Value) -> Result { + let finder = JsonPathFinder::from_str(api_response, "$.data.users[*]")?; + let users: Vec = finder.find_slice(); + + let mut user_infos = Vec::new(); + + for user in users { + let id = user.path("$.id")?.as_str() + .ok_or(TransformError::MissingField("id"))?; + let name = user.path("$.profile.fullName")?.as_str() + .ok_or(TransformError::MissingField("fullName"))?; + let email = user.path("$.contact.email")?.as_str() + .ok_or(TransformError::MissingField("email"))?; + + user_infos.push(UserInfo { + id: id.to_string(), + name: name.to_string(), + email: email.to_string(), + }); } + + Ok(user_infos) } - /// Extract data using JSONPath - pub fn extract_json_path(&self, data: &Value, path: &str) -> Result { - let path_query = JsonPathQuery::from(path); - let result = data.path(&path_query) - .context("Failed to execute JSONPath query")?; + pub fn extract_order_summary(&self, order_data: &Value) -> Result { + let order_id = order_data.path("$.order.id")?.as_str() + .ok_or(TransformError::MissingField("order.id"))?; - Ok(serde_json::to_value(result)?) + let customer_name = order_data.path("$.customer.profile.name")?.as_str() + .ok_or(TransformError::MissingField("customer.name"))?; + + let total_amount = order_data.path("$.payment.total")?.as_f64() + .ok_or(TransformError::MissingField("payment.total"))?; + + // Extract all item names + let item_names: Vec = order_data + .path("$.items[*].product.name")? 
+ .as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|v| v.as_str()) + .map(|s| s.to_string()) + .collect(); + + Ok(OrderSummary { + order_id: order_id.to_string(), + customer_name: customer_name.to_string(), + total_amount, + item_names, + }) } - /// Transform data using both JSONPath extraction and template rendering - pub fn transform(&self, input: &Value, config: &TransformConfig) -> Result { - match config { - TransformConfig::JsonPath { path } => { - self.extract_json_path(input, path) - } - TransformConfig::Template { template } => { - let rendered = self.template_engine.render_template(template, input)?; - Ok(Value::String(rendered)) - } - TransformConfig::Composite { extractions, template } => { - let mut extracted_data = serde_json::Map::new(); + pub fn build_notification_context( + &self, + user: &User, + event_data: &Value, + ) -> Result { + let event_type = event_data.path("$.type")?.as_str() + .ok_or(TransformError::MissingField("type"))?; - // Extract data using JSONPath - for (key, path) in extractions { - let value = self.extract_json_path(input, path)?; - extracted_data.insert(key.clone(), value); - } + let timestamp = event_data.path("$.timestamp")?.as_str() + .ok_or(TransformError::MissingField("timestamp"))?; - // Render template with extracted data - let data = Value::Object(extracted_data); - let rendered = self.template_engine.render_template(template, &data)?; - Ok(Value::String(rendered)) - } - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase", tag = "type")] -pub enum TransformConfig { - JsonPath { - path: String, - }, - Template { - template: String, - }, - Composite { - extractions: std::collections::HashMap, - template: String, - }, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_json_path_extraction() { - let transformer = DataTransformer::new(); - - let data = json!({ - "rss": { - "items": [ - {"title": "Article 1", "url": "http://example.com/1"}, - {"title": "Article 2", "url": "http://example.com/2"} - ] - } - }); - - // Extract URLs - let urls = transformer.extract_json_path(&data, "$.rss.items[*].url").unwrap(); - assert_eq!(urls, json!(["http://example.com/1", "http://example.com/2"])); - } - - #[test] - fn test_composite_transformation() { - let transformer = DataTransformer::new(); - - let data = json!({ - "articles": [ - {"title": "First", "content": "Content 1"}, - {"title": "Second", "content": "Content 2"} - ] - }); - - let config = TransformConfig::Composite { - extractions: [ - ("titles".to_string(), "$.articles[*].title".to_string()), - ("count".to_string(), "$.articles.length()".to_string()), - ].into_iter().collect(), - template: "Found {{ count }} articles: {{ titles | join(', ') }}".to_string(), + // Extract event-specific data based on type + let context_data = match event_type { + "user_registration" => { + json!({ + "welcomeMessage": "Welcome to our platform!", + "nextSteps": ["Verify email", "Complete profile", "Explore features"] + }) + }, + "order_confirmation" => { + let order_number = event_data.path("$.data.orderNumber")?.as_str() + .ok_or(TransformError::MissingField("orderNumber"))?; + json!({ + "orderNumber": order_number, + "estimatedDelivery": event_data.path("$.data.estimatedDelivery")?, + "trackingUrl": format!("https://tracking.example.com/{}", order_number) + }) + }, + "payment_failed" => { + json!({ + "errorMessage": event_data.path("$.data.error")?, + "retryUrl": "https://app.example.com/payment/retry", + "supportEmail": "support@example.com" + }) + 
}, + _ => json!({}), }; - let result = transformer.transform(&data, &config).unwrap(); - let expected = "Found 2 articles: First, Second"; - assert_eq!(result, Value::String(expected.to_string())); + Ok(NotificationContext { + user: user.clone(), + event_type: event_type.to_string(), + timestamp: timestamp.to_string(), + data: context_data, + }) } } + +// Context structures for templates +#[derive(Debug, Clone, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct EmailContext { + pub user: User, + pub subject: String, + pub verification_url: Option, + pub unsubscribe_url: String, +} + +#[derive(Debug, Clone, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct NotificationContext { + pub user: User, + pub event_type: String, + pub timestamp: String, + pub data: Value, +} + +#[derive(Debug, Clone, serde::Serialize)] +#[serde(rename_all = "camelCase")] +pub struct OrderSummary { + pub order_id: String, + pub customer_name: String, + pub total_amount: f64, + pub item_names: Vec, +} ``` -## 🚨 CONFIGURATION ANTI-PATTERNS +## 🚨 TOOLS ANTI-PATTERNS ### What to Avoid ```rust -// ❌ Don't use TOML for complex configurations -// [server] -// host = "0.0.0.0" -// port = 8080 -// -// [database] -// url = "postgresql://..." -// # TOML becomes unwieldy for nested structures - // ❌ Don't use env_logger // use env_logger; // env_logger::init(); // Use tracing instead -// ❌ Don't use handlebars for new projects -// use handlebars::Handlebars; // Use minijinja instead +// ❌ Don't use TOML for complex configuration +// [server] +// host = "127.0.0.1" +// port = 8080 +// [database] +// url = "postgresql://..." // Use YAML instead -// ❌ Don't hardcode configuration values -// let database_url = "postgresql://localhost/mydb"; // Use config files/env vars +// ❌ Don't use Handlebars for templating +// use handlebars::Handlebars; +// let hbs = Handlebars::new(); // Use MiniJinja instead -// ❌ Don't ignore configuration validation -// pub fn load_config() -> Config { -// serde_yaml::from_str(&content).unwrap() // Add proper validation -// } +// ❌ Don't put secrets in configuration files +// jwt_secret = "hardcoded-secret-key" // Use environment variables + +// ❌ Don't use println! 
for logging in production +// println!("User created: {}", user_id); // Use tracing macros ``` -## ✅ TOOLS & CONFIGURATION CHECKLIST +## ✅ TOOLS AND CONFIG CHECKLIST ```markdown -### Tools & Configuration Verification -- [ ] Uses tracing instead of env_logger -- [ ] Configuration in YAML format (not TOML) -- [ ] All config structs use #[serde(rename_all = "camelCase")] -- [ ] Configuration validation implemented -- [ ] Environment variable fallbacks provided -- [ ] MiniJinja used for templating (not handlebars) -- [ ] JSONPath integration for data extraction -- [ ] Custom filters and functions in templates -- [ ] Structured logging with context -- [ ] File rotation for production logging -- [ ] Configuration loading from multiple sources -- [ ] Default values provided for all config options +### Tools and Configuration Verification +- [ ] Uses tracing ecosystem (not env_logger) +- [ ] YAML configuration files (not TOML for complex configs) +- [ ] Environment variable overrides for sensitive data +- [ ] Configuration validation on startup +- [ ] MiniJinja templating (not Handlebars) +- [ ] Custom filters and functions for templates +- [ ] JSONPath for complex data extraction +- [ ] Structured logging with spans and events +- [ ] File rotation for production logs +- [ ] Template inheritance and reusability +- [ ] Auto-escaping for security +- [ ] Context structures use camelCase serialization ``` -This tools and configuration standard ensures modern, maintainable, and production-ready Rust applications with proper observability and flexible configuration management. +This tools and configuration standard ensures robust, maintainable, and secure configuration and templating patterns for Rust applications. diff --git a/.cursor/rules/rust/features/utilities.mdc b/.cursor/rules/rust/features/utilities.mdc new file mode 100644 index 0000000..e80c63b --- /dev/null +++ b/.cursor/rules/rust/features/utilities.mdc @@ -0,0 +1,705 @@ +--- +description: +globs: +alwaysApply: false +--- +# 🛠️ UTILITY LIBRARIES BEST PRACTICES + +> **TL;DR:** Essential utility patterns for authentication, CLI tools, data structures, and common development tasks. 
+ +## 🔐 AUTHENTICATION AND SECURITY + +### JWT with jsonwebtoken +```toml +# Cargo.toml - JWT configuration +[dependencies] +jsonwebtoken = "9.0" +serde = { version = "1.0", features = ["derive"] } +chrono = { version = "0.4", features = ["serde"] } +``` + +```rust +use jsonwebtoken::{decode, encode, Algorithm, DecodingKey, EncodingKey, Header, Validation}; +use serde::{Deserialize, Serialize}; +use chrono::{DateTime, Utc}; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Claims { + pub sub: String, // Subject (user ID) + pub exp: i64, // Expiration time + pub iat: i64, // Issued at + pub user_role: String, // Custom claim + pub session_id: String, // Session identifier +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TokenPair { + pub access_token: String, + pub refresh_token: String, + pub expires_in: i64, +} + +pub struct JwtService { + encoding_key: EncodingKey, + decoding_key: DecodingKey, + access_token_expiry: i64, // seconds + refresh_token_expiry: i64, // seconds +} + +impl JwtService { + pub fn new(secret: &str) -> Self { + Self { + encoding_key: EncodingKey::from_secret(secret.as_bytes()), + decoding_key: DecodingKey::from_secret(secret.as_bytes()), + access_token_expiry: 3600, // 1 hour + refresh_token_expiry: 604800, // 7 days + } + } + + pub fn generate_token_pair(&self, user_id: &str, role: &str) -> Result { + let now = Utc::now().timestamp(); + let session_id = uuid::Uuid::new_v4().to_string(); + + // Access token + let access_claims = Claims { + sub: user_id.to_string(), + exp: now + self.access_token_expiry, + iat: now, + user_role: role.to_string(), + session_id: session_id.clone(), + }; + + let access_token = encode(&Header::default(), &access_claims, &self.encoding_key)?; + + // Refresh token (longer expiry, minimal claims) + let refresh_claims = Claims { + sub: user_id.to_string(), + exp: now + self.refresh_token_expiry, + iat: now, + user_role: "refresh".to_string(), + session_id, + }; + + let refresh_token = encode(&Header::default(), &refresh_claims, &self.encoding_key)?; + + Ok(TokenPair { + access_token, + refresh_token, + expires_in: self.access_token_expiry, + }) + } + + pub fn validate_token(&self, token: &str) -> Result { + let validation = Validation::new(Algorithm::HS256); + let token_data = decode::(token, &self.decoding_key, &validation)?; + Ok(token_data.claims) + } + + pub fn refresh_access_token(&self, refresh_token: &str) -> Result { + let claims = self.validate_token(refresh_token)?; + + // Verify it's a refresh token + if claims.user_role != "refresh" { + return Err(JwtError::InvalidTokenType); + } + + // Generate new token pair + self.generate_token_pair(&claims.sub, "user") // Default role, should be fetched from DB + } +} + +#[derive(thiserror::Error, Debug)] +pub enum JwtError { + #[error("JWT encoding/decoding error: {0}")] + Token(#[from] jsonwebtoken::errors::Error), + #[error("Invalid token type")] + InvalidTokenType, + #[error("Token expired")] + Expired, +} +``` + +## 🖥️ COMMAND LINE INTERFACES + +### CLI with clap +```toml +# Cargo.toml - CLI configuration +[dependencies] +clap = { version = "4.0", features = ["derive"] } +anyhow = "1.0" +serde = { version = "1.0", features = ["derive"] } +serde_yaml = "0.9" +``` + +```rust +use clap::{Parser, Subcommand, ValueEnum}; +use std::path::PathBuf; + +#[derive(Parser)] +#[command(name = "myapp")] +#[command(about = "A comprehensive application with multiple commands")] +#[command(version)] +pub struct Cli { 
+ /// Global configuration file + #[arg(short, long, value_name = "FILE")] + pub config: Option, + + /// Verbose output + #[arg(short, long, action = clap::ArgAction::Count)] + pub verbose: u8, + + /// Output format + #[arg(long, value_enum, default_value_t = OutputFormat::Text)] + pub format: OutputFormat, + + #[command(subcommand)] + pub command: Commands, +} + +#[derive(Subcommand)] +pub enum Commands { + /// User management commands + User { + #[command(subcommand)] + action: UserAction, + }, + /// Server operations + Server { + #[command(subcommand)] + action: ServerAction, + }, + /// Database operations + Database { + #[command(subcommand)] + action: DatabaseAction, + }, +} + +#[derive(Subcommand)] +pub enum UserAction { + /// Create a new user + Create { + /// Username + #[arg(short, long)] + username: String, + /// Email address + #[arg(short, long)] + email: String, + /// User role + #[arg(short, long, value_enum, default_value_t = UserRole::User)] + role: UserRole, + }, + /// List all users + List { + /// Maximum number of users to display + #[arg(short, long, default_value_t = 50)] + limit: usize, + /// Filter by role + #[arg(short, long)] + role: Option, + }, + /// Delete a user + Delete { + /// User ID or username + #[arg(short, long)] + identifier: String, + /// Force deletion without confirmation + #[arg(short, long)] + force: bool, + }, +} + +#[derive(Subcommand)] +pub enum ServerAction { + /// Start the server + Start { + /// Port to bind to + #[arg(short, long, default_value_t = 8080)] + port: u16, + /// Host to bind to + #[arg(long, default_value = "127.0.0.1")] + host: String, + }, + /// Stop the server + Stop, + /// Show server status + Status, +} + +#[derive(Subcommand)] +pub enum DatabaseAction { + /// Run database migrations + Migrate { + /// Migration direction + #[arg(value_enum, default_value_t = MigrationDirection::Up)] + direction: MigrationDirection, + }, + /// Seed the database with test data + Seed { + /// Environment to seed + #[arg(short, long, default_value = "development")] + env: String, + }, + /// Reset the database + Reset { + /// Skip confirmation prompt + #[arg(short, long)] + yes: bool, + }, +} + +#[derive(ValueEnum, Clone)] +pub enum OutputFormat { + Text, + Json, + Yaml, +} + +#[derive(ValueEnum, Clone)] +pub enum UserRole { + Admin, + User, + Guest, +} + +#[derive(ValueEnum, Clone)] +pub enum MigrationDirection { + Up, + Down, +} + +// CLI execution logic +pub async fn run_cli() -> anyhow::Result<()> { + let cli = Cli::parse(); + + // Initialize logging based on verbosity + let log_level = match cli.verbose { + 0 => "warn", + 1 => "info", + 2 => "debug", + _ => "trace", + }; + + std::env::set_var("RUST_LOG", log_level); + tracing_subscriber::fmt::init(); + + match cli.command { + Commands::User { action } => handle_user_command(action, cli.format).await, + Commands::Server { action } => handle_server_command(action, cli.format).await, + Commands::Database { action } => handle_database_command(action, cli.format).await, + } +} + +async fn handle_user_command(action: UserAction, format: OutputFormat) -> anyhow::Result<()> { + match action { + UserAction::Create { username, email, role } => { + println!("Creating user: {} ({}) with role: {:?}", username, email, role); + // Implementation + } + UserAction::List { limit, role } => { + println!("Listing up to {} users", limit); + if let Some(role) = role { + println!("Filtering by role: {:?}", role); + } + // Implementation + } + UserAction::Delete { identifier, force } => { + if !force { + 
println!("Are you sure you want to delete user '{}'? [y/N]", identifier); + // Confirmation logic + } + // Implementation + } + } + Ok(()) +} +``` + +## 🏗️ BUILDER PATTERNS + +### Typed Builder +```toml +# Cargo.toml - Builder configuration +[dependencies] +typed-builder = "0.21" +serde = { version = "1.0", features = ["derive"] } +``` + +```rust +use typed_builder::TypedBuilder; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, TypedBuilder, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UserConfig { + /// Required: User's email address + pub email: String, + + /// Required: Username + pub username: String, + + /// Optional: Display name (defaults to username) + #[builder(default = self.username.clone())] + pub display_name: String, + + /// Optional: User role + #[builder(default = UserRole::User)] + pub role: UserRole, + + /// Optional: Whether user is active + #[builder(default = true)] + pub is_active: bool, + + /// Optional: User preferences + #[builder(default)] + pub preferences: UserPreferences, + + /// Optional: Profile image URL + #[builder(default, setter(strip_option))] + pub avatar_url: Option, + + /// Optional: User tags (for organization) + #[builder(default)] + pub tags: Vec, +} + +#[derive(Debug, Clone, TypedBuilder, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UserPreferences { + #[builder(default = String::from("en"))] + pub language: String, + + #[builder(default = String::from("UTC"))] + pub timezone: String, + + #[builder(default = true)] + pub email_notifications: bool, + + #[builder(default = false)] + pub dark_mode: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum UserRole { + Admin, + User, + Guest, +} + +impl Default for UserRole { + fn default() -> Self { + Self::User + } +} + +impl Default for UserPreferences { + fn default() -> Self { + Self { + language: "en".to_string(), + timezone: "UTC".to_string(), + email_notifications: true, + dark_mode: false, + } + } +} + +// Usage examples +pub fn create_user_examples() { + // Minimal required fields + let user1 = UserConfig::builder() + .email("john@example.com".to_string()) + .username("john_doe".to_string()) + .build(); + + // Full configuration + let user2 = UserConfig::builder() + .email("admin@example.com".to_string()) + .username("admin".to_string()) + .display_name("System Administrator".to_string()) + .role(UserRole::Admin) + .is_active(true) + .avatar_url("https://example.com/avatar.jpg".to_string()) + .tags(vec!["admin".to_string(), "system".to_string()]) + .preferences( + UserPreferences::builder() + .language("en".to_string()) + .timezone("America/New_York".to_string()) + .email_notifications(false) + .dark_mode(true) + .build() + ) + .build(); + + println!("User 1: {:?}", user1); + println!("User 2: {:?}", user2); +} +``` + +## 🧮 RANDOM GENERATION AND UTILITIES + +### Random Data Generation +```toml +# Cargo.toml - Random utilities +[dependencies] +rand = "0.8" +getrandom = "0.3" +uuid = { version = "1.17", features = ["v4", "serde"] } +base64 = "0.22" +``` + +```rust +use rand::{Rng, thread_rng, distributions::Alphanumeric}; +use uuid::Uuid; +use base64::{Engine as _, engine::general_purpose}; + +pub struct RandomGenerator; + +impl RandomGenerator { + /// Generate a secure random string for API keys, tokens, etc. 
+ pub fn secure_string(length: usize) -> String { + thread_rng() + .sample_iter(&Alphanumeric) + .take(length) + .map(char::from) + .collect() + } + + /// Generate a UUID v4 + pub fn uuid() -> String { + Uuid::new_v4().to_string() + } + + /// Generate a short ID (URL-safe) + pub fn short_id() -> String { + let uuid_bytes = Uuid::new_v4().as_bytes(); + general_purpose::URL_SAFE_NO_PAD.encode(&uuid_bytes[..8]) + } + + /// Generate a random integer within range + pub fn int_range(min: i32, max: i32) -> i32 { + thread_rng().gen_range(min..=max) + } + + /// Generate random bytes + pub fn bytes(length: usize) -> Vec { + let mut bytes = vec![0u8; length]; + getrandom::getrandom(&mut bytes).expect("Failed to generate random bytes"); + bytes + } + + /// Generate a base64-encoded random string + pub fn base64_string(byte_length: usize) -> String { + let bytes = Self::bytes(byte_length); + general_purpose::STANDARD.encode(&bytes) + } + + /// Generate a session ID + pub fn session_id() -> String { + format!("sess_{}", Self::secure_string(32)) + } + + /// Generate a API key + pub fn api_key() -> String { + format!("ak_{}", Self::base64_string(24)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_secure_string_length() { + let str32 = RandomGenerator::secure_string(32); + assert_eq!(str32.len(), 32); + + let str64 = RandomGenerator::secure_string(64); + assert_eq!(str64.len(), 64); + } + + #[test] + fn test_uuid_format() { + let uuid = RandomGenerator::uuid(); + assert!(Uuid::parse_str(&uuid).is_ok()); + } + + #[test] + fn test_short_id_uniqueness() { + let id1 = RandomGenerator::short_id(); + let id2 = RandomGenerator::short_id(); + assert_ne!(id1, id2); + assert!(id1.len() > 0); + } + + #[test] + fn test_int_range() { + for _ in 0..100 { + let val = RandomGenerator::int_range(1, 10); + assert!(val >= 1 && val <= 10); + } + } +} +``` + +## 📊 ENHANCED DERIVE MACROS + +### Using derive_more +```toml +# Cargo.toml - Enhanced derives +[dependencies] +derive_more = { version = "2", features = ["full"] } +serde = { version = "1.0", features = ["derive"] } +``` + +```rust +use derive_more::{Display, Error, From, Into, Constructor, Deref, DerefMut}; +use serde::{Deserialize, Serialize}; + +// Custom string wrapper with validation +#[derive(Debug, Clone, Display, From, Into, Deref, Serialize, Deserialize)] +#[serde(try_from = "String")] +pub struct EmailAddress(String); + +impl TryFrom for EmailAddress { + type Error = ValidationError; + + fn try_from(value: String) -> Result { + if value.contains('@') && value.len() > 5 { + Ok(EmailAddress(value)) + } else { + Err(ValidationError::InvalidEmail) + } + } +} + +// Enhanced error types +#[derive(Debug, Display, Error)] +pub enum ServiceError { + #[display(fmt = "User not found: {}", user_id)] + UserNotFound { user_id: String }, + + #[display(fmt = "Database error: {}", source)] + Database { + #[error(source)] + source: sqlx::Error + }, + + #[display(fmt = "Validation failed: {}", field)] + Validation { field: String }, + + #[display(fmt = "Authentication failed")] + Authentication, +} + +#[derive(Debug, Display, Error)] +pub enum ValidationError { + #[display(fmt = "Invalid email format")] + InvalidEmail, + + #[display(fmt = "Field '{}' is required", field)] + Required { field: String }, + + #[display(fmt = "Value '{}' is too long (max: {})", value, max)] + TooLong { value: String, max: usize }, +} + +// Constructor patterns +#[derive(Debug, Clone, Constructor, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct 
UserSession { + pub user_id: String, + pub session_id: String, + pub created_at: chrono::DateTime, + pub expires_at: chrono::DateTime, + #[new(default)] + pub is_active: bool, +} + +// Wrapper types with automatic conversions +#[derive(Debug, Clone, From, Into, Deref, DerefMut, Serialize, Deserialize)] +pub struct UserId(String); + +#[derive(Debug, Clone, From, Into, Deref, DerefMut, Serialize, Deserialize)] +pub struct SessionToken(String); + +impl UserId { + pub fn new() -> Self { + Self(RandomGenerator::uuid()) + } +} + +impl SessionToken { + pub fn new() -> Self { + Self(RandomGenerator::session_id()) + } +} + +// Usage examples +pub fn demonstrate_enhanced_types() -> Result<(), Box> { + // Email validation + let email = EmailAddress::try_from("user@example.com".to_string())?; + println!("Valid email: {}", email); + + // Constructor usage + let session = UserSession::new( + "user_123".to_string(), + "sess_abc".to_string(), + chrono::Utc::now(), + chrono::Utc::now() + chrono::Duration::hours(24), + ); + println!("Session: {:?}", session); + + // Wrapper types + let user_id = UserId::new(); + let token = SessionToken::new(); + println!("User ID: {}, Token: {}", *user_id, *token); + + Ok(()) +} +``` + +## 🚨 UTILITIES ANTI-PATTERNS + +### What to Avoid +```rust +// ❌ Don't use outdated JWT libraries +// use frank_jwt; // Use jsonwebtoken instead + +// ❌ Don't use structopt (deprecated) +// use structopt::StructOpt; // Use clap with derive instead + +// ❌ Don't manually implement builders +// pub struct ConfigBuilder { +// field1: Option, +// field2: Option, +// } // Use typed-builder instead + +// ❌ Don't use thread_rng() for cryptographic purposes +// let password = thread_rng().gen::().to_string(); // Use getrandom for security + +// ❌ Don't ignore JWT validation +// let claims = decode::(token, key, &Validation::default()); // Configure properly +``` + +## ✅ UTILITIES CHECKLIST + +```markdown +### Utilities Implementation Verification +- [ ] JWT authentication with proper validation and expiry +- [ ] CLI with comprehensive subcommands and help text +- [ ] Builder patterns using typed-builder +- [ ] Enhanced error types with derive_more +- [ ] Secure random generation for sensitive data +- [ ] Proper validation for wrapper types +- [ ] Constructor patterns for complex types +- [ ] Base64 encoding for binary data +- [ ] UUID generation for identifiers +- [ ] Comprehensive error handling +- [ ] Input validation and sanitization +- [ ] Type safety with wrapper types +``` + +This utilities standard provides robust patterns for common development tasks while maintaining type safety and security best practices.