feature: improve rules

This commit is contained in:
Tyr Chen
2025-05-31 11:47:18 -07:00
parent d7d87f3445
commit 99507db677
8 changed files with 4294 additions and 1041 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -10,8 +10,8 @@ alwaysApply: false
## 🎯 FUNDAMENTAL PRINCIPLES ## 🎯 FUNDAMENTAL PRINCIPLES
### Code Organization ### Code Organization
- **Functionality-based files**: Use meaningful file names like `node.rs`, `workflow.rs`, `execution.rs` instead of generic `models.rs`, `traits.rs`, `types.rs` - **Functionality-based files**: Use meaningful file names like `user.rs`, `product.rs`, `auth.rs` instead of generic `models.rs`, `traits.rs`, `types.rs`
- **Meaningful naming**: Avoid names like `WorkflowValidatorImpl` - use descriptive, specific names - **Meaningful naming**: Avoid names like `UserServiceImpl` - use descriptive, specific names
- **File size limits**: Maximum 500 lines per file (excluding tests) - **File size limits**: Maximum 500 lines per file (excluding tests)
- **Function size**: Maximum 150 lines per function - **Function size**: Maximum 150 lines per function
- **Single Responsibility**: Each module should have one clear purpose - **Single Responsibility**: Each module should have one clear purpose
@@ -35,34 +35,135 @@ serde = { workspace = true, features = ["derive"] }
# Request permission before modifying Cargo.toml # Request permission before modifying Cargo.toml
``` ```
### Standard Crate Recommendations
When adding new dependencies, prefer these battle-tested crates:
```toml
# Core utilities
anyhow = "1.0" # Error handling
thiserror = "2.0" # Error type definitions
derive_more = { version = "2", features = ["full"] } # Extended derive macros
typed-builder = "0.21" # Builder pattern
# Async/Concurrency
tokio = { version = "1.45", features = [
"macros",
"rt-multi-thread",
"signal",
"sync"
] }
async-trait = "0.1" # Async traits
futures = "0.3" # Async utilities
dashmap = { version = "6", features = ["serde"] } # Concurrent HashMap
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_yaml = "0.9"
base64 = "0.22"
# Web/HTTP
axum = { version = "0.8", features = ["macros", "http2"] }
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"rustls-tls-webpki-roots",
"http2",
"json",
"cookies",
"gzip",
"brotli",
"zstd",
"deflate"
] }
tower = { version = "0.5", features = ["util"] }
tower-http = { version = "0.6", features = ["cors", "trace"] }
http = "1"
# Database
sqlx = { version = "0.8", features = [
"chrono",
"postgres",
"runtime-tokio-rustls",
"sqlite",
"time",
"uuid"
] }
# Documentation/API
utoipa = { version = "5", features = ["axum_extras"] }
utoipa-axum = { version = "0.2" }
utoipa-swagger-ui = { version = "9", features = [
"axum",
"vendored"
], default-features = false }
schemars = { version = "0.8", features = ["chrono", "url"] }
# Time/Date
chrono = { version = "0.4", features = ["serde"] }
time = { version = "0.3", features = ["serde"] }
# Templating/Text Processing
minijinja = { version = "2", features = [
"json",
"loader",
"loop_controls",
"speedups"
] }
regex = "1"
htmd = "0.2" # HTML to Markdown
# Authentication/Security
jsonwebtoken = "9.0"
uuid = { version = "1.17", features = ["v4", "serde"] }
# Data Processing
jsonpath-rust = "1"
url = "2.5"
# CLI (when needed)
clap = { version = "4.0", features = ["derive"] }
# Utilities
rand = "0.8"
getrandom = "0.3"
atomic_enum = "0.3" # Atomic enumerations
# Logging/Observability
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
```
### Version Strategy ### Version Strategy
- **Always use latest versions** when adding new dependencies - **Always use latest versions** when adding new dependencies
- **Request permission** before modifying `Cargo.toml` - **Request permission** before modifying `Cargo.toml`
- **Check workspace first** - never duplicate dependencies unnecessarily - **Check workspace first** - never duplicate dependencies unnecessarily
- **Use specific feature flags** to minimize compilation time and binary size
- **Prefer rustls over openssl** for TLS (better for cross-compilation)
## 🏗️ CODE STRUCTURE PATTERNS ## 🏗️ CODE STRUCTURE PATTERNS
### Data Structure Organization ### Data Structure Organization
```rust ```rust
// ✅ Good: Functionality-based organization // ✅ Good: Functionality-based organization
// src/workflow.rs - All workflow-related types and logic // src/user.rs - All user-related types and logic
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] // Always use camelCase for JSON serialization #[serde(rename_all = "camelCase")] // Always use camelCase for JSON serialization
pub struct WorkflowDefinition { pub struct User {
pub workflow_id: String, pub user_id: String,
pub display_name: String, pub display_name: String,
pub email: String,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
} }
// ✅ Good: Meaningful trait names // ✅ Good: Meaningful trait names
pub trait WorkflowValidator { pub trait UserValidator {
fn validate(&self, workflow: &WorkflowDefinition) -> Result<(), ValidationError>; fn validate(&self, user: &User) -> Result<(), ValidationError>;
} }
// ❌ Bad: Generic file organization // ❌ Bad: Generic file organization
// src/models.rs, src/traits.rs, src/types.rs // src/models.rs, src/traits.rs, src/types.rs
// ❌ Bad: Poor naming // ❌ Bad: Poor naming
// struct WorkflowValidatorImpl // struct UserValidatorImpl
``` ```
### Serde Configuration ### Serde Configuration
@@ -71,13 +172,13 @@ pub trait WorkflowValidator {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ApiResponse { pub struct ApiResponse {
pub workflow_id: String, pub user_id: String,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub is_active: bool, pub is_active: bool,
} }
// This serializes to: // This serializes to:
// {"workflowId": "...", "createdAt": "...", "isActive": true} // {"userId": "...", "createdAt": "...", "isActive": true}
``` ```
## 🔧 BUILD AND QUALITY CHECKS ## 🔧 BUILD AND QUALITY CHECKS
@@ -115,30 +216,30 @@ expect_used = "deny"
```rust ```rust
// ✅ Good: Feature-based modules // ✅ Good: Feature-based modules
src/ src/
├── workflow/ ├── user/
│ ├── mod.rs │ ├── mod.rs
│ ├── validator.rs // WorkflowValidator trait and implementations │ ├── service.rs // UserService logic
│ ├── executor.rs // WorkflowExecutor logic │ ├── repository.rs // User data access
│ └── definition.rs // WorkflowDefinition types │ └── validator.rs // User validation
├── node/ ├── product/
│ ├── mod.rs │ ├── mod.rs
│ ├── registry.rs // NodeRegistry (not NodeTypeRegistry) │ ├── catalog.rs // Product catalog logic
│ └── executor.rs // Node execution logic │ └── pricing.rs // Product pricing logic
└── storage/ └── auth/
├── mod.rs ├── mod.rs
├── entities.rs // Database entities ├── token.rs // Token management
└── repositories.rs // Data access patterns └── session.rs // Session handling
``` ```
### Naming Best Practices ### Naming Best Practices
```rust ```rust
// ✅ Good naming examples // ✅ Good naming examples
pub struct WorkflowValidator; // Clear, specific pub struct UserService; // Clear, specific
pub struct NodeExecutor; // Action-oriented pub struct ProductCatalog; // Action-oriented
pub struct DatabaseConnection; // Descriptive pub struct DatabaseConnection; // Descriptive
// ❌ Bad naming examples // ❌ Bad naming examples
pub struct WorkflowValidatorImpl; // Unnecessary "Impl" suffix pub struct UserServiceImpl; // Unnecessary "Impl" suffix
pub struct Helper; // Too generic pub struct Helper; // Too generic
pub struct Manager; // Vague responsibility pub struct Manager; // Vague responsibility
``` ```
@@ -153,15 +254,15 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_workflow_validation() { fn test_user_validation() {
let validator = WorkflowValidator::new(); let validator = UserValidator::new();
let workflow = WorkflowDefinition::default(); let user = User::default();
assert!(validator.validate(&workflow).is_ok()); assert!(validator.validate(&user).is_ok());
} }
} }
// ❌ Don't create separate test files for unit tests // ❌ Don't create separate test files for unit tests
// tests/workflow_test.rs (this is for integration tests only) // tests/user_test.rs (this is for integration tests only)
``` ```
### Test Naming ### Test Naming
@@ -171,12 +272,12 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_valid_workflow_passes_validation() { fn test_valid_email_passes_validation() {
// Test name clearly describes the scenario // Test name clearly describes the scenario
} }
#[test] #[test]
fn test_empty_workflow_id_returns_error() { fn test_empty_email_returns_error() {
// Specific about what's being tested // Specific about what's being tested
} }
} }
@@ -186,26 +287,27 @@ mod tests {
### Code Documentation ### Code Documentation
```rust ```rust
/// Validates workflow definitions according to business rules. /// Validates user data according to business rules.
/// ///
/// # Examples /// # Examples
/// ///
/// ```rust /// ```rust
/// let validator = WorkflowValidator::new(); /// let validator = UserValidator::new();
/// let workflow = WorkflowDefinition::builder() /// let user = User::builder()
/// .workflow_id("test-workflow") /// .email("user@example.com")
/// .display_name("John Doe")
/// .build(); /// .build();
/// ///
/// assert!(validator.validate(&workflow).is_ok()); /// assert!(validator.validate(&user).is_ok());
/// ``` /// ```
/// ///
/// # Errors /// # Errors
/// ///
/// Returns `ValidationError` if: /// Returns `ValidationError` if:
/// - Workflow ID is empty or invalid /// - Email is empty or invalid format
/// - Display name is too long
/// - Required fields are missing /// - Required fields are missing
/// - Business rules are violated pub struct UserValidator {
pub struct WorkflowValidator {
rules: Vec<ValidationRule>, rules: Vec<ValidationRule>,
} }
``` ```
@@ -220,7 +322,7 @@ pub struct WorkflowValidator {
// src/helpers.rs - unclear responsibility // src/helpers.rs - unclear responsibility
// ❌ Don't use implementation suffixes // ❌ Don't use implementation suffixes
pub struct WorkflowValidatorImpl; pub struct UserValidatorImpl;
pub struct DatabaseManagerImpl; pub struct DatabaseManagerImpl;
// ❌ Don't mix concerns in single files // ❌ Don't mix concerns in single files

File diff suppressed because it is too large Load Diff

View File

@@ -18,7 +18,15 @@ alwaysApply: false
```toml ```toml
# Cargo.toml - Tokio configuration # Cargo.toml - Tokio configuration
[dependencies] [dependencies]
tokio = { workspace = true, features = ["rt-multi-thread", "macros", "sync", "time", "fs"] } tokio = { version = "1.45", features = [
"macros",
"rt-multi-thread",
"signal",
"sync"
] }
dashmap = { version = "6", features = ["serde"] }
async-trait = "0.1"
futures = "0.3"
``` ```
## 🔒 SYNCHRONIZATION PRIMITIVES ## 🔒 SYNCHRONIZATION PRIMITIVES
@@ -30,28 +38,28 @@ use tokio::sync::{RwLock, Mutex, broadcast, mpsc, oneshot};
use std::sync::Arc; use std::sync::Arc;
// ✅ Good: Async-friendly RwLock // ✅ Good: Async-friendly RwLock
pub struct WorkflowCache { pub struct UserCache {
data: Arc<RwLock<HashMap<String, WorkflowDefinition>>>, data: Arc<RwLock<HashMap<String, User>>>,
} }
impl WorkflowCache { impl UserCache {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
data: Arc::new(RwLock::new(HashMap::new())), data: Arc::new(RwLock::new(HashMap::new())),
} }
} }
pub async fn get(&self, id: &str) -> Option<WorkflowDefinition> { pub async fn get(&self, id: &str) -> Option<User> {
let data = self.data.read().await; let data = self.data.read().await;
data.get(id).cloned() data.get(id).cloned()
} }
pub async fn insert(&self, id: String, workflow: WorkflowDefinition) { pub async fn insert(&self, id: String, user: User) {
let mut data = self.data.write().await; let mut data = self.data.write().await;
data.insert(id, workflow); data.insert(id, user);
} }
pub async fn remove(&self, id: &str) -> Option<WorkflowDefinition> { pub async fn remove(&self, id: &str) -> Option<User> {
let mut data = self.data.write().await; let mut data = self.data.write().await;
data.remove(id) data.remove(id)
} }
@@ -68,24 +76,24 @@ use dashmap::DashMap;
use std::sync::Arc; use std::sync::Arc;
// ✅ Preferred: DashMap for concurrent hash maps // ✅ Preferred: DashMap for concurrent hash maps
pub struct NodeRegistry { pub struct ServiceRegistry {
nodes: Arc<DashMap<String, Box<dyn NodeType>>>, services: Arc<DashMap<String, Box<dyn Service>>>,
categories: Arc<DashMap<String, Vec<String>>>, categories: Arc<DashMap<String, Vec<String>>>,
} }
impl NodeRegistry { impl ServiceRegistry {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
nodes: Arc::new(DashMap::new()), services: Arc::new(DashMap::new()),
categories: Arc::new(DashMap::new()), categories: Arc::new(DashMap::new()),
} }
} }
pub fn register_node(&self, id: String, node: Box<dyn NodeType>) { pub fn register_service(&self, id: String, service: Box<dyn Service>) {
let category = node.category().to_string(); let category = service.category().to_string();
// Insert the node // Insert the service
self.nodes.insert(id.clone(), node); self.services.insert(id.clone(), service);
// Update category index // Update category index
self.categories self.categories
@@ -94,8 +102,8 @@ impl NodeRegistry {
.push(id); .push(id);
} }
pub fn get_node(&self, id: &str) -> Option<dashmap::mapref::one::Ref<String, Box<dyn NodeType>>> { pub fn get_service(&self, id: &str) -> Option<dashmap::mapref::one::Ref<String, Box<dyn Service>>> {
self.nodes.get(id) self.services.get(id)
} }
pub fn list_by_category(&self, category: &str) -> Vec<String> { pub fn list_by_category(&self, category: &str) -> Vec<String> {
@@ -105,14 +113,14 @@ impl NodeRegistry {
.unwrap_or_default() .unwrap_or_default()
} }
pub fn list_all_nodes(&self) -> Vec<String> { pub fn list_all_services(&self) -> Vec<String> {
self.nodes.iter().map(|entry| entry.key().clone()).collect() self.services.iter().map(|entry| entry.key().clone()).collect()
} }
} }
// ❌ Avoid: Mutex<HashMap> for concurrent access // ❌ Avoid: Mutex<HashMap> for concurrent access
// pub struct BadNodeRegistry { // pub struct BadServiceRegistry {
// nodes: Arc<Mutex<HashMap<String, Box<dyn NodeType>>>> // services: Arc<Mutex<HashMap<String, Box<dyn Service>>>>
// } // }
``` ```
@@ -124,7 +132,7 @@ use tokio::sync::mpsc;
use tracing::{info, error}; use tracing::{info, error};
pub struct EventProcessor { pub struct EventProcessor {
sender: mpsc::UnboundedSender<WorkflowEvent>, sender: mpsc::UnboundedSender<SystemEvent>,
} }
impl EventProcessor { impl EventProcessor {
@@ -137,17 +145,17 @@ impl EventProcessor {
(processor, handle) (processor, handle)
} }
pub fn send_event(&self, event: WorkflowEvent) -> Result<(), mpsc::error::SendError<WorkflowEvent>> { pub fn send_event(&self, event: SystemEvent) -> Result<(), mpsc::error::SendError<SystemEvent>> {
self.sender.send(event) self.sender.send(event)
} }
} }
pub struct EventProcessorHandle { pub struct EventProcessorHandle {
receiver: mpsc::UnboundedReceiver<WorkflowEvent>, receiver: mpsc::UnboundedReceiver<SystemEvent>,
} }
impl EventProcessorHandle { impl EventProcessorHandle {
fn new(receiver: mpsc::UnboundedReceiver<WorkflowEvent>) -> Self { fn new(receiver: mpsc::UnboundedReceiver<SystemEvent>) -> Self {
Self { receiver } Self { receiver }
} }
@@ -160,19 +168,19 @@ impl EventProcessorHandle {
info!("Event processor stopped"); info!("Event processor stopped");
} }
async fn process_event(&self, event: WorkflowEvent) -> Result<(), ProcessingError> { async fn process_event(&self, event: SystemEvent) -> Result<(), ProcessingError> {
match event { match event {
WorkflowEvent::Started { workflow_id, .. } => { SystemEvent::UserRegistered { user_id, .. } => {
info!("Workflow {} started", workflow_id); info!("User {} registered", user_id);
// Process workflow start // Process user registration
} }
WorkflowEvent::Completed { workflow_id, .. } => { SystemEvent::OrderCompleted { order_id, .. } => {
info!("Workflow {} completed", workflow_id); info!("Order {} completed", order_id);
// Process workflow completion // Process order completion
} }
WorkflowEvent::Failed { workflow_id, error, .. } => { SystemEvent::PaymentFailed { payment_id, error, .. } => {
error!("Workflow {} failed: {}", workflow_id, error); error!("Payment {} failed: {}", payment_id, error);
// Process workflow failure // Process payment failure
} }
} }
Ok(()) Ok(())
@@ -210,11 +218,11 @@ pub async fn start_event_monitoring(event_bus: Arc<EventBus>) {
tokio::spawn(async move { tokio::spawn(async move {
while let Ok(event) = receiver.recv().await { while let Ok(event) = receiver.recv().await {
match event { match event {
SystemEvent::NodeExecutionStarted { node_id, .. } => { SystemEvent::UserRegistered { user_id, .. } => {
info!("Node {} started execution", node_id); info!("User {} registered", user_id);
} }
SystemEvent::NodeExecutionCompleted { node_id, .. } => { SystemEvent::OrderCompleted { order_id, .. } => {
info!("Node {} completed execution", node_id); info!("Order {} completed", order_id);
} }
SystemEvent::SystemShutdown => { SystemEvent::SystemShutdown => {
info!("System shutdown requested"); info!("System shutdown requested");
@@ -235,13 +243,13 @@ pub struct AsyncValidator {
} }
impl AsyncValidator { impl AsyncValidator {
pub async fn validate_workflow(&self, workflow: WorkflowDefinition) -> Result<ValidationResult, ValidationError> { pub async fn validate_user(&self, user: User) -> Result<ValidationResult, ValidationError> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
// Spawn validation task // Spawn validation task
let workflow_clone = workflow.clone(); let user_clone = user.clone();
tokio::spawn(async move { tokio::spawn(async move {
let result = perform_validation(workflow_clone).await; let result = perform_validation(user_clone).await;
let _ = tx.send(result); let _ = tx.send(result);
}); });
@@ -251,12 +259,12 @@ impl AsyncValidator {
} }
} }
async fn perform_validation(workflow: WorkflowDefinition) -> Result<ValidationResult, ValidationError> { async fn perform_validation(user: User) -> Result<ValidationResult, ValidationError> {
// Expensive validation logic // Expensive validation logic
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
if workflow.nodes.is_empty() { if user.email.is_empty() {
return Err(ValidationError::EmptyWorkflow); return Err(ValidationError::EmptyEmail);
} }
Ok(ValidationResult::Valid) Ok(ValidationResult::Valid)
@@ -270,24 +278,24 @@ async fn perform_validation(workflow: WorkflowDefinition) -> Result<ValidationRe
use tokio::task::JoinSet; use tokio::task::JoinSet;
use std::collections::HashMap; use std::collections::HashMap;
pub struct WorkflowExecutor { pub struct BatchProcessor {
// Internal state // Internal state
} }
impl WorkflowExecutor { impl BatchProcessor {
pub async fn execute_workflow_parallel(&self, workflow: &WorkflowDefinition) -> Result<ExecutionResult, ExecutionError> { pub async fn process_batch_parallel(&self, items: &[ProcessingItem]) -> Result<BatchResult, ProcessingError> {
let mut join_set = JoinSet::new(); let mut join_set = JoinSet::new();
let mut results = HashMap::new(); let mut results = HashMap::new();
// Execute nodes in parallel where possible // Process items in parallel where possible
for node in &workflow.nodes { for item in items {
if self.can_execute_parallel(node, &results) { if self.can_process_parallel(item, &results) {
let node_clone = node.clone(); let item_clone = item.clone();
let executor = self.clone(); let processor = self.clone();
join_set.spawn(async move { join_set.spawn(async move {
let result = executor.execute_node(&node_clone).await; let result = processor.process_item(&item_clone).await;
(node_clone.id.clone(), result) (item_clone.id.clone(), result)
}); });
} }
} }
@@ -295,21 +303,21 @@ impl WorkflowExecutor {
// Collect results // Collect results
while let Some(result) = join_set.join_next().await { while let Some(result) = join_set.join_next().await {
match result { match result {
Ok((node_id, execution_result)) => { Ok((item_id, processing_result)) => {
results.insert(node_id, execution_result?); results.insert(item_id, processing_result?);
} }
Err(join_error) => { Err(join_error) => {
return Err(ExecutionError::TaskFailed(join_error.to_string())); return Err(ProcessingError::TaskFailed(join_error.to_string()));
} }
} }
} }
Ok(ExecutionResult { node_results: results }) Ok(BatchResult { item_results: results })
} }
fn can_execute_parallel(&self, node: &NodeDefinition, completed_results: &HashMap<String, NodeResult>) -> bool { fn can_process_parallel(&self, item: &ProcessingItem, completed_results: &HashMap<String, ItemResult>) -> bool {
// Check if all dependencies are satisfied // Check if all dependencies are satisfied
node.dependencies.iter().all(|dep| completed_results.contains_key(dep)) item.dependencies.iter().all(|dep| completed_results.contains_key(dep))
} }
} }
``` ```
@@ -334,7 +342,7 @@ impl Application {
pub async fn start(&mut self) -> Result<(), ApplicationError> { pub async fn start(&mut self) -> Result<(), ApplicationError> {
// Start background services // Start background services
self.start_workflow_executor().await?; self.start_user_service().await?;
self.start_event_processor().await?; self.start_event_processor().await?;
self.start_health_monitor().await?; self.start_health_monitor().await?;
@@ -345,18 +353,18 @@ impl Application {
self.shutdown_gracefully().await self.shutdown_gracefully().await
} }
async fn start_workflow_executor(&mut self) -> Result<(), ApplicationError> { async fn start_user_service(&mut self) -> Result<(), ApplicationError> {
let token = self.shutdown_token.clone(); let token = self.shutdown_token.clone();
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
tokio::select! { tokio::select! {
_ = token.cancelled() => { _ = token.cancelled() => {
info!("Workflow executor shutdown requested"); info!("User service shutdown requested");
break; break;
} }
_ = tokio::time::sleep(tokio::time::Duration::from_secs(1)) => { _ = tokio::time::sleep(tokio::time::Duration::from_secs(1)) => {
// Process workflows // Process user operations
} }
} }
} }
@@ -409,19 +417,19 @@ mod tests {
use tokio::time::{timeout, Duration}; use tokio::time::{timeout, Duration};
#[tokio::test] #[tokio::test]
async fn test_workflow_cache_concurrent_access() { async fn test_user_cache_concurrent_access() {
let cache = WorkflowCache::new(); let cache = UserCache::new();
let workflow = WorkflowDefinition::default(); let user = User::default();
// Test concurrent insertions // Test concurrent insertions
let mut handles = Vec::new(); let mut handles = Vec::new();
for i in 0..10 { for i in 0..10 {
let cache_clone = cache.clone(); let cache_clone = cache.clone();
let workflow_clone = workflow.clone(); let user_clone = user.clone();
handles.push(tokio::spawn(async move { handles.push(tokio::spawn(async move {
cache_clone.insert(format!("workflow_{}", i), workflow_clone).await; cache_clone.insert(format!("user_{}", i), user_clone).await;
})); }));
} }
@@ -430,9 +438,9 @@ mod tests {
handle.await.unwrap(); handle.await.unwrap();
} }
// Verify all workflows were inserted // Verify all users were inserted
for i in 0..10 { for i in 0..10 {
let result = cache.get(&format!("workflow_{}", i)).await; let result = cache.get(&format!("user_{}", i)).await;
assert!(result.is_some()); assert!(result.is_some());
} }
} }
@@ -445,8 +453,8 @@ mod tests {
let processor_task = tokio::spawn(handle.run()); let processor_task = tokio::spawn(handle.run());
// Send test events // Send test events
let event = WorkflowEvent::Started { let event = SystemEvent::UserRegistered {
workflow_id: "test-workflow".to_string(), user_id: "test-user".to_string(),
timestamp: Utc::now(), timestamp: Utc::now(),
}; };

View File

@@ -18,7 +18,13 @@ alwaysApply: false
```toml ```toml
# Cargo.toml - SQLx configuration # Cargo.toml - SQLx configuration
[dependencies] [dependencies]
sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "json"] } sqlx = { version = "0.8", features = [
"chrono",
"postgres",
"runtime-tokio-rustls",
"sqlite",
"uuid"
] }
``` ```
## 🔧 QUERY PATTERNS ## 🔧 QUERY PATTERNS
@@ -28,23 +34,23 @@ sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "sqli
// ✅ Preferred: Use sqlx::query_as with custom types // ✅ Preferred: Use sqlx::query_as with custom types
#[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)] #[derive(Debug, Clone, sqlx::FromRow, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct WorkflowExecution { pub struct User {
pub id: Uuid, pub id: Uuid,
pub workflow_id: String, pub username: String,
pub status: ExecutionStatus, pub email: String,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub metadata: Option<serde_json::Value>, pub is_active: bool,
} }
impl WorkflowExecution { impl User {
pub async fn find_by_id( pub async fn find_by_id(
pool: &PgPool, pool: &PgPool,
id: Uuid id: Uuid
) -> Result<Option<Self>, sqlx::Error> { ) -> Result<Option<Self>, sqlx::Error> {
sqlx::query_as::<_, WorkflowExecution>( sqlx::query_as::<_, User>(
"SELECT id, workflow_id, status, created_at, updated_at, metadata "SELECT id, username, email, created_at, updated_at, is_active
FROM workflow_executions FROM users
WHERE id = $1" WHERE id = $1"
) )
.bind(id) .bind(id)
@@ -52,20 +58,18 @@ impl WorkflowExecution {
.await .await
} }
pub async fn list_by_workflow( pub async fn list_active_users(
pool: &PgPool, pool: &PgPool,
workflow_id: &str,
limit: i64, limit: i64,
offset: i64, offset: i64,
) -> Result<Vec<Self>, sqlx::Error> { ) -> Result<Vec<Self>, sqlx::Error> {
sqlx::query_as::<_, WorkflowExecution>( sqlx::query_as::<_, User>(
"SELECT id, workflow_id, status, created_at, updated_at, metadata "SELECT id, username, email, created_at, updated_at, is_active
FROM workflow_executions FROM users
WHERE workflow_id = $1 WHERE is_active = true
ORDER BY created_at DESC ORDER BY created_at DESC
LIMIT $2 OFFSET $3" LIMIT $1 OFFSET $2"
) )
.bind(workflow_id)
.bind(limit) .bind(limit)
.bind(offset) .bind(offset)
.fetch_all(pool) .fetch_all(pool)
@@ -88,28 +92,33 @@ use chrono::{DateTime, Utc};
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] #[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct User { pub struct Product {
pub id: Uuid, pub id: Uuid,
pub username: String, pub name: String,
pub email: String, pub description: Option<String>,
pub price: rust_decimal::Decimal,
pub category_id: Uuid,
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub is_active: bool, pub is_available: bool,
} }
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CreateUserRequest { pub struct CreateProductRequest {
pub username: String, pub name: String,
pub email: String, pub description: Option<String>,
pub price: rust_decimal::Decimal,
pub category_id: Uuid,
} }
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct UpdateUserRequest { pub struct UpdateProductRequest {
pub username: Option<String>, pub name: Option<String>,
pub email: Option<String>, pub description: Option<String>,
pub is_active: Option<bool>, pub price: Option<rust_decimal::Decimal>,
pub is_available: Option<bool>,
} }
``` ```
@@ -344,6 +353,37 @@ CREATE TRIGGER update_users_updated_at
EXECUTE FUNCTION update_updated_at_column(); EXECUTE FUNCTION update_updated_at_column();
``` ```
### Product Table Example
```sql
-- migrations/20240501000002_create_categories_and_products_tables.sql
-- Lookup table of product categories; category names must be unique.
CREATE TABLE categories (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(255) NOT NULL UNIQUE,
description TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Products reference a category and carry a fixed-precision price
-- (DECIMAL, never FLOAT, for money).
CREATE TABLE products (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name VARCHAR(255) NOT NULL,
description TEXT,
price DECIMAL(10,2) NOT NULL,
category_id UUID NOT NULL REFERENCES categories(id),
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
is_available BOOLEAN NOT NULL DEFAULT true
);
-- Indexes for the common access paths: browse by category, filter/sort by
-- price, and search by name.
CREATE INDEX idx_products_category ON products(category_id);
CREATE INDEX idx_products_price ON products(price);
CREATE INDEX idx_products_name ON products(name);
-- Keep updated_at current on every row update (uses the shared
-- update_updated_at_column() function defined in an earlier migration).
CREATE TRIGGER update_products_updated_at
BEFORE UPDATE ON products
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();
```
## 🔧 CONNECTION MANAGEMENT ## 🔧 CONNECTION MANAGEMENT
### Database Pool Configuration ### Database Pool Configuration

View File

@@ -0,0 +1,225 @@
---
description:
globs:
alwaysApply: false
---
# 🌐 HTTP CLIENT BEST PRACTICES
> **TL;DR:** Modern HTTP client patterns using reqwest with proper error handling, timeouts, and security configurations.
## 🔧 REQWEST CONFIGURATION
### Standard Dependencies
```toml
# Cargo.toml - HTTP client configuration
[dependencies]
reqwest = { version = "0.12", default-features = false, features = [
"charset",
"rustls-tls-webpki-roots",
"http2",
"json",
"cookies",
"gzip",
"brotli",
"zstd",
"deflate"
] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.45", features = ["macros", "rt-multi-thread"] }
anyhow = "1.0"
thiserror = "2.0"
url = "2.5"
```
## 🏗️ CLIENT BUILDER PATTERN
### Configurable HTTP Client
```rust
use reqwest::{Client, ClientBuilder, Response};
use serde::{Deserialize, Serialize};
use std::time::Duration;
use url::Url;
/// Typed JSON HTTP client: joins request paths onto a fixed base URL and
/// deserializes responses, keeping the raw body available on failure.
pub struct HttpClient {
    client: Client,
    base_url: Url,
    default_timeout: Duration,
}

impl HttpClient {
    /// Entry point for the builder-style constructor.
    pub fn builder() -> HttpClientBuilder {
        HttpClientBuilder::new()
    }

    /// Sends a GET request to `path` (joined onto the base URL) and decodes
    /// the JSON response body into `T`.
    ///
    /// # Errors
    ///
    /// Returns `HttpError` on URL-join failure, transport failure, a
    /// non-success status, or a body that fails to deserialize.
    pub async fn get<T>(&self, path: &str) -> Result<T, HttpError>
    where
        T: for<'de> Deserialize<'de>,
    {
        let endpoint = self.base_url.join(path)?;
        let request = self.client.get(endpoint).timeout(self.default_timeout);
        self.handle_response(request.send().await?).await
    }

    /// Sends a POST request with `body` serialized as JSON and decodes the
    /// JSON response body into `T`.
    ///
    /// # Errors
    ///
    /// Same failure modes as [`HttpClient::get`].
    pub async fn post<T, B>(&self, path: &str, body: &B) -> Result<T, HttpError>
    where
        T: for<'de> Deserialize<'de>,
        B: Serialize,
    {
        let endpoint = self.base_url.join(path)?;
        let request = self
            .client
            .post(endpoint)
            .json(body)
            .timeout(self.default_timeout);
        self.handle_response(request.send().await?).await
    }

    /// Converts a raw [`Response`] into `T`, preserving the response body in
    /// the error so failures remain debuggable.
    async fn handle_response<T>(&self, response: Response) -> Result<T, HttpError>
    where
        T: for<'de> Deserialize<'de>,
    {
        let status = response.status();
        if !status.is_success() {
            // Best-effort body capture: an unreadable error body degrades to
            // an empty string rather than masking the status code.
            let body = response.text().await.unwrap_or_default();
            return Err(HttpError::UnexpectedStatus {
                status: status.as_u16(),
                body,
            });
        }
        // Read the text first so a decode failure can report the offending body.
        let text = response.text().await?;
        match serde_json::from_str(&text) {
            Ok(value) => Ok(value),
            Err(e) => Err(HttpError::Deserialization {
                error: e.to_string(),
                body: text,
            }),
        }
    }
}
pub struct HttpClientBuilder {
base_url: Option<String>,
timeout: Option<Duration>,
user_agent: Option<String>,
headers: Vec<(String, String)>,
accept_invalid_certs: bool,
}
impl HttpClientBuilder {
pub fn new() -> Self {
Self {
base_url: None,
timeout: Some(Duration::from_secs(30)),
user_agent: Some("rust-http-client/1.0".to_string()),
headers: Vec::new(),
accept_invalid_certs: false,
}
}
pub fn base_url(mut self, url: &str) -> Self {
self.base_url = Some(url.to_string());
self
}
pub fn timeout(mut self, timeout: Duration) -> Self {
self.timeout = Some(timeout);
self
}
pub fn build(self) -> Result<HttpClient, HttpError> {
let base_url = self.base_url
.ok_or_else(|| HttpError::Configuration("Base URL is required".to_string()))?;
let mut client_builder = ClientBuilder::new()
.danger_accept_invalid_certs(self.accept_invalid_certs);
if let Some(timeout) = self.timeout {
client_builder = client_builder.timeout(timeout);
}
if let Some(user_agent) = &self.user_agent {
client_builder = client_builder.user_agent(user_agent);
}
let client = client_builder.build()?;
let parsed_url = Url::parse(&base_url)?;
Ok(HttpClient {
client,
base_url: parsed_url,
default_timeout: self.timeout.unwrap_or(Duration::from_secs(30)),
})
}
}
```
## 🚨 ERROR HANDLING
### Comprehensive Error Types
```rust
/// Error type covering every failure mode of the HTTP client.
#[derive(thiserror::Error, Debug)]
pub enum HttpError {
    /// Transport-level failure surfaced by reqwest (connect, timeout, TLS, ...).
    #[error("HTTP request error: {0}")]
    Request(#[from] reqwest::Error),
    /// The base URL or a joined path was not a valid URL.
    #[error("URL parsing error: {0}")]
    UrlParse(#[from] url::ParseError),
    /// Failure while serializing a request body to JSON.
    #[error("JSON serialization error: {0}")]
    Serialization(#[from] serde_json::Error),
    /// Response body could not be decoded; raw body kept for debugging.
    #[error("Deserialization error: {error}, body: {body}")]
    Deserialization { error: String, body: String },
    /// Non-2xx HTTP status; body preserved for diagnostics.
    #[error("Unexpected HTTP status {status}: {body}")]
    UnexpectedStatus { status: u16, body: String },
    /// Builder/configuration problem (e.g. missing base URL).
    #[error("Configuration error: {0}")]
    Configuration(String),
    /// Explicit timeout marker (transport timeouts arrive as `Request`).
    #[error("Timeout occurred")]
    Timeout,
    /// Authentication/authorization failure.
    #[error("Authentication failed")]
    Authentication,
}
impl HttpError {
    /// Whether the failed request can reasonably be retried.
    ///
    /// Retryable: the explicit `Timeout` marker, transport-level
    /// timeout/connect failures surfaced by reqwest, and gateway
    /// statuses 502–504.
    pub fn is_retryable(&self) -> bool {
        match self {
            HttpError::Timeout => true,
            // reqwest reports request timeouts and connection failures as
            // `reqwest::Error`; without this arm they were never retried.
            HttpError::Request(e) => e.is_timeout() || e.is_connect(),
            HttpError::UnexpectedStatus { status, .. } => (502..=504).contains(status),
            _ => false,
        }
    }
}
```
## ✅ HTTP CLIENT CHECKLIST
```markdown
### HTTP Client Implementation Verification
- [ ] Uses reqwest with rustls-tls (not native-tls)
- [ ] Compression features enabled (gzip, brotli, deflate)
- [ ] Proper timeout configuration
- [ ] User-Agent header configured
- [ ] Structured error handling with retryable errors
- [ ] Authentication patterns implemented
- [ ] Response type definitions with camelCase
- [ ] Base URL configuration pattern
- [ ] JSON serialization/deserialization
- [ ] Proper status code handling
```
This HTTP client standard ensures robust, secure, and maintainable HTTP communication in Rust applications.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,705 @@
---
description:
globs:
alwaysApply: false
---
# 🛠️ UTILITY LIBRARIES BEST PRACTICES
> **TL;DR:** Essential utility patterns for authentication, CLI tools, data structures, and common development tasks.
## 🔐 AUTHENTICATION AND SECURITY
### JWT with jsonwebtoken
```toml
# Cargo.toml - JWT configuration
[dependencies]
jsonwebtoken = "9.0"
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4", features = ["serde"] }
```
```rust
use jsonwebtoken::{decode, encode, Algorithm, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use chrono::{DateTime, Utc};
/// JWT claim set shared by access and refresh tokens.
///
/// NOTE(review): refresh tokens reuse `user_role` as a `"refresh"` marker,
/// which discards the user's real role — see `refresh_access_token`.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Claims {
    pub sub: String,        // Subject (user ID)
    pub exp: i64,           // Expiration time
    pub iat: i64,           // Issued at
    pub user_role: String,  // Custom claim
    pub session_id: String, // Session identifier
}
/// Access/refresh token pair returned on login and refresh.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TokenPair {
    pub access_token: String,
    pub refresh_token: String,
    pub expires_in: i64, // access-token lifetime in seconds
}
/// Issues and validates HS256-signed JWTs from a shared secret.
pub struct JwtService {
    encoding_key: EncodingKey,
    decoding_key: DecodingKey,
    access_token_expiry: i64,  // seconds
    refresh_token_expiry: i64, // seconds
}
impl JwtService {
pub fn new(secret: &str) -> Self {
Self {
encoding_key: EncodingKey::from_secret(secret.as_bytes()),
decoding_key: DecodingKey::from_secret(secret.as_bytes()),
access_token_expiry: 3600, // 1 hour
refresh_token_expiry: 604800, // 7 days
}
}
pub fn generate_token_pair(&self, user_id: &str, role: &str) -> Result<TokenPair, JwtError> {
let now = Utc::now().timestamp();
let session_id = uuid::Uuid::new_v4().to_string();
// Access token
let access_claims = Claims {
sub: user_id.to_string(),
exp: now + self.access_token_expiry,
iat: now,
user_role: role.to_string(),
session_id: session_id.clone(),
};
let access_token = encode(&Header::default(), &access_claims, &self.encoding_key)?;
// Refresh token (longer expiry, minimal claims)
let refresh_claims = Claims {
sub: user_id.to_string(),
exp: now + self.refresh_token_expiry,
iat: now,
user_role: "refresh".to_string(),
session_id,
};
let refresh_token = encode(&Header::default(), &refresh_claims, &self.encoding_key)?;
Ok(TokenPair {
access_token,
refresh_token,
expires_in: self.access_token_expiry,
})
}
pub fn validate_token(&self, token: &str) -> Result<Claims, JwtError> {
let validation = Validation::new(Algorithm::HS256);
let token_data = decode::<Claims>(token, &self.decoding_key, &validation)?;
Ok(token_data.claims)
}
pub fn refresh_access_token(&self, refresh_token: &str) -> Result<TokenPair, JwtError> {
let claims = self.validate_token(refresh_token)?;
// Verify it's a refresh token
if claims.user_role != "refresh" {
return Err(JwtError::InvalidTokenType);
}
// Generate new token pair
self.generate_token_pair(&claims.sub, "user") // Default role, should be fetched from DB
}
}
/// Errors produced by `JwtService`.
#[derive(thiserror::Error, Debug)]
pub enum JwtError {
    /// Any jsonwebtoken failure (bad signature, malformed token, ...).
    #[error("JWT encoding/decoding error: {0}")]
    Token(#[from] jsonwebtoken::errors::Error),
    /// A refresh operation was attempted with a non-refresh token.
    #[error("Invalid token type")]
    InvalidTokenType,
    /// NOTE(review): declared but never constructed by the visible code —
    /// consider mapping `ErrorKind::ExpiredSignature` to this in `validate_token`.
    #[error("Token expired")]
    Expired,
}
```
## 🖥️ COMMAND LINE INTERFACES
### CLI with clap
```toml
# Cargo.toml - CLI configuration
[dependencies]
clap = { version = "4.0", features = ["derive"] }
anyhow = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.9"
```
```rust
use clap::{Parser, Subcommand, ValueEnum};
use std::path::PathBuf;
/// Top-level CLI definition; global flags apply to every subcommand.
#[derive(Parser)]
#[command(name = "myapp")]
#[command(about = "A comprehensive application with multiple commands")]
#[command(version)]
pub struct Cli {
    /// Global configuration file
    #[arg(short, long, value_name = "FILE")]
    pub config: Option<PathBuf>,
    /// Verbose output (repeat for more detail: -v, -vv, -vvv)
    #[arg(short, long, action = clap::ArgAction::Count)]
    pub verbose: u8,
    /// Output format
    #[arg(long, value_enum, default_value_t = OutputFormat::Text)]
    pub format: OutputFormat,
    #[command(subcommand)]
    pub command: Commands,
}
/// First-level subcommands; each delegates to its own action enum.
#[derive(Subcommand)]
pub enum Commands {
    /// User management commands
    User {
        #[command(subcommand)]
        action: UserAction,
    },
    /// Server operations
    Server {
        #[command(subcommand)]
        action: ServerAction,
    },
    /// Database operations
    Database {
        #[command(subcommand)]
        action: DatabaseAction,
    },
}
/// Actions under `myapp user ...`.
#[derive(Subcommand)]
pub enum UserAction {
    /// Create a new user
    Create {
        /// Username
        #[arg(short, long)]
        username: String,
        /// Email address
        #[arg(short, long)]
        email: String,
        /// User role
        #[arg(short, long, value_enum, default_value_t = UserRole::User)]
        role: UserRole,
    },
    /// List all users
    List {
        /// Maximum number of users to display
        #[arg(short, long, default_value_t = 50)]
        limit: usize,
        /// Filter by role
        #[arg(short, long)]
        role: Option<UserRole>,
    },
    /// Delete a user
    Delete {
        /// User ID or username
        #[arg(short, long)]
        identifier: String,
        /// Force deletion without confirmation
        #[arg(short, long)]
        force: bool,
    },
}
/// Actions under `myapp server ...`.
#[derive(Subcommand)]
pub enum ServerAction {
    /// Start the server
    Start {
        /// Port to bind to
        #[arg(short, long, default_value_t = 8080)]
        port: u16,
        /// Host to bind to
        #[arg(long, default_value = "127.0.0.1")]
        host: String,
    },
    /// Stop the server
    Stop,
    /// Show server status
    Status,
}
/// Actions under `myapp database ...`.
#[derive(Subcommand)]
pub enum DatabaseAction {
    /// Run database migrations
    Migrate {
        /// Migration direction
        #[arg(value_enum, default_value_t = MigrationDirection::Up)]
        direction: MigrationDirection,
    },
    /// Seed the database with test data
    Seed {
        /// Environment to seed
        #[arg(short, long, default_value = "development")]
        env: String,
    },
    /// Reset the database
    Reset {
        /// Skip confirmation prompt
        #[arg(short, long)]
        yes: bool,
    },
}
#[derive(ValueEnum, Clone)]
pub enum OutputFormat {
Text,
Json,
Yaml,
}
#[derive(ValueEnum, Clone)]
pub enum UserRole {
Admin,
User,
Guest,
}
#[derive(ValueEnum, Clone)]
pub enum MigrationDirection {
Up,
Down,
}
// CLI execution logic
/// Parse CLI arguments, configure logging from the `-v` count, and
/// dispatch to the matching subcommand handler.
///
/// NOTE(review): `set_var` mutates process-global state; it must run
/// before `tracing_subscriber::fmt::init()` reads RUST_LOG (it does here).
pub async fn run_cli() -> anyhow::Result<()> {
    let cli = Cli::parse();
    // Initialize logging based on verbosity
    let log_level = match cli.verbose {
        0 => "warn",
        1 => "info",
        2 => "debug",
        _ => "trace",
    };
    std::env::set_var("RUST_LOG", log_level);
    tracing_subscriber::fmt::init();
    match cli.command {
        Commands::User { action } => handle_user_command(action, cli.format).await,
        Commands::Server { action } => handle_server_command(action, cli.format).await,
        Commands::Database { action } => handle_database_command(action, cli.format).await,
    }
}
/// Dispatch a `user` subcommand.
///
/// `_format` is accepted so output rendering can honor the global
/// `--format` flag once implemented; the underscore silences the
/// unused-variable warning the original parameter produced.
async fn handle_user_command(action: UserAction, _format: OutputFormat) -> anyhow::Result<()> {
    match action {
        UserAction::Create { username, email, role } => {
            println!("Creating user: {} ({}) with role: {:?}", username, email, role);
            // Implementation
        }
        UserAction::List { limit, role } => {
            println!("Listing up to {} users", limit);
            if let Some(role) = role {
                println!("Filtering by role: {:?}", role);
            }
            // Implementation
        }
        UserAction::Delete { identifier, force } => {
            if !force {
                println!("Are you sure you want to delete user '{}'? [y/N]", identifier);
                // Confirmation logic
            }
            // Implementation
        }
    }
    Ok(())
}
```
## 🏗️ BUILDER PATTERNS
### Typed Builder
```toml
# Cargo.toml - Builder configuration
[dependencies]
typed-builder = "0.21"
serde = { version = "1.0", features = ["derive"] }
```
```rust
use typed_builder::TypedBuilder;
use serde::{Deserialize, Serialize};
/// User configuration assembled via typed-builder's compile-time-checked
/// builder: missing required fields fail to compile rather than at runtime.
#[derive(Debug, Clone, TypedBuilder, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserConfig {
    /// Required: User's email address
    pub email: String,
    /// Required: Username
    pub username: String,
    /// Optional: Display name (defaults to username)
    /// NOTE(review): confirm this typed-builder version supports
    /// cross-field defaults referencing `self.<earlier field>` — verify
    /// against the typed-builder documentation.
    #[builder(default = self.username.clone())]
    pub display_name: String,
    /// Optional: User role
    #[builder(default = UserRole::User)]
    pub role: UserRole,
    /// Optional: Whether user is active
    #[builder(default = true)]
    pub is_active: bool,
    /// Optional: User preferences
    #[builder(default)]
    pub preferences: UserPreferences,
    /// Optional: Profile image URL (`strip_option` lets callers pass the
    /// inner `String` directly instead of wrapping it in `Some(...)`)
    #[builder(default, setter(strip_option))]
    pub avatar_url: Option<String>,
    /// Optional: User tags (for organization)
    #[builder(default)]
    pub tags: Vec<String>,
}
/// Per-user preference set; builder defaults mirror the manual
/// `impl Default` below so the two construction paths agree.
#[derive(Debug, Clone, TypedBuilder, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserPreferences {
    #[builder(default = String::from("en"))]
    pub language: String, // e.g. "en"
    #[builder(default = String::from("UTC"))]
    pub timezone: String, // e.g. "UTC"
    #[builder(default = true)]
    pub email_notifications: bool,
    #[builder(default = false)]
    pub dark_mode: bool,
}
/// Role assigned to a user account; `User` is the default.
///
/// `#[derive(Default)]` with `#[default]` (stable since Rust 1.62)
/// replaces the hand-written `impl Default` the original carried.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum UserRole {
    Admin,
    #[default]
    User,
    Guest,
}
impl Default for UserPreferences {
fn default() -> Self {
Self {
language: "en".to_string(),
timezone: "UTC".to_string(),
email_notifications: true,
dark_mode: false,
}
}
}
// Usage examples
/// Demonstrates minimal vs. fully-specified builder usage for `UserConfig`.
pub fn create_user_examples() {
    // Only the required fields; everything else falls back to its
    // declared `#[builder(default ...)]`.
    let minimal = UserConfig::builder()
        .email("john@example.com".to_string())
        .username("john_doe".to_string())
        .build();

    // Nested builder usage for the preferences struct, built up front.
    let prefs = UserPreferences::builder()
        .language("en".to_string())
        .timezone("America/New_York".to_string())
        .email_notifications(false)
        .dark_mode(true)
        .build();

    // Every field specified explicitly.
    let full = UserConfig::builder()
        .email("admin@example.com".to_string())
        .username("admin".to_string())
        .display_name("System Administrator".to_string())
        .role(UserRole::Admin)
        .is_active(true)
        .avatar_url("https://example.com/avatar.jpg".to_string())
        .tags(vec!["admin".to_string(), "system".to_string()])
        .preferences(prefs)
        .build();

    println!("User 1: {:?}", minimal);
    println!("User 2: {:?}", full);
}
```
## 🧮 RANDOM GENERATION AND UTILITIES
### Random Data Generation
```toml
# Cargo.toml - Random utilities
[dependencies]
rand = "0.8"
getrandom = "0.3"
uuid = { version = "1.17", features = ["v4", "serde"] }
base64 = "0.22"
```
```rust
use rand::{Rng, thread_rng, distributions::Alphanumeric};
use uuid::Uuid;
use base64::{Engine as _, engine::general_purpose};
/// Stateless helpers for generating random identifiers and secrets.
pub struct RandomGenerator;

impl RandomGenerator {
    /// Generate a random alphanumeric string for API keys, tokens, etc.
    /// (rand 0.8's `ThreadRng` is a cryptographically secure, OS-seeded PRNG.)
    pub fn secure_string(length: usize) -> String {
        thread_rng()
            .sample_iter(&Alphanumeric)
            .take(length)
            .map(char::from)
            .collect()
    }

    /// Generate a UUID v4 as its canonical hyphenated string.
    pub fn uuid() -> String {
        Uuid::new_v4().to_string()
    }

    /// Generate a short, URL-safe ID (first 8 random UUID bytes, base64url).
    pub fn short_id() -> String {
        let uuid_bytes = Uuid::new_v4().as_bytes();
        general_purpose::URL_SAFE_NO_PAD.encode(&uuid_bytes[..8])
    }

    /// Generate a random integer in the inclusive range `[min, max]`.
    pub fn int_range(min: i32, max: i32) -> i32 {
        thread_rng().gen_range(min..=max)
    }

    /// Generate `length` random bytes straight from the OS entropy source.
    pub fn bytes(length: usize) -> Vec<u8> {
        let mut bytes = vec![0u8; length];
        // Bug fix: getrandom 0.3 (as pinned in Cargo.toml) renamed
        // `getrandom::getrandom` to `getrandom::fill`.
        getrandom::fill(&mut bytes).expect("Failed to generate random bytes");
        bytes
    }

    /// Generate a base64-encoded random string from `byte_length` bytes.
    pub fn base64_string(byte_length: usize) -> String {
        let bytes = Self::bytes(byte_length);
        general_purpose::STANDARD.encode(&bytes)
    }

    /// Generate a session ID with a recognizable `sess_` prefix.
    pub fn session_id() -> String {
        format!("sess_{}", Self::secure_string(32))
    }

    /// Generate an API key with a recognizable `ak_` prefix.
    pub fn api_key() -> String {
        format!("ak_{}", Self::base64_string(24))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_secure_string_length() {
        // Alphanumeric chars are single-byte, so byte length == char count.
        let str32 = RandomGenerator::secure_string(32);
        assert_eq!(str32.len(), 32);
        let str64 = RandomGenerator::secure_string(64);
        assert_eq!(str64.len(), 64);
    }

    #[test]
    fn test_uuid_format() {
        let uuid = RandomGenerator::uuid();
        assert!(Uuid::parse_str(&uuid).is_ok());
    }

    #[test]
    fn test_short_id_uniqueness() {
        let id1 = RandomGenerator::short_id();
        let id2 = RandomGenerator::short_id();
        assert_ne!(id1, id2);
        // clippy::len_zero — prefer is_empty() over len() > 0.
        assert!(!id1.is_empty());
    }

    #[test]
    fn test_int_range() {
        // Sampled values must stay within the inclusive bounds.
        for _ in 0..100 {
            let val = RandomGenerator::int_range(1, 10);
            assert!((1..=10).contains(&val));
        }
    }
}
```
## 📊 ENHANCED DERIVE MACROS
### Using derive_more
```toml
# Cargo.toml - Enhanced derives
[dependencies]
derive_more = { version = "2", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
```
```rust
use derive_more::{Display, Error, From, Into, Constructor, Deref, DerefMut};
use serde::{Deserialize, Serialize};
// Custom string wrapper with validation
// Bug fix: the original also derived `From<String>`, which (a) bypasses
// validation entirely and (b) collides with the manual `TryFrom<String>`
// below via std's blanket `impl<T, U: Into<T>> TryFrom<U> for T` (E0119).
#[derive(Debug, Clone, Display, Into, Deref, Serialize, Deserialize)]
#[serde(try_from = "String")]
pub struct EmailAddress(String);

impl TryFrom<String> for EmailAddress {
    type Error = ValidationError;

    /// Minimal sanity check ('@' present, length > 5); real-world email
    /// validation should be stricter.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        if value.contains('@') && value.len() > 5 {
            Ok(EmailAddress(value))
        } else {
            Err(ValidationError::InvalidEmail)
        }
    }
}
// Enhanced error types
#[derive(Debug, Display, Error)]
pub enum ServiceError {
#[display(fmt = "User not found: {}", user_id)]
UserNotFound { user_id: String },
#[display(fmt = "Database error: {}", source)]
Database {
#[error(source)]
source: sqlx::Error
},
#[display(fmt = "Validation failed: {}", field)]
Validation { field: String },
#[display(fmt = "Authentication failed")]
Authentication,
}
// Same derive_more 2 display syntax fix as `ServiceError` above:
// inline interpolation replaces `#[display(fmt = "...")]`.
#[derive(Debug, Display, Error)]
pub enum ValidationError {
    #[display("Invalid email format")]
    InvalidEmail,
    #[display("Field '{field}' is required")]
    Required { field: String },
    #[display("Value '{value}' is too long (max: {max})")]
    TooLong { value: String, max: usize },
}
// Constructor patterns
// derive_more's `Constructor` generates `new(...)` over *all* fields.
// Bug fix: the original carried `#[new(default)]`, which belongs to the
// derive-new crate and is not a valid attribute here.
#[derive(Debug, Clone, Constructor, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserSession {
    pub user_id: String,
    pub session_id: String,
    pub created_at: chrono::DateTime<chrono::Utc>,
    pub expires_at: chrono::DateTime<chrono::Utc>,
    // Passed explicitly to `new`; there is no per-field default with
    // derive_more's Constructor.
    pub is_active: bool,
}
// Wrapper types with automatic conversions
// Newtype ids: `From`/`Into` give cheap conversions to/from `String`;
// `Deref`/`DerefMut` let callers use `&str`/`String` methods directly.
#[derive(Debug, Clone, From, Into, Deref, DerefMut, Serialize, Deserialize)]
pub struct UserId(String);

#[derive(Debug, Clone, From, Into, Deref, DerefMut, Serialize, Deserialize)]
pub struct SessionToken(String);
impl UserId {
    /// Fresh random user id (UUID v4).
    pub fn new() -> Self {
        Self(RandomGenerator::uuid())
    }
}

// clippy::new_without_default — a no-argument `new()` should be paired
// with a `Default` impl so the types compose with derived `Default`.
impl Default for UserId {
    fn default() -> Self {
        Self::new()
    }
}

impl SessionToken {
    /// Fresh random session token with the `sess_` prefix.
    pub fn new() -> Self {
        Self(RandomGenerator::session_id())
    }
}

impl Default for SessionToken {
    fn default() -> Self {
        Self::new()
    }
}
// Usage examples
/// Walks through the enhanced types: validated newtype, generated
/// constructor, and wrapper ids.
pub fn demonstrate_enhanced_types() -> Result<(), Box<dyn std::error::Error>> {
    // Email validation
    let email = EmailAddress::try_from("user@example.com".to_string())?;
    println!("Valid email: {}", email);
    // Constructor usage: derive_more's `Constructor` generates `new` over
    // *all* fields, so `is_active` must be passed explicitly — the original
    // 4-argument call did not match the generated 5-argument signature.
    let session = UserSession::new(
        "user_123".to_string(),
        "sess_abc".to_string(),
        chrono::Utc::now(),
        chrono::Utc::now() + chrono::Duration::hours(24),
        true,
    );
    println!("Session: {:?}", session);
    // Wrapper types
    let user_id = UserId::new();
    let token = SessionToken::new();
    println!("User ID: {}, Token: {}", *user_id, *token);
    Ok(())
}
```
## 🚨 UTILITIES ANTI-PATTERNS
### What to Avoid
```rust
// ❌ Don't use outdated JWT libraries
// use frank_jwt; // Use jsonwebtoken instead
// ❌ Don't use structopt (deprecated)
// use structopt::StructOpt; // Use clap with derive instead
// ❌ Don't manually implement builders
// pub struct ConfigBuilder {
// field1: Option<String>,
// field2: Option<i32>,
// } // Use typed-builder instead
// ❌ Don't derive secrets from a single random integer
// let password = thread_rng().gen::<u64>().to_string(); // Too little entropy for a secret — generate enough random bytes (e.g. via getrandom) instead
// ❌ Don't ignore JWT validation
// let claims = decode::<Claims>(token, key, &Validation::default()); // Configure properly
```
## ✅ UTILITIES CHECKLIST
```markdown
### Utilities Implementation Verification
- [ ] JWT authentication with proper validation and expiry
- [ ] CLI with comprehensive subcommands and help text
- [ ] Builder patterns using typed-builder
- [ ] Enhanced error types with derive_more
- [ ] Secure random generation for sensitive data
- [ ] Proper validation for wrapper types
- [ ] Constructor patterns for complex types
- [ ] Base64 encoding for binary data
- [ ] UUID generation for identifiers
- [ ] Comprehensive error handling
- [ ] Input validation and sanitization
- [ ] Type safety with wrapper types
```
This utilities standard provides robust patterns for common development tasks while maintaining type safety and security best practices.