sharenet/backend/crates/integration-tests/src/performance_tests.rs
continuist 61117b6fa6
Some checks are pending
CI/CD Pipeline / Test Backend (push) Waiting to run
CI/CD Pipeline / Test Frontend (push) Waiting to run
CI/CD Pipeline / Build and Push Docker Images (push) Blocked by required conditions
CI/CD Pipeline / Deploy to Production (push) Blocked by required conditions
Created test_setup.rs file that consolidates test setup code for interface tests, including db migrations
2025-06-28 01:57:12 -04:00

532 lines
No EOL
18 KiB
Rust

/*
* This file is part of Sharenet.
*
* Sharenet is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
*
* You may obtain a copy of the license at:
* https://creativecommons.org/licenses/by-nc-sa/4.0/
*
* Copyright (c) 2024 Continuist <continuist02@gmail.com>
*/
use axum::{
body::Body,
http::{Request, StatusCode},
Router,
};
use std::time::{Duration, Instant};
use tokio::time::sleep;
use serde_json::json;
use serial_test::serial;
use tower::ServiceExt;
// Import the centralized test setup
use crate::test_setup::unique_test_data;
// Reuse AppState and helper functions from api_postgres_tests
use crate::api_postgres_tests::create_test_app;
/// Performance metrics structure
///
/// Aggregated results for one load-test scenario; built via
/// `PerformanceMetrics::new` and rendered with `print_report`.
#[derive(Debug)]
struct PerformanceMetrics {
    /// Human-readable label for the scenario being measured.
    operation: String,
    /// Number of requests attempted (successful + failed).
    total_requests: usize,
    /// Requests that completed with the expected 201 Created status.
    successful_requests: usize,
    /// Requests that errored, panicked, or returned an unexpected status.
    failed_requests: usize,
    /// Wall-clock time for the whole scenario (first spawn to last join).
    total_duration: Duration,
    /// Mean of the individual request durations (ZERO when no samples).
    avg_duration: Duration,
    /// Fastest individual request (ZERO when no samples).
    min_duration: Duration,
    /// Slowest individual request (ZERO when no samples).
    max_duration: Duration,
    /// Successful requests divided by total wall-clock seconds.
    requests_per_second: f64,
}
impl PerformanceMetrics {
    /// Build a metrics record from raw per-request timing samples.
    ///
    /// `durations` holds one sample per request that actually completed
    /// (panicked tasks contribute to `failed_requests` but no sample).
    fn new(
        operation: String,
        total_requests: usize,
        successful_requests: usize,
        failed_requests: usize,
        total_duration: Duration,
        durations: &[Duration],
    ) -> Self {
        // Sum `Duration`s directly (128-bit nanoseconds internally) instead of
        // casting each sample's `as_nanos()` to u64, which could silently
        // truncate. `Duration: Div<u32>` gives an exact mean.
        let avg_duration = if durations.is_empty() {
            Duration::ZERO
        } else {
            durations.iter().sum::<Duration>() / durations.len() as u32
        };
        let min_duration = durations.iter().min().copied().unwrap_or(Duration::ZERO);
        let max_duration = durations.iter().max().copied().unwrap_or(Duration::ZERO);
        // Guard against a zero-length run to avoid dividing by zero.
        let requests_per_second = if total_duration.as_secs_f64() > 0.0 {
            successful_requests as f64 / total_duration.as_secs_f64()
        } else {
            0.0
        };
        Self {
            operation,
            total_requests,
            successful_requests,
            failed_requests,
            total_duration,
            avg_duration,
            min_duration,
            max_duration,
            requests_per_second,
        }
    }

    /// Print a human-readable report of this scenario's metrics to stdout.
    fn print_report(&self) {
        println!("\n=== Performance Report: {} ===", self.operation);
        println!("Total Requests: {}", self.total_requests);
        println!("Successful: {}", self.successful_requests);
        println!("Failed: {}", self.failed_requests);
        // Avoid a NaN success rate when no requests were issued at all.
        let success_rate = if self.total_requests == 0 {
            0.0
        } else {
            (self.successful_requests as f64 / self.total_requests as f64) * 100.0
        };
        println!("Success Rate: {:.2}%", success_rate);
        println!("Total Duration: {:.2?}", self.total_duration);
        println!("Average Duration: {:.2?}", self.avg_duration);
        println!("Min Duration: {:.2?}", self.min_duration);
        println!("Max Duration: {:.2?}", self.max_duration);
        println!("Requests/Second: {:.2}", self.requests_per_second);
        println!("=====================================");
    }
}
/// Run concurrent user creation test
async fn run_concurrent_user_creation(app: Router, num_concurrent: usize) -> PerformanceMetrics {
println!("Starting concurrent user creation test with {} concurrent requests...", num_concurrent);
let start_time = Instant::now();
let mut durations = Vec::new();
let mut successful = 0;
let mut failed = 0;
let handles: Vec<_> = (0..num_concurrent)
.map(|i| {
let app = app.clone();
let (username, email) = unique_test_data(&format!("perf_user_{}", i));
tokio::spawn(async move {
let request_start = Instant::now();
let response = app
.oneshot(
Request::builder()
.method("POST")
.uri("/users")
.header("content-type", "application/json")
.body(Body::from(
json!({
"username": username,
"email": email
})
.to_string(),
))
.unwrap(),
)
.await;
let duration = request_start.elapsed();
match response {
Ok(resp) if resp.status() == StatusCode::CREATED => {
(duration, true)
}
_ => (duration, false)
}
})
})
.collect();
for handle in handles {
match handle.await {
Ok((duration, success)) => {
durations.push(duration);
if success {
successful += 1;
} else {
failed += 1;
}
}
Err(_) => {
failed += 1;
}
}
}
let total_duration = start_time.elapsed();
PerformanceMetrics::new(
format!("Concurrent User Creation ({} requests)", num_concurrent),
num_concurrent,
successful,
failed,
total_duration,
&durations,
)
}
/// Run concurrent product creation test
async fn run_concurrent_product_creation(app: Router, num_concurrent: usize) -> PerformanceMetrics {
println!("Starting concurrent product creation test with {} concurrent requests...", num_concurrent);
let start_time = Instant::now();
let mut durations = Vec::new();
let mut successful = 0;
let mut failed = 0;
let handles: Vec<_> = (0..num_concurrent)
.map(|i| {
let app = app.clone();
let (name, _) = unique_test_data(&format!("perf_product_{}", i));
tokio::spawn(async move {
let request_start = Instant::now();
let response = app
.oneshot(
Request::builder()
.method("POST")
.uri("/products")
.header("content-type", "application/json")
.body(Body::from(
json!({
"name": name,
"description": format!("Performance test product {}", i)
})
.to_string(),
))
.unwrap(),
)
.await;
let duration = request_start.elapsed();
match response {
Ok(resp) if resp.status() == StatusCode::CREATED => {
(duration, true)
}
_ => (duration, false)
}
})
})
.collect();
for handle in handles {
match handle.await {
Ok((duration, success)) => {
durations.push(duration);
if success {
successful += 1;
} else {
failed += 1;
}
}
Err(_) => {
failed += 1;
}
}
}
let total_duration = start_time.elapsed();
PerformanceMetrics::new(
format!("Concurrent Product Creation ({} requests)", num_concurrent),
num_concurrent,
successful,
failed,
total_duration,
&durations,
)
}
/// Run mixed workload test (users and products)
///
/// Alternates between user creation (even indices) and product creation
/// (odd indices) so both endpoints are exercised under concurrent load.
async fn run_mixed_workload_test(app: Router, num_operations: usize) -> PerformanceMetrics {
    println!("Starting mixed workload test with {} operations...", num_operations);
    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_operations)
        .map(|i| {
            let app = app.clone();
            tokio::spawn(async move {
                let request_start = Instant::now();
                let is_user_operation = i % 2 == 0;
                // `app` is moved into this task and consumed by exactly one of
                // the two branches, so no extra clone is needed (the previous
                // version cloned it redundantly in the user branch).
                let response = if is_user_operation {
                    let (username, email) = unique_test_data(&format!("mixed_user_{}", i));
                    app.oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/users")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "username": username,
                                    "email": email
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await
                } else {
                    let (name, _) = unique_test_data(&format!("mixed_product_{}", i));
                    app.oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/products")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "name": name,
                                    "description": format!("Mixed workload product {}", i)
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await
                };
                let duration = request_start.elapsed();
                // Only a 201 Created response counts as a success.
                match response {
                    Ok(resp) if resp.status() == StatusCode::CREATED => (duration, true),
                    _ => (duration, false),
                }
            })
        })
        .collect();

    // Join all tasks; a panicked task counts as a failure with no timing sample.
    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();
    PerformanceMetrics::new(
        format!("Mixed Workload ({} operations)", num_operations),
        num_operations,
        successful,
        failed,
        total_duration,
        &durations,
    )
}
/// Run database connection pool stress test
async fn run_connection_pool_stress_test(app: Router, num_connections: usize) -> PerformanceMetrics {
println!("Starting connection pool stress test with {} concurrent connections...", num_connections);
let start_time = Instant::now();
let mut durations = Vec::new();
let mut successful = 0;
let mut failed = 0;
let handles: Vec<_> = (0..num_connections)
.map(|i| {
let app = app.clone();
tokio::spawn(async move {
let request_start = Instant::now();
// Make multiple requests to stress the connection pool
let mut success_count = 0;
for j in 0..5 {
let (username, email) = unique_test_data(&format!("pool_user_{}_{}", i, j));
let response = app
.clone()
.oneshot(
Request::builder()
.method("POST")
.uri("/users")
.header("content-type", "application/json")
.body(Body::from(
json!({
"username": username,
"email": email
})
.to_string(),
))
.unwrap(),
)
.await;
if response.unwrap().status() == StatusCode::CREATED {
success_count += 1;
}
// Small delay to simulate real-world usage
sleep(Duration::from_millis(10)).await;
}
let duration = request_start.elapsed();
(duration, success_count == 5)
})
})
.collect();
for handle in handles {
match handle.await {
Ok((duration, success)) => {
durations.push(duration);
if success {
successful += 1;
} else {
failed += 1;
}
}
Err(_) => {
failed += 1;
}
}
}
let total_duration = start_time.elapsed();
PerformanceMetrics::new(
format!("Connection Pool Stress ({} connections)", num_connections),
num_connections,
successful,
failed,
total_duration,
&durations,
)
}
#[tokio::test]
#[serial]
/// End-to-end performance suite: runs all four load scenarios in sequence
/// against one app instance and asserts minimum throughput thresholds.
async fn test_performance_and_stress() {
    println!("\n🚀 Starting Performance and Stress Tests");
    println!("=========================================");
    let app = create_test_app().await;

    // Scenario 1: burst of concurrent user creations.
    let user_report = run_concurrent_user_creation(app.clone(), 50).await;
    user_report.print_report();

    // Scenario 2: burst of concurrent product creations.
    let product_report = run_concurrent_product_creation(app.clone(), 50).await;
    product_report.print_report();

    // Scenario 3: interleaved user/product operations.
    let mixed_report = run_mixed_workload_test(app.clone(), 100).await;
    mixed_report.print_report();

    // Scenario 4: sustained pressure on the DB connection pool.
    let pool_report = run_connection_pool_stress_test(app.clone(), 20).await;
    pool_report.print_report();

    println!("\n📊 Performance Test Summary");
    println!("============================");
    println!("All performance tests completed successfully!");
    println!("Check the individual reports above for detailed metrics.");

    // Assert reasonable performance (adjust thresholds as needed)
    assert!(user_report.requests_per_second > 10.0, "User creation should handle at least 10 req/s");
    assert!(product_report.requests_per_second > 10.0, "Product creation should handle at least 10 req/s");
    assert!(mixed_report.requests_per_second > 8.0, "Mixed workload should handle at least 8 req/s");
    assert!(pool_report.successful_requests > 0, "Connection pool should handle some requests");
}
#[tokio::test]
#[serial]
async fn test_concurrent_user_creation() {
println!("\n🚀 Starting Concurrent User Creation Performance Test");
println!("=====================================================");
let app = create_test_app().await;
let num_concurrent = 50;
println!("Testing with {} concurrent requests...", num_concurrent);
let start_time = Instant::now();
let mut durations = Vec::new();
let mut successful = 0;
let mut failed = 0;
let handles: Vec<_> = (0..num_concurrent)
.map(|i| {
let app = app.clone();
let (username, email) = unique_test_data(&format!("perf_user_{}", i));
tokio::spawn(async move {
let request_start = Instant::now();
let response = app
.oneshot(
Request::builder()
.method("POST")
.uri("/users")
.header("content-type", "application/json")
.body(Body::from(
json!({
"username": username,
"email": email
})
.to_string(),
))
.unwrap(),
)
.await;
let duration = request_start.elapsed();
match response {
Ok(resp) if resp.status() == StatusCode::CREATED => {
(duration, true)
}
_ => (duration, false)
}
})
})
.collect();
for handle in handles {
match handle.await {
Ok((duration, success)) => {
durations.push(duration);
if success {
successful += 1;
} else {
failed += 1;
}
}
Err(_) => {
failed += 1;
}
}
}
let total_duration = start_time.elapsed();
let metrics = PerformanceMetrics::new(
format!("Concurrent User Creation ({} requests)", num_concurrent),
num_concurrent,
successful,
failed,
total_duration,
&durations,
);
metrics.print_report();
// Assert reasonable performance
assert!(metrics.requests_per_second > 10.0, "User creation should handle at least 10 req/s");
assert!(metrics.successful_requests > 0, "Should have some successful requests");
}