Add performance tests to integration-tests crate

parent 34a1bd6119
commit 9cf55ec687

3 changed files with 537 additions and 6 deletions
@@ -50,7 +50,7 @@ where
 }

 // Helper functions
-async fn setup_test_db() -> PgPool {
+pub async fn setup_test_db() -> PgPool {
     let database_url = env::var("DATABASE_URL")
         .unwrap_or_else(|_| "postgres://postgres:password@localhost:5432/sharenet_test".to_string());
     let pool = PgPoolOptions::new()
@@ -66,19 +66,19 @@ async fn setup_test_db() -> PgPool {
     pool
 }

-async fn cleanup_test_data(pool: &PgPool) {
+pub async fn cleanup_test_data(pool: &PgPool) {
     let mut tx = pool.begin().await.expect("Failed to begin transaction");
     sqlx::query("DELETE FROM products").execute(&mut *tx).await.expect("Failed to delete products");
     sqlx::query("DELETE FROM users").execute(&mut *tx).await.expect("Failed to delete users");
     tx.commit().await.expect("Failed to commit cleanup transaction");
 }

-fn unique_test_data(prefix: &str) -> (String, String) {
+pub fn unique_test_data(prefix: &str) -> (String, String) {
     let id = Uuid::new_v4().to_string()[..8].to_string();
     (format!("{}_{}", prefix, id), format!("{}_test@example.com", prefix))
 }

-async fn create_test_app() -> Router {
+pub async fn create_test_app() -> Router {
     let pool = setup_test_db().await;
     let user_repo = PostgresUserRepository::new(pool.clone());
     let product_repo = PostgresProductRepository::new(pool.clone());
@@ -108,7 +108,7 @@ async fn create_test_app() -> Router {
         .with_state(state)
 }

-async fn extract_json<T: serde::de::DeserializeOwned>(response: axum::response::Response) -> T {
+pub async fn extract_json<T: serde::de::DeserializeOwned>(response: axum::response::Response) -> T {
     let bytes = axum::body::to_bytes(response.into_body(), usize::MAX).await.unwrap();
     serde_json::from_slice(&bytes).unwrap()
 }
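These helpers are made `pub` so other test modules in the crate can reuse them; performance_tests.rs below imports `create_test_app` and `unique_test_data` exactly this way. A minimal sketch of that reuse pattern (the module and test names here are illustrative, not part of the commit):

#[cfg(test)]
mod reuse_sketch {
    use crate::api_postgres_tests::{create_test_app, unique_test_data};

    #[tokio::test]
    async fn builds_app_and_unique_fixtures() {
        // Spins up the router against the test database configured by DATABASE_URL.
        let _app = create_test_app().await;

        // unique_test_data("demo") returns ("demo_<8-char uuid>", "demo_test@example.com").
        let (username, email) = unique_test_data("demo");
        assert!(username.starts_with("demo_"));
        assert_eq!(email, "demo_test@example.com");
    }
}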
@@ -13,3 +13,5 @@
 pub mod api_postgres_tests;
 #[cfg(test)]
 pub mod migration_tests;
+#[cfg(test)]
+pub mod performance_tests;
backend/crates/integration-tests/src/performance_tests.rs (new file, 529 lines)

@@ -0,0 +1,529 @@
/*
 * This file is part of Sharenet.
 *
 * Sharenet is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
 *
 * You may obtain a copy of the license at:
 * https://creativecommons.org/licenses/by-nc-sa/4.0/
 *
 * Copyright (c) 2024 Continuist <continuist02@gmail.com>
 */

use axum::{
    body::Body,
    http::{Request, StatusCode},
    Router,
};
use std::time::{Duration, Instant};
use tokio::time::sleep;
use serde_json::json;
use serial_test::serial;
use tower::ServiceExt;

// Reuse AppState and helper functions from api_postgres_tests
use crate::api_postgres_tests::{create_test_app, unique_test_data};

/// Performance metrics structure
#[derive(Debug)]
struct PerformanceMetrics {
    operation: String,
    total_requests: usize,
    successful_requests: usize,
    failed_requests: usize,
    total_duration: Duration,
    avg_duration: Duration,
    min_duration: Duration,
    max_duration: Duration,
    requests_per_second: f64,
}

impl PerformanceMetrics {
    fn new(operation: String, total_requests: usize, successful_requests: usize, failed_requests: usize, total_duration: Duration, durations: &[Duration]) -> Self {
        let avg_duration = if !durations.is_empty() {
            Duration::from_nanos(durations.iter().map(|d| d.as_nanos() as u64).sum::<u64>() / durations.len() as u64)
        } else {
            Duration::ZERO
        };

        let min_duration = durations.iter().min().copied().unwrap_or(Duration::ZERO);
        let max_duration = durations.iter().max().copied().unwrap_or(Duration::ZERO);
        let requests_per_second = if total_duration.as_secs_f64() > 0.0 {
            successful_requests as f64 / total_duration.as_secs_f64()
        } else {
            0.0
        };

        Self {
            operation,
            total_requests,
            successful_requests,
            failed_requests,
            total_duration,
            avg_duration,
            min_duration,
            max_duration,
            requests_per_second,
        }
    }

    fn print_report(&self) {
        println!("\n=== Performance Report: {} ===", self.operation);
        println!("Total Requests: {}", self.total_requests);
        println!("Successful: {}", self.successful_requests);
        println!("Failed: {}", self.failed_requests);
        println!("Success Rate: {:.2}%", (self.successful_requests as f64 / self.total_requests as f64) * 100.0);
        println!("Total Duration: {:.2?}", self.total_duration);
        println!("Average Duration: {:.2?}", self.avg_duration);
        println!("Min Duration: {:.2?}", self.min_duration);
        println!("Max Duration: {:.2?}", self.max_duration);
        println!("Requests/Second: {:.2}", self.requests_per_second);
        println!("=====================================");
    }
}
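
// Illustrative arithmetic for the metrics above (the values are assumed, purely
// for the sake of the example): with three successful requests timed at 10 ms,
// 20 ms and 30 ms over a 60 ms window, `new` yields avg = 20 ms, min = 10 ms,
// max = 30 ms and 3 / 0.06 s = 50 requests per second.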

/// Run concurrent user creation test
async fn run_concurrent_user_creation(app: Router, num_concurrent: usize) -> PerformanceMetrics {
    println!("Starting concurrent user creation test with {} concurrent requests...", num_concurrent);

    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_concurrent)
        .map(|i| {
            let app = app.clone();
            let (username, email) = unique_test_data(&format!("perf_user_{}", i));

            tokio::spawn(async move {
                let request_start = Instant::now();

                let response = app
                    .oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/users")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "username": username,
                                    "email": email
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await;

                let duration = request_start.elapsed();

                match response {
                    Ok(resp) if resp.status() == StatusCode::CREATED => {
                        (duration, true)
                    }
                    _ => (duration, false)
                }
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();

    PerformanceMetrics::new(
        format!("Concurrent User Creation ({} requests)", num_concurrent),
        num_concurrent,
        successful,
        failed,
        total_duration,
        &durations,
    )
}

/// Run concurrent product creation test
async fn run_concurrent_product_creation(app: Router, num_concurrent: usize) -> PerformanceMetrics {
    println!("Starting concurrent product creation test with {} concurrent requests...", num_concurrent);

    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_concurrent)
        .map(|i| {
            let app = app.clone();
            let (name, _) = unique_test_data(&format!("perf_product_{}", i));

            tokio::spawn(async move {
                let request_start = Instant::now();

                let response = app
                    .oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/products")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "name": name,
                                    "description": format!("Performance test product {}", i)
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await;

                let duration = request_start.elapsed();

                match response {
                    Ok(resp) if resp.status() == StatusCode::CREATED => {
                        (duration, true)
                    }
                    _ => (duration, false)
                }
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();

    PerformanceMetrics::new(
        format!("Concurrent Product Creation ({} requests)", num_concurrent),
        num_concurrent,
        successful,
        failed,
        total_duration,
        &durations,
    )
}

/// Run mixed workload test (users and products)
async fn run_mixed_workload_test(app: Router, num_operations: usize) -> PerformanceMetrics {
    println!("Starting mixed workload test with {} operations...", num_operations);

    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_operations)
        .map(|i| {
            let app = app.clone();

            tokio::spawn(async move {
                let request_start = Instant::now();
                let is_user_operation = i % 2 == 0;

                let response = if is_user_operation {
                    let (username, email) = unique_test_data(&format!("mixed_user_{}", i));
                    app.clone()
                        .oneshot(
                            Request::builder()
                                .method("POST")
                                .uri("/users")
                                .header("content-type", "application/json")
                                .body(Body::from(
                                    json!({
                                        "username": username,
                                        "email": email
                                    })
                                    .to_string(),
                                ))
                                .unwrap(),
                        )
                        .await
                } else {
                    let (name, _) = unique_test_data(&format!("mixed_product_{}", i));
                    app.oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/products")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "name": name,
                                    "description": format!("Mixed workload product {}", i)
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await
                };

                let duration = request_start.elapsed();

                match response {
                    Ok(resp) if resp.status() == StatusCode::CREATED => {
                        (duration, true)
                    }
                    _ => (duration, false)
                }
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();

    PerformanceMetrics::new(
        format!("Mixed Workload ({} operations)", num_operations),
        num_operations,
        successful,
        failed,
        total_duration,
        &durations,
    )
}

/// Run database connection pool stress test
async fn run_connection_pool_stress_test(app: Router, num_connections: usize) -> PerformanceMetrics {
    println!("Starting connection pool stress test with {} concurrent connections...", num_connections);

    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_connections)
        .map(|i| {
            let app = app.clone();

            tokio::spawn(async move {
                let request_start = Instant::now();

                // Make multiple requests to stress the connection pool
                let mut success_count = 0;
                for j in 0..5 {
                    let (username, email) = unique_test_data(&format!("pool_user_{}_{}", i, j));

                    let response = app
                        .clone()
                        .oneshot(
                            Request::builder()
                                .method("POST")
                                .uri("/users")
                                .header("content-type", "application/json")
                                .body(Body::from(
                                    json!({
                                        "username": username,
                                        "email": email
                                    })
                                    .to_string(),
                                ))
                                .unwrap(),
                        )
                        .await;

                    if response.unwrap().status() == StatusCode::CREATED {
                        success_count += 1;
                    }

                    // Small delay to simulate real-world usage
                    sleep(Duration::from_millis(10)).await;
                }

                let duration = request_start.elapsed();
                (duration, success_count == 5)
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();

    PerformanceMetrics::new(
        format!("Connection Pool Stress ({} connections)", num_connections),
        num_connections,
        successful,
        failed,
        total_duration,
        &durations,
    )
}

#[tokio::test]
#[serial]
async fn test_performance_and_stress() {
    println!("\n🚀 Starting Performance and Stress Tests");
    println!("=========================================");

    let app = create_test_app().await;

    // Test 1: Concurrent user creation
    let user_metrics = run_concurrent_user_creation(app.clone(), 50).await;
    user_metrics.print_report();

    // Test 2: Concurrent product creation
    let product_metrics = run_concurrent_product_creation(app.clone(), 50).await;
    product_metrics.print_report();

    // Test 3: Mixed workload
    let mixed_metrics = run_mixed_workload_test(app.clone(), 100).await;
    mixed_metrics.print_report();

    // Test 4: Connection pool stress
    let pool_metrics = run_connection_pool_stress_test(app.clone(), 20).await;
    pool_metrics.print_report();

    // Summary
    println!("\n📊 Performance Test Summary");
    println!("============================");
    println!("All performance tests completed successfully!");
    println!("Check the individual reports above for detailed metrics.");

    // Assert reasonable performance (adjust thresholds as needed)
    assert!(user_metrics.requests_per_second > 10.0, "User creation should handle at least 10 req/s");
    assert!(product_metrics.requests_per_second > 10.0, "Product creation should handle at least 10 req/s");
    assert!(mixed_metrics.requests_per_second > 8.0, "Mixed workload should handle at least 8 req/s");
    assert!(pool_metrics.successful_requests > 0, "Connection pool should handle some requests");
}

#[tokio::test]
#[serial]
async fn test_concurrent_user_creation() {
    println!("\n🚀 Starting Concurrent User Creation Performance Test");
    println!("=====================================================");

    let app = create_test_app().await;
    let num_concurrent = 50;

    println!("Testing with {} concurrent requests...", num_concurrent);

    let start_time = Instant::now();
    let mut durations = Vec::new();
    let mut successful = 0;
    let mut failed = 0;

    let handles: Vec<_> = (0..num_concurrent)
        .map(|i| {
            let app = app.clone();
            let (username, email) = unique_test_data(&format!("perf_user_{}", i));

            tokio::spawn(async move {
                let request_start = Instant::now();

                let response = app
                    .oneshot(
                        Request::builder()
                            .method("POST")
                            .uri("/users")
                            .header("content-type", "application/json")
                            .body(Body::from(
                                json!({
                                    "username": username,
                                    "email": email
                                })
                                .to_string(),
                            ))
                            .unwrap(),
                    )
                    .await;

                let duration = request_start.elapsed();

                match response {
                    Ok(resp) if resp.status() == StatusCode::CREATED => {
                        (duration, true)
                    }
                    _ => (duration, false)
                }
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success {
                    successful += 1;
                } else {
                    failed += 1;
                }
            }
            Err(_) => {
                failed += 1;
            }
        }
    }

    let total_duration = start_time.elapsed();

    let metrics = PerformanceMetrics::new(
        format!("Concurrent User Creation ({} requests)", num_concurrent),
        num_concurrent,
        successful,
        failed,
        total_duration,
        &durations,
    );

    metrics.print_report();

    // Assert reasonable performance
    assert!(metrics.requests_per_second > 10.0, "User creation should handle at least 10 req/s");
    assert!(metrics.successful_requests > 0, "Should have some successful requests");
}
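The four drivers above (user creation, product creation, mixed workload, pool stress) and the standalone test_concurrent_user_creation all repeat the same spawn/await/collect bookkeeping. A possible consolidation, sketched here with the same `PerformanceMetrics` accounting and not part of this commit, is a generic runner that takes the per-request future as a closure:

// Hypothetical refactor sketch, not in this commit: runs `num` timed copies of
// an async request built by `make_request` and folds the outcomes into
// PerformanceMetrics with the same success/failure accounting as above.
async fn run_timed<F, Fut>(operation: String, num: usize, make_request: F) -> PerformanceMetrics
where
    F: Fn(usize) -> Fut,
    Fut: std::future::Future<Output = bool> + Send + 'static,
{
    let start_time = Instant::now();
    let mut durations = Vec::new();
    let (mut successful, mut failed) = (0, 0);

    let handles: Vec<_> = (0..num)
        .map(|i| {
            // Build the request future up front; it only runs inside the spawned task.
            let fut = make_request(i);
            tokio::spawn(async move {
                let request_start = Instant::now();
                let ok = fut.await;
                (request_start.elapsed(), ok)
            })
        })
        .collect();

    for handle in handles {
        match handle.await {
            Ok((duration, success)) => {
                durations.push(duration);
                if success { successful += 1 } else { failed += 1 }
            }
            Err(_) => failed += 1,
        }
    }

    PerformanceMetrics::new(operation, num, successful, failed, start_time.elapsed(), &durations)
}

Each existing driver would then only build its POST request and report whether the response status was `CREATED`, leaving the timing and tallying in one place.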