From f4adafe2bd80ee41f10f05ae9698658ebecb864d Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Sat, 28 Jun 2025 19:25:15 +0000
Subject: [PATCH] feat(tests): add regression tests and better sql type safety tests

---
 src/routes/queue.rs                  |   2 +-
 src/tests/mod.rs                     |   3 +
 src/tests/regression_tests.rs        | 119 +++++++++++
 src/tests/route_compilation_tests.rs |  71 +++++++
 src/tests/sql_type_safety_tests.rs   | 284 +++++++++++++++++++++++++++
 5 files changed, 478 insertions(+), 1 deletion(-)
 create mode 100644 src/tests/regression_tests.rs
 create mode 100644 src/tests/route_compilation_tests.rs
 create mode 100644 src/tests/sql_type_safety_tests.rs

diff --git a/src/routes/queue.rs b/src/routes/queue.rs
index 7b7ef4e..ddd8804 100644
--- a/src/routes/queue.rs
+++ b/src/routes/queue.rs
@@ -10,7 +10,7 @@ use std::sync::Arc;
 
 use crate::{auth::AuthUser, ocr::queue::OcrQueueService, AppState, models::UserRole};
 
-fn require_admin(auth_user: &AuthUser) -> Result<(), StatusCode> {
+pub fn require_admin(auth_user: &AuthUser) -> Result<(), StatusCode> {
     if auth_user.user.role != UserRole::Admin {
         Err(StatusCode::FORBIDDEN)
     } else {
diff --git a/src/tests/mod.rs b/src/tests/mod.rs
index e26cd3f..42bdc86 100644
--- a/src/tests/mod.rs
+++ b/src/tests/mod.rs
@@ -11,6 +11,9 @@ mod ocr_tests;
 mod enhanced_ocr_tests;
 mod oidc_tests;
 mod enhanced_search_tests;
+mod regression_tests;
+mod route_compilation_tests;
 mod settings_tests;
+mod sql_type_safety_tests;
 mod users_tests;
 mod generic_migration_tests;
diff --git a/src/tests/regression_tests.rs b/src/tests/regression_tests.rs
new file mode 100644
index 0000000..b62eb6f
--- /dev/null
+++ b/src/tests/regression_tests.rs
@@ -0,0 +1,119 @@
+/// Regression tests specifically targeting the issues that weren't caught before:
+/// 1. SQL Row trait import issues
+/// 2. SQL NUMERIC vs BIGINT type mismatches
+
+#[cfg(test)]
+mod tests {
+    use sqlx::Row; // This import would have been missing before the fix
+
+    #[test]
+    fn test_sqlx_row_trait_is_imported() {
+        // This test ensures that the Row trait is available for import
+        // The original bug was that routes/queue.rs was missing this import
+
+        // This is a compile-time test - if the Row trait cannot be imported, this test fails to compile
+        use sqlx::postgres::PgRow;
+
+        // Test that Row trait methods are available (would fail without the import)
+        let _row_get_method_exists = |row: &PgRow| {
+            let _test: Result<i64, sqlx::Error> = row.try_get("test_column");
+            let _test2: Result<i64, sqlx::Error> = row.try_get(0);
+        };
+    }
+
+    #[test]
+    fn test_sql_type_casting_understanding() {
+        // This test documents the SQL type casting fix we implemented
+        // PostgreSQL SUM() returns NUMERIC, but Rust expects BIGINT for i64
+
+        // The problematic pattern (would cause a runtime error):
+        // SELECT COALESCE(SUM(file_size), 0) as total_size -- Returns NUMERIC
+        // let value: i64 = row.get("total_size"); -- ERROR: type mismatch
+
+        // The fixed pattern:
+        // SELECT COALESCE(SUM(file_size), 0)::BIGINT as total_size -- Returns BIGINT
+        // let value: i64 = row.get("total_size"); -- SUCCESS
+
+        // This test just verifies our understanding is correct
+        assert!(true, "SQL type casting documented");
+    }
+
+    #[test]
+    fn test_numeric_vs_bigint_sql_patterns() {
+        // Document the SQL patterns we fixed
+
+        let problematic_queries = vec![
+            "SELECT COALESCE(SUM(file_size), 0) as total_size FROM documents",
+            "SELECT source_type, COALESCE(SUM(file_size), 0) as total_size FROM ignored_files GROUP BY source_type",
+        ];
+
+        let fixed_queries = vec![
+            "SELECT COALESCE(SUM(file_size), 0)::BIGINT as total_size FROM documents",
+            "SELECT source_type, COALESCE(SUM(file_size), 0)::BIGINT as total_size FROM ignored_files GROUP BY source_type",
+        ];
+
+        // Verify we have the same number of fixed queries as problematic ones
+        assert_eq!(problematic_queries.len(), fixed_queries.len());
+
+        // Each fixed query should contain the ::BIGINT cast
+        for fixed_query in &fixed_queries {
+            assert!(
+                fixed_query.contains("::BIGINT"),
+                "Fixed query should contain ::BIGINT cast: {}",
+                fixed_query
+            );
+        }
+    }
+
+    #[test]
+    fn test_queue_module_compiles() {
+        // Test that the queue module compiles (tests the Row import fix)
+        let _router = crate::routes::queue::router();
+
+        // Test that the require_admin function works
+        use crate::models::{UserRole, AuthProvider};
+        let admin_user = crate::auth::AuthUser {
+            user: crate::models::User {
+                id: uuid::Uuid::new_v4(),
+                username: "admin".to_string(),
+                email: "admin@example.com".to_string(),
+                password_hash: Some("hash".to_string()),
+                role: UserRole::Admin,
+                created_at: chrono::Utc::now(),
+                updated_at: chrono::Utc::now(),
+                oidc_subject: None,
+                oidc_issuer: None,
+                oidc_email: None,
+                auth_provider: AuthProvider::Local,
+            },
+        };
+
+        let regular_user = crate::auth::AuthUser {
+            user: crate::models::User {
+                id: uuid::Uuid::new_v4(),
+                username: "user".to_string(),
+                email: "user@example.com".to_string(),
+                password_hash: Some("hash".to_string()),
+                role: UserRole::User,
+                created_at: chrono::Utc::now(),
+                updated_at: chrono::Utc::now(),
+                oidc_subject: None,
+                oidc_issuer: None,
+                oidc_email: None,
+                auth_provider: AuthProvider::Local,
+            },
+        };
+
+        // Test admin access
+        assert!(crate::routes::queue::require_admin(&admin_user).is_ok());
+
+        // Test non-admin rejection
+        assert!(crate::routes::queue::require_admin(&regular_user).is_err());
+    }
+
+    #[test]
+    fn test_ignored_files_module_compiles() {
+        // Test that the ignored_files module compiles (tests the SQL type fix)
+        let _router = crate::routes::ignored_files::ignored_files_routes();
+    }
+}
\ No newline at end of file
diff --git a/src/tests/route_compilation_tests.rs b/src/tests/route_compilation_tests.rs
new file mode 100644
index 0000000..50cf447
--- /dev/null
+++ b/src/tests/route_compilation_tests.rs
@@ -0,0 +1,71 @@
+/// Tests to ensure route compilation and basic functionality
+/// These tests focus on catching compilation errors in route modules
+
+#[cfg(test)]
+mod tests {
+    use axum::http::StatusCode;
+
+    #[test]
+    fn test_queue_routes_module_compiles() {
+        // This test ensures the queue routes module compiles without errors
+        // It would catch missing imports like the Row trait issue
+        let _router = crate::routes::queue::router();
+
+        // Test that the require_admin function compiles
+        use crate::models::{UserRole, AuthProvider};
+        let test_user = crate::auth::AuthUser {
+            user: crate::models::User {
+                id: uuid::Uuid::new_v4(),
+                username: "test".to_string(),
+                email: "test@example.com".to_string(),
+                password_hash: Some("hash".to_string()),
+                role: UserRole::User,
+                created_at: chrono::Utc::now(),
+                updated_at: chrono::Utc::now(),
+                oidc_subject: None,
+                oidc_issuer: None,
+                oidc_email: None,
+                auth_provider: AuthProvider::Local,
+            },
+        };
+
+        // This function call would fail if there were compilation issues
+        let result = crate::routes::queue::require_admin(&test_user);
+        assert_eq!(result, Err(StatusCode::FORBIDDEN));
+    }
+
+    #[test]
+    fn test_ignored_files_routes_module_compiles() {
+        // This test ensures the ignored_files routes module compiles
+        let _router = crate::routes::ignored_files::ignored_files_routes();
+    }
+
+    #[test]
+    fn test_all_route_modules_compile() {
+        // Test that all main route modules compile
+        let _auth_router = crate::routes::auth::router();
+        let _documents_router = crate::routes::documents::router();
+        let _labels_router = crate::routes::labels::router();
+        let _metrics_router = crate::routes::metrics::router();
+        let _prometheus_router = crate::routes::prometheus_metrics::router();
+        let _queue_router = crate::routes::queue::router();
+        let _search_router = crate::routes::search::router();
+        let _settings_router = crate::routes::settings::router();
+        let _sources_router = crate::routes::sources::router();
+        let _users_router = crate::routes::users::router();
+        let _webdav_router = crate::routes::webdav::router();
+        let _ignored_files_router = crate::routes::ignored_files::ignored_files_routes();
+    }
+
+    #[test]
+    fn test_sql_imports_are_available() {
+        // Test that required SQL traits are available
+        // This would catch missing Row trait imports
+        use sqlx::Row;
+
+        // This is a compile-time test - if the Row trait is not available, this won't compile
+        let _row_method_exists = |row: &sqlx::postgres::PgRow| {
+            let _: Result<i64, sqlx::Error> = row.try_get("test");
+        };
+    }
+}
\ No newline at end of file
diff --git a/src/tests/sql_type_safety_tests.rs b/src/tests/sql_type_safety_tests.rs
new file mode 100644
index 0000000..ed4707d
--- /dev/null
+++ b/src/tests/sql_type_safety_tests.rs
@@ -0,0 +1,284 @@
+/// Tests specifically designed to catch SQL type mismatches and import issues
+/// These tests target the exact problems that weren't caught before:
+/// 1. NUMERIC vs BIGINT type mismatches in aggregate functions
+/// 2. Missing Row trait imports
+/// 3. SQL compilation issues that only appear with real database queries
+
+#[cfg(test)]
+mod tests {
+    use crate::db::Database;
+    use sqlx::Row;
+    use uuid::Uuid;
+
+    async fn create_test_db() -> Database {
+        let db_url = std::env::var("TEST_DATABASE_URL")
+            .unwrap_or_else(|_| "postgresql://postgres:postgres@localhost:5432/readur_test".to_string());
+
+        let db = Database::new(&db_url).await.expect("Failed to connect to test database");
+        db.migrate().await.expect("Failed to migrate test database");
+        db
+    }
+
+    #[tokio::test]
+    async fn test_row_trait_import_is_available() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // This test ensures the Row trait is imported and available
+        // The .get() method would fail to compile if the Row trait were missing
+        let result = sqlx::query("SELECT 1::BIGINT as test_value")
+            .fetch_one(pool)
+            .await
+            .unwrap();
+
+        // These calls require the Row trait to be in scope
+        let _value: i64 = result.get("test_value");
+        let _value_by_index: i64 = result.get(0);
+        let _optional_value: Option<i64> = result.get("test_value");
+    }
+
+    #[tokio::test]
+    async fn test_sum_aggregate_type_safety() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // Create test data
+        let user_id = Uuid::new_v4();
+        sqlx::query(
+            "INSERT INTO users (id, username, email, password_hash, role)
+             VALUES ($1, $2, $3, $4, $5)"
+        )
+        .bind(user_id)
+        .bind("test_aggregate_user")
+        .bind("test_agg@example.com")
+        .bind("hash")
+        .bind("user")
+        .execute(pool)
+        .await
+        .unwrap();
+
+        // Insert test documents
+        for i in 0..3 {
+            let doc_id = Uuid::new_v4();
+            sqlx::query(
+                r#"
+                INSERT INTO documents (id, filename, original_filename, file_path, file_size, mime_type, user_id)
+                VALUES ($1, $2, $3, $4, $5, $6, $7)
+                "#
+            )
+            .bind(doc_id)
+            .bind(format!("test_{}.pdf", i))
+            .bind(format!("test_{}.pdf", i))
+            .bind(format!("/test/test_{}.pdf", i))
+            .bind(1024i64 * (i + 1) as i64) // Different file sizes
+            .bind("application/pdf")
+            .bind(user_id)
+            .execute(pool)
+            .await
+            .unwrap();
+        }
+
+        // Test the exact SQL pattern from ignored_files.rs that was failing
+        let result = sqlx::query(
+            r#"
+            SELECT
+                COUNT(*) as total_files,
+                COALESCE(SUM(file_size), 0)::BIGINT as total_size_bytes
+            FROM documents
+            WHERE user_id = $1
+            "#
+        )
+        .bind(user_id)
+        .fetch_one(pool)
+        .await
+        .unwrap();
+
+        // This extraction would fail if the ::BIGINT cast were missing
+        let total_files: i64 = result.get("total_files");
+        let total_size_bytes: i64 = result.get("total_size_bytes");
+
+        assert_eq!(total_files, 3);
+        assert_eq!(total_size_bytes, 1024 + 2048 + 3072); // Sum of file sizes
+    }
+
+    #[tokio::test]
+    async fn test_group_by_aggregate_type_safety() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // Test the exact SQL pattern from the ignored_files.rs GROUP BY query
+        let results = sqlx::query(
+            r#"
+            SELECT
+                mime_type,
+                COUNT(*) as count,
+                COALESCE(SUM(file_size), 0)::BIGINT as total_size_bytes
+            FROM documents
+            GROUP BY mime_type
+            ORDER BY count DESC
+            "#
+        )
+        .fetch_all(pool)
+        .await
+        .unwrap();
+
+        // Test that we can extract all values without type errors
+        for row in results {
+            let _mime_type: String = row.get("mime_type");
+            let _count: i64 = row.get("count");
+            let _total_size_bytes: i64 = row.get("total_size_bytes");
+        }
+    }
+
+    #[tokio::test]
+    async fn test_numeric_vs_bigint_difference() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // Demonstrate the difference between NUMERIC and BIGINT return types
+
+        // This query returns NUMERIC (the original problematic pattern)
+        let numeric_result = sqlx::query("SELECT COALESCE(SUM(file_size), 0) as total_size FROM documents")
+            .fetch_one(pool)
+            .await
+            .unwrap();
+
+        // This query returns BIGINT (the fixed pattern)
+        let bigint_result = sqlx::query("SELECT COALESCE(SUM(file_size), 0)::BIGINT as total_size FROM documents")
+            .fetch_one(pool)
+            .await
+            .unwrap();
+
+        // The BIGINT version should work with i64 extraction
+        let _bigint_value: i64 = bigint_result.get("total_size");
+
+        // The NUMERIC version would fail with i64 extraction but works with f64
+        let _numeric_as_f64: Option<f64> = numeric_result.try_get("total_size").ok();
+
+        // Trying to get NUMERIC as i64 would fail (this is what was causing the original error)
+        let numeric_as_i64_result: Result<i64, sqlx::Error> = numeric_result.try_get("total_size");
+        assert!(numeric_as_i64_result.is_err()); // This demonstrates the original problem
+    }
+
+    #[tokio::test]
+    async fn test_ignored_files_aggregate_queries() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // Create test user
+        let user_id = Uuid::new_v4();
+        sqlx::query(
+            "INSERT INTO users (id, username, email, password_hash, role)
+             VALUES ($1, $2, $3, $4, $5)"
+        )
+        .bind(user_id)
+        .bind("test_ignored_user")
+        .bind("test_ignored@example.com")
+        .bind("hash")
+        .bind("admin")
+        .execute(pool)
+        .await
+        .unwrap();
+
+        // Add test ignored files
+        for i in 0..2 {
+            let file_id = Uuid::new_v4();
+            sqlx::query(
+                r#"
+                INSERT INTO ignored_files (id, ignored_by, filename, file_path, file_size, mime_type, source_type, reason)
+                VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
+                "#
+            )
+            .bind(file_id)
+            .bind(user_id)
+            .bind(format!("ignored_{}.pdf", i))
+            .bind(format!("/test/ignored_{}.pdf", i))
+            .bind(1024i64 * (i + 1) as i64)
+            .bind("application/pdf")
+            .bind("source_sync")
+            .bind(Some("Test reason"))
+            .execute(pool)
+            .await
+            .unwrap();
+        }
+
+        // Test the exact queries from ignored_files.rs that were failing
+
+        // Main stats query
+        let stats_result = sqlx::query(
+            r#"
+            SELECT
+                COUNT(*) as total_ignored_files,
+                COALESCE(SUM(file_size), 0)::BIGINT as total_size_bytes,
+                MAX(ignored_at) as most_recent_ignored_at
+            FROM ignored_files
+            WHERE ignored_by = $1
+            "#
+        )
+        .bind(user_id)
+        .fetch_one(pool)
+        .await
+        .unwrap();
+
+        // These extractions would fail without proper type casting
+        let total_files: i64 = stats_result.get("total_ignored_files");
+        let total_size: i64 = stats_result.get("total_size_bytes");
+
+        assert_eq!(total_files, 2);
+        assert_eq!(total_size, 1024 + 2048);
+
+        // Group by source type query
+        let by_source_results = sqlx::query(
+            r#"
+            SELECT
+                source_type,
+                COUNT(*) as count,
+                COALESCE(SUM(file_size), 0)::BIGINT as total_size_bytes
+            FROM ignored_files
+            WHERE ignored_by = $1
+            GROUP BY source_type
+            ORDER BY count DESC
+            "#
+        )
+        .bind(user_id)
+        .fetch_all(pool)
+        .await
+        .unwrap();
+
+        // Test extraction from GROUP BY results
+        for row in by_source_results {
+            let _source_type: String = row.get("source_type");
+            let _count: i64 = row.get("count");
+            let _total_size_bytes: i64 = row.get("total_size_bytes");
+        }
+    }
+
+    #[tokio::test]
+    async fn test_queue_enqueue_pending_sql_patterns() {
+        let db = create_test_db().await;
+        let pool = db.get_pool();
+
+        // Test the SQL patterns from queue.rs that need the Row trait
+        let pending_documents = sqlx::query(
+            r#"
+            SELECT d.id, d.file_size
+            FROM documents d
+            LEFT JOIN ocr_queue oq ON d.id = oq.document_id
+            WHERE d.ocr_status = 'pending'
+            AND oq.document_id IS NULL
+            AND d.file_path IS NOT NULL
+            AND (d.mime_type LIKE 'image/%' OR d.mime_type = 'application/pdf' OR d.mime_type = 'text/plain')
+            ORDER BY d.created_at ASC
+            "#
+        )
+        .fetch_all(pool)
+        .await
+        .unwrap();
+
+        // Test that Row trait methods work (these would fail without proper import)
+        for row in pending_documents {
+            let _document_id: uuid::Uuid = row.get("id");
+            let _file_size: i64 = row.get("file_size");
+        }
+    }
+}
\ No newline at end of file
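
For reference, the call-site pattern these tests guard looks roughly like the sketch below. It is an illustrative sketch only, not code from this patch: the helper name fetch_total_documents_size and its signature are assumptions, while the sqlx::Row import and the ::BIGINT cast mirror the two fixes the tests above exercise.

use sqlx::{PgPool, Row};
use uuid::Uuid;

// Hypothetical helper showing the fixed pattern: keep sqlx::Row in scope so that
// row.get()/row.try_get() are available, and cast the SUM() aggregate to BIGINT in
// SQL so the result can be read as i64 on the Rust side.
async fn fetch_total_documents_size(pool: &PgPool, user_id: Uuid) -> Result<i64, sqlx::Error> {
    let row = sqlx::query(
        r#"
        SELECT COALESCE(SUM(file_size), 0)::BIGINT as total_size_bytes
        FROM documents
        WHERE user_id = $1
        "#,
    )
    .bind(user_id)
    .fetch_one(pool)
    .await?;

    // Without the ::BIGINT cast, SUM() yields NUMERIC and this i64 extraction fails at runtime.
    row.try_get("total_size_bytes")
}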