From aad6036b4c72f14538102938809720a419212884 Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Fri, 1 Aug 2025 20:34:42 +0000
Subject: [PATCH] feat(tests): bring back the test helpers

---
 fix_all_remaining_tests.sh                    |  56 -----
 src/lib.rs                                    |   3 +
 src/test_helpers.rs                           | 225 ++++++++++++++++++
 ...on_document_upload_hash_duplicate_tests.rs |  17 +-
 ...ration_per_user_watch_directories_tests.rs |   9 +-
 tests/integration_webdav_integration_tests.rs |  51 ++--
 tests/unit_webdav_smart_scan_logic_tests.rs   |   4 +-
 7 files changed, 272 insertions(+), 93 deletions(-)
 delete mode 100755 fix_all_remaining_tests.sh
 create mode 100644 src/test_helpers.rs

diff --git a/fix_all_remaining_tests.sh b/fix_all_remaining_tests.sh
deleted file mode 100755
index 9f418e5..0000000
--- a/fix_all_remaining_tests.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#\!/bin/bash
-
-# Files with missing Config fields
-config_files=(
-"tests/integration_smart_sync_targeted_scan.rs"
-"tests/integration_s3_sync_tests.rs"
-"tests/unit_webdav_edge_cases_tests.rs"
-"tests/unit_webdav_url_management_tests.rs"
-"tests/integration_webdav_concurrency_tests.rs"
-"tests/integration_smart_sync_error_handling.rs"
-"tests/integration_webdav_sync_tests.rs"
-"tests/unit_webdav_directory_tracking_tests.rs"
-"tests/unit_basic_sync_tests.rs"
-"tests/unit_webdav_unit_tests.rs"
-"tests/integration_webdav_smart_scanning_tests.rs"
-"tests/unit_webdav_enhanced_unit_tests.rs"
-"tests/integration_webdav_scheduler_concurrency_tests.rs"
-"tests/integration_smart_sync_deep_scan.rs"
-"tests/integration_smart_sync_no_changes.rs"
-"tests/integration_local_folder_sync_tests.rs"
-"tests/webdav_production_flow_integration_tests.rs"
-"tests/integration_webdav_first_time_scan_tests.rs"
-"tests/unit_webdav_targeted_rescan_tests.rs"
-"tests/integration_smart_sync_first_time.rs"
-"tests/unit_smart_sync_service_tests.rs"
-"tests/unit_webdav_smart_scan_logic_tests.rs"
-)
-
-echo "Fixing Config structs in ${#config_files[@]} files..."
-
-for file in "${config_files[@]}"; do
-    if [ -f "$file" ]; then
-        echo "Processing $file..."
-        # Check if file has Config struct and missing fields
-        if grep -q "Config {" "$file" && \! grep -q "user_watch_base_dir" "$file"; then
-            # Add the missing fields after watch_folder line
-            sed -i.bak '/watch_folder:/a\
-            user_watch_base_dir: "./user_watch".to_string(),\
-            enable_per_user_watch: false,' "$file"
-
-            # Clean up formatting
-            sed -i 's/enable_per_user_watch: false, /enable_per_user_watch: false,\n /' "$file"
-            sed -i 's/enable_per_user_watch: false, /enable_per_user_watch: false,\n /' "$file"
-
-            # Remove backup
-            rm "${file}.bak" 2>/dev/null || true
-            echo "Fixed Config in $file"
-        else
-            echo "Skipping $file (no Config struct or already fixed)"
-        fi
-    else
-        echo "File not found: $file"
-    fi
-done
-
-echo "Config fixes completed."
diff --git a/src/lib.rs b/src/lib.rs
index e8c4134..6410bc7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -25,6 +25,9 @@ mod tests;
 #[cfg(any(test, feature = "test-utils"))]
 pub mod test_utils;
 
+#[cfg(any(test, feature = "test-utils"))]
+pub mod test_helpers;
+
 use axum::{http::StatusCode, Json};
 use utoipa;
 use config::Config;
diff --git a/src/test_helpers.rs b/src/test_helpers.rs
new file mode 100644
index 0000000..7c82005
--- /dev/null
+++ b/src/test_helpers.rs
@@ -0,0 +1,225 @@
+/*!
+ * Test Helpers and Utilities
+ *
+ * This module provides utilities for creating test configurations and services
+ * with sensible defaults. Tests can modify the returned objects as needed.
+ */
+
+use crate::{
+    config::Config,
+    db::Database,
+    services::file_service::FileService,
+    storage::{StorageConfig, factory::create_storage_backend},
+    AppState,
+    ocr::queue::OcrQueueService,
+    services::sync_progress_tracker::SyncProgressTracker,
+};
+use std::sync::Arc;
+use sqlx::PgPool;
+
+/// Creates a test configuration with sensible defaults
+/// All fields are populated to avoid compilation errors when new fields are added
+pub fn create_test_config() -> Config {
+    Config {
+        database_url: std::env::var("TEST_DATABASE_URL")
+            .or_else(|_| std::env::var("DATABASE_URL"))
+            .unwrap_or_else(|_| "postgresql://readur:readur@localhost:5432/readur".to_string()),
+        server_address: "127.0.0.1:0".to_string(),
+        jwt_secret: "test_jwt_secret_for_integration_tests".to_string(),
+        upload_path: "/tmp/test_uploads".to_string(),
+        watch_folder: "/tmp/test_watch".to_string(),
+        user_watch_base_dir: "/tmp/user_watch".to_string(),
+        enable_per_user_watch: false,
+        allowed_file_types: vec!["pdf".to_string(), "png".to_string(), "jpg".to_string(), "txt".to_string()],
+        watch_interval_seconds: Some(10),
+        file_stability_check_ms: Some(500),
+        max_file_age_hours: Some(24),
+
+        // OCR Configuration
+        ocr_language: "eng".to_string(),
+        concurrent_ocr_jobs: 2,
+        ocr_timeout_seconds: 60,
+        max_file_size_mb: 50,
+
+        // Performance
+        memory_limit_mb: 256,
+        cpu_priority: "normal".to_string(),
+
+        // OIDC Configuration (disabled for tests)
+        oidc_enabled: false,
+        oidc_client_id: None,
+        oidc_client_secret: None,
+        oidc_issuer_url: None,
+        oidc_redirect_uri: None,
+
+        // S3 Configuration (disabled for tests by default)
+        s3_enabled: false,
+        s3_config: None,
+    }
+}
+
+/// Creates a default test database URL
+pub fn default_test_db_url() -> String {
+    std::env::var("TEST_DATABASE_URL")
+        .or_else(|_| std::env::var("DATABASE_URL"))
+        .unwrap_or_else(|_| "postgresql://readur:readur@localhost:5432/readur".to_string())
+}
+
+/// Creates a test FileService with local storage
+pub async fn create_test_file_service(upload_path: Option<&str>) -> Arc<FileService> {
+    let path = upload_path.unwrap_or("/tmp/test_uploads");
+    let storage_config = StorageConfig::Local {
+        upload_path: path.to_string()
+    };
+    let storage_backend = create_storage_backend(storage_config)
+        .await
+        .expect("Failed to create test storage backend");
+
+    Arc::new(FileService::with_storage(path.to_string(), storage_backend))
+}
+
+/// Creates a test Database instance
+pub async fn create_test_database() -> Database {
+    let database_url = default_test_db_url();
+    Database::new(&database_url)
+        .await
+        .expect("Failed to connect to test database")
+}
+
+/// Creates a test Database instance with custom pool configuration
+pub async fn create_test_database_with_pool(max_connections: u32, min_connections: u32) -> Database {
+    let database_url = default_test_db_url();
+    Database::new_with_pool_config(&database_url, max_connections, min_connections)
+        .await
+        .expect("Failed to connect to test database with custom pool")
+}
+
+/// Creates a test OcrQueueService
+pub fn create_test_queue_service(db: Database, pool: PgPool, file_service: Arc<FileService>) -> Arc<OcrQueueService> {
+    Arc::new(OcrQueueService::new(db, pool, 2, file_service))
+}
+
+/// Creates a test AppState with default configuration and services
+/// This provides a convenient way to get a fully configured AppState for testing
+pub async fn create_test_app_state() -> Arc<AppState> {
+    let config = create_test_config();
+    create_test_app_state_with_config(config).await
+}
+
+/// Creates a test AppState with a custom configuration
+/// This allows tests to customize config while still getting properly initialized services
+pub async fn create_test_app_state_with_config(config: Config) -> Arc<AppState> {
+    let db = create_test_database().await;
+    let file_service = create_test_file_service(Some(&config.upload_path)).await;
+    let pool = db.pool.clone();
+    let queue_service = create_test_queue_service(db.clone(), pool, file_service.clone());
+    let sync_progress_tracker = Arc::new(SyncProgressTracker::new());
+
+    Arc::new(AppState {
+        db,
+        config,
+        file_service,
+        webdav_scheduler: None,
+        source_scheduler: None,
+        queue_service,
+        oidc_client: None,
+        sync_progress_tracker,
+        user_watch_service: None,
+    })
+}
+
+/// Creates a test AppState with custom upload path
+/// Convenient for tests that need a specific upload directory
+pub async fn create_test_app_state_with_upload_path(upload_path: &str) -> Arc<AppState> {
+    let mut config = create_test_config();
+    config.upload_path = upload_path.to_string();
+    create_test_app_state_with_config(config).await
+}
+
+/// Creates a test AppState with user watch service enabled
+/// Useful for tests that need per-user watch functionality
+pub async fn create_test_app_state_with_user_watch(user_watch_base_dir: &str) -> Arc<AppState> {
+    let mut config = create_test_config();
+    config.enable_per_user_watch = true;
+    config.user_watch_base_dir = user_watch_base_dir.to_string();
+
+    let db = create_test_database().await;
+    let file_service = create_test_file_service(Some(&config.upload_path)).await;
+    let pool = db.pool.clone();
+    let queue_service = create_test_queue_service(db.clone(), pool, file_service.clone());
+    let sync_progress_tracker = Arc::new(SyncProgressTracker::new());
+
+    // Create user watch service
+    let user_watch_service = Some(Arc::new(crate::services::user_watch_service::UserWatchService::new(
+        &config.user_watch_base_dir
+    )));
+
+    Arc::new(AppState {
+        db,
+        config,
+        file_service,
+        webdav_scheduler: None,
+        source_scheduler: None,
+        queue_service,
+        oidc_client: None,
+        sync_progress_tracker,
+        user_watch_service,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_create_test_config() {
+        let config = create_test_config();
+        assert!(!config.database_url.is_empty());
+        assert!(!config.s3_enabled); // Default should be false
+        assert!(config.s3_config.is_none()); // Default should be None
+        assert!(!config.oidc_enabled); // Default should be false
+    }
+
+    #[tokio::test]
+    async fn test_create_test_file_service() {
+        let file_service = create_test_file_service(None).await;
+        // Just verify it was created successfully
+        assert!(file_service.as_ref() as *const _ != std::ptr::null());
+    }
+
+    #[tokio::test]
+    async fn test_create_test_database() {
+        let db = create_test_database().await;
+        // Just verify it was created successfully
+        assert!(db.pool.is_closed() == false);
+    }
+
+    #[tokio::test]
+    async fn test_create_test_app_state() {
+        let state = create_test_app_state().await;
+        // Verify all required fields are present
+        assert!(!state.config.database_url.is_empty());
+        assert!(!state.config.s3_enabled); // Default should be false
+        assert!(!state.config.oidc_enabled); // Default should be false
+        assert!(state.user_watch_service.is_none()); // Default should be None
+    }
+
+    #[tokio::test]
+    async fn test_create_test_app_state_with_custom_config() {
+        let mut config = create_test_config();
+        config.upload_path = "/custom/test/path".to_string();
+        config.s3_enabled = true;
+
+        let state = create_test_app_state_with_config(config).await;
+        assert_eq!(state.config.upload_path, "/custom/test/path");
+        assert!(state.config.s3_enabled);
+    }
+
+    #[tokio::test]
+    async fn test_create_test_app_state_with_user_watch() {
+        let state = create_test_app_state_with_user_watch("/tmp/user_watch_test").await;
+        assert!(state.config.enable_per_user_watch);
+        assert_eq!(state.config.user_watch_base_dir, "/tmp/user_watch_test");
+        assert!(state.user_watch_service.is_some());
+    }
+}
\ No newline at end of file
diff --git a/tests/integration_document_upload_hash_duplicate_tests.rs b/tests/integration_document_upload_hash_duplicate_tests.rs
index a25464b..5d3cebc 100644
--- a/tests/integration_document_upload_hash_duplicate_tests.rs
+++ b/tests/integration_document_upload_hash_duplicate_tests.rs
@@ -9,6 +9,8 @@ use readur::{
     db::Database,
     config::Config,
     models::{Document, CreateUser, UserRole},
+    services::file_service::FileService,
+    storage::{StorageConfig, factory::create_storage_backend},
 };
 
 // Helper function to calculate file hash
@@ -94,16 +96,29 @@ async fn create_test_app_state() -> Result<Arc<AppState>> {
             oidc_client_secret: None,
             oidc_issuer_url: None,
             oidc_redirect_uri: None,
+            s3_enabled: false,
+            s3_config: None,
         }
     });
 
     let db = Database::new(&config.database_url).await?;
+
+    // Create file service
+    let storage_config = StorageConfig::Local {
+        upload_path: config.upload_path.clone()
+    };
+    let storage_backend = create_storage_backend(storage_config)
+        .await
+        .expect("Failed to create test storage backend");
+    let file_service = Arc::new(FileService::with_storage(config.upload_path.clone(), storage_backend));
+
     let queue_service = std::sync::Arc::new(
-        readur::ocr::queue::OcrQueueService::new(db.clone(), db.get_pool().clone(), 1)
+        readur::ocr::queue::OcrQueueService::new(db.clone(), db.get_pool().clone(), 1, file_service.clone())
     );
     Ok(Arc::new(AppState {
         db: db.clone(),
         config,
+        file_service,
         webdav_scheduler: None,
         source_scheduler: None,
         queue_service,
diff --git a/tests/integration_per_user_watch_directories_tests.rs b/tests/integration_per_user_watch_directories_tests.rs
index 2856dd7..b24af15 100644
--- a/tests/integration_per_user_watch_directories_tests.rs
+++ b/tests/integration_per_user_watch_directories_tests.rs
@@ -30,10 +30,11 @@ async fn test_per_user_watch_directory_lifecycle() -> Result<()> {
     config.enable_per_user_watch = true;
 
     // Update the state with the new config and user watch service
-    let user_watch_service = Some(Arc::new(readur::services::user_watch_service::UserWatchService::new(&config.user_watch_base_dir)));
+    let user_watch_service = Some(Arc::new(UserWatchService::new(&config.user_watch_base_dir)));
     let updated_state = Arc::new(AppState {
         db: ctx.state.db.clone(),
         config,
+        file_service: ctx.state.file_service.clone(),
         webdav_scheduler: None,
         source_scheduler: None,
         queue_service: ctx.state.queue_service.clone(),
@@ -282,10 +283,11 @@ async fn test_user_watch_directory_file_processing_simulation() -> Result<()> {
     config.enable_per_user_watch = true;
 
     // Update the state with the new config and user watch service
-    let user_watch_service = Some(Arc::new(readur::services::user_watch_service::UserWatchService::new(&config.user_watch_base_dir)));
+    let user_watch_service = Some(Arc::new(UserWatchService::new(&config.user_watch_base_dir)));
     let state = Arc::new(AppState {
         db: ctx.state.db.clone(),
         config: config.clone(),
+        file_service: ctx.state.file_service.clone(),
         webdav_scheduler: None,
         source_scheduler: None,
         queue_service: ctx.state.queue_service.clone(),
@@ -296,7 +298,7 @@ async fn test_user_watch_directory_file_processing_simulation() -> Result<()> {
 
     // Create user watch manager to test file path mapping
     let user_watch_service = state.user_watch_service.as_ref().unwrap();
-    let user_watch_manager = readur::scheduling::user_watch_manager::UserWatchManager::new(state.db.clone(), Arc::clone(user_watch_service));
+    let user_watch_manager = readur::scheduling::user_watch_manager::UserWatchManager::new(state.db.clone(), (**user_watch_service).clone());
 
     // Create test user
     let test_user = readur::models::User {
@@ -367,6 +369,7 @@ async fn test_per_user_watch_disabled() -> Result<()> {
     let updated_state = Arc::new(AppState {
         db: ctx.state.db.clone(),
         config,
+        file_service: ctx.state.file_service.clone(),
         webdav_scheduler: None,
         source_scheduler: None,
         queue_service: ctx.state.queue_service.clone(),
diff --git a/tests/integration_webdav_integration_tests.rs b/tests/integration_webdav_integration_tests.rs
index 83b43aa..310a7e0 100644
--- a/tests/integration_webdav_integration_tests.rs
+++ b/tests/integration_webdav_integration_tests.rs
@@ -14,6 +14,7 @@ use readur::{
     models::*,
     routes,
     AppState,
+    test_helpers,
 };
 
 // Removed constant - will use environment variables instead
@@ -80,44 +81,34 @@ async fn setup_test_app() -> (Router, Arc<AppState>) {
         .or_else(|_| std::env::var("DATABASE_URL"))
         .unwrap_or_else(|_| "postgresql://readur:readur@localhost:5432/readur".to_string());
 
-    let config = Config {
-        database_url: database_url.clone(),
-        server_address: "127.0.0.1:0".to_string(),
-        upload_path: "/tmp/test_uploads".to_string(),
-        watch_folder: "/tmp/test_watch".to_string(),
-        user_watch_base_dir: "./user_watch".to_string(),
-        enable_per_user_watch: false,
-        jwt_secret: "test_jwt_secret_for_integration_tests".to_string(),
-        allowed_file_types: vec!["pdf".to_string(), "png".to_string()],
-        watch_interval_seconds: Some(10),
-        file_stability_check_ms: Some(1000),
-        max_file_age_hours: Some(24),
-        cpu_priority: "normal".to_string(),
-        memory_limit_mb: 512,
-        concurrent_ocr_jobs: 4,
-        max_file_size_mb: 50,
-        ocr_language: "eng".to_string(),
-        ocr_timeout_seconds: 300,
-        oidc_enabled: false,
-        oidc_client_id: None,
-        oidc_client_secret: None,
-        oidc_issuer_url: None,
-        oidc_redirect_uri: None,
-    };
+    // Create test configuration with custom database URL
+    let mut config = test_helpers::create_test_config();
+    config.database_url = database_url.clone();
+    config.jwt_secret = "test_jwt_secret_for_integration_tests".to_string();
+    config.allowed_file_types = vec!["pdf".to_string(), "png".to_string()];
+    config.watch_interval_seconds = Some(10);
+    config.file_stability_check_ms = Some(1000);
+    config.max_file_age_hours = Some(24);
+    config.memory_limit_mb = 512;
+    config.concurrent_ocr_jobs = 4;
+    config.max_file_size_mb = 50;
+    config.ocr_timeout_seconds = 300;
 
-    // Use the environment-based database URL
-    let db_url = database_url;
-
-    let db = Database::new(&db_url).await.expect("Failed to connect to test database");
-    let queue_service = Arc::new(readur::ocr::queue::OcrQueueService::new(db.clone(), db.pool.clone(), 2));
+    // Create test services
+    let db = test_helpers::create_test_database().await;
+    let file_service = test_helpers::create_test_file_service(Some("/tmp/test_uploads")).await;
+    let queue_service = test_helpers::create_test_queue_service(db.clone(), db.pool.clone(), file_service.clone());
+
+    // Create AppState
     let state = Arc::new(AppState {
         db: db.clone(),
         config,
+        file_service,
         webdav_scheduler: None,
         source_scheduler: None,
         queue_service,
         oidc_client: None,
-        sync_progress_tracker: std::sync::Arc::new(readur::services::sync_progress_tracker::SyncProgressTracker::new()),
+        sync_progress_tracker: Arc::new(readur::services::sync_progress_tracker::SyncProgressTracker::new()),
         user_watch_service: None,
     });
 
diff --git a/tests/unit_webdav_smart_scan_logic_tests.rs b/tests/unit_webdav_smart_scan_logic_tests.rs
index 4530c65..10964e1 100644
--- a/tests/unit_webdav_smart_scan_logic_tests.rs
+++ b/tests/unit_webdav_smart_scan_logic_tests.rs
@@ -1,9 +1,7 @@
 use tokio;
-use uuid::Uuid;
 use chrono::Utc;
 use std::collections::HashMap;
-use readur::models::FileIngestionInfo;
-use readur::services::webdav::{WebDAVService, WebDAVConfig};
+use readur::{models::FileIngestionInfo, services::webdav::{WebDAVService, WebDAVConfig}};
 
 // Helper function to create test WebDAV service for smart scanning
 fn create_nextcloud_webdav_service() -> WebDAVService {
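
Note (reviewer commentary, not part of the patch): with `test_helpers` exported behind the `test-utils` feature, a new integration test can start from the shared defaults instead of spelling out every `Config` field. The sketch below is illustrative only: the test name and paths are made up, and it assumes the feature is enabled (for example via `cargo test --features test-utils`) and that a Postgres instance is reachable through `TEST_DATABASE_URL` or `DATABASE_URL`, as the helpers expect.

use readur::test_helpers;

#[tokio::test]
async fn smoke_test_uses_shared_helpers() {
    // Start from the shared defaults and override only what this test cares about.
    let mut config = test_helpers::create_test_config();
    config.upload_path = "/tmp/smoke_test_uploads".to_string();

    // Database, file storage, and the OCR queue come pre-wired by the helper.
    let state = test_helpers::create_test_app_state_with_config(config).await;

    assert_eq!(state.config.upload_path, "/tmp/smoke_test_uploads");
    assert!(!state.config.s3_enabled);
}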