fix(tests): resolve compilation error in tests and source scheduler
parent 1a6efeacc5
commit 0ef233c3cc
@@ -261,7 +261,7 @@ impl Database {
     }

     pub async fn get_sources_for_sync(&self) -> Result<Vec<crate::models::Source>> {
-        info!("🔍 Loading sources from database for sync check...");
+        crate::debug_log!("DB_SOURCES", "🔍 Loading sources from database for sync check...");

         let rows = sqlx::query(
             r#"SELECT id, user_id, name, source_type, enabled, config, status,
@@ -278,7 +278,7 @@ impl Database {
                 e
             })?;

-        info!("📊 Database query returned {} sources for sync processing", rows.len());
+        crate::debug_log!("DB_SOURCES", "📊 Database query returned {} sources for sync processing", rows.len());

         let mut sources = Vec::new();
         for (index, row) in rows.iter().enumerate() {
@@ -287,7 +287,7 @@ impl Database {
             let source_type_str: String = row.get("source_type");
             let config_json: serde_json::Value = row.get("config");

-            info!("📋 Processing source {}: ID={}, Name='{}', Type={}",
+            crate::debug_log!("DB_SOURCES", "📋 Processing source {}: ID={}, Name='{}', Type={}",
                 index + 1, source_id, source_name, source_type_str);

             // Log config structure for debugging
@@ -309,7 +309,7 @@ impl SourceScheduler {
         if elapsed_minutes < sync_interval_minutes as i64 {
             // Only log this occasionally to avoid spam
             if elapsed_minutes % 10 == 0 {
-                info!("Sync not due for source {} (last sync {} minutes ago, interval {} minutes)",
+                crate::debug_log!("SOURCE_SCHEDULER", "Sync not due for source {} (last sync {} minutes ago, interval {} minutes)",
                     source.name, elapsed_minutes, sync_interval_minutes);
             }
             return Ok(false);
@@ -499,7 +499,7 @@ impl SourceScheduler {
                 ));
             }

-            info!("✅ WebDAV URL validation passed for source '{}': {}", source_name, server_url);
+            crate::debug_log!("SOURCE_SCHEDULER", "✅ WebDAV URL validation passed for source '{}': {}", source_name, server_url);
             Ok(())
         }
         Err(e) => {
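The call sites above now pass a context tag plus either a plain message or a format string with arguments. The macro hunk below shows that the old debug_log! only had arms for a bare message, a context plus message, and context plus key => value pairs, so the three-argument format calls had no arm to match; that appears to be the compilation error the commit title refers to. A hypothetical reduction of the old arms (println! standing in for the real crate::utils::debug helpers) that reproduces the failure:

// Hypothetical stand-in for the old debug_log! arms; println! replaces the real helpers.
macro_rules! old_debug_log {
    ($msg:expr) => { println!("{}", $msg) };
    ($context:expr, $msg:expr) => { println!("[{}] {}", $context, $msg) };
    ($context:expr, $($key:expr => $value:expr),+ $(,)?) => {
        { print!("[{}]", $context); $(print!(" {}={}", $key, $value);)+ println!(); }
    };
}

fn main() {
    // Two arguments still match the context + message arm.
    old_debug_log!("DB_SOURCES", "loading sources for sync check");
    // Three arguments match nothing; uncommenting this line fails to compile.
    // old_debug_log!("DB_SOURCES", "query returned {} sources", 3);
}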
@@ -60,12 +60,18 @@ macro_rules! debug_log {
     ($msg:expr) => {
         crate::utils::debug::debug_log($msg)
     };
-    ($context:expr, $msg:expr) => {
-        crate::utils::debug::debug_log_context($context, $msg)
-    };
+    // Structured logging pattern (must come before format pattern due to => token)
     ($context:expr, $($key:expr => $value:expr),+ $(,)?) => {
         crate::utils::debug::debug_log_structured($context, &[$(($key, &$value)),+])
     };
+    // Format pattern with arguments
+    ($context:expr, $msg:expr, $($args:expr),+ $(,)?) => {
+        crate::utils::debug::debug_log_context($context, &format!($msg, $($args),+))
+    };
+    // Simple context + message pattern
+    ($context:expr, $msg:expr) => {
+        crate::utils::debug::debug_log_context($context, $msg)
+    };
 }

 /// Macro for debug error logging
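To make the new dispatch concrete, here is a minimal, self-contained sketch mirroring the reworked arm order, with a hypothetical demo_log! and println! standing in for the crate::utils::debug functions. Arms are tried top to bottom, and the diff's own comment notes that the structured key => value arm has to sit ahead of the format-args arm because of the => token; with this ordering each call shape used in the earlier hunks lands on the intended arm:

// Hypothetical mirror of the reworked debug_log! arms (println! replaces the real helpers).
macro_rules! demo_log {
    // Bare message.
    ($msg:expr) => { println!("{}", $msg) };
    // Structured logging pattern, kept ahead of the format pattern as in the diff.
    ($context:expr, $($key:expr => $value:expr),+ $(,)?) => {
        { print!("[{}]", $context); $(print!(" {}={}", $key, $value);)+ println!(); }
    };
    // Format pattern with arguments.
    ($context:expr, $msg:expr, $($args:expr),+ $(,)?) => {
        println!("[{}] {}", $context, format!($msg, $($args),+))
    };
    // Simple context + message pattern.
    ($context:expr, $msg:expr) => { println!("[{}] {}", $context, $msg) };
}

fn main() {
    demo_log!("plain message");                                       // bare-message arm
    demo_log!("DB_SOURCES", "loading sources for sync check");        // simple context + message arm
    demo_log!("DB_SOURCES", "query returned {} sources", 3);          // format arm
    demo_log!("SOURCE_SCHEDULER", "elapsed" => 12, "interval" => 60); // structured arm
}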
@@ -1,12 +1,10 @@
-use std::collections::HashMap;
 use tokio;
 use uuid::Uuid;
 use chrono::Utc;
-use std::sync::Arc;
 use anyhow::Result;
 use readur::models::{FileInfo, CreateWebDAVDirectory};
 use readur::services::webdav_service::{WebDAVService, WebDAVConfig};
-use readur::{db::Database, config::Config, AppState};
+use readur::db::Database;

 // Helper function to create test WebDAV service
 fn create_test_webdav_service() -> WebDAVService {
@@ -173,29 +171,23 @@ fn mock_realistic_directory_structure() -> Vec<FileInfo> {
     ]
 }

-// Helper function to create test database and app state
-async fn create_test_app_state() -> Result<(Arc<AppState>, Uuid)> {
+// Helper function to create test database
+async fn create_test_database() -> Result<(Database, Uuid)> {
     let db_url = std::env::var("DATABASE_URL")
         .or_else(|_| std::env::var("TEST_DATABASE_URL"))
         .unwrap_or_else(|_| "postgresql://readur:readur@localhost:5432/readur".to_string());

     let database = Database::new(&db_url).await?;
-    let config = Config::from_env().unwrap_or_default();
-
-    let app_state = Arc::new(AppState {
-        db: database,
-        config,
-    });

     // Create a test user
     let user_id = Uuid::new_v4();

-    Ok((app_state, user_id))
+    Ok((database, user_id))
 }

 #[tokio::test]
 async fn test_first_time_directory_scan_with_subdirectories() {
-    let (app_state, user_id) = create_test_app_state().await.unwrap();
+    let (database, user_id) = create_test_database().await.unwrap();
     let service = create_test_webdav_service();

     // Mock the scenario where we have files but no previously tracked directories
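For reference, a condensed sketch of how the tests consume the new helper: they talk to the Database handle directly rather than through an AppState, which is what allows the Arc, Config, and AppState imports above to be dropped. The helper is abbreviated from this file; the test name below is hypothetical, while Database::new and list_webdav_directories come from the hunks themselves.

use anyhow::Result;
use uuid::Uuid;
use readur::db::Database;

// Abbreviated version of the helper above: build a Database straight from the
// connection string, with no AppState wrapper around it.
async fn create_test_database() -> Result<(Database, Uuid)> {
    let db_url = std::env::var("DATABASE_URL")
        .unwrap_or_else(|_| "postgresql://readur:readur@localhost:5432/readur".to_string());
    let database = Database::new(&db_url).await?;
    Ok((database, Uuid::new_v4()))
}

#[tokio::test]
async fn fresh_user_has_no_tracked_directories() {
    let (database, user_id) = create_test_database().await.unwrap();
    // Queries go straight to the Database handle instead of app_state.db.
    let known_dirs = database.list_webdav_directories(user_id).await.unwrap();
    assert!(known_dirs.is_empty(), "Should have no known directories on first scan");
}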
@@ -206,7 +198,7 @@ async fn test_first_time_directory_scan_with_subdirectories() {
     // 2. But no subdirectories are known in database (first-time scan)

     // Verify that list_webdav_directories returns empty (first-time scenario)
-    let known_dirs = app_state.db.list_webdav_directories(user_id).await.unwrap();
+    let known_dirs = database.list_webdav_directories(user_id).await.unwrap();
     assert!(known_dirs.is_empty(), "Should have no known directories on first scan");

     // This is the critical test: check_subdirectories_for_changes should fall back to full scan
@@ -225,10 +217,10 @@ async fn test_first_time_directory_scan_with_subdirectories() {
     };

     // Insert the root directory to simulate it being "known" but without subdirectories
-    app_state.db.create_or_update_webdav_directory(&root_dir).await.unwrap();
+    database.create_or_update_webdav_directory(&root_dir).await.unwrap();

     // Now verify that known directories contains only the root
-    let known_dirs_after = app_state.db.list_webdav_directories(user_id).await.unwrap();
+    let known_dirs_after = database.list_webdav_directories(user_id).await.unwrap();
     assert_eq!(known_dirs_after.len(), 1);
     assert_eq!(known_dirs_after[0].directory_path, "/FullerDocuments/JonDocuments");

@@ -251,7 +243,7 @@ async fn test_first_time_directory_scan_with_subdirectories() {

 #[tokio::test]
 async fn test_subdirectory_tracking_after_full_scan() {
-    let (app_state, user_id) = create_test_app_state().await.unwrap();
+    let (database, user_id) = create_test_database().await.unwrap();
     let service = create_test_webdav_service();
     let mock_files = mock_realistic_directory_structure();

@@ -323,11 +315,11 @@ async fn test_subdirectory_tracking_after_full_scan() {
             total_size_bytes,
         };

-        app_state.db.create_or_update_webdav_directory(&directory_record).await.unwrap();
+        database.create_or_update_webdav_directory(&directory_record).await.unwrap();
     }

     // Now verify that all directories are tracked
-    let tracked_dirs = app_state.db.list_webdav_directories(user_id).await.unwrap();
+    let tracked_dirs = database.list_webdav_directories(user_id).await.unwrap();

     // We should have tracked all directories found in the file structure
     let expected_directories = vec![