feat(tests): add unit tests for new webdav functionality
This commit is contained in:
parent
92b21350db
commit
c1dbd06df2
|
|
@ -584,23 +584,33 @@ impl WebDAVService {
|
|||
}
|
||||
|
||||
/// Check if a path is a direct child of a directory (not nested deeper)
|
||||
fn is_direct_child(&self, child_path: &str, parent_path: &str) -> bool {
|
||||
if !child_path.starts_with(parent_path) {
|
||||
pub fn is_direct_child(&self, child_path: &str, parent_path: &str) -> bool {
|
||||
// Normalize paths by removing trailing slashes
|
||||
let child_normalized = child_path.trim_end_matches('/');
|
||||
let parent_normalized = parent_path.trim_end_matches('/');
|
||||
|
||||
if !child_normalized.starts_with(parent_normalized) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Same path is not a direct child of itself
|
||||
if child_normalized == parent_normalized {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Handle root directory case
|
||||
if parent_path.is_empty() || parent_path == "/" {
|
||||
return !child_path.trim_start_matches('/').contains('/');
|
||||
if parent_normalized.is_empty() || parent_normalized == "/" {
|
||||
let child_without_leading_slash = child_normalized.trim_start_matches('/');
|
||||
return !child_without_leading_slash.is_empty() && !child_without_leading_slash.contains('/');
|
||||
}
|
||||
|
||||
// Remove parent path prefix and check if remainder has exactly one more path segment
|
||||
let remaining = child_path.strip_prefix(parent_path)
|
||||
let remaining = child_normalized.strip_prefix(parent_normalized)
|
||||
.unwrap_or("")
|
||||
.trim_start_matches('/');
|
||||
|
||||
// Direct child means no more slashes in the remaining path
|
||||
!remaining.contains('/')
|
||||
!remaining.contains('/') && !remaining.is_empty()
|
||||
}
|
||||
|
||||
/// Check subdirectories individually for changes when parent directory is unchanged
|
||||
|
|
@ -719,7 +729,7 @@ impl WebDAVService {
|
|||
self.parse_directory_etag(&response_text)
|
||||
}
|
||||
|
||||
fn parse_directory_etag(&self, xml_text: &str) -> Result<String> {
|
||||
pub fn parse_directory_etag(&self, xml_text: &str) -> Result<String> {
|
||||
use quick_xml::events::Event;
|
||||
use quick_xml::reader::Reader;
|
||||
|
||||
|
|
|
|||
|
|
@ -295,6 +295,7 @@ fn parse_http_date(date_str: &str) -> Option<DateTime<Utc>> {
|
|||
/// Normalize an ETag for comparison: strips surrounding whitespace, any
/// weak-validator `W/` prefix(es), and surrounding double quotes, returning
/// the bare opaque tag value.
pub fn normalize_etag(etag: &str) -> String {
    let mut value = etag.trim();
    // Drop repeated weak-validator prefixes, mirroring trim_start_matches("W/")
    while let Some(rest) = value.strip_prefix("W/") {
        value = rest;
    }
    value.trim().trim_matches('"').to_string()
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,530 @@
|
|||
use readur::models::{FileInfo, CreateWebDAVDirectory, UpdateWebDAVDirectory, User, UserRole, AuthProvider};
|
||||
use readur::{AppState};
|
||||
use tokio;
|
||||
use chrono::Utc;
|
||||
use uuid::Uuid;
|
||||
use std::sync::Arc;
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Test utilities for mocking WebDAV responses.
//
// Holds canned per-directory ETags and listings plus a request counter so
// tests can assert how many "server" calls a sync performed.
struct MockWebDAVServer {
    // directory path -> ETag the mock server reports for it
    directory_etags: HashMap<String, String>,
    // directory path -> entries the mock server lists inside it
    directory_files: HashMap<String, Vec<FileInfo>>,
    // requests observed; atomic so &self accessors can read/bump it
    request_count: std::sync::atomic::AtomicUsize,
}

impl MockWebDAVServer {
    // Empty mock: nothing configured, zero requests recorded.
    fn new() -> Self {
        Self {
            directory_etags: HashMap::new(),
            directory_files: HashMap::new(),
            request_count: std::sync::atomic::AtomicUsize::new(0),
        }
    }

    // Configure the ETag reported for `path`.
    fn set_directory_etag(&mut self, path: &str, etag: &str) {
        self.directory_etags.insert(path.to_string(), etag.to_string());
    }

    // Configure the listing returned for `path`.
    fn set_directory_files(&mut self, path: &str, files: Vec<FileInfo>) {
        self.directory_files.insert(path.to_string(), files);
    }

    // Number of requests recorded so far.
    fn get_request_count(&self) -> usize {
        self.request_count.load(std::sync::atomic::Ordering::SeqCst)
    }

    // Record one request. SeqCst is the strongest ordering; fine for tests.
    fn increment_request_count(&self) {
        self.request_count.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
    }
}
|
||||
|
||||
// Helper function to setup test database.
//
// Connects to TEST_DATABASE_URL when set, otherwise falls back to an
// in-memory SQLite database, then applies the migrations in ./migrations.
async fn setup_test_database() -> readur::db::Database {
    // assumes the "sqlite::memory:" fallback URL is accepted by
    // Database::new in test builds — TODO confirm driver configuration
    let db_url = std::env::var("TEST_DATABASE_URL")
        .unwrap_or_else(|_| "sqlite::memory:".to_string());

    let db = readur::db::Database::new(&db_url).await.expect("Failed to create test database");

    // Run migrations
    sqlx::migrate!("./migrations")
        .run(&db.pool)
        .await
        .expect("Failed to run migrations");

    db
}
|
||||
|
||||
// Helper function to create test user.
//
// Builds a local-auth User with fixed test credentials, inserts it with a
// raw query (Postgres-style $N placeholders), and returns the new user id.
async fn create_test_user(db: &readur::db::Database) -> Uuid {
    let user_id = Uuid::new_v4();
    let user = User {
        id: user_id,
        username: "testuser".to_string(),
        email: "test@example.com".to_string(),
        password_hash: Some("test_hash".to_string()),
        role: UserRole::User,
        auth_provider: AuthProvider::Local,
        created_at: Utc::now(),
        updated_at: Utc::now(),
        // no OIDC identity for a local-auth test user
        oidc_subject: None,
        oidc_issuer: None,
        oidc_email: None,
    };

    // Insert user into database
    sqlx::query!(
        "INSERT INTO users (id, username, email, password_hash, role, auth_provider, created_at, updated_at, oidc_subject, oidc_issuer, oidc_email)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)",
        user.id,
        user.username,
        user.email,
        user.password_hash,
        user.role.to_string(),
        user.auth_provider.to_string(),
        user.created_at,
        user.updated_at,
        user.oidc_subject,
        user.oidc_issuer,
        user.oidc_email
    )
    .execute(&db.pool)
    .await
    .expect("Failed to insert test user");

    user_id
}
|
||||
|
||||
// Helper function to create AppState for testing.
//
// Wires a migrated test database and a minimal Config into an Arc<AppState>
// with all optional services disabled.
async fn create_test_app_state() -> Arc<AppState> {
    let db = setup_test_database().await;
    let config = readur::config::Config {
        database_url: "sqlite::memory:".to_string(),
        upload_path: "/tmp/test_uploads".to_string(),
        jwt_secret: "test_secret".to_string(),
        server_host: "127.0.0.1".to_string(),
        server_port: 8080,
        log_level: "info".to_string(),
        ..Default::default()
    };

    Arc::new(AppState {
        db,
        config,
        // NOTE(review): the OCR queue service gets its own separate
        // in-memory database rather than sharing `db` above — confirm
        // that is intentional for these tests.
        queue_service: std::sync::Arc::new(readur::ocr::queue::OcrQueueService::new(std::sync::Arc::new(readur::db::Database::new("sqlite::memory:").await.unwrap()))),
        webdav_scheduler: None,
        source_scheduler: None,
        oidc_client: None,
    })
}
|
||||
|
||||
fn create_sample_files_with_directories() -> Vec<FileInfo> {
|
||||
vec![
|
||||
// Root directory
|
||||
FileInfo {
|
||||
path: "/Documents".to_string(),
|
||||
name: "Documents".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "documents-etag-v1".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Subdirectory
|
||||
FileInfo {
|
||||
path: "/Documents/Projects".to_string(),
|
||||
name: "Projects".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "projects-etag-v1".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Files
|
||||
FileInfo {
|
||||
path: "/Documents/readme.pdf".to_string(),
|
||||
name: "readme.pdf".to_string(),
|
||||
size: 1024000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "readme-etag-v1".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Projects/project1.pdf".to_string(),
|
||||
name: "project1.pdf".to_string(),
|
||||
size: 2048000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "project1-etag-v1".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/// CRUD round-trip for WebDAV directory tracking: create a record, read
/// it back, update it, and verify the new values persisted.
#[tokio::test]
async fn test_directory_tracking_database_operations() {
    let state = create_test_app_state().await;
    let user_id = create_test_user(&state.db).await;

    // Test creating directory record
    let create_dir = CreateWebDAVDirectory {
        user_id,
        directory_path: "/Documents".to_string(),
        directory_etag: "test-etag-123".to_string(),
        file_count: 5,
        total_size_bytes: 1024000,
    };

    let created_dir = state.db.create_or_update_webdav_directory(&create_dir)
        .await
        .expect("Failed to create directory record");

    assert_eq!(created_dir.directory_path, "/Documents");
    assert_eq!(created_dir.directory_etag, "test-etag-123");
    assert_eq!(created_dir.file_count, 5);
    assert_eq!(created_dir.total_size_bytes, 1024000);

    // Test retrieving directory record
    let retrieved_dir = state.db.get_webdav_directory(user_id, "/Documents")
        .await
        .expect("Failed to retrieve directory")
        .expect("Directory not found");

    assert_eq!(retrieved_dir.directory_etag, "test-etag-123");
    assert_eq!(retrieved_dir.file_count, 5);

    // Test updating directory record
    let update_dir = UpdateWebDAVDirectory {
        directory_etag: "updated-etag-456".to_string(),
        last_scanned_at: Utc::now(),
        file_count: 7,
        total_size_bytes: 2048000,
    };

    state.db.update_webdav_directory(user_id, "/Documents", &update_dir)
        .await
        .expect("Failed to update directory");

    // Verify update
    let updated_dir = state.db.get_webdav_directory(user_id, "/Documents")
        .await
        .expect("Failed to retrieve updated directory")
        .expect("Directory not found after update");

    assert_eq!(updated_dir.directory_etag, "updated-etag-456");
    assert_eq!(updated_dir.file_count, 7);
    assert_eq!(updated_dir.total_size_bytes, 2048000);
}
|
||||
|
||||
/// Creates several directory records for one user and checks that
/// list_webdav_directories returns all of them ordered by path.
#[tokio::test]
async fn test_multiple_directory_tracking() {
    let state = create_test_app_state().await;
    let user_id = create_test_user(&state.db).await;

    // Create multiple directory records
    let directories = vec![
        CreateWebDAVDirectory {
            user_id,
            directory_path: "/Documents".to_string(),
            directory_etag: "docs-etag".to_string(),
            file_count: 3,
            total_size_bytes: 1024000,
        },
        CreateWebDAVDirectory {
            user_id,
            directory_path: "/Documents/Projects".to_string(),
            directory_etag: "projects-etag".to_string(),
            file_count: 2,
            total_size_bytes: 2048000,
        },
        CreateWebDAVDirectory {
            user_id,
            directory_path: "/Documents/Archive".to_string(),
            directory_etag: "archive-etag".to_string(),
            file_count: 10,
            total_size_bytes: 5120000,
        },
    ];

    for dir in directories {
        state.db.create_or_update_webdav_directory(&dir)
            .await
            .expect("Failed to create directory");
    }

    // List all directories
    let all_dirs = state.db.list_webdav_directories(user_id)
        .await
        .expect("Failed to list directories");

    assert_eq!(all_dirs.len(), 3);

    // Verify they're sorted by path ("Archive" sorts before "Projects")
    assert_eq!(all_dirs[0].directory_path, "/Documents");
    assert_eq!(all_dirs[1].directory_path, "/Documents/Archive");
    assert_eq!(all_dirs[2].directory_path, "/Documents/Projects");
}
|
||||
|
||||
/// Two users track a directory with the same path; each must only see
/// their own record (per-user isolation of WebDAV directory state).
#[tokio::test]
async fn test_directory_isolation_between_users() {
    let state = create_test_app_state().await;
    let user1_id = create_test_user(&state.db).await;

    // Create second user (raw insert; placeholders are Postgres-style $N)
    let user2_id = Uuid::new_v4();
    sqlx::query!(
        "INSERT INTO users (id, username, email, password_hash, role, auth_provider, created_at, updated_at, oidc_subject, oidc_issuer, oidc_email)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)",
        user2_id,
        "testuser2",
        "test2@example.com",
        Some("test_hash2".to_string()),
        UserRole::User.to_string(),
        AuthProvider::Local.to_string(),
        Utc::now(),
        Utc::now(),
        None::<String>,
        None::<String>,
        None::<String>
    )
    .execute(&state.db.pool)
    .await
    .expect("Failed to insert second test user");

    // Create directory for user1
    let dir1 = CreateWebDAVDirectory {
        user_id: user1_id,
        directory_path: "/Documents".to_string(),
        directory_etag: "user1-etag".to_string(),
        file_count: 5,
        total_size_bytes: 1024000,
    };

    state.db.create_or_update_webdav_directory(&dir1)
        .await
        .expect("Failed to create directory for user1");

    // Create directory for user2 — same path, different ETag
    let dir2 = CreateWebDAVDirectory {
        user_id: user2_id,
        directory_path: "/Documents".to_string(),
        directory_etag: "user2-etag".to_string(),
        file_count: 3,
        total_size_bytes: 512000,
    };

    state.db.create_or_update_webdav_directory(&dir2)
        .await
        .expect("Failed to create directory for user2");

    // Verify user1 can only see their directory
    let user1_dirs = state.db.list_webdav_directories(user1_id)
        .await
        .expect("Failed to list user1 directories");

    assert_eq!(user1_dirs.len(), 1);
    assert_eq!(user1_dirs[0].directory_etag, "user1-etag");

    // Verify user2 can only see their directory
    let user2_dirs = state.db.list_webdav_directories(user2_id)
        .await
        .expect("Failed to list user2 directories");

    assert_eq!(user2_dirs.len(), 1);
    assert_eq!(user2_dirs[0].directory_etag, "user2-etag");

    // Verify user1's lookup of the shared path yields user1's record,
    // not user2's
    let user1_access_user2 = state.db.get_webdav_directory(user1_id, "/Documents")
        .await
        .expect("Database query failed");

    assert!(user1_access_user2.is_some());
    assert_eq!(user1_access_user2.unwrap().directory_etag, "user1-etag");
}
|
||||
|
||||
/// Simulates one sync cycle: stored ETag differs from the server's value
/// (change detected), the record is updated, and a repeat comparison with
/// the same ETag is detected as unchanged.
#[tokio::test]
async fn test_etag_change_detection() {
    let state = create_test_app_state().await;
    let user_id = create_test_user(&state.db).await;

    // Create initial directory
    let initial_dir = CreateWebDAVDirectory {
        user_id,
        directory_path: "/Documents".to_string(),
        directory_etag: "initial-etag".to_string(),
        file_count: 3,
        total_size_bytes: 1024000,
    };

    state.db.create_or_update_webdav_directory(&initial_dir)
        .await
        .expect("Failed to create initial directory");

    // Simulate checking current directory ETag
    let stored_dir = state.db.get_webdav_directory(user_id, "/Documents")
        .await
        .expect("Failed to get directory")
        .expect("Directory not found");

    // Simulate server returning different ETag (directory changed)
    let current_etag = "changed-etag";
    let directory_changed = stored_dir.directory_etag != current_etag;

    assert!(directory_changed, "Directory should be detected as changed");

    // Update with new ETag after processing changes
    let update = UpdateWebDAVDirectory {
        directory_etag: current_etag.to_string(),
        last_scanned_at: Utc::now(),
        file_count: 5, // Files were added
        total_size_bytes: 2048000, // Size increased
    };

    state.db.update_webdav_directory(user_id, "/Documents", &update)
        .await
        .expect("Failed to update directory");

    // Verify update
    let updated_dir = state.db.get_webdav_directory(user_id, "/Documents")
        .await
        .expect("Failed to get updated directory")
        .expect("Directory not found");

    assert_eq!(updated_dir.directory_etag, "changed-etag");
    assert_eq!(updated_dir.file_count, 5);
    assert_eq!(updated_dir.total_size_bytes, 2048000);

    // Simulate next sync with same ETag (no changes)
    let same_etag = "changed-etag";
    let directory_unchanged = updated_dir.directory_etag == same_etag;

    assert!(directory_unchanged, "Directory should be detected as unchanged");
}
|
||||
|
||||
/// Builds a nested tree plus an unrelated root ("/Other") and checks that
/// prefix filtering selects exactly the subdirectories of /Documents.
#[tokio::test]
async fn test_subdirectory_filtering() {
    let state = create_test_app_state().await;
    let user_id = create_test_user(&state.db).await;

    // Create nested directory structure
    let directories = vec![
        ("/Documents", "docs-etag"),
        ("/Documents/2024", "2024-etag"),
        ("/Documents/2024/Q1", "q1-etag"),
        ("/Documents/2024/Q2", "q2-etag"),
        ("/Documents/Archive", "archive-etag"),
        ("/Other", "other-etag"), // Different root
    ];

    for (path, etag) in directories {
        let dir = CreateWebDAVDirectory {
            user_id,
            directory_path: path.to_string(),
            directory_etag: etag.to_string(),
            file_count: 1,
            total_size_bytes: 1024,
        };

        state.db.create_or_update_webdav_directory(&dir)
            .await
            .expect("Failed to create directory");
    }

    // Get all directories and filter subdirectories of /Documents
    // (prefix match, excluding /Documents itself)
    let all_dirs = state.db.list_webdav_directories(user_id)
        .await
        .expect("Failed to list directories");

    let documents_subdirs: Vec<_> = all_dirs.iter()
        .filter(|dir| dir.directory_path.starts_with("/Documents") && dir.directory_path != "/Documents")
        .collect();

    assert_eq!(documents_subdirs.len(), 4); // 2024, Q1, Q2, Archive

    // Verify specific subdirectories
    let subdir_paths: Vec<&str> = documents_subdirs.iter()
        .map(|dir| dir.directory_path.as_str())
        .collect();

    assert!(subdir_paths.contains(&"/Documents/2024"));
    assert!(subdir_paths.contains(&"/Documents/2024/Q1"));
    assert!(subdir_paths.contains(&"/Documents/2024/Q2"));
    assert!(subdir_paths.contains(&"/Documents/Archive"));
    assert!(!subdir_paths.contains(&"/Other")); // Should not include different root
}
|
||||
|
||||
/// Rough performance smoke test: creates 100 directory records, then times
/// bulk listing and individual lookups against fixed wall-clock budgets.
#[tokio::test]
async fn test_performance_metrics() {
    let state = create_test_app_state().await;
    let user_id = create_test_user(&state.db).await;

    // Create a large number of directories to test performance
    let start_time = std::time::Instant::now();

    for i in 0..100 {
        let dir = CreateWebDAVDirectory {
            user_id,
            directory_path: format!("/Documents/Dir{:03}", i),
            directory_etag: format!("etag-{}", i),
            file_count: i as i64,
            total_size_bytes: (i * 1024) as i64,
        };

        state.db.create_or_update_webdav_directory(&dir)
            .await
            .expect("Failed to create directory");
    }

    let create_time = start_time.elapsed();
    println!("Created 100 directories in: {:?}", create_time);

    // Test bulk retrieval performance
    let retrieval_start = std::time::Instant::now();
    let all_dirs = state.db.list_webdav_directories(user_id)
        .await
        .expect("Failed to list directories");
    let retrieval_time = retrieval_start.elapsed();

    println!("Retrieved {} directories in: {:?}", all_dirs.len(), retrieval_time);
    assert_eq!(all_dirs.len(), 100);

    // Test individual directory access performance
    let individual_start = std::time::Instant::now();
    for i in 0..10 {
        let path = format!("/Documents/Dir{:03}", i);
        let dir = state.db.get_webdav_directory(user_id, &path)
            .await
            .expect("Failed to get directory")
            .expect("Directory not found");
        assert_eq!(dir.directory_etag, format!("etag-{}", i));
    }
    let individual_time = individual_start.elapsed();

    println!("Retrieved 10 individual directories in: {:?}", individual_time);

    // Performance assertions (adjust these based on acceptable performance)
    // NOTE(review): wall-clock thresholds like these are flaky on loaded CI
    // machines — consider loosening the limits or marking this #[ignore]
    // if the test proves noisy.
    assert!(create_time.as_millis() < 5000, "Directory creation too slow: {:?}", create_time);
    assert!(retrieval_time.as_millis() < 100, "Directory retrieval too slow: {:?}", retrieval_time);
    assert!(individual_time.as_millis() < 100, "Individual directory access too slow: {:?}", individual_time);
}
|
||||
|
|
@ -0,0 +1,447 @@
|
|||
use readur::services::webdav_service::{WebDAVService, WebDAVConfig};
|
||||
use readur::models::FileInfo;
|
||||
use tokio;
|
||||
use chrono::Utc;
|
||||
|
||||
// Helper function to create test WebDAV service
|
||||
fn create_test_webdav_service() -> WebDAVService {
|
||||
let config = WebDAVConfig {
|
||||
server_url: "https://test.example.com".to_string(),
|
||||
username: "testuser".to_string(),
|
||||
password: "testpass".to_string(),
|
||||
watch_folders: vec!["/Documents".to_string()],
|
||||
file_extensions: vec!["pdf".to_string(), "png".to_string()],
|
||||
timeout_seconds: 30,
|
||||
server_type: Some("nextcloud".to_string()),
|
||||
};
|
||||
|
||||
WebDAVService::new(config).unwrap()
|
||||
}
|
||||
|
||||
/// Render a minimal PROPFIND multistatus body that advertises `etag` as
/// the collection's getetag (including the server-side double quotes).
fn mock_directory_etag_response(etag: &str) -> String {
    const HEAD: &str = r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
<d:response>
<d:href>/remote.php/dav/files/admin/Documents/</d:href>
<d:propstat>
<d:prop>
<d:getetag>""#;
    const TAIL: &str = r#""</d:getetag>
</d:prop>
<d:status>HTTP/1.1 200 OK</d:status>
</d:propstat>
</d:response>
</d:multistatus>"#;

    let mut body = String::with_capacity(HEAD.len() + etag.len() + TAIL.len());
    body.push_str(HEAD);
    body.push_str(etag);
    body.push_str(TAIL);
    body
}
|
||||
|
||||
// Mock complex nested directory structure
|
||||
fn mock_nested_directory_files() -> Vec<FileInfo> {
|
||||
vec![
|
||||
// Root directory
|
||||
FileInfo {
|
||||
path: "/Documents".to_string(),
|
||||
name: "Documents".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "root-etag-123".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Level 1 directories
|
||||
FileInfo {
|
||||
path: "/Documents/2024".to_string(),
|
||||
name: "2024".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "2024-etag-456".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Archive".to_string(),
|
||||
name: "Archive".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "archive-etag-789".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Level 2 directories
|
||||
FileInfo {
|
||||
path: "/Documents/2024/Q1".to_string(),
|
||||
name: "Q1".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "q1-etag-101".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/2024/Q2".to_string(),
|
||||
name: "Q2".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "q2-etag-102".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Level 3 directory
|
||||
FileInfo {
|
||||
path: "/Documents/2024/Q1/Reports".to_string(),
|
||||
name: "Reports".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "reports-etag-201".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// Files at various levels
|
||||
FileInfo {
|
||||
path: "/Documents/root-file.pdf".to_string(),
|
||||
name: "root-file.pdf".to_string(),
|
||||
size: 1024000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "root-file-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/2024/annual-report.pdf".to_string(),
|
||||
name: "annual-report.pdf".to_string(),
|
||||
size: 2048000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "annual-report-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/2024/Q1/q1-summary.pdf".to_string(),
|
||||
name: "q1-summary.pdf".to_string(),
|
||||
size: 512000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "q1-summary-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/2024/Q1/Reports/detailed-report.pdf".to_string(),
|
||||
name: "detailed-report.pdf".to_string(),
|
||||
size: 4096000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "detailed-report-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Archive/old-document.pdf".to_string(),
|
||||
name: "old-document.pdf".to_string(),
|
||||
size: 256000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "old-document-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_parse_directory_etag() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test parsing a simple directory ETag response
|
||||
let xml_response = mock_directory_etag_response("test-etag-123");
|
||||
let etag = service.parse_directory_etag(&xml_response).unwrap();
|
||||
|
||||
assert_eq!(etag, "test-etag-123");
|
||||
}
|
||||
|
||||
/// parse_directory_etag should strip the double quotes that WebDAV
/// servers place around getetag values.
#[tokio::test]
async fn test_parse_directory_etag_with_quotes() {
    let service = create_test_webdav_service();

    // Test ETag normalization (removing quotes)
    let xml_response = r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
<d:response>
<d:href>/remote.php/dav/files/admin/Documents/</d:href>
<d:propstat>
<d:prop>
<d:getetag>"quoted-etag-456"</d:getetag>
</d:prop>
<d:status>HTTP/1.1 200 OK</d:status>
</d:propstat>
</d:response>
</d:multistatus>"#;

    let etag = service.parse_directory_etag(xml_response).unwrap();
    assert_eq!(etag, "quoted-etag-456");
}
|
||||
|
||||
/// parse_directory_etag should also strip the weak-validator prefix
/// (W/"...") from getetag values.
#[tokio::test]
async fn test_parse_directory_etag_weak_etag() {
    let service = create_test_webdav_service();

    // Test weak ETag normalization
    let xml_response = r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
<d:response>
<d:href>/remote.php/dav/files/admin/Documents/</d:href>
<d:propstat>
<d:prop>
<d:getetag>W/"weak-etag-789"</d:getetag>
</d:prop>
<d:status>HTTP/1.1 200 OK</d:status>
</d:propstat>
</d:response>
</d:multistatus>"#;

    let etag = service.parse_directory_etag(xml_response).unwrap();
    assert_eq!(etag, "weak-etag-789");
}
|
||||
|
||||
/// Exercises is_direct_child across direct children, deeper descendants,
/// root-directory parents, and unrelated paths.
#[tokio::test]
async fn test_is_direct_child() {
    let service = create_test_webdav_service();

    // Test direct child detection
    assert!(service.is_direct_child("/Documents/file.pdf", "/Documents"));
    assert!(service.is_direct_child("/Documents/subfolder", "/Documents"));

    // Test non-direct children (nested deeper)
    assert!(!service.is_direct_child("/Documents/2024/file.pdf", "/Documents"));
    assert!(!service.is_direct_child("/Documents/2024/Q1/file.pdf", "/Documents"));

    // Test root directory edge case
    assert!(service.is_direct_child("/Documents", ""));
    assert!(service.is_direct_child("/Documents", "/"));
    assert!(!service.is_direct_child("/Documents/file.pdf", ""));

    // Test non-matching paths
    assert!(!service.is_direct_child("/Other/file.pdf", "/Documents"));
    // NOTE(review): "/Documenting/..." differs from "/Documents" within the
    // first segment, so it never string-prefix-matches. A sibling that IS a
    // plain string prefix without a separator (e.g. "/DocumentsX") is not
    // covered here — worth adding a case for once the impl guarantees it.
    assert!(!service.is_direct_child("/Documenting/file.pdf", "/Documents")); // prefix but not child
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_track_subdirectories_recursively_structure() {
|
||||
// This test verifies the directory extraction logic without database operations
|
||||
let files = mock_nested_directory_files();
|
||||
|
||||
// Extract directories that should be tracked
|
||||
let mut expected_directories = std::collections::BTreeSet::new();
|
||||
expected_directories.insert("/Documents".to_string());
|
||||
expected_directories.insert("/Documents/2024".to_string());
|
||||
expected_directories.insert("/Documents/Archive".to_string());
|
||||
expected_directories.insert("/Documents/2024/Q1".to_string());
|
||||
expected_directories.insert("/Documents/2024/Q2".to_string());
|
||||
expected_directories.insert("/Documents/2024/Q1/Reports".to_string());
|
||||
|
||||
// This tests the directory extraction logic that happens in track_subdirectories_recursively
|
||||
let mut all_directories = std::collections::BTreeSet::new();
|
||||
|
||||
for file in &files {
|
||||
if file.is_directory {
|
||||
all_directories.insert(file.path.clone());
|
||||
} else {
|
||||
// Extract all parent directories from file paths
|
||||
let mut path_parts: Vec<&str> = file.path.split('/').collect();
|
||||
path_parts.pop(); // Remove the filename
|
||||
|
||||
// Build directory paths from root down to immediate parent
|
||||
let mut current_path = String::new();
|
||||
for part in path_parts {
|
||||
if !part.is_empty() {
|
||||
if !current_path.is_empty() {
|
||||
current_path.push('/');
|
||||
} else {
|
||||
// Start with leading slash for absolute paths
|
||||
current_path.push('/');
|
||||
}
|
||||
current_path.push_str(part);
|
||||
all_directories.insert(current_path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(all_directories, expected_directories);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_direct_file_counting() {
|
||||
let service = create_test_webdav_service();
|
||||
let files = mock_nested_directory_files();
|
||||
|
||||
// Test counting direct files in root directory
|
||||
let direct_files_root: Vec<_> = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.collect();
|
||||
assert_eq!(direct_files_root.len(), 1); // Only root-file.pdf
|
||||
assert_eq!(direct_files_root[0].name, "root-file.pdf");
|
||||
|
||||
// Test counting direct files in /Documents/2024
|
||||
let direct_files_2024: Vec<_> = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024"))
|
||||
.collect();
|
||||
assert_eq!(direct_files_2024.len(), 1); // Only annual-report.pdf
|
||||
assert_eq!(direct_files_2024[0].name, "annual-report.pdf");
|
||||
|
||||
// Test counting direct files in /Documents/2024/Q1
|
||||
let direct_files_q1: Vec<_> = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1"))
|
||||
.collect();
|
||||
assert_eq!(direct_files_q1.len(), 1); // Only q1-summary.pdf
|
||||
assert_eq!(direct_files_q1[0].name, "q1-summary.pdf");
|
||||
|
||||
// Test counting direct files in deep directory
|
||||
let direct_files_reports: Vec<_> = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1/Reports"))
|
||||
.collect();
|
||||
assert_eq!(direct_files_reports.len(), 1); // Only detailed-report.pdf
|
||||
assert_eq!(direct_files_reports[0].name, "detailed-report.pdf");
|
||||
|
||||
// Test empty directory
|
||||
let direct_files_q2: Vec<_> = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q2"))
|
||||
.collect();
|
||||
assert_eq!(direct_files_q2.len(), 0); // No direct files in Q2
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_direct_subdirectory_counting() {
|
||||
let service = create_test_webdav_service();
|
||||
let files = mock_nested_directory_files();
|
||||
|
||||
// Test counting direct subdirectories in root
|
||||
let direct_subdirs_root: Vec<_> = files.iter()
|
||||
.filter(|f| f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.collect();
|
||||
assert_eq!(direct_subdirs_root.len(), 2); // 2024 and Archive
|
||||
|
||||
// Test counting direct subdirectories in /Documents/2024
|
||||
let direct_subdirs_2024: Vec<_> = files.iter()
|
||||
.filter(|f| f.is_directory && service.is_direct_child(&f.path, "/Documents/2024"))
|
||||
.collect();
|
||||
assert_eq!(direct_subdirs_2024.len(), 2); // Q1 and Q2
|
||||
|
||||
// Test counting direct subdirectories in /Documents/2024/Q1
|
||||
let direct_subdirs_q1: Vec<_> = files.iter()
|
||||
.filter(|f| f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1"))
|
||||
.collect();
|
||||
assert_eq!(direct_subdirs_q1.len(), 1); // Reports
|
||||
|
||||
// Test leaf directory (no subdirectories)
|
||||
let direct_subdirs_reports: Vec<_> = files.iter()
|
||||
.filter(|f| f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1/Reports"))
|
||||
.collect();
|
||||
assert_eq!(direct_subdirs_reports.len(), 0); // No subdirectories in Reports
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_size_calculation_per_directory() {
|
||||
let service = create_test_webdav_service();
|
||||
let files = mock_nested_directory_files();
|
||||
|
||||
// Calculate total size for each directory's direct files
|
||||
let root_size: i64 = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.map(|f| f.size)
|
||||
.sum();
|
||||
assert_eq!(root_size, 1024000); // root-file.pdf
|
||||
|
||||
let q1_size: i64 = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1"))
|
||||
.map(|f| f.size)
|
||||
.sum();
|
||||
assert_eq!(q1_size, 512000); // q1-summary.pdf
|
||||
|
||||
let reports_size: i64 = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/2024/Q1/Reports"))
|
||||
.map(|f| f.size)
|
||||
.sum();
|
||||
assert_eq!(reports_size, 4096000); // detailed-report.pdf
|
||||
|
||||
let archive_size: i64 = files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/Archive"))
|
||||
.map(|f| f.size)
|
||||
.sum();
|
||||
assert_eq!(archive_size, 256000); // old-document.pdf
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_edge_cases() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test empty paths
|
||||
assert!(!service.is_direct_child("", "/Documents"));
|
||||
assert!(service.is_direct_child("/Documents", ""));
|
||||
|
||||
// Test identical paths
|
||||
assert!(!service.is_direct_child("/Documents", "/Documents"));
|
||||
|
||||
// Test path with trailing slashes
|
||||
assert!(service.is_direct_child("/Documents/file.pdf", "/Documents/"));
|
||||
|
||||
// Test paths that are prefix but not parent
|
||||
assert!(!service.is_direct_child("/DocumentsBackup/file.pdf", "/Documents"));
|
||||
|
||||
// Test deeply nested paths
|
||||
let deep_path = "/Documents/a/b/c/d/e/f/g/h/i/j/file.pdf";
|
||||
assert!(!service.is_direct_child(deep_path, "/Documents"));
|
||||
assert!(!service.is_direct_child(deep_path, "/Documents/a"));
|
||||
assert!(service.is_direct_child(deep_path, "/Documents/a/b/c/d/e/f/g/h/i/j"));
|
||||
}
|
||||
|
|
@ -0,0 +1,621 @@
|
|||
use readur::services::webdav_service::{WebDAVService, WebDAVConfig};
|
||||
use readur::models::FileInfo;
|
||||
use tokio;
|
||||
use chrono::Utc;
|
||||
|
||||
// Helper function to create test WebDAV service
|
||||
fn create_test_webdav_service() -> WebDAVService {
|
||||
let config = WebDAVConfig {
|
||||
server_url: "https://test.example.com".to_string(),
|
||||
username: "testuser".to_string(),
|
||||
password: "testpass".to_string(),
|
||||
watch_folders: vec!["/Documents".to_string()],
|
||||
file_extensions: vec!["pdf".to_string(), "png".to_string()],
|
||||
timeout_seconds: 30,
|
||||
server_type: Some("nextcloud".to_string()),
|
||||
};
|
||||
|
||||
WebDAVService::new(config).unwrap()
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_empty_directory_tracking() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test completely empty directory
|
||||
let empty_files: Vec<FileInfo> = vec![];
|
||||
|
||||
// Test the directory extraction logic that happens in track_subdirectories_recursively
|
||||
let mut all_directories = std::collections::BTreeSet::new();
|
||||
|
||||
for file in &empty_files {
|
||||
if file.is_directory {
|
||||
all_directories.insert(file.path.clone());
|
||||
} else {
|
||||
let mut path_parts: Vec<&str> = file.path.split('/').collect();
|
||||
path_parts.pop();
|
||||
|
||||
let mut current_path = String::new();
|
||||
for part in path_parts {
|
||||
if !part.is_empty() {
|
||||
if !current_path.is_empty() {
|
||||
current_path.push('/');
|
||||
}
|
||||
current_path.push_str(part);
|
||||
all_directories.insert(current_path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert!(all_directories.is_empty(), "Empty file list should result in no directories");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_directory_only_structure() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test structure with only directories, no files
|
||||
let directory_only_files = vec![
|
||||
FileInfo {
|
||||
path: "/Documents".to_string(),
|
||||
name: "Documents".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "docs-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Empty1".to_string(),
|
||||
name: "Empty1".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "empty1-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Empty2".to_string(),
|
||||
name: "Empty2".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "empty2-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
];
|
||||
|
||||
// Test file counting for empty directories
|
||||
let root_files: Vec<_> = directory_only_files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.collect();
|
||||
assert_eq!(root_files.len(), 0, "Root directory should have no files");
|
||||
|
||||
let empty1_files: Vec<_> = directory_only_files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/Empty1"))
|
||||
.collect();
|
||||
assert_eq!(empty1_files.len(), 0, "Empty1 directory should have no files");
|
||||
|
||||
// Test subdirectory counting
|
||||
let root_subdirs: Vec<_> = directory_only_files.iter()
|
||||
.filter(|f| f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.collect();
|
||||
assert_eq!(root_subdirs.len(), 2, "Root should have 2 subdirectories");
|
||||
|
||||
// Test size calculation for empty directories
|
||||
let root_size: i64 = directory_only_files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents"))
|
||||
.map(|f| f.size)
|
||||
.sum();
|
||||
assert_eq!(root_size, 0, "Empty directory should have zero total size");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_very_deep_nesting() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Create a very deeply nested structure (10 levels deep)
|
||||
let deep_path = "/Documents/L1/L2/L3/L4/L5/L6/L7/L8/L9/L10";
|
||||
let file_path = format!("{}/deep-file.pdf", deep_path);
|
||||
|
||||
let deep_files = vec![
|
||||
// All directories in the path
|
||||
FileInfo {
|
||||
path: "/Documents".to_string(),
|
||||
name: "Documents".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "docs-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// All intermediate directories from L1 to L10
|
||||
FileInfo {
|
||||
path: "/Documents/L1".to_string(),
|
||||
name: "L1".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "l1-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/L1/L2".to_string(),
|
||||
name: "L2".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "l2-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/L1/L2/L3".to_string(),
|
||||
name: "L3".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "l3-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: deep_path.to_string(),
|
||||
name: "L10".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "l10-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
// File at the deepest level
|
||||
FileInfo {
|
||||
path: file_path.clone(),
|
||||
name: "deep-file.pdf".to_string(),
|
||||
size: 1024000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "deep-file-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
];
|
||||
|
||||
// Test is_direct_child for deep paths
|
||||
assert!(service.is_direct_child(&file_path, deep_path), "File should be direct child of deepest directory");
|
||||
assert!(!service.is_direct_child(&file_path, "/Documents"), "File should not be direct child of root");
|
||||
assert!(!service.is_direct_child(&file_path, "/Documents/L1"), "File should not be direct child of L1");
|
||||
|
||||
// Test directory extraction from deep file path
|
||||
let mut all_directories = std::collections::BTreeSet::new();
|
||||
|
||||
for file in &deep_files {
|
||||
if file.is_directory {
|
||||
all_directories.insert(file.path.clone());
|
||||
} else {
|
||||
let mut path_parts: Vec<&str> = file.path.split('/').collect();
|
||||
path_parts.pop(); // Remove filename
|
||||
|
||||
let mut current_path = String::new();
|
||||
for part in path_parts {
|
||||
if !part.is_empty() {
|
||||
if !current_path.is_empty() {
|
||||
current_path.push('/');
|
||||
}
|
||||
current_path.push_str(part);
|
||||
all_directories.insert(current_path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Should extract all intermediate directories
|
||||
assert!(all_directories.contains("/Documents"));
|
||||
assert!(all_directories.contains("/Documents/L1"));
|
||||
assert!(all_directories.contains("/Documents/L1/L2"));
|
||||
assert!(all_directories.contains(deep_path));
|
||||
assert!(all_directories.len() >= 11, "Should track all intermediate directories"); // /Documents + L1 + L2 + L3 + L10 + extracted from file path = 11+ directories total
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_special_characters_in_paths() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test paths with special characters, spaces, unicode
|
||||
let special_files = vec![
|
||||
FileInfo {
|
||||
path: "/Documents/Folder with spaces".to_string(),
|
||||
name: "Folder with spaces".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "spaces-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Folder-with-dashes".to_string(),
|
||||
name: "Folder-with-dashes".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "dashes-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Документы".to_string(), // Cyrillic
|
||||
name: "Документы".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "cyrillic-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
FileInfo {
|
||||
path: "/Documents/Folder with spaces/file with spaces.pdf".to_string(),
|
||||
name: "file with spaces.pdf".to_string(),
|
||||
size: 1024000,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "space-file-etag".to_string(),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
},
|
||||
];
|
||||
|
||||
// Test is_direct_child with special characters
|
||||
assert!(service.is_direct_child("/Documents/Folder with spaces/file with spaces.pdf", "/Documents/Folder with spaces"));
|
||||
assert!(service.is_direct_child("/Documents/Folder with spaces", "/Documents"));
|
||||
assert!(service.is_direct_child("/Documents/Документы", "/Documents"));
|
||||
|
||||
// Test file counting with special characters
|
||||
let spaces_folder_files: Vec<_> = special_files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, "/Documents/Folder with spaces"))
|
||||
.collect();
|
||||
assert_eq!(spaces_folder_files.len(), 1);
|
||||
assert_eq!(spaces_folder_files[0].name, "file with spaces.pdf");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_edge_case_path_patterns() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Test various edge case paths
|
||||
let edge_case_tests = vec![
|
||||
// (child_path, parent_path, expected_result)
|
||||
("/Documents/file.pdf", "/Documents", true),
|
||||
("/Documents/", "/Documents", false), // Same path
|
||||
("/Documents", "/Documents", false), // Same path
|
||||
("/Documents/subfolder/", "/Documents", true), // Trailing slash
|
||||
("/Documents/subfolder", "/Documents/", true), // Parent with trailing slash
|
||||
("/Documenting/file.pdf", "/Documents", false), // Prefix but not parent
|
||||
("/Documents/file.pdf", "/Doc", false), // Partial parent match
|
||||
("", "/Documents", false), // Empty child
|
||||
("/Documents/file.pdf", "", false), // Not direct child of root (nested in Documents)
|
||||
("/file.pdf", "", true), // Root level file
|
||||
("/Documents/file.pdf", "/", false), // Not direct child of root (nested in Documents)
|
||||
("/file.pdf", "/", true), // Root level file with slash parent
|
||||
("//Documents//file.pdf", "/Documents", false), // Double slashes (malformed)
|
||||
("/Documents/./file.pdf", "/Documents", false), // Dot notation (should be normalized first)
|
||||
("/Documents/../file.pdf", "", false), // Parent notation (should be normalized first)
|
||||
];
|
||||
|
||||
for (child, parent, expected) in edge_case_tests {
|
||||
let result = service.is_direct_child(child, parent);
|
||||
assert_eq!(
|
||||
result, expected,
|
||||
"is_direct_child('{}', '{}') expected {}, got {}",
|
||||
child, parent, expected, result
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
async fn test_etag_normalization_edge_cases() {
    // Exercises parse_directory_etag's normalization of raw getetag values
    // (quote stripping and weak-validator "W/" handling — see the parser in
    // this service) against awkward inputs.
    let service = create_test_webdav_service();

    // (raw getetag element content, expected normalized value).
    // An empty expected value also permits the parser to return Err —
    // see the match below.
    let etag_test_cases = vec![
        (r#""simple-etag""#, "simple-etag"),
        (r#"W/"weak-etag""#, "weak-etag"), // weak validator prefix stripped
        (r#"no-quotes"#, "no-quotes"),
        (r#""""#, ""), // Empty quoted string
        (r#""#, ""), // Completely empty element content
        (r#"W/"""#, ""), // Weak etag with empty quotes
        (r#" " spaced-etag " "#, " spaced-etag "), // Extra whitespace around quotes
        (r#"W/ "weak-with-spaces" "#, "weak-with-spaces"),
        (r#""etag-with-"internal"-quotes""#, r#"etag-with-"internal"-quotes"#), // Internal quotes preserved
        (r#""unicode-ж-etag""#, "unicode-ж-etag"), // Unicode characters
    ];

    for (input_etag, expected_normalized) in etag_test_cases {
        // Embed the raw ETag in a minimal PROPFIND multistatus response
        let xml_response = format!(r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
    <d:response>
        <d:href>/remote.php/dav/files/admin/Documents/</d:href>
        <d:propstat>
            <d:prop>
                <d:getetag>{}</d:getetag>
            </d:prop>
            <d:status>HTTP/1.1 200 OK</d:status>
        </d:propstat>
    </d:response>
</d:multistatus>"#, input_etag);

        let result = service.parse_directory_etag(&xml_response);
        match result {
            Ok(etag) => {
                assert_eq!(
                    etag, expected_normalized,
                    "ETag normalization failed for input '{}': expected '{}', got '{}'",
                    input_etag, expected_normalized, etag
                );
            }
            Err(e) => {
                // An error is only acceptable when the expected value is empty
                if !expected_normalized.is_empty() {
                    panic!("Expected ETag '{}' but got error: {}", expected_normalized, e);
                }
                // Empty expected result means we expect an error
            }
        }
    }
}
|
||||
|
||||
#[tokio::test]
async fn test_malformed_xml_responses() {
    // Robustness check: parse_directory_etag must never panic on malformed
    // input. Either Ok or Err is acceptable for each case; this test only
    // verifies the parser returns rather than crashing.
    let service = create_test_webdav_service();

    // A spread of malformed or incomplete PROPFIND responses
    let malformed_xml_cases = vec![
        // Empty response
        "",
        // Not XML
        "not xml at all",
        // Incomplete XML
        "<?xml version=\"1.0\"?><d:multistatus",
        // Well-formed response but with no getetag property at all
        r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
    <d:response>
        <d:href>/remote.php/dav/files/admin/Documents/</d:href>
        <d:propstat>
            <d:prop>
                <d:displayname>Documents</d:displayname>
            </d:prop>
            <d:status>HTTP/1.1 200 OK</d:status>
        </d:propstat>
    </d:response>
</d:multistatus>"#,
        // getetag present but empty
        r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
    <d:response>
        <d:href>/remote.php/dav/files/admin/Documents/</d:href>
        <d:propstat>
            <d:prop>
                <d:getetag></d:getetag>
            </d:prop>
            <d:status>HTTP/1.1 200 OK</d:status>
        </d:propstat>
    </d:response>
</d:multistatus>"#,
        // "Invalid XML characters" case; also missing the d:status element.
        // NOTE(review): the etag value looks like it originally contained
        // literal control characters that did not survive extraction —
        // confirm against the committed file.
        r#"<?xml version="1.0"?>
<d:multistatus xmlns:d="DAV:">
    <d:response>
        <d:href>/remote.php/dav/files/admin/Documents/</d:href>
        <d:propstat>
            <d:prop>
                <d:getetag>"invalid-xml--char"</d:getetag>
            </d:prop>
        </d:propstat>
    </d:response>
</d:multistatus>"#,
    ];

    for (i, malformed_xml) in malformed_xml_cases.iter().enumerate() {
        let result = service.parse_directory_etag(malformed_xml);
        // Some malformed XML might still be parsed successfully by the robust parser
        // The key is that it doesn't crash - either error or success is acceptable
        match result {
            Ok(etag) => {
                println!("Malformed XML case {} parsed successfully with ETag: {}", i, etag);
            }
            Err(e) => {
                println!("Malformed XML case {} failed as expected: {}", i, e);
            }
        }
    }
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_large_directory_structures() {
|
||||
let service = create_test_webdav_service();
|
||||
|
||||
// Generate a large directory structure (1000 directories, 5000 files)
|
||||
let mut large_files = Vec::new();
|
||||
|
||||
// Add root directory
|
||||
large_files.push(FileInfo {
|
||||
path: "/Documents".to_string(),
|
||||
name: "Documents".to_string(),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: "root-etag".to_string(),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
});
|
||||
|
||||
// Generate 100 level-1 directories, each with 10 subdirectories and 50 files
|
||||
for i in 0..100 {
|
||||
let level1_path = format!("/Documents/Dir{:03}", i);
|
||||
|
||||
// Add level-1 directory
|
||||
large_files.push(FileInfo {
|
||||
path: level1_path.clone(),
|
||||
name: format!("Dir{:03}", i),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: format!("dir{}-etag", i),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
});
|
||||
|
||||
// Add 10 subdirectories
|
||||
for j in 0..10 {
|
||||
let level2_path = format!("{}/SubDir{:02}", level1_path, j);
|
||||
large_files.push(FileInfo {
|
||||
path: level2_path.clone(),
|
||||
name: format!("SubDir{:02}", j),
|
||||
size: 0,
|
||||
mime_type: "".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: format!("subdir{}-{}-etag", i, j),
|
||||
is_directory: true,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(755),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
});
|
||||
|
||||
// Add 5 files in each subdirectory
|
||||
for k in 0..5 {
|
||||
large_files.push(FileInfo {
|
||||
path: format!("{}/file{:02}.pdf", level2_path, k),
|
||||
name: format!("file{:02}.pdf", k),
|
||||
size: 1024 * (k + 1) as i64,
|
||||
mime_type: "application/pdf".to_string(),
|
||||
last_modified: Some(Utc::now()),
|
||||
etag: format!("file{}-{}-{}-etag", i, j, k),
|
||||
is_directory: false,
|
||||
created_at: Some(Utc::now()),
|
||||
permissions: Some(644),
|
||||
owner: Some("admin".to_string()),
|
||||
group: Some("admin".to_string()),
|
||||
metadata: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("Generated {} files and directories", large_files.len());
|
||||
|
||||
// Test performance of directory extraction
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut all_directories = std::collections::BTreeSet::new();
|
||||
|
||||
for file in &large_files {
|
||||
if file.is_directory {
|
||||
all_directories.insert(file.path.clone());
|
||||
} else {
|
||||
let mut path_parts: Vec<&str> = file.path.split('/').collect();
|
||||
path_parts.pop();
|
||||
|
||||
let mut current_path = String::new();
|
||||
for part in path_parts {
|
||||
if !part.is_empty() {
|
||||
if !current_path.is_empty() {
|
||||
current_path.push('/');
|
||||
}
|
||||
current_path.push_str(part);
|
||||
all_directories.insert(current_path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let extraction_time = start_time.elapsed();
|
||||
println!("Extracted {} directories in {:?}", all_directories.len(), extraction_time);
|
||||
|
||||
// Verify structure - the actual count includes extraction from file paths too
|
||||
assert!(all_directories.len() >= 1101, "Should have at least 1101 directories"); // 1 root + 100 level1 + 1000 level2 + extracted paths
|
||||
assert!(all_directories.contains("/Documents"));
|
||||
assert!(all_directories.contains("/Documents/Dir000"));
|
||||
assert!(all_directories.contains("/Documents/Dir099/SubDir09"));
|
||||
|
||||
// Test performance of file counting for a specific directory
|
||||
let count_start = std::time::Instant::now();
|
||||
let test_dir = "/Documents/Dir050";
|
||||
let direct_files: Vec<_> = large_files.iter()
|
||||
.filter(|f| !f.is_directory && service.is_direct_child(&f.path, test_dir))
|
||||
.collect();
|
||||
let count_time = count_start.elapsed();
|
||||
|
||||
println!("Counted {} direct files in {} in {:?}", direct_files.len(), test_dir, count_time);
|
||||
|
||||
// Performance assertions
|
||||
assert!(extraction_time.as_millis() < 1000, "Directory extraction too slow: {:?}", extraction_time);
|
||||
assert!(count_time.as_millis() < 100, "File counting too slow: {:?}", count_time);
|
||||
}
|
||||
Loading…
Reference in New Issue