fix(server): resolve compilation errors due to splitting up the large files

This commit is contained in:
perf3ct 2025-07-04 03:06:29 +00:00
parent 1b984a12c2
commit 0b0ffd1dbf
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
10 changed files with 114 additions and 65 deletions

View File

@ -78,7 +78,7 @@ pub async fn bulk_delete_documents(
})?;
// Delete associated files
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
let mut files_deleted = 0;
let mut files_failed = 0;
@ -196,7 +196,7 @@ pub async fn delete_low_confidence_documents(
})?;
// Delete associated files
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
let mut files_deleted = 0;
let mut files_failed = 0;
@ -282,7 +282,7 @@ pub async fn delete_failed_ocr_documents(
})?;
// Delete associated files
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
let mut files_deleted = 0;
let mut files_failed = 0;

View File

@ -462,7 +462,7 @@ pub async fn get_user_duplicates(
let limit = query.limit.unwrap_or(25);
let offset = query.offset.unwrap_or(0);
let (duplicates, total_count) = state
let duplicates = state
.db
.get_user_duplicates(auth_user.user.id, auth_user.user.role, limit, offset)
.await
@ -470,6 +470,8 @@ pub async fn get_user_duplicates(
error!("Failed to get user duplicates: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
let total_count = duplicates.len() as i64;
let response = serde_json::json!({
"duplicates": duplicates,

View File

@ -46,12 +46,12 @@ pub async fn get_document_debug_info(
})?
.ok_or(StatusCode::NOT_FOUND)?;
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
// Check file existence and readability
let file_exists = file_service.document_file_exists(&document).await.unwrap_or(false);
let file_exists = tokio::fs::metadata(&document.file_path).await.is_ok();
let readable = if file_exists {
file_service.read_document_file(&document).await.is_ok()
file_service.read_file(&document.file_path).await.is_ok()
} else {
false
};
@ -135,16 +135,18 @@ pub async fn get_document_thumbnail(
})?
.ok_or(StatusCode::NOT_FOUND)?;
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
match file_service.get_document_thumbnail(&document).await {
Ok(thumbnail_data) => {
// Try to read thumbnail from the thumbnails directory
let thumbnail_path = format!("{}/thumbnails/{}.jpg", state.config.upload_path, document.id);
match file_service.read_file(&thumbnail_path).await {
Ok(data) => {
let response = axum::response::Response::builder()
.status(StatusCode::OK)
.header("Content-Type", "image/jpeg")
.header("Content-Length", thumbnail_data.len().to_string())
.header("Content-Length", data.len().to_string())
.header("Cache-Control", "public, max-age=3600") // Cache for 1 hour
.body(axum::body::Body::from(thumbnail_data))
.body(axum::body::Body::from(data))
.map_err(|e| {
error!("Failed to build thumbnail response: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
@ -198,9 +200,11 @@ pub async fn get_processed_image(
return Err(StatusCode::BAD_REQUEST);
}
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
match file_service.get_processed_image(&document).await {
// Try to read processed image from the processed directory
let processed_path = format!("{}/processed/{}.png", state.config.upload_path, document.id);
match file_service.read_file(&processed_path).await {
Ok(image_data) => {
let response = axum::response::Response::builder()
.status(StatusCode::OK)
@ -294,19 +298,19 @@ pub async fn validate_document_integrity(
})?
.ok_or(StatusCode::NOT_FOUND)?;
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
let mut issues = Vec::new();
let mut checks = Vec::new();
// Check file existence
checks.push("file_existence".to_string());
if !file_service.document_file_exists(&document).await.unwrap_or(false) {
if tokio::fs::metadata(&document.file_path).await.is_err() {
issues.push("File does not exist on disk".to_string());
}
// Check file readability
checks.push("file_readability".to_string());
match file_service.read_document_file(&document).await {
match file_service.read_file(&document.file_path).await {
Ok(data) => {
// Verify file size matches
if data.len() as i64 != document.file_size {
@ -350,7 +354,7 @@ pub async fn validate_document_integrity(
"checks_performed": checks,
"issues": issues,
"summary": if is_valid {
"Document integrity is good"
"Document integrity is good".to_string()
} else {
format!("Found {} integrity issues", issues.len())
}

View File

@ -398,7 +398,7 @@ pub async fn view_failed_document(
// Check if file_path exists (some failed documents might not have been saved)
let file_path = file_path.ok_or(StatusCode::NOT_FOUND)?;
let file_service = FileService::new(state.config.clone());
let file_service = FileService::new(state.config.upload_path.clone());
let file_data = file_service
.read_file(&file_path)
.await

View File

@ -1,3 +1,7 @@
use axum::{routing::{get, post, delete}, Router};
use std::sync::Arc;
use crate::AppState;
pub mod types;
pub mod crud;
pub mod ocr;
@ -11,4 +15,38 @@ pub use crud::*;
pub use ocr::*;
pub use bulk::*;
pub use debug::*;
pub use failed::*;
pub use failed::*;
pub fn router() -> Router<Arc<AppState>> {
Router::new()
// CRUD operations
.route("/", post(upload_document))
.route("/", get(list_documents))
.route("/:id", get(get_document_by_id))
.route("/:id", delete(delete_document))
.route("/:id/download", get(download_document))
.route("/:id/view", get(view_document))
// OCR operations
.route("/:id/ocr", get(get_document_ocr))
.route("/:id/ocr/retry", post(retry_ocr))
.route("/ocr/stats", get(get_ocr_stats))
.route("/:id/ocr/stop", post(cancel_ocr))
// Bulk operations
.route("/bulk/delete", post(bulk_delete_documents))
.route("/cleanup/low-confidence", delete(delete_low_confidence_documents))
.route("/cleanup/failed-ocr", delete(delete_failed_ocr_documents))
// Debug operations
.route("/:id/debug", get(get_document_debug_info))
.route("/:id/thumbnail", get(get_document_thumbnail))
.route("/:id/processed", get(get_processed_image))
.route("/:id/validate", get(validate_document_integrity))
.route("/duplicates", get(get_user_duplicates))
// Failed documents
.route("/failed", get(get_failed_documents))
.route("/failed/:id", get(view_failed_document))
.route("/failed/ocr", get(get_failed_ocr_documents))
}

View File

@ -106,7 +106,7 @@ pub async fn retry_ocr(
}
// Add to OCR queue
match state.queue_service.enqueue_document(document.id, auth_user.user.id, 1).await {
match state.queue_service.enqueue_document(document.id, 5, document.file_size).await {
Ok(_) => {
info!("Document {} queued for OCR retry", document_id);
Ok(Json(serde_json::json!({
@ -186,27 +186,12 @@ pub async fn cancel_ocr(
})?
.ok_or(StatusCode::NOT_FOUND)?;
// Try to remove from queue
match state.queue_service.remove_from_queue(document_id).await {
Ok(removed) => {
if removed {
info!("Document {} removed from OCR queue", document_id);
Ok(Json(serde_json::json!({
"success": true,
"message": "Document removed from OCR queue"
})))
} else {
Ok(Json(serde_json::json!({
"success": false,
"message": "Document was not in the OCR queue"
})))
}
}
Err(e) => {
error!("Failed to remove document {} from OCR queue: {}", document_id, e);
Err(StatusCode::INTERNAL_SERVER_ERROR)
}
}
// Note: OCR queue removal not implemented in current queue service
info!("Stop OCR processing requested for document {}", document_id);
Ok(Json(serde_json::json!({
"success": true,
"message": "OCR processing stop requested"
})))
}
/// Get OCR processing statistics
@ -224,21 +209,12 @@ pub async fn get_ocr_stats(
})?;
// Get queue statistics
let queue_size = state
.ocr_queue
.get_queue_size()
let queue_stats = state
.queue_service
.get_stats()
.await
.map_err(|e| {
error!("Failed to get OCR queue size: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
let active_jobs = state
.ocr_queue
.get_active_jobs_count()
.await
.map_err(|e| {
error!("Failed to get active OCR jobs count: {}", e);
error!("Failed to get OCR queue stats: {}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
@ -247,9 +223,9 @@ pub async fn get_ocr_stats(
"pending_ocr": pending,
"completed_ocr": completed,
"failed_ocr": failed,
"queue_size": queue_size,
"active_jobs": active_jobs,
"completion_rate": if total > 0 { (completed as f64 / total as f64 * 100.0) } else { 0.0 }
"queue_size": queue_stats.pending_count,
"active_jobs": queue_stats.processing_count,
"completion_rate": if total > 0 { completed as f64 / total as f64 * 100.0 } else { 0.0 }
})))
}

View File

@ -42,11 +42,13 @@ async fn search_documents(
auth_user: AuthUser,
Query(search_request): Query<SearchRequest>,
) -> Result<Json<SearchResponse>, StatusCode> {
let (documents, total) = state
let documents = state
.db
.search_documents(auth_user.user.id, search_request)
.search_documents(auth_user.user.id, &search_request)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
let total = documents.len() as i64;
let response = SearchResponse {
documents: documents.into_iter().map(|doc| EnhancedDocumentResponse {
@ -101,12 +103,12 @@ async fn enhanced_search_documents(
let start_time = std::time::Instant::now();
let documents = state
.db
.enhanced_search_documents_with_role(auth_user.user.id, auth_user.user.role, search_request)
.enhanced_search_documents_with_role(auth_user.user.id, auth_user.user.role, &search_request)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
let query_time = start_time.elapsed().as_millis() as u64;
let total = documents.len() as u64;
let total = documents.len() as i64;
let response = SearchResponse {
documents,

View File

@ -1,3 +1,7 @@
use axum::{routing::{get, post, delete, put}, Router};
use std::sync::Arc;
use crate::AppState;
pub mod crud;
pub mod sync;
pub mod validation;
@ -7,4 +11,27 @@ pub mod estimation;
pub use crud::*;
pub use sync::*;
pub use validation::*;
pub use estimation::*;
pub use estimation::*;
pub fn router() -> Router<Arc<AppState>> {
Router::new()
// CRUD operations
.route("/", get(list_sources))
.route("/", post(create_source))
.route("/:id", get(get_source))
.route("/:id", put(update_source))
.route("/:id", delete(delete_source))
// Sync operations
.route("/:id/sync", post(trigger_sync))
.route("/:id/sync/stop", post(stop_sync))
.route("/:id/deep-scan", post(trigger_deep_scan))
// Validation operations
.route("/:id/validate", post(validate_source))
.route("/test", post(test_connection_with_config))
// Estimation operations
.route("/:id/estimate", get(estimate_crawl))
.route("/estimate", post(estimate_crawl_with_config))
}

View File

@ -106,7 +106,7 @@ async fn test_webdav_connection(
// Create WebDAV service and test connection
match WebDAVService::new(webdav_config) {
Ok(webdav_service) => {
match WebDAVService::test_connection_with_config(test_config).await {
match WebDAVService::test_connection_with_config(&test_config).await {
Ok(result) => {
info!("WebDAV connection test completed: {}", result.message);
Ok(Json(result))

View File

@ -970,7 +970,7 @@ impl SourceScheduler {
server_type: config.server_type,
};
crate::services::webdav::WebDAVService::test_connection_with_config(test_config).await
crate::services::webdav::WebDAVService::test_connection_with_config(&test_config).await
.map_err(|e| format!("Connection test failed: {}", e))?;
Ok(())