feat(client/server): add endpoint for fetching individual documents, and fix frontend static files not being served
parent 3ae542088b
commit f0f90d71de
@@ -310,7 +310,7 @@ const RecentDocuments: React.FC<RecentDocumentsProps> = ({ documents = [] }) =>
             sx={{
               px: 0,
               py: 1.5,
-              borderBottom: index < Math.min(documents.length, 5) - 1 ? 1 : 0,
+              borderBottom: index < Math.min(safeDocuments.length, 5) - 1 ? 1 : 0,
               borderColor: 'divider',
             }}
           >
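Note: safeDocuments is defined outside this hunk. A hypothetical reconstruction, consistent with the Array.isArray guards this commit introduces elsewhere (not part of the diff):

    // Hypothetical sketch -- the actual definition is not shown in this commit.
    // Guards the list rendering against a non-array `documents` prop.
    const safeDocuments = Array.isArray(documents) ? documents : [];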
@@ -361,7 +361,13 @@ const RecentDocuments: React.FC<RecentDocumentsProps> = ({ documents = [] }) =>
                   <IconButton size="small" onClick={() => navigate(`/documents/${doc.id}`)}>
                     <ViewIcon fontSize="small" />
                   </IconButton>
-                  <IconButton size="small">
+                  <IconButton
+                    size="small"
+                    onClick={() => {
+                      const downloadUrl = `/api/documents/${doc.id}/download`;
+                      window.open(downloadUrl, '_blank');
+                    }}
+                  >
                     <DownloadIcon fontSize="small" />
                   </IconButton>
                 </Box>
@@ -513,17 +519,28 @@ const Dashboard: React.FC = () => {
   useEffect(() => {
     const fetchDashboardData = async (): Promise<void> => {
       try {
-        // Fetch both documents and metrics
-        const [docsResponse, metricsResponse] = await Promise.all([
-          api.get<Document[]>('/documents'),
-          api.get<any>('/metrics')
-        ]);
+        // Fetch documents with better error handling
+        let docs: Document[] = [];
+        try {
+          const docsResponse = await api.get<Document[]>('/documents');
+          docs = Array.isArray(docsResponse.data) ? docsResponse.data : [];
+        } catch (docError) {
+          console.error('Failed to fetch documents:', docError);
+          // Continue with empty documents array
+        }
 
-        const docs = docsResponse.data || [];
         setDocuments(docs);
 
-        const metricsData = metricsResponse.data;
-        setMetrics(metricsData);
+        // Fetch metrics with better error handling
+        let metricsData: any = null;
+        try {
+          const metricsResponse = await api.get<any>('/metrics');
+          metricsData = metricsResponse.data;
+          setMetrics(metricsData);
+        } catch (metricsError) {
+          console.error('Failed to fetch metrics:', metricsError);
+          // Continue with null metrics - will fall back to client calculation
+        }
 
         // Use backend metrics if available, otherwise fall back to client calculation
         if (metricsData?.documents) {
@@ -536,7 +553,7 @@ const Dashboard: React.FC = () => {
         } else {
           // Fallback to client-side calculation
           const totalSize = docs.reduce((sum, doc) => sum + (doc.file_size || 0), 0);
-          const ocrProcessed = docs.filter(doc => doc.ocr_text).length;
+          const ocrProcessed = docs.filter(doc => doc.has_ocr_text || doc.ocr_text).length;
 
           setStats({
             totalDocuments: docs.length,
@@ -546,7 +563,15 @@ const Dashboard: React.FC = () => {
           });
         }
       } catch (error) {
-        console.error('Failed to fetch dashboard data:', error);
+        console.error('Unexpected error in dashboard data fetch:', error);
+        // Set default empty state
+        setDocuments([]);
+        setStats({
+          totalDocuments: 0,
+          totalSize: 0,
+          ocrProcessed: 0,
+          searchablePages: 0,
+        });
       } finally {
         setLoading(false);
       }
@@ -142,20 +142,7 @@ export const documentService = {
   },
 
   getById: (id: string) => {
-    // Use the document list endpoint with pagination to find the specific document
-    // This is a temporary solution until we have a proper document details endpoint
-    return api.get<PaginatedResponse<Document>>('/documents', {
-      params: {
-        limit: 1000, // Fetch a reasonable amount to find our document
-        offset: 0
-      }
-    }).then(response => {
-      const document = response.data.documents.find(doc => doc.id === id);
-      if (!document) {
-        throw new Error('Document not found');
-      }
-      return { data: document };
-    })
+    return api.get<Document>(`/documents/${id}`)
   },
 
   download: (id: string) => {
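A minimal sketch of consuming the reworked getById (documentService and the Document fields come from this diff; showDocument and its error handling are illustrative, not part of the commit):

    // Illustrative usage of the new single-document client call.
    async function showDocument(id: string): Promise<void> {
      try {
        const { data } = await documentService.getById(id);
        console.log(`${data.original_filename} (${data.file_size} bytes)`);
      } catch (err) {
        // The old client-side 'Document not found' throw is now a 404 from the server.
        console.error('Failed to load document:', err);
      }
    }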
src/main.rs (40 changed lines)
@@ -1,6 +1,4 @@
 use axum::{
-    http::StatusCode,
-    response::Html,
     routing::get,
     Router,
 };
@@ -284,10 +282,40 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         .nest("/api/users", readur::routes::users::router())
         .nest("/api/webdav", readur::routes::webdav::router())
         .merge(readur::swagger::create_swagger_router())
-        .fallback_service(ServeDir::new("frontend/dist").fallback(ServeFile::new("frontend/dist/index.html")))
+        .fallback_service(
+            ServeDir::new("dist")
+                .precompressed_gzip()
+                .precompressed_br()
+                .fallback(ServeFile::new("dist/index.html"))
+        )
         .layer(CorsLayer::permissive())
         .with_state(web_state.clone());
 
+    // Debug static file serving setup
+    let current_dir = std::env::current_dir().unwrap_or_else(|_| std::path::PathBuf::from("."));
+    info!("Server working directory: {}", current_dir.display());
+
+    let dist_path = current_dir.join("dist");
+    info!("Looking for static files at: {}", dist_path.display());
+    info!("dist directory exists: {}", dist_path.exists());
+
+    if dist_path.exists() {
+        if let Ok(entries) = std::fs::read_dir(&dist_path) {
+            info!("Contents of dist directory:");
+            for entry in entries.flatten() {
+                info!("  - {}", entry.file_name().to_string_lossy());
+            }
+        }
+
+        let index_path = dist_path.join("index.html");
+        info!("index.html exists: {}", index_path.exists());
+        if index_path.exists() {
+            if let Ok(metadata) = std::fs::metadata(&index_path) {
+                info!("index.html size: {} bytes", metadata.len());
+            }
+        }
+    }
+
     let listener = tokio::net::TcpListener::bind(&config.server_address).await?;
     info!("Server starting on {}", config.server_address);
 
@@ -297,9 +325,3 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 }
 
 
-async fn serve_spa() -> Result<Html<String>, StatusCode> {
-    match tokio::fs::read_to_string("frontend/dist/index.html").await {
-        Ok(html) => Ok(Html(html)),
-        Err(_) => Err(StatusCode::NOT_FOUND),
-    }
-}
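One way to confirm the new precompressed serving, assuming a Node 18+ script (browsers negotiate Accept-Encoding themselves) and the default server address, which is an assumption here (see config.server_address):

    // Sketch: expect "br" only if a dist/index.html.br variant exists on disk.
    const res = await fetch('http://localhost:8000/index.html', {
      headers: { 'Accept-Encoding': 'br' },
    });
    console.log(res.headers.get('content-encoding'));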
@@ -29,6 +29,7 @@ pub fn router() -> Router<Arc<AppState>> {
     Router::new()
         .route("/", post(upload_document))
         .route("/", get(list_documents))
+        .route("/{id}", get(get_document_by_id))
         .route("/{id}/download", get(download_document))
         .route("/{id}/view", get(view_document))
         .route("/{id}/thumbnail", get(get_document_thumbnail))
@@ -38,6 +39,59 @@ pub fn router() -> Router<Arc<AppState>> {
         .route("/failed-ocr", get(get_failed_ocr_documents))
 }
 
+#[utoipa::path(
+    get,
+    path = "/api/documents/{id}",
+    tag = "documents",
+    security(
+        ("bearer_auth" = [])
+    ),
+    params(
+        ("id" = uuid::Uuid, Path, description = "Document ID")
+    ),
+    responses(
+        (status = 200, description = "Document details", body = DocumentResponse),
+        (status = 404, description = "Document not found"),
+        (status = 401, description = "Unauthorized")
+    )
+)]
+async fn get_document_by_id(
+    State(state): State<Arc<AppState>>,
+    auth_user: AuthUser,
+    Path(document_id): Path<uuid::Uuid>,
+) -> Result<Json<DocumentResponse>, StatusCode> {
+    // Get documents for user with proper role-based access
+    let documents = state
+        .db
+        .get_documents_by_user_with_role(auth_user.user.id, auth_user.user.role, 1000, 0)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+    // Find the specific document
+    let document = documents
+        .into_iter()
+        .find(|doc| doc.id == document_id)
+        .ok_or(StatusCode::NOT_FOUND)?;
+
+    // Convert to DocumentResponse
+    let response = DocumentResponse {
+        id: document.id,
+        filename: document.filename,
+        original_filename: document.original_filename,
+        file_size: document.file_size,
+        mime_type: document.mime_type,
+        created_at: document.created_at,
+        has_ocr_text: document.ocr_text.is_some(),
+        tags: document.tags,
+        ocr_confidence: document.ocr_confidence,
+        ocr_word_count: document.ocr_word_count,
+        ocr_processing_time_ms: document.ocr_processing_time_ms,
+        ocr_status: document.ocr_status,
+    };
+
+    Ok(Json(response))
+}
+
 #[utoipa::path(
     post,
     path = "/api/documents",
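For completeness, a hedged sketch of calling the new endpoint over raw HTTP, matching the bearer_auth scheme declared in the annotation above (token and documentId are placeholders):

    // Illustrative: GET /api/documents/{id} returning the DocumentResponse shape above.
    const res = await fetch(`/api/documents/${documentId}`, {
      headers: { Authorization: `Bearer ${token}` },
    });
    if (res.status === 404) throw new Error('Document not found');
    const doc = await res.json(); // id, filename, has_ocr_text, ocr_status, ...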
@@ -211,7 +211,7 @@ impl SourceSyncService {
    async fn perform_sync_internal<F, D, Fut1, Fut2>(
        &self,
        user_id: Uuid,
-        _source_id: Uuid,
+        source_id: Uuid,
        watch_folders: &[String],
        file_extensions: &[String],
        enable_background_ocr: bool,
@@ -270,7 +270,7 @@ impl SourceSyncService {
                Self::process_single_file(
                    state_clone,
                    user_id,
-                    _source_id,
+                    source_id,
                    &file_info_clone,
                    enable_background_ocr,
                    semaphore_clone,
@@ -311,7 +311,7 @@ impl SourceSyncService {
    async fn perform_sync_internal_with_cancellation<F, D, Fut1, Fut2>(
        &self,
        user_id: Uuid,
-        _source_id: Uuid,
+        source_id: Uuid,
        watch_folders: &[String],
        file_extensions: &[String],
        enable_background_ocr: bool,
@@ -326,7 +326,54 @@ impl SourceSyncService {
        Fut2: std::future::Future<Output = Result<Vec<u8>>>,
    {
        let mut total_files_processed = 0;
+        let mut total_files_discovered = 0;
+        let mut total_size_bytes = 0i64;
+
+        // First pass: discover all files and calculate totals
+        for folder_path in watch_folders {
+            if cancellation_token.is_cancelled() {
+                info!("Sync cancelled during folder discovery");
+                return Err(anyhow!("Sync cancelled"));
+            }
+
+            match discover_files(folder_path.clone()).await {
+                Ok(files) => {
+                    let files_to_process: Vec<_> = files.into_iter()
+                        .filter(|file_info| {
+                            if file_info.is_directory {
+                                return false;
+                            }
+
+                            let file_extension = Path::new(&file_info.name)
+                                .extension()
+                                .and_then(|ext| ext.to_str())
+                                .unwrap_or("")
+                                .to_lowercase();
+
+                            file_extensions.contains(&file_extension)
+                        })
+                        .collect();
+
+                    total_files_discovered += files_to_process.len();
+                    total_size_bytes += files_to_process.iter().map(|f| f.size).sum::<i64>();
+                }
+                Err(e) => {
+                    error!("Failed to discover files in folder {}: {}", folder_path, e);
+                }
+            }
+        }
+
+        // Update initial statistics with discovered files
+        if let Err(e) = self.state.db.update_source_sync_stats(
+            source_id,
+            0, // files_synced starts at 0
+            total_files_discovered as i64,
+            total_size_bytes,
+        ).await {
+            error!("Failed to update initial sync stats: {}", e);
+        }
 
+        // Second pass: process files and update stats progressively
        for folder_path in watch_folders {
            // Check for cancellation before processing each folder
            if cancellation_token.is_cancelled() {
@@ -389,7 +436,7 @@ impl SourceSyncService {
                Self::process_single_file_with_cancellation(
                    state_clone,
                    user_id,
-                    _source_id,
+                    source_id,
                    &file_info_clone,
                    enable_background_ocr,
                    semaphore_clone,
@@ -401,7 +448,7 @@ impl SourceSyncService {
            file_futures.push(future);
        }
 
-        // Process files concurrently
+        // Process files concurrently and update stats periodically
        while let Some(result) = file_futures.next().await {
            // Check for cancellation during processing
            if cancellation_token.is_cancelled() {
@@ -413,7 +460,22 @@ impl SourceSyncService {
                Ok(processed) => {
                    if processed {
                        folder_files_processed += 1;
-                        info!("Successfully processed file ({} completed in this folder)", folder_files_processed);
+                        total_files_processed += 1;
+
+                        // Update statistics every 10 files processed or every file if under 10 total
+                        if total_files_processed % 10 == 0 || total_files_discovered <= 10 {
+                            let files_pending = total_files_discovered as i64 - total_files_processed as i64;
+                            if let Err(e) = self.state.db.update_source_sync_stats(
+                                source_id,
+                                total_files_processed as i64,
+                                files_pending.max(0),
+                                total_size_bytes,
+                            ).await {
+                                error!("Failed to update sync stats: {}", e);
+                            }
+                        }
+
+                        info!("Successfully processed file ({} completed in this folder, {} total)", folder_files_processed, total_files_processed);
                    }
                }
                Err(error) => {
@@ -421,8 +483,6 @@ impl SourceSyncService {
                        }
                    }
                }
-
-                total_files_processed += folder_files_processed;
            }
            Err(e) => {
                error!("Failed to discover files in folder {}: {}", folder_path, e);
@@ -430,6 +490,16 @@ impl SourceSyncService {
            }
        }
 
+        // Final statistics update
+        if let Err(e) = self.state.db.update_source_sync_stats(
+            source_id,
+            total_files_processed as i64,
+            0, // All files are now processed
+            total_size_bytes,
+        ).await {
+            error!("Failed to update final sync stats: {}", e);
+        }
 
        info!("Source sync completed: {} files processed", total_files_processed);
        Ok(total_files_processed)
    }