feat(source): rename source error types and update tests

perf3ct 2025-08-17 22:37:41 +00:00
parent 6a64d9e6ed
commit d793509af9
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
17 changed files with 77 additions and 89 deletions

View File

@@ -6,7 +6,6 @@ import {
   IconButton,
   Divider,
   Chip,
-  Grid,
   Card,
   CardContent,
   Collapse,
@@ -22,6 +21,7 @@ import {
   Tooltip,
   Paper,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   ContentCopy as CopyIcon,
   ExpandMore as ExpandMoreIcon,

View File

@@ -356,7 +356,7 @@ const RecommendationsSection: React.FC<RecommendationsSectionProps> = ({ failure
     <Stack spacing={3}>
       {sortedFailureTypes.map((failureType, index) => {
-        const recommendation = getRecommendationsForFailureType(failureType);
+        const recommendation = getRecommendationsForFailureType(failureType as SourceErrorType);
         const Icon = recommendation.icon;
         const count = failureTypeStats[failureType];

View File

@@ -17,13 +17,13 @@ import {
   MenuItem,
   Card,
   CardContent,
-  Grid,
   LinearProgress,
   Skeleton,
   Stack,
   Fade,
   Collapse,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   ExpandMore as ExpandMoreIcon,
   Search as SearchIcon,

View File

@@ -4,11 +4,11 @@ import {
   Card,
   CardContent,
   Typography,
-  Grid,
   LinearProgress,
   Stack,
   Skeleton,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   Error as ErrorIcon,
   Warning as WarningIcon,

View File

@@ -6,7 +6,6 @@ import {
   IconButton,
   Divider,
   Chip,
-  Grid,
   Card,
   CardContent,
   Collapse,
@@ -22,6 +21,7 @@ import {
   Tooltip,
   Paper,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   ContentCopy as CopyIcon,
   ExpandMore as ExpandMoreIcon,

View File

@@ -4,11 +4,11 @@ import {
   Card,
   CardContent,
   Typography,
-  Grid,
   LinearProgress,
   Stack,
   Skeleton,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   Error as ErrorIcon,
   Warning as WarningIcon,

View File

@@ -17,13 +17,13 @@ import {
   MenuItem,
   Card,
   CardContent,
-  Grid,
   LinearProgress,
   Skeleton,
   Stack,
   Fade,
   Collapse,
 } from '@mui/material';
+import Grid from '@mui/material/GridLegacy';
 import {
   ExpandMore as ExpandMoreIcon,
   Search as SearchIcon,

View File

@@ -6,7 +6,7 @@ use std::collections::HashMap;
 use super::Database;
 use crate::models::{
     CreateSourceScanFailure, SourceScanFailure, SourceScanFailureStats,
-    MonitoredSourceType, SourceErrorType, SourceErrorSeverity, ListFailuresQuery,
+    ErrorSourceType, SourceErrorType, SourceErrorSeverity, ListFailuresQuery,
 };

 impl Database {
@@ -179,7 +179,7 @@ impl Database {
     pub async fn is_source_known_failure(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
     ) -> Result<bool> {
@@ -210,7 +210,7 @@ impl Database {
     pub async fn get_source_retry_candidates(
         &self,
         user_id: Uuid,
-        source_type: Option<MonitoredSourceType>,
+        source_type: Option<ErrorSourceType>,
         limit: i32,
     ) -> Result<Vec<SourceScanFailure>> {
         self.with_retry(|| async {
@@ -262,7 +262,7 @@ impl Database {
     pub async fn reset_source_scan_failure(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
     ) -> Result<bool> {
@@ -286,7 +286,7 @@ impl Database {
     pub async fn resolve_source_scan_failure(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
         resolution_method: &str,
@@ -312,7 +312,7 @@ impl Database {
     pub async fn exclude_source_from_scan(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
         user_notes: Option<&str>,
@@ -344,7 +344,7 @@ impl Database {
     pub async fn get_source_scan_failure_stats(
         &self,
         user_id: Uuid,
-        source_type: Option<MonitoredSourceType>,
+        source_type: Option<ErrorSourceType>,
     ) -> Result<SourceScanFailureStats> {
         self.with_retry(|| async {
             let mut sql = String::from(
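The database helpers keep their shapes; only the parameter type changes. A minimal illustrative call against one of the fully visible signatures, assuming the crate's usual anyhow-style Result alias (the user id and path below are made up for the example):

use uuid::Uuid;
use crate::db::Database;
use crate::models::ErrorSourceType;

// Illustrative caller: asks whether a path already has a recorded scan failure.
async fn is_known_bad(db: &Database, user_id: Uuid) -> anyhow::Result<bool> {
    // Signature shown in the hunk above: (user_id, source_type, source_id, resource_path) -> Result<bool>.
    db.is_source_known_failure(user_id, ErrorSourceType::WebDAV, None, "/Documents/reports")
        .await
}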

View File

@@ -8,24 +8,12 @@ pub mod source;
 pub mod source_error;
 pub mod responses;

-// Re-export commonly used types - being explicit to avoid naming conflicts
+// Re-export commonly used types
 pub use user::*;
 pub use document::*;
 pub use search::*;
 pub use settings::*;
-// Re-export source types with explicit naming to avoid conflicts
-pub use source::{
-    Source, SourceStatus, CreateSource, UpdateSource,
-    SourceResponse, SourceWithStats, WebDAVSourceConfig,
-    LocalFolderSourceConfig, S3SourceConfig, Notification,
-    NotificationSummary, CreateNotification, WebDAVFolderInfo
-};
-// Use fully qualified path for source::SourceType to distinguish from source_error::MonitoredSourceType
-pub use source::SourceType;
-// Re-export source_error types with full qualification
+pub use source::*;
 pub use source_error::*;
 pub use responses::*;
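The explicit re-export list existed to keep source::SourceType and the old source_error::MonitoredSourceType from being confused; with the rename to ErrorSourceType the module can go back to plain glob re-exports. A small illustrative sketch of what a downstream import can now look like (the function is a placeholder, not code from this repository):

// Both enums now come straight from the glob re-exports, no aliasing or qualification needed.
use crate::models::{ErrorSourceType, SourceType};

// Placeholder: shows only that the two names coexist after the rename.
fn error_label(_source_kind: SourceType, error_kind: ErrorSourceType) -> String {
    // Uses the Display impl defined in the source_error model ("webdav", "s3", ...).
    error_kind.to_string()
}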

View File

@@ -10,7 +10,7 @@ use utoipa::ToSchema;
 /// Generic source types that can be monitored for errors
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, sqlx::Type, ToSchema)]
 #[sqlx(type_name = "source_type", rename_all = "lowercase")]
-pub enum MonitoredSourceType {
+pub enum ErrorSourceType {
     #[sqlx(rename = "webdav")]
     WebDAV,
     #[sqlx(rename = "s3")]
@@ -25,15 +25,15 @@ pub enum MonitoredSourceType {
     OneDrive,
 }

-impl fmt::Display for MonitoredSourceType {
+impl fmt::Display for ErrorSourceType {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            MonitoredSourceType::WebDAV => write!(f, "webdav"),
-            MonitoredSourceType::S3 => write!(f, "s3"),
-            MonitoredSourceType::Local => write!(f, "local"),
-            MonitoredSourceType::Dropbox => write!(f, "dropbox"),
-            MonitoredSourceType::GDrive => write!(f, "gdrive"),
-            MonitoredSourceType::OneDrive => write!(f, "onedrive"),
+            ErrorSourceType::WebDAV => write!(f, "webdav"),
+            ErrorSourceType::S3 => write!(f, "s3"),
+            ErrorSourceType::Local => write!(f, "local"),
+            ErrorSourceType::Dropbox => write!(f, "dropbox"),
+            ErrorSourceType::GDrive => write!(f, "gdrive"),
+            ErrorSourceType::OneDrive => write!(f, "onedrive"),
         }
     }
 }
@@ -163,7 +163,7 @@ impl std::str::FromStr for RetryStrategy {
 pub struct SourceScanFailure {
     pub id: Uuid,
     pub user_id: Uuid,
-    pub source_type: MonitoredSourceType,
+    pub source_type: ErrorSourceType,
     pub source_id: Option<Uuid>,
     pub resource_path: String,
@@ -219,7 +219,7 @@ pub struct SourceScanFailure {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct CreateSourceScanFailure {
     pub user_id: Uuid,
-    pub source_type: MonitoredSourceType,
+    pub source_type: ErrorSourceType,
     pub source_id: Option<Uuid>,
     pub resource_path: String,
     pub error_type: SourceErrorType,
@@ -236,7 +236,7 @@ pub struct CreateSourceScanFailure {
 #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
 pub struct SourceScanFailureResponse {
     pub id: Uuid,
-    pub source_type: MonitoredSourceType,
+    pub source_type: ErrorSourceType,
     pub source_name: Option<String>, // From joined sources table
     pub resource_path: String,
     pub error_type: SourceErrorType,
@@ -296,7 +296,7 @@ pub trait SourceErrorClassifier: Send + Sync {
     fn should_retry(&self, failure: &SourceScanFailure) -> bool;

     /// Get the source type this classifier handles
-    fn source_type(&self) -> MonitoredSourceType;
+    fn source_type(&self) -> ErrorSourceType;
 }

 /// Context information available during error classification
@@ -391,7 +391,7 @@ pub struct ExcludeResourceRequest {
 /// Query parameters for listing failures
 #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
 pub struct ListFailuresQuery {
-    pub source_type: Option<MonitoredSourceType>,
+    pub source_type: Option<ErrorSourceType>,
     pub source_id: Option<Uuid>,
     pub error_type: Option<SourceErrorType>,
     pub severity: Option<SourceErrorSeverity>,
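The rename is mechanical: the sqlx type mapping (type_name = "source_type", lowercase renames) and the Display strings are untouched, so stored rows and serialized values keep the same spelling. A tiny sketch using only what this hunk shows:

use crate::models::ErrorSourceType;

fn rename_is_wire_compatible() {
    let kind = ErrorSourceType::GDrive;
    // Display output is unchanged by the Rust-side rename.
    assert_eq!(kind.to_string(), "gdrive");
    // Likewise, #[sqlx(type_name = "source_type", rename_all = "lowercase")] still maps
    // the same database enum values onto the renamed Rust type.
}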

View File

@@ -13,7 +13,7 @@ use uuid::Uuid;
 use crate::{
     auth::AuthUser,
     models::{
-        SourceScanFailureResponse, SourceScanFailureStats, MonitoredSourceType,
+        SourceScanFailureResponse, SourceScanFailureStats, ErrorSourceType,
         ListFailuresQuery, RetryFailureRequest, ExcludeResourceRequest,
     },
     services::source_error_tracker::SourceErrorTracker,
@@ -25,12 +25,12 @@ pub fn router() -> Router<Arc<AppState>> {
         .route("/", get(list_source_failures))
         .route("/stats", get(get_failure_stats))
         .route("/retry-candidates", get(get_retry_candidates))
-        .route("/:failure_id", get(get_source_failure))
-        .route("/:failure_id/retry", post(retry_source_failure))
-        .route("/:failure_id/exclude", post(exclude_source_failure))
-        .route("/:failure_id/resolve", post(resolve_source_failure))
-        .route("/type/:source_type", get(list_source_type_failures))
-        .route("/type/:source_type/stats", get(get_source_type_stats))
+        .route("/{failure_id}", get(get_source_failure))
+        .route("/{failure_id}/retry", post(retry_source_failure))
+        .route("/{failure_id}/exclude", post(exclude_source_failure))
+        .route("/{failure_id}/resolve", post(resolve_source_failure))
+        .route("/type/{source_type}", get(list_source_type_failures))
+        .route("/type/{source_type}/stats", get(get_source_type_stats))
 }

 #[utoipa::path(
@@ -105,9 +105,9 @@ async fn get_failure_stats(
     let source_type = params.get("source_type")
         .and_then(|v| v.as_str())
         .and_then(|s| match s.to_lowercase().as_str() {
-            "webdav" => Some(MonitoredSourceType::WebDAV),
-            "s3" => Some(MonitoredSourceType::S3),
-            "local" => Some(MonitoredSourceType::Local),
+            "webdav" => Some(ErrorSourceType::WebDAV),
+            "s3" => Some(ErrorSourceType::S3),
+            "local" => Some(ErrorSourceType::Local),
             _ => None,
         });
@@ -152,9 +152,9 @@ async fn get_retry_candidates(
     let source_type = params.get("source_type")
         .and_then(|v| v.as_str())
         .and_then(|s| match s.to_lowercase().as_str() {
-            "webdav" => Some(MonitoredSourceType::WebDAV),
-            "s3" => Some(MonitoredSourceType::S3),
-            "local" => Some(MonitoredSourceType::Local),
+            "webdav" => Some(ErrorSourceType::WebDAV),
+            "s3" => Some(ErrorSourceType::S3),
+            "local" => Some(ErrorSourceType::Local),
             _ => None,
         });
@@ -419,9 +419,9 @@ async fn list_source_type_failures(
     // Parse source type
     let source_type = match source_type_str.to_lowercase().as_str() {
-        "webdav" => MonitoredSourceType::WebDAV,
-        "s3" => MonitoredSourceType::S3,
-        "local" => MonitoredSourceType::Local,
+        "webdav" => ErrorSourceType::WebDAV,
+        "s3" => ErrorSourceType::S3,
+        "local" => ErrorSourceType::Local,
         _ => return Err(StatusCode::BAD_REQUEST),
     };
@@ -468,9 +468,9 @@ async fn get_source_type_stats(
     // Parse source type
     let source_type = match source_type_str.to_lowercase().as_str() {
-        "webdav" => MonitoredSourceType::WebDAV,
-        "s3" => MonitoredSourceType::S3,
-        "local" => MonitoredSourceType::Local,
+        "webdav" => ErrorSourceType::WebDAV,
+        "s3" => ErrorSourceType::S3,
+        "local" => ErrorSourceType::Local,
         _ => return Err(StatusCode::BAD_REQUEST),
     };
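Beyond the rename, this file also moves the route templates from the older `:param` style to `{param}` captures, which is the syntax newer axum releases (0.8 and later) expect; extraction on the handler side is unchanged. A minimal sketch of that pattern (the handler and router below are illustrative placeholders, not code from this repository):

use axum::{extract::Path, routing::get, Router};

// Placeholder handler: Path extraction works the same with the new capture syntax.
async fn get_failure(Path(failure_id): Path<String>) -> String {
    format!("failure {failure_id}")
}

// "{failure_id}" is the capture form newer axum versions accept; ":failure_id" is the older form.
fn example_router() -> Router {
    Router::new().route("/{failure_id}", get(get_failure))
}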

View File

@@ -47,9 +47,9 @@ fn map_to_webdav_severity(source_severity: &SourceErrorSeverity) -> WebDAVScanFa
 pub fn router() -> Router<Arc<AppState>> {
     Router::new()
         .route("/", get(list_scan_failures))
-        .route("/:id", get(get_scan_failure))
-        .route("/:id/retry", post(retry_scan_failure))
-        .route("/:id/exclude", post(exclude_scan_failure))
+        .route("/{id}", get(get_scan_failure))
+        .route("/{id}/retry", post(retry_scan_failure))
+        .route("/{id}/exclude", post(exclude_scan_failure))
         .route("/retry-candidates", get(get_retry_candidates))
 }
@@ -109,9 +109,9 @@ pub async fn list_scan_failures(
     );

     // Get WebDAV failures from generic system using source type filter
-    use crate::models::{MonitoredSourceType, ListFailuresQuery};
+    use crate::models::{ErrorSourceType, ListFailuresQuery};
     let query = ListFailuresQuery {
-        source_type: Some(MonitoredSourceType::WebDAV),
+        source_type: Some(ErrorSourceType::WebDAV),
         include_resolved: Some(false),
         ..Default::default()
     };
@@ -124,7 +124,7 @@ pub async fn list_scan_failures(
     })?;

     // Get statistics for WebDAV
-    let generic_stats = error_tracker.get_stats(auth_user.user.id, Some(MonitoredSourceType::WebDAV)).await
+    let generic_stats = error_tracker.get_stats(auth_user.user.id, Some(ErrorSourceType::WebDAV)).await
         .map_err(|e| {
             error!("Failed to get scan failure stats: {}", e);
             StatusCode::INTERNAL_SERVER_ERROR
@@ -413,7 +413,7 @@ pub async fn get_retry_candidates(
     let error_tracker = crate::services::source_error_tracker::SourceErrorTracker::new(state.db.clone());

-    match error_tracker.get_retry_candidates(auth_user.user.id, Some(crate::models::MonitoredSourceType::WebDAV), Some(20)).await {
+    match error_tracker.get_retry_candidates(auth_user.user.id, Some(crate::models::ErrorSourceType::WebDAV), Some(20)).await {
         Ok(candidates) => {
             let directories: Vec<String> = candidates.iter()
                 .map(|failure| failure.resource_path.clone())

View File

@@ -2,7 +2,7 @@ use anyhow::Result;
 use std::collections::HashMap;

 use crate::models::{
-    MonitoredSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
+    ErrorSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
     ErrorContext, ErrorClassification, SourceScanFailure, RetryStrategy,
 };
@@ -356,7 +356,7 @@ impl SourceErrorClassifier for LocalFolderErrorClassifier {
         }
     }

-    fn source_type(&self) -> MonitoredSourceType {
-        MonitoredSourceType::Local
+    fn source_type(&self) -> ErrorSourceType {
+        ErrorSourceType::Local
     }
 }

View File

@@ -2,7 +2,7 @@ use anyhow::Result;
 use std::collections::HashMap;

 use crate::models::{
-    MonitoredSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
+    ErrorSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
     ErrorContext, ErrorClassification, SourceScanFailure, RetryStrategy,
 };
@@ -301,7 +301,7 @@ impl SourceErrorClassifier for S3ErrorClassifier {
         }
     }

-    fn source_type(&self) -> MonitoredSourceType {
-        MonitoredSourceType::S3
+    fn source_type(&self) -> ErrorSourceType {
+        ErrorSourceType::S3
     }
 }

View File

@@ -8,7 +8,7 @@ use uuid::Uuid;
 use crate::db::Database;
 use crate::models::{
     CreateSourceScanFailure, SourceScanFailure, SourceScanFailureResponse,
-    SourceScanFailureStats, MonitoredSourceType, SourceErrorType, SourceErrorSeverity,
+    SourceScanFailureStats, ErrorSourceType, SourceErrorType, SourceErrorSeverity,
     SourceErrorClassifier, ErrorContext, ErrorClassification,
     ListFailuresQuery, RetryFailureRequest, ExcludeResourceRequest,
 };
@@ -17,7 +17,7 @@ use crate::models::{
 #[derive(Clone)]
 pub struct SourceErrorTracker {
     db: Database,
-    classifiers: HashMap<MonitoredSourceType, Arc<dyn SourceErrorClassifier>>,
+    classifiers: HashMap<ErrorSourceType, Arc<dyn SourceErrorClassifier>>,
 }

 impl SourceErrorTracker {
@@ -39,7 +39,7 @@ impl SourceErrorTracker {
     pub async fn track_error(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
         error: &anyhow::Error,
@@ -89,7 +89,7 @@ impl SourceErrorTracker {
     pub async fn should_skip_resource(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
     ) -> Result<bool> {
@@ -118,7 +118,7 @@ impl SourceErrorTracker {
     pub async fn mark_success(
         &self,
         user_id: Uuid,
-        source_type: MonitoredSourceType,
+        source_type: ErrorSourceType,
         source_id: Option<Uuid>,
         resource_path: &str,
     ) -> Result<()> {
@@ -152,7 +152,7 @@ impl SourceErrorTracker {
     pub async fn get_retry_candidates(
         &self,
         user_id: Uuid,
-        source_type: Option<MonitoredSourceType>,
+        source_type: Option<ErrorSourceType>,
         limit: Option<i32>,
     ) -> Result<Vec<SourceScanFailure>> {
         self.db.get_source_retry_candidates(user_id, source_type, limit.unwrap_or(10)).await
@@ -293,7 +293,7 @@ impl SourceErrorTracker {
     }

     /// Get failure statistics
-    pub async fn get_stats(&self, user_id: Uuid, source_type: Option<MonitoredSourceType>) -> Result<SourceScanFailureStats> {
+    pub async fn get_stats(&self, user_id: Uuid, source_type: Option<ErrorSourceType>) -> Result<SourceScanFailureStats> {
         self.db.get_source_scan_failure_stats(user_id, source_type).await
     }
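The tracker's public surface is otherwise unchanged. A minimal usage sketch built only from the signatures visible above; the setup, user id, and path are illustrative, and Result is assumed to be the anyhow alias used elsewhere in the module:

use uuid::Uuid;
use crate::db::Database;
use crate::models::ErrorSourceType;
use crate::services::source_error_tracker::SourceErrorTracker;

async fn scan_folder(db: Database, user_id: Uuid) -> anyhow::Result<()> {
    let tracker = SourceErrorTracker::new(db);
    let path = "/Photos"; // illustrative resource path

    // Skip resources the tracker says should be skipped (e.g. known, unresolved failures).
    if tracker.should_skip_resource(user_id, ErrorSourceType::WebDAV, None, path).await? {
        return Ok(());
    }

    // ... scan the folder; on success, clear any previously recorded failure for this path.
    tracker.mark_success(user_id, ErrorSourceType::WebDAV, None, path).await?;

    // Stats and retry candidates can still be filtered by the renamed source type.
    let _stats = tracker.get_stats(user_id, Some(ErrorSourceType::WebDAV)).await?;
    let _retry = tracker.get_retry_candidates(user_id, Some(ErrorSourceType::WebDAV), Some(10)).await?;
    Ok(())
}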

View File

@@ -3,7 +3,7 @@ use std::collections::HashMap;
 use std::time::Duration;

 use crate::models::{
-    MonitoredSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
+    ErrorSourceType, SourceErrorType, SourceErrorSeverity, SourceErrorClassifier,
     ErrorContext, ErrorClassification, SourceScanFailure, RetryStrategy,
 };
 use crate::models::source::{
@@ -327,8 +327,8 @@ impl SourceErrorClassifier for WebDAVErrorClassifier {
         }
     }

-    fn source_type(&self) -> MonitoredSourceType {
-        MonitoredSourceType::WebDAV
+    fn source_type(&self) -> ErrorSourceType {
+        ErrorSourceType::WebDAV
     }
 }

View File

@@ -6,7 +6,7 @@ use uuid::Uuid;
 use crate::{AppState, models::{FileIngestionInfo}};
 use crate::models::source::{CreateWebDAVDirectory};
-use crate::models::source_error::{MonitoredSourceType, ErrorContext};
+use crate::models::source_error::{ErrorSourceType, ErrorContext};
 use crate::webdav_xml_parser::compare_etags;
 use crate::services::source_error_tracker::SourceErrorTracker;
 use super::{WebDAVService, SyncProgress};
@@ -171,7 +171,7 @@ impl SmartSyncService {
                 if let Err(track_error) = self.error_tracker.track_error(
                     user_id,
-                    MonitoredSourceType::WebDAV,
+                    ErrorSourceType::WebDAV,
                     None, // source_id - we don't have a specific source ID for this operation
                     folder_path,
                     &e,
@@ -243,7 +243,7 @@ impl SmartSyncService {
                 // Mark successful scan to resolve any previous failures
                 if let Err(track_error) = self.error_tracker.mark_success(
                     user_id,
-                    MonitoredSourceType::WebDAV,
+                    ErrorSourceType::WebDAV,
                     None,
                     folder_path,
                 ).await {
@@ -266,7 +266,7 @@ impl SmartSyncService {
                 if let Err(track_error) = self.error_tracker.track_error(
                     user_id,
-                    MonitoredSourceType::WebDAV,
+                    ErrorSourceType::WebDAV,
                     None,
                     folder_path,
                     &e,
@@ -401,7 +401,7 @@ impl SmartSyncService {
                 // Mark successful scan to resolve any previous failures
                 if let Err(track_error) = self.error_tracker.mark_success(
                     user_id,
-                    MonitoredSourceType::WebDAV,
+                    ErrorSourceType::WebDAV,
                     None,
                     target_dir,
                 ).await {
@@ -425,7 +425,7 @@ impl SmartSyncService {
                 if let Err(track_error) = self.error_tracker.track_error(
                     user_id,
-                    MonitoredSourceType::WebDAV,
+                    ErrorSourceType::WebDAV,
                     None,
                     target_dir,
                     &e,