From 93c2863d010706d915f67e99e69114768532746b Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Thu, 14 Aug 2025 16:24:05 +0000
Subject: [PATCH] feat(webdav): support capturing individual directory errors in webdav

---
 .claude/agents/rust-storage-sync-expert.md    |   2 +-
 .../FailureDetailsPanel.tsx                   | 619 ++++++++++++++++++
 .../RecommendationsSection.tsx                | 438 +++++++++++++
 .../WebDAVScanFailures/StatsDashboard.tsx     | 368 +++++++++++
 .../WebDAVScanFailures/WebDAVScanFailures.tsx | 576 ++++++++++++++++
 .../__tests__/FailureDetailsPanel.test.tsx    | 356 ++++++++++
 .../__tests__/StatsDashboard.test.tsx         | 151 +++++
 .../__tests__/WebDAVScanFailures.test.tsx     | 429 ++++++++++++
 .../components/WebDAVScanFailures/index.ts    |   4 +
 frontend/src/services/api.ts                  | 111 ++++
 ...0250813000001_add_webdav_scan_failures.sql | 299 +++++++++
 src/db/webdav.rs                              | 356 ++++++++++
 src/models/source.rs                          | 194 ++++++
 src/routes/mod.rs                             |   3 +-
 src/routes/webdav.rs                          |   6 +
 src/routes/webdav_scan_failures.rs            | 361 ++++++++++
 src/services/webdav/error_tracking.rs         | 348 ++++++++++
 src/services/webdav/mod.rs                    |   1 +
 18 files changed, 4620 insertions(+), 2 deletions(-)
 create mode 100644 frontend/src/components/WebDAVScanFailures/FailureDetailsPanel.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/RecommendationsSection.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/StatsDashboard.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/WebDAVScanFailures.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/__tests__/FailureDetailsPanel.test.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/__tests__/StatsDashboard.test.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/__tests__/WebDAVScanFailures.test.tsx
 create mode 100644 frontend/src/components/WebDAVScanFailures/index.ts
 create mode 100644 migrations/20250813000001_add_webdav_scan_failures.sql
 create mode 100644 src/routes/webdav_scan_failures.rs
 create mode 100644 src/services/webdav/error_tracking.rs

diff --git a/.claude/agents/rust-storage-sync-expert.md b/.claude/agents/rust-storage-sync-expert.md
index fdb389a..5baa815 100644
--- a/.claude/agents/rust-storage-sync-expert.md
+++ b/.claude/agents/rust-storage-sync-expert.md
@@ -12,7 +12,7 @@ You are an elite Rust systems engineer with deep expertise in OCR technologies,
 You possess mastery in:
 - **Rust Development**: Advanced knowledge of Rust's ownership system, lifetimes, trait systems, async/await patterns, and zero-cost abstractions
 - **OCR Technologies**: Experience with Tesseract, OpenCV, and Rust OCR libraries; understanding of image preprocessing, text extraction pipelines, and accuracy optimization
-- **Concurrency & Parallelism**: Expert use of tokio, async-std, rayon, crossbeam; designing lock-free data structures, managing thread pools, and preventing race conditions
+- **Concurrency & Parallelism**: Expert use of tokio, async-std, rayon, crossbeam; managing thread pools, and preventing race conditions
 - **Storage Systems**: Deep understanding of WebDAV protocol implementation, AWS S3 SDK usage, filesystem abstractions, and cross-platform file handling
 - **Synchronization Algorithms**: Implementing efficient diff algorithms, conflict resolution strategies, eventual consistency models, and bidirectional sync patterns
 - **API Design**: RESTful and gRPC API implementation, rate limiting, authentication, versioning, and error handling strategies
diff --git a/frontend/src/components/WebDAVScanFailures/FailureDetailsPanel.tsx
b/frontend/src/components/WebDAVScanFailures/FailureDetailsPanel.tsx new file mode 100644 index 0000000..d721b74 --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/FailureDetailsPanel.tsx @@ -0,0 +1,619 @@ +import React, { useState } from 'react'; +import { + Box, + Typography, + Button, + IconButton, + Divider, + Chip, + Grid, + Card, + CardContent, + Collapse, + Dialog, + DialogTitle, + DialogContent, + DialogActions, + TextField, + FormControlLabel, + Switch, + Alert, + Stack, + Tooltip, + Paper, +} from '@mui/material'; +import { + ContentCopy as CopyIcon, + ExpandMore as ExpandMoreIcon, + ExpandLess as ExpandLessIcon, + Refresh as RefreshIcon, + Block as BlockIcon, + Schedule as ScheduleIcon, + Speed as SpeedIcon, + Folder as FolderIcon, + CloudOff as CloudOffIcon, + Timer as TimerIcon, + Info as InfoIcon, + Warning as WarningIcon, +} from '@mui/icons-material'; +import { alpha } from '@mui/material/styles'; + +import { WebDAVScanFailure } from '../../services/api'; +import { modernTokens } from '../../theme'; +import { useNotification } from '../../contexts/NotificationContext'; + +interface FailureDetailsPanelProps { + failure: WebDAVScanFailure; + onRetry: (failure: WebDAVScanFailure, notes?: string) => Promise; + onExclude: (failure: WebDAVScanFailure, notes?: string, permanent?: boolean) => Promise; + isRetrying?: boolean; + isExcluding?: boolean; +} + +interface ConfirmationDialogProps { + open: boolean; + onClose: () => void; + onConfirm: (notes?: string, permanent?: boolean) => void; + title: string; + description: string; + confirmText: string; + confirmColor?: 'primary' | 'error' | 'warning'; + showPermanentOption?: boolean; + isLoading?: boolean; +} + +const ConfirmationDialog: React.FC = ({ + open, + onClose, + onConfirm, + title, + description, + confirmText, + confirmColor = 'primary', + showPermanentOption = false, + isLoading = false, +}) => { + const [notes, setNotes] = useState(''); + const [permanent, setPermanent] = useState(true); + + const handleConfirm = () => { + onConfirm(notes || undefined, showPermanentOption ? 
permanent : undefined); + setNotes(''); + setPermanent(true); + }; + + const handleClose = () => { + setNotes(''); + setPermanent(true); + onClose(); + }; + + return ( + + {title} + + + {description} + + + setNotes(e.target.value)} + multiline + rows={3} + sx={{ mb: 2 }} + /> + + {showPermanentOption && ( + setPermanent(e.target.checked)} + /> + } + label="Permanently exclude (recommended)" + sx={{ mt: 1 }} + /> + )} + + + + + + + ); +}; + +const FailureDetailsPanel: React.FC = ({ + failure, + onRetry, + onExclude, + isRetrying = false, + isExcluding = false, +}) => { + const [showDiagnostics, setShowDiagnostics] = useState(false); + const [retryDialogOpen, setRetryDialogOpen] = useState(false); + const [excludeDialogOpen, setExcludeDialogOpen] = useState(false); + + const { showNotification } = useNotification(); + + // Handle copy to clipboard + const handleCopy = async (text: string, label: string) => { + try { + await navigator.clipboard.writeText(text); + showNotification({ + type: 'success', + message: `${label} copied to clipboard`, + }); + } catch (error) { + showNotification({ + type: 'error', + message: `Failed to copy ${label}`, + }); + } + }; + + // Format bytes + const formatBytes = (bytes?: number) => { + if (!bytes) return 'N/A'; + const sizes = ['Bytes', 'KB', 'MB', 'GB']; + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + return `${(bytes / Math.pow(1024, i)).toFixed(1)} ${sizes[i]}`; + }; + + // Format duration + const formatDuration = (ms?: number) => { + if (!ms) return 'N/A'; + if (ms < 1000) return `${ms}ms`; + const seconds = Math.floor(ms / 1000); + if (seconds < 60) return `${seconds}s`; + const minutes = Math.floor(seconds / 60); + return `${minutes}m ${seconds % 60}s`; + }; + + // Get recommendation color and icon + const getRecommendationStyle = () => { + if (failure.diagnostic_summary.user_action_required) { + return { + color: modernTokens.colors.warning[600], + bgColor: modernTokens.colors.warning[50], + icon: WarningIcon, + }; + } + return { + color: modernTokens.colors.info[600], + bgColor: modernTokens.colors.info[50], + icon: InfoIcon, + }; + }; + + const recommendationStyle = getRecommendationStyle(); + const RecommendationIcon = recommendationStyle.icon; + + return ( + + {/* Error Message */} + {failure.error_message && ( + handleCopy(failure.error_message!, 'Error message')} + > + + + } + > + + {failure.error_message} + + + )} + + {/* Basic Information */} + + + + + + Directory Information + + + + + + Path + + + + {failure.directory_path} + + + handleCopy(failure.directory_path, 'Directory path')} + > + + + + + + + + + + + Failure Count + + + {failure.failure_count} total • {failure.consecutive_failures} consecutive + + + + + + Timeline + + + First failure: {new Date(failure.first_failure_at).toLocaleString()} + + + Last failure: {new Date(failure.last_failure_at).toLocaleString()} + + {failure.next_retry_at && ( + + Next retry: {new Date(failure.next_retry_at).toLocaleString()} + + )} + + + {failure.http_status_code && ( + + + HTTP Status + + + + )} + + + + + + + + + + + + Recommended Action + + + + + {failure.diagnostic_summary.recommended_action} + + + + {failure.diagnostic_summary.can_retry && ( + } + label="Can retry" + size="small" + sx={{ + backgroundColor: modernTokens.colors.success[100], + color: modernTokens.colors.success[700], + }} + /> + )} + {failure.diagnostic_summary.user_action_required && ( + } + label="Action required" + size="small" + sx={{ + backgroundColor: modernTokens.colors.warning[100], + color: 
modernTokens.colors.warning[700], + }} + /> + )} + + + + + + + {/* Diagnostic Information (Collapsible) */} + + + + + + + + {failure.diagnostic_summary.path_length && ( + + + + + {failure.diagnostic_summary.path_length} + + + Path Length (chars) + + + + )} + + {failure.diagnostic_summary.directory_depth && ( + + + + + {failure.diagnostic_summary.directory_depth} + + + Directory Depth + + + + )} + + {failure.diagnostic_summary.estimated_item_count && ( + + + + + {failure.diagnostic_summary.estimated_item_count.toLocaleString()} + + + Estimated Items + + + + )} + + {failure.diagnostic_summary.response_time_ms && ( + + + + + {formatDuration(failure.diagnostic_summary.response_time_ms)} + + + Response Time + + + + )} + + {failure.diagnostic_summary.response_size_mb && ( + + + + + {failure.diagnostic_summary.response_size_mb.toFixed(1)} MB + + + Response Size + + + + )} + + {failure.diagnostic_summary.server_type && ( + + + + Server Type + + + {failure.diagnostic_summary.server_type} + + + + )} + + + + + + + {/* User Notes */} + {failure.user_notes && ( + + + User Notes: {failure.user_notes} + + + )} + + {/* Action Buttons */} + {!failure.resolved && !failure.user_excluded && ( + + + + {failure.diagnostic_summary.can_retry && ( + + )} + + )} + + {/* Confirmation Dialogs */} + setRetryDialogOpen(false)} + onConfirm={(notes) => { + onRetry(failure, notes); + setRetryDialogOpen(false); + }} + title="Retry WebDAV Scan" + description={`This will attempt to scan "${failure.directory_path}" again. The failure will be reset and moved to the retry queue.`} + confirmText="Retry Now" + confirmColor="primary" + isLoading={isRetrying} + /> + + setExcludeDialogOpen(false)} + onConfirm={(notes, permanent) => { + onExclude(failure, notes, permanent); + setExcludeDialogOpen(false); + }} + title="Exclude Directory from Scanning" + description={`This will prevent "${failure.directory_path}" from being scanned in future synchronizations.`} + confirmText="Exclude Directory" + confirmColor="warning" + showPermanentOption + isLoading={isExcluding} + /> + + ); +}; + +export default FailureDetailsPanel; \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/RecommendationsSection.tsx b/frontend/src/components/WebDAVScanFailures/RecommendationsSection.tsx new file mode 100644 index 0000000..8417996 --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/RecommendationsSection.tsx @@ -0,0 +1,438 @@ +import React from 'react'; +import { + Box, + Card, + CardContent, + Typography, + Stack, + Chip, + Alert, + Button, + List, + ListItem, + ListItemIcon, + ListItemText, + Divider, + Link, +} from '@mui/material'; +import { + Lightbulb as LightbulbIcon, + Schedule as ScheduleIcon, + Folder as FolderIcon, + Security as SecurityIcon, + Network as NetworkIcon, + Settings as SettingsIcon, + Speed as SpeedIcon, + Warning as WarningIcon, + Info as InfoIcon, + ExternalLink as ExternalLinkIcon, +} from '@mui/icons-material'; + +import { WebDAVScanFailure, WebDAVScanFailureType } from '../../services/api'; +import { modernTokens } from '../../theme'; + +interface RecommendationsSectionProps { + failures: WebDAVScanFailure[]; +} + +interface RecommendationInfo { + icon: React.ElementType; + title: string; + description: string; + actions: string[]; + learnMoreUrl?: string; + severity: 'info' | 'warning' | 'error'; +} + +const getRecommendationsForFailureType = (type: WebDAVScanFailureType): RecommendationInfo => { + const recommendations: Record = { + timeout: { + icon: ScheduleIcon, + title: 'Timeout 
Issues', + description: 'Directories are taking too long to scan. This often indicates large directories or slow server response.', + actions: [ + 'Consider organizing files into smaller subdirectories', + 'Check your network connection speed', + 'Verify the WebDAV server performance', + 'Try scanning during off-peak hours', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#timeout-issues', + severity: 'warning', + }, + path_too_long: { + icon: FolderIcon, + title: 'Path Length Limits', + description: 'File paths are exceeding the maximum allowed length (typically 260 characters on Windows, 4096 on Unix).', + actions: [ + 'Shorten directory and file names', + 'Reduce nesting depth of folders', + 'Move files to a shorter base path', + 'Consider using symbolic links for deep structures', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#path-length', + severity: 'error', + }, + permission_denied: { + icon: SecurityIcon, + title: 'Permission Issues', + description: 'The WebDAV client does not have sufficient permissions to access these directories.', + actions: [ + 'Verify your WebDAV username and password', + 'Check directory permissions on the server', + 'Ensure the user has read access to all subdirectories', + 'Contact your system administrator if needed', + ], + learnMoreUrl: '/docs/webdav-setup#permissions', + severity: 'error', + }, + invalid_characters: { + icon: WarningIcon, + title: 'Invalid Characters', + description: 'File or directory names contain characters that are not supported by the file system or WebDAV protocol.', + actions: [ + 'Remove or replace special characters in file names', + 'Avoid characters like: < > : " | ? * \\', + 'Use ASCII characters when possible', + 'Rename files with Unicode characters if causing issues', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#invalid-characters', + severity: 'warning', + }, + network_error: { + icon: NetworkIcon, + title: 'Network Connectivity', + description: 'Unable to establish a stable connection to the WebDAV server.', + actions: [ + 'Check your internet connection', + 'Verify the WebDAV server URL is correct', + 'Test connectivity with other WebDAV clients', + 'Check firewall settings', + 'Try using a different network', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#network-issues', + severity: 'error', + }, + server_error: { + icon: SettingsIcon, + title: 'Server Issues', + description: 'The WebDAV server returned an error. This may be temporary or indicate server configuration issues.', + actions: [ + 'Wait and retry - server issues are often temporary', + 'Check server logs for detailed error information', + 'Verify server configuration and resources', + 'Contact your WebDAV server administrator', + 'Try accessing the server with other clients', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#server-errors', + severity: 'warning', + }, + xml_parse_error: { + icon: WarningIcon, + title: 'Protocol Issues', + description: 'Unable to parse the server response. 
This may indicate WebDAV protocol compatibility issues.', + actions: [ + 'Verify the server supports WebDAV protocol', + 'Check if the server returns valid XML responses', + 'Try connecting with different WebDAV client settings', + 'Update the server software if possible', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#protocol-issues', + severity: 'warning', + }, + too_many_items: { + icon: SpeedIcon, + title: 'Large Directory Optimization', + description: 'Directories contain too many files, causing performance issues and potential timeouts.', + actions: [ + 'Organize files into multiple subdirectories', + 'Archive old files to reduce directory size', + 'Use date-based or category-based folder structures', + 'Consider excluding very large directories temporarily', + ], + learnMoreUrl: '/docs/webdav-optimization#large-directories', + severity: 'warning', + }, + depth_limit: { + icon: FolderIcon, + title: 'Directory Depth Limits', + description: 'Directory nesting is too deep, exceeding system or protocol limits.', + actions: [ + 'Flatten the directory structure', + 'Move deeply nested files to shallower locations', + 'Reorganize the folder hierarchy', + 'Use shorter path names at each level', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#depth-limits', + severity: 'warning', + }, + size_limit: { + icon: SpeedIcon, + title: 'Size Limitations', + description: 'Files or directories are too large for the current configuration.', + actions: [ + 'Check file size limits on the WebDAV server', + 'Split large files into smaller parts', + 'Exclude very large files from synchronization', + 'Increase server limits if possible', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#size-limits', + severity: 'warning', + }, + unknown: { + icon: InfoIcon, + title: 'Unknown Issues', + description: 'An unclassified error occurred. 
This may require manual investigation.', + actions: [ + 'Check the detailed error message for clues', + 'Try the operation again later', + 'Contact support with the full error details', + 'Check server and client logs', + ], + learnMoreUrl: '/docs/webdav-troubleshooting#general', + severity: 'info', + }, + }; + + return recommendations[type]; +}; + +const RecommendationsSection: React.FC = ({ failures }) => { + // Group failures by type and get unique types + const failureTypeStats = failures.reduce((acc, failure) => { + if (!failure.resolved && !failure.user_excluded) { + acc[failure.failure_type] = (acc[failure.failure_type] || 0) + 1; + } + return acc; + }, {} as Record); + + const activeFailureTypes = Object.keys(failureTypeStats) as WebDAVScanFailureType[]; + + if (activeFailureTypes.length === 0) { + return null; + } + + // Sort by frequency (most common issues first) + const sortedFailureTypes = activeFailureTypes.sort( + (a, b) => failureTypeStats[b] - failureTypeStats[a] + ); + + return ( + + + + + + Recommendations & Solutions + + + + + Based on your current scan failures, here are targeted recommendations to resolve common issues: + + + + {sortedFailureTypes.map((failureType, index) => { + const recommendation = getRecommendationsForFailureType(failureType); + const Icon = recommendation.icon; + const count = failureTypeStats[failureType]; + + return ( + + {index > 0 && } + + + + + + + + + + {recommendation.title} + + + + + + + {recommendation.description} + + + + Recommended Actions: + + + + {recommendation.actions.map((action, actionIndex) => ( + + + + + + + ))} + + + {recommendation.learnMoreUrl && ( + + + Learn more about this issue + + + + )} + + + + + + ); + })} + + + {/* General Tips */} + + + + General Troubleshooting Tips: + + + + + + + + + + + + + + + + + + + + ); +}; + +export default RecommendationsSection; \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/StatsDashboard.tsx b/frontend/src/components/WebDAVScanFailures/StatsDashboard.tsx new file mode 100644 index 0000000..bd15c34 --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/StatsDashboard.tsx @@ -0,0 +1,368 @@ +import React from 'react'; +import { + Box, + Card, + CardContent, + Typography, + Grid, + LinearProgress, + Stack, + Skeleton, +} from '@mui/material'; +import { + Error as ErrorIcon, + Warning as WarningIcon, + Info as InfoIcon, + CheckCircle as CheckCircleIcon, + Refresh as RefreshIcon, + Block as BlockIcon, +} from '@mui/icons-material'; + +import { WebDAVScanFailureStats } from '../../services/api'; +import { modernTokens } from '../../theme'; + +interface StatsDashboardProps { + stats: WebDAVScanFailureStats; + isLoading?: boolean; +} + +interface StatCardProps { + title: string; + value: number; + icon: React.ElementType; + color: string; + bgColor: string; + description?: string; + percentage?: number; + trend?: 'up' | 'down' | 'stable'; +} + +const StatCard: React.FC = ({ + title, + value, + icon: Icon, + color, + bgColor, + description, + percentage, +}) => ( + + + + + + + + + + {value.toLocaleString()} + + + {title} + + {description && ( + + {description} + + )} + + + + {percentage !== undefined && ( + + + + {percentage.toFixed(1)}% of total + + + )} + + +); + +const StatsDashboard: React.FC = ({ stats, isLoading }) => { + if (isLoading) { + return ( + + {[1, 2, 3, 4, 5, 6].map((i) => ( + + + + + + + + + + + + + + ))} + + ); + } + + const totalFailures = stats.active_failures + stats.resolved_failures; + const criticalPercentage = totalFailures > 
0 ? (stats.critical_failures / totalFailures) * 100 : 0; + const highPercentage = totalFailures > 0 ? (stats.high_failures / totalFailures) * 100 : 0; + const mediumPercentage = totalFailures > 0 ? (stats.medium_failures / totalFailures) * 100 : 0; + const lowPercentage = totalFailures > 0 ? (stats.low_failures / totalFailures) * 100 : 0; + const retryPercentage = stats.active_failures > 0 ? (stats.ready_for_retry / stats.active_failures) * 100 : 0; + + return ( + + + Scan Failure Statistics + + + + {/* Total Active Failures */} + + + + + {/* Critical Failures */} + + + + + {/* High Priority Failures */} + + + + + {/* Medium Priority Failures */} + + + + + {/* Low Priority Failures */} + + + + + {/* Ready for Retry */} + + + + + + {/* Summary Row */} + + + + + + + + + + + + + + + Success Rate + + + + {totalFailures > 0 ? ( + <> + + {((stats.resolved_failures / totalFailures) * 100).toFixed(1)}% + + + + {stats.resolved_failures} of {totalFailures} failures resolved + + + ) : ( + + 100% + + )} + + + + + + + + ); +}; + +export default StatsDashboard; \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/WebDAVScanFailures.tsx b/frontend/src/components/WebDAVScanFailures/WebDAVScanFailures.tsx new file mode 100644 index 0000000..cb7cdc7 --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/WebDAVScanFailures.tsx @@ -0,0 +1,576 @@ +import React, { useState, useEffect, useMemo, useCallback } from 'react'; +import { + Box, + Paper, + Typography, + Accordion, + AccordionSummary, + AccordionDetails, + Alert, + Chip, + IconButton, + TextField, + InputAdornment, + FormControl, + InputLabel, + Select, + MenuItem, + Card, + CardContent, + Grid, + LinearProgress, + Skeleton, + Stack, + Fade, + Collapse, +} from '@mui/material'; +import { + ExpandMore as ExpandMoreIcon, + Search as SearchIcon, + FilterList as FilterIcon, + Refresh as RefreshIcon, + Error as ErrorIcon, + Warning as WarningIcon, + Info as InfoIcon, + CheckCircle as CheckCircleIcon, +} from '@mui/icons-material'; +import { alpha } from '@mui/material/styles'; + +import { webdavService, WebDAVScanFailure, WebDAVScanFailureSeverity, WebDAVScanFailureType } from '../../services/api'; +import { useNotification } from '../../contexts/NotificationContext'; +import { modernTokens } from '../../theme'; +import StatsDashboard from './StatsDashboard'; +import FailureDetailsPanel from './FailureDetailsPanel'; +import RecommendationsSection from './RecommendationsSection'; + +// Severity configuration for styling +const severityConfig = { + critical: { + color: modernTokens.colors.error[500], + bgColor: modernTokens.colors.error[50], + icon: ErrorIcon, + label: 'Critical', + }, + high: { + color: modernTokens.colors.warning[600], + bgColor: modernTokens.colors.warning[50], + icon: WarningIcon, + label: 'High', + }, + medium: { + color: modernTokens.colors.warning[500], + bgColor: modernTokens.colors.warning[50], + icon: InfoIcon, + label: 'Medium', + }, + low: { + color: modernTokens.colors.info[500], + bgColor: modernTokens.colors.info[50], + icon: InfoIcon, + label: 'Low', + }, +}; + +// Failure type configuration +const failureTypeConfig: Record = { + timeout: { label: 'Timeout', description: 'Request timed out' }, + path_too_long: { label: 'Path Too Long', description: 'File path exceeds system limits' }, + permission_denied: { label: 'Permission Denied', description: 'Access denied' }, + invalid_characters: { label: 'Invalid Characters', description: 'Path contains invalid characters' }, + network_error: { 
label: 'Network Error', description: 'Network connectivity issue' }, + server_error: { label: 'Server Error', description: 'Server returned an error' }, + xml_parse_error: { label: 'XML Parse Error', description: 'Failed to parse server response' }, + too_many_items: { label: 'Too Many Items', description: 'Directory contains too many files' }, + depth_limit: { label: 'Depth Limit', description: 'Directory nesting too deep' }, + size_limit: { label: 'Size Limit', description: 'Directory or file too large' }, + unknown: { label: 'Unknown', description: 'Unclassified error' }, +}; + +interface WebDAVScanFailuresProps { + autoRefresh?: boolean; + refreshInterval?: number; +} + +const WebDAVScanFailures: React.FC = ({ + autoRefresh = true, + refreshInterval = 30000, // 30 seconds +}) => { + const [searchQuery, setSearchQuery] = useState(''); + const [severityFilter, setSeverityFilter] = useState('all'); + const [typeFilter, setTypeFilter] = useState('all'); + const [expandedFailure, setExpandedFailure] = useState(null); + const [showResolved, setShowResolved] = useState(false); + + // Data state + const [scanFailuresData, setScanFailuresData] = useState(null); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + + // Action states + const [retryingFailures, setRetryingFailures] = useState>(new Set()); + const [excludingFailures, setExcludingFailures] = useState>(new Set()); + + const { showNotification } = useNotification(); + + // Fetch scan failures + const fetchScanFailures = useCallback(async () => { + try { + setError(null); + const response = await webdavService.getScanFailures(); + setScanFailuresData(response.data); + } catch (err: any) { + console.error('Failed to fetch scan failures:', err); + setError(err?.response?.data?.message || err.message || 'Failed to load scan failures'); + } finally { + setIsLoading(false); + } + }, []); + + // Auto-refresh effect + useEffect(() => { + fetchScanFailures(); + + if (autoRefresh && refreshInterval > 0) { + const interval = setInterval(fetchScanFailures, refreshInterval); + return () => clearInterval(interval); + } + }, [fetchScanFailures, autoRefresh, refreshInterval]); + + // Manual refetch + const refetch = useCallback(() => { + setIsLoading(true); + fetchScanFailures(); + }, [fetchScanFailures]); + + // Filter failures based on search and filters + const filteredFailures = useMemo(() => { + if (!scanFailuresData?.failures) return []; + + return scanFailuresData.failures.filter((failure) => { + // Search filter + if (searchQuery) { + const searchLower = searchQuery.toLowerCase(); + if (!failure.directory_path.toLowerCase().includes(searchLower) && + !failure.error_message?.toLowerCase().includes(searchLower)) { + return false; + } + } + + // Severity filter + if (severityFilter !== 'all' && failure.failure_severity !== severityFilter) { + return false; + } + + // Type filter + if (typeFilter !== 'all' && failure.failure_type !== typeFilter) { + return false; + } + + // Show resolved filter + if (!showResolved && failure.resolved) { + return false; + } + + return true; + }); + }, [scanFailuresData?.failures, searchQuery, severityFilter, typeFilter, showResolved]); + + // Handle accordion expansion + const handleAccordionChange = (failureId: string) => ( + event: React.SyntheticEvent, + isExpanded: boolean + ) => { + setExpandedFailure(isExpanded ? 
failureId : null); + }; + + // Handle retry action + const handleRetry = async (failure: WebDAVScanFailure, notes?: string) => { + try { + setRetryingFailures(prev => new Set(prev).add(failure.id)); + const response = await webdavService.retryFailure(failure.id, { notes }); + + showNotification({ + type: 'success', + message: `Retry scheduled for: ${response.data.directory_path}`, + }); + + // Refresh the data + await fetchScanFailures(); + } catch (error: any) { + console.error('Failed to retry scan failure:', error); + showNotification({ + type: 'error', + message: `Failed to schedule retry: ${error?.response?.data?.message || error.message}`, + }); + } finally { + setRetryingFailures(prev => { + const newSet = new Set(prev); + newSet.delete(failure.id); + return newSet; + }); + } + }; + + // Handle exclude action + const handleExclude = async (failure: WebDAVScanFailure, notes?: string, permanent = true) => { + try { + setExcludingFailures(prev => new Set(prev).add(failure.id)); + const response = await webdavService.excludeFailure(failure.id, { notes, permanent }); + + showNotification({ + type: 'success', + message: `Directory excluded: ${response.data.directory_path}`, + }); + + // Refresh the data + await fetchScanFailures(); + } catch (error: any) { + console.error('Failed to exclude directory:', error); + showNotification({ + type: 'error', + message: `Failed to exclude directory: ${error?.response?.data?.message || error.message}`, + }); + } finally { + setExcludingFailures(prev => { + const newSet = new Set(prev); + newSet.delete(failure.id); + return newSet; + }); + } + }; + + // Render severity chip + const renderSeverityChip = (severity: WebDAVScanFailureSeverity) => { + const config = severityConfig[severity]; + const Icon = config.icon; + + return ( + } + label={config.label} + size="small" + sx={{ + color: config.color, + backgroundColor: config.bgColor, + borderColor: config.color, + fontWeight: 500, + }} + /> + ); + }; + + // Render failure type chip + const renderFailureTypeChip = (type: WebDAVScanFailureType) => { + const config = failureTypeConfig[type]; + + return ( + + ); + }; + + if (error) { + return ( + + + + } + > + Failed to load WebDAV scan failures: {error} + + ); + } + + return ( + + {/* Header */} + + + WebDAV Scan Failures + + + Monitor and manage directories that failed to scan during WebDAV synchronization + + + {/* Statistics Dashboard */} + {scanFailuresData?.stats && ( + + )} + + + {/* Controls */} + + + + setSearchQuery(e.target.value)} + InputProps={{ + startAdornment: ( + + + + ), + }} + sx={{ + '& .MuiOutlinedInput-root': { + backgroundColor: modernTokens.colors.neutral[0], + }, + }} + /> + + + + Severity + + + + + + Type + + + + + refetch()} + disabled={isLoading} + sx={{ + backgroundColor: modernTokens.colors.primary[50], + color: modernTokens.colors.primary[600], + '&:hover': { + backgroundColor: modernTokens.colors.primary[100], + }, + }} + > + + + + + + + {/* Loading State */} + {isLoading && ( + + {[1, 2, 3].map((i) => ( + + ))} + + )} + + {/* Failures List */} + {!isLoading && ( + + + {filteredFailures.length === 0 ? ( + + + + + No Scan Failures Found + + + {scanFailuresData?.failures.length === 0 + ? 'All WebDAV directories are scanning successfully!' 
+ : 'Try adjusting your search criteria or filters.'} + + + + ) : ( + + {filteredFailures.map((failure) => ( + + } + sx={{ + '& .MuiAccordionSummary-content': { + alignItems: 'center', + gap: 2, + }, + }} + > + + {renderSeverityChip(failure.failure_severity)} + {renderFailureTypeChip(failure.failure_type)} + + + + {failure.directory_path} + + + {failure.consecutive_failures} consecutive failures • + Last failed: {new Date(failure.last_failure_at).toLocaleString()} + + + + {failure.user_excluded && ( + + )} + + {failure.resolved && ( + + )} + + + + + + + + ))} + + )} + + {/* Recommendations Section */} + {filteredFailures.length > 0 && ( + + + + )} + + + )} + + ); +}; + +export default WebDAVScanFailures; \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/__tests__/FailureDetailsPanel.test.tsx b/frontend/src/components/WebDAVScanFailures/__tests__/FailureDetailsPanel.test.tsx new file mode 100644 index 0000000..7b59a0d --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/__tests__/FailureDetailsPanel.test.tsx @@ -0,0 +1,356 @@ +import React from 'react'; +import { render, screen, fireEvent, waitFor } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { vi, describe, it, expect, beforeEach } from 'vitest'; +import { ThemeProvider } from '@mui/material/styles'; + +import FailureDetailsPanel from '../FailureDetailsPanel'; +import { WebDAVScanFailure } from '../../../services/api'; +import { NotificationContext } from '../../../contexts/NotificationContext'; +import theme from '../../../theme'; + +const mockShowNotification = vi.fn(); + +const MockNotificationProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => ( + + {children} + +); + +const renderWithProviders = (component: React.ReactElement) => { + return render( + + + {component} + + + ); +}; + +const mockFailure: WebDAVScanFailure = { + id: '1', + directory_path: '/test/very/long/path/that/exceeds/normal/limits/and/causes/issues', + failure_type: 'path_too_long', + failure_severity: 'high', + failure_count: 5, + consecutive_failures: 3, + first_failure_at: '2024-01-01T10:00:00Z', + last_failure_at: '2024-01-01T12:00:00Z', + next_retry_at: '2024-01-01T13:00:00Z', + error_message: 'Path length exceeds maximum allowed (260 characters)', + http_status_code: 400, + user_excluded: false, + user_notes: 'Previous attempt to shorten path failed', + resolved: false, + diagnostic_summary: { + path_length: 85, + directory_depth: 8, + estimated_item_count: 500, + response_time_ms: 5000, + response_size_mb: 1.2, + server_type: 'Apache/2.4.41', + recommended_action: 'Shorten directory and file names to reduce the total path length.', + can_retry: true, + user_action_required: true, + }, +}; + +const mockOnRetry = vi.fn(); +const mockOnExclude = vi.fn(); + +// Mock clipboard API +Object.assign(navigator, { + clipboard: { + writeText: vi.fn().mockResolvedValue(undefined), + }, +}); + +describe('FailureDetailsPanel', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it('renders failure details correctly', () => { + renderWithProviders( + + ); + + // Check basic information + expect(screen.getByText('/test/very/long/path/that/exceeds/normal/limits/and/causes/issues')).toBeInTheDocument(); + expect(screen.getByText('5 total • 3 consecutive')).toBeInTheDocument(); + expect(screen.getByText('400')).toBeInTheDocument(); // HTTP status + + // Check recommended action + expect(screen.getByText('Recommended Action')).toBeInTheDocument(); + 
expect(screen.getByText('Shorten directory and file names to reduce the total path length.')).toBeInTheDocument(); + + // Check user notes + expect(screen.getByText('User Notes:')).toBeInTheDocument(); + expect(screen.getByText('Previous attempt to shorten path failed')).toBeInTheDocument(); + + // Check action buttons + expect(screen.getByText('Retry Scan')).toBeInTheDocument(); + expect(screen.getByText('Exclude Directory')).toBeInTheDocument(); + }); + + it('displays error message when present', () => { + renderWithProviders( + + ); + + expect(screen.getByText('Path length exceeds maximum allowed (260 characters)')).toBeInTheDocument(); + }); + + it('shows diagnostic details when expanded', async () => { + renderWithProviders( + + ); + + // Click to expand diagnostics + const diagnosticButton = screen.getByText('Diagnostic Details'); + await userEvent.click(diagnosticButton); + + // Check diagnostic values + expect(screen.getByText('85')).toBeInTheDocument(); // Path length + expect(screen.getByText('8')).toBeInTheDocument(); // Directory depth + expect(screen.getByText('500')).toBeInTheDocument(); // Estimated items + expect(screen.getByText('5.0s')).toBeInTheDocument(); // Response time + expect(screen.getByText('1.2 MB')).toBeInTheDocument(); // Response size + expect(screen.getByText('Apache/2.4.41')).toBeInTheDocument(); // Server type + }); + + it('handles copy path functionality', async () => { + renderWithProviders( + + ); + + // Find and click copy button + const copyButtons = screen.getAllByRole('button'); + const copyButton = copyButtons.find(button => button.getAttribute('aria-label') === 'Copy path' || + button.querySelector('svg[data-testid="ContentCopyIcon"]')); + + if (copyButton) { + await userEvent.click(copyButton); + + expect(navigator.clipboard.writeText).toHaveBeenCalledWith( + '/test/very/long/path/that/exceeds/normal/limits/and/causes/issues' + ); + expect(mockShowNotification).toHaveBeenCalledWith({ + type: 'success', + message: 'Directory path copied to clipboard', + }); + } + }); + + it('opens retry confirmation dialog when retry button is clicked', async () => { + renderWithProviders( + + ); + + const retryButton = screen.getByText('Retry Scan'); + await userEvent.click(retryButton); + + // Check dialog is open + expect(screen.getByText('Retry WebDAV Scan')).toBeInTheDocument(); + expect(screen.getByText(/This will attempt to scan/)).toBeInTheDocument(); + expect(screen.getByRole('button', { name: 'Retry Now' })).toBeInTheDocument(); + }); + + it('calls onRetry when retry is confirmed', async () => { + renderWithProviders( + + ); + + // Open retry dialog + const retryButton = screen.getByText('Retry Scan'); + await userEvent.click(retryButton); + + // Add notes + const notesInput = screen.getByLabelText('Notes (optional)'); + await userEvent.type(notesInput, 'Attempting retry after path optimization'); + + // Confirm retry + const confirmButton = screen.getByRole('button', { name: 'Retry Now' }); + await userEvent.click(confirmButton); + + expect(mockOnRetry).toHaveBeenCalledWith(mockFailure, 'Attempting retry after path optimization'); + }); + + it('opens exclude confirmation dialog when exclude button is clicked', async () => { + renderWithProviders( + + ); + + const excludeButton = screen.getByText('Exclude Directory'); + await userEvent.click(excludeButton); + + // Check dialog is open + expect(screen.getByText('Exclude Directory from Scanning')).toBeInTheDocument(); + expect(screen.getByText(/This will prevent/)).toBeInTheDocument(); + 
expect(screen.getByRole('button', { name: 'Exclude Directory' })).toBeInTheDocument(); + expect(screen.getByText('Permanently exclude (recommended)')).toBeInTheDocument(); + }); + + it('calls onExclude when exclude is confirmed', async () => { + renderWithProviders( + + ); + + // Open exclude dialog + const excludeButton = screen.getByText('Exclude Directory'); + await userEvent.click(excludeButton); + + // Add notes and toggle permanent setting + const notesInput = screen.getByLabelText('Notes (optional)'); + await userEvent.type(notesInput, 'Path too long to fix easily'); + + const permanentSwitch = screen.getByRole('checkbox'); + await userEvent.click(permanentSwitch); // Toggle off + await userEvent.click(permanentSwitch); // Toggle back on + + // Confirm exclude + const confirmButton = screen.getByRole('button', { name: 'Exclude Directory' }); + await userEvent.click(confirmButton); + + expect(mockOnExclude).toHaveBeenCalledWith(mockFailure, 'Path too long to fix easily', true); + }); + + it('shows loading states for retry and exclude buttons', () => { + renderWithProviders( + + ); + + const retryButton = screen.getByText('Retry Scan'); + const excludeButton = screen.getByText('Exclude Directory'); + + expect(retryButton).toBeDisabled(); + expect(excludeButton).toBeDisabled(); + }); + + it('hides action buttons for resolved failures', () => { + const resolvedFailure = { ...mockFailure, resolved: true }; + + renderWithProviders( + + ); + + expect(screen.queryByText('Retry Scan')).not.toBeInTheDocument(); + expect(screen.queryByText('Exclude Directory')).not.toBeInTheDocument(); + }); + + it('hides action buttons for excluded failures', () => { + const excludedFailure = { ...mockFailure, user_excluded: true }; + + renderWithProviders( + + ); + + expect(screen.queryByText('Retry Scan')).not.toBeInTheDocument(); + expect(screen.queryByText('Exclude Directory')).not.toBeInTheDocument(); + }); + + it('hides retry button when can_retry is false', () => { + const nonRetryableFailure = { + ...mockFailure, + diagnostic_summary: { + ...mockFailure.diagnostic_summary, + can_retry: false, + }, + }; + + renderWithProviders( + + ); + + expect(screen.queryByText('Retry Scan')).not.toBeInTheDocument(); + expect(screen.getByText('Exclude Directory')).toBeInTheDocument(); // Exclude should still be available + }); + + it('formats durations correctly', () => { + const failureWithDifferentTiming = { + ...mockFailure, + diagnostic_summary: { + ...mockFailure.diagnostic_summary, + response_time_ms: 500, // Should show as milliseconds + }, + }; + + renderWithProviders( + + ); + + // Expand diagnostics to see the timing + const diagnosticButton = screen.getByText('Diagnostic Details'); + fireEvent.click(diagnosticButton); + + expect(screen.getByText('500ms')).toBeInTheDocument(); + }); + + it('shows correct recommendation styling based on user action required', () => { + renderWithProviders( + + ); + + // Should show warning style since user_action_required is true + expect(screen.getByText('Action required')).toBeInTheDocument(); + expect(screen.getByText('Can retry')).toBeInTheDocument(); + }); +}); \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/__tests__/StatsDashboard.test.tsx b/frontend/src/components/WebDAVScanFailures/__tests__/StatsDashboard.test.tsx new file mode 100644 index 0000000..403979e --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/__tests__/StatsDashboard.test.tsx @@ -0,0 +1,151 @@ +import React from 'react'; +import { render, screen } from 
'@testing-library/react'; +import { vi, describe, it, expect } from 'vitest'; +import { ThemeProvider } from '@mui/material/styles'; + +import StatsDashboard from '../StatsDashboard'; +import { WebDAVScanFailureStats } from '../../../services/api'; +import theme from '../../../theme'; + +const renderWithTheme = (component: React.ReactElement) => { + return render( + + {component} + + ); +}; + +const mockStats: WebDAVScanFailureStats = { + active_failures: 15, + resolved_failures: 35, + excluded_directories: 5, + critical_failures: 3, + high_failures: 7, + medium_failures: 4, + low_failures: 1, + ready_for_retry: 8, +}; + +describe('StatsDashboard', () => { + it('renders all stat cards with correct values', () => { + renderWithTheme(); + + // Check title + expect(screen.getByText('Scan Failure Statistics')).toBeInTheDocument(); + + // Check individual stat cards + expect(screen.getByText('15')).toBeInTheDocument(); // Active failures + expect(screen.getByText('3')).toBeInTheDocument(); // Critical failures + expect(screen.getByText('7')).toBeInTheDocument(); // High failures + expect(screen.getByText('4')).toBeInTheDocument(); // Medium failures + expect(screen.getByText('1')).toBeInTheDocument(); // Low failures + expect(screen.getByText('8')).toBeInTheDocument(); // Ready for retry + expect(screen.getByText('35')).toBeInTheDocument(); // Resolved failures + expect(screen.getByText('5')).toBeInTheDocument(); // Excluded directories + + // Check labels + expect(screen.getByText('Active Failures')).toBeInTheDocument(); + expect(screen.getByText('Critical')).toBeInTheDocument(); + expect(screen.getByText('High Priority')).toBeInTheDocument(); + expect(screen.getByText('Medium Priority')).toBeInTheDocument(); + expect(screen.getByText('Low Priority')).toBeInTheDocument(); + expect(screen.getByText('Ready for Retry')).toBeInTheDocument(); + expect(screen.getByText('Resolved Failures')).toBeInTheDocument(); + expect(screen.getByText('Excluded Directories')).toBeInTheDocument(); + }); + + it('calculates success rate correctly', () => { + renderWithTheme(); + + // Total failures = active (15) + resolved (35) = 50 + // Success rate = resolved (35) / total (50) = 70% + expect(screen.getByText('70.0%')).toBeInTheDocument(); + expect(screen.getByText('35 of 50 failures resolved')).toBeInTheDocument(); + }); + + it('displays 100% success rate when no failures exist', () => { + const noFailuresStats: WebDAVScanFailureStats = { + active_failures: 0, + resolved_failures: 0, + excluded_directories: 0, + critical_failures: 0, + high_failures: 0, + medium_failures: 0, + low_failures: 0, + ready_for_retry: 0, + }; + + renderWithTheme(); + + expect(screen.getByText('100%')).toBeInTheDocument(); + }); + + it('calculates percentages correctly for severity breakdown', () => { + renderWithTheme(); + + // Total failures = 50 + // Critical: 3/50 = 6% + // High: 7/50 = 14% + // Medium: 4/50 = 8% + // Low: 1/50 = 2% + expect(screen.getByText('6.0% of total')).toBeInTheDocument(); + expect(screen.getByText('14.0% of total')).toBeInTheDocument(); + expect(screen.getByText('8.0% of total')).toBeInTheDocument(); + expect(screen.getByText('2.0% of total')).toBeInTheDocument(); + }); + + it('calculates retry percentage correctly', () => { + renderWithTheme(); + + // Ready for retry: 8/15 active failures = 53.3% + expect(screen.getByText('53.3% of total')).toBeInTheDocument(); + }); + + it('renders loading state with skeletons', () => { + renderWithTheme(); + + // Should show skeleton cards instead of actual data + const 
skeletons = document.querySelectorAll('.MuiSkeleton-root'); + expect(skeletons.length).toBeGreaterThan(0); + }); + + it('handles zero active failures for retry percentage', () => { + const zeroActiveStats: WebDAVScanFailureStats = { + ...mockStats, + active_failures: 0, + ready_for_retry: 0, + }; + + renderWithTheme(); + + // Should not crash and should show 0% for retry percentage + expect(screen.getByText('0')).toBeInTheDocument(); // Active failures + expect(screen.getByText('0.0% of total')).toBeInTheDocument(); // Retry percentage when no active failures + }); + + it('displays descriptive text for each stat', () => { + renderWithTheme(); + + // Check descriptions + expect(screen.getByText('Requiring attention')).toBeInTheDocument(); + expect(screen.getByText('Immediate action needed')).toBeInTheDocument(); + expect(screen.getByText('Important issues')).toBeInTheDocument(); + expect(screen.getByText('Moderate issues')).toBeInTheDocument(); + expect(screen.getByText('Minor issues')).toBeInTheDocument(); + expect(screen.getByText('Can be retried now')).toBeInTheDocument(); + expect(screen.getByText('Successfully resolved')).toBeInTheDocument(); + expect(screen.getByText('Manually excluded')).toBeInTheDocument(); + }); + + it('applies correct hover effects to cards', () => { + renderWithTheme(); + + const cards = document.querySelectorAll('.MuiCard-root'); + expect(cards.length).toBeGreaterThan(0); + + // Cards should have transition styles for hover effects + cards.forEach(card => { + expect(card).toHaveStyle('transition: all 0.2s ease-in-out'); + }); + }); +}); \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/__tests__/WebDAVScanFailures.test.tsx b/frontend/src/components/WebDAVScanFailures/__tests__/WebDAVScanFailures.test.tsx new file mode 100644 index 0000000..5ef36aa --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/__tests__/WebDAVScanFailures.test.tsx @@ -0,0 +1,429 @@ +import React from 'react'; +import { render, screen, fireEvent, waitFor } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { ThemeProvider } from '@mui/material/styles'; + +import WebDAVScanFailures from '../WebDAVScanFailures'; +import { webdavService } from '../../../services/api'; +import { NotificationContext } from '../../../contexts/NotificationContext'; +import theme from '../../../theme'; + +// Mock the webdav service +vi.mock('../../../services/api', () => ({ + webdavService: { + getScanFailures: vi.fn(), + retryFailure: vi.fn(), + excludeFailure: vi.fn(), + }, +})); + +const mockShowNotification = vi.fn(); + +const MockNotificationProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => ( + + {children} + +); + +const renderWithProviders = (component: React.ReactElement) => { + return render( + + + {component} + + + ); +}; + +const mockScanFailuresData = { + failures: [ + { + id: '1', + directory_path: '/test/path/long/directory/name', + failure_type: 'timeout', + failure_severity: 'high', + failure_count: 3, + consecutive_failures: 2, + first_failure_at: '2024-01-01T10:00:00Z', + last_failure_at: '2024-01-01T12:00:00Z', + next_retry_at: '2024-01-01T13:00:00Z', + error_message: 'Request timeout after 30 seconds', + http_status_code: 408, + user_excluded: false, + user_notes: null, + resolved: false, + diagnostic_summary: { + path_length: 45, + directory_depth: 5, + estimated_item_count: 1500, + response_time_ms: 30000, + 
response_size_mb: 2.5, + server_type: 'Apache/2.4.41', + recommended_action: 'Consider organizing files into smaller subdirectories or scanning during off-peak hours.', + can_retry: true, + user_action_required: false, + }, + }, + { + id: '2', + directory_path: '/test/path/permissions', + failure_type: 'permission_denied', + failure_severity: 'critical', + failure_count: 1, + consecutive_failures: 1, + first_failure_at: '2024-01-01T11:00:00Z', + last_failure_at: '2024-01-01T11:00:00Z', + next_retry_at: null, + error_message: '403 Forbidden', + http_status_code: 403, + user_excluded: false, + user_notes: null, + resolved: false, + diagnostic_summary: { + path_length: 20, + directory_depth: 3, + estimated_item_count: null, + response_time_ms: 1000, + response_size_mb: null, + server_type: 'Apache/2.4.41', + recommended_action: 'Check that your WebDAV user has read access to this directory.', + can_retry: false, + user_action_required: true, + }, + }, + ], + stats: { + active_failures: 2, + resolved_failures: 5, + excluded_directories: 1, + critical_failures: 1, + high_failures: 1, + medium_failures: 0, + low_failures: 0, + ready_for_retry: 1, + }, +}; + +describe('WebDAVScanFailures', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.clearAllTimers(); + }); + + it('renders loading state initially', () => { + vi.mocked(webdavService.getScanFailures).mockImplementation( + () => new Promise(() => {}) // Never resolves + ); + + renderWithProviders(); + + expect(screen.getByText('WebDAV Scan Failures')).toBeInTheDocument(); + // Should show skeleton loading + expect(document.querySelectorAll('.MuiSkeleton-root')).toHaveLength(6); // Stats dashboard skeletons + }); + + it('renders scan failures data successfully', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('WebDAV Scan Failures')).toBeInTheDocument(); + }); + + // Check if failures are rendered + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + expect(screen.getByText('/test/path/permissions')).toBeInTheDocument(); + + // Check severity chips + expect(screen.getByText('High')).toBeInTheDocument(); + expect(screen.getByText('Critical')).toBeInTheDocument(); + + // Check failure type chips + expect(screen.getByText('Timeout')).toBeInTheDocument(); + expect(screen.getByText('Permission Denied')).toBeInTheDocument(); + }); + + it('renders error state when API fails', async () => { + const errorMessage = 'Failed to fetch data'; + vi.mocked(webdavService.getScanFailures).mockRejectedValue( + new Error(errorMessage) + ); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText(/Failed to load WebDAV scan failures/)).toBeInTheDocument(); + }); + + expect(screen.getByText(new RegExp(errorMessage))).toBeInTheDocument(); + }); + + it('handles search filtering correctly', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Search for specific path + const searchInput = screen.getByPlaceholderText('Search directories or error messages...'); + await userEvent.type(searchInput, 'permissions'); + + // Should only show the permissions failure + 
expect(screen.queryByText('/test/path/long/directory/name')).not.toBeInTheDocument(); + expect(screen.getByText('/test/path/permissions')).toBeInTheDocument(); + }); + + it('handles severity filtering correctly', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Filter by critical severity + const severitySelect = screen.getByLabelText('Severity'); + fireEvent.mouseDown(severitySelect); + await userEvent.click(screen.getByText('Critical')); + + // Should only show the critical failure + expect(screen.queryByText('/test/path/long/directory/name')).not.toBeInTheDocument(); + expect(screen.getByText('/test/path/permissions')).toBeInTheDocument(); + }); + + it('expands failure details when clicked', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Click on the first failure to expand it + const firstFailure = screen.getByText('/test/path/long/directory/name'); + await userEvent.click(firstFailure); + + // Should show detailed information + await waitFor(() => { + expect(screen.getByText('Request timeout after 30 seconds')).toBeInTheDocument(); + expect(screen.getByText('Recommended Action')).toBeInTheDocument(); + }); + }); + + it('handles retry action correctly', async () => { + const mockRetryResponse = { + data: { + success: true, + message: 'Retry scheduled', + directory_path: '/test/path/long/directory/name', + }, + }; + + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + vi.mocked(webdavService.retryFailure).mockResolvedValue(mockRetryResponse as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Expand the first failure + const firstFailure = screen.getByText('/test/path/long/directory/name'); + await userEvent.click(firstFailure); + + // Wait for details to load and click retry + await waitFor(() => { + expect(screen.getByText('Retry Scan')).toBeInTheDocument(); + }); + + const retryButton = screen.getByText('Retry Scan'); + await userEvent.click(retryButton); + + // Should open confirmation dialog + await waitFor(() => { + expect(screen.getByText('Retry WebDAV Scan')).toBeInTheDocument(); + }); + + // Confirm retry + const confirmButton = screen.getByRole('button', { name: 'Retry Now' }); + await userEvent.click(confirmButton); + + // Should call the retry API + await waitFor(() => { + expect(webdavService.retryFailure).toHaveBeenCalledWith('1', { notes: undefined }); + }); + + // Should show success notification + expect(mockShowNotification).toHaveBeenCalledWith({ + type: 'success', + message: 'Retry scheduled for: /test/path/long/directory/name', + }); + }); + + it('handles exclude action correctly', async () => { + const mockExcludeResponse = { + data: { + success: true, + message: 'Directory excluded', + directory_path: '/test/path/long/directory/name', + permanent: true, + }, + }; + + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + vi.mocked(webdavService.excludeFailure).mockResolvedValue(mockExcludeResponse as any); + + 
renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Expand the first failure + const firstFailure = screen.getByText('/test/path/long/directory/name'); + await userEvent.click(firstFailure); + + // Wait for details to load and click exclude + await waitFor(() => { + expect(screen.getByText('Exclude Directory')).toBeInTheDocument(); + }); + + const excludeButton = screen.getByText('Exclude Directory'); + await userEvent.click(excludeButton); + + // Should open confirmation dialog + await waitFor(() => { + expect(screen.getByText('Exclude Directory from Scanning')).toBeInTheDocument(); + }); + + // Confirm exclude + const confirmButton = screen.getByRole('button', { name: 'Exclude Directory' }); + await userEvent.click(confirmButton); + + // Should call the exclude API + await waitFor(() => { + expect(webdavService.excludeFailure).toHaveBeenCalledWith('1', { + notes: undefined, + permanent: true, + }); + }); + + // Should show success notification + expect(mockShowNotification).toHaveBeenCalledWith({ + type: 'success', + message: 'Directory excluded: /test/path/long/directory/name', + }); + }); + + it('displays empty state when no failures exist', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: { + failures: [], + stats: { + active_failures: 0, + resolved_failures: 0, + excluded_directories: 0, + critical_failures: 0, + high_failures: 0, + medium_failures: 0, + low_failures: 0, + ready_for_retry: 0, + }, + }, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('No Scan Failures Found')).toBeInTheDocument(); + expect(screen.getByText('All WebDAV directories are scanning successfully!')).toBeInTheDocument(); + }); + }); + + it('refreshes data when refresh button is clicked', async () => { + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(screen.getByText('/test/path/long/directory/name')).toBeInTheDocument(); + }); + + // Click refresh button + const refreshButton = screen.getByRole('button', { name: '' }); // IconButton without accessible name + await userEvent.click(refreshButton); + + // Should call API again + expect(webdavService.getScanFailures).toHaveBeenCalledTimes(2); + }); + + it('auto-refreshes data when autoRefresh is enabled', async () => { + vi.useFakeTimers(); + + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(webdavService.getScanFailures).toHaveBeenCalledTimes(1); + }); + + // Fast-forward time + vi.advanceTimersByTime(1000); + + await waitFor(() => { + expect(webdavService.getScanFailures).toHaveBeenCalledTimes(2); + }); + + vi.useRealTimers(); + }); + + it('does not auto-refresh when autoRefresh is disabled', async () => { + vi.useFakeTimers(); + + vi.mocked(webdavService.getScanFailures).mockResolvedValue({ + data: mockScanFailuresData, + } as any); + + renderWithProviders(); + + await waitFor(() => { + expect(webdavService.getScanFailures).toHaveBeenCalledTimes(1); + }); + + // Fast-forward time + vi.advanceTimersByTime(30000); + + // Should still only be called once + expect(webdavService.getScanFailures).toHaveBeenCalledTimes(1); + + vi.useRealTimers(); + }); +}); \ No newline at end of file diff --git a/frontend/src/components/WebDAVScanFailures/index.ts 
b/frontend/src/components/WebDAVScanFailures/index.ts new file mode 100644 index 0000000..65c6c03 --- /dev/null +++ b/frontend/src/components/WebDAVScanFailures/index.ts @@ -0,0 +1,4 @@ +export { default } from './WebDAVScanFailures'; +export { default as StatsDashboard } from './StatsDashboard'; +export { default as FailureDetailsPanel } from './FailureDetailsPanel'; +export { default as RecommendationsSection } from './RecommendationsSection'; \ No newline at end of file diff --git a/frontend/src/services/api.ts b/frontend/src/services/api.ts index 599977e..8dece87 100644 --- a/frontend/src/services/api.ts +++ b/frontend/src/services/api.ts @@ -696,6 +696,117 @@ export const userWatchService = { }, } +// WebDAV Scan Failure Types +export interface WebDAVScanFailure { + id: string + directory_path: string + failure_type: WebDAVScanFailureType + failure_severity: WebDAVScanFailureSeverity + failure_count: number + consecutive_failures: number + first_failure_at: string + last_failure_at: string + next_retry_at?: string + error_message?: string + http_status_code?: number + user_excluded: boolean + user_notes?: string + resolved: boolean + diagnostic_summary: WebDAVFailureDiagnostics +} + +export type WebDAVScanFailureType = + | 'timeout' + | 'path_too_long' + | 'permission_denied' + | 'invalid_characters' + | 'network_error' + | 'server_error' + | 'xml_parse_error' + | 'too_many_items' + | 'depth_limit' + | 'size_limit' + | 'unknown' + +export type WebDAVScanFailureSeverity = + | 'low' + | 'medium' + | 'high' + | 'critical' + +export interface WebDAVFailureDiagnostics { + path_length?: number + directory_depth?: number + estimated_item_count?: number + response_time_ms?: number + response_size_mb?: number + server_type?: string + recommended_action: string + can_retry: boolean + user_action_required: boolean +} + +export interface WebDAVScanFailureStats { + active_failures: number + resolved_failures: number + excluded_directories: number + critical_failures: number + high_failures: number + medium_failures: number + low_failures: number + ready_for_retry: number +} + +export interface WebDAVScanFailuresResponse { + failures: WebDAVScanFailure[] + stats: WebDAVScanFailureStats +} + +export interface RetryFailureRequest { + notes?: string +} + +export interface ExcludeFailureRequest { + notes?: string + permanent: boolean +} + +export interface RetryResponse { + success: boolean + message: string + directory_path: string +} + +export interface ExcludeResponse { + success: boolean + message: string + directory_path: string + permanent: boolean +} + +// WebDAV Scan Failures Service +export const webdavService = { + getScanFailures: () => { + return api.get('/webdav/scan-failures') + }, + + getScanFailure: (id: string) => { + return api.get(`/webdav/scan-failures/${id}`) + }, + + retryFailure: (id: string, request: RetryFailureRequest) => { + return api.post(`/webdav/scan-failures/${id}/retry`, request) + }, + + excludeFailure: (id: string, request: ExcludeFailureRequest) => { + return api.post(`/webdav/scan-failures/${id}/exclude`, request) + }, + + getRetryCandidates: () => { + return api.get<{ directories: string[], count: number }>('/webdav/scan-failures/retry-candidates') + } +} + export const sourcesService = { triggerSync: (sourceId: string) => { return api.post(`/sources/${sourceId}/sync`) diff --git a/migrations/20250813000001_add_webdav_scan_failures.sql b/migrations/20250813000001_add_webdav_scan_failures.sql new file mode 100644 index 0000000..d53fce0 --- /dev/null +++ 
b/migrations/20250813000001_add_webdav_scan_failures.sql @@ -0,0 +1,299 @@ +-- WebDAV Scan Failures Tracking System +-- This migration creates a comprehensive failure tracking system for WebDAV directory scans + +-- Create enum for failure types +CREATE TYPE webdav_scan_failure_type AS ENUM ( + 'timeout', -- Directory scan took too long + 'path_too_long', -- Path exceeds filesystem limits + 'permission_denied', -- Access denied + 'invalid_characters',-- Invalid characters in path + 'network_error', -- Network connectivity issues + 'server_error', -- Server returned error (404, 500, etc.) + 'xml_parse_error', -- Malformed XML response + 'too_many_items', -- Directory has too many items + 'depth_limit', -- Directory depth exceeds limit + 'size_limit', -- Directory size exceeds limit + 'unknown' -- Unknown error type +); + +-- Create enum for failure severity +CREATE TYPE webdav_scan_failure_severity AS ENUM ( + 'low', -- Can be retried, likely temporary + 'medium', -- May succeed with adjustments + 'high', -- Unlikely to succeed without intervention + 'critical' -- Will never succeed, permanent issue +); + +-- Main table for tracking scan failures +CREATE TABLE IF NOT EXISTS webdav_scan_failures ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + directory_path TEXT NOT NULL, + + -- Failure tracking + failure_type webdav_scan_failure_type NOT NULL DEFAULT 'unknown', + failure_severity webdav_scan_failure_severity NOT NULL DEFAULT 'medium', + failure_count INTEGER NOT NULL DEFAULT 1, + consecutive_failures INTEGER NOT NULL DEFAULT 1, + + -- Timestamps + first_failure_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + last_failure_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + last_retry_at TIMESTAMP WITH TIME ZONE, + next_retry_at TIMESTAMP WITH TIME ZONE, + + -- Error details + error_message TEXT, + error_code TEXT, + http_status_code INTEGER, + + -- Diagnostic information + response_time_ms INTEGER, -- How long the request took + response_size_bytes BIGINT, -- Size of response (for timeout diagnosis) + path_length INTEGER, -- Length of the path + directory_depth INTEGER, -- How deep in the hierarchy + estimated_item_count INTEGER, -- Estimated number of items + server_type TEXT, -- WebDAV server type + server_version TEXT, -- Server version if available + + -- Additional context + diagnostic_data JSONB, -- Flexible field for additional diagnostics + + -- User actions + user_excluded BOOLEAN DEFAULT FALSE, -- User marked as permanently excluded + user_notes TEXT, -- User-provided notes about the issue + + -- Retry strategy + retry_strategy TEXT, -- Strategy for retrying (exponential, linear, etc.) 
+ max_retries INTEGER DEFAULT 5, -- Maximum number of retries + retry_delay_seconds INTEGER DEFAULT 300, -- Base delay between retries + + -- Resolution tracking + resolved BOOLEAN DEFAULT FALSE, + resolved_at TIMESTAMP WITH TIME ZONE, + resolution_method TEXT, -- How it was resolved + + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + + -- Unique constraint to prevent duplicates + CONSTRAINT unique_user_directory_failure UNIQUE (user_id, directory_path) +); + +-- Create indexes for efficient querying +CREATE INDEX idx_webdav_scan_failures_user_id ON webdav_scan_failures(user_id); +CREATE INDEX idx_webdav_scan_failures_severity ON webdav_scan_failures(failure_severity); +CREATE INDEX idx_webdav_scan_failures_type ON webdav_scan_failures(failure_type); +CREATE INDEX idx_webdav_scan_failures_resolved ON webdav_scan_failures(resolved); +CREATE INDEX idx_webdav_scan_failures_next_retry ON webdav_scan_failures(next_retry_at) WHERE NOT resolved AND NOT user_excluded; +CREATE INDEX idx_webdav_scan_failures_path ON webdav_scan_failures(directory_path); + +-- Function to calculate next retry time with exponential backoff +CREATE OR REPLACE FUNCTION calculate_next_retry_time( + failure_count INTEGER, + base_delay_seconds INTEGER, + max_delay_seconds INTEGER DEFAULT 86400 -- 24 hours max +) RETURNS TIMESTAMP WITH TIME ZONE AS $$ +DECLARE + delay_seconds INTEGER; +BEGIN + -- Exponential backoff: delay = base * 2^(failure_count - 1) + -- Cap at max_delay_seconds + delay_seconds := LEAST( + base_delay_seconds * POWER(2, LEAST(failure_count - 1, 10)), + max_delay_seconds + ); + + RETURN NOW() + (delay_seconds || ' seconds')::INTERVAL; +END; +$$ LANGUAGE plpgsql IMMUTABLE; + +-- Function to record or update a scan failure +CREATE OR REPLACE FUNCTION record_webdav_scan_failure( + p_user_id UUID, + p_directory_path TEXT, + p_failure_type webdav_scan_failure_type, + p_error_message TEXT, + p_error_code TEXT DEFAULT NULL, + p_http_status_code INTEGER DEFAULT NULL, + p_response_time_ms INTEGER DEFAULT NULL, + p_response_size_bytes BIGINT DEFAULT NULL, + p_diagnostic_data JSONB DEFAULT NULL +) RETURNS UUID AS $$ +DECLARE + v_failure_id UUID; + v_existing_count INTEGER; + v_severity webdav_scan_failure_severity; +BEGIN + -- Determine severity based on failure type + v_severity := CASE p_failure_type + WHEN 'timeout' THEN 'medium'::webdav_scan_failure_severity + WHEN 'path_too_long' THEN 'critical'::webdav_scan_failure_severity + WHEN 'permission_denied' THEN 'high'::webdav_scan_failure_severity + WHEN 'invalid_characters' THEN 'critical'::webdav_scan_failure_severity + WHEN 'network_error' THEN 'low'::webdav_scan_failure_severity + WHEN 'server_error' THEN + CASE + WHEN p_http_status_code = 404 THEN 'critical'::webdav_scan_failure_severity + WHEN p_http_status_code >= 500 THEN 'medium'::webdav_scan_failure_severity + ELSE 'medium'::webdav_scan_failure_severity + END + WHEN 'xml_parse_error' THEN 'high'::webdav_scan_failure_severity + WHEN 'too_many_items' THEN 'high'::webdav_scan_failure_severity + WHEN 'depth_limit' THEN 'high'::webdav_scan_failure_severity + WHEN 'size_limit' THEN 'high'::webdav_scan_failure_severity + ELSE 'medium'::webdav_scan_failure_severity + END; + + -- Insert or update the failure record + INSERT INTO webdav_scan_failures ( + user_id, + directory_path, + failure_type, + failure_severity, + failure_count, + consecutive_failures, + error_message, + error_code, + http_status_code, + response_time_ms, + response_size_bytes, + 
path_length, + directory_depth, + diagnostic_data, + next_retry_at + ) VALUES ( + p_user_id, + p_directory_path, + p_failure_type, + v_severity, + 1, + 1, + p_error_message, + p_error_code, + p_http_status_code, + p_response_time_ms, + p_response_size_bytes, + LENGTH(p_directory_path), + array_length(string_to_array(p_directory_path, '/'), 1) - 1, + p_diagnostic_data, + calculate_next_retry_time(1, 300, 86400) + ) + ON CONFLICT (user_id, directory_path) DO UPDATE SET + failure_type = EXCLUDED.failure_type, + failure_severity = EXCLUDED.failure_severity, + failure_count = webdav_scan_failures.failure_count + 1, + consecutive_failures = webdav_scan_failures.consecutive_failures + 1, + last_failure_at = NOW(), + error_message = EXCLUDED.error_message, + error_code = EXCLUDED.error_code, + http_status_code = EXCLUDED.http_status_code, + response_time_ms = EXCLUDED.response_time_ms, + response_size_bytes = EXCLUDED.response_size_bytes, + diagnostic_data = COALESCE(EXCLUDED.diagnostic_data, webdav_scan_failures.diagnostic_data), + next_retry_at = calculate_next_retry_time( + webdav_scan_failures.failure_count + 1, + webdav_scan_failures.retry_delay_seconds, + 86400 + ), + resolved = FALSE, + updated_at = NOW() + RETURNING id INTO v_failure_id; + + RETURN v_failure_id; +END; +$$ LANGUAGE plpgsql; + +-- Function to reset a failure for retry +CREATE OR REPLACE FUNCTION reset_webdav_scan_failure( + p_user_id UUID, + p_directory_path TEXT +) RETURNS BOOLEAN AS $$ +DECLARE + v_updated INTEGER; +BEGIN + UPDATE webdav_scan_failures + SET + consecutive_failures = 0, + last_retry_at = NOW(), + next_retry_at = NOW(), -- Retry immediately + resolved = FALSE, + user_excluded = FALSE, + updated_at = NOW() + WHERE user_id = p_user_id + AND directory_path = p_directory_path + AND NOT resolved; + + GET DIAGNOSTICS v_updated = ROW_COUNT; + RETURN v_updated > 0; +END; +$$ LANGUAGE plpgsql; + +-- Function to mark a failure as resolved +CREATE OR REPLACE FUNCTION resolve_webdav_scan_failure( + p_user_id UUID, + p_directory_path TEXT, + p_resolution_method TEXT DEFAULT 'automatic' +) RETURNS BOOLEAN AS $$ +DECLARE + v_updated INTEGER; +BEGIN + UPDATE webdav_scan_failures + SET + resolved = TRUE, + resolved_at = NOW(), + resolution_method = p_resolution_method, + consecutive_failures = 0, + updated_at = NOW() + WHERE user_id = p_user_id + AND directory_path = p_directory_path + AND NOT resolved; + + GET DIAGNOSTICS v_updated = ROW_COUNT; + RETURN v_updated > 0; +END; +$$ LANGUAGE plpgsql; + +-- View for active failures that need attention +CREATE VIEW active_webdav_scan_failures AS +SELECT + wsf.*, + u.username, + u.email, + CASE + WHEN wsf.failure_count > 10 THEN 'chronic' + WHEN wsf.failure_count > 5 THEN 'persistent' + WHEN wsf.failure_count > 2 THEN 'recurring' + ELSE 'recent' + END as failure_status, + CASE + WHEN wsf.next_retry_at < NOW() THEN 'ready_for_retry' + WHEN wsf.user_excluded THEN 'excluded' + WHEN wsf.failure_severity = 'critical' THEN 'needs_intervention' + ELSE 'scheduled' + END as action_status +FROM webdav_scan_failures wsf +JOIN users u ON wsf.user_id = u.id +WHERE NOT wsf.resolved; + +-- Trigger to update the updated_at timestamp +CREATE OR REPLACE FUNCTION update_webdav_scan_failures_updated_at() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER update_webdav_scan_failures_updated_at + BEFORE UPDATE ON webdav_scan_failures + FOR EACH ROW + EXECUTE FUNCTION update_webdav_scan_failures_updated_at(); + +-- Comments for documentation 
+COMMENT ON TABLE webdav_scan_failures IS 'Tracks failures during WebDAV directory scanning with detailed diagnostics'; +COMMENT ON COLUMN webdav_scan_failures.failure_type IS 'Categorized type of failure for analysis and handling'; +COMMENT ON COLUMN webdav_scan_failures.failure_severity IS 'Severity level determining retry strategy and user notification'; +COMMENT ON COLUMN webdav_scan_failures.diagnostic_data IS 'Flexible JSON field for storing additional diagnostic information'; +COMMENT ON COLUMN webdav_scan_failures.user_excluded IS 'User has marked this directory to be permanently excluded from scanning'; +COMMENT ON COLUMN webdav_scan_failures.consecutive_failures IS 'Number of consecutive failures without a successful scan'; \ No newline at end of file diff --git a/src/db/webdav.rs b/src/db/webdav.rs index f0883e8..816f1a3 100644 --- a/src/db/webdav.rs +++ b/src/db/webdav.rs @@ -611,4 +611,360 @@ impl Database { tx.commit().await?; Ok((updated_directories, deleted_count)) } + + // ===== WebDAV Scan Failure Tracking Methods ===== + + /// Record a new scan failure or increment existing failure count + pub async fn record_scan_failure(&self, failure: &crate::models::CreateWebDAVScanFailure) -> Result { + let failure_type_str = failure.failure_type.to_string(); + + // Classify the error to determine appropriate failure type and severity + let (failure_type, severity) = self.classify_scan_error(&failure); + + let mut diagnostic_data = failure.diagnostic_data.clone().unwrap_or(serde_json::json!({})); + + // Add additional diagnostic information + if let Some(data) = diagnostic_data.as_object_mut() { + data.insert("path_length".to_string(), serde_json::json!(failure.directory_path.len())); + data.insert("directory_depth".to_string(), serde_json::json!(failure.directory_path.matches('/').count())); + + if let Some(server_type) = &failure.server_type { + data.insert("server_type".to_string(), serde_json::json!(server_type)); + } + if let Some(server_version) = &failure.server_version { + data.insert("server_version".to_string(), serde_json::json!(server_version)); + } + } + + let row = sqlx::query( + r#"SELECT record_webdav_scan_failure($1, $2, $3, $4, $5, $6, $7, $8, $9) as failure_id"# + ) + .bind(failure.user_id) + .bind(&failure.directory_path) + .bind(failure_type_str) + .bind(&failure.error_message) + .bind(&failure.error_code) + .bind(failure.http_status_code) + .bind(failure.response_time_ms) + .bind(failure.response_size_bytes) + .bind(&diagnostic_data) + .fetch_one(&self.pool) + .await?; + + Ok(row.get("failure_id")) + } + + /// Get all scan failures for a user + pub async fn get_scan_failures(&self, user_id: Uuid, include_resolved: bool) -> Result> { + let query = if include_resolved { + r#"SELECT * FROM webdav_scan_failures + WHERE user_id = $1 + ORDER BY last_failure_at DESC"# + } else { + r#"SELECT * FROM webdav_scan_failures + WHERE user_id = $1 AND NOT resolved AND NOT user_excluded + ORDER BY failure_severity DESC, last_failure_at DESC"# + }; + + let rows = sqlx::query_as::<_, crate::models::WebDAVScanFailure>(query) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + + Ok(rows) + } + + /// Get failure count for a specific directory + pub async fn get_failure_count(&self, user_id: Uuid, directory_path: &str) -> Result> { + let row = sqlx::query( + r#"SELECT failure_count FROM webdav_scan_failures + WHERE user_id = $1 AND directory_path = $2"# + ) + .bind(user_id) + .bind(directory_path) + .fetch_optional(&self.pool) + .await?; + + Ok(row.map(|r| r.get("failure_count"))) 
+ } + + /// Check if a directory is a known failure that should be skipped + pub async fn is_known_failure(&self, user_id: Uuid, directory_path: &str) -> Result { + let row = sqlx::query( + r#"SELECT 1 FROM webdav_scan_failures + WHERE user_id = $1 AND directory_path = $2 + AND NOT resolved + AND (user_excluded = TRUE OR + (failure_severity IN ('critical', 'high') AND failure_count > 3) OR + (next_retry_at IS NULL OR next_retry_at > NOW()))"# + ) + .bind(user_id) + .bind(directory_path) + .fetch_optional(&self.pool) + .await?; + + Ok(row.is_some()) + } + + /// Get directories ready for retry + pub async fn get_directories_ready_for_retry(&self, user_id: Uuid) -> Result> { + let rows = sqlx::query( + r#"SELECT directory_path FROM webdav_scan_failures + WHERE user_id = $1 + AND NOT resolved + AND NOT user_excluded + AND next_retry_at <= NOW() + AND failure_count < max_retries + ORDER BY failure_severity ASC, next_retry_at ASC + LIMIT 10"# + ) + .bind(user_id) + .fetch_all(&self.pool) + .await?; + + Ok(rows.into_iter().map(|row| row.get("directory_path")).collect()) + } + + /// Reset a failure for retry + pub async fn reset_scan_failure(&self, user_id: Uuid, directory_path: &str) -> Result { + let row = sqlx::query( + r#"SELECT reset_webdav_scan_failure($1, $2) as success"# + ) + .bind(user_id) + .bind(directory_path) + .fetch_one(&self.pool) + .await?; + + Ok(row.get("success")) + } + + /// Mark a failure as resolved + pub async fn resolve_scan_failure(&self, user_id: Uuid, directory_path: &str, resolution_method: &str) -> Result { + let row = sqlx::query( + r#"SELECT resolve_webdav_scan_failure($1, $2, $3) as success"# + ) + .bind(user_id) + .bind(directory_path) + .bind(resolution_method) + .fetch_one(&self.pool) + .await?; + + Ok(row.get("success")) + } + + /// Mark a directory as permanently excluded by user + pub async fn exclude_directory_from_scan(&self, user_id: Uuid, directory_path: &str, user_notes: Option<&str>) -> Result<()> { + sqlx::query( + r#"UPDATE webdav_scan_failures + SET user_excluded = TRUE, + user_notes = COALESCE($3, user_notes), + updated_at = NOW() + WHERE user_id = $1 AND directory_path = $2"# + ) + .bind(user_id) + .bind(directory_path) + .bind(user_notes) + .execute(&self.pool) + .await?; + + Ok(()) + } + + /// Get scan failure statistics for a user + pub async fn get_scan_failure_stats(&self, user_id: Uuid) -> Result { + let row = sqlx::query( + r#"SELECT + COUNT(*) FILTER (WHERE NOT resolved) as active_failures, + COUNT(*) FILTER (WHERE resolved) as resolved_failures, + COUNT(*) FILTER (WHERE user_excluded) as excluded_directories, + COUNT(*) FILTER (WHERE failure_severity = 'critical' AND NOT resolved) as critical_failures, + COUNT(*) FILTER (WHERE failure_severity = 'high' AND NOT resolved) as high_failures, + COUNT(*) FILTER (WHERE failure_severity = 'medium' AND NOT resolved) as medium_failures, + COUNT(*) FILTER (WHERE failure_severity = 'low' AND NOT resolved) as low_failures, + COUNT(*) FILTER (WHERE next_retry_at <= NOW() AND NOT resolved AND NOT user_excluded) as ready_for_retry + FROM webdav_scan_failures + WHERE user_id = $1"# + ) + .bind(user_id) + .fetch_one(&self.pool) + .await?; + + Ok(serde_json::json!({ + "active_failures": row.get::("active_failures"), + "resolved_failures": row.get::("resolved_failures"), + "excluded_directories": row.get::("excluded_directories"), + "critical_failures": row.get::("critical_failures"), + "high_failures": row.get::("high_failures"), + "medium_failures": row.get::("medium_failures"), + "low_failures": 
row.get::("low_failures"), + "ready_for_retry": row.get::("ready_for_retry"), + })) + } + + /// Helper function to classify scan errors + fn classify_scan_error(&self, failure: &crate::models::CreateWebDAVScanFailure) -> (String, String) { + use crate::models::WebDAVScanFailureType; + + let failure_type = &failure.failure_type; + let error_msg = failure.error_message.to_lowercase(); + let status_code = failure.http_status_code; + + // Determine severity based on error characteristics + let severity = match failure_type { + WebDAVScanFailureType::PathTooLong | + WebDAVScanFailureType::InvalidCharacters => "critical", + + WebDAVScanFailureType::PermissionDenied | + WebDAVScanFailureType::XmlParseError | + WebDAVScanFailureType::TooManyItems | + WebDAVScanFailureType::DepthLimit | + WebDAVScanFailureType::SizeLimit => "high", + + WebDAVScanFailureType::Timeout | + WebDAVScanFailureType::ServerError => { + if let Some(code) = status_code { + if code == 404 { + "critical" + } else if code >= 500 { + "medium" + } else { + "medium" + } + } else { + "medium" + } + }, + + WebDAVScanFailureType::NetworkError => "low", + + WebDAVScanFailureType::Unknown => { + // Try to infer from error message + if error_msg.contains("timeout") || error_msg.contains("timed out") { + "medium" + } else if error_msg.contains("permission") || error_msg.contains("forbidden") { + "high" + } else if error_msg.contains("not found") || error_msg.contains("404") { + "critical" + } else { + "medium" + } + } + }; + + (failure_type.to_string(), severity.to_string()) + } + + /// Get detailed failure information with diagnostics + pub async fn get_scan_failure_with_diagnostics(&self, user_id: Uuid, failure_id: Uuid) -> Result> { + let failure = sqlx::query_as::<_, crate::models::WebDAVScanFailure>( + r#"SELECT * FROM webdav_scan_failures + WHERE user_id = $1 AND id = $2"# + ) + .bind(user_id) + .bind(failure_id) + .fetch_optional(&self.pool) + .await?; + + match failure { + Some(f) => { + let diagnostics = self.build_failure_diagnostics(&f); + + Ok(Some(crate::models::WebDAVScanFailureResponse { + id: f.id, + directory_path: f.directory_path, + failure_type: f.failure_type, + failure_severity: f.failure_severity, + failure_count: f.failure_count, + consecutive_failures: f.consecutive_failures, + first_failure_at: f.first_failure_at, + last_failure_at: f.last_failure_at, + next_retry_at: f.next_retry_at, + error_message: f.error_message, + http_status_code: f.http_status_code, + user_excluded: f.user_excluded, + user_notes: f.user_notes, + resolved: f.resolved, + diagnostic_summary: diagnostics, + })) + }, + None => Ok(None) + } + } + + /// Build diagnostic summary for a failure + fn build_failure_diagnostics(&self, failure: &crate::models::WebDAVScanFailure) -> crate::models::WebDAVFailureDiagnostics { + use crate::models::{WebDAVScanFailureType, WebDAVScanFailureSeverity}; + + let response_size_mb = failure.response_size_bytes.map(|b| b as f64 / 1_048_576.0); + + let (recommended_action, can_retry, user_action_required) = match (&failure.failure_type, &failure.failure_severity) { + (WebDAVScanFailureType::PathTooLong, _) => ( + "Path exceeds system limits. Consider reorganizing directory structure.".to_string(), + false, + true + ), + (WebDAVScanFailureType::InvalidCharacters, _) => ( + "Path contains invalid characters. Rename the directory to remove special characters.".to_string(), + false, + true + ), + (WebDAVScanFailureType::PermissionDenied, _) => ( + "Access denied. 
Check WebDAV permissions for this directory.".to_string(), + false, + true + ), + (WebDAVScanFailureType::TooManyItems, _) => ( + "Directory contains too many items. Consider splitting into subdirectories.".to_string(), + false, + true + ), + (WebDAVScanFailureType::Timeout, _) if failure.failure_count > 3 => ( + "Repeated timeouts. Directory may be too large or server is slow.".to_string(), + true, + false + ), + (WebDAVScanFailureType::NetworkError, _) => ( + "Network error. Will retry automatically.".to_string(), + true, + false + ), + (WebDAVScanFailureType::ServerError, _) if failure.http_status_code == Some(404) => ( + "Directory not found on server. It may have been deleted.".to_string(), + false, + false + ), + (WebDAVScanFailureType::ServerError, _) => ( + "Server error. Will retry when server is available.".to_string(), + true, + false + ), + _ if failure.failure_severity == WebDAVScanFailureSeverity::Critical => ( + "Critical error that requires manual intervention.".to_string(), + false, + true + ), + _ if failure.failure_count > 10 => ( + "Multiple failures. Consider excluding this directory.".to_string(), + true, + true + ), + _ => ( + "Temporary error. Will retry automatically.".to_string(), + true, + false + ) + }; + + crate::models::WebDAVFailureDiagnostics { + path_length: failure.path_length, + directory_depth: failure.directory_depth, + estimated_item_count: failure.estimated_item_count, + response_time_ms: failure.response_time_ms, + response_size_mb, + server_type: failure.server_type.clone(), + recommended_action, + can_retry, + user_action_required, + } + } } \ No newline at end of file diff --git a/src/models/source.rs b/src/models/source.rs index 4140c25..7ec4170 100644 --- a/src/models/source.rs +++ b/src/models/source.rs @@ -324,6 +324,200 @@ pub struct UpdateWebDAVDirectory { pub total_size_bytes: i64, } +// WebDAV Scan Failure Tracking Models + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)] +#[serde(rename_all = "snake_case")] +pub enum WebDAVScanFailureType { + Timeout, + PathTooLong, + PermissionDenied, + InvalidCharacters, + NetworkError, + ServerError, + XmlParseError, + TooManyItems, + DepthLimit, + SizeLimit, + Unknown, +} + +impl std::fmt::Display for WebDAVScanFailureType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Timeout => write!(f, "timeout"), + Self::PathTooLong => write!(f, "path_too_long"), + Self::PermissionDenied => write!(f, "permission_denied"), + Self::InvalidCharacters => write!(f, "invalid_characters"), + Self::NetworkError => write!(f, "network_error"), + Self::ServerError => write!(f, "server_error"), + Self::XmlParseError => write!(f, "xml_parse_error"), + Self::TooManyItems => write!(f, "too_many_items"), + Self::DepthLimit => write!(f, "depth_limit"), + Self::SizeLimit => write!(f, "size_limit"), + Self::Unknown => write!(f, "unknown"), + } + } +} + +impl TryFrom for WebDAVScanFailureType { + type Error = String; + + fn try_from(value: String) -> Result { + match value.as_str() { + "timeout" => Ok(Self::Timeout), + "path_too_long" => Ok(Self::PathTooLong), + "permission_denied" => Ok(Self::PermissionDenied), + "invalid_characters" => Ok(Self::InvalidCharacters), + "network_error" => Ok(Self::NetworkError), + "server_error" => Ok(Self::ServerError), + "xml_parse_error" => Ok(Self::XmlParseError), + "too_many_items" => Ok(Self::TooManyItems), + "depth_limit" => Ok(Self::DepthLimit), + "size_limit" => Ok(Self::SizeLimit), + "unknown" => Ok(Self::Unknown), + _ 
=> Err(format!("Invalid WebDAV scan failure type: {}", value)), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, ToSchema)] +#[serde(rename_all = "snake_case")] +pub enum WebDAVScanFailureSeverity { + Low, + Medium, + High, + Critical, +} + +impl std::fmt::Display for WebDAVScanFailureSeverity { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Low => write!(f, "low"), + Self::Medium => write!(f, "medium"), + Self::High => write!(f, "high"), + Self::Critical => write!(f, "critical"), + } + } +} + +impl TryFrom for WebDAVScanFailureSeverity { + type Error = String; + + fn try_from(value: String) -> Result { + match value.as_str() { + "low" => Ok(Self::Low), + "medium" => Ok(Self::Medium), + "high" => Ok(Self::High), + "critical" => Ok(Self::Critical), + _ => Err(format!("Invalid WebDAV scan failure severity: {}", value)), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, FromRow, ToSchema)] +pub struct WebDAVScanFailure { + pub id: Uuid, + pub user_id: Uuid, + pub directory_path: String, + + // Failure tracking + #[sqlx(try_from = "String")] + pub failure_type: WebDAVScanFailureType, + #[sqlx(try_from = "String")] + pub failure_severity: WebDAVScanFailureSeverity, + pub failure_count: i32, + pub consecutive_failures: i32, + + // Timestamps + pub first_failure_at: DateTime, + pub last_failure_at: DateTime, + pub last_retry_at: Option>, + pub next_retry_at: Option>, + + // Error details + pub error_message: Option, + pub error_code: Option, + pub http_status_code: Option, + + // Diagnostic information + pub response_time_ms: Option, + pub response_size_bytes: Option, + pub path_length: Option, + pub directory_depth: Option, + pub estimated_item_count: Option, + pub server_type: Option, + pub server_version: Option, + + // Additional context + pub diagnostic_data: Option, + + // User actions + pub user_excluded: bool, + pub user_notes: Option, + + // Retry strategy + pub retry_strategy: Option, + pub max_retries: i32, + pub retry_delay_seconds: i32, + + // Resolution tracking + pub resolved: bool, + pub resolved_at: Option>, + pub resolution_method: Option, + + pub created_at: DateTime, + pub updated_at: DateTime, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +pub struct CreateWebDAVScanFailure { + pub user_id: Uuid, + pub directory_path: String, + pub failure_type: WebDAVScanFailureType, + pub error_message: String, + pub error_code: Option, + pub http_status_code: Option, + pub response_time_ms: Option, + pub response_size_bytes: Option, + pub diagnostic_data: Option, + pub server_type: Option, + pub server_version: Option, + pub estimated_item_count: Option, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +pub struct WebDAVScanFailureResponse { + pub id: Uuid, + pub directory_path: String, + pub failure_type: WebDAVScanFailureType, + pub failure_severity: WebDAVScanFailureSeverity, + pub failure_count: i32, + pub consecutive_failures: i32, + pub first_failure_at: DateTime, + pub last_failure_at: DateTime, + pub next_retry_at: Option>, + pub error_message: Option, + pub http_status_code: Option, + pub user_excluded: bool, + pub user_notes: Option, + pub resolved: bool, + pub diagnostic_summary: WebDAVFailureDiagnostics, +} + +#[derive(Debug, Serialize, Deserialize, ToSchema)] +pub struct WebDAVFailureDiagnostics { + pub path_length: Option, + pub directory_depth: Option, + pub estimated_item_count: Option, + pub response_time_ms: Option, + pub response_size_mb: Option, + pub server_type: 
Option, + pub recommended_action: String, + pub can_retry: bool, + pub user_action_required: bool, +} + // Notification-related structs #[derive(Debug, Serialize, Deserialize, ToSchema)] pub struct Notification { diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 08d2500..6188a67 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -12,4 +12,5 @@ pub mod search; pub mod settings; pub mod sources; pub mod users; -pub mod webdav; \ No newline at end of file +pub mod webdav; +pub mod webdav_scan_failures; \ No newline at end of file diff --git a/src/routes/webdav.rs b/src/routes/webdav.rs index 4bc7881..372313c 100644 --- a/src/routes/webdav.rs +++ b/src/routes/webdav.rs @@ -30,6 +30,12 @@ pub fn router() -> Router> { .route("/sync-status", get(get_webdav_sync_status)) .route("/start-sync", post(start_webdav_sync)) .route("/cancel-sync", post(cancel_webdav_sync)) + // Scan failure tracking endpoints + .route("/scan-failures", get(crate::routes::webdav_scan_failures::list_scan_failures)) + .route("/scan-failures/:id", get(crate::routes::webdav_scan_failures::get_scan_failure)) + .route("/scan-failures/:id/retry", post(crate::routes::webdav_scan_failures::retry_scan_failure)) + .route("/scan-failures/:id/exclude", post(crate::routes::webdav_scan_failures::exclude_scan_failure)) + .route("/scan-failures/retry-candidates", get(crate::routes::webdav_scan_failures::get_retry_candidates)) } async fn get_user_webdav_config(state: &Arc, user_id: uuid::Uuid) -> Result { diff --git a/src/routes/webdav_scan_failures.rs b/src/routes/webdav_scan_failures.rs new file mode 100644 index 0000000..34dc7fc --- /dev/null +++ b/src/routes/webdav_scan_failures.rs @@ -0,0 +1,361 @@ +use std::sync::Arc; + +use axum::{ + extract::{Path, State}, + http::StatusCode, + Json, +}; +use serde::{Deserialize, Serialize}; +use serde_json::json; +use tracing::{error, info, warn}; +use uuid::Uuid; +use utoipa::ToSchema; + +use crate::auth::AuthUser; +use crate::models::{WebDAVScanFailure, WebDAVScanFailureResponse}; +use crate::AppState; + +#[derive(Debug, Deserialize, ToSchema)] +pub struct RetryFailureRequest { + /// Optional notes about why the retry is being attempted + pub notes: Option, +} + +#[derive(Debug, Deserialize, ToSchema)] +pub struct ExcludeFailureRequest { + /// User notes about why the directory is being excluded + pub notes: Option, + /// Whether to permanently exclude (true) or just temporarily (false) + pub permanent: bool, +} + +#[derive(Debug, Serialize, ToSchema)] +pub struct ScanFailureStatsResponse { + pub active_failures: i64, + pub resolved_failures: i64, + pub excluded_directories: i64, + pub critical_failures: i64, + pub high_failures: i64, + pub medium_failures: i64, + pub low_failures: i64, + pub ready_for_retry: i64, +} + +#[derive(Debug, Serialize, ToSchema)] +pub struct ScanFailuresListResponse { + pub failures: Vec, + pub stats: ScanFailureStatsResponse, +} + +/// GET /api/webdav/scan-failures - List all scan failures for the authenticated user +#[utoipa::path( + get, + path = "/api/webdav/scan-failures", + responses( + (status = 200, description = "List of scan failures", body = ScanFailuresListResponse), + (status = 401, description = "Unauthorized"), + (status = 500, description = "Internal server error") + ), + security( + ("bearer_auth" = []) + ), + tag = "WebDAV" +)] +pub async fn list_scan_failures( + State(state): State>, + auth_user: AuthUser, +) -> Result, StatusCode> { + info!( + "📋 Listing WebDAV scan failures for user: {}", + auth_user.user.id + ); + + // Get failures 
from database + let failures = state.db.get_scan_failures(auth_user.user.id, false).await + .map_err(|e| { + error!("Failed to get scan failures: {}", e); + StatusCode::INTERNAL_SERVER_ERROR + })?; + + // Get statistics + let stats = state.db.get_scan_failure_stats(auth_user.user.id).await + .map_err(|e| { + error!("Failed to get scan failure stats: {}", e); + StatusCode::INTERNAL_SERVER_ERROR + })?; + + // Convert failures to response format with diagnostics + let mut failure_responses = Vec::new(); + for failure in failures { + if let Ok(Some(response)) = state.db.get_scan_failure_with_diagnostics(auth_user.user.id, failure.id).await { + failure_responses.push(response); + } + } + + // Convert stats to response format + let stats_response = ScanFailureStatsResponse { + active_failures: stats.get("active_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + resolved_failures: stats.get("resolved_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + excluded_directories: stats.get("excluded_directories") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + critical_failures: stats.get("critical_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + high_failures: stats.get("high_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + medium_failures: stats.get("medium_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + low_failures: stats.get("low_failures") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + ready_for_retry: stats.get("ready_for_retry") + .and_then(|v| v.as_i64()) + .unwrap_or(0), + }; + + info!( + "Found {} active scan failures for user", + failure_responses.len() + ); + + Ok(Json(ScanFailuresListResponse { + failures: failure_responses, + stats: stats_response, + })) +} + +/// GET /api/webdav/scan-failures/{id} - Get detailed information about a specific scan failure +#[utoipa::path( + get, + path = "/api/webdav/scan-failures/{id}", + params( + ("id" = Uuid, Path, description = "Scan failure ID") + ), + responses( + (status = 200, description = "Scan failure details", body = WebDAVScanFailureResponse), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Failure not found"), + (status = 500, description = "Internal server error") + ), + security( + ("bearer_auth" = []) + ), + tag = "WebDAV" +)] +pub async fn get_scan_failure( + State(state): State>, + auth_user: AuthUser, + Path(failure_id): Path, +) -> Result, StatusCode> { + info!( + "🔍 Getting scan failure details for ID: {} (user: {})", + failure_id, auth_user.user.id + ); + + match state.db.get_scan_failure_with_diagnostics(auth_user.user.id, failure_id).await { + Ok(Some(failure)) => { + info!("Found scan failure: {}", failure.directory_path); + Ok(Json(serde_json::to_value(failure).unwrap())) + } + Ok(None) => { + warn!("Scan failure not found: {}", failure_id); + Err(StatusCode::NOT_FOUND) + } + Err(e) => { + error!("Failed to get scan failure: {}", e); + Err(StatusCode::INTERNAL_SERVER_ERROR) + } + } +} + +/// POST /api/webdav/scan-failures/{id}/retry - Reset and retry a failed scan +#[utoipa::path( + post, + path = "/api/webdav/scan-failures/{id}/retry", + params( + ("id" = Uuid, Path, description = "Scan failure ID") + ), + request_body = RetryFailureRequest, + responses( + (status = 200, description = "Failure reset for retry"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Failure not found"), + (status = 500, description = "Internal server error") + ), + security( + ("bearer_auth" = []) + ), + tag = "WebDAV" +)] +pub async fn 
retry_scan_failure( + State(state): State>, + auth_user: AuthUser, + Path(failure_id): Path, + Json(request): Json, +) -> Result, StatusCode> { + info!( + "🔄 Retrying scan failure {} for user: {}", + failure_id, auth_user.user.id + ); + + // First get the failure to find the directory path + let failure = match state.db.get_scan_failure_with_diagnostics(auth_user.user.id, failure_id).await { + Ok(Some(f)) => f, + Ok(None) => { + warn!("Scan failure not found for retry: {}", failure_id); + return Err(StatusCode::NOT_FOUND); + } + Err(e) => { + error!("Failed to get scan failure for retry: {}", e); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; + + // Reset the failure for retry + match state.db.reset_scan_failure(auth_user.user.id, &failure.directory_path).await { + Ok(success) => { + if success { + info!( + "✅ Reset scan failure for directory '{}' - ready for retry", + failure.directory_path + ); + + // TODO: Trigger an immediate scan of this directory + // This would integrate with the WebDAV scheduler + + Ok(Json(json!({ + "success": true, + "message": format!("Directory '{}' has been reset and will be retried", failure.directory_path), + "directory_path": failure.directory_path + }))) + } else { + warn!( + "Failed to reset scan failure for directory '{}'", + failure.directory_path + ); + Err(StatusCode::BAD_REQUEST) + } + } + Err(e) => { + error!("Failed to reset scan failure: {}", e); + Err(StatusCode::INTERNAL_SERVER_ERROR) + } + } +} + +/// POST /api/webdav/scan-failures/{id}/exclude - Mark a directory as permanently excluded +#[utoipa::path( + post, + path = "/api/webdav/scan-failures/{id}/exclude", + params( + ("id" = Uuid, Path, description = "Scan failure ID") + ), + request_body = ExcludeFailureRequest, + responses( + (status = 200, description = "Directory excluded from scanning"), + (status = 401, description = "Unauthorized"), + (status = 404, description = "Failure not found"), + (status = 500, description = "Internal server error") + ), + security( + ("bearer_auth" = []) + ), + tag = "WebDAV" +)] +pub async fn exclude_scan_failure( + State(state): State>, + auth_user: AuthUser, + Path(failure_id): Path, + Json(request): Json, +) -> Result, StatusCode> { + info!( + "🚫 Excluding scan failure {} for user: {}", + failure_id, auth_user.user.id + ); + + // First get the failure to find the directory path + let failure = match state.db.get_scan_failure_with_diagnostics(auth_user.user.id, failure_id).await { + Ok(Some(f)) => f, + Ok(None) => { + warn!("Scan failure not found for exclusion: {}", failure_id); + return Err(StatusCode::NOT_FOUND); + } + Err(e) => { + error!("Failed to get scan failure for exclusion: {}", e); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; + + // Exclude the directory + match state.db.exclude_directory_from_scan( + auth_user.user.id, + &failure.directory_path, + request.notes.as_deref(), + ).await { + Ok(()) => { + info!( + "✅ Excluded directory '{}' from scanning", + failure.directory_path + ); + + Ok(Json(json!({ + "success": true, + "message": format!("Directory '{}' has been excluded from scanning", failure.directory_path), + "directory_path": failure.directory_path, + "permanent": request.permanent + }))) + } + Err(e) => { + error!("Failed to exclude directory from scanning: {}", e); + Err(StatusCode::INTERNAL_SERVER_ERROR) + } + } +} + +/// GET /api/webdav/scan-failures/retry-candidates - Get directories ready for retry +#[utoipa::path( + get, + path = "/api/webdav/scan-failures/retry-candidates", + responses( + (status = 200, 
description = "List of directories ready for retry", body = Vec), + (status = 401, description = "Unauthorized"), + (status = 500, description = "Internal server error") + ), + security( + ("bearer_auth" = []) + ), + tag = "WebDAV" +)] +pub async fn get_retry_candidates( + State(state): State>, + auth_user: AuthUser, +) -> Result, StatusCode> { + info!( + "🔍 Getting retry candidates for user: {}", + auth_user.user.id + ); + + match state.db.get_directories_ready_for_retry(auth_user.user.id).await { + Ok(directories) => { + info!( + "Found {} directories ready for retry", + directories.len() + ); + Ok(Json(json!({ + "directories": directories, + "count": directories.len() + }))) + } + Err(e) => { + error!("Failed to get retry candidates: {}", e); + Err(StatusCode::INTERNAL_SERVER_ERROR) + } + } +} \ No newline at end of file diff --git a/src/services/webdav/error_tracking.rs b/src/services/webdav/error_tracking.rs new file mode 100644 index 0000000..29f0c7e --- /dev/null +++ b/src/services/webdav/error_tracking.rs @@ -0,0 +1,348 @@ +use anyhow::{anyhow, Result}; +use std::time::{Duration, Instant}; +use tracing::{debug, error, info, warn}; +use uuid::Uuid; + +use crate::db::Database; +use crate::models::{ + CreateWebDAVScanFailure, WebDAVScanFailureType, WebDAVScanFailure, + WebDAVScanFailureResponse, WebDAVFailureDiagnostics, +}; + +/// Helper for tracking and analyzing WebDAV scan failures +pub struct WebDAVErrorTracker { + db: Database, +} + +impl WebDAVErrorTracker { + pub fn new(db: Database) -> Self { + Self { db } + } + + /// Analyze an error and record it as a scan failure + pub async fn track_scan_error( + &self, + user_id: Uuid, + directory_path: &str, + error: &anyhow::Error, + response_time: Option, + response_size: Option, + server_type: Option<&str>, + ) -> Result<()> { + let failure_type = self.classify_error_type(error); + let http_status = self.extract_http_status(error); + + // Build diagnostic data + let mut diagnostic_data = serde_json::json!({ + "error_chain": format!("{:?}", error), + "timestamp": chrono::Utc::now().to_rfc3339(), + }); + + // Add stack trace if available + if let Some(backtrace) = error.backtrace().to_string().as_str() { + if !backtrace.is_empty() { + diagnostic_data["backtrace"] = serde_json::json!(backtrace); + } + } + + // Estimate item count from error message if possible + let estimated_items = self.estimate_item_count_from_error(error); + + let failure = CreateWebDAVScanFailure { + user_id, + directory_path: directory_path.to_string(), + failure_type, + error_message: error.to_string(), + error_code: self.extract_error_code(error), + http_status_code: http_status, + response_time_ms: response_time.map(|d| d.as_millis() as i32), + response_size_bytes: response_size.map(|s| s as i64), + diagnostic_data: Some(diagnostic_data), + server_type: server_type.map(|s| s.to_string()), + server_version: None, // Could be extracted from headers if available + estimated_item_count: estimated_items, + }; + + match self.db.record_scan_failure(&failure).await { + Ok(failure_id) => { + warn!( + "📝 Recorded scan failure for directory '{}': {} (ID: {})", + directory_path, error, failure_id + ); + } + Err(e) => { + error!( + "Failed to record scan failure for directory '{}': {}", + directory_path, e + ); + } + } + + Ok(()) + } + + /// Check if a directory should be skipped due to previous failures + pub async fn should_skip_directory( + &self, + user_id: Uuid, + directory_path: &str, + ) -> Result { + match self.db.is_known_failure(user_id, directory_path).await { + 
Ok(should_skip) => { + if should_skip { + debug!( + "⏭️ Skipping directory '{}' due to previous failures", + directory_path + ); + } + Ok(should_skip) + } + Err(e) => { + // If we can't check, err on the side of trying to scan + warn!( + "Failed to check failure status for directory '{}': {}", + directory_path, e + ); + Ok(false) + } + } + } + + /// Mark a directory scan as successful (resolves any previous failures) + pub async fn mark_scan_successful( + &self, + user_id: Uuid, + directory_path: &str, + ) -> Result<()> { + match self.db.resolve_scan_failure(user_id, directory_path, "successful_scan").await { + Ok(resolved) => { + if resolved { + info!( + "✅ Resolved previous scan failures for directory '{}'", + directory_path + ); + } + } + Err(e) => { + debug!( + "Failed to mark scan as successful for directory '{}': {}", + directory_path, e + ); + } + } + Ok(()) + } + + /// Get directories that are ready for retry + pub async fn get_retry_candidates(&self, user_id: Uuid) -> Result> { + self.db.get_directories_ready_for_retry(user_id).await + } + + /// Classify the type of error based on error message and context + fn classify_error_type(&self, error: &anyhow::Error) -> WebDAVScanFailureType { + let error_str = error.to_string().to_lowercase(); + + // Check for specific error patterns + if error_str.contains("timeout") || error_str.contains("timed out") { + WebDAVScanFailureType::Timeout + } else if error_str.contains("name too long") || error_str.contains("path too long") { + WebDAVScanFailureType::PathTooLong + } else if error_str.contains("permission denied") || error_str.contains("forbidden") || error_str.contains("401") || error_str.contains("403") { + WebDAVScanFailureType::PermissionDenied + } else if error_str.contains("invalid character") || error_str.contains("illegal character") { + WebDAVScanFailureType::InvalidCharacters + } else if error_str.contains("connection refused") || error_str.contains("network") || error_str.contains("dns") { + WebDAVScanFailureType::NetworkError + } else if error_str.contains("500") || error_str.contains("502") || error_str.contains("503") || error_str.contains("504") { + WebDAVScanFailureType::ServerError + } else if error_str.contains("xml") || error_str.contains("parse") || error_str.contains("malformed") { + WebDAVScanFailureType::XmlParseError + } else if error_str.contains("too many") || error_str.contains("limit exceeded") { + WebDAVScanFailureType::TooManyItems + } else if error_str.contains("depth") || error_str.contains("nested") { + WebDAVScanFailureType::DepthLimit + } else if error_str.contains("size") || error_str.contains("too large") { + WebDAVScanFailureType::SizeLimit + } else if error_str.contains("404") || error_str.contains("not found") { + WebDAVScanFailureType::ServerError // Will be further classified by HTTP status + } else { + WebDAVScanFailureType::Unknown + } + } + + /// Extract HTTP status code from error if present + fn extract_http_status(&self, error: &anyhow::Error) -> Option { + let error_str = error.to_string(); + + // Look for common HTTP status code patterns + if error_str.contains("404") { + Some(404) + } else if error_str.contains("401") { + Some(401) + } else if error_str.contains("403") { + Some(403) + } else if error_str.contains("500") { + Some(500) + } else if error_str.contains("502") { + Some(502) + } else if error_str.contains("503") { + Some(503) + } else if error_str.contains("504") { + Some(504) + } else if error_str.contains("405") { + Some(405) + } else { + // Try to extract any 3-digit number 
that looks like an HTTP status + let re = regex::Regex::new(r"\b([4-5]\d{2})\b").ok()?; + re.captures(&error_str) + .and_then(|cap| cap.get(1)) + .and_then(|m| m.as_str().parse::().ok()) + } + } + + /// Extract error code if present (e.g., system error codes) + fn extract_error_code(&self, error: &anyhow::Error) -> Option { + let error_str = error.to_string(); + + // Look for common error code patterns + if let Some(caps) = regex::Regex::new(r"(?i)error[:\s]+([A-Z0-9_]+)") + .ok() + .and_then(|re| re.captures(&error_str)) + { + return caps.get(1).map(|m| m.as_str().to_string()); + } + + // Look for OS error codes + if let Some(caps) = regex::Regex::new(r"(?i)os error (\d+)") + .ok() + .and_then(|re| re.captures(&error_str)) + { + return caps.get(1).map(|m| format!("OS_{}", m.as_str())); + } + + None + } + + /// Try to estimate item count from error message + fn estimate_item_count_from_error(&self, error: &anyhow::Error) -> Option { + let error_str = error.to_string(); + + // Look for patterns like "1000 items", "contains 500 files", etc. + if let Some(caps) = regex::Regex::new(r"(\d+)\s*(?:items?|files?|directories|folders?|entries)") + .ok() + .and_then(|re| re.captures(&error_str)) + { + return caps.get(1) + .and_then(|m| m.as_str().parse::().ok()); + } + + None + } + + /// Build a user-friendly error message with recommendations + pub fn build_user_friendly_error_message( + &self, + failure: &WebDAVScanFailure, + ) -> String { + use crate::models::WebDAVScanFailureType; + + let base_message = match &failure.failure_type { + WebDAVScanFailureType::Timeout => { + format!( + "The directory '{}' is taking too long to scan. This might be due to a large number of files or slow server response.", + failure.directory_path + ) + } + WebDAVScanFailureType::PathTooLong => { + format!( + "The path '{}' exceeds system limits ({}+ characters). Consider shortening directory names.", + failure.directory_path, + failure.path_length.unwrap_or(0) + ) + } + WebDAVScanFailureType::PermissionDenied => { + format!( + "Access denied to '{}'. Please check your WebDAV permissions.", + failure.directory_path + ) + } + WebDAVScanFailureType::TooManyItems => { + format!( + "Directory '{}' contains too many items (estimated: {}). Consider organizing into subdirectories.", + failure.directory_path, + failure.estimated_item_count.unwrap_or(0) + ) + } + WebDAVScanFailureType::ServerError if failure.http_status_code == Some(404) => { + format!( + "Directory '{}' was not found on the server. 
It may have been deleted or moved.", + failure.directory_path + ) + } + _ => { + format!( + "Failed to scan directory '{}': {}", + failure.directory_path, + failure.error_message.as_ref().unwrap_or(&"Unknown error".to_string()) + ) + } + }; + + // Add retry information if applicable + let retry_info = if failure.consecutive_failures > 1 { + format!( + " This has failed {} times.", + failure.consecutive_failures + ) + } else { + String::new() + }; + + // Add next retry time if scheduled + let next_retry = if let Some(next_retry_at) = failure.next_retry_at { + if !failure.user_excluded && !failure.resolved { + let duration = next_retry_at.signed_duration_since(chrono::Utc::now()); + if duration.num_seconds() > 0 { + format!( + " Will retry in {} minutes.", + duration.num_minutes().max(1) + ) + } else { + " Ready for retry.".to_string() + } + } else { + String::new() + } + } else { + String::new() + }; + + format!("{}{}{}", base_message, retry_info, next_retry) + } +} + +/// Extension trait for WebDAV service to add error tracking capabilities +pub trait WebDAVServiceErrorTracking { + /// Track an error that occurred during scanning + async fn track_scan_error( + &self, + user_id: Uuid, + directory_path: &str, + error: anyhow::Error, + scan_duration: Duration, + ) -> Result<()>; + + /// Check if directory should be skipped + async fn should_skip_for_failures( + &self, + user_id: Uuid, + directory_path: &str, + ) -> Result; + + /// Mark directory scan as successful + async fn mark_scan_success( + &self, + user_id: Uuid, + directory_path: &str, + ) -> Result<()>; +} \ No newline at end of file diff --git a/src/services/webdav/mod.rs b/src/services/webdav/mod.rs index 0eb427e..34bf831 100644 --- a/src/services/webdav/mod.rs +++ b/src/services/webdav/mod.rs @@ -4,6 +4,7 @@ pub mod config; pub mod service; pub mod smart_sync; pub mod progress_shim; // Backward compatibility shim for simplified progress tracking +pub mod error_tracking; // WebDAV scan failure tracking and analysis // Re-export main types for convenience pub use config::{WebDAVConfig, RetryConfig, ConcurrencyConfig};