feat(client): also update sources page and the various buttons
parent 5dfc6e29f7
commit 11c68c3d9f
@@ -244,11 +244,14 @@ const SourcesPage: React.FC = () => {
   };
 
   const handleTestConnection = async () => {
-    if (!editingSource) return;
-
     setTestingConnection(true);
     try {
-      const response = await api.post(`/sources/${editingSource.id}/test`);
+      const response = await api.post('/webdav/test-connection', {
+        server_url: formData.server_url,
+        username: formData.username,
+        password: formData.password,
+        server_type: formData.server_type,
+      });
       if (response.data.success) {
         showSnackbar('Connection successful!', 'success');
       } else {
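The handler now sends the form's WebDAV credentials to a generic test endpoint instead of a per-source route, so a connection can be tested before the source exists. A minimal TypeScript sketch of the shapes this implies; the request fields come from the diff above, while the response `message` field is an assumption, not something this commit shows:

// Hedged sketch only: request fields are taken from the diff above; the
// response shape beyond `success` is assumed, not confirmed by this commit.
interface WebDAVTestConnectionRequest {
  server_url: string;
  username: string;
  password: string;
  server_type: string;
}

interface WebDAVTestConnectionResponse {
  success: boolean;
  message?: string; // assumed optional detail field
}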
@@ -318,18 +321,25 @@ const SourcesPage: React.FC = () => {
 
   // Crawl estimation function
   const estimateCrawl = async () => {
-    if (!editingSource) return;
-
     setEstimatingCrawl(true);
     try {
-      const response = await api.post('/webdav/estimate', {
+      let response;
+      if (editingSource) {
+        // Use the source-specific endpoint for existing sources
+        response = await api.post(`/sources/${editingSource.id}/estimate`);
+      } else {
+        // Use the general endpoint with provided config for new sources
+        response = await api.post('/sources/estimate', {
         server_url: formData.server_url,
         username: formData.username,
         password: formData.password,
         watch_folders: formData.watch_folders,
         file_extensions: formData.file_extensions,
+        auto_sync: formData.auto_sync,
+        sync_interval_minutes: formData.sync_interval_minutes,
         server_type: formData.server_type,
       });
+      }
       setCrawlEstimate(response.data);
       showSnackbar('Crawl estimation completed', 'success');
     } catch (error) {
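Both estimate endpoints feed `setCrawlEstimate(response.data)`, and the only response shape visible in this commit is the fallback JSON built by the Rust handlers further down. A hedged TypeScript sketch of that object, with per-folder entries left untyped because their fields are not shown here:

// Hedged sketch of the crawl-estimate payload, inferred from the fallback JSON
// in estimate_webdav_crawl_internal below; the success-path fields may differ.
interface CrawlEstimate {
  error?: string;                     // present when estimation fails
  folders: unknown[];                 // per-folder details not visible in this diff
  total_files: number;
  total_supported_files: number;
  total_estimated_time_hours: number;
  total_size_mb: number;
}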
src/main.rs (12 lines changed)
@@ -38,7 +38,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     // Check current migration status
     let applied_result = sqlx::query("SELECT version, description FROM _sqlx_migrations ORDER BY version")
-        .fetch_all(&db.pool)
+        .fetch_all(web_db.get_pool())
         .await;
 
     match applied_result {
@@ -57,7 +57,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     // Check if ocr_error column exists
     let check_column = sqlx::query("SELECT column_name FROM information_schema.columns WHERE table_name = 'documents' AND column_name = 'ocr_error'")
-        .fetch_optional(&db.pool)
+        .fetch_optional(web_db.get_pool())
         .await;
 
     match check_column {
@@ -67,12 +67,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             // Try to add the column manually as a fallback
             info!("Attempting to add missing columns...");
             if let Err(e) = sqlx::query("ALTER TABLE documents ADD COLUMN IF NOT EXISTS ocr_error TEXT")
-                .execute(&db.pool)
+                .execute(web_db.get_pool())
                 .await {
                 error!("Failed to add ocr_error column: {}", e);
             }
             if let Err(e) = sqlx::query("ALTER TABLE documents ADD COLUMN IF NOT EXISTS ocr_completed_at TIMESTAMPTZ")
-                .execute(&db.pool)
+                .execute(web_db.get_pool())
                 .await {
                 error!("Failed to add ocr_completed_at column: {}", e);
             }
@@ -81,7 +81,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         Err(e) => error!("Failed to check for ocr_error column: {}", e),
     }
 
-    let result = migrations.run(&db.pool).await;
+    let result = migrations.run(web_db.get_pool()).await;
     match result {
         Ok(_) => info!("SQLx migrations completed successfully"),
         Err(e) => {
@@ -96,7 +96,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             WHERE table_name = 'documents' AND table_schema = 'public'
             ORDER BY ordinal_position"
     )
-    .fetch_all(&db.pool)
+    .fetch_all(web_db.get_pool())
     .await;
 
     match columns_result {
@@ -20,6 +20,8 @@ pub fn router() -> Router<Arc<AppState>> {
         .route("/{id}", get(get_source).put(update_source).delete(delete_source))
         .route("/{id}/sync", post(trigger_sync))
         .route("/{id}/test", post(test_connection))
+        .route("/{id}/estimate", post(estimate_crawl))
+        .route("/estimate", post(estimate_crawl_with_config))
 }
 
 #[utoipa::path(
@@ -360,3 +362,110 @@ fn validate_config_for_type(
         _ => Ok(()), // Other types not implemented yet
     }
 }
+
+#[utoipa::path(
+    post,
+    path = "/api/sources/{id}/estimate",
+    tag = "sources",
+    security(
+        ("bearer_auth" = [])
+    ),
+    params(
+        ("id" = Uuid, Path, description = "Source ID")
+    ),
+    responses(
+        (status = 200, description = "Crawl estimate result", body = serde_json::Value),
+        (status = 404, description = "Source not found"),
+        (status = 401, description = "Unauthorized")
+    )
+)]
+async fn estimate_crawl(
+    auth_user: AuthUser,
+    Path(source_id): Path<Uuid>,
+    State(state): State<Arc<AppState>>,
+) -> Result<Json<serde_json::Value>, StatusCode> {
+    let source = state
+        .db
+        .get_source(auth_user.user.id, source_id)
+        .await
+        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
+        .ok_or(StatusCode::NOT_FOUND)?;
+
+    match source.source_type {
+        crate::models::SourceType::WebDAV => {
+            let config: crate::models::WebDAVSourceConfig = serde_json::from_value(source.config)
+                .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+
+            estimate_webdav_crawl_internal(&config).await
+        }
+        _ => Ok(Json(serde_json::json!({
+            "error": "Source type not supported for estimation"
+        }))),
+    }
+}
+
+#[utoipa::path(
+    post,
+    path = "/api/sources/estimate",
+    tag = "sources",
+    security(
+        ("bearer_auth" = [])
+    ),
+    request_body = serde_json::Value,
+    responses(
+        (status = 200, description = "Crawl estimate result", body = serde_json::Value),
+        (status = 400, description = "Bad request - invalid configuration"),
+        (status = 401, description = "Unauthorized")
+    )
+)]
+async fn estimate_crawl_with_config(
+    _auth_user: AuthUser,
+    State(_state): State<Arc<AppState>>,
+    Json(config_data): Json<serde_json::Value>,
+) -> Result<Json<serde_json::Value>, StatusCode> {
+    // Parse the WebDAV config from the request
+    let config: crate::models::WebDAVSourceConfig = serde_json::from_value(config_data)
+        .map_err(|_| StatusCode::BAD_REQUEST)?;
+
+    estimate_webdav_crawl_internal(&config).await
+}
+
+async fn estimate_webdav_crawl_internal(
+    config: &crate::models::WebDAVSourceConfig,
+) -> Result<Json<serde_json::Value>, StatusCode> {
+    // Create WebDAV service config
+    let webdav_config = crate::webdav_service::WebDAVConfig {
+        server_url: config.server_url.clone(),
+        username: config.username.clone(),
+        password: config.password.clone(),
+        watch_folders: config.watch_folders.clone(),
+        file_extensions: config.file_extensions.clone(),
+        timeout_seconds: 300,
+        server_type: config.server_type.clone(),
+    };
+
+    // Create WebDAV service and estimate crawl
+    match crate::webdav_service::WebDAVService::new(webdav_config) {
+        Ok(webdav_service) => {
+            match webdav_service.estimate_crawl(&config.watch_folders).await {
+                Ok(estimate) => Ok(Json(serde_json::to_value(estimate).unwrap())),
+                Err(e) => Ok(Json(serde_json::json!({
+                    "error": format!("Crawl estimation failed: {}", e),
+                    "folders": [],
+                    "total_files": 0,
+                    "total_supported_files": 0,
+                    "total_estimated_time_hours": 0.0,
+                    "total_size_mb": 0.0,
+                }))),
+            }
+        }
+        Err(e) => Ok(Json(serde_json::json!({
+            "error": format!("Failed to create WebDAV service: {}", e),
+            "folders": [],
+            "total_files": 0,
+            "total_supported_files": 0,
+            "total_estimated_time_hours": 0.0,
+            "total_size_mb": 0.0,
+        }))),
+    }
+}
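Note that both handlers report estimation failures as a 200 response whose body carries an `error` field and zeroed totals, rather than an error status. A hedged client-side sketch (TypeScript, reusing the `api` helper from the sources page and the `CrawlEstimate` shape sketched earlier) of how a caller might handle both cases:

// Hedged usage sketch: the endpoint paths come from the utoipa annotations above;
// `api` and `CrawlEstimate` are the assumed helpers sketched earlier on this page.
async function fetchCrawlEstimate(
  sourceId?: string,
  config?: Record<string, unknown>,
): Promise<CrawlEstimate> {
  const response = sourceId
    ? await api.post(`/sources/${sourceId}/estimate`)   // existing source
    : await api.post('/sources/estimate', config);      // ad-hoc WebDAV config
  const estimate = response.data as CrawlEstimate;
  if (estimate.error) {
    // Server-side estimation failed; totals are zeroed in the fallback payload.
    console.warn(estimate.error);
  }
  return estimate;
}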