Merge branch 'main' into feat/document-labels

commit ec497a4a08
Author: aaldebs99 (committed by GitHub)
Date:   2025-06-19 18:40:50 -07:00
12 changed files with 216 additions and 37 deletions


@@ -18,7 +18,12 @@ env:
jobs:
build:
-runs-on: ubuntu-latest
+runs-on: ${{ matrix.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
strategy:
matrix:
platform:
- linux/amd64
- linux/arm64
permissions:
contents: read
packages: write
@@ -27,6 +32,9 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -49,15 +57,82 @@ jobs:
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
-type=raw,value=latest,enable={{is_default_branch}}
+type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/v') && !contains(github.ref_name, '-') }}
-- name: Build and push Docker image
+- name: Build and push by digest
id: build
uses: docker/build-push-action@v5
with:
context: .
-platforms: linux/amd64,linux/arm64
+platforms: ${{ matrix.platform }}
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
outputs: |
type=image,name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}
- name: Export digest
if: github.event_name != 'pull_request'
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.platform == 'linux/amd64' && 'amd64' || 'arm64' }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
push:
needs: build
if: github.event_name != 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
pattern: digests-*
merge-multiple: true
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to the Container registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/v') && !contains(github.ref_name, '-') }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
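
A note on the reworked publish flow above: each matrix leg builds one platform and pushes it only by digest (push-by-digest=true, no tags), the digest is saved as a one-day artifact, and the follow-up push job merges the per-architecture digests into a single multi-arch manifest list with docker buildx imagetools create. The latest tag is now gated to stable releases (refs/tags/v* with no '-' pre-release suffix) instead of following the default branch. The jq expression turns the metadata action's tag list into repeated -t flags; here is a minimal sketch of that expansion in Rust, with a hypothetical tag list standing in for DOCKER_METADATA_OUTPUT_JSON:

    // Sketch of the jq expansion `.tags | map("-t " + .) | join(" ")`.
    // The tag list below is hypothetical; the workflow reads the real one
    // from the docker/metadata-action JSON output.
    use serde_json::json;

    fn main() {
        let metadata = json!({
            "tags": [
                "ghcr.io/example/readur:1.2.3",
                "ghcr.io/example/readur:1.2",
                "ghcr.io/example/readur:latest"
            ]
        });
        let flags = metadata["tags"].as_array().unwrap().iter()
            .map(|t| format!("-t {}", t.as_str().unwrap()))
            .collect::<Vec<_>>()
            .join(" ");
        // -> docker buildx imagetools create -t ... -t ... <image>@sha256:<digest> ...
        println!("docker buildx imagetools create {flags}");
    }

The trailing printf in the workflow supplies the image arguments: the export step writes each digest as a bare-hash file name, and printf expands every file in /tmp/digests into an <image>@sha256:<hash> argument.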

.github/workflows/release.yml (new file, 58 lines)

@@ -0,0 +1,58 @@
name: Create Release
on:
push:
tags:
- 'v*'
permissions:
contents: write
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Generate changelog
id: changelog
run: |
# Get the previous tag
PREVIOUS_TAG=$(git tag --sort=-version:refname | sed -n '2p')
CURRENT_TAG=${GITHUB_REF#refs/tags/}
echo "## Changes" > changelog.md
echo "" >> changelog.md
if [ -n "$PREVIOUS_TAG" ]; then
echo "### Commits since $PREVIOUS_TAG:" >> changelog.md
git log --pretty=format:"- %s (%h)" $PREVIOUS_TAG..$CURRENT_TAG >> changelog.md
else
echo "### All commits:" >> changelog.md
git log --pretty=format:"- %s (%h)" $CURRENT_TAG >> changelog.md
fi
echo "" >> changelog.md
echo "**Full Changelog**: https://github.com/${{ github.repository }}/compare/$PREVIOUS_TAG...$CURRENT_TAG" >> changelog.md
# Set output for use in release step
{
echo 'CHANGELOG<<EOF'
cat changelog.md
echo 'EOF'
} >> $GITHUB_OUTPUT
- name: Create Release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref_name }}
release_name: Release ${{ github.ref_name }}
body: ${{ steps.changelog.outputs.CHANGELOG }}
draft: false
prerelease: ${{ contains(github.ref_name, '-') }}
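
Two shell idioms carry the changelog step: git tag --sort=-version:refname | sed -n '2p' selects the previous release (line 1 is the tag just pushed), and the { echo 'CHANGELOG<<EOF'; ...; echo 'EOF'; } >> $GITHUB_OUTPUT block is the documented heredoc syntax for passing a multi-line value between steps. A rough Rust sketch of the previous-tag selection, for illustration only:

    // Rough equivalent of `git tag --sort=-version:refname | sed -n '2p'`:
    // list tags newest-first, skip the tag being released, take the next one.
    use std::process::Command;

    fn previous_tag() -> Option<String> {
        let out = Command::new("git")
            .args(["tag", "--sort=-version:refname"])
            .output()
            .ok()?;
        String::from_utf8(out.stdout).ok()?
            .lines()
            .nth(1) // line 0 is the current (highest) tag
            .map(str::to_owned)
    }

    fn main() {
        println!("previous tag: {:?}", previous_tag());
    }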


@@ -74,6 +74,7 @@ jobs:
run: cargo build --release
env:
DATABASE_URL: postgres://postgres:postgres@localhost:5432/readur_test
RUST_BACKTRACE: 1
- name: Build frontend
working-directory: ./frontend
@@ -92,6 +93,7 @@ jobs:
FRONTEND_PATH: ./frontend/dist
UPLOAD_PATH: ./uploads
WATCH_PATH: ./watch
RUST_BACKTRACE: 1
- name: Wait for backend to be ready
run: |
@@ -104,6 +106,7 @@ jobs:
sleep 2
done
- name: Install Playwright browsers
working-directory: ./frontend
run: npx playwright install --with-deps
@@ -113,6 +116,7 @@ jobs:
run: npm run test:e2e
env:
VITE_API_URL: http://localhost:8000
PLAYWRIGHT_BASE_URL: http://localhost:8000
- name: Stop backend server
if: always()


@@ -109,6 +109,7 @@ jobs:
DATABASE_URL: ${{ env.DATABASE_URL }}
TEST_DATABASE_URL: ${{ env.DATABASE_URL }}
RUST_LOG: debug
RUST_BACKTRACE: 1
- name: Stop readur server
if: always()


@@ -53,6 +53,8 @@ jobs:
run: |
cargo test --lib
cargo test --tests unit_tests
env:
RUST_BACKTRACE: 1
frontend-unit-tests:
runs-on: ubuntu-latest
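
RUST_BACKTRACE: 1 recurs throughout this commit (the e2e, integration, and unit-test workflows, the compose file, and run_tests.sh below): with it set, a panicking test prints a full backtrace instead of the one-line panic message. A minimal illustration of the switch:

    // Minimal illustration: Backtrace::capture() honors RUST_BACKTRACE,
    // which is why the CI jobs above export it for test runs.
    use std::backtrace::Backtrace;

    fn main() {
        let bt = Backtrace::capture();
        // Prints `Captured` when RUST_BACKTRACE=1, `Disabled` otherwise.
        println!("backtrace status: {:?}", bt.status());
    }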


@@ -58,6 +58,7 @@ services:
# Test-specific environment variables
RUST_LOG: debug
RUST_BACKTRACE: 1
TEST_ENV: true
ports:


@@ -9,6 +9,7 @@ test.describe('Document Management', () => {
helpers = new TestHelpers(authenticatedPage);
await helpers.navigateToPage('/documents');
// Ensure we have test documents for tests that need them
await helpers.ensureTestDocumentsExist();
});
test('should display document list', async ({ authenticatedPage: page }) => {


@@ -8,6 +8,8 @@ test.describe('Search Functionality', () => {
test.beforeEach(async ({ authenticatedPage }) => {
helpers = new TestHelpers(authenticatedPage);
await helpers.navigateToPage('/search');
// Ensure we have test documents for search functionality
await helpers.ensureTestDocumentsExist();
});
test('should display search interface', async ({ authenticatedPage: page }) => {


@@ -17,7 +17,7 @@ export default defineConfig({
],
outputDir: 'test-results/e2e-artifacts',
use: {
-baseURL: 'http://localhost:5173',
+baseURL: process.env.PLAYWRIGHT_BASE_URL || 'http://localhost:5173',
trace: 'on-first-retry',
screenshot: 'only-on-failure',
video: 'retain-on-failure',
@@ -36,7 +36,7 @@ export default defineConfig({
use: { ...devices['Desktop Safari'] },
},
],
-webServer: {
+webServer: process.env.CI ? undefined : {
command: 'npm run dev',
url: 'http://localhost:5173',
reuseExistingServer: !process.env.CI,


@@ -185,6 +185,7 @@ run_unit_tests() {
local exit_code
output=$(DATABASE_URL="postgresql://readur_test:readur_test@localhost:5433/readur_test" \
RUST_BACKTRACE=1 \
cargo test --lib --no-fail-fast 2>&1)
exit_code=$?
@@ -217,6 +218,7 @@ run_integration_tests() {
DATABASE_URL="postgresql://readur_test:readur_test@localhost:5433/readur_test" \
TEST_DATABASE_URL="postgresql://readur_test:readur_test@localhost:5433/readur_test" \
API_URL="http://localhost:8001" \
RUST_BACKTRACE=1 \
cargo test --test '*' --no-fail-fast 2>&1 | tee "$output_file"
exit_code=${PIPESTATUS[0]}


@@ -73,7 +73,9 @@ async fn create_source(
Json(source_data): Json<CreateSource>,
) -> Result<Json<SourceResponse>, StatusCode> {
// Validate source configuration based on type
-if let Err(_) = validate_source_config(&source_data) {
+if let Err(validation_error) = validate_source_config(&source_data) {
+error!("Source validation failed: {}", validation_error);
+error!("Invalid source data received: {:?}", source_data);
return Err(StatusCode::BAD_REQUEST);
}
@@ -81,7 +83,10 @@ async fn create_source(
.db
.create_source(auth_user.user.id, &source_data)
.await
-.map_err(|_| StatusCode::BAD_REQUEST)?;
+.map_err(|e| {
+error!("Failed to create source in database: {}", e);
+StatusCode::INTERNAL_SERVER_ERROR
+})?;
Ok(Json(source.into()))
}
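
The handler change above separates client mistakes from server faults: an invalid payload is logged and rejected with 400, while a database failure is logged and surfaced as 500 instead of the old blanket 400. A generic sketch of the same map_err pattern (illustrative, not the project's exact code; assumes the axum and tracing crates):

    // Sketch of the logging map_err pattern adopted above.
    use axum::http::StatusCode;
    use tracing::error;

    fn log_internal_error<E: std::fmt::Display>(e: E) -> StatusCode {
        error!("Failed to create source in database: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    }

    // Usage inside the handler:
    //     .map_err(log_internal_error)?;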


@@ -112,8 +112,9 @@ impl SourceTestClient {
let source_data = json!({
"name": name,
"source_type": "webdav",
"enabled": true,
"config": {
"server_url": "https://cloud.example.com",
"server_url": "https://cloud.example.com/remote.php/dav/files/testuser/",
"username": "testuser",
"password": "testpass",
"watch_folders": ["/Documents", "/Pictures"],
@@ -132,7 +133,8 @@ impl SourceTestClient {
.await?;
if !response.status().is_success() {
return Err(format!("Source creation failed: {}", response.text().await?).into());
let error_text = response.text().await?;
return Err(format!("WebDAV source creation failed: {}", error_text).into());
}
let source: Value = response.json().await?;
@@ -146,13 +148,15 @@ impl SourceTestClient {
let source_data = json!({
"name": name,
"source_type": "s3",
"enabled": true,
"config": {
"bucket": "test-documents-bucket",
"bucket_name": "test-documents-bucket",
"region": "us-east-1",
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
"prefix": "documents/",
"endpoint_url": null,
"watch_folders": ["/documents", "/uploads"],
"auto_sync": true,
"sync_interval_minutes": 120,
"file_extensions": [".pdf", ".txt", ".docx"]
@@ -167,7 +171,8 @@ impl SourceTestClient {
.await?;
if !response.status().is_success() {
return Err(format!("S3 source creation failed: {}", response.text().await?).into());
let error_text = response.text().await?;
return Err(format!("S3 source creation failed: {}", error_text).into());
}
let source: Value = response.json().await?;
@@ -178,15 +183,20 @@ impl SourceTestClient {
async fn create_local_folder_source(&self, name: &str) -> Result<Value, Box<dyn std::error::Error>> {
let token = self.token.as_ref().ok_or("Not authenticated")?;
// Create the test directory first to ensure it exists
std::fs::create_dir_all("/tmp/test_documents").ok();
let source_data = json!({
"name": name,
"source_type": "local_folder",
"enabled": true,
"config": {
"folder_path": "/tmp/test_documents",
"watch_subdirectories": true,
"watch_folders": ["/tmp/test_documents"],
"file_extensions": [".pdf", ".txt", ".jpg"],
"auto_sync": true,
"sync_interval_minutes": 30
"sync_interval_minutes": 30,
"recursive": true,
"follow_symlinks": false
}
});
@@ -198,7 +208,8 @@ impl SourceTestClient {
.await?;
if !response.status().is_success() {
return Err(format!("Local folder source creation failed: {}", response.text().await?).into());
let error_text = response.text().await?;
return Err(format!("Local folder source creation failed: {}", error_text).into());
}
let source: Value = response.json().await?;
@@ -374,20 +385,26 @@ async fn test_webdav_source_crud_operations() {
assert!(source["config"]["server_url"].as_str().unwrap().contains("cloud.example.com"));
assert_eq!(source["config"]["auto_sync"], true);
assert_eq!(source["config"]["sync_interval_minutes"], 60);
assert_eq!(source["enabled"], true);
// Get source by ID
let retrieved_source = client.get_source(source_id).await
.expect("Failed to get source by ID");
assert_eq!(retrieved_source["id"], source["id"]);
assert_eq!(retrieved_source["name"], source["name"]);
// The get_source endpoint returns a SourceWithStats structure
let retrieved_source_data = &retrieved_source["source"];
assert_eq!(retrieved_source_data["id"], source["id"]);
assert_eq!(retrieved_source_data["name"], source["name"]);
assert!(retrieved_source["recent_documents"].is_array());
println!("✅ Source retrieved by ID");
// Update source
let updates = json!({
"name": "Updated WebDAV Source",
"enabled": true,
"config": {
"server_url": "https://cloud.example.com",
"server_url": "https://cloud.example.com/remote.php/dav/files/testuser/",
"username": "testuser",
"password": "testpass",
"watch_folders": ["/Documents", "/Pictures", "/Videos"],
@@ -448,7 +465,7 @@ async fn test_s3_source_operations() {
// Validate S3-specific configuration
assert_eq!(source["source_type"], "s3");
assert_eq!(source["config"]["bucket"], "test-documents-bucket");
assert_eq!(source["config"]["bucket_name"], "test-documents-bucket");
assert_eq!(source["config"]["region"], "us-east-1");
assert_eq!(source["config"]["prefix"], "documents/");
assert!(source["config"]["endpoint_url"].is_null());
@@ -457,12 +474,13 @@ async fn test_s3_source_operations() {
let minio_updates = json!({
"name": "MinIO S3 Source",
"config": {
"bucket": "minio-test-bucket",
"bucket_name": "minio-test-bucket",
"region": "us-east-1",
"access_key_id": "minioadmin",
"secret_access_key": "minioadmin",
"prefix": "",
"endpoint_url": "https://minio.example.com",
"watch_folders": ["/"],
"auto_sync": true,
"sync_interval_minutes": 60,
"file_extensions": [".pdf", ".jpg"]
@@ -500,27 +518,29 @@ async fn test_local_folder_source_operations() {
// Validate Local Folder-specific configuration
assert_eq!(source["source_type"], "local_folder");
assert_eq!(source["config"]["folder_path"], "/tmp/test_documents");
assert_eq!(source["config"]["watch_subdirectories"], true);
assert_eq!(source["config"]["watch_folders"][0], "/tmp/test_documents");
assert_eq!(source["config"]["recursive"], true);
assert_eq!(source["config"]["sync_interval_minutes"], 30);
// Update with different path and settings
let updates = json!({
"name": "Updated Local Folder",
"enabled": true,
"config": {
"folder_path": "/home/user/documents",
"watch_subdirectories": false,
"watch_folders": ["/tmp/updated_documents", "/tmp/more_documents"],
"file_extensions": [".pdf", ".txt", ".docx", ".xlsx"],
"auto_sync": false,
"sync_interval_minutes": 15
"sync_interval_minutes": 15,
"recursive": false,
"follow_symlinks": true
}
});
let updated_source = client.update_source(source_id, updates).await
.expect("Failed to update local folder source");
assert_eq!(updated_source["config"]["folder_path"], "/home/user/documents");
assert_eq!(updated_source["config"]["watch_subdirectories"], false);
assert_eq!(updated_source["config"]["watch_folders"][0], "/tmp/updated_documents");
assert_eq!(updated_source["config"]["recursive"], false);
assert_eq!(updated_source["config"]["auto_sync"], false);
println!("✅ Local folder source updated");
@@ -630,9 +650,17 @@ async fn test_source_sync_operations() {
let updated_source = client.get_source(source_id).await
.expect("Failed to get updated source");
+// The get_source endpoint returns a SourceWithStats structure
+let source_data = &updated_source["source"];
// Source should still exist with some status
-assert!(updated_source["status"].as_str().is_some());
-println!("✅ Source status after operations: {}", updated_source["status"]);
+if let Some(status) = source_data["status"].as_str() {
+println!("✅ Source status after operations: {}", status);
+} else {
+println!("⚠️ Source status field is missing or null");
+}
+// The source should still exist
+assert!(source_data["id"].as_str().is_some());
// Clean up
client.delete_source(source_id).await
@@ -700,13 +728,13 @@ async fn test_all_source_types_comprehensive() {
.expect("Failed to register and login");
// Create all three source types
-let webdav_source = client.create_webdav_source("Comprehensive WebDAV").await
+let _webdav_source = client.create_webdav_source("Comprehensive WebDAV").await
.expect("Failed to create WebDAV source");
-let s3_source = client.create_s3_source("Comprehensive S3").await
+let _s3_source = client.create_s3_source("Comprehensive S3").await
.expect("Failed to create S3 source");
-let local_source = client.create_local_folder_source("Comprehensive Local").await
+let _local_source = client.create_local_folder_source("Comprehensive Local").await
.expect("Failed to create local folder source");
println!("✅ All three source types created");
@@ -733,8 +761,8 @@ async fn test_all_source_types_comprehensive() {
let detailed_source = client.get_source(source_id).await
.expect(&format!("Failed to get {} source details", source_type));
assert_eq!(detailed_source["id"], source["id"]);
assert_eq!(detailed_source["source_type"], source_type);
assert_eq!(detailed_source["source"]["id"], source["id"]);
assert_eq!(detailed_source["source"]["source_type"], source_type);
// Test connection for each source
let _test_result = client.test_source_connection(source_id).await;