diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index dd84ea7..0000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Browser [e.g. chrome, safari] - - Version [e.g. 22] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - - Browser [e.g. stock browser, safari] - - Version [e.g. 22] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index bbcbbe7..0000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. 
diff --git a/.github/ct/chart-schema.yaml b/.github/ct/chart-schema.yaml deleted file mode 100644 index 7b3fb0a..0000000 --- a/.github/ct/chart-schema.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: str() -home: str() -version: str() -apiVersion: str() -appVersion: any(str(), num(), required=False) -type: str() -dependencies: any(required=False) -description: str() -keywords: list(str(), required=False) -sources: list(str(), required=False) -maintainers: list(include('maintainer'), required=False) -icon: str(required=False) -engine: str(required=False) -condition: str(required=False) -tags: str(required=False) -deprecated: bool(required=False) -kubeVersion: str(required=False) -annotations: map(str(), str(), required=False) ---- -maintainer: - name: str() - email: str(required=False) - url: str(required=False) diff --git a/.github/ct/config.yaml b/.github/ct/config.yaml deleted file mode 100644 index 3721957..0000000 --- a/.github/ct/config.yaml +++ /dev/null @@ -1,15 +0,0 @@ -debug: true -remote: origin -chart-yaml-schema: .github/ct/chart-schema.yaml -validate-maintainers: false -validate-chart-schema: true -validate-yaml: true -check-version-increment: true -chart-dirs: - - charts -helm-extra-args: --timeout 300s -upgrade: true -skip-missing-values: true -release-label: release -release-name-template: "helm-v{{ .Version }}" -target-branch: master diff --git a/.github/workflows/build-images.yaml b/.github/workflows/build-images.yaml deleted file mode 100644 index 542bd64..0000000 --- a/.github/workflows/build-images.yaml +++ /dev/null @@ -1,84 +0,0 @@ -name: Build Container Images - -on: - push: - tags: - - "image/v*" - paths-ignore: - - "charts/**" - pull_request: - branches: [master] - paths-ignore: - - "charts/**" -env: - IMAGES: docker.io/hapiproject/hapi - PLATFORMS: linux/amd64,linux/arm64/v8 - -jobs: - build: - name: Build - runs-on: ubuntu-22.04 - steps: - - name: Container meta for default (distroless) image - id: docker_meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGES }} - tags: | - type=match,pattern=image/(.*),group=1,enable=${{github.event_name != 'pull_request'}} - - - - name: Container meta for tomcat image - id: docker_tomcat_meta - uses: docker/metadata-action@v5 - with: - images: ${{ env.IMAGES }} - tags: | - type=match,pattern=image/(.*),group=1,enable=${{github.event_name != 'pull_request'}} - flavor: | - suffix=-tomcat,onlatest=true - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Login to DockerHub - uses: docker/login-action@v3 - if: github.event_name != 'pull_request' - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Cache Docker layers - uses: actions/cache@v3 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-buildx- - - - name: Build and push default (distroless) image - id: docker_build - uses: docker/build-push-action@v5 - with: - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache - push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.docker_meta.outputs.tags }} - labels: ${{ steps.docker_meta.outputs.labels }} - platforms: ${{ env.PLATFORMS }} - target: default - - - name: Build and push tomcat image - id: docker_build_tomcat - uses: docker/build-push-action@v5 - with: - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache - 
push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.docker_tomcat_meta.outputs.tags }} - labels: ${{ steps.docker_tomcat_meta.outputs.labels }} - platforms: ${{ env.PLATFORMS }} - target: tomcat diff --git a/.github/workflows/chart-release.yaml b/.github/workflows/chart-release.yaml deleted file mode 100644 index 39c8f0a..0000000 --- a/.github/workflows/chart-release.yaml +++ /dev/null @@ -1,41 +0,0 @@ -name: Release Charts - -on: - push: - branches: - - main - paths: - - "charts/**" - -jobs: - release: - runs-on: ubuntu-22.04 - steps: - - name: Add workspace as safe directory - run: | - git config --global --add safe.directory /__w/FHIRFLARE-IG-Toolkit/FHIRFLARE-IG-Toolkit - - - name: Checkout - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2 - with: - fetch-depth: 0 - - - name: Configure Git - run: | - git config user.name "$GITHUB_ACTOR" - git config user.email "$GITHUB_ACTOR@users.noreply.github.com" - - - name: Update dependencies - run: find charts/ ! -path charts/ -maxdepth 1 -type d -exec helm dependency update {} \; - - - name: Add Helm Repositories - run: | - helm repo add hapifhir https://hapifhir.github.io/hapi-fhir-jpaserver-starter/ - helm repo update - - - name: Run chart-releaser - uses: helm/chart-releaser-action@be16258da8010256c6e82849661221415f031968 # v1.5.0 - with: - config: .github/ct/config.yaml - env: - CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/chart-test.yaml b/.github/workflows/chart-test.yaml deleted file mode 100644 index ef83c32..0000000 --- a/.github/workflows/chart-test.yaml +++ /dev/null @@ -1,73 +0,0 @@ -name: Lint and Test Charts - -on: - pull_request: - branches: - - master - paths: - - "charts/**" - -jobs: - lint: - runs-on: ubuntu-22.04 - container: quay.io/helmpack/chart-testing:v3.11.0@sha256:f2fd21d30b64411105c7eafb1862783236a219d29f2292219a09fe94ca78ad2a - steps: - - name: Install helm-docs - working-directory: /tmp - env: - HELM_DOCS_URL: https://github.com/norwoodj/helm-docs/releases/download/v1.14.2/helm-docs_1.14.2_Linux_x86_64.tar.gz - run: | - curl -LSs $HELM_DOCS_URL | tar xz && \ - mv ./helm-docs /usr/local/bin/helm-docs && \ - chmod +x /usr/local/bin/helm-docs && \ - helm-docs --version - - - name: Add workspace as safe directory - run: | - git config --global --add safe.directory /__w/hapi-fhir-jpaserver-starter/hapi-fhir-jpaserver-starter - - - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - - name: Check if documentation is up-to-date - run: helm-docs && git diff --exit-code HEAD - - - name: Run chart-testing (lint) - run: ct lint --config .github/ct/config.yaml - - test: - runs-on: ubuntu-22.04 - strategy: - matrix: - k8s-version: [1.30.8, 1.31.4, 1.32.0] - needs: - - lint - steps: - - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 0 - - - name: Set up chart-testing - uses: helm/chart-testing-action@e6669bcd63d7cb57cb4380c33043eebe5d111992 # v2.6.1 - - - name: Run chart-testing (list-changed) - id: list-changed - run: | - changed=$(ct list-changed --config .github/ct/config.yaml) - if [[ -n "$changed" ]]; then - echo "::set-output name=changed::true" - fi - - - name: Create k8s Kind Cluster - uses: helm/kind-action@dda0770415bac9fc20092cacbc54aa298604d140 # v1.8.0 - if: ${{ steps.list-changed.outputs.changed == 'true' }} - with: - cluster_name: kind-cluster-k8s-${{ matrix.k8s-version }} - node_image: kindest/node:v${{ 
matrix.k8s-version }} - - - name: Run chart-testing (install) - run: ct install --config .github/ct/config.yaml - if: ${{ steps.list-changed.outputs.changed == 'true' }} diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml deleted file mode 100644 index e55d2d1..0000000 --- a/.github/workflows/docker-publish.yml +++ /dev/null @@ -1,58 +0,0 @@ -# This workflow builds and pushes a multi-architecture Docker image to GitHub Container Registry (ghcr.io). -# -# The Docker meta step is required because GitHub repository names can contain uppercase letters, but Docker image tags must be lowercase. -# The docker/metadata-action@v5 normalizes the repository name to lowercase, ensuring the build and push steps use a valid image tag. -# -# This workflow builds for both AMD64 and ARM64 architectures using Docker Buildx and QEMU emulation. - -name: Build and Push Docker image - -on: - push: - branches: - - main - - '*' # This will run the workflow on any branch - workflow_dispatch: # This enables manual triggering - -jobs: - build-and-push: - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Log in to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Docker meta - id: meta - uses: docker/metadata-action@v5 - with: - images: ghcr.io/${{ github.repository }} - - - name: Set normalized image name - run: | - if [[ "${{ github.ref_name }}" == "main" ]]; then - echo "IMAGE_NAME=$(echo ${{ steps.meta.outputs.tags }} | sed 's/:main/:latest/')" >> $GITHUB_ENV - else - echo "IMAGE_NAME=${{ steps.meta.outputs.tags }}" >> $GITHUB_ENV - fi - - - name: Build and push multi-architecture Docker image - uses: docker/build-push-action@v5 - with: - context: . 
- file: ./docker/Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: ${{ env.IMAGE_NAME }} \ No newline at end of file diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 5b62952..0000000 --- a/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -/instance/ -/logs/ -/.pydevproject -/__pycache__/ -/myenv/ -/tmp/ diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/.project b/.project deleted file mode 100644 index f6266a1..0000000 --- a/.project +++ /dev/null @@ -1,23 +0,0 @@ - - - FHIRFLARE-IG-Toolkit - - - - - - org.python.pydev.PyDevBuilder - - - - - org.eclipse.wst.validation.validationbuilder - - - - - - org.eclipse.wst.jsdt.core.jsNature - org.python.pydev.pythonNature - - diff --git a/.settings/.jsdtscope b/.settings/.jsdtscope deleted file mode 100644 index cca691f..0000000 --- a/.settings/.jsdtscope +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - diff --git a/.settings/org.eclipse.wst.jsdt.ui.superType.container b/.settings/org.eclipse.wst.jsdt.ui.superType.container deleted file mode 100644 index 49c8cd4..0000000 --- a/.settings/org.eclipse.wst.jsdt.ui.superType.container +++ /dev/null @@ -1 +0,0 @@ -org.eclipse.wst.jsdt.launching.JRE_CONTAINER \ No newline at end of file diff --git a/.settings/org.eclipse.wst.jsdt.ui.superType.name b/.settings/org.eclipse.wst.jsdt.ui.superType.name deleted file mode 100644 index 11006e2..0000000 --- a/.settings/org.eclipse.wst.jsdt.ui.superType.name +++ /dev/null @@ -1 +0,0 @@ -Global \ No newline at end of file diff --git a/Build and Run for first time.bat b/Build and Run for first time.bat deleted file mode 100644 index 9179731..0000000 --- a/Build and Run for first time.bat +++ /dev/null @@ -1,211 +0,0 @@ -@echo off -setlocal enabledelayedexpansion - -REM --- Configuration --- -set REPO_URL=https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git -set CLONE_DIR=hapi-fhir-jpaserver -set SOURCE_CONFIG_DIR=hapi-fhir-setup -set CONFIG_FILE=application.yaml - -REM --- Define Paths --- -set SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE% -set DEST_CONFIG_PATH=%CLONE_DIR%\target\classes\%CONFIG_FILE% - -REM === CORRECTED: Prompt for Version === -:GetModeChoice -SET "APP_MODE=" REM Clear the variable first -echo Select Installation Mode: -echo 1. Standalone (Includes local HAPI FHIR Server - Requires Git & Maven) -echo 2. Lite (Excludes local HAPI FHIR Server - No Git/Maven needed) -CHOICE /C 12 /N /M "Enter your choice (1 or 2):" - -IF ERRORLEVEL 2 ( - SET APP_MODE=lite - goto :ModeSet -) -IF ERRORLEVEL 1 ( - SET APP_MODE=standalone - goto :ModeSet -) -REM If somehow neither was chosen (e.g., Ctrl+C), loop back -echo Invalid input. Please try again. -goto :GetModeChoice - -:ModeSet -IF "%APP_MODE%"=="" ( - echo Invalid choice detected after checks. Exiting. - goto :eof -) -echo Selected Mode: %APP_MODE% -echo. -REM === END CORRECTION === - - -REM === Conditionally Execute HAPI Setup === -IF "%APP_MODE%"=="standalone" ( - echo Running Standalone setup including HAPI FHIR... - echo. - - REM --- Step 0: Clean up previous clone (optional) --- - echo Checking for existing directory: %CLONE_DIR% - if exist "%CLONE_DIR%" ( - echo Found existing directory, removing it... - rmdir /s /q "%CLONE_DIR%" - if errorlevel 1 ( - echo ERROR: Failed to remove existing directory: %CLONE_DIR% - goto :error - ) - echo Existing directory removed. - ) else ( - echo Directory does not exist, proceeding with clone. - ) - echo. 
- - REM --- Step 1: Clone the HAPI FHIR server repository --- - echo Cloning repository: %REPO_URL% into %CLONE_DIR%... - git clone "%REPO_URL%" "%CLONE_DIR%" - if errorlevel 1 ( - echo ERROR: Failed to clone repository. Check Git installation and network connection. - goto :error - ) - echo Repository cloned successfully. - echo. - - REM --- Step 2: Navigate into the cloned directory --- - echo Changing directory to %CLONE_DIR%... - cd "%CLONE_DIR%" - if errorlevel 1 ( - echo ERROR: Failed to change directory to %CLONE_DIR%. - goto :error - ) - echo Current directory: %CD% - echo. - - REM --- Step 3: Build the HAPI server using Maven --- - echo ===> "Starting Maven build (Step 3)..."" - cmd /c "mvn clean package -DskipTests=true -Pboot" - echo ===> Maven command finished. Checking error level... - if errorlevel 1 ( - echo ERROR: Maven build failed or cmd /c failed - cd .. - goto :error - ) - echo Maven build completed successfully. ErrorLevel: %errorlevel% - echo. - - REM --- Step 4: Copy the configuration file --- - echo ===> "Starting file copy (Step 4)..." - echo Copying configuration file... - echo Source: %SOURCE_CONFIG_PATH% - echo Destination: target\classes\%CONFIG_FILE% - xcopy "%SOURCE_CONFIG_PATH%" "target\classes\" /Y /I - echo ===> xcopy command finished. Checking error level... - if errorlevel 1 ( - echo WARNING: Failed to copy configuration file. Check if the source file exists. - echo The script will continue, but the server might use default configuration. - ) else ( - echo Configuration file copied successfully. ErrorLevel: %errorlevel% - ) - echo. - - REM --- Step 5: Navigate back to the parent directory --- - echo ===> "Changing directory back (Step 5)..." - cd .. - if errorlevel 1 ( - echo ERROR: Failed to change back to the parent directory. ErrorLevel: %errorlevel% - goto :error - ) - echo Current directory: %CD% - echo. - -) ELSE ( - echo Running Lite setup, skipping HAPI FHIR build... - REM Ensure the hapi-fhir-jpaserver directory doesn't exist or is empty if Lite mode is chosen after a standalone attempt - if exist "%CLONE_DIR%" ( - echo Found existing HAPI directory in Lite mode. Removing it to avoid build issues... - rmdir /s /q "%CLONE_DIR%" - ) - REM Create empty target directories expected by Dockerfile COPY, even if not used - mkdir "%CLONE_DIR%\target\classes" 2> nul - mkdir "%CLONE_DIR%\custom" 2> nul - REM Create a placeholder empty WAR file to satisfy Dockerfile COPY - echo. > "%CLONE_DIR%\target\ROOT.war" - echo. > "%CLONE_DIR%\target\classes\application.yaml" - echo Placeholder files created for Lite mode build. - echo. -) - -REM === Modify docker-compose.yml to set APP_MODE === -echo Updating docker-compose.yml with APP_MODE=%APP_MODE%... -( - echo version: '3.8' - echo services: - echo fhirflare: - echo build: - echo context: . 
- echo dockerfile: Dockerfile - echo ports: - echo - "5000:5000" - echo - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite - echo volumes: - echo - ./instance:/app/instance - echo - ./static/uploads:/app/static/uploads - echo - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent - echo - ./logs:/app/logs - echo environment: - echo - FLASK_APP=app.py - echo - FLASK_ENV=development - echo - NODE_PATH=/usr/lib/node_modules - echo - APP_MODE=%APP_MODE% - echo - APP_BASE_URL=http://localhost:5000 - echo - HAPI_FHIR_URL=http://localhost:8080/fhir - echo command: supervisord -c /etc/supervisord.conf -) > docker-compose.yml.tmp - -REM Check if docker-compose.yml.tmp was created successfully -if not exist docker-compose.yml.tmp ( - echo ERROR: Failed to create temporary docker-compose file. - goto :error -) - -REM Replace the original docker-compose.yml -del docker-compose.yml /Q > nul 2>&1 -ren docker-compose.yml.tmp docker-compose.yml -echo docker-compose.yml updated successfully. -echo. - -REM --- Step 6: Build Docker images --- -echo ===> Starting Docker build (Step 6)... -docker-compose build --no-cache -if errorlevel 1 ( - echo ERROR: Docker Compose build failed. Check Docker installation and docker-compose.yml file. ErrorLevel: %errorlevel% - goto :error -) -echo Docker images built successfully. ErrorLevel: %errorlevel% -echo. - -REM --- Step 7: Start Docker containers --- -echo ===> Starting Docker containers (Step 7)... -docker-compose up -d -if errorlevel 1 ( - echo ERROR: Docker Compose up failed. Check Docker installation and container configurations. ErrorLevel: %errorlevel% - goto :error -) -echo Docker containers started successfully. ErrorLevel: %errorlevel% -echo. - -echo ==================================== -echo Script finished successfully! (Mode: %APP_MODE%) -echo ==================================== -goto :eof - -:error -echo ------------------------------------ -echo An error occurred. Script aborted. -echo ------------------------------------ -pause -exit /b 1 - -:eof -echo Script execution finished. -pause \ No newline at end of file diff --git a/DockerCommands.MD b/DockerCommands.MD deleted file mode 100644 index 1dba080..0000000 --- a/DockerCommands.MD +++ /dev/null @@ -1,26 +0,0 @@ -Docker Commands.MD - - - -to pull and clone: -git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver - -to build: -mvn clean package -DskipTests=true -Pboot - -to run: -java -jar target/ROOT.war - - - - -docker-compose build --no-cache -docker-compose up -d - - - - - -cp :/app/PATH/Filename.ext . - . 
copies to the root folder you ran it from - -docker exec -it bash - to get a bash - session in the container - \ No newline at end of file diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index f75083b..0000000 --- a/Dockerfile +++ /dev/null @@ -1,57 +0,0 @@ -# Base image with Python and Java -FROM tomcat:10.1-jdk17 - -# Install build dependencies, Node.js 18, and coreutils (for stdbuf) -RUN apt-get update && apt-get install -y --no-install-recommends \ - python3 python3-pip python3-venv curl coreutils \ - && curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \ - && apt-get install -y --no-install-recommends nodejs \ - && rm -rf /var/lib/apt/lists/* - -# Install specific versions of GoFSH and SUSHI -# REMOVED pip install fhirpath from this line -RUN npm install -g gofsh fsh-sushi - -# Set up Python environment -WORKDIR /app -RUN python3 -m venv /app/venv -ENV PATH="/app/venv/bin:$PATH" - -# ADDED: Uninstall old fhirpath just in case it's in requirements.txt -RUN pip uninstall -y fhirpath || true -# ADDED: Install the new fhirpathpy library -RUN pip install --no-cache-dir fhirpathpy - -# Copy Flask files -COPY requirements.txt . -# Install requirements (including Pydantic - check version compatibility if needed) -RUN pip install --no-cache-dir -r requirements.txt -COPY app.py . -COPY services.py . -COPY forms.py . -COPY package.py . -COPY templates/ templates/ -COPY static/ static/ -COPY tests/ tests/ - -# Ensure /tmp, /app/h2-data, /app/static/uploads, and /app/logs are writable -RUN mkdir -p /tmp /app/h2-data /app/static/uploads /app/logs && chmod 777 /tmp /app/h2-data /app/static/uploads /app/logs - -# Copy pre-built HAPI WAR and configuration -COPY hapi-fhir-jpaserver/target/ROOT.war /usr/local/tomcat/webapps/ -COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/conf/ -COPY hapi-fhir-jpaserver/target/classes/application.yaml /app/config/application.yaml -COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/webapps/app/config/application.yaml -COPY hapi-fhir-jpaserver/custom/ /usr/local/tomcat/webapps/custom/ - -# Install supervisord -RUN pip install supervisor - -# Configure supervisord -COPY supervisord.conf /etc/supervisord.conf - -# Expose ports -EXPOSE 5000 8080 - -# Start supervisord -CMD ["supervisord", "-c", "/etc/supervisord.conf"] \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index 67e06e2..0000000 --- a/LICENSE.md +++ /dev/null @@ -1,3 +0,0 @@ -# License - -This project, FHIRFLARE-IG-Toolkit, is licensed under the Apache License, Version 2.0. diff --git a/README.html b/README.html new file mode 100644 index 0000000..e5d1784 --- /dev/null +++ b/README.html @@ -0,0 +1,855 @@ + + + + + + + +FHIRFLARE IG Toolkit | Helm chart for deploying the fhirflare-ig-toolkit application + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FHIRFLARE IG Toolkit

FHIRFLARE Logo

Overview

+ +

The FHIRFLARE IG Toolkit is a Flask-based web application designed to streamline the management, processing, validation, and deployment of FHIR Implementation Guides (IGs) and test data. It offers a user-friendly interface for importing IG packages, extracting metadata, validating FHIR resources or bundles, pushing IGs to FHIR servers, converting FHIR resources to FHIR Shorthand (FSH), uploading complex test data sets with dependency management, and retrieving/splitting FHIR bundles. The toolkit includes live consoles for real-time feedback, making it an essential tool for FHIR developers and implementers.

+ +

The application can run in two modes:

+ +
    +
  • Standalone: Includes a Dockerized Flask frontend, SQLite database, and an embedded HAPI FHIR server for local validation and interaction.
  • +
  • Lite: Includes only the Dockerized Flask frontend and SQLite database, excluding the local HAPI FHIR server. Requires connection to external FHIR servers for certain features.
  • +
+ +

Installation Modes (Lite vs. Standalone)

+ +

This toolkit offers two primary installation modes to suit different needs:

  • Standalone Version:
    • Includes the full FHIRFLARE Toolkit application and an embedded HAPI FHIR server running locally within the Docker environment.
    • Allows for local FHIR resource validation using HAPI FHIR’s capabilities.
    • Enables the “Use Local HAPI” option in the FHIR API Explorer and FHIR UI Operations pages, proxying requests to the internal HAPI server (http://localhost:8080/fhir). A sketch after this list shows how the mode flag might gate this option.
    • Requires Git and Maven during the initial build process (via the .bat script or manual steps) to prepare the HAPI FHIR server.
    • Ideal for users who want a self-contained environment for development and testing or who don’t have readily available external FHIR servers.
  • Lite Version:
    • Includes the FHIRFLARE Toolkit application without the embedded HAPI FHIR server.
    • Requires users to provide URLs for external FHIR servers when using features like the FHIR API Explorer and FHIR UI Operations pages. The “Use Local HAPI” option will be disabled in the UI.
    • Resource validation relies solely on local checks against downloaded StructureDefinitions, which may be less comprehensive than HAPI FHIR’s validation (e.g., for terminology bindings or complex invariants).
    • Does not require Git or Maven for setup if using the .bat script or running the pre-built Docker image.
    • Ideal for users who primarily want to use the IG management, processing, and FSH conversion features, or who will always connect to existing external FHIR servers.
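
The build scripts and docker-compose file surface the chosen mode to the application as the APP_MODE environment variable (alongside HAPI_FHIR_URL). A minimal sketch of how a Flask view might gate the “Use Local HAPI” option on that flag; the variable and template names are illustrative, not the toolkit’s actual code:

Python

import os
from flask import Flask, render_template

app = Flask(__name__)

# Values provided by docker-compose / the build scripts.
APP_MODE = os.environ.get("APP_MODE", "standalone").lower()
LOCAL_HAPI_URL = os.environ.get("HAPI_FHIR_URL", "http://localhost:8080/fhir")

@app.route("/fhir-ui-operations")
def fhir_ui_operations():
    # In Lite mode the "Use Local HAPI" toggle is disabled and the user
    # must supply an external FHIR server URL instead.
    return render_template(
        "fhir_ui_operations.html",
        allow_local_hapi=(APP_MODE != "lite"),
        local_hapi_url=LOCAL_HAPI_URL,
    )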

Features

  • Import IGs: Download FHIR IG packages and dependencies from a package registry, supporting flexible version formats (e.g., 1.2.3, 1.1.0-preview, current) and dependency pulling modes (Recursive, Patch Canonical, Tree Shaking).
  • Enhanced Package Search and Import:
    • Interactive page (/search-and-import) to search for FHIR IG packages from configured registries.
    • Displays package details, version history, dependencies, and dependents.
    • Utilizes a local database cache (CachedPackage) for faster subsequent searches.
    • Background task to refresh the package cache from registries (/api/refresh-cache-task).
    • Direct import from search results.
  • Manage IGs: View, process, unload, or delete downloaded IGs, with duplicate detection and resolution.
  • Process IGs: Extract resource types, profiles, must-support elements, examples, and profile relationships (structuredefinition-compliesWithProfile and structuredefinition-imposeProfile).
  • Validate FHIR Resources/Bundles: Validate single FHIR resources or bundles against selected IGs, with detailed error and warning reports (alpha feature). Note: Lite version uses local SD checks only.
  • Push IGs: Upload IG resources (and optionally dependencies) to a target FHIR server. Features include:
    • Real-time console output.
    • Authentication support (Bearer Token).
    • Filtering by resource type or specific files to skip.
    • Semantic comparison to skip uploading identical resources (override with Force Upload option) — a comparison sketch follows this list.
    • Correct handling of canonical resources (searching by URL/version before deciding POST/PUT).
    • Dry run mode for simulation.
    • Verbose logging option.
  • Upload Test Data: Upload complex sets of test data (individual JSON/XML files or ZIP archives) to a target FHIR server. Features include:
    • Robust parsing of JSON and XML (using fhir.resources library when available).
    • Automatic dependency analysis based on resource references within the uploaded set.
    • Topological sorting to ensure resources are uploaded in the correct order.
    • Cycle detection in dependencies.
    • Choice of individual resource uploads or a single transaction bundle.
    • Optional Pre-Upload Validation: Validate resources against a selected profile package before uploading.
    • Optional Conditional Uploads (Individual Mode): Check resource existence (GET) and use conditional If-Match headers for updates (PUT) or create resources (PUT/POST). Falls back to simple PUT if unchecked.
    • Configurable error handling (stop on first error or continue).
    • Authentication support (Bearer Token).
    • Streaming progress log via the UI.
    • Handles large numbers of files using a custom form parser.
  • Profile Relationships: Display and validate compliesWithProfile and imposeProfile extensions in the UI (configurable).
  • FSH Converter: Convert FHIR JSON/XML resources to FHIR Shorthand (FSH) using GoFSH, with advanced options (Package context, Output styles, Log levels, FHIR versions, Fishing Trip, Dependencies, Indentation, Meta Profile handling, Alias File, No Alias). Includes a waiting spinner.
  • Retrieve and Split Bundles:
    • Retrieve specified resource types as bundles from a FHIR server.
    • Optionally fetch referenced resources, either individually or as full bundles for each referenced type.
    • Split uploaded ZIP files containing bundles into individual resource JSON files.
    • Download retrieved/split resources as a ZIP archive.
    • Streaming progress log via the UI for retrieval operations.
  • FHIR Interaction UIs: Explore FHIR server capabilities and interact with resources using the “FHIR API Explorer” (simple GET/POST/PUT/DELETE) and “FHIR UI Operations” (Swagger-like interface based on CapabilityStatement). Note: Lite version requires custom server URLs.
  • HAPI FHIR Configuration (Standalone Mode):
    • A dedicated page (/config-hapi) to view and edit the application.yaml configuration for the embedded HAPI FHIR server.
    • Allows modification of HAPI FHIR properties directly from the UI.
    • Option to restart the HAPI FHIR server (Tomcat) to apply changes.
  • API Support: RESTful API endpoints for importing, pushing, retrieving metadata, validating, uploading test data, and retrieving/splitting bundles.
  • Live Console: Real-time logs for push, validation, upload test data, FSH conversion, and bundle retrieval operations.
  • Configurable Behavior: Control validation modes, display options via app.config.
  • Theming: Supports light and dark modes.
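
The semantic comparison used by Push IGs (skipping resources that already exist on the server in identical form) can be pictured with a short sketch. This is an illustration of the general idea, assuming volatile fields such as meta.versionId, meta.lastUpdated and the generated narrative are ignored; it is not the toolkit’s actual implementation:

Python

import json

VOLATILE_META_FIELDS = {"versionId", "lastUpdated"}

def canonicalize(resource: dict) -> dict:
    """Copy a FHIR resource dict and drop fields that change on every upload."""
    cleaned = json.loads(json.dumps(resource))  # cheap deep copy
    cleaned.pop("text", None)                   # generated narrative
    meta = cleaned.get("meta", {})
    for field in VOLATILE_META_FIELDS:
        meta.pop(field, None)
    if not meta:
        cleaned.pop("meta", None)
    return cleaned

def semantically_equal(local_resource: dict, server_resource: dict) -> bool:
    """True when two resources differ only in volatile, server-managed fields."""
    return canonicalize(local_resource) == canonicalize(server_resource)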

Technology Stack

  • Python 3.12+, Flask 2.3.3, Flask-SQLAlchemy 3.0.5, Flask-WTF 1.2.1
  • Jinja2, Bootstrap 5.3.3, JavaScript (ES6), Lottie-Web 5.12.2
  • SQLite
  • Docker, Docker Compose, Supervisor
  • Node.js 18+ (for GoFSH/SUSHI), GoFSH, SUSHI
  • HAPI FHIR (Standalone version only)
  • Requests 2.31.0, Tarfile, Logging, Werkzeug
  • fhir.resources (optional, for robust XML parsing)

Prerequisites

  • Docker: Required for containerized deployment (both versions).
  • Git & Maven: Required only for building the Standalone version from source using the .bat script or manual steps. Not required for the Lite version build or for running pre-built Docker Hub images.
  • Windows: Required if using the .bat scripts.

Setup Instructions

+ +

Running Pre-built Images (General Users)

+ +

This is the easiest way to get started without needing Git or Maven. Choose the version you need:

+ +

Lite Version (No local HAPI FHIR):

+ +
# Pull the latest Lite image
+docker pull ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest
+
+# Run the Lite version (maps port 5000 for the UI)
+# You'll need to create local directories for persistent data first:
+# mkdir instance logs static static/uploads instance/hapi-h2-data
+docker run -d \
+  -p 5000:5000 \
+  -v ./instance:/app/instance \
+  -v ./static/uploads:/app/static/uploads \
+  -v ./instance/hapi-h2-data:/app/h2-data \
+  -v ./logs:/app/logs \
+  --name fhirflare-lite \
+  ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest

Standalone Version (Includes local HAPI FHIR):

Bash

+ +
# Pull the latest Standalone image
+docker pull ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest
+
+# Run the Standalone version (maps ports 5000 and 8080)
+# You'll need to create local directories for persistent data first:
+# mkdir instance logs static static/uploads instance/hapi-h2-data
+docker run -d \
+  -p 5000:5000 \
+  -p 8080:8080 \
+  -v ./instance:/app/instance \
+  -v ./static/uploads:/app/static/uploads \
+  -v ./instance/hapi-h2-data:/app/h2-data \
+  -v ./logs:/app/logs \
+  --name fhirflare-standalone \
+  ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest
+
+ +

Building from Source (Developers)

Using Windows .bat Scripts (Standalone Version Only):

+ +

First Time Setup:

+ +

Run Build and Run for first time.bat:

+ +

Code snippet

+ +
cd "<project folder>"
+git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver
+copy .\hapi-fhir-Setup\target\classes\application.yaml .\hapi-fhir-jpaserver\target\classes\application.yaml
+mvn clean package -DskipTests=true -Pboot
+docker-compose build --no-cache
+docker-compose up -d
+
+ +

This clones the HAPI FHIR server, copies configuration, builds the project, and starts the containers.

+ +

Subsequent Runs:

+ +

Run Run.bat:

+ +

Code snippet

+
cd "<project folder>"
+docker-compose up -d
+
+

This starts the Flask app (port 5000) and HAPI FHIR server (port 8080).

+ +

Access the Application:

+ +
  • Flask UI: http://localhost:5000
  • HAPI FHIR server: http://localhost:8080

Manual Setup (Linux/MacOS/Windows):

Preparation (Standalone Version Only):

+ +
cd <project folder>
+git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver
+cp ./hapi-fhir-Setup/target/classes/application.yaml ./hapi-fhir-jpaserver/target/classes/application.yaml
+
+ +

Build:

+ +
# Build HAPI FHIR (Standalone Version Only)
+mvn clean package -DskipTests=true -Pboot
+
+# Build Docker Image (Specify APP_MODE=lite in docker-compose.yml for Lite version)
+docker-compose build --no-cache
+
+ +

Run:

+ +
docker-compose up -d

Access the Application:

  • Flask UI: http://localhost:5000
  • HAPI FHIR server (Standalone only): http://localhost:8080

Local Development (Without Docker):

Clone the Repository:

+ +
git clone https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit.git
+cd FHIRFLARE-IG-Toolkit
+
+ +

Install Dependencies:

+ +
python -m venv venv
+source venv/bin/activate  # On Windows: venv\Scripts\activate
+pip install -r requirements.txt
+
+ +

Install Node.js, GoFSH, and SUSHI (for FSH Converter):

+ +
# Example for Debian/Ubuntu
+curl -fsSL https://deb.nodesource.com/setup_18.x | sudo bash -
+sudo apt-get install -y nodejs
+# Install globally
+npm install -g gofsh fsh-sushi

Set Environment Variables:
+
+ +
export FLASK_SECRET_KEY='your-secure-secret-key'
+export API_KEY='your-api-key'
+# Optional: Set APP_MODE to 'lite' if desired
+# export APP_MODE='lite'
+
+ +

Initialize Directories:

+ +
mkdir -p instance static/uploads logs
+# Ensure write permissions if needed
+# chmod -R 777 instance static/uploads logs
+
+ +

Run the Application:

+ +
export FLASK_APP=app.py
+flask run
+
+

Access at http://localhost:5000.

+ +

Usage

Import an IG

+

Search, View Details, and Import Packages

+

Navigate to Search and Import Packages (/search-and-import).

+
  1. The page will load a list of available FHIR Implementation Guide packages from a local cache or by fetching from configured registries.
     • A loading animation and progress messages are shown if fetching from registries.
     • The timestamp of the last cache update is displayed.
  2. Use the search bar to filter packages by name or author.
  3. Packages are paginated for easier browsing.
  4. For each package, you can:
     • View its latest official and absolute versions.
     • Click on the package name to navigate to a detailed view (/package-details/<name>) showing:
       • Comprehensive metadata (author, FHIR version, canonical URL, description).
       • A full list of available versions with publication dates.
       • Declared dependencies.
       • Other packages that depend on it (dependents).
       • Version history (logs).
     • Directly import a specific version using the “Import” button on the search page or the details page.
  5. Cache Management:
     • A “Clear & Refresh Cache” button is available to trigger a background task (/api/refresh-cache-task) that clears the local database and in-memory cache and fetches the latest package information from all configured registries. Progress is shown via a live log.
  • Enter a package name (e.g., hl7.fhir.au.core) and version (e.g., 1.1.0-preview).
  • Choose a dependency mode:
    • Current Recursive: Import all dependencies listed in package.json recursively.
    • Patch Canonical Versions: Import only canonical FHIR packages (e.g., hl7.fhir.r4.core).
    • Tree Shaking: Import only dependencies containing resources actually used by the main package.
  • Click Import to download the package and dependencies. A programmatic equivalent is shown below.
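
The same import can also be triggered against the /api/import-ig endpoint documented under API Usage below; a small Python example using requests (the API key and port are whatever you configured):

Python

import requests

response = requests.post(
    "http://localhost:5000/api/import-ig",
    headers={"X-API-Key": "your-api-key"},
    json={
        "package_name": "hl7.fhir.au.core",
        "version": "1.1.0-preview",
        "dependency_mode": "recursive",
    },
    timeout=300,
)
response.raise_for_status()
# Response includes complies_with_profiles, imposed_profiles and duplicate_packages_present.
print(response.json())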

Manage IGs

Go to Manage FHIR Packages (/view-igs) to view downloaded and processed IGs.

+ +

Actions:

+
  • Process: Extract metadata (resource types, profiles, must-support elements, examples).
  • Unload: Remove processed IG data from the database.
  • Delete: Remove package files from the filesystem.

Duplicates are highlighted for resolution.

+ +

View Processed IGs

+ +

After processing, view IG details (/view-ig/), including:

+ +
  • Resource types and profiles.
  • Must-support elements and examples.
  • Profile relationships (compliesWithProfile, imposeProfile) if enabled (DISPLAY_PROFILE_RELATIONSHIPS).

Interactive StructureDefinition viewer (Differential, Snapshot, Must Support, Key Elements, Constraints, Terminology, Search Params).

+
Validate FHIR Resources/Bundles

  • Navigate to Validate FHIR Sample (/validate-sample).

Select a package (e.g., hl7.fhir.au.core#1.1.0-preview).

+ +
  • Choose Single Resource or Bundle mode.
  • Paste or upload FHIR JSON/XML (e.g., a Patient resource).
  • Submit to view validation errors/warnings. Note: Alpha feature; report issues to GitHub (remove PHI).

Push IGs to a FHIR Server

  • Go to Push IGs (/push-igs).

Select a downloaded package.

+ +
  • Enter the Target FHIR Server URL.
  • Configure Authentication (None, Bearer Token).
  • Choose options: Include Dependencies, Force Upload (skips comparison check), Dry Run, Verbose Log.
  • Optionally filter by Resource Types (comma-separated) or Skip Specific Files (paths within package, comma/newline separated).
  • Click Push to FHIR Server to upload resources. Canonical resources are checked before upload. Identical resources are skipped unless Force Upload is checked.
  • Monitor progress in the live console.

Upload Test Data

  • Navigate to Upload Test Data (/upload-test-data).
  • Enter the Target FHIR Server URL.
  • Configure Authentication (None, Bearer Token).
  • Select one or more .json, .xml files, or a single .zip file containing test resources.
  • Optionally check Validate Resources Before Upload? and select a Validation Profile Package.
  • Choose Upload Mode:
    • Individual Resources: Uploads each resource one by one in dependency order.
    • Transaction Bundle: Uploads all resources in a single transaction.
  • Optionally check Use Conditional Upload (Individual Mode Only)? to use If-Match headers for updates.
  • Choose Error Handling:
    • Stop on First Error: Halts the process if any validation or upload fails.
    • Continue on Error: Reports errors but attempts to process/upload remaining resources.
  • Click Upload and Process. The tool parses files, optionally validates, analyzes dependencies, topologically sorts resources, and uploads them according to selected options (see the ordering sketch after this list).
  • Monitor progress in the streaming log output.
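
The dependency ordering mentioned above can be pictured as a standard topological sort over resource references (Kahn’s algorithm). This is a simplified illustration of the approach, not the code in services.py:

Python

from collections import defaultdict, deque

def upload_order(resources):
    """Order FHIR resources (dicts) so referenced resources are uploaded first."""
    ids = {f"{r['resourceType']}/{r['id']}" for r in resources}

    def refs(node):
        # Recursively yield every "reference" string found in the resource.
        if isinstance(node, dict):
            ref = node.get("reference")
            if isinstance(ref, str):
                yield ref
            for value in node.values():
                yield from refs(value)
        elif isinstance(node, list):
            for value in node:
                yield from refs(value)

    dependents = defaultdict(list)  # dependency key -> resources that need it
    unmet = {}                      # resource key -> number of unmet dependencies
    for resource in resources:
        key = f"{resource['resourceType']}/{resource['id']}"
        deps = {ref for ref in refs(resource) if ref in ids and ref != key}
        unmet[key] = len(deps)
        for dep in deps:
            dependents[dep].append(key)

    queue = deque(key for key, count in unmet.items() if count == 0)
    ordered = []
    while queue:
        key = queue.popleft()
        ordered.append(key)
        for dependent in dependents[key]:
            unmet[dependent] -= 1
            if unmet[dependent] == 0:
                queue.append(dependent)

    if len(ordered) != len(resources):
        raise ValueError("Cycle detected in resource references")

    by_key = {f"{r['resourceType']}/{r['id']}": r for r in resources}
    return [by_key[key] for key in ordered]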

Convert FHIR to FSH

+
  • Navigate to FSH Converter (/fsh-converter).

Optionally select a package for context (e.g., hl7.fhir.au.core#1.1.0-preview).
Choose input mode:
  • Upload File: Upload a FHIR JSON/XML file.
  • Paste Text: Paste FHIR JSON/XML content.
Configure options:
  • Output Style: file-per-definition, group-by-fsh-type, group-by-profile, single-file.
  • Log Level: error, warn, info, debug.
  • FHIR Version: R4, R4B, R5, or auto-detect.
  • Fishing Trip: Enable round-trip validation with SUSHI, generating a comparison report.
  • Dependencies: Specify additional packages (e.g., hl7.fhir.us.core@6.1.0, one per line).
  • Indent Rules: Enable context path indentation for readable FSH.
  • Meta Profile: Choose only-one, first, or none for meta.profile handling.
  • Alias File: Upload an FSH file with aliases (e.g., $MyAlias = http://example.org).
  • No Alias: Disable automatic alias generation.
Click Convert to FSH to generate and display FSH output, with a waiting spinner (light/dark theme) during processing.
If Fishing Trip is enabled, view the comparison report via the “Click here for SUSHI Validation” badge button.
Download the result as a .fsh file.

Retrieve and Split Bundles

Navigate to Retrieve/Split Data (/retrieve-split-data).

+ +

Retrieve Bundles from Server:

+ +
  • Enter the FHIR Server URL (defaults to the proxy if empty).
  • Select one or more Resource Types to retrieve (e.g., Patient, Observation).
  • Optionally check Fetch Referenced Resources.
    • If checked, further optionally check Fetch Full Reference Bundles to retrieve entire bundles for each referenced type (e.g., all Patients if a Patient is referenced) instead of individual resources by ID.
  • Click Retrieve Bundles.
  • Monitor progress in the streaming log. A ZIP file containing the retrieved bundles/resources will be prepared for download.

Split Uploaded Bundles:

+ +
  • Upload a ZIP file containing FHIR bundles (JSON format).
  • Click Split Bundles.
  • A ZIP file containing individual resources extracted from the bundles will be prepared for download (the splitting step is illustrated in the sketch below).
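
Splitting itself is straightforward: each entry of each bundle is written out as its own resource file. A minimal sketch of the idea (file layout and naming here are illustrative, not the toolkit’s exact output):

Python

import json
import zipfile
from pathlib import Path

def split_bundle_zip(zip_path: str, output_dir: str) -> None:
    """Extract every entry.resource from every JSON bundle in a ZIP file."""
    out = Path(output_dir)
    out.mkdir(parents=True, exist_ok=True)
    with zipfile.ZipFile(zip_path) as archive:
        for name in archive.namelist():
            if not name.lower().endswith(".json"):
                continue
            bundle = json.loads(archive.read(name))
            if bundle.get("resourceType") != "Bundle":
                continue
            for index, entry in enumerate(bundle.get("entry", [])):
                resource = entry.get("resource")
                if not resource:
                    continue
                resource_id = resource.get("id", f"entry-{index}")
                target = out / f"{resource['resourceType']}-{resource_id}.json"
                target.write_text(json.dumps(resource, indent=2))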

Explore FHIR Operations

  • Navigate to FHIR UI Operations (/fhir-ui-operations).

Toggle between local HAPI (/fhir) or a custom FHIR server.

+ +
  • Click Fetch Metadata to load the server’s CapabilityStatement (see the sketch after this list).
  • Select a resource type (e.g., Patient, Observation) or System to view operations:
    • System operations: GET /metadata, POST /, GET /_history, GET/POST /$diff, POST /$reindex, POST /$expunge, etc.
    • Resource operations: GET Patient/:id, POST Observation/_search, etc.
  • Use Try it out to input parameters or request bodies, then Execute to view results in JSON, XML, or narrative formats.
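
Behind the scenes this UI is driven by the server’s CapabilityStatement, which any client can fetch directly. A small example that lists the resource types and interactions a FHIR server advertises (works against the local HAPI server or any external R4 endpoint):

Python

import requests

def list_server_operations(fhir_base_url: str) -> None:
    """Print the resource types and interactions advertised in /metadata."""
    capability = requests.get(
        f"{fhir_base_url.rstrip('/')}/metadata",
        headers={"Accept": "application/fhir+json"},
        timeout=30,
    ).json()
    for rest in capability.get("rest", []):
        for resource in rest.get("resource", []):
            interactions = [i["code"] for i in resource.get("interaction", [])]
            print(f"{resource['type']}: {', '.join(interactions)}")

list_server_operations("http://localhost:8080/fhir")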

Configure Embedded HAPI FHIR Server (Standalone Mode)

+

For users running the Standalone version, which includes an embedded HAPI FHIR server.

+
  1. Navigate to Configure HAPI FHIR (/config-hapi).
  2. The page displays the content of the HAPI FHIR server’s application.yaml file.
  3. You can edit the configuration directly in the text area.
     • Caution: Incorrect modifications can break the HAPI FHIR server.
  4. Click Save Configuration to apply your changes to the application.yaml file.
  5. Click Restart Tomcat to restart the HAPI FHIR server and load the new configuration. The restart process may take a few moments.

API Usage

Import IG

Bash

+ +

curl -X POST http://localhost:5000/api/import-ig \
+-H "Content-Type: application/json" \
+-H "X-API-Key: your-api-key" \
+-d '{"package_name": "hl7.fhir.au.core", "version": "1.1.0-preview", "dependency_mode": "recursive"}'

Returns complies_with_profiles, imposed_profiles, and duplicate_packages_present info.

+ +

Refresh Package Cache (Background Task)

+
curl -X POST http://localhost:5000/api/refresh-cache-task \
+-H "X-API-Key: your-api-key"
+
+ +

Push IG

Bash

+ +
curl -X POST http://localhost:5000/api/push-ig \
+-H "Content-Type: application/json" \
+-H "Accept: application/x-ndjson" \
+-H "X-API-Key: your-api-key" \
+-d '{
+      "package_name": "hl7.fhir.au.core",
+      "version": "1.1.0-preview",
+      "fhir_server_url": "http://localhost:8080/fhir",
+      "include_dependencies": true,
+      "force_upload": false,
+      "dry_run": false,
+      "verbose": false,
+      "auth_type": "none"
+    }'
+
+ +

Returns a streaming NDJSON response with progress and final summary.
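
Because the response is streamed as NDJSON, a client should read it line by line rather than waiting for the full body. A small Python sketch of consuming the stream (the exact message fields are whatever the server emits):

Python

import json
import requests

with requests.post(
    "http://localhost:5000/api/push-ig",
    headers={
        "Content-Type": "application/json",
        "Accept": "application/x-ndjson",
        "X-API-Key": "your-api-key",
    },
    json={
        "package_name": "hl7.fhir.au.core",
        "version": "1.1.0-preview",
        "fhir_server_url": "http://localhost:8080/fhir",
        "include_dependencies": True,
        "auth_type": "none",
    },
    stream=True,
    timeout=600,
) as response:
    response.raise_for_status()
    for line in response.iter_lines():
        if line:
            message = json.loads(line)  # one JSON object per line
            print(message)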

+ +

Upload Test Data

Bash

+ +
curl -X POST http://localhost:5000/api/upload-test-data \
+-H "X-API-Key: your-api-key" \
+-H "Accept: application/x-ndjson" \
+-F "fhir_server_url=http://your-fhir-server/fhir" \
+-F "auth_type=bearerToken" \
+-F "auth_token=YOUR_TOKEN" \
+-F "upload_mode=individual" \
+-F "error_handling=continue" \
+-F "validate_before_upload=true" \
+-F "validation_package_id=hl7.fhir.r4.core#4.0.1" \
+-F "use_conditional_uploads=true" \
+-F "test_data_files=@/path/to/your/patient.json" \
+-F "test_data_files=@/path/to/your/observations.zip"
+
+ +

Returns a streaming NDJSON response with progress and final summary. Uses multipart/form-data for file uploads.
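
What “conditional upload” means in individual mode can be sketched as follows: fetch the current version of each resource, and send If-Match with that version when updating. This illustrates the general FHIR version-aware update pattern, not the toolkit’s exact code:

Python

import requests

def conditional_upload(base_url: str, resource: dict, session: requests.Session) -> str:
    """PUT a resource, adding If-Match when a current version already exists."""
    url = f"{base_url.rstrip('/')}/{resource['resourceType']}/{resource['id']}"
    headers = {"Content-Type": "application/fhir+json"}

    existing = session.get(url, headers={"Accept": "application/fhir+json"})
    if existing.status_code == 200:
        version_id = existing.json().get("meta", {}).get("versionId")
        if version_id:
            # Weak ETag form used by FHIR servers for version-aware updates.
            headers["If-Match"] = f'W/"{version_id}"'

    response = session.put(url, json=resource, headers=headers)
    response.raise_for_status()
    return "created" if response.status_code == 201 else "updated"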

+ +

Retrieve Bundles

Bash

+ +
curl -X POST http://localhost:5000/api/retrieve-bundles \
+-H "X-API-Key: your-api-key" \
+-H "Accept: application/x-ndjson" \
+-F "fhir_server_url=http://your-fhir-server/fhir" \
+-F "resources=Patient" \
+-F "resources=Observation" \
+-F "validate_references=true" \
+-F "fetch_reference_bundles=false"
+
+ +

Returns a streaming NDJSON response with progress. The X-Zip-Path header in the final response part will contain the path to download the ZIP archive (e.g., /tmp/retrieved_bundles_datetime.zip).

+ +

Split Bundles

Bash

+ +
curl -X POST http://localhost:5000/api/split-bundles \
+-H "X-API-Key: your-api-key" \
+-H "Accept: application/x-ndjson" \
+-F "split_bundle_zip_path=@/path/to/your/bundles.zip"
+
+ +

Returns a streaming NDJSON response. The X-Zip-Path header in the final response part will contain the path to download the ZIP archive of split resources.

+ +

Validate Resource/Bundle

Not yet exposed via API; use the UI at /validate-sample.

+ +

Configuration Options

Located in app.py (a sketch of how these might be set follows the list):

+ +
  • VALIDATE_IMPOSED_PROFILES: (Default: True) Validates resources against imposed profiles during push.
  • DISPLAY_PROFILE_RELATIONSHIPS: (Default: True) Shows compliesWithProfile and imposeProfile in the UI.
  • FHIR_PACKAGES_DIR: (Default: /app/instance/fhir_packages) Stores .tgz packages and metadata.
  • UPLOAD_FOLDER: (Default: /app/static/uploads) Stores GoFSH output files and FSH comparison reports.
  • SECRET_KEY: Required for CSRF protection and sessions. Set via environment variable or directly.
  • API_KEY: Required for API authentication. Set via environment variable or directly.
  • MAX_CONTENT_LENGTH: (Default: Flask default) Max size for HTTP request body (e.g., 16 * 1024 * 1024 for 16MB). Important for large uploads.
  • MAX_FORM_PARTS: (Default: Werkzeug default, often 1000) Default max number of form parts. Overridden for /api/upload-test-data by CustomFormDataParser.
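
A sketch of how these options might look when set in app.py; the keys mirror the list above, the values are only examples, not the project’s defaults:

Python

import os
from flask import Flask

app = Flask(__name__)

app.config["VALIDATE_IMPOSED_PROFILES"] = True
app.config["DISPLAY_PROFILE_RELATIONSHIPS"] = True
app.config["FHIR_PACKAGES_DIR"] = "/app/instance/fhir_packages"
app.config["UPLOAD_FOLDER"] = "/app/static/uploads"
app.config["SECRET_KEY"] = os.environ.get("FLASK_SECRET_KEY", "dev-only-secret")
app.config["API_KEY"] = os.environ.get("API_KEY", "change-me")
app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024  # 16 MB request body cap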

Get HAPI FHIR Configuration (Standalone Mode)

+
curl -X GET http://localhost:5000/api/config \
+-H "X-API-Key: your-api-key"
+
+

Save HAPI FHIR Configuration:

+
curl -X POST http://localhost:5000/api/config \
+-H "Content-Type: application/json" \
+-H "X-API-Key: your-api-key" \
+-d '{"your_yaml_key": "your_value", ...}' # Send the full YAML content as JSON
+
+

Restart HAPI FHIR Server:

+ +
curl -X POST http://localhost:5000/api/restart-tomcat \
+-H "X-API-Key: your-api-key"
+
+ +

Testing

The project includes a test suite covering UI, API, database, file operations, and security.

+ +

Test Prerequisites:

+ +

  • pytest: For running tests.
  • pytest-mock: For mocking dependencies.

Install: pip install pytest pytest-mock

Running Tests:

+ +

Bash

+ +
cd <project folder>
+pytest tests/test_app.py -v
+
+

Test Coverage:

+ +
  • UI Pages: Homepage, Import IG, Manage IGs, Push IGs, Validate Sample, View Processed IG, FSH Converter, Upload Test Data, Retrieve/Split Data.
  • API Endpoints: POST /api/import-ig, POST /api/push-ig, GET /get-structure, GET /get-example, POST /api/upload-test-data, POST /api/retrieve-bundles, POST /api/split-bundles.
  • Database: IG processing, unloading, viewing.
  • File Operations: Package processing, deletion, FSH output, ZIP handling.
  • Security: CSRF protection, flash messages, secret key.
  • FSH Converter: Form submission, file/text input, GoFSH execution, Fishing Trip comparison.
  • Upload Test Data: Parsing, dependency graph, sorting, upload modes, validation, conditional uploads.

Development Notes

+ +

Background

+ +

The toolkit addresses the need for a comprehensive FHIR IG management tool, with recent enhancements for resource validation, FSH conversion with advanced GoFSH features, flexible versioning, improved IG pushing, dependency-aware test data uploading, and bundle retrieval/splitting, making it a versatile platform for FHIR developers.

+ +

Technical Decisions

+ +
  • Flask: Lightweight and flexible for web development.
  • SQLite: Simple for development; consider PostgreSQL for production.
  • Bootstrap 5.3.3: Responsive UI with custom styling.
  • Lottie-Web: Renders themed animations for FSH conversion waiting spinner.
  • GoFSH/SUSHI: Integrated via Node.js for advanced FSH conversion and round-trip validation.
  • Docker: Ensures consistent deployment with Flask and HAPI FHIR.
  • Flexible Versioning: Supports non-standard IG versions (e.g., -preview, -ballot).
  • Live Console/Streaming: Real-time feedback for complex operations (Push, Upload Test Data, FSH, Retrieve Bundles).
  • Validation: Alpha feature with ongoing FHIRPath improvements.
  • Dependency Management: Uses topological sort for Upload Test Data feature.
  • Form Parsing: Uses custom Werkzeug parser for Upload Test Data to handle large numbers of files (see the sketch after this list).
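
The custom form parser mentioned above can be sketched with Werkzeug’s standard extension points: subclass FormDataParser to raise the part limit and install it via a custom request class. A minimal, illustrative version (applied globally here for brevity; the toolkit scopes it to /api/upload-test-data, and the 10000 limit is an assumed value):

Python

from flask import Flask, Request
from werkzeug.formparser import FormDataParser

class CustomFormDataParser(FormDataParser):
    """Allow far more multipart parts than Werkzeug's default (~1000)."""
    def __init__(self, *args, **kwargs):
        kwargs["max_form_parts"] = 10000  # assumed limit, raise as needed
        super().__init__(*args, **kwargs)

class CustomRequest(Request):
    form_data_parser_class = CustomFormDataParser

app = Flask(__name__)
app.request_class = CustomRequest
app.config["MAX_CONTENT_LENGTH"] = 16 * 1024 * 1024  # overall body size cap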

Recent Updates

+ +
  • Enhanced package search page with caching, detailed views (dependencies, dependents, version history), and background cache refresh.
  • Upload Test Data Enhancements (April 2025):
    • Added optional Pre-Upload Validation against selected IG profiles.
    • Added optional Conditional Uploads (GET + POST/PUT w/ If-Match) for individual mode.
    • Implemented robust XML parsing using fhir.resources library (when available).
    • Fixed 413 Request Entity Too Large errors for large file counts using a custom Werkzeug FormDataParser.
    • Path: templates/upload_test_data.html, app.py, services.py, forms.py.
  • Push IG Enhancements (April 2025):
    • Added semantic comparison to skip uploading identical resources.
    • Added “Force Upload” option to bypass comparison.
    • Improved handling of canonical resources (search before PUT/POST).
    • Added filtering by specific files to skip during push.
    • More detailed summary report in stream response.
    • Path: templates/cp_push_igs.html, app.py, services.py.
  • Waiting Spinner for FSH Converter (April 2025):
    • Added a themed (light/dark) Lottie animation spinner during FSH execution.
    • Path: templates/fsh_converter.html, static/animations/, static/js/lottie-web.min.js.
  • Advanced FSH Converter (April 2025):
    • Added support for GoFSH advanced options: --fshing-trip, --dependency, --indent, --meta-profile, --alias-file, --no-alias.
    • Displays Fishing Trip comparison reports.
    • Path: templates/fsh_converter.html, app.py, services.py, forms.py.
  • (New) Retrieve and Split Data (May 2025):
    • Added UI and API for retrieving bundles from a FHIR server by resource type.
    • Added options to fetch referenced resources (individually or as full type bundles).
    • Added functionality to split uploaded ZIP files of bundles into individual resources.
    • Streaming log for retrieval and ZIP download for results.
    • Paths: templates/retrieve_split_data.html, app.py, services.py, forms.py.

Known Issues and Workarounds

  • Favicon 404: Clear browser cache or verify /app/static/favicon.ico.
  • CSRF Errors: Set FLASK_SECRET_KEY and ensure CSRF tokens are included in forms.
  • Import Fails: Check package name/version and connectivity.
  • Validation Accuracy: Alpha feature; report issues to GitHub (remove PHI).
  • Package Parsing: Non-standard .tgz filenames may parse incorrectly. Fallback uses name-only parsing.
  • Permissions: Ensure instance/ and static/uploads/ are writable.
  • GoFSH/SUSHI Errors: Check ./logs/flask_err.log for ERROR:services:GoFSH failed. Ensure valid FHIR inputs and SUSHI installation.
  • Upload Test Data XML Parsing: Relies on fhir.resources library for full validation; basic parsing used as fallback. Complex XML structures might not be fully analyzed for dependencies with basic parsing. Prefer JSON for reliable dependency analysis.
  • 413 Request Entity Too Large: Primarily handled by CustomFormDataParser for /api/upload-test-data. Check the parser’s max_form_parts limit if still occurring. MAX_CONTENT_LENGTH in app.py controls overall size. Reverse proxy limits (client_max_body_size in Nginx) might also apply.

Future Improvements

+ +
  • Upload Test Data: Improve XML parsing further (direct XML->fhir.resource object if possible), add visual progress bar, add upload order preview, implement transaction bundle size splitting, add ‘Clear Target Server’ option (with confirmation).
  • Validation: Enhance FHIRPath for complex constraints; add API endpoint.
  • Sorting: Sort IG versions in /view-igs (e.g., ascending).
  • Duplicate Resolution: Options to keep latest version or merge resources.
  • Production Database: Support PostgreSQL.
  • Error Reporting: Detailed validation error paths in the UI.
  • FSH Enhancements: Add API endpoint for FSH conversion; support inline instance construction.
  • FHIR Operations: Add complex parameter support (e.g., /$diff with left/right).
  • Retrieve/Split Data: Add option to filter resources during retrieval (e.g., by date, specific IDs).

Completed Items

+ +
  • Testing suite with basic coverage.
  • API endpoints for POST /api/import-ig and POST /api/push-ig.
  • Flexible versioning (-preview, -ballot).
  • CSRF fixes for forms.
  • Resource validation UI (alpha).
  • FSH Converter with advanced GoFSH features and waiting spinner.
  • Push IG enhancements (force upload, semantic comparison, canonical handling, skip files).
  • Upload Test Data feature with dependency sorting, multiple upload modes, pre-upload validation, conditional uploads, robust XML parsing, and fix for large file counts.
  • Retrieve and Split Data functionality with reference fetching and ZIP download.

Far-Distant Improvements

  • Cache Service: Use Redis for IG metadata caching.
  • Database Optimization: Composite index on ProcessedIg.package_name and ProcessedIg.version.

Directory Structure

FHIRFLARE-IG-Toolkit/
├── app.py                              # Main Flask application
├── Build and Run for first time.bat    # Windows script for first-time Docker setup
├── docker-compose.yml                  # Docker Compose configuration
├── Dockerfile                          # Docker configuration
├── forms.py                            # Form definitions
├── LICENSE.md                          # Apache 2.0 License
├── README.md                           # Project documentation
├── requirements.txt                    # Python dependencies
├── Run.bat                             # Windows script for running Docker
├── services.py                         # Logic for IG import, processing, validation, pushing, FSH conversion, test data upload, retrieve/split
├── supervisord.conf                    # Supervisor configuration
├── hapi-fhir-Setup/
│   ├── README.md                       # HAPI FHIR setup instructions
│   └── target/
│       └── classes/
│           └── application.yaml        # HAPI FHIR configuration
├── instance/
│   ├── fhir_ig.db                      # SQLite database
│   ├── fhir_ig.db.old                  # Database backup
│   └── fhir_packages/                  # Stored IG packages and metadata
│       └── ... (example packages) ...
├── logs/
│   ├── flask.log                       # Flask application logs
│   ├── flask_err.log                   # Flask error logs
│   ├── supervisord.log                 # Supervisor logs
│   ├── supervisord.pid                 # Supervisor PID file
│   ├── tomcat.log                      # Tomcat logs for HAPI FHIR
│   └── tomcat_err.log                  # Tomcat error logs
├── static/
│   ├── animations/
│   │   ├── loading-dark.json           # Dark theme spinner animation
│   │   └── loading-light.json          # Light theme spinner animation
│   ├── favicon.ico                     # Application favicon
│   ├── FHIRFLARE.png                   # Application logo
│   ├── js/
│   │   └── lottie-web.min.js           # Lottie library for spinner
│   └── uploads/
│       ├── output.fsh                  # Generated FSH output (temp location)
│       └── fsh_output/                 # GoFSH output directory
│           └── ... (example GoFSH output) ...
├── templates/
│   ├── base.html                       # Base template
│   ├── cp_downloaded_igs.html          # UI for managing IGs
│   ├── cp_push_igs.html                # UI for pushing IGs
│   ├── cp_view_processed_ig.html       # UI for viewing processed IGs
│   ├── fhir_ui.html                    # UI for FHIR API explorer
│   ├── fhir_ui_operations.html         # UI for FHIR server operations
│   ├── fsh_converter.html              # UI for FSH conversion
│   ├── import_ig.html                  # UI for importing IGs
│   ├── index.html                      # Homepage
│   ├── retrieve_split_data.html        # UI for Retrieve and Split Data
│   ├── upload_test_data.html           # UI for Uploading Test Data
│   ├── validate_sample.html            # UI for validating resources/bundles
│   ├── config_hapi.html                # UI for HAPI FHIR Configuration
│   └── _form_helpers.html              # Form helper macros
├── tests/
│   └── test_app.py                     # Test suite
└── hapi-fhir-jpaserver/                # HAPI FHIR server resources (if Standalone)

Contributing

  1. Fork the repository.
  2. Create a feature branch (git checkout -b feature/your-feature).
  3. Commit changes (git commit -m "Add your feature").
  4. Push to your branch (git push origin feature/your-feature).
  5. Open a Pull Request.
  6. Ensure code follows PEP 8 and includes tests in tests/test_app.py.

Troubleshooting

  • Favicon 404: Clear browser cache or verify /app/static/favicon.ico: docker exec -it <container-name> curl http://localhost:5000/static/favicon.ico
  • CSRF Errors: Set FLASK_SECRET_KEY and ensure {{ form.hidden_tag() }} is included in forms.
  • Import Fails: Check package name/version and connectivity.
  • Validation Accuracy: Alpha feature; report issues to GitHub (remove PHI).
  • Package Parsing: Non-standard .tgz filenames may parse incorrectly. Fallback uses name-only parsing.
  • Permissions: Ensure instance/ and static/uploads/ are writable: chmod -R 777 instance static/uploads logs
  • GoFSH/SUSHI Errors: Check ./logs/flask_err.log for ERROR:services:GoFSH failed. Ensure valid FHIR inputs and SUSHI installation: docker exec -it <container-name> sushi --version
  • 413 Request Entity Too Large: Increase MAX_CONTENT_LENGTH and MAX_FORM_PARTS in app.py (see the configuration sketch after this list). If using a reverse proxy (e.g., Nginx), increase its client_max_body_size setting as well. Ensure the application/container is fully restarted/rebuilt.
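
A minimal sketch of the app.py settings referenced above; the 16 MB figure is only an example, and whether MAX_FORM_PARTS is honoured as a plain config key depends on the Flask/Werkzeug versions in use (the custom form parser described under Known Issues is the other lever for part limits).

```python
from flask import Flask

app = Flask(__name__)
# Example values only: tune to the upload sizes you expect.
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # total request body cap (~16 MB)
app.config['MAX_FORM_PARTS'] = 2000                   # multipart part limit, where supported
```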

License


Licensed under the Apache 2.0 License. See LICENSE.md for details.

+ + + + diff --git a/README.md b/README.md deleted file mode 100644 index 030e215..0000000 --- a/README.md +++ /dev/null @@ -1,661 +0,0 @@ -# FHIRFLARE IG Toolkit -![FHIRFLARE Logo](static/FHIRFLARE.png) - -## Overview - -The FHIRFLARE IG Toolkit is a Flask-based web application designed to streamline the management, processing, validation, and deployment of FHIR Implementation Guides (IGs) and test data. It offers a user-friendly interface for importing IG packages, extracting metadata, validating FHIR resources or bundles, pushing IGs to FHIR servers, converting FHIR resources to FHIR Shorthand (FSH), uploading complex test data sets with dependency management, and retrieving/splitting FHIR bundles. The toolkit includes live consoles for real-time feedback, making it an essential tool for FHIR developers and implementers. - -The application can run in two modes: - -* **Standalone:** Includes a Dockerized Flask frontend, SQLite database, and an embedded HAPI FHIR server for local validation and interaction. -* **Lite:** Includes only the Dockerized Flask frontend and SQLite database, excluding the local HAPI FHIR server. Requires connection to external FHIR servers for certain features. - -## Installation Modes (Lite vs. Standalone) - -This toolkit offers two primary installation modes to suit different needs: - -* **Standalone Version:** - * Includes the full FHIRFLARE Toolkit application **and** an embedded HAPI FHIR server running locally within the Docker environment. - * Allows for local FHIR resource validation using HAPI FHIR's capabilities. - * Enables the "Use Local HAPI" option in the FHIR API Explorer and FHIR UI Operations pages, proxying requests to the internal HAPI server (`http://localhost:8080/fhir`). - * Requires Git and Maven during the initial build process (via the `.bat` script or manual steps) to prepare the HAPI FHIR server. - * Ideal for users who want a self-contained environment for development and testing or who don't have readily available external FHIR servers. - -* **Lite Version:** - * Includes the FHIRFLARE Toolkit application **without** the embedded HAPI FHIR server. - * Requires users to provide URLs for external FHIR servers when using features like the FHIR API Explorer and FHIR UI Operations pages. The "Use Local HAPI" option will be disabled in the UI. - * Resource validation relies solely on local checks against downloaded StructureDefinitions, which may be less comprehensive than HAPI FHIR's validation (e.g., for terminology bindings or complex invariants). - * **Does not require Git or Maven** for setup if using the `.bat` script or running the pre-built Docker image. - * Ideal for users who primarily want to use the IG management, processing, and FSH conversion features, or who will always connect to existing external FHIR servers. - -## Features - -* **Import IGs:** Download FHIR IG packages and dependencies from a package registry, supporting flexible version formats (e.g., `1.2.3`, `1.1.0-preview`, `current`) and dependency pulling modes (Recursive, Patch Canonical, Tree Shaking). -* **Enhanced Package Search and Import:** - * Interactive page (`/search-and-import`) to search for FHIR IG packages from configured registries. - * Displays package details, version history, dependencies, and dependents. - * Utilizes a local database cache (`CachedPackage`) for faster subsequent searches. - * Background task to refresh the package cache from registries (`/api/refresh-cache-task`). - * Direct import from search results. 
-* **Manage IGs:** View, process, unload, or delete downloaded IGs, with duplicate detection and resolution. -* **Process IGs:** Extract resource types, profiles, must-support elements, examples, and profile relationships (`structuredefinition-compliesWithProfile` and `structuredefinition-imposeProfile`). -* **Validate FHIR Resources/Bundles:** Validate single FHIR resources or bundles against selected IGs, with detailed error and warning reports (alpha feature). *Note: Lite version uses local SD checks only.* -* **Push IGs:** Upload IG resources (and optionally dependencies) to a target FHIR server. Features include: - * Real-time console output. - * Authentication support (Bearer Token). - * Filtering by resource type or specific files to skip. - * Semantic comparison to skip uploading identical resources (override with **Force Upload** option). - * Correct handling of canonical resources (searching by URL/version before deciding POST/PUT). - * Dry run mode for simulation. - * Verbose logging option. -* **Upload Test Data:** Upload complex sets of test data (individual JSON/XML files or ZIP archives) to a target FHIR server. Features include: - * Robust parsing of JSON and XML (using `fhir.resources` library when available). - * Automatic dependency analysis based on resource references within the uploaded set. - * Topological sorting to ensure resources are uploaded in the correct order. - * Cycle detection in dependencies. - * Choice of individual resource uploads or a single transaction bundle. - * **Optional Pre-Upload Validation:** Validate resources against a selected profile package before uploading. - * **Optional Conditional Uploads (Individual Mode):** Check resource existence (GET) and use conditional `If-Match` headers for updates (PUT) or create resources (PUT/POST). Falls back to simple PUT if unchecked. - * Configurable error handling (stop on first error or continue). - * Authentication support (Bearer Token). - * Streaming progress log via the UI. - * Handles large numbers of files using a custom form parser. -* **Profile Relationships:** Display and validate `compliesWithProfile` and `imposeProfile` extensions in the UI (configurable). -* **FSH Converter:** Convert FHIR JSON/XML resources to FHIR Shorthand (FSH) using GoFSH, with advanced options (Package context, Output styles, Log levels, FHIR versions, Fishing Trip, Dependencies, Indentation, Meta Profile handling, Alias File, No Alias). Includes a waiting spinner. -* **Retrieve and Split Bundles:** - * Retrieve specified resource types as bundles from a FHIR server. - * Optionally fetch referenced resources, either individually or as full bundles for each referenced type. - * Split uploaded ZIP files containing bundles into individual resource JSON files. - * Download retrieved/split resources as a ZIP archive. - * Streaming progress log via the UI for retrieval operations. -* **FHIR Interaction UIs:** Explore FHIR server capabilities and interact with resources using the "FHIR API Explorer" (simple GET/POST/PUT/DELETE) and "FHIR UI Operations" (Swagger-like interface based on CapabilityStatement). *Note: Lite version requires custom server URLs.* -* **HAPI FHIR Configuration (Standalone Mode):** - * A dedicated page (`/config-hapi`) to view and edit the `application.yaml` configuration for the embedded HAPI FHIR server. - * Allows modification of HAPI FHIR properties directly from the UI. - * Option to restart the HAPI FHIR server (Tomcat) to apply changes. 
-* **API Support:** RESTful API endpoints for importing, pushing, retrieving metadata, validating, uploading test data, and retrieving/splitting bundles. -* **Live Console:** Real-time logs for push, validation, upload test data, FSH conversion, and bundle retrieval operations. -* **Configurable Behavior:** Control validation modes, display options via `app.config`. -* **Theming:** Supports light and dark modes. - -## Technology Stack - -* Python 3.12+, Flask 2.3.3, Flask-SQLAlchemy 3.0.5, Flask-WTF 1.2.1 -* Jinja2, Bootstrap 5.3.3, JavaScript (ES6), Lottie-Web 5.12.2 -* SQLite -* Docker, Docker Compose, Supervisor -* Node.js 18+ (for GoFSH/SUSHI), GoFSH, SUSHI -* HAPI FHIR (Standalone version only) -* Requests 2.31.0, Tarfile, Logging, Werkzeug -* fhir.resources (optional, for robust XML parsing) - -## Prerequisites - -* **Docker:** Required for containerized deployment (both versions). -* **Git & Maven:** Required **only** for building the **Standalone** version from source using the `.bat` script or manual steps. Not required for the Lite version build or for running pre-built Docker Hub images. -* **Windows:** Required if using the `.bat` scripts. - -## Setup Instructions - -### Running Pre-built Images (General Users) - -This is the easiest way to get started without needing Git or Maven. Choose the version you need: - -**Lite Version (No local HAPI FHIR):** - -```bash -# Pull the latest Lite image -docker pull ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest - -# Run the Lite version (maps port 5000 for the UI) -# You'll need to create local directories for persistent data first: -# mkdir instance logs static static/uploads instance/hapi-h2-data -docker run -d \ - -p 5000:5000 \ - -v ./instance:/app/instance \ - -v ./static/uploads:/app/static/uploads \ - -v ./instance/hapi-h2-data:/app/h2-data \ - -v ./logs:/app/logs \ - --name fhirflare-lite \ - ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest -Standalone Version (Includes local HAPI FHIR): - -Bash - -# Pull the latest Standalone image -docker pull ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest - -# Run the Standalone version (maps ports 5000 and 8080) -# You'll need to create local directories for persistent data first: -# mkdir instance logs static static/uploads instance/hapi-h2-data -docker run -d \ - -p 5000:5000 \ - -p 8080:8080 \ - -v ./instance:/app/instance \ - -v ./static/uploads:/app/static/uploads \ - -v ./instance/hapi-h2-data:/app/h2-data \ - -v ./logs:/app/logs \ - --name fhirflare-standalone \ - ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest -Building from Source (Developers) -Using Windows .bat Scripts (Standalone Version Only): - -First Time Setup: - -Run Build and Run for first time.bat: - -Code snippet - -cd "" -git clone [https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git](https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git) hapi-fhir-jpaserver -copy .\\hapi-fhir-Setup\\target\\classes\\application.yaml .\\hapi-fhir-jpaserver\\target\\classes\\application.yaml -mvn clean package -DskipTests=true -Pboot -docker-compose build --no-cache -docker-compose up -d -This clones the HAPI FHIR server, copies configuration, builds the project, and starts the containers. - -Subsequent Runs: - -Run Run.bat: - -Code snippet - -cd "" -docker-compose up -d -This starts the Flask app (port 5000) and HAPI FHIR server (port 8080). 
- -Access the Application: - -Flask UI: http://localhost:5000 -HAPI FHIR server: http://localhost:8080 -Manual Setup (Linux/MacOS/Windows): - -Preparation (Standalone Version Only): - -Bash - -cd -git clone [https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git](https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git) hapi-fhir-jpaserver -cp ./hapi-fhir-Setup/target/classes/application.yaml ./hapi-fhir-jpaserver/target/classes/application.yaml -Build: - -Bash - -# Build HAPI FHIR (Standalone Version Only) -mvn clean package -DskipTests=true -Pboot - -# Build Docker Image (Specify APP_MODE=lite in docker-compose.yml for Lite version) -docker-compose build --no-cache -Run: - -Bash - -docker-compose up -d -Access the Application: - -Flask UI: http://localhost:5000 -HAPI FHIR server (Standalone only): http://localhost:8080 -Local Development (Without Docker): - -Clone the Repository: - -Bash - -git clone [https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit.git](https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit.git) -cd FHIRFLARE-IG-Toolkit -Install Dependencies: - -Bash - -python -m venv venv -source venv/bin/activate # On Windows: venv\Scripts\activate -pip install -r requirements.txt -Install Node.js, GoFSH, and SUSHI (for FSH Converter): - -Bash - -# Example for Debian/Ubuntu -curl -fsSL [https://deb.nodesource.com/setup_18.x](https://deb.nodesource.com/setup_18.x) | sudo bash - -sudo apt-get install -y nodejs -# Install globally -npm install -g gofsh fsh-sushi -Set Environment Variables: - -Bash - -export FLASK_SECRET_KEY='your-secure-secret-key' -export API_KEY='your-api-key' -# Optional: Set APP_MODE to 'lite' if desired -# export APP_MODE='lite' -Initialize Directories: - -Bash - -mkdir -p instance static/uploads logs -# Ensure write permissions if needed -# chmod -R 777 instance static/uploads logs -Run the Application: - -Bash - -export FLASK_APP=app.py -flask run -Access at http://localhost:5000. - -Usage -Import an IG -### Search, View Details, and Import Packages -Navigate to **Search and Import Packages** (`/search-and-import`). -1. The page will load a list of available FHIR Implementation Guide packages from a local cache or by fetching from configured registries. - * A loading animation and progress messages are shown if fetching from registries. - * The timestamp of the last cache update is displayed. -2. Use the search bar to filter packages by name or author. -3. Packages are paginated for easier Browse. -4. For each package, you can: - * View its latest official and absolute versions. - * Click on the package name to navigate to a **detailed view** (`/package-details/`) showing: - * Comprehensive metadata (author, FHIR version, canonical URL, description). - * A full list of available versions with publication dates. - * Declared dependencies. - * Other packages that depend on it (dependents). - * Version history (logs). - * Directly import a specific version using the "Import" button on the search page or the details page. -5. **Cache Management:** - * A "Clear & Refresh Cache" button is available to trigger a background task (`/api/refresh-cache-task`) that clears the local database and in-memory cache and fetches the latest package information from all configured registries. Progress is shown via a live log. - -Enter a package name (e.g., hl7.fhir.au.core) and version (e.g., 1.1.0-preview). -Choose a dependency mode: -Current Recursive: Import all dependencies listed in package.json recursively. 
-Patch Canonical Versions: Import only canonical FHIR packages (e.g., hl7.fhir.r4.core). -Tree Shaking: Import only dependencies containing resources actually used by the main package. -Click Import to download the package and dependencies. -Manage IGs -Go to Manage FHIR Packages (/view-igs) to view downloaded and processed IGs. - -Actions: -Process: Extract metadata (resource types, profiles, must-support elements, examples). -Unload: Remove processed IG data from the database. -Delete: Remove package files from the filesystem. -Duplicates are highlighted for resolution. -View Processed IGs -After processing, view IG details (/view-ig/), including: - -Resource types and profiles. -Must-support elements and examples. -Profile relationships (compliesWithProfile, imposeProfile) if enabled (DISPLAY_PROFILE_RELATIONSHIPS). -Interactive StructureDefinition viewer (Differential, Snapshot, Must Support, Key Elements, Constraints, Terminology, Search Params). -Validate FHIR Resources/Bundles -Navigate to Validate FHIR Sample (/validate-sample). - -Select a package (e.g., hl7.fhir.au.core#1.1.0-preview). -Choose Single Resource or Bundle mode. -Paste or upload FHIR JSON/XML (e.g., a Patient resource). -Submit to view validation errors/warnings. Note: Alpha feature; report issues to GitHub (remove PHI). -Push IGs to a FHIR Server -Go to Push IGs (/push-igs). - -Select a downloaded package. -Enter the Target FHIR Server URL. -Configure Authentication (None, Bearer Token). -Choose options: Include Dependencies, Force Upload (skips comparison check), Dry Run, Verbose Log. -Optionally filter by Resource Types (comma-separated) or Skip Specific Files (paths within package, comma/newline separated). -Click Push to FHIR Server to upload resources. Canonical resources are checked before upload. Identical resources are skipped unless Force Upload is checked. -Monitor progress in the live console. -Upload Test Data -Navigate to Upload Test Data (/upload-test-data). - -Enter the Target FHIR Server URL. -Configure Authentication (None, Bearer Token). -Select one or more .json, .xml files, or a single .zip file containing test resources. -Optionally check Validate Resources Before Upload? and select a Validation Profile Package. -Choose Upload Mode: -Individual Resources: Uploads each resource one by one in dependency order. -Transaction Bundle: Uploads all resources in a single transaction. -Optionally check Use Conditional Upload (Individual Mode Only)? to use If-Match headers for updates. -Choose Error Handling: -Stop on First Error: Halts the process if any validation or upload fails. -Continue on Error: Reports errors but attempts to process/upload remaining resources. -Click Upload and Process. The tool parses files, optionally validates, analyzes dependencies, topologically sorts resources, and uploads them according to selected options. -Monitor progress in the streaming log output. -Convert FHIR to FSH -Navigate to FSH Converter (/fsh-converter). - -Optionally select a package for context (e.g., hl7.fhir.au.core#1.1.0-preview). -Choose input mode: -Upload File: Upload a FHIR JSON/XML file. -Paste Text: Paste FHIR JSON/XML content. -Configure options: -Output Style: file-per-definition, group-by-fsh-type, group-by-profile, single-file. -Log Level: error, warn, info, debug. -FHIR Version: R4, R4B, R5, or auto-detect. -Fishing Trip: Enable round-trip validation with SUSHI, generating a comparison report. -Dependencies: Specify additional packages (e.g., hl7.fhir.us.core@6.1.0, one per line). 
-Indent Rules: Enable context path indentation for readable FSH. -Meta Profile: Choose only-one, first, or none for meta.profile handling. -Alias File: Upload an FSH file with aliases (e.g., $MyAlias = http://example.org). -No Alias: Disable automatic alias generation. -Click Convert to FSH to generate and display FSH output, with a waiting spinner (light/dark theme) during processing. -If Fishing Trip is enabled, view the comparison report via the "Click here for SUSHI Validation" badge button. -Download the result as a .fsh file. -Retrieve and Split Bundles -Navigate to Retrieve/Split Data (/retrieve-split-data). - -Retrieve Bundles from Server: - -Enter the FHIR Server URL (defaults to the proxy if empty). -Select one or more Resource Types to retrieve (e.g., Patient, Observation). -Optionally check Fetch Referenced Resources. -If checked, further optionally check Fetch Full Reference Bundles to retrieve entire bundles for each referenced type (e.g., all Patients if a Patient is referenced) instead of individual resources by ID. -Click Retrieve Bundles. -Monitor progress in the streaming log. A ZIP file containing the retrieved bundles/resources will be prepared for download. -Split Uploaded Bundles: - -Upload a ZIP file containing FHIR bundles (JSON format). -Click Split Bundles. -A ZIP file containing individual resources extracted from the bundles will be prepared for download. -Explore FHIR Operations -Navigate to FHIR UI Operations (/fhir-ui-operations). - -Toggle between local HAPI (/fhir) or a custom FHIR server. -Click Fetch Metadata to load the server’s CapabilityStatement. -Select a resource type (e.g., Patient, Observation) or System to view operations: -System operations: GET /metadata, POST /, GET /_history, GET/POST /$diff, POST /$reindex, POST /$expunge, etc. -Resource operations: GET Patient/:id, POST Observation/_search, etc. -Use Try it out to input parameters or request bodies, then Execute to view results in JSON, XML, or narrative formats. - -### Configure Embedded HAPI FHIR Server (Standalone Mode) -For users running the **Standalone version**, which includes an embedded HAPI FHIR server. -1. Navigate to **Configure HAPI FHIR** (`/config-hapi`). -2. The page displays the content of the HAPI FHIR server's `application.yaml` file. -3. You can edit the configuration directly in the text area. - * *Caution: Incorrect modifications can break the HAPI FHIR server.* -4. Click **Save Configuration** to apply your changes to the `application.yaml` file. -5. Click **Restart Tomcat** to restart the HAPI FHIR server and load the new configuration. The restart process may take a few moments. - -API Usage -Import IG -Bash - -curl -X POST http://localhost:5000/api/import-ig \ --H "Content-Type: application/json" \ --H "X-API-Key: your-api-key" \ --d '{"package_name": "hl7.fhir.au.core", "version": "1.1.0-preview", "dependency_mode": "recursive"}' -Returns complies_with_profiles, imposed_profiles, and duplicate_packages_present info. 
- -### Refresh Package Cache (Background Task) -```bash -curl -X POST http://localhost:5000/api/refresh-cache-task \ --H "X-API-Key: your-api-key" - -Push IG -Bash - -curl -X POST http://localhost:5000/api/push-ig \ --H "Content-Type: application/json" \ --H "Accept: application/x-ndjson" \ --H "X-API-Key: your-api-key" \ --d '{ - "package_name": "hl7.fhir.au.core", - "version": "1.1.0-preview", - "fhir_server_url": "http://localhost:8080/fhir", - "include_dependencies": true, - "force_upload": false, - "dry_run": false, - "verbose": false, - "auth_type": "none" - }' -Returns a streaming NDJSON response with progress and final summary. - -Upload Test Data -Bash - -curl -X POST http://localhost:5000/api/upload-test-data \ --H "X-API-Key: your-api-key" \ --H "Accept: application/x-ndjson" \ --F "fhir_server_url=http://your-fhir-server/fhir" \ --F "auth_type=bearerToken" \ --F "auth_token=YOUR_TOKEN" \ --F "upload_mode=individual" \ --F "error_handling=continue" \ --F "validate_before_upload=true" \ --F "validation_package_id=hl7.fhir.r4.core#4.0.1" \ --F "use_conditional_uploads=true" \ --F "test_data_files=@/path/to/your/patient.json" \ --F "test_data_files=@/path/to/your/observations.zip" -Returns a streaming NDJSON response with progress and final summary. Uses multipart/form-data for file uploads. - -Retrieve Bundles -Bash - -curl -X POST http://localhost:5000/api/retrieve-bundles \ --H "X-API-Key: your-api-key" \ --H "Accept: application/x-ndjson" \ --F "fhir_server_url=http://your-fhir-server/fhir" \ --F "resources=Patient" \ --F "resources=Observation" \ --F "validate_references=true" \ --F "fetch_reference_bundles=false" -Returns a streaming NDJSON response with progress. The X-Zip-Path header in the final response part will contain the path to download the ZIP archive (e.g., /tmp/retrieved_bundles_datetime.zip). - -Split Bundles -Bash - -curl -X POST http://localhost:5000/api/split-bundles \ --H "X-API-Key: your-api-key" \ --H "Accept: application/x-ndjson" \ --F "split_bundle_zip_path=@/path/to/your/bundles.zip" -Returns a streaming NDJSON response. The X-Zip-Path header in the final response part will contain the path to download the ZIP archive of split resources. - -Validate Resource/Bundle -Not yet exposed via API; use the UI at /validate-sample. - -Configuration Options -Located in app.py: - -VALIDATE_IMPOSED_PROFILES: (Default: True) Validates resources against imposed profiles during push. -DISPLAY_PROFILE_RELATIONSHIPS: (Default: True) Shows compliesWithProfile and imposeProfile in the UI. -FHIR_PACKAGES_DIR: (Default: /app/instance/fhir_packages) Stores .tgz packages and metadata. -UPLOAD_FOLDER: (Default: /app/static/uploads) Stores GoFSH output files and FSH comparison reports. -SECRET_KEY: Required for CSRF protection and sessions. Set via environment variable or directly. -API_KEY: Required for API authentication. Set via environment variable or directly. -MAX_CONTENT_LENGTH: (Default: Flask default) Max size for HTTP request body (e.g., 16 * 1024 * 1024 for 16MB). Important for large uploads. -MAX_FORM_PARTS: (Default: Werkzeug default, often 1000) Default max number of form parts. Overridden for /api/upload-test-data by CustomFormDataParser. 
- -### Get HAPI FHIR Configuration (Standalone Mode) -```bash -curl -X GET http://localhost:5000/api/config \ --H "X-API-Key: your-api-key" - -Save HAPI FHIR Configuration: -curl -X POST http://localhost:5000/api/config \ --H "Content-Type: application/json" \ --H "X-API-Key: your-api-key" \ --d '{"your_yaml_key": "your_value", ...}' # Send the full YAML content as JSON - -Restart HAPI FHIR Server: -curl -X POST http://localhost:5000/api/restart-tomcat \ --H "X-API-Key: your-api-key" - -Testing -The project includes a test suite covering UI, API, database, file operations, and security. - -Test Prerequisites: - -pytest: For running tests. -pytest-mock: For mocking dependencies. Install: pip install pytest pytest-mock -Running Tests: - -Bash - -cd -pytest tests/test_app.py -v -Test Coverage: - -UI Pages: Homepage, Import IG, Manage IGs, Push IGs, Validate Sample, View Processed IG, FSH Converter, Upload Test Data, Retrieve/Split Data. -API Endpoints: POST /api/import-ig, POST /api/push-ig, GET /get-structure, GET /get-example, POST /api/upload-test-data, POST /api/retrieve-bundles, POST /api/split-bundles. -Database: IG processing, unloading, viewing. -File Operations: Package processing, deletion, FSH output, ZIP handling. -Security: CSRF protection, flash messages, secret key. -FSH Converter: Form submission, file/text input, GoFSH execution, Fishing Trip comparison. -Upload Test Data: Parsing, dependency graph, sorting, upload modes, validation, conditional uploads. -Development Notes -Background -The toolkit addresses the need for a comprehensive FHIR IG management tool, with recent enhancements for resource validation, FSH conversion with advanced GoFSH features, flexible versioning, improved IG pushing, dependency-aware test data uploading, and bundle retrieval/splitting, making it a versatile platform for FHIR developers. - -Technical Decisions -Flask: Lightweight and flexible for web development. -SQLite: Simple for development; consider PostgreSQL for production. -Bootstrap 5.3.3: Responsive UI with custom styling. -Lottie-Web: Renders themed animations for FSH conversion waiting spinner. -GoFSH/SUSHI: Integrated via Node.js for advanced FSH conversion and round-trip validation. -Docker: Ensures consistent deployment with Flask and HAPI FHIR. -Flexible Versioning: Supports non-standard IG versions (e.g., -preview, -ballot). -Live Console/Streaming: Real-time feedback for complex operations (Push, Upload Test Data, FSH, Retrieve Bundles). -Validation: Alpha feature with ongoing FHIRPath improvements. -Dependency Management: Uses topological sort for Upload Test Data feature. -Form Parsing: Uses custom Werkzeug parser for Upload Test Data to handle large numbers of files. -Recent Updates -* Enhanced package search page with caching, detailed views (dependencies, dependents, version history), and background cache refresh. -Upload Test Data Enhancements (April 2025): -Added optional Pre-Upload Validation against selected IG profiles. -Added optional Conditional Uploads (GET + POST/PUT w/ If-Match) for individual mode. -Implemented robust XML parsing using fhir.resources library (when available). -Fixed 413 Request Entity Too Large errors for large file counts using a custom Werkzeug FormDataParser. -Path: templates/upload_test_data.html, app.py, services.py, forms.py. -Push IG Enhancements (April 2025): -Added semantic comparison to skip uploading identical resources. -Added "Force Upload" option to bypass comparison. -Improved handling of canonical resources (search before PUT/POST). 
-Added filtering by specific files to skip during push. -More detailed summary report in stream response. -Path: templates/cp_push_igs.html, app.py, services.py. -Waiting Spinner for FSH Converter (April 2025): -Added a themed (light/dark) Lottie animation spinner during FSH execution. -Path: templates/fsh_converter.html, static/animations/, static/js/lottie-web.min.js. -Advanced FSH Converter (April 2025): -Added support for GoFSH advanced options: --fshing-trip, --dependency, --indent, --meta-profile, --alias-file, --no-alias. -Displays Fishing Trip comparison reports. -Path: templates/fsh_converter.html, app.py, services.py, forms.py. -(New) Retrieve and Split Data (May 2025): -Added UI and API for retrieving bundles from a FHIR server by resource type. -Added options to fetch referenced resources (individually or as full type bundles). -Added functionality to split uploaded ZIP files of bundles into individual resources. -Streaming log for retrieval and ZIP download for results. -Paths: templates/retrieve_split_data.html, app.py, services.py, forms.py. -Known Issues and Workarounds -Favicon 404: Clear browser cache or verify /app/static/favicon.ico. -CSRF Errors: Set FLASK_SECRET_KEY and ensure {{ form.hidden_tag() }} in forms. -Import Fails: Check package name/version and connectivity. -Validation Accuracy: Alpha feature; report issues to GitHub (remove PHI). -Package Parsing: Non-standard .tgz filenames may parse incorrectly. Fallback uses name-only parsing. -Permissions: Ensure instance/ and static/uploads/ are writable. -GoFSH/SUSHI Errors: Check ./logs/flask_err.log for ERROR:services:GoFSH failed. Ensure valid FHIR inputs and SUSHI installation. -Upload Test Data XML Parsing: Relies on fhir.resources library for full validation; basic parsing used as fallback. Complex XML structures might not be fully analyzed for dependencies with basic parsing. Prefer JSON for reliable dependency analysis. -413 Request Entity Too Large: Primarily handled by CustomFormDataParser for /api/upload-test-data. Check the parser's max_form_parts limit if still occurring. MAX_CONTENT_LENGTH in app.py controls overall size. Reverse proxy limits (client_max_body_size in Nginx) might also apply. - - -Future Improvements -Upload Test Data: Improve XML parsing further (direct XML->fhir.resource object if possible), add visual progress bar, add upload order preview, implement transaction bundle size splitting, add 'Clear Target Server' option (with confirmation). -Validation: Enhance FHIRPath for complex constraints; add API endpoint. -Sorting: Sort IG versions in /view-igs (e.g., ascending). -Duplicate Resolution: Options to keep latest version or merge resources. -Production Database: Support PostgreSQL. -Error Reporting: Detailed validation error paths in the UI. -FSH Enhancements: Add API endpoint for FSH conversion; support inline instance construction. -FHIR Operations: Add complex parameter support (e.g., /$diff with left/right). -Retrieve/Split Data: Add option to filter resources during retrieval (e.g., by date, specific IDs). -Completed Items -Testing suite with basic coverage. -API endpoints for POST /api/import-ig and POST /api/push-ig. -Flexible versioning (-preview, -ballot). -CSRF fixes for forms. -Resource validation UI (alpha). -FSH Converter with advanced GoFSH features and waiting spinner. -Push IG enhancements (force upload, semantic comparison, canonical handling, skip files). 
-Upload Test Data feature with dependency sorting, multiple upload modes, pre-upload validation, conditional uploads, robust XML parsing, and fix for large file counts. -Retrieve and Split Data functionality with reference fetching and ZIP download. -Far-Distant Improvements -Cache Service: Use Redis for IG metadata caching. -Database Optimization: Composite index on ProcessedIg.package_name and ProcessedIg.version. - - -Directory Structure -FHIRFLARE-IG-Toolkit/ -├── app.py # Main Flask application -├── Build and Run for first time.bat # Windows script for first-time Docker setup -├── docker-compose.yml # Docker Compose configuration -├── Dockerfile # Docker configuration -├── forms.py # Form definitions -├── LICENSE.md # Apache 2.0 License -├── README.md # Project documentation -├── requirements.txt # Python dependencies -├── Run.bat # Windows script for running Docker -├── services.py # Logic for IG import, processing, validation, pushing, FSH conversion, test data upload, retrieve/split -├── supervisord.conf # Supervisor configuration -├── hapi-fhir-Setup/ -│ ├── README.md # HAPI FHIR setup instructions -│ └── target/ -│ └── classes/ -│ └── application.yaml # HAPI FHIR configuration -├── instance/ -│ ├── fhir_ig.db # SQLite database -│ ├── fhir_ig.db.old # Database backup -│ └── fhir_packages/ # Stored IG packages and metadata -│ ├── ... (example packages) ... -├── logs/ -│ ├── flask.log # Flask application logs -│ ├── flask_err.log # Flask error logs -│ ├── supervisord.log # Supervisor logs -│ ├── supervisord.pid # Supervisor PID file -│ ├── tomcat.log # Tomcat logs for HAPI FHIR -│ └── tomcat_err.log # Tomcat error logs -├── static/ -│ ├── animations/ -│ │ ├── loading-dark.json # Dark theme spinner animation -│ │ └── loading-light.json # Light theme spinner animation -│ ├── favicon.ico # Application favicon -│ ├── FHIRFLARE.png # Application logo -│ ├── js/ -│ │ └── lottie-web.min.js # Lottie library for spinner -│ └── uploads/ -│ ├── output.fsh # Generated FSH output (temp location) -│ └── fsh_output/ # GoFSH output directory -│ ├── ... (example GoFSH output) ... -├── templates/ -│ ├── base.html # Base template -│ ├── cp_downloaded_igs.html # UI for managing IGs -│ ├── cp_push_igs.html # UI for pushing IGs -│ ├── cp_view_processed_ig.html # UI for viewing processed IGs -│ ├── fhir_ui.html # UI for FHIR API explorer -│ ├── fhir_ui_operations.html # UI for FHIR server operations -│ ├── fsh_converter.html # UI for FSH conversion -│ ├── import_ig.html # UI for importing IGs -│ ├── index.html # Homepage -│ ├── retrieve_split_data.html # UI for Retrieve and Split Data -│ ├── upload_test_data.html # UI for Uploading Test Data -│ ├── validate_sample.html # UI for validating resources/bundles -│ ├── config_hapi.html # UI for HAPI FHIR Configuration -│ └── _form_helpers.html # Form helper macros -├── tests/ -│ └── test_app.py # Test suite -└── hapi-fhir-jpaserver/ # HAPI FHIR server resources (if Standalone) - -Contributing -Fork the repository. -Create a feature branch (git checkout -b feature/your-feature). -Commit changes (git commit -m "Add your feature"). -Push to your branch (git push origin feature/your-feature). -Open a Pull Request. -Ensure code follows PEP 8 and includes tests in tests/test_app.py. - -Troubleshooting -Favicon 404: Clear browser cache or verify /app/static/favicon.ico: docker exec -it curl http://localhost:5000/static/favicon.ico -CSRF Errors: Set FLASK_SECRET_KEY and ensure {{ form.hidden_tag() }} in forms. -Import Fails: Check package name/version and connectivity. 
-Validation Accuracy: Alpha feature; report issues to GitHub (remove PHI). -Package Parsing: Non-standard .tgz filenames may parse incorrectly. Fallback uses name-only parsing. -Permissions: Ensure instance/ and static/uploads/ are writable: chmod -R 777 instance static/uploads logs -GoFSH/SUSHI Errors: Check ./logs/flask_err.log for ERROR:services:GoFSH failed. Ensure valid FHIR inputs and SUSHI installation: docker exec -it sushi --version -413 Request Entity Too Large: Increase MAX_CONTENT_LENGTH and MAX_FORM_PARTS in app.py. If using a reverse proxy (e.g., Nginx), increase its client_max_body_size setting as well. Ensure the application/container is fully restarted/rebuilt. -License -Licensed under the Apache 2.0 License. See LICENSE.md for details. \ No newline at end of file diff --git a/README_INTEGRATION FHIRVINE as Moduel in FLARE.md b/README_INTEGRATION FHIRVINE as Moduel in FLARE.md deleted file mode 100644 index 650805a..0000000 --- a/README_INTEGRATION FHIRVINE as Moduel in FLARE.md +++ /dev/null @@ -1,151 +0,0 @@ -# Integrating FHIRVINE as a Module in FHIRFLARE - -## Overview - -FHIRFLARE is a Flask-based FHIR Implementation Guide (IG) toolkit for managing and validating FHIR packages. This guide explains how to integrate FHIRVINE—a SMART on FHIR proxy—as a module within FHIRFLARE, enabling OAuth2 authentication and FHIR request proxying directly in the application. This modular approach embeds FHIRVINE’s functionality into FHIRFLARE, avoiding the need for a separate proxy service. - -## Prerequisites - -- FHIRFLARE repository cloned: `https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit`. -- FHIRVINE repository cloned: ``. -- Python 3.11 and dependencies installed (`requirements.txt` from both projects). -- A FHIR server (e.g., `http://hapi.fhir.org/baseR4`). - -## Integration Steps - -### 1. Prepare FHIRFLARE Structure - -Ensure FHIRFLARE’s file structure supports modular integration. It should look like: - -``` -FHIRFLARE-IG-Toolkit/ -├── app.py -├── services.py -├── templates/ -├── static/ -└── requirements.txt -``` - -### 2. Copy FHIRVINE Files into FHIRFLARE - -FHIRVINE’s core functionality (OAuth2 proxy, app registration) will be integrated as a Flask Blueprint. - -- **Copy Files**: - - - Copy `smart_proxy.py`, `forms.py`, `models.py`, and `app.py` (relevant parts) from FHIRVINE into a new `fhirvine/` directory in FHIRFLARE: - - ``` - FHIRFLARE-IG-Toolkit/ - ├── fhirvine/ - │ ├── smart_proxy.py - │ ├── forms.py - │ ├── models.py - │ └── __init__.py - ``` - - - Copy FHIRVINE’s templates (e.g., `app_gallery/`, `configure/`, `test_client.html`) into `FHIRFLARE-IG-Toolkit/templates/` while maintaining their folder structure. - -- **Add Dependencies**: - - - Add FHIRVINE’s dependencies to `requirements.txt` (e.g., `authlib`, `flasgger`, `flask-sqlalchemy`). - -### 3. Modify FHIRVINE Code as a Module - -- **Create Blueprint in** `fhirvine/__init__.py`: - - ```python - from flask import Blueprint - - fhirvine_bp = Blueprint('fhirvine', __name__, template_folder='templates') - - from .smart_proxy import * - ``` - - This registers FHIRVINE as a Flask Blueprint. - -- **Update** `smart_proxy.py`: - - - Replace direct `app.route` decorators with `fhirvine_bp.route`. For example: - - ```python - @fhirvine_bp.route('/authorize', methods=['GET', 'POST']) - def authorize(): - # Existing authorization logic - ``` - -### 4. 
Integrate FHIRVINE Blueprint into FHIRFLARE - -- **Update** `app.py` **in FHIRFLARE**: - - - Import and register the FHIRVINE Blueprint: - - ```python - from fhirvine import fhirvine_bp - from fhirvine.models import database, RegisteredApp, OAuthToken, AuthorizationCode, Configuration - from fhirvine.smart_proxy import configure_oauth - - app = Flask(__name__) - app.config.from_mapping( - SECRET_KEY='your-secure-random-key', - SQLALCHEMY_DATABASE_URI='sqlite:////app/instance/fhirflare.db', - SQLALCHEMY_TRACK_MODIFICATIONS=False, - FHIR_SERVER_URL='http://hapi.fhir.org/baseR4', - PROXY_TIMEOUT=10, - TOKEN_DURATION=3600, - REFRESH_TOKEN_DURATION=86400, - ALLOWED_SCOPES='openid profile launch launch/patient patient/*.read offline_access' - ) - - database.init_app(app) - configure_oauth(app, db=database, registered_app_model=RegisteredApp, oauth_token_model=OAuthToken, auth_code_model=AuthorizationCode) - - app.register_blueprint(fhirvine_bp, url_prefix='/fhirvine') - ``` - -### 5. Update FHIRFLARE Templates - -- **Add FHIRVINE Links to Navbar**: - - - In `templates/base.html`, add links to FHIRVINE features: - - ```html - - - ``` - -### 6. Run and Test - -- **Install Dependencies**: - - ```bash - pip install -r requirements.txt - ``` - -- **Run FHIRFLARE**: - - ```bash - flask db upgrade - flask run --host=0.0.0.0 --port=8080 - ``` - -- **Access FHIRVINE Features**: - - - App Gallery: `http://localhost:8080/fhirvine/app-gallery` - - Test Client: `http://localhost:8080/fhirvine/test-client` - - Proxy Requests: Use `/fhirvine/oauth2/proxy/` within FHIRFLARE. - -## Using FHIRVINE in FHIRFLARE - -- **Register Apps**: Use `/fhirvine/app-gallery` to register SMART apps within FHIRFLARE. -- **Authenticate**: Use `/fhirvine/oauth2/authorize` for OAuth2 flows. -- **Proxy FHIR Requests**: FHIRFLARE can now make FHIR requests via `/fhirvine/oauth2/proxy`, leveraging FHIRVINE’s authentication. - -## Troubleshooting - -- **Route Conflicts**: Ensure no overlapping routes between FHIRFLARE and FHIRVINE. -- **Database Issues**: Verify `SQLALCHEMY_DATABASE_URI` points to the same database. -- **Logs**: Check `flask run` logs for errors. \ No newline at end of file diff --git a/Starting b/Starting deleted file mode 100644 index 2eefb56..0000000 --- a/Starting +++ /dev/null @@ -1 +0,0 @@ -=== Docker containers (Step 7)... 
diff --git a/app.py b/app.py deleted file mode 100644 index 8790493..0000000 --- a/app.py +++ /dev/null @@ -1,3096 +0,0 @@ -import sys -import os -# Make paths relative to the current directory instead of absolute '/app' paths -CURRENT_DIR = os.path.abspath(os.path.dirname(__file__)) -# Introduce app_dir variable that can be overridden by environment -app_dir = os.environ.get('APP_DIR', CURRENT_DIR) -sys.path.append(CURRENT_DIR) -import datetime -import shutil -import queue -from flask import Flask, render_template, request, redirect, url_for, flash, jsonify, Response, current_app, session, send_file, make_response, g -from flask_sqlalchemy import SQLAlchemy -from flask_migrate import Migrate -from flask_wtf import FlaskForm -from flask_wtf.csrf import CSRFProtect -from werkzeug.utils import secure_filename -from werkzeug.formparser import FormDataParser -from werkzeug.exceptions import RequestEntityTooLarge -from urllib.parse import urlparse -from cachetools import TTLCache -from types import SimpleNamespace -import tarfile -import base64 -import json -import logging -import requests -import re -import yaml -import threading -import time # Add time import -import services -from services import ( - services_bp, - construct_tgz_filename, - parse_package_filename, - import_package_and_dependencies, - retrieve_bundles, - split_bundles, - fetch_packages_from_registries, - normalize_package_data, - cache_packages, - HAS_PACKAGING_LIB, - pkg_version, - get_package_description, - safe_parse_version, - import_manual_package_and_dependencies -) -from forms import IgImportForm, ManualIgImportForm, ValidationForm, FSHConverterForm, TestDataUploadForm, RetrieveSplitDataForm -from wtforms import SubmitField -from package import package_bp -from flasgger import Swagger, swag_from # Import Flasgger -from copy import deepcopy -import tempfile -from logging.handlers import RotatingFileHandler - -#app setup -app = Flask(__name__) -app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'your-fallback-secret-key-here') - -# Update paths to be relative to current directory -instance_path = os.path.join(CURRENT_DIR, 'instance') -app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', f'sqlite:///{os.path.join(instance_path, "fhir_ig.db")}') -app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False -app.config['FHIR_PACKAGES_DIR'] = os.path.join(instance_path, 'fhir_packages') -app.config['API_KEY'] = os.environ.get('API_KEY', 'your-fallback-api-key-here') -app.config['VALIDATE_IMPOSED_PROFILES'] = True -app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True -app.config['UPLOAD_FOLDER'] = os.path.join(CURRENT_DIR, 'static', 'uploads') # For GoFSH output -app.config['APP_BASE_URL'] = os.environ.get('APP_BASE_URL', 'http://localhost:5000') -app.config['HAPI_FHIR_URL'] = os.environ.get('HAPI_FHIR_URL', 'http://localhost:8080/fhir') -CONFIG_PATH = os.environ.get('CONFIG_PATH', '/usr/local/tomcat/conf/application.yaml') - -# Basic Swagger configuration -app.config['SWAGGER'] = { - 'title': 'FHIRFLARE IG Toolkit API', - 'uiversion': 3, # Use Swagger UI 3 - 'version': '1.0.0', - 'description': 'API documentation for the FHIRFLARE IG Toolkit. 
This provides access to various FHIR IG management and validation functionalities.', - 'termsOfService': 'https://example.com/terms', # Replace with your terms - 'contact': { - 'name': 'FHIRFLARE Support', - 'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/issues', # Replace with your support URL - 'email': 'xsannz@gmail.com', # Replace with your support email - }, - 'license': { - 'name': 'MIT License', # Or your project's license - 'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/blob/main/LICENSE.md', # Link to your license - }, - 'securityDefinitions': { # Defines how API key security is handled - 'ApiKeyAuth': { - 'type': 'apiKey', - 'name': 'X-API-Key', # The header name for the API key - 'in': 'header', - 'description': 'API Key for accessing protected endpoints.' - } - }, - # 'security': [{'ApiKeyAuth': []}], # Optional: Apply ApiKeyAuth globally to all Flasgger-documented API endpoints by default - # If you set this, individual public endpoints would need 'security': [] in their swag_from spec. - # It's often better to define security per-endpoint in @swag_from. - 'specs_route': '/apidocs/' # URL for the Swagger UI. This makes url_for('flasgger.apidocs') work. -} -swagger = Swagger(app) # Initialize Flasgger with the app. This registers its routes. - - -# Register blueprints immediately after app setup -app.register_blueprint(services_bp, url_prefix='/api') -app.register_blueprint(package_bp) -logging.getLogger(__name__).info("Registered package_bp blueprint") - - - -# Set max upload size (e.g., 12 MB, adjust as needed) -app.config['MAX_CONTENT_LENGTH'] = 6 * 1024 * 1024 - -# In-memory cache with 5-minute TTL -package_cache = TTLCache(maxsize=100, ttl=300) - -# Increase max number of form parts (default is often 1000) -#app.config['MAX_FORM_PARTS'] = 1000 # Allow up to 1000 parts this is a hard coded stop limit in MAX_FORM_PARTS of werkzeug - - -#----------------------------------------------------------------------------------------------------------------------- -# --- Basic Logging Setup (adjust level and format as needed) --- -# Configure root logger first - This sets the foundation -# Set level to DEBUG initially to capture everything, handlers can filter later -logging.basicConfig(level=logging.DEBUG, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', - # Force=True might be needed if basicConfig was called elsewhere implicitly - # force=True - ) - -# Get the application logger (for app-specific logs) -logger = logging.getLogger(__name__) -# Explicitly set the app logger's level (can be different from root) -logger.setLevel(logging.DEBUG) - -# --- Optional: Add File Handler for Debugging --- -# Ensure the instance path exists before setting up the file handler -# Note: This assumes app.instance_path is correctly configured later -# If running this setup *before* app = Flask(), define instance path manually. 
-instance_folder_path_for_log = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'instance') -os.makedirs(instance_folder_path_for_log, exist_ok=True) -log_file_path = os.path.join(instance_folder_path_for_log, 'fhirflare_debug.log') - -file_handler = None # Initialize file_handler to None -try: - # Rotate logs: 5 files, 5MB each - file_handler = RotatingFileHandler(log_file_path, maxBytes=5*1024*1024, backupCount=5, encoding='utf-8') - file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) - # Set the file handler level - DEBUG will capture everything - file_handler.setLevel(logging.DEBUG) - # Add handler to the *root* logger to capture logs from all modules (like services) - logging.getLogger().addHandler(file_handler) - logger.info(f"--- File logging initialized to {log_file_path} (Level: DEBUG) ---") -except Exception as e: - # Log error if file handler setup fails, but continue execution - logger.error(f"Failed to set up file logging to {log_file_path}: {e}", exc_info=True) -# --- End File Handler Setup --- - -#----------------------------------------------------------------------------------------------------------------------- - -try: - import packaging.version as pkg_version - HAS_PACKAGING_LIB = True -except ImportError: - HAS_PACKAGING_LIB = False - # Define a simple fallback parser if needed - class BasicVersion: - def __init__(self, v_str): self.v_str = str(v_str) # Ensure string - def __gt__(self, other): return self.v_str > str(other) - def __lt__(self, other): return self.v_str < str(other) - def __eq__(self, other): return self.v_str == str(other) - def __str__(self): return self.v_str - pkg_version = SimpleNamespace(parse=BasicVersion, InvalidVersion=ValueError) -# --- End Imports --- - - -# --- NEW: Define Custom Form Parser --- -class CustomFormDataParser(FormDataParser): - """Subclass to increase the maximum number of form parts.""" - def __init__(self, *args, **kwargs): - # Set a higher limit for max_form_parts. Adjust value as needed. - # This overrides the default limit checked by Werkzeug's parser. - # Set to a sufficiently high number for your expected maximum file count. 
- super().__init__(*args, max_form_parts=2000, **kwargs) # Example: Allow 2000 parts -# --- END NEW --- - -# Custom logging handler to capture INFO logs from services module -log_queue = queue.Queue() -class StreamLogHandler(logging.Handler): - def __init__(self): - super().__init__(level=logging.INFO) - self.formatter = logging.Formatter('%(levelname)s:%(name)s:%(message)s') - - def emit(self, record): - if record.name == 'services' and record.levelno == logging.INFO: - msg = self.format(record) - log_queue.put(msg) - -# Add custom handler to services logger -services_logger = logging.getLogger('services') -stream_handler = StreamLogHandler() -services_logger.addHandler(stream_handler) - -# <<< ADD THIS CONTEXT PROCESSOR >>> -@app.context_processor -def inject_app_mode(): - """Injects the app_mode into template contexts.""" - return dict(app_mode=app.config.get('APP_MODE', 'standalone')) -# <<< END ADD >>> - -# Read application mode from environment variable, default to 'standalone' -app.config['APP_MODE'] = os.environ.get('APP_MODE', 'standalone').lower() -logger.info(f"Application running in mode: {app.config['APP_MODE']}") -# --- END mode check --- - -# Ensure directories exist and are writable -instance_path = '/app/instance' -packages_path = app.config['FHIR_PACKAGES_DIR'] -logger.debug(f"Instance path configuration: {instance_path}") -logger.debug(f"Database URI: {app.config['SQLALCHEMY_DATABASE_URI']}") -logger.debug(f"Packages path: {packages_path}") - -try: - instance_folder_path = app.instance_path - logger.debug(f"Flask instance folder path: {instance_folder_path}") - os.makedirs(instance_folder_path, exist_ok=True) - os.makedirs(packages_path, exist_ok=True) - os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True) - logger.debug(f"Directories created/verified: Instance: {instance_folder_path}, Packages: {packages_path}") -except Exception as e: - logger.error(f"Failed to create/verify directories: {e}", exc_info=True) - -db = SQLAlchemy(app) -csrf = CSRFProtect(app) -migrate = Migrate(app, db) - -# Add a global application state dictionary for sharing state between threads -app_state = { - 'fetch_failed': False -} - -# @app.route('/clear-cache') -# def clear_cache(): -# """Clears the in-memory package cache, the DB timestamp, and the CachedPackage table.""" -# # Clear in-memory cache -# app.config['MANUAL_PACKAGE_CACHE'] = None -# app.config['MANUAL_CACHE_TIMESTAMP'] = None -# logger.info("In-memory package cache cleared.") - -# # Clear DB timestamp and CachedPackage table -# try: -# # Clear the timestamp -# timestamp_info = RegistryCacheInfo.query.first() -# if timestamp_info: -# timestamp_info.last_fetch_timestamp = None -# db.session.commit() -# logger.info("Database timestamp cleared.") -# else: -# logger.info("No database timestamp found to clear.") - -# # Clear the CachedPackage table -# num_deleted = db.session.query(CachedPackage).delete() -# db.session.commit() -# logger.info(f"Cleared {num_deleted} entries from CachedPackage table.") -# except Exception as db_err: -# db.session.rollback() -# logger.error(f"Failed to clear DB timestamp or CachedPackage table: {db_err}", exc_info=True) -# flash("Failed to clear database cache.", "warning") - -# flash("Package cache cleared. 
Fetching fresh list from registries...", "info") -# # Redirect back to the search page to force a reload and fetch -# return redirect(url_for('search_and_import')) - -# Remove logic from /clear-cache route - it's now handled by the API + background task -@app.route('/clear-cache') -def clear_cache(): - """ - This route is now effectively deprecated if the button uses the API. - If accessed directly, it could just redirect or show a message. - For safety, let it clear only the in-memory part and redirect. - """ - app.config['MANUAL_PACKAGE_CACHE'] = None - app.config['MANUAL_CACHE_TIMESTAMP'] = None - session['fetch_failed'] = False # Reset flag - logger.info("Direct /clear-cache access: Cleared in-memory cache only.") - flash("Cache refresh must be initiated via the 'Clear & Refresh Cache' button.", "info") - return redirect(url_for('search_and_import')) - -# No changes needed in search_and_import logic itself for this fix. - -class ProcessedIg(db.Model): - id = db.Column(db.Integer, primary_key=True) - package_name = db.Column(db.String(128), nullable=False) - version = db.Column(db.String(64), nullable=False) - processed_date = db.Column(db.DateTime, nullable=False) - resource_types_info = db.Column(db.JSON, nullable=False) - must_support_elements = db.Column(db.JSON, nullable=True) - examples = db.Column(db.JSON, nullable=True) - complies_with_profiles = db.Column(db.JSON, nullable=True) - imposed_profiles = db.Column(db.JSON, nullable=True) - optional_usage_elements = db.Column(db.JSON, nullable=True) - # --- ADD THIS LINE --- - search_param_conformance = db.Column(db.JSON, nullable=True) # Stores the extracted conformance map - # --- END ADD --- - __table_args__ = (db.UniqueConstraint('package_name', 'version', name='uq_package_version'),) - -class CachedPackage(db.Model): - id = db.Column(db.Integer, primary_key=True) - package_name = db.Column(db.String(128), nullable=False) - version = db.Column(db.String(64), nullable=False) - author = db.Column(db.String(128)) - fhir_version = db.Column(db.String(64)) - version_count = db.Column(db.Integer) - url = db.Column(db.String(256)) - all_versions = db.Column(db.JSON, nullable=True) - dependencies = db.Column(db.JSON, nullable=True) - latest_absolute_version = db.Column(db.String(64)) - latest_official_version = db.Column(db.String(64)) - canonical = db.Column(db.String(256)) - registry = db.Column(db.String(256)) - __table_args__ = (db.UniqueConstraint('package_name', 'version', name='uq_cached_package_version'),) - -class RegistryCacheInfo(db.Model): - id = db.Column(db.Integer, primary_key=True) # Simple primary key - last_fetch_timestamp = db.Column(db.DateTime(timezone=True), nullable=True) # Store UTC timestamp - - def __repr__(self): - return f'' - -# --- Make sure to handle database migration if you use Flask-Migrate --- -# (e.g., flask db migrate -m "Add search_param_conformance to ProcessedIg", flask db upgrade) -# If not using migrations, you might need to drop and recreate the table (losing existing processed data) -# or manually alter the table using SQLite tools. 
- -def check_api_key(): - api_key = request.headers.get('X-API-Key') - if not api_key and request.is_json: - api_key = request.json.get('api_key') - if not api_key: - logger.error("API key missing in request") - return jsonify({"status": "error", "message": "API key missing"}), 401 - if api_key != app.config['API_KEY']: - logger.error("Invalid API key provided.") - return jsonify({"status": "error", "message": "Invalid API key"}), 401 - logger.debug("API key validated successfully") - return None - -def list_downloaded_packages(packages_dir): - packages = [] - errors = [] - duplicate_groups = {} - logger.debug(f"Scanning packages directory: {packages_dir}") - if not os.path.exists(packages_dir): - logger.warning(f"Packages directory not found: {packages_dir}") - return packages, errors, duplicate_groups - for filename in os.listdir(packages_dir): - if filename.endswith('.tgz'): - full_path = os.path.join(packages_dir, filename) - name = filename[:-4] - version = '' - parsed_name, parsed_version = services.parse_package_filename(filename) - if parsed_name: - name = parsed_name - version = parsed_version - else: - logger.warning(f"Could not parse version from {filename}, using default name.") - errors.append(f"Could not parse {filename}") - try: - with tarfile.open(full_path, "r:gz") as tar: - # Ensure correct path within tarfile - pkg_json_member_path = "package/package.json" - try: - pkg_json_member = tar.getmember(pkg_json_member_path) - fileobj = tar.extractfile(pkg_json_member) - if fileobj: - pkg_data = json.loads(fileobj.read().decode('utf-8-sig')) - name = pkg_data.get('name', name) - version = pkg_data.get('version', version) - fileobj.close() - except KeyError: - logger.warning(f"{pkg_json_member_path} not found in {filename}") - # Keep parsed name/version if package.json is missing - except (tarfile.TarError, json.JSONDecodeError, UnicodeDecodeError) as e: - logger.warning(f"Could not read package.json from {filename}: {e}") - errors.append(f"Error reading {filename}: {str(e)}") - except Exception as e: - logger.error(f"Unexpected error reading package.json from {filename}: {e}", exc_info=True) - errors.append(f"Unexpected error for {filename}: {str(e)}") - - if name and version: # Only add if both name and version are valid - packages.append({'name': name, 'version': version, 'filename': filename}) - else: - logger.warning(f"Skipping package {filename} due to invalid name ('{name}') or version ('{version}')") - errors.append(f"Invalid package {filename}: name='{name}', version='{version}'") - - # Group duplicates - name_counts = {} - for pkg in packages: - name_val = pkg['name'] - name_counts[name_val] = name_counts.get(name_val, 0) + 1 - for name_val, count in name_counts.items(): - if count > 1: - duplicate_groups[name_val] = sorted([p['version'] for p in packages if p['name'] == name_val]) - - logger.debug(f"Found packages: {len(packages)}") - logger.debug(f"Errors during package listing: {errors}") - logger.debug(f"Duplicate groups: {duplicate_groups}") - return packages, errors, duplicate_groups - -@app.route('/') -def index(): - return render_template('index.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - -@app.route('/debug-routes') -@swag_from({ - 'tags': ['Debugging'], - 'summary': 'List all application routes.', - 'description': 'Provides a JSON list of all registered URL rules and their endpoints. 
Useful for development and debugging.', - 'responses': { - '200': { - 'description': 'A list of route strings.', - 'schema': { - 'type': 'array', - 'items': { - 'type': 'string', - 'example': 'Endpoint: my_endpoint, Methods: GET,POST, URL: /my/url' - } - } - } - } - # No API key needed for this one, so you can add: - # 'security': [] -}) -def debug_routes(): - """ - Debug endpoint to list all registered routes and their endpoints. - """ - routes = [] - for rule in app.url_map.iter_rules(): - routes.append(f"Endpoint: {rule.endpoint}, URL: {rule}") - return jsonify(routes) - -@app.route('/api/config', methods=['GET']) -@csrf.exempt -@swag_from({ - 'tags': ['HAPI Configuration'], - 'summary': 'Get HAPI FHIR server configuration.', - 'description': 'Retrieves the current HAPI FHIR server configuration from the application.yaml file.', - 'security': [{'ApiKeyAuth': []}], # Requires API Key - 'responses': { - '200': { - 'description': 'HAPI FHIR configuration.', - 'schema': { 'type': 'object' } # You can be more specific if you know the YAML structure - }, - '500': {'description': 'Error reading configuration file.'} - } -}) -def get_config(): - try: - with open(CONFIG_PATH, 'r') as file: - config = yaml.safe_load(file) - return jsonify(config) - except Exception as e: - logger.error(f"Error reading config file: {e}") - return jsonify({'error': str(e)}), 500 - -@app.route('/api/config', methods=['POST']) -@csrf.exempt -@swag_from({ - 'tags': ['HAPI Configuration'], - 'summary': 'Save HAPI FHIR server configuration.', - 'description': 'Saves the provided HAPI FHIR server configuration to the application.yaml file.', - 'security': [{'ApiKeyAuth': []}], # Requires API Key - 'parameters': [ - { - 'name': 'config_payload', # Changed name to avoid conflict with function arg - 'in': 'body', - 'required': True, - 'description': 'The HAPI FHIR configuration object.', - 'schema': { - 'type': 'object', - # Add example properties if you know them - 'example': {'fhir_server': {'base_url': 'http://localhost:8080/fhir'}} - } - } - ], - 'responses': { - '200': {'description': 'Configuration saved successfully.'}, - '400': {'description': 'Invalid request body.'}, - '500': {'description': 'Error saving configuration file.'} - } -}) -def save_config(): - try: - config = request.get_json() - with open(CONFIG_PATH, 'w') as file: - yaml.safe_dump(config, file, default_flow_style=False) - logger.info("Configuration saved successfully") - return jsonify({'message': 'Configuration saved'}) - except Exception as e: - logger.error(f"Error saving config file: {e}") - return jsonify({'error': str(e)}), 500 - -@app.route('/api/restart-tomcat', methods=['POST']) -@csrf.exempt -@swag_from({ - 'tags': ['HAPI Configuration'], - 'summary': 'Restart the Tomcat server.', - 'description': 'Attempts to restart the Tomcat server using supervisorctl. 
Requires appropriate server permissions.', - 'security': [{'ApiKeyAuth': []}], # Requires API Key - 'responses': { - '200': {'description': 'Tomcat restart initiated successfully.'}, - '500': {'description': 'Error restarting Tomcat (e.g., supervisorctl not found or command failed).'} - } -}) -def restart_tomcat(): - try: - result = subprocess.run(['supervisorctl', 'restart', 'tomcat'], capture_output=True, text=True) - if result.returncode == 0: - logger.info("Tomcat restarted successfully") - return jsonify({'message': 'Tomcat restarted'}) - else: - logger.error(f"Failed to restart Tomcat: {result.stderr}") - return jsonify({'error': result.stderr}), 500 - except Exception as e: - logger.error(f"Error restarting Tomcat: {e}") - return jsonify({'error': str(e)}), 500 - -@app.route('/config-hapi') -def config_hapi(): - return render_template('config_hapi.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - -@app.route('/manual-import-ig', methods=['GET', 'POST']) -def manual_import_ig(): - """ - Handle manual import of FHIR Implementation Guides using file or URL uploads. - Uses ManualIgImportForm to support file and URL inputs without registry option. - """ - form = ManualIgImportForm() - is_ajax = request.headers.get('X-Requested-With') == 'XMLHttpRequest' or request.headers.get('HX-Request') == 'true' - - if form.validate_on_submit(): - import_mode = form.import_mode.data - dependency_mode = form.dependency_mode.data - resolve_dependencies = form.resolve_dependencies.data - while not log_queue.empty(): - log_queue.get() - - try: - if import_mode == 'file': - tgz_file = form.tgz_file.data - temp_dir = tempfile.mkdtemp() - temp_path = os.path.join(temp_dir, secure_filename(tgz_file.filename)) - tgz_file.save(temp_path) - result = import_manual_package_and_dependencies(temp_path, dependency_mode=dependency_mode, is_file=True, resolve_dependencies=resolve_dependencies) - identifier = result.get('requested', tgz_file.filename) - shutil.rmtree(temp_dir, ignore_errors=True) - elif import_mode == 'url': - tgz_url = form.tgz_url.data - result = import_manual_package_and_dependencies(tgz_url, dependency_mode=dependency_mode, is_url=True, resolve_dependencies=resolve_dependencies) - identifier = result.get('requested', tgz_url) - - if result['errors'] and not result['downloaded']: - error_msg = result['errors'][0] - simplified_msg = error_msg - if "HTTP error" in error_msg and "404" in error_msg: - simplified_msg = "Package not found (404). Check input." - elif "HTTP error" in error_msg: - simplified_msg = f"Error: {error_msg.split(': ', 1)[-1]}" - elif "Connection error" in error_msg: - simplified_msg = "Could not connect to source." - flash(f"Failed to import {identifier}: {simplified_msg}", "error") - logger.error(f"Manual import failed for {identifier}: {error_msg}") - if is_ajax: - return jsonify({"status": "error", "message": simplified_msg}), 400 - return render_template('manual_import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - else: - if result['errors']: - flash(f"Partially imported {identifier} with errors. Check logs.", "warning") - for err in result['errors']: - logger.warning(f"Manual import warning for {identifier}: {err}") - else: - flash(f"Successfully imported {identifier}! 
Mode: {dependency_mode}", "success") - if is_ajax: - return jsonify({"status": "success", "message": f"Imported {identifier}", "redirect": url_for('view_igs')}), 200 - return redirect(url_for('view_igs')) - except Exception as e: - logger.error(f"Unexpected error during manual IG import: {str(e)}", exc_info=True) - flash(f"An unexpected error occurred: {str(e)}", "error") - if is_ajax: - return jsonify({"status": "error", "message": str(e)}), 500 - return render_template('manual_import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - else: - for field, errors in form.errors.items(): - for error in errors: - flash(f"Error in {getattr(form, field).label.text}: {error}", "danger") - if is_ajax: - return jsonify({"status": "error", "message": "Form validation failed", "errors": form.errors}), 400 - return render_template('manual_import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - -@app.route('/import-ig', methods=['GET', 'POST']) -def import_ig(): - form = IgImportForm() - # Check for HTMX request using both X-Requested-With and HX-Request headers - is_ajax = request.headers.get('X-Requested-With') == 'XMLHttpRequest' or request.headers.get('HX-Request') == 'true' - - if form.validate_on_submit(): - name = form.package_name.data - version = form.package_version.data - dependency_mode = form.dependency_mode.data - - # Clear log queue for this request - while not log_queue.empty(): - log_queue.get() - - try: - result = import_package_and_dependencies(name, version, dependency_mode=dependency_mode) - if result['errors'] and not result['downloaded']: - error_msg = result['errors'][0] - simplified_msg = error_msg - if "HTTP error" in error_msg and "404" in error_msg: - simplified_msg = "Package not found on registry (404). Check name and version." - elif "HTTP error" in error_msg: - simplified_msg = f"Registry error: {error_msg.split(': ', 1)[-1]}" - elif "Connection error" in error_msg: - simplified_msg = "Could not connect to the FHIR package registry." - flash(f"Failed to import {name}#{version}: {simplified_msg}", "error") - logger.error(f"Import failed critically for {name}#{version}: {error_msg}") - if is_ajax: - return jsonify({"status": "error", "message": simplified_msg}), 400 - return render_template('import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - else: - if result['errors']: - flash(f"Partially imported {name}#{version} with errors during dependency processing. Check logs.", "warning") - for err in result['errors']: - logger.warning(f"Import warning for {name}#{version}: {err}") - else: - flash(f"Successfully downloaded {name}#{version} and dependencies! 
Mode: {dependency_mode}", "success") - if is_ajax: - return jsonify({"status": "success", "message": f"Imported {name}#{version}", "redirect": url_for('view_igs')}), 200 - return redirect(url_for('view_igs')) - except Exception as e: - logger.error(f"Unexpected error during IG import: {str(e)}", exc_info=True) - flash(f"An unexpected error occurred downloading the IG: {str(e)}", "error") - if is_ajax: - return jsonify({"status": "error", "message": str(e)}), 500 - return render_template('import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - else: - for field, errors in form.errors.items(): - for error in errors: - flash(f"Error in {getattr(form, field).label.text}: {error}", "danger") - if is_ajax: - return jsonify({"status": "error", "message": "Form validation failed", "errors": form.errors}), 400 - return render_template('import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - -# Function to perform the actual refresh logic in the background -def perform_cache_refresh_and_log(): - """Clears caches, fetches, normalizes, and caches packages, logging progress.""" - # Ensure this runs within an app context to access db, config etc. - with app.app_context(): - logger.info("--- Starting Background Cache Refresh ---") - try: - # 1. Clear In-Memory Cache - app.config['MANUAL_PACKAGE_CACHE'] = None - app.config['MANUAL_CACHE_TIMESTAMP'] = None - logger.info("In-memory cache cleared.") - - # 2. Clear DB Timestamp and CachedPackage Table - try: - timestamp_info = RegistryCacheInfo.query.first() - if timestamp_info: - timestamp_info.last_fetch_timestamp = None - # Don't commit yet, commit at the end - num_deleted = db.session.query(CachedPackage).delete() - db.session.flush() # Apply delete within transaction - logger.info(f"Cleared {num_deleted} entries from CachedPackage table (DB).") - except Exception as db_clear_err: - db.session.rollback() - logger.error(f"Failed to clear DB cache tables: {db_clear_err}", exc_info=True) - log_queue.put(f"ERROR: Failed to clear DB - {db_clear_err}") - log_queue.put("[DONE]") # Signal completion even on error - return # Stop processing - - # 3. Fetch from Registries - logger.info("Fetching fresh package list from registries...") - fetch_failed = False - try: - raw_packages = fetch_packages_from_registries(search_term='') # Uses services logger internally - if not raw_packages: - logger.warning("No packages returned from registries during refresh.") - fetch_failed = True - normalized_packages = [] - else: - # 4. Normalize Data - logger.info("Normalizing fetched package data...") - normalized_packages = normalize_package_data(raw_packages) # Uses services logger - - except Exception as fetch_norm_err: - logger.error(f"Error during fetch/normalization: {fetch_norm_err}", exc_info=True) - fetch_failed = True - normalized_packages = [] - log_queue.put(f"ERROR: Failed during fetch/normalization - {fetch_norm_err}") - - - # 5. Update In-Memory Cache (always update, even if empty on failure) - now_ts = datetime.datetime.now(datetime.timezone.utc) - app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages - app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts - app_state['fetch_failed'] = fetch_failed # Update app_state instead of session - logger.info(f"Updated in-memory cache with {len(normalized_packages)} packages. Fetch failed: {fetch_failed}") - - # 6. 
Cache in Database (if successful fetch) - if not fetch_failed and normalized_packages: - try: - logger.info("Caching packages in database...") - cache_packages(normalized_packages, db, CachedPackage) # Uses services logger - except Exception as cache_err: - db.session.rollback() # Rollback DB changes on caching error - logger.error(f"Failed to cache packages in database: {cache_err}", exc_info=True) - log_queue.put(f"ERROR: Failed to cache packages in DB - {cache_err}") - log_queue.put("[DONE]") # Signal completion - return # Stop processing - elif fetch_failed: - logger.warning("Skipping database caching due to fetch failure.") - else: # No packages but fetch didn't fail (edge case?) - logger.info("No packages to cache in database.") - - - # 7. Update DB Timestamp (only if fetch didn't fail) - if not fetch_failed: - if timestamp_info: - timestamp_info.last_fetch_timestamp = now_ts - else: - timestamp_info = RegistryCacheInfo(last_fetch_timestamp=now_ts) - db.session.add(timestamp_info) - logger.info(f"Set DB timestamp to {now_ts}.") - else: - # Ensure timestamp_info is not added if fetch failed and it was new - if timestamp_info and timestamp_info in db.new: - db.session.expunge(timestamp_info) - logger.warning("Skipping DB timestamp update due to fetch failure.") - - - # 8. Commit all DB changes (only commit if successful) - if not fetch_failed: - db.session.commit() - logger.info("Database changes committed.") - else: - # Rollback any potential flushed changes if fetch failed - db.session.rollback() - logger.info("Rolled back DB changes due to fetch failure.") - - except Exception as e: - db.session.rollback() # Rollback on any other unexpected error - logger.error(f"Critical error during background cache refresh: {e}", exc_info=True) - log_queue.put(f"CRITICAL ERROR: {e}") - finally: - logger.info("--- Background Cache Refresh Finished ---") - log_queue.put("[DONE]") # Signal completion - - -@app.route('/api/refresh-cache-task', methods=['POST']) -@csrf.exempt # Ensure CSRF is handled if needed, or keep exempt -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Refresh FHIR package cache.', - 'description': 'Triggers an asynchronous background task to clear and refresh the FHIR package cache from configured registries.', - 'security': [{'ApiKeyAuth': []}], # Requires API Key - 'responses': { - '202': {'description': 'Cache refresh process started in the background.'}, - # Consider if other error codes are possible before task starts - } -}) -def refresh_cache_task(): - """API endpoint to trigger the background cache refresh.""" - # Note: Clearing queue here might interfere if multiple users click concurrently. - # A more robust solution uses per-request queues or task IDs. - # For simplicity, we clear it assuming low concurrency for this action. - while not log_queue.empty(): - try: log_queue.get_nowait() - except queue.Empty: break - - logger.info("Received API request to refresh cache.") - thread = threading.Thread(target=perform_cache_refresh_and_log, daemon=True) - thread.start() - logger.info("Background cache refresh thread started.") - # Return 202 Accepted: Request accepted, processing in background. 
- return jsonify({"status": "accepted", "message": "Cache refresh process started in the background."}), 202 - - -# Modify stream_import_logs - Simpler version: relies on thread putting [DONE] -@app.route('/stream-import-logs') -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Stream package import logs.', - 'description': 'Provides a Server-Sent Events (SSE) stream of logs generated during package import or cache refresh operations. The client should listen for "data:" events. The stream ends with "data: [DONE]".', - 'produces': ['text/event-stream'], - # No API key usually for SSE streams if they are tied to an existing user session/action - # 'security': [], - 'responses': { - '200': { - 'description': 'An event stream of log messages.', - 'schema': { - 'type': 'string', - 'format': 'text/event-stream', - 'example': "data: INFO: Starting import...\ndata: INFO: Package downloaded.\ndata: [DONE]\n\n" - } - } - } -}) -def stream_import_logs(): - logger.debug("SSE connection established to stream-import-logs") - def generate(): - # Directly consume from the shared queue - while True: - try: - # Block-wait on the shared queue with a timeout - msg = log_queue.get(timeout=300) # 5 min timeout on get - clean_msg = str(msg).replace('INFO:services:', '').replace('INFO:app:', '').strip() - yield f"data: {clean_msg}\n\n" - - if msg == '[DONE]': - logger.debug("SSE stream received [DONE] from queue, closing stream.") - break # Exit the generate loop - except queue.Empty: - # Timeout occurred waiting for message or [DONE] - logger.warning("SSE stream timed out waiting for logs. Closing.") - yield "data: ERROR: Timeout waiting for logs.\n\n" - yield "data: [DONE]\n\n" # Still send DONE to signal client closure - break - except GeneratorExit: - logger.debug("SSE client disconnected.") - break # Exit loop if client disconnects - except Exception as e: - logger.error(f"Error in SSE generate loop: {e}", exc_info=True) - yield f"data: ERROR: Server error in log stream - {e}\n\n" - yield "data: [DONE]\n\n" # Send DONE to signal client closure on error - break - - response = Response(generate(), mimetype='text/event-stream') - response.headers['Cache-Control'] = 'no-cache' - response.headers['X-Accel-Buffering'] = 'no' # Useful for Nginx proxying - return response - -@app.route('/view-igs') -def view_igs(): - form = FlaskForm() - processed_igs = ProcessedIg.query.order_by(ProcessedIg.package_name, ProcessedIg.version).all() - processed_ids = {(ig.package_name, ig.version) for ig in processed_igs} - packages_dir = app.config['FHIR_PACKAGES_DIR'] - packages, errors, duplicate_groups = list_downloaded_packages(packages_dir) - if errors: - flash(f"Warning: Errors encountered while listing packages: {', '.join(errors)}", "warning") - colors = ['bg-warning', 'bg-info', 'bg-success', 'bg-danger', 'bg-secondary'] - group_colors = {} - for i, name in enumerate(duplicate_groups.keys()): - group_colors[name] = colors[i % len(colors)] - return render_template('cp_downloaded_igs.html', form=form, packages=packages, - processed_list=processed_igs, processed_ids=processed_ids, - duplicate_groups=duplicate_groups, group_colors=group_colors, - site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now(), - config=app.config) - -@app.route('/about') -def about(): - """Renders the about page.""" - # The app_mode is automatically injected by the context processor - return render_template('about.html', - title="About", # Optional title for the page - site_name='FHIRFLARE IG Toolkit') # Or get from config - - 
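As a rough illustration of how the two endpoints above are meant to be used together, the client sketch below POSTs to /api/refresh-cache-task and then follows /stream-import-logs until the [DONE] sentinel arrives. The base URL and API key value are placeholders; the X-API-Key header mirrors the ApiKeyAuth scheme declared in the Swagger annotations.

import requests

BASE_URL = 'http://localhost:5000'  # placeholder for your deployment
API_KEY = 'your-api-key'            # placeholder

# Kick off the background refresh; the endpoint answers 202 immediately.
resp = requests.post(f'{BASE_URL}/api/refresh-cache-task', headers={'X-API-Key': API_KEY})
print(resp.status_code, resp.json())

# Follow the SSE stream until the server sends the [DONE] sentinel.
with requests.get(f'{BASE_URL}/stream-import-logs', stream=True) as stream:
    for raw_line in stream.iter_lines(decode_unicode=True):
        if not raw_line or not raw_line.startswith('data: '):
            continue  # skip blank keep-alive lines between SSE events
        payload = raw_line[len('data: '):]
        if payload == '[DONE]':
            break
        print(payload)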
-@app.route('/push-igs', methods=['GET']) -def push_igs(): - # form = FlaskForm() # OLD - Replace this line - form = IgImportForm() # Use a real form class that has CSRF handling built-in - processed_igs = ProcessedIg.query.order_by(ProcessedIg.package_name, ProcessedIg.version).all() - processed_ids = {(ig.package_name, ig.version) for ig in processed_igs} - packages_dir = app.config['FHIR_PACKAGES_DIR'] - packages, errors, duplicate_groups = list_downloaded_packages(packages_dir) - if errors: - flash(f"Warning: Errors encountered while listing packages: {', '.join(errors)}", "warning") - colors = ['bg-warning', 'bg-info', 'bg-success', 'bg-danger', 'bg-secondary'] - group_colors = {} - for i, name in enumerate(duplicate_groups.keys()): - group_colors[name] = colors[i % len(colors)] - return render_template('cp_push_igs.html', form=form, packages=packages, # Pass the form instance - processed_list=processed_igs, processed_ids=processed_ids, - duplicate_groups=duplicate_groups, group_colors=group_colors, - site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now(), - api_key=app.config['API_KEY'], config=app.config) - -@app.route('/process-igs', methods=['POST']) -def process_ig(): - form = FlaskForm() # Assuming a basic FlaskForm for CSRF protection - if form.validate_on_submit(): - filename = request.form.get('filename') - # --- Keep existing filename and path validation --- - if not filename or not filename.endswith('.tgz'): - flash("Invalid package file selected.", "error") - return redirect(url_for('view_igs')) - tgz_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename) - if not os.path.exists(tgz_path): - flash(f"Package file not found: {filename}", "error") - return redirect(url_for('view_igs')) - - name, version = services.parse_package_filename(filename) - if not name: # Add fallback naming if parse fails - name = filename[:-4].replace('_', '.') # Basic guess - version = 'unknown' - logger.warning(f"Using fallback naming for {filename} -> {name}#{version}") - - try: - logger.info(f"Starting processing for {name}#{version} from file {filename}") - # This now returns the conformance map too - package_info = services.process_package_file(tgz_path) - - if package_info.get('errors'): - flash(f"Processing completed with errors for {name}#{version}: {', '.join(package_info['errors'])}", "warning") - - # (Keep existing optional_usage_dict logic) - optional_usage_dict = { - info['name']: True - for info in package_info.get('resource_types_info', []) - if info.get('optional_usage') - } - logger.debug(f"Optional usage elements identified: {optional_usage_dict}") - - # Find existing or create new DB record - existing_ig = ProcessedIg.query.filter_by(package_name=name, version=version).first() - - if existing_ig: - logger.info(f"Updating existing processed record for {name}#{version}") - processed_ig = existing_ig - else: - logger.info(f"Creating new processed record for {name}#{version}") - processed_ig = ProcessedIg(package_name=name, version=version) - db.session.add(processed_ig) - - # Update all fields - processed_ig.processed_date = datetime.datetime.now(tz=datetime.timezone.utc) - processed_ig.resource_types_info = package_info.get('resource_types_info', []) - processed_ig.must_support_elements = package_info.get('must_support_elements') - processed_ig.examples = package_info.get('examples') - processed_ig.complies_with_profiles = package_info.get('complies_with_profiles', []) - processed_ig.imposed_profiles = package_info.get('imposed_profiles', []) - 
processed_ig.optional_usage_elements = optional_usage_dict - # --- ADD THIS LINE: Save the extracted conformance map --- - processed_ig.search_param_conformance = package_info.get('search_param_conformance') # Get map from results - # --- END ADD --- - - db.session.commit() # Commit all changes - flash(f"Successfully processed {name}#{version}!", "success") - - except Exception as e: - db.session.rollback() # Rollback on error - logger.error(f"Error processing IG {filename}: {str(e)}", exc_info=True) - flash(f"Error processing IG '{filename}': {str(e)}", "error") - else: - # Handle CSRF or other form validation errors - logger.warning(f"Form validation failed for process-igs: {form.errors}") - flash("CSRF token missing or invalid, or other form error.", "error") - - return redirect(url_for('view_igs')) - -# --- End of /process-igs Function --- - -@app.route('/delete-ig', methods=['POST']) -def delete_ig(): - form = FlaskForm() - if form.validate_on_submit(): - filename = request.form.get('filename') - if not filename or not filename.endswith('.tgz'): - flash("Invalid package file specified.", "error") - return redirect(url_for('view_igs')) - tgz_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename) - metadata_path = tgz_path.replace('.tgz', '.metadata.json') - deleted_files = [] - errors = [] - if os.path.exists(tgz_path): - try: - os.remove(tgz_path) - deleted_files.append(filename) - logger.info(f"Deleted package file: {tgz_path}") - except OSError as e: - errors.append(f"Could not delete {filename}: {e}") - logger.error(f"Error deleting {tgz_path}: {e}") - else: - flash(f"Package file not found: {filename}", "warning") - if os.path.exists(metadata_path): - try: - os.remove(metadata_path) - deleted_files.append(os.path.basename(metadata_path)) - logger.info(f"Deleted metadata file: {metadata_path}") - except OSError as e: - errors.append(f"Could not delete metadata for {filename}: {e}") - logger.error(f"Error deleting {metadata_path}: {e}") - if errors: - for error in errors: - flash(error, "error") - elif deleted_files: - flash(f"Deleted: {', '.join(deleted_files)}", "success") - else: - flash("No files found to delete.", "info") - else: - logger.warning(f"Form validation failed for delete-ig: {form.errors}") - flash("CSRF token missing or invalid.", "error") - return redirect(url_for('view_igs')) - -@app.route('/unload-ig', methods=['POST']) -def unload_ig(): - form = FlaskForm() - if form.validate_on_submit(): - ig_id = request.form.get('ig_id') - try: - ig_id_int = int(ig_id) - processed_ig = db.session.get(ProcessedIg, ig_id_int) - if processed_ig: - try: - pkg_name = processed_ig.package_name - pkg_version = processed_ig.version - db.session.delete(processed_ig) - db.session.commit() - flash(f"Unloaded processed data for {pkg_name}#{pkg_version}", "success") - logger.info(f"Unloaded DB record for {pkg_name}#{pkg_version} (ID: {ig_id_int})") - except Exception as e: - db.session.rollback() - flash(f"Error unloading package data: {str(e)}", "error") - logger.error(f"Error deleting ProcessedIg record ID {ig_id_int}: {e}", exc_info=True) - else: - flash(f"Processed package data not found with ID: {ig_id}", "error") - logger.warning(f"Attempted to unload non-existent ProcessedIg record ID: {ig_id}") - except ValueError: - flash("Invalid package ID provided.", "error") - logger.warning(f"Invalid ID format received for unload-ig: {ig_id}") - except Exception as e: - flash(f"An unexpected error occurred during unload: {str(e)}", "error") - logger.error(f"Unexpected error in 
unload_ig for ID {ig_id}: {e}", exc_info=True)
- else:
- logger.warning(f"Form validation failed for unload-ig: {form.errors}")
- flash("CSRF token missing or invalid.", "error")
- return redirect(url_for('view_igs'))
-
-@app.route('/view-ig/<int:processed_ig_id>')
-def view_ig(processed_ig_id):
- processed_ig = db.session.get(ProcessedIg, processed_ig_id)
- if not processed_ig:
- flash(f"Processed IG with ID {processed_ig_id} not found.", "error")
- return redirect(url_for('view_igs'))
- profile_list = [t for t in processed_ig.resource_types_info if t.get('is_profile')]
- base_list = [t for t in processed_ig.resource_types_info if not t.get('is_profile')]
- examples_by_type = processed_ig.examples or {}
- optional_usage_elements = processed_ig.optional_usage_elements or {}
- complies_with_profiles = processed_ig.complies_with_profiles or []
- imposed_profiles = processed_ig.imposed_profiles or []
- logger.debug(f"Viewing IG {processed_ig.package_name}#{processed_ig.version}: "
- f"{len(profile_list)} profiles, {len(base_list)} base resources, "
- f"{len(optional_usage_elements)} optional elements")
- return render_template('cp_view_processed_ig.html',
- title=f"View {processed_ig.package_name}#{processed_ig.version}",
- processed_ig=processed_ig,
- profile_list=profile_list,
- base_list=base_list,
- examples_by_type=examples_by_type,
- site_name='FHIRFLARE IG Toolkit',
- now=datetime.datetime.now(),
- complies_with_profiles=complies_with_profiles,
- imposed_profiles=imposed_profiles,
- optional_usage_elements=optional_usage_elements,
- config=current_app.config)
-
-@app.route('/get-example')
-@swag_from({
- 'tags': ['Package Management'],
- 'summary': 'Get a specific example resource from a package.',
- 'description': 'Retrieves the content of an example JSON file from a specified FHIR package and version.',
- 'parameters': [
- {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Name of the FHIR package.'},
- {'name': 'version', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Version of the FHIR package.'},
- {'name': 'filename', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Path to the example file within the package (e.g., "package/Patient-example.json").'},
- {'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'Whether to include the HTML narrative in the response.'}
- ],
- 'responses': {
- '200': {'description': 'The example FHIR resource in JSON format.', 'schema': {'type': 'object'}},
- '400': {'description': 'Missing required query parameters or invalid file path.'},
- '404': {'description': 'Package or example file not found.'},
- '500': {'description': 'Server error during file retrieval or processing.'}
- }
-})
-def get_example():
- package_name = request.args.get('package_name')
- version = request.args.get('version')
- filename = request.args.get('filename')
- include_narrative = request.args.get('include_narrative', 'false').lower() == 'true'
- if not all([package_name, version, filename]):
- logger.warning("get_example: Missing query parameters: package_name=%s, version=%s, filename=%s", package_name, version, filename)
- return jsonify({"error": "Missing required query parameters: package_name, version, filename"}), 400
- if not filename.startswith('package/') or '..'
in filename: - logger.warning(f"Invalid example file path requested: {filename}") - return jsonify({"error": "Invalid example file path."}), 400 - packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') - if not packages_dir: - logger.error("FHIR_PACKAGES_DIR not configured.") - return jsonify({"error": "Server configuration error: Package directory not set."}), 500 - tgz_filename = services.construct_tgz_filename(package_name, version) - tgz_path = os.path.join(packages_dir, tgz_filename) - if not os.path.exists(tgz_path): - logger.error(f"Package file not found: {tgz_path}") - return jsonify({"error": f"Package {package_name}#{version} not found"}), 404 - try: - with tarfile.open(tgz_path, "r:gz") as tar: - try: - example_member = tar.getmember(filename) - with tar.extractfile(example_member) as example_fileobj: - content_bytes = example_fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - content = json.loads(content_string) - if not include_narrative: - content = services.remove_narrative(content, include_narrative=False) - filtered_content_string = json.dumps(content, separators=(',', ':'), sort_keys=False) - return Response(filtered_content_string, mimetype='application/json') - except KeyError: - logger.error(f"Example file '{filename}' not found within {tgz_filename}") - return jsonify({"error": f"Example file '{os.path.basename(filename)}' not found in package."}), 404 - except json.JSONDecodeError as e: - logger.error(f"JSON parsing error for example '{filename}' in {tgz_filename}: {e}") - return jsonify({"error": f"Invalid JSON in example file: {str(e)}"}), 500 - except UnicodeDecodeError as e: - logger.error(f"Encoding error reading example '{filename}' from {tgz_filename}: {e}") - return jsonify({"error": f"Error decoding example file (invalid UTF-8?): {str(e)}"}), 500 - except tarfile.TarError as e: - logger.error(f"TarError reading example '{filename}' from {tgz_filename}: {e}") - return jsonify({"error": f"Error reading package archive: {str(e)}"}), 500 - except tarfile.TarError as e: - logger.error(f"Error opening package file {tgz_path}: {e}") - return jsonify({"error": f"Error reading package archive: {str(e)}"}), 500 - except FileNotFoundError: - logger.error(f"Package file disappeared: {tgz_path}") - return jsonify({"error": f"Package file not found: {package_name}#{version}"}), 404 - except Exception as e: - logger.error(f"Unexpected error getting example '{filename}' from {tgz_filename}: {e}", exc_info=True) - return jsonify({"error": f"Unexpected error: {str(e)}"}), 500 - -#----------------------------------------------------------------------new -def collect_all_structure_definitions(tgz_path): - """Collect all StructureDefinitions from a .tgz package.""" - structure_definitions = {} - try: - with tarfile.open(tgz_path, "r:gz") as tar: - for member in tar: - if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): - continue - if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - continue - fileobj = None - try: - fileobj = tar.extractfile(member) - if fileobj: - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - if isinstance(data, dict) and data.get('resourceType') == 'StructureDefinition': - sd_url = data.get('url') - if sd_url: - structure_definitions[sd_url] = data - except Exception as e: - logger.warning(f"Could not read/parse potential SD 
{member.name}, skipping: {e}") - finally: - if fileobj: - fileobj.close() - except Exception as e: - logger.error(f"Unexpected error collecting StructureDefinitions from {tgz_path}: {e}", exc_info=True) - return structure_definitions - -def generate_snapshot(structure_def, core_package_path, local_package_path): - """Generate a snapshot by merging the differential with the base StructureDefinition.""" - if 'snapshot' in structure_def: - return structure_def - - # Fetch all StructureDefinitions from the local package for reference resolution - local_sds = collect_all_structure_definitions(local_package_path) - - # Get the base StructureDefinition from the core package - base_url = structure_def.get('baseDefinition') - if not base_url: - logger.error("No baseDefinition found in StructureDefinition.") - return structure_def - - resource_type = structure_def.get('type') - base_sd_data, _ = services.find_and_extract_sd(core_package_path, resource_type, profile_url=base_url) - if not base_sd_data or 'snapshot' not in base_sd_data: - logger.error(f"Could not fetch or find snapshot in base StructureDefinition: {base_url}") - return structure_def - - # Copy the base snapshot elements - snapshot_elements = deepcopy(base_sd_data['snapshot']['element']) - differential_elements = structure_def.get('differential', {}).get('element', []) - - # Map snapshot elements by path and id for easier lookup - snapshot_by_path = {el['path']: el for el in snapshot_elements} - snapshot_by_id = {el['id']: el for el in snapshot_elements if 'id' in el} - - # Process differential elements - for diff_el in differential_elements: - diff_path = diff_el.get('path') - diff_id = diff_el.get('id') - - # Resolve extensions or referenced types - if 'type' in diff_el: - for type_info in diff_el['type']: - if 'profile' in type_info: - for profile_url in type_info['profile']: - if profile_url in local_sds: - # Add elements from the referenced StructureDefinition - ref_sd = local_sds[profile_url] - ref_elements = ref_sd.get('snapshot', {}).get('element', []) or ref_sd.get('differential', {}).get('element', []) - for ref_el in ref_elements: - # Adjust paths to fit within the current structure - ref_path = ref_el.get('path') - if ref_path.startswith(ref_sd.get('type')): - new_path = diff_path + ref_path[len(ref_sd.get('type')):] - new_el = deepcopy(ref_el) - new_el['path'] = new_path - new_el['id'] = diff_id + ref_path[len(ref_sd.get('type')):] - snapshot_elements.append(new_el) - - # Find matching element in snapshot - target_el = snapshot_by_id.get(diff_id) or snapshot_by_path.get(diff_path) - if target_el: - # Update existing element with differential constraints - target_el.update(diff_el) - else: - # Add new element (e.g., extensions or new slices) - snapshot_elements.append(diff_el) - - structure_def['snapshot'] = {'element': snapshot_elements} - return structure_def - -@app.route('/get-structure') -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Get a StructureDefinition from a package.', - 'description': 'Retrieves a StructureDefinition, optionally generating or filtering for snapshot/differential views.', - 'parameters': [ - {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True}, - {'name': 'version', 'in': 'query', 'type': 'string', 'required': True}, - {'name': 'resource_type', 'in': 'query', 'type': 'string', 'required': True, 'description': 'The resource type or profile ID.'}, - {'name': 'view', 'in': 'query', 'type': 'string', 'required': False, 'default': 'snapshot', 'enum': ['snapshot', 
'differential']}, - {'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False}, - {'name': 'raw', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'If true, returns the raw SD JSON.'}, - {'name': 'profile_url', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Canonical URL of the profile to retrieve.'} - ], - 'responses': { - '200': { - 'description': 'The StructureDefinition data.', - 'schema': { - 'type': 'object', - 'properties': { - 'elements': {'type': 'array', 'items': {'type': 'object'}}, - 'must_support_paths': {'type': 'array', 'items': {'type': 'string'}}, - 'search_parameters': {'type': 'array', 'items': {'type': 'object'}}, - 'fallback_used': {'type': 'boolean'}, - 'source_package': {'type': 'string'} - } - } - }, - '400': {'description': 'Missing required parameters.'}, - '404': {'description': 'StructureDefinition not found.'}, - '500': {'description': 'Server error.'} - } -}) -def get_structure(): - package_name = request.args.get('package_name') - version = request.args.get('version') - resource_type = request.args.get('resource_type') - view = request.args.get('view', 'snapshot') - include_narrative = request.args.get('include_narrative', 'false').lower() == 'true' - raw = request.args.get('raw', 'false').lower() == 'true' - profile_url = request.args.get('profile_url') - if not all([package_name, version, resource_type]): - logger.warning("get_structure: Missing query parameters: package_name=%s, version=%s, resource_type=%s", package_name, version, resource_type) - return jsonify({"error": "Missing required query parameters: package_name, version, resource_type"}), 400 - packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') - if not packages_dir: - logger.error("FHIR_PACKAGES_DIR not configured.") - return jsonify({"error": "Server configuration error: Package directory not set."}), 500 - tgz_filename = services.construct_tgz_filename(package_name, version) - tgz_path = os.path.join(packages_dir, tgz_filename) - core_package_name, core_package_version = services.CANONICAL_PACKAGE - core_tgz_filename = services.construct_tgz_filename(core_package_name, core_package_version) - core_tgz_path = os.path.join(packages_dir, core_tgz_filename) - sd_data = None - search_params_data = [] - fallback_used = False - source_package_id = f"{package_name}#{version}" - base_resource_type_for_sp = None - logger.debug(f"Attempting to find SD for '{resource_type}' in {tgz_filename}") - primary_package_exists = os.path.exists(tgz_path) - core_package_exists = os.path.exists(core_tgz_path) - if primary_package_exists: - try: - sd_data, _ = services.find_and_extract_sd(tgz_path, resource_type, profile_url=profile_url, include_narrative=include_narrative, raw=raw) - if sd_data: - base_resource_type_for_sp = sd_data.get('type') - logger.debug(f"Determined base resource type '{base_resource_type_for_sp}' from primary SD '{resource_type}'") - except Exception as e: - logger.error(f"Unexpected error extracting SD '{resource_type}' from primary package {tgz_path}: {e}", exc_info=True) - sd_data = None - if sd_data is None: - logger.info(f"SD for '{resource_type}' not found or failed to load from {source_package_id}. Attempting fallback to {services.CANONICAL_PACKAGE_ID}.") - if not core_package_exists: - error_message = f"SD for '{resource_type}' not found in primary package, and core package is missing." 
if primary_package_exists else f"Primary package {package_name}#{version} and core package are missing." - return jsonify({"error": error_message}), 500 if primary_package_exists else 404 - try: - sd_data, _ = services.find_and_extract_sd(core_tgz_path, resource_type, profile_url=profile_url, include_narrative=include_narrative, raw=raw) - if sd_data is not None: - fallback_used = True - source_package_id = services.CANONICAL_PACKAGE_ID - base_resource_type_for_sp = sd_data.get('type') - logger.info(f"Found SD for '{resource_type}' in fallback package {source_package_id}. Base type: '{base_resource_type_for_sp}'") - except Exception as e: - logger.error(f"Unexpected error extracting SD '{resource_type}' from fallback {core_tgz_path}: {e}", exc_info=True) - return jsonify({"error": f"Unexpected error reading fallback StructureDefinition: {str(e)}"}), 500 - if not sd_data: - logger.error(f"SD for '{resource_type}' could not be found in primary or fallback packages.") - return jsonify({"error": f"StructureDefinition for '{resource_type}' not found."}), 404 - - # Generate snapshot if missing - if 'snapshot' not in sd_data: - logger.info(f"Snapshot missing for {resource_type}. Generating snapshot...") - sd_data = generate_snapshot(sd_data, core_tgz_path, tgz_path) - - if raw: - return Response(json.dumps(sd_data, indent=None, separators=(',', ':')), mimetype='application/json') - - # Prepare elements based on the view - snapshot_elements = sd_data.get('snapshot', {}).get('element', []) - differential_elements = sd_data.get('differential', {}).get('element', []) - differential_ids = {el.get('id') for el in differential_elements if el.get('id')} - logger.debug(f"Found {len(differential_ids)} unique IDs in differential.") - - # Select elements based on the view - enriched_elements = [] - if view == 'snapshot': - if snapshot_elements: - logger.debug(f"Processing {len(snapshot_elements)} snapshot elements for Snapshot view.") - for element in snapshot_elements: - element_id = element.get('id') - element['isInDifferential'] = bool(element_id and element_id in differential_ids) - enriched_elements.append(element) - else: - logger.warning(f"No snapshot elements found for {resource_type} in {source_package_id} for Snapshot view.") - else: # Differential, Must Support, Key Elements views use differential elements as a base - if differential_elements: - logger.debug(f"Processing {len(differential_elements)} differential elements for {view} view.") - for element in differential_elements: - element['isInDifferential'] = True - enriched_elements.append(element) - else: - logger.warning(f"No differential elements found for {resource_type} in {source_package_id} for {view} view.") - - enriched_elements = [services.remove_narrative(el, include_narrative=include_narrative) for el in enriched_elements] - - must_support_paths = [] - processed_ig_record = ProcessedIg.query.filter_by(package_name=package_name, version=version).first() - if processed_ig_record and processed_ig_record.must_support_elements: - ms_elements_dict = processed_ig_record.must_support_elements - must_support_paths = ms_elements_dict.get(resource_type, []) - if not must_support_paths and base_resource_type_for_sp: - must_support_paths = ms_elements_dict.get(base_resource_type_for_sp, []) - if must_support_paths: - logger.debug(f"Retrieved {len(must_support_paths)} MS paths using base type key '{base_resource_type_for_sp}' from DB.") - elif must_support_paths: - logger.debug(f"Retrieved {len(must_support_paths)} MS paths using profile key 
'{resource_type}' from DB.") - else: - logger.debug(f"No specific MS paths found for keys '{resource_type}' or '{base_resource_type_for_sp}' in DB.") - else: - logger.debug(f"No processed IG record or no must_support_elements found in DB for {package_name}#{version}") - - if base_resource_type_for_sp and primary_package_exists: - try: - logger.info(f"Fetching SearchParameters for base type '{base_resource_type_for_sp}' from primary package {tgz_path}") - search_params_data = services.find_and_extract_search_params(tgz_path, base_resource_type_for_sp) - except Exception as e: - logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from primary package {tgz_path}: {e}", exc_info=True) - search_params_data = [] - elif not primary_package_exists: - logger.warning(f"Original package {tgz_path} not found, cannot search it for specific SearchParameters.") - elif not base_resource_type_for_sp: - logger.warning(f"Base resource type could not be determined for '{resource_type}', cannot search for SearchParameters.") - if not search_params_data and base_resource_type_for_sp and core_package_exists: - logger.info(f"No relevant SearchParameters found in primary package for '{base_resource_type_for_sp}'. Searching core package {core_tgz_path}.") - try: - search_params_data = services.find_and_extract_search_params(core_tgz_path, base_resource_type_for_sp) - if search_params_data: - logger.info(f"Found {len(search_params_data)} SearchParameters for '{base_resource_type_for_sp}' in core package.") - except Exception as e: - logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from core package {core_tgz_path}: {e}", exc_info=True) - search_params_data = [] - elif not search_params_data and not core_package_exists: - logger.warning(f"Core package {core_tgz_path} not found, cannot perform fallback search for SearchParameters.") - search_param_conformance_rules = {} - if base_resource_type_for_sp: - if processed_ig_record: - if hasattr(processed_ig_record, 'search_param_conformance') and processed_ig_record.search_param_conformance: - all_conformance_data = processed_ig_record.search_param_conformance - search_param_conformance_rules = all_conformance_data.get(base_resource_type_for_sp, {}) - logger.debug(f"Retrieved conformance rules for {base_resource_type_for_sp} from DB: {search_param_conformance_rules}") - else: - logger.warning(f"ProcessedIg record found, but 'search_param_conformance' attribute/data is missing or empty for {package_name}#{version}.") - else: - logger.warning(f"No ProcessedIg record found for {package_name}#{version} to get conformance rules.") - if search_params_data: - logger.debug(f"Merging conformance data into {len(search_params_data)} search parameters.") - for param in search_params_data: - param_code = param.get('code') - if param_code: - conformance_level = search_param_conformance_rules.get(param_code, 'Optional') - param['conformance'] = conformance_level - else: - param['conformance'] = 'Unknown' - logger.debug("Finished merging conformance data.") - else: - logger.debug(f"No search parameters found for {base_resource_type_for_sp} to merge conformance data into.") - else: - logger.warning(f"Cannot fetch conformance data because base resource type (e.g., Patient) for '{resource_type}' could not be determined.") - for param in search_params_data: - if 'conformance' not in param or param['conformance'] == 'N/A': - param['conformance'] = 'Optional' - response_data = { - 'elements': enriched_elements, - 
'must_support_paths': must_support_paths, - 'search_parameters': search_params_data, - 'fallback_used': fallback_used, - 'source_package': source_package_id - } - return Response(json.dumps(response_data, indent=None, separators=(',', ':')), mimetype='application/json') -#------------------------------------------------------------------------ - - -@app.route('/get-package-metadata') -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Get metadata for a downloaded package.', - 'parameters': [ - {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True}, - {'name': 'version', 'in': 'query', 'type': 'string', 'required': True} - ], - 'responses': { - '200': { - 'description': 'Package metadata.', - 'schema': { - 'type': 'object', - 'properties': { - 'package_name': {'type': 'string'}, - 'version': {'type': 'string'}, - 'dependency_mode': {'type': 'string'}, - 'imported_dependencies': {'type': 'array', 'items': {'type': 'object'}}, - 'complies_with_profiles': {'type': 'array', 'items': {'type': 'string'}}, - 'imposed_profiles': {'type': 'array', 'items': {'type': 'string'}} - } - } - }, - '400': {'description': 'Missing parameters.'}, - '404': {'description': 'Metadata not found.'}, - '500': {'description': 'Server error.'} - } -}) -def get_package_metadata(): - package_name = request.args.get('package_name') - version = request.args.get('version') - if not package_name or not version: - return jsonify({'error': 'Missing package_name or version parameter'}), 400 - try: - metadata = services.get_package_metadata(package_name, version) - if metadata: - return jsonify({ - 'package_name': metadata.get('package_name'), - 'version': metadata.get('version'), - 'dependency_mode': metadata.get('dependency_mode'), - 'imported_dependencies': metadata.get('imported_dependencies', []), - 'complies_with_profiles': metadata.get('complies_with_profiles', []), - 'imposed_profiles': metadata.get('imposed_profiles', []) - }) - else: - return jsonify({'error': 'Metadata file not found for this package version.'}), 404 - except Exception as e: - logger.error(f"Error retrieving metadata for {package_name}#{version}: {e}", exc_info=True) - return jsonify({'error': f'Error retrieving metadata: {str(e)}'}), 500 - -@app.route('/api/import-ig', methods=['POST']) -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Import a FHIR Implementation Guide via API.', - 'description': 'Downloads and processes a FHIR IG and its dependencies.', - 'security': [{'ApiKeyAuth': []}], - 'consumes': ['application/json'], - 'parameters': [ - { - 'name': 'body', - 'in': 'body', - 'required': True, - 'schema': { - 'type': 'object', - 'required': ['package_name', 'version'], - 'properties': { - 'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'}, - 'version': {'type': 'string', 'example': '6.1.0'}, - 'dependency_mode': { - 'type': 'string', 'enum': ['recursive', 'patch-canonical', 'tree-shaking', 'direct'], - 'default': 'recursive' - } - } - } - } - ], - 'responses': { - '200': {'description': 'Package imported successfully or with warnings.'}, - '400': {'description': 'Invalid request (e.g., missing fields, invalid mode).'}, - '404': {'description': 'Package not found on registry.'}, - '500': {'description': 'Server error during import.'} - } -}) -def api_import_ig(): - auth_error = check_api_key() - if auth_error: - return auth_error - if not request.is_json: - return jsonify({"status": "error", "message": "Request must be JSON"}), 400 - data = request.get_json() - package_name = 
data.get('package_name') - version = data.get('version') - dependency_mode = data.get('dependency_mode', 'recursive') - if not package_name or not version: - return jsonify({"status": "error", "message": "Missing package_name or version"}), 400 - if not (isinstance(package_name, str) and isinstance(version, str) and - re.match(r'^[a-zA-Z0-9\-\.]+$', package_name) and - re.match(r'^[a-zA-Z0-9\.\-\+]+$', version)): - return jsonify({"status": "error", "message": "Invalid characters in package name or version"}), 400 - valid_modes = ['recursive', 'patch-canonical', 'tree-shaking', 'direct'] - if dependency_mode not in valid_modes: - return jsonify({"status": "error", "message": f"Invalid dependency mode: {dependency_mode}. Must be one of {valid_modes}"}), 400 - try: - result = services.import_package_and_dependencies(package_name, version, dependency_mode=dependency_mode) - if result['errors'] and not result['downloaded']: - error_msg = f"Failed to import {package_name}#{version}: {result['errors'][0]}" - logger.error(f"[API] Import failed: {error_msg}") - status_code = 404 if "404" in result['errors'][0] else 500 - return jsonify({"status": "error", "message": error_msg}), status_code - package_filename = services.construct_tgz_filename(package_name, version) - packages_dir = current_app.config.get('FHIR_PACKAGES_DIR', '/app/instance/fhir_packages') - package_path = os.path.join(packages_dir, package_filename) - complies_with_profiles = [] - imposed_profiles = [] - processing_errors = [] - if os.path.exists(package_path): - logger.info(f"[API] Processing downloaded package {package_path} for metadata.") - process_result = services.process_package_file(package_path) - complies_with_profiles = process_result.get('complies_with_profiles', []) - imposed_profiles = process_result.get('imposed_profiles', []) - if process_result.get('errors'): - processing_errors.extend(process_result['errors']) - logger.warning(f"[API] Errors during post-import processing of {package_name}#{version}: {processing_errors}") - else: - logger.warning(f"[API] Package file {package_path} not found after reported successful download.") - processing_errors.append("Package file disappeared after download.") - all_packages, errors, duplicate_groups_after = list_downloaded_packages(packages_dir) - duplicates_found = [] - for name, versions in duplicate_groups_after.items(): - duplicates_found.append(f"{name} (Versions present: {', '.join(versions)})") - response_status = "success" - response_message = "Package imported successfully." - if result['errors'] or processing_errors: - response_status = "warning" - response_message = "Package imported, but some errors occurred during processing or dependency handling." 
- all_issues = result.get('errors', []) + processing_errors - logger.warning(f"[API] Import for {package_name}#{version} completed with warnings/errors: {all_issues}") - response = { - "status": response_status, - "message": response_message, - "package_name": package_name, - "version": version, - "dependency_mode": dependency_mode, - "dependencies_processed": result.get('dependencies', []), - "complies_with_profiles": complies_with_profiles, - "imposed_profiles": imposed_profiles, - "processing_issues": result.get('errors', []) + processing_errors, - "duplicate_packages_present": duplicates_found - } - return jsonify(response), 200 - except Exception as e: - logger.error(f"[API] Unexpected error in api_import_ig for {package_name}#{version}: {str(e)}", exc_info=True) - return jsonify({"status": "error", "message": f"Unexpected server error during import: {str(e)}"}), 500 - -@app.route('/api/push-ig', methods=['POST']) -@csrf.exempt # Retain CSRF exemption as specified -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Push a FHIR Implementation Guide to a server via API.', - 'description': 'Uploads resources from a specified FHIR IG (and optionally its dependencies) to a target FHIR server. Returns an NDJSON stream of progress.', - 'security': [{'ApiKeyAuth': []}], - 'consumes': ['application/json'], - 'produces': ['application/x-ndjson'], - 'parameters': [ - { - 'name': 'body', - 'in': 'body', - 'required': True, - 'schema': { - 'type': 'object', - 'required': ['package_name', 'version', 'fhir_server_url'], - 'properties': { - 'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'}, - 'version': {'type': 'string', 'example': '6.1.0'}, - 'fhir_server_url': {'type': 'string', 'format': 'url', 'example': 'http://localhost:8080/fhir'}, - 'include_dependencies': {'type': 'boolean', 'default': True}, - 'auth_type': {'type': 'string', 'enum': ['apiKey', 'bearerToken', 'basic', 'none'], 'default': 'none'}, - 'auth_token': {'type': 'string', 'description': 'Required if auth_type is bearerToken or basic (for basic, use "Basic ")'}, - 'username': {'type': 'string', 'description': 'Required if auth_type is basic'}, - 'password': {'type': 'string', 'format': 'password', 'description': 'Required if auth_type is basic'}, - 'resource_types_filter': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of resource types to include.'}, - 'skip_files': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of specific file paths within packages to skip.'}, - 'dry_run': {'type': 'boolean', 'default': False}, - 'verbose': {'type': 'boolean', 'default': False}, - 'force_upload': {'type': 'boolean', 'default': False, 'description': 'If true, uploads resources even if they appear identical to server versions.'} - } - } - } - ], - 'responses': { - '200': {'description': 'NDJSON stream of push progress and results.'}, - '400': {'description': 'Invalid request parameters.'}, - '401': {'description': 'Authentication error.'}, - '404': {'description': 'Package not found locally.'}, - '500': {'description': 'Server error during push operation setup.'} - } -}) -def api_push_ig(): - auth_error = check_api_key() - if auth_error: return auth_error - if not request.is_json: return jsonify({"status": "error", "message": "Request must be JSON"}), 400 - - data = request.get_json() - package_name = data.get('package_name') - version = data.get('version') - fhir_server_url = data.get('fhir_server_url') - include_dependencies = data.get('include_dependencies', True) - auth_type = 
data.get('auth_type', 'none') - auth_token = data.get('auth_token') - username = data.get('username') # ADD: Extract username - password = data.get('password') # ADD: Extract password - resource_types_filter_raw = data.get('resource_types_filter') - skip_files_raw = data.get('skip_files') - dry_run = data.get('dry_run', False) - verbose = data.get('verbose', False) - force_upload = data.get('force_upload', False) - - # --- Input Validation --- - if not all([package_name, version, fhir_server_url]): return jsonify({"status": "error", "message": "Missing required fields"}), 400 - valid_auth_types = ['apiKey', 'bearerToken', 'basic', 'none'] # ADD: 'basic' to valid auth types - if auth_type not in valid_auth_types: return jsonify({"status": "error", "message": f"Invalid auth_type."}), 400 - if auth_type == 'bearerToken' and not auth_token: return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400 - if auth_type == 'basic' and (not username or not password): # ADD: Validate Basic Auth inputs - return jsonify({"status": "error", "message": "Username and password required for Basic Authentication."}), 400 - - # Parse filters (unchanged) - resource_types_filter = None - if resource_types_filter_raw: - if isinstance(resource_types_filter_raw, list): resource_types_filter = [s for s in resource_types_filter_raw if isinstance(s, str)] - elif isinstance(resource_types_filter_raw, str): resource_types_filter = [s.strip() for s in resource_types_filter_raw.split(',') if s.strip()] - else: return jsonify({"status": "error", "message": "Invalid resource_types_filter format."}), 400 - skip_files = None - if skip_files_raw: - if isinstance(skip_files_raw, list): skip_files = [s.strip().replace('\\', '/') for s in skip_files_raw if isinstance(s, str) and s.strip()] - elif isinstance(skip_files_raw, str): skip_files = [s.strip().replace('\\', '/') for s in re.split(r'[,\n]', skip_files_raw) if s.strip()] - else: return jsonify({"status": "error", "message": "Invalid skip_files format."}), 400 - - # --- File Path Setup (unchanged) --- - packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') - if not packages_dir: return jsonify({"status": "error", "message": "Server config error: Package dir missing."}), 500 - tgz_filename = services.construct_tgz_filename(package_name, version) - tgz_path = os.path.join(packages_dir, tgz_filename) - if not os.path.exists(tgz_path): return jsonify({"status": "error", "message": f"Package not found locally: {package_name}#{version}"}), 404 - - # ADD: Handle Basic Authentication - if auth_type == 'basic': - credentials = f"{username}:{password}" - auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}" - - # --- Streaming Response --- - def generate_stream_wrapper(): - yield from services.generate_push_stream( - package_name=package_name, version=version, fhir_server_url=fhir_server_url, - include_dependencies=include_dependencies, auth_type=auth_type, - auth_token=auth_token, resource_types_filter=resource_types_filter, - skip_files=skip_files, dry_run=dry_run, verbose=verbose, - force_upload=force_upload, packages_dir=packages_dir - ) - return Response(generate_stream_wrapper(), mimetype='application/x-ndjson') - -# Ensure csrf.exempt(api_push_ig) remains - -@app.route('/validate-sample', methods=['GET']) -def validate_sample(): - form = ValidationForm() - packages = [] - packages_dir = app.config['FHIR_PACKAGES_DIR'] - if os.path.exists(packages_dir): - for filename in os.listdir(packages_dir): - if 
filename.endswith('.tgz'):
-                try:
-                    with tarfile.open(os.path.join(packages_dir, filename), 'r:gz') as tar:
-                        package_json = tar.extractfile('package/package.json')
-                        if package_json:
-                            pkg_info = json.load(package_json)
-                            name = pkg_info.get('name')
-                            version = pkg_info.get('version')
-                            if name and version:
-                                packages.append({'name': name, 'version': version})
-                except Exception as e:
-                    logger.warning(f"Error reading package {filename}: {e}")
-                    continue
-    return render_template(
-        'validate_sample.html',
-        form=form,
-        packages=packages,
-        validation_report=None,
-        site_name='FHIRFLARE IG Toolkit',
-        now=datetime.datetime.now(), app_mode=app.config['APP_MODE']
-    )
-
-# Exempt specific API views defined directly on 'app'
-csrf.exempt(api_import_ig) # Add this line
-csrf.exempt(api_push_ig) # Add this line
-
-# Exempt the entire API blueprint (for routes defined IN services.py, like /api/validate-sample)
-csrf.exempt(services_bp) # Keep this line for routes defined in the blueprint
-
-def create_db():
-    logger.debug(f"Attempting to create database tables for URI: {app.config['SQLALCHEMY_DATABASE_URI']}")
-    try:
-        db.create_all() # This will create RegistryCacheInfo if it doesn't exist
-        # Optionally initialize the timestamp row if it's missing
-        with app.app_context():
-            if RegistryCacheInfo.query.first() is None:
-                initial_info = RegistryCacheInfo(last_fetch_timestamp=None)
-                db.session.add(initial_info)
-                db.session.commit()
-                logger.info("Initialized RegistryCacheInfo table.")
-        logger.info("Database tables created/verified successfully.")
-    except Exception as e:
-        logger.error(f"Failed to initialize database tables: {e}", exc_info=True)
-        #db.session.rollback() # Rollback in case of error during init
-        raise
-
-with app.app_context():
-    create_db()
-
-
-class FhirRequestForm(FlaskForm):
-    submit = SubmitField('Send Request')
-
-@app.route('/fhir-ui')
-def fhir_ui():
-    form = FhirRequestForm()
-    return render_template('fhir_ui.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now(), app_mode=app.config['APP_MODE'])
-
-@app.route('/fhir-ui-operations')
-def fhir_ui_operations():
-    form = FhirRequestForm()
-    return render_template('fhir_ui_operations.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now(), app_mode=app.config['APP_MODE'])
-
-# --- CORRECTED PROXY FUNCTION DEFINITION (Simplified Decorator) ---
-
-# Use a single route to capture everything after /fhir/
-# The 'path' converter handles slashes. 'subpath' can be empty.
-@app.route('/fhir', defaults={'subpath': ''}, methods=['GET', 'POST', 'PUT', 'DELETE'])
-@app.route('/fhir/', defaults={'subpath': ''}, methods=['GET', 'POST', 'PUT', 'DELETE'])
-@app.route('/fhir/<path:subpath>', methods=['GET', 'POST', 'PUT', 'DELETE'])
-def proxy_hapi(subpath):
-    """
-    Proxies FHIR requests to either the local HAPI server or a custom
-    target server specified by the 'X-Target-FHIR-Server' header.
-    Handles requests to /fhir/ (base, subpath='') and /fhir/<path:subpath>.
-    The route '/fhir' (no trailing slash) is handled separately for the UI.
- """ - # Clean subpath just in case prefixes were somehow included - clean_subpath = subpath.replace('r4/', '', 1).replace('fhir/', '', 1).strip('/') - logger.debug(f"Proxy received request for path: '/fhir/{subpath}', cleaned subpath: '{clean_subpath}'") - - # Determine the target FHIR server base URL - target_server_header = request.headers.get('X-Target-FHIR-Server') - final_base_url = None - is_custom_target = False - - if target_server_header: - try: - parsed_url = urlparse(target_server_header) - if not parsed_url.scheme or not parsed_url.netloc: - raise ValueError("Invalid URL format in X-Target-FHIR-Server header") - final_base_url = target_server_header.rstrip('/') - is_custom_target = True - logger.info(f"Proxy target identified from header: {final_base_url}") - except ValueError as e: - logger.warning(f"Invalid URL in X-Target-FHIR-Server header: '{target_server_header}'. Falling back. Error: {e}") - final_base_url = current_app.config['HAPI_FHIR_URL'].rstrip('/') - logger.debug(f"Falling back to default local HAPI due to invalid header: {final_base_url}") - else: - final_base_url = current_app.config['HAPI_FHIR_URL'].rstrip('/') - logger.debug(f"No target header found, proxying to default local HAPI: {final_base_url}") - - # Construct the final URL for the target server request - # Append the cleaned subpath only if it's not empty - final_url = f"{final_base_url}/{clean_subpath}" if clean_subpath else final_base_url - - # Prepare headers to forward - headers_to_forward = { - k: v for k, v in request.headers.items() - if k.lower() not in [ - 'host', 'x-target-fhir-server', 'content-length', 'connection', - 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', - 'trailers', 'transfer-encoding', 'upgrade' - ] - } - if 'Content-Type' in request.headers: - headers_to_forward['Content-Type'] = request.headers['Content-Type'] - if 'Accept' in request.headers: - headers_to_forward['Accept'] = request.headers['Accept'] - elif 'Accept' not in headers_to_forward: - headers_to_forward['Accept'] = 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' - - logger.info(f"Proxying request: {request.method} {final_url}") - request_data = request.get_data() - - try: - # Make the request - response = requests.request( - method=request.method, - url=final_url, - headers=headers_to_forward, - data=request_data, - cookies=request.cookies, - allow_redirects=False, - timeout=60 - ) - logger.info(f"Target server '{final_base_url}' responded with status: {response.status_code}") - response.raise_for_status() - - # Filter hop-by-hop headers - response_headers = { k: v for k, v in response.headers.items() if k.lower() not in ('transfer-encoding', 'connection', 'content-encoding', 'content-length', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'upgrade', 'server', 'date', 'x-powered-by', 'via', 'x-forwarded-for', 'x-forwarded-proto', 'x-request-id') } - response_content = response.content - response_headers['Content-Length'] = str(len(response_content)) - - # Create Flask response - resp = make_response(response_content) - resp.status_code = response.status_code - for key, value in response_headers.items(): resp.headers[key] = value - if 'Content-Type' in response.headers: resp.headers['Content-Type'] = response.headers['Content-Type'] - return resp - - # --- Exception Handling (same as previous version) --- - except requests.exceptions.Timeout: - error_msg = f"Request to the target FHIR server timed out: {final_url}" - logger.error(f"Proxy timeout error: 
{error_msg}") - return jsonify({'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'code': 'timeout', 'diagnostics': error_msg}]}), 504 - except requests.exceptions.ConnectionError as e: - target_name = 'custom server' if is_custom_target else 'local HAPI' - error_message = f"Could not connect to the target FHIR server ({target_name} at {final_base_url}). Please check the URL and server status." - logger.error(f"Proxy connection error: {error_message} - {str(e)}") - return jsonify({'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'code': 'exception', 'diagnostics': error_message, 'details': {'text': str(e)}}]}), 503 - except requests.exceptions.HTTPError as e: - logger.warning(f"Proxy received HTTP error from target {final_url}: {e.response.status_code}") - try: - error_response_headers = { k: v for k, v in e.response.headers.items() if k.lower() not in ('transfer-encoding', 'connection', 'content-encoding','content-length', 'keep-alive', 'proxy-authenticate','proxy-authorization', 'te', 'trailers', 'upgrade','server', 'date', 'x-powered-by', 'via', 'x-forwarded-for','x-forwarded-proto', 'x-request-id') } - error_content = e.response.content - error_response_headers['Content-Length'] = str(len(error_content)) - error_resp = make_response(error_content) - error_resp.status_code = e.response.status_code - for key, value in error_response_headers.items(): error_resp.headers[key] = value - if 'Content-Type' in e.response.headers: error_resp.headers['Content-Type'] = e.response.headers['Content-Type'] - return error_resp - except Exception as inner_e: - logger.error(f"Failed to process target server's error response: {inner_e}") - diag_text = f'Target server returned status {e.response.status_code}, but failed to forward its error details.' 
- return jsonify({'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'code': 'exception', 'diagnostics': diag_text, 'details': {'text': str(e)}}]}), e.response.status_code or 502 - except requests.exceptions.RequestException as e: - logger.error(f"Proxy request error for {final_url}: {str(e)}") - return jsonify({'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'code': 'exception', 'diagnostics': 'Error communicating with the target FHIR server.', 'details': {'text': str(e)}}]}), 502 - except Exception as e: - logger.error(f"Unexpected proxy error for {final_url}: {str(e)}", exc_info=True) - return jsonify({'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'code': 'exception', 'diagnostics': 'An unexpected error occurred within the FHIR proxy.', 'details': {'text': str(e)}}]}), 500 - -# --- End of corrected proxy_hapi function --- - - -@app.route('/api/load-ig-to-hapi', methods=['POST']) -@swag_from({ - 'tags': ['HAPI Integration'], - 'summary': 'Load an IG into the local HAPI FHIR server.', - 'description': 'Extracts all resources from a specified IG package and PUTs them to the configured HAPI FHIR server.', - 'security': [{'ApiKeyAuth': []}], - 'consumes': ['application/json'], - 'parameters': [ - { - 'name': 'body', - 'in': 'body', - 'required': True, - 'schema': { - 'type': 'object', - 'required': ['package_name', 'version'], - 'properties': { - 'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'}, - 'version': {'type': 'string', 'example': '6.1.0'} - } - } - } - ], - 'responses': { - '200': {'description': 'Package loaded to HAPI successfully.'}, - '400': {'description': 'Invalid request (e.g., missing package_name/version).'}, - '404': {'description': 'Package not found locally.'}, - '500': {'description': 'Error loading IG to HAPI (e.g., HAPI server connection issue, resource upload failure).'} - } -}) -def load_ig_to_hapi(): - data = request.get_json() - package_name = data.get('package_name') - version = data.get('version') - tgz_path = os.path.join(current_app.config['FHIR_PACKAGES_DIR'], construct_tgz_filename(package_name, version)) - if not os.path.exists(tgz_path): - return jsonify({"error": "Package not found"}), 404 - try: - with tarfile.open(tgz_path, "r:gz") as tar: - for member in tar.getmembers(): - if member.name.endswith('.json') and member.name not in ['package/package.json', 'package/.index.json']: - resource = json.load(tar.extractfile(member)) - resource_type = resource.get('resourceType') - resource_id = resource.get('id') - if resource_type and resource_id: - response = requests.put( - f"{current_app.config['HAPI_FHIR_URL'].rstrip('/')}/{resource_type}/{resource_id}", - json=resource, - headers={'Content-Type': 'application/fhir+json'} - ) - response.raise_for_status() - return jsonify({"status": "success", "message": f"Loaded {package_name}#{version} to HAPI"}) - except Exception as e: - logger.error(f"Failed to load IG to HAPI: {e}") - return jsonify({"error": str(e)}), 500 - - -# Assuming 'app' and 'logger' are defined, and other necessary imports are present above - -@app.route('/fsh-converter', methods=['GET', 'POST']) -def fsh_converter(): - form = FSHConverterForm() - fsh_output = None - error = None - comparison_report = None - - # --- Populate package choices --- - packages = [] - packages_dir = app.config.get('FHIR_PACKAGES_DIR', '/app/instance/fhir_packages') # Use .get with default - logger.debug(f"Scanning packages directory: {packages_dir}") - if os.path.exists(packages_dir): - 
tgz_files = [f for f in os.listdir(packages_dir) if f.endswith('.tgz')] - logger.debug(f"Found {len(tgz_files)} .tgz files: {tgz_files}") - for filename in tgz_files: - package_file_path = os.path.join(packages_dir, filename) - try: - # Check if it's a valid tar.gz file before opening - if not tarfile.is_tarfile(package_file_path): - logger.warning(f"Skipping non-tarfile or corrupted file: {filename}") - continue - - with tarfile.open(package_file_path, 'r:gz') as tar: - # Find package.json case-insensitively and handle potential path variations - package_json_path = next((m for m in tar.getmembers() if m.name.lower().endswith('package.json') and m.isfile() and ('/' not in m.name.replace('package/','', 1).lower())), None) # Handle package/ prefix better - - if package_json_path: - package_json_stream = tar.extractfile(package_json_path) - if package_json_stream: - try: - pkg_info = json.load(package_json_stream) - name = pkg_info.get('name') - version = pkg_info.get('version') - if name and version: - package_id = f"{name}#{version}" - packages.append((package_id, package_id)) - logger.debug(f"Added package: {package_id}") - else: - logger.warning(f"Missing name or version in {filename}/package.json: name={name}, version={version}") - except json.JSONDecodeError as json_e: - logger.warning(f"Error decoding package.json from {filename}: {json_e}") - except Exception as read_e: - logger.warning(f"Error reading stream from package.json in {filename}: {read_e}") - finally: - package_json_stream.close() # Ensure stream is closed - else: - logger.warning(f"Could not extract package.json stream from {filename} (path: {package_json_path.name})") - else: - logger.warning(f"No suitable package.json found in {filename}") - except tarfile.ReadError as tar_e: - logger.warning(f"Tarfile read error for {filename}: {tar_e}") - except Exception as e: - logger.warning(f"Error processing package {filename}: {str(e)}") - continue # Continue to next file - else: - logger.warning(f"Packages directory does not exist: {packages_dir}") - - unique_packages = sorted(list(set(packages)), key=lambda x: x[0]) - form.package.choices = [('', 'None')] + unique_packages - logger.debug(f"Set package choices: {form.package.choices}") - # --- End package choices --- - - if form.validate_on_submit(): # This block handles POST requests - input_mode = form.input_mode.data - # Use request.files.get to safely access file data - fhir_file_storage = request.files.get(form.fhir_file.name) - fhir_file = fhir_file_storage if fhir_file_storage and fhir_file_storage.filename != '' else None - - fhir_text = form.fhir_text.data - - alias_file_storage = request.files.get(form.alias_file.name) - alias_file = alias_file_storage if alias_file_storage and alias_file_storage.filename != '' else None - - output_style = form.output_style.data - log_level = form.log_level.data - fhir_version = form.fhir_version.data if form.fhir_version.data != 'auto' else None - fishing_trip = form.fishing_trip.data - dependencies = [dep.strip() for dep in form.dependencies.data.splitlines() if dep.strip()] if form.dependencies.data else None # Use splitlines() - indent_rules = form.indent_rules.data - meta_profile = form.meta_profile.data - no_alias = form.no_alias.data - - logger.debug(f"Processing input: mode={input_mode}, has_file={bool(fhir_file)}, has_text={bool(fhir_text)}, has_alias={bool(alias_file)}") - # Pass the FileStorage object directly if needed by process_fhir_input - input_file, temp_dir, alias_path, input_error = 
services.process_fhir_input(input_mode, fhir_file, fhir_text, alias_file) - - if input_error: - error = input_error - flash(error, 'error') - logger.error(f"Input processing error: {error}") - if temp_dir and os.path.exists(temp_dir): - try: shutil.rmtree(temp_dir, ignore_errors=True) - except Exception as cleanup_e: logger.warning(f"Error removing temp dir after input error {temp_dir}: {cleanup_e}") - else: - # Proceed only if input processing was successful - output_dir = os.path.join(app.config.get('UPLOAD_FOLDER', '/app/static/uploads'), 'fsh_output') # Use .get - os.makedirs(output_dir, exist_ok=True) - logger.debug(f"Running GoFSH with input: {input_file}, output_dir: {output_dir}") - # Pass form data directly to run_gofsh - fsh_output, comparison_report, gofsh_error = services.run_gofsh( - input_file, output_dir, output_style, log_level, fhir_version, - fishing_trip, dependencies, indent_rules, meta_profile, alias_path, no_alias - ) - # Clean up temp dir after GoFSH run - if temp_dir and os.path.exists(temp_dir): - try: - shutil.rmtree(temp_dir, ignore_errors=True) - logger.debug(f"Successfully removed temp directory: {temp_dir}") - except Exception as cleanup_e: - logger.warning(f"Error removing temp directory {temp_dir}: {cleanup_e}") - - if gofsh_error: - error = gofsh_error - flash(error, 'error') - logger.error(f"GoFSH error: {error}") - else: - # Store potentially large output carefully - session might have limits - session['fsh_output'] = fsh_output - flash('Conversion successful!', 'success') - logger.info("FSH conversion successful") - - # Return response for POST (AJAX or full page) - if request.headers.get('X-Requested-With') == 'XMLHttpRequest': - logger.debug("Returning partial HTML for AJAX POST request.") - return render_template('_fsh_output.html', form=form, error=error, fsh_output=fsh_output, comparison_report=comparison_report) - else: - # For standard POST, re-render the full page with results/errors - logger.debug("Handling standard POST request, rendering full page.") - return render_template('fsh_converter.html', form=form, error=error, fsh_output=fsh_output, comparison_report=comparison_report, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - - # --- Handle GET request (Initial Page Load or Failed POST Validation) --- - else: - if request.method == 'POST': # POST but validation failed - logger.warning("POST request failed form validation.") - # Render the full page, WTForms errors will be displayed by render_field - return render_template('fsh_converter.html', form=form, error="Form validation failed. 
Please check fields.", fsh_output=None, comparison_report=None, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - else: - # This is the initial GET request - logger.debug("Handling GET request for FSH converter page.") - # **** FIX APPLIED HERE **** - # Make the response object to add headers - response = make_response(render_template( - 'fsh_converter.html', - form=form, # Pass the empty form - error=None, - fsh_output=None, - comparison_report=None, - site_name='FHIRFLARE IG Toolkit', - now=datetime.datetime.now() - )) - # Add headers to prevent caching - response.headers['Cache-Control'] = 'no-store, no-cache, must-revalidate, max-age=0' - response.headers['Pragma'] = 'no-cache' - response.headers['Expires'] = '0' - return response - # **** END OF FIX **** - -@app.route('/download-fsh') -def download_fsh(): - fsh_output = session.get('fsh_output') - if not fsh_output: - flash('No FSH output available for download.', 'error') - return redirect(url_for('fsh_converter')) - - temp_file = os.path.join(app.config['UPLOAD_FOLDER'], 'output.fsh') - with open(temp_file, 'w', encoding='utf-8') as f: - f.write(fsh_output) - - return send_file(temp_file, as_attachment=True, download_name='output.fsh') - -@app.route('/upload-test-data', methods=['GET']) -def upload_test_data(): - """Renders the page for uploading test data.""" - form = TestDataUploadForm() - try: - processed_igs = ProcessedIg.query.order_by(ProcessedIg.package_name, ProcessedIg.version).all() - form.validation_package_id.choices = [('', '-- Select Package for Validation --')] + [ - (f"{ig.package_name}#{ig.version}", f"{ig.package_name}#{ig.version}") for ig in processed_igs ] - except Exception as e: - logger.error(f"Error fetching processed IGs: {e}") - flash("Could not load processed packages for validation.", "warning") - form.validation_package_id.choices = [('', '-- Error Loading Packages --')] - api_key = current_app.config.get('API_KEY', '') - return render_template('upload_test_data.html', title="Upload Test Data", form=form, api_key=api_key) - - -# --- Updated /api/upload-test-data Endpoint --- -@app.route('/api/upload-test-data', methods=['POST']) -@csrf.exempt -@swag_from({ - 'tags': ['Test Data Management'], - 'summary': 'Upload and process FHIR test data.', - 'description': 'Handles multipart/form-data uploads of FHIR resources (JSON, XML, or ZIP containing these) for processing and uploading to a target FHIR server. 
Returns an NDJSON stream of progress.', - 'security': [{'ApiKeyAuth': []}], - 'consumes': ['multipart/form-data'], - 'produces': ['application/x-ndjson'], - 'parameters': [ - {'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': True, 'format': 'url', 'description': 'Target FHIR server URL.'}, - {'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearerToken', 'basic'], 'default': 'none'}, - {'name': 'auth_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearerToken.'}, - {'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'}, - {'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'}, - {'name': 'test_data_files', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'One or more FHIR resource files (JSON, XML) or ZIP archives containing them.'}, - {'name': 'validate_before_upload', 'in': 'formData', 'type': 'boolean', 'default': False}, - {'name': 'validation_package_id', 'in': 'formData', 'type': 'string', 'description': 'Package ID (name#version) for validation, if validate_before_upload is true.'}, - {'name': 'upload_mode', 'in': 'formData', 'type': 'string', 'enum': ['individual', 'transaction'], 'default': 'individual'}, - {'name': 'use_conditional_uploads', 'in': 'formData', 'type': 'boolean', 'default': True, 'description': 'For individual mode, use conditional logic (GET then PUT/POST).'}, - {'name': 'error_handling', 'in': 'formData', 'type': 'string', 'enum': ['stop', 'continue'], 'default': 'stop'} - ], - 'responses': { - '200': {'description': 'NDJSON stream of upload progress and results.'}, - '400': {'description': 'Invalid request parameters or file types.'}, - '401': {'description': 'Authentication error.'}, - '413': {'description': 'Request entity too large.'}, - '500': {'description': 'Server error during upload processing.'} - } -}) -def api_upload_test_data(): - """API endpoint to handle test data upload and processing, using custom parser.""" - auth_error = check_api_key() - if auth_error: return auth_error - - temp_dir = None - try: - parser = CustomFormDataParser() - stream = request.stream - mimetype = request.mimetype - content_length = request.content_length - options = request.mimetype_params - _, form_data, files_data = parser.parse(stream, mimetype, content_length, options) - logger.debug(f"Form parsed using CustomFormDataParser. 
Form fields: {len(form_data)}, Files: {len(files_data)}") - - # --- Extract Form Data --- - fhir_server_url = form_data.get('fhir_server_url') - auth_type = form_data.get('auth_type', 'none') - auth_token = form_data.get('auth_token') - username = form_data.get('username') - password = form_data.get('password') - upload_mode = form_data.get('upload_mode', 'individual') - error_handling = form_data.get('error_handling', 'stop') - validate_before_upload_str = form_data.get('validate_before_upload', 'false') - validate_before_upload = validate_before_upload_str.lower() == 'true' - validation_package_id = form_data.get('validation_package_id') if validate_before_upload else None - use_conditional_uploads_str = form_data.get('use_conditional_uploads', 'false') - use_conditional_uploads = use_conditional_uploads_str.lower() == 'true' - - logger.debug(f"API Upload Request Params: validate={validate_before_upload}, pkg_id={validation_package_id}, conditional={use_conditional_uploads}") - - # --- Basic Validation --- - if not fhir_server_url or not fhir_server_url.startswith(('http://', 'https://')): - return jsonify({"status": "error", "message": "Invalid Target FHIR Server URL."}), 400 - if auth_type not in ['none', 'bearerToken', 'basic']: - return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400 - if auth_type == 'bearerToken' and not auth_token: - return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400 - if auth_type == 'basic' and (not username or not password): - return jsonify({"status": "error", "message": "Username and Password required for Basic Authentication."}), 400 - if upload_mode not in ['individual', 'transaction']: - return jsonify({"status": "error", "message": "Invalid Upload Mode."}), 400 - if error_handling not in ['stop', 'continue']: - return jsonify({"status": "error", "message": "Invalid Error Handling mode."}), 400 - if validate_before_upload and not validation_package_id: - return jsonify({"status": "error", "message": "Validation Package ID required."}), 400 - - # --- Handle File Uploads --- - uploaded_files = files_data.getlist('test_data_files') - if not uploaded_files or all(f.filename == '' for f in uploaded_files): - return jsonify({"status": "error", "message": "No files selected."}), 400 - - temp_dir = tempfile.mkdtemp(prefix='fhirflare_upload_') - saved_file_paths = [] - allowed_extensions = {'.json', '.xml', '.zip'} - try: - for file_storage in uploaded_files: - if file_storage and file_storage.filename: - filename = secure_filename(file_storage.filename) - file_ext = os.path.splitext(filename)[1].lower() - if file_ext not in allowed_extensions: - raise ValueError(f"Invalid file type: '{filename}'. 
Only JSON, XML, ZIP allowed.") - save_path = os.path.join(temp_dir, filename) - file_storage.save(save_path) - saved_file_paths.append(save_path) - if not saved_file_paths: - raise ValueError("No valid files saved.") - logger.debug(f"Saved {len(saved_file_paths)} files to {temp_dir}") - except ValueError as ve: - if temp_dir and os.path.exists(temp_dir): - shutil.rmtree(temp_dir) - logger.warning(f"Upload rejected: {ve}") - return jsonify({"status": "error", "message": str(ve)}), 400 - except Exception as file_err: - if temp_dir and os.path.exists(temp_dir): - shutil.rmtree(temp_dir) - logger.error(f"Error saving uploaded files: {file_err}", exc_info=True) - return jsonify({"status": "error", "message": "Error saving uploaded files."}), 500 - - # --- Prepare Server Info and Options --- - server_info = {'url': fhir_server_url, 'auth_type': auth_type} - if auth_type == 'bearer': - server_info['auth_token'] = auth_token - elif auth_type == 'basic': - credentials = f"{username}:{password}" - encoded_credentials = base64.b64encode(credentials.encode('utf-8')).decode('utf-8') - server_info['auth_token'] = f"Basic {encoded_credentials}" - options = { - 'upload_mode': upload_mode, - 'error_handling': error_handling, - 'validate_before_upload': validate_before_upload, - 'validation_package_id': validation_package_id, - 'use_conditional_uploads': use_conditional_uploads - } - - # --- Call Service Function (Streaming Response) --- - def generate_stream_wrapper(): - try: - with app.app_context(): - yield from services.process_and_upload_test_data(server_info, options, temp_dir) - finally: - try: - logger.debug(f"Cleaning up temp dir: {temp_dir}") - shutil.rmtree(temp_dir) - except Exception as cleanup_e: - logger.error(f"Error cleaning up temp dir {temp_dir}: {cleanup_e}") - - return Response(generate_stream_wrapper(), mimetype='application/x-ndjson') - - except RequestEntityTooLarge as e: - logger.error(f"RequestEntityTooLarge error in /api/upload-test-data despite custom parser: {e}", exc_info=True) - if temp_dir and os.path.exists(temp_dir): - try: - shutil.rmtree(temp_dir) - except Exception as cleanup_e: - logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}") - return jsonify({"status": "error", "message": f"Upload failed: Request entity too large. Try increasing parser limit or reducing files/size. ({str(e)})"}), 413 - - except Exception as e: - logger.error(f"Error in /api/upload-test-data: {e}", exc_info=True) - if temp_dir and os.path.exists(temp_dir): - try: - shutil.rmtree(temp_dir) - except Exception as cleanup_e: - logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}") - return jsonify({"status": "error", "message": f"Unexpected server error: {str(e)}"}), 500 - -@app.route('/retrieve-split-data', methods=['GET', 'POST']) -def retrieve_split_data(): - form = RetrieveSplitDataForm() - if form.validate_on_submit(): - if form.submit_retrieve.data: - session['retrieve_params'] = { - 'fhir_server_url': form.fhir_server_url.data, - 'validate_references': form.validate_references.data, - 'resources': request.form.getlist('resources') - } - if form.bundle_zip.data: - # Save uploaded ZIP to temporary file - temp_dir = tempfile.gettempdir() - zip_path = os.path.join(temp_dir, 'uploaded_bundles.zip') - form.bundle_zip.data.save(zip_path) - session['retrieve_params']['bundle_zip_path'] = zip_path - flash('Bundle retrieval initiated. 
Download will start after processing.', 'info') - elif form.submit_split.data: - # Save uploaded ZIP to temporary file - temp_dir = tempfile.gettempdir() - zip_path = os.path.join(temp_dir, 'split_bundles.zip') - form.split_bundle_zip.data.save(zip_path) - session['split_params'] = {'split_bundle_zip_path': zip_path} - flash('Bundle splitting initiated. Download will start after processing.', 'info') - return render_template('retrieve_split_data.html', form=form, site_name='FHIRFLARE IG Toolkit', - now=datetime.datetime.now(), app_mode=app.config['APP_MODE'], - api_key=app.config['API_KEY']) - -@app.route('/api/retrieve-bundles', methods=['POST']) -@csrf.exempt -@swag_from({ - 'tags': ['Test Data Management'], - 'summary': 'Retrieve FHIR resource bundles from a server.', - 'description': 'Fetches bundles for specified resource types from a FHIR server. Optionally fetches referenced resources. Returns an NDJSON stream and prepares a ZIP file for download.', - 'security': [{'ApiKeyAuth': []}], - 'consumes': ['application/x-www-form-urlencoded'], # Or multipart/form-data if files are involved - 'produces': ['application/x-ndjson'], - 'parameters': [ - {'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': False, 'format': 'url', 'description': 'Target FHIR server URL. Defaults to local proxy (/fhir).'}, - {'name': 'resources', 'in': 'formData', 'type': 'array', 'items': {'type': 'string'}, 'collectionFormat': 'multi', 'required': True, 'description': 'List of resource types to retrieve (e.g., Patient, Observation).'}, - {'name': 'validate_references', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'Fetch resources referenced by the initial bundles.'}, - {'name': 'fetch_reference_bundles', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'If fetching references, get full bundles for referenced types instead of individual resources.'}, - {'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearer', 'basic'], 'default': 'none'}, - {'name': 'bearer_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearer.'}, - {'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'}, - {'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'} - ], - 'responses': { - '200': { - 'description': 'NDJSON stream of retrieval progress. 
X-Zip-Path header indicates path to the created ZIP file.', - 'headers': { - 'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file.'} - } - }, - '400': {'description': 'Invalid request parameters.'}, - '401': {'description': 'Authentication error.'}, - '500': {'description': 'Server error during retrieval.'} - } -}) -def api_retrieve_bundles(): - auth_error = check_api_key() - if auth_error: - return auth_error - - # Use request.form for standard form data - params = request.form.to_dict() - resources = request.form.getlist('resources') - validate_references = params.get('validate_references', 'false').lower() == 'true' - fetch_reference_bundles = params.get('fetch_reference_bundles', 'false').lower() == 'true' - auth_type = params.get('auth_type', 'none') - bearer_token = params.get('bearer_token') - username = params.get('username') - password = params.get('password') - - # Get FHIR server URL, default to '/fhir' (local proxy) - fhir_server_url = params.get('fhir_server_url', '/fhir').strip() - if not fhir_server_url: - fhir_server_url = '/fhir' - - # Validation - if not resources: - return jsonify({"status": "error", "message": "No resources selected."}), 400 - valid_auth_types = ['none', 'bearer', 'basic'] - if auth_type not in valid_auth_types: - return jsonify({"status": "error", "message": f"Invalid auth_type. Must be one of {valid_auth_types}."}), 400 - if auth_type == 'bearer' and not bearer_token: - return jsonify({"status": "error", "message": "Bearer token required for bearer authentication."}), 400 - if auth_type == 'basic' and (not username or not password): - return jsonify({"status": "error", "message": "Username and password required for basic authentication."}), 400 - - # Handle authentication - auth_token = None - if auth_type == 'bearer': - auth_token = f"Bearer {bearer_token}" - elif auth_type == 'basic': - credentials = f"{username}:{password}" - auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}" - - logger.info(f"Retrieve API: Server='{fhir_server_url}', Resources={resources}, ValidateRefs={validate_references}, FetchRefBundles={fetch_reference_bundles}, AuthType={auth_type}") - - # Ensure the temp directory exists - temp_dir = tempfile.gettempdir() - zip_filename = f"retrieved_bundles_{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}.zip" - output_zip = os.path.join(temp_dir, zip_filename) - - def generate(): - try: - yield from services.retrieve_bundles( - fhir_server_url=fhir_server_url, - resources=resources, - output_zip=output_zip, - validate_references=validate_references, - fetch_reference_bundles=fetch_reference_bundles, - auth_type=auth_type, - auth_token=auth_token - ) - except Exception as e: - logger.error(f"Error in retrieve_bundles: {e}", exc_info=True) - yield json.dumps({"type": "error", "message": f"Unexpected error: {str(e)}"}) + "\n" - - response = Response(generate(), mimetype='application/x-ndjson') - response.headers['X-Zip-Path'] = os.path.join('/tmp', zip_filename) - return response - -@app.route('/api/split-bundles', methods=['POST']) -@swag_from({ - 'tags': ['Test Data Management'], - 'summary': 'Split FHIR bundles from a ZIP into individual resources.', - 'description': 'Takes a ZIP file containing FHIR bundles, extracts individual resources, and creates a new ZIP file with these resources. 
Returns an NDJSON stream of progress.',
-    'security': [{'ApiKeyAuth': []}],
-    'consumes': ['multipart/form-data'], # Assuming split_bundle_zip_path comes from a form that might include a file upload in other contexts, or it's a path string. If it's always a path string from a JSON body, change consumes.
-    'produces': ['application/x-ndjson'],
-    'parameters': [
-        # If split_bundle_zip_path is a path sent in form data:
-        {'name': 'split_bundle_zip_path', 'in': 'formData', 'type': 'string', 'required': True, 'description': 'Path to the input ZIP file containing bundles (server-side path).'},
-        # If it's an uploaded file:
-        # {'name': 'split_bundle_zip_file', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'ZIP file containing bundles to split.'}
-    ],
-    'responses': {
-        '200': {
-            'description': 'NDJSON stream of splitting progress. X-Zip-Path header indicates path to the output ZIP file.',
-            'headers': {
-                'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file with split resources.'}
-            }
-        },
-        '400': {'description': 'Invalid request (e.g., missing input ZIP path/file).'},
-        '401': {'description': 'Authentication error.'},
-        '500': {'description': 'Server error during splitting.'}
-    }
-})
-def api_split_bundles():
-    auth_error = check_api_key()
-    if auth_error:
-        return auth_error
-    params = request.form.to_dict()
-    input_zip_path = params.get('split_bundle_zip_path')
-    if not input_zip_path:
-        return jsonify({"status": "error", "message": "Missing input ZIP file."}), 400
-    temp_dir = tempfile.gettempdir()
-    output_zip = os.path.join(temp_dir, 'split_resources.zip')
-    def generate():
-        for message in split_bundles(input_zip_path, output_zip):
-            yield message
-    response = Response(generate(), mimetype='application/x-ndjson')
-    response.headers['X-Zip-Path'] = output_zip
-    return response
-
-@app.route('/tmp/<filename>', methods=['GET'])
-def serve_zip(filename):
-    file_path = os.path.join('/tmp', filename)
-    if not os.path.exists(file_path):
-        logger.error(f"ZIP file not found: {file_path}")
-        return jsonify({'error': 'File not found'}), 404
-    try:
-        return send_file(file_path, as_attachment=True, download_name=filename)
-    except Exception as e:
-        logger.error(f"Error serving ZIP file {file_path}: {str(e)}")
-        return jsonify({'error': 'Error serving file', 'details': str(e)}), 500
-
-@app.route('/clear-session', methods=['POST'])
-def clear_session():
-    session.pop('retrieve_params', None)
-    session.pop('split_params', None)
-    return jsonify({"status": "success", "message": "Session cleared"})
-
-
-@app.route('/api/package/<name>', methods=['GET'])
-@swag_from({
-    'tags': ['Package Management'],
-    'summary': 'Get details for a specific FHIR package.',
-    'description': 'Retrieves details for a FHIR IG package by its name.
Data is sourced from ProcessedIg, CachedPackage, or fetched live from registries.', - 'parameters': [ - {'name': 'name', 'in': 'path', 'type': 'string', 'required': True, 'description': 'The canonical name of the package (e.g., hl7.fhir.us.core).'} - ], - 'responses': { - '200': { - 'description': 'Package details.', - 'schema': { - 'type': 'object', - 'properties': { - 'name': {'type': 'string'}, - 'latest': {'type': 'string', 'description': 'Latest known version.'}, - 'author': {'type': 'string'}, - 'fhir_version': {'type': 'string'}, - 'version_count': {'type': 'integer'}, - 'url': {'type': 'string', 'format': 'url'} - } - } - }, - '404': {'description': 'Package not found.'} - } -}) -def package_details(name): - """ - Retrieve details for a specific FHIR Implementation Guide package by name. - Fetches from ProcessedIg or CachedPackage if not found in the database. - - Args: - name (str): The name of the package (e.g., 'hl7.fhir.us.core'). - - Returns: - JSON with package details (name, latest version, author, FHIR version, version count, URL) - or a 404 error if the package is not found. - """ - from services import fetch_packages_from_registries, normalize_package_data - - # Check ProcessedIg first (processed IGs) - package = ProcessedIg.query.filter_by(package_name=name).first() - if package: - return jsonify({ - 'name': package.package_name, - 'latest': package.version, - 'author': package.author, - 'fhir_version': package.fhir_version, - 'version_count': package.version_count, - 'url': package.url - }) - - # Check CachedPackage (cached packages) - package = CachedPackage.query.filter_by(package_name=name).first() - if package: - return jsonify({ - 'name': package.package_name, - 'latest': package.version, - 'author': package.author, - 'fhir_version': package.fhir_version, - 'version_count': package.version_count, - 'url': package.url - }) - - # Fetch from registries if not in database - logger.info(f"Package {name} not found in database. Fetching from registries.") - raw_packages = fetch_packages_from_registries(search_term=name) - normalized_packages = normalize_package_data(raw_packages) - package = next((pkg for pkg in normalized_packages if pkg['name'].lower() == name.lower()), None) - - if not package: - return jsonify({'error': 'Package not found'}), 404 - - return jsonify({ - 'name': package['name'], - 'latest': package['version'], - 'author': package['author'], - 'fhir_version': package['fhir_version'], - 'version_count': package['version_count'], - 'url': package['url'] - }) - -@app.route('/search-and-import') -def search_and_import(): - """ - Render the Search and Import page. Uses the database (CachedPackage) to load the package cache if available. - If not available, fetches from registries and caches the result. Displays latest official version if available, - otherwise falls back to latest absolute version. Shows fire animation and logs during cache loading. 
- """ - logger.debug("--- Entering search_and_import route (DB Cache Logic) ---") - page = request.args.get('page', 1, type=int) - per_page = 50 - - in_memory_packages = app.config.get('MANUAL_PACKAGE_CACHE') - in_memory_timestamp = app.config.get('MANUAL_CACHE_TIMESTAMP') - db_timestamp_info = RegistryCacheInfo.query.first() - db_timestamp = db_timestamp_info.last_fetch_timestamp if db_timestamp_info else None - logger.debug(f"DB Timestamp: {db_timestamp}, In-Memory Timestamp: {in_memory_timestamp}") - - normalized_packages = None - fetch_failed_flag = False - display_timestamp = None - is_fetching = False - - # Check if a fetch is in progress (stored in session) - fetch_in_progress = session.get('fetch_in_progress', False) - - if fetch_in_progress and in_memory_packages is not None: - # Fetch has completed, clear the session flag and proceed - session['fetch_in_progress'] = False - logger.info("Fetch completed, clearing fetch_in_progress flag.") - normalized_packages = in_memory_packages - display_timestamp = in_memory_timestamp - fetch_failed_flag = session.get('fetch_failed', False) - elif in_memory_packages is not None: - logger.info(f"Using in-memory cached package list from {in_memory_timestamp}.") - normalized_packages = in_memory_packages - display_timestamp = in_memory_timestamp - fetch_failed_flag = session.get('fetch_failed', False) - else: - # Check if there are cached packages in the database - try: - cached_packages = CachedPackage.query.all() - if cached_packages: - logger.info(f"Loading {len(cached_packages)} packages from CachedPackage table.") - # Reconstruct the normalized package format from the database entries - normalized_packages = [] - packages_by_name = {} - for pkg in cached_packages: - # Use getattr to provide defaults for potentially missing fields - pkg_data = { - 'name': pkg.package_name, - 'version': pkg.version, - 'latest_absolute_version': getattr(pkg, 'latest_absolute_version', pkg.version), - 'latest_official_version': getattr(pkg, 'latest_official_version', None), - 'author': getattr(pkg, 'author', ''), - 'fhir_version': getattr(pkg, 'fhir_version', ''), - 'url': getattr(pkg, 'url', ''), - 'canonical': getattr(pkg, 'canonical', ''), - 'dependencies': getattr(pkg, 'dependencies', []) or [], - 'version_count': getattr(pkg, 'version_count', 1), - 'all_versions': getattr(pkg, 'all_versions', [{'version': pkg.version, 'pubDate': ''}]) or [], - 'versions_data': [], - 'registry': getattr(pkg, 'registry', '') - } - # Group by package name to handle version aggregation - if pkg_data['name'] not in packages_by_name: - packages_by_name[pkg_data['name']] = pkg_data - normalized_packages.append(pkg_data) - else: - # Update all_versions for the existing package - existing_pkg = packages_by_name[pkg_data['name']] - if pkg_data['all_versions']: - existing_pkg['all_versions'].extend(pkg_data['all_versions']) - # Update version_count - existing_pkg['version_count'] = len(existing_pkg['all_versions']) - - # Sort all_versions within each package - for pkg in normalized_packages: - pkg['all_versions'].sort(key=lambda x: safe_parse_version(x.get('version', '0.0.0a0')), reverse=True) - - app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages - app.config['MANUAL_CACHE_TIMESTAMP'] = db_timestamp or datetime.datetime.now(datetime.timezone.utc) - display_timestamp = app.config['MANUAL_CACHE_TIMESTAMP'] - fetch_failed_flag = session.get('fetch_failed', False) - logger.info(f"Loaded {len(normalized_packages)} packages into in-memory cache from database.") - else: - 
logger.info("No packages found in CachedPackage table. Fetching from registries...") - is_fetching = True - except Exception as db_err: - logger.error(f"Error loading packages from CachedPackage table: {db_err}", exc_info=True) - flash("Error loading package cache from database. Fetching from registries...", "warning") - is_fetching = True - - # If no packages were loaded from the database, fetch from registries - if normalized_packages is None: - logger.info("Fetching package list from registries...") - try: - # Clear the log queue to capture fetch logs - while not log_queue.empty(): - log_queue.get() - - # Set session flag to indicate fetch is in progress - session['fetch_in_progress'] = True - - raw_packages = fetch_packages_from_registries(search_term='') - logger.debug(f"fetch_packages_from_registries returned {len(raw_packages)} raw packages.") - if not raw_packages: - logger.warning("No packages returned from registries during refresh.") - normalized_packages = [] - fetch_failed_flag = True - session['fetch_failed'] = True - app.config['MANUAL_PACKAGE_CACHE'] = [] - app.config['MANUAL_CACHE_TIMESTAMP'] = None - display_timestamp = db_timestamp - else: - logger.debug("Normalizing fetched packages...") - normalized_packages = normalize_package_data(raw_packages) - logger.debug(f"Normalization resulted in {len(normalized_packages)} unique packages.") - now_ts = datetime.datetime.now(datetime.timezone.utc) - app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages - app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts - app_state['fetch_failed'] = False - logger.info(f"Stored {len(normalized_packages)} packages in manual cache (memory).") - - # Save to CachedPackage table - try: - cache_packages(normalized_packages, db, CachedPackage) - except Exception as cache_err: - logger.error(f"Failed to cache packages in database: {cache_err}", exc_info=True) - flash("Error saving package cache to database.", "warning") - - if db_timestamp_info: - db_timestamp_info.last_fetch_timestamp = now_ts - else: - db_timestamp_info = RegistryCacheInfo(last_fetch_timestamp=now_ts) - db.session.add(db_timestamp_info) - try: - db.session.commit() - logger.info(f"Updated DB timestamp to {now_ts}") - except Exception as db_err: - db.session.rollback() - logger.error(f"Failed to update DB timestamp: {db_err}", exc_info=True) - flash("Failed to save cache timestamp to database.", "warning") - session['fetch_failed'] = False - fetch_failed_flag = False - display_timestamp = now_ts - - # Do not redirect here; let the template render with is_fetching=True - except Exception as fetch_err: - logger.error(f"Error during package fetch/normalization: {fetch_err}", exc_info=True) - normalized_packages = [] - fetch_failed_flag = True - session['fetch_failed'] = True - app.config['MANUAL_PACKAGE_CACHE'] = [] - app.config['MANUAL_CACHE_TIMESTAMP'] = None - display_timestamp = db_timestamp - flash("Error fetching package list from registries.", "error") - - if not isinstance(normalized_packages, list): - logger.error(f"normalized_packages is not a list (type: {type(normalized_packages)}). 
Using empty list.") - normalized_packages = [] - fetch_failed_flag = True - session['fetch_failed'] = True - display_timestamp = None - - total_packages = len(normalized_packages) if normalized_packages else 0 - start = (page - 1) * per_page - end = start + per_page - packages_processed_for_page = [] - if normalized_packages: - for pkg_data in normalized_packages: - # Fall back to latest_absolute_version if latest_official_version is None - display_version = pkg_data.get('latest_official_version') or pkg_data.get('latest_absolute_version') or 'N/A' - pkg_data['display_version'] = display_version - packages_processed_for_page.append(pkg_data) - - packages_on_page = packages_processed_for_page[start:end] - total_pages_calc = max(1, (total_packages + per_page - 1) // per_page) - - def iter_pages(left_edge=1, left_current=1, right_current=2, right_edge=1): - pages = [] - last_page = 0 - for i in range(1, min(left_edge + 1, total_pages_calc + 1)): - pages.append(i) - last_page = i - if last_page < page - left_current - 1: - pages.append(None) - for i in range(max(last_page + 1, page - left_current), min(page + right_current + 1, total_pages_calc + 1)): - pages.append(i) - last_page = i - if last_page < total_pages_calc - right_edge: - pages.append(None) - for i in range(max(last_page + 1, total_pages_calc - right_edge + 1), total_pages_calc + 1): - pages.append(i) - return pages - - pagination = SimpleNamespace( - items=packages_on_page, - page=page, - pages=total_pages_calc, - total=total_packages, - per_page=per_page, - has_prev=(page > 1), - has_next=(page < total_pages_calc), - prev_num=(page - 1 if page > 1 else None), - next_num=(page + 1 if page < total_pages_calc else None), - iter_pages=iter_pages() - ) - - form = IgImportForm() - logger.debug(f"--- Rendering search_and_import template (Page: {page}, Total: {total_packages}, Failed Fetch: {fetch_failed_flag}, Display TS: {display_timestamp}) ---") - - return render_template('search_and_import_ig.html', - packages=packages_on_page, - pagination=pagination, - form=form, - fetch_failed=fetch_failed_flag, - last_cached_timestamp=display_timestamp, - is_fetching=is_fetching) - -@app.route('/api/search-packages', methods=['GET'], endpoint='api_search_packages') -@swag_from({ - 'tags': ['Package Management'], - 'summary': 'Search FHIR packages (HTMX).', - 'description': 'Searches the in-memory package cache. Returns an HTML fragment for HTMX to display matching packages. Primarily for UI interaction.', - 'parameters': [ - {'name': 'search', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Search term for package name or author.'}, - {'name': 'page', 'in': 'query', 'type': 'integer', 'required': False, 'default': 1} - ], - 'produces': ['text/html'], - 'responses': { - '200': {'description': 'HTML fragment containing the search results table.'} - } -}) -def api_search_packages(): - """ - Handles HTMX search requests. Filters packages from the in-memory cache. - Returns an HTML fragment (_search_results_table.html) displaying the - latest official version if available, otherwise falls back to latest absolute version. - """ - search_term = request.args.get('search', '').lower() - page = request.args.get('page', 1, type=int) - per_page = 50 - logger.debug(f"API search request: term='{search_term}', page={page}") - - all_cached_packages = app.config.get('MANUAL_PACKAGE_CACHE') - if all_cached_packages is None: - logger.warning("API search called but in-memory cache is empty. 
Returning no results.") - return render_template('_search_results_table.html', packages=[], pagination=None) - - if search_term: - filtered_packages_raw = [ - pkg for pkg in all_cached_packages - if isinstance(pkg, dict) and ( - search_term in pkg.get('name', '').lower() or - search_term in pkg.get('author', '').lower() - ) - ] - logger.debug(f"Filtered {len(all_cached_packages)} cached packages down to {len(filtered_packages_raw)} for term '{search_term}'") - else: - filtered_packages_raw = all_cached_packages - logger.debug(f"No search term provided, using all {len(filtered_packages_raw)} cached packages.") - - filtered_packages_processed = [] - for pkg_data in filtered_packages_raw: - # Fall back to latest_absolute_version if latest_official_version is None - display_version = pkg_data.get('latest_official_version') or pkg_data.get('latest_absolute_version') or 'N/A' - pkg_data['display_version'] = display_version - filtered_packages_processed.append(pkg_data) - - total_filtered = len(filtered_packages_processed) - start = (page - 1) * per_page - end = start + per_page - packages_on_page = filtered_packages_processed[start:end] - total_pages_calc = max(1, (total_filtered + per_page - 1) // per_page) - - def iter_pages(left_edge=1, left_current=1, right_current=2, right_edge=1): - pages = [] - last_page = 0 - for i in range(1, min(left_edge + 1, total_pages_calc + 1)): - pages.append(i) - last_page = i - if last_page < page - left_current - 1: - pages.append(None) - for i in range(max(last_page + 1, page - left_current), min(page + right_current + 1, total_pages_calc + 1)): - pages.append(i) - last_page = i - if last_page < total_pages_calc - right_edge: - pages.append(None) - for i in range(max(last_page + 1, total_pages_calc - right_edge + 1), total_pages_calc + 1): - pages.append(i) - return pages - - pagination = SimpleNamespace( - items=packages_on_page, - page=page, - pages=total_pages_calc, - total=total_filtered, - per_page=per_page, - has_prev=(page > 1), - has_next=(page < total_pages_calc), - prev_num=(page - 1 if page > 1 else None), - next_num=(page + 1 if page < total_pages_calc else None), - iter_pages=iter_pages() - ) - - logger.debug(f"Rendering _search_results_table.html for API response (found {len(packages_on_page)} packages for page {page})") - html_response = render_template('_search_results_table.html', - packages=packages_on_page, - pagination=pagination) - return html_response - -def safe_parse_version_local(v_str): # Use different name - """ - Local copy of safe version parser for package_details_view. - """ - if not v_str or not isinstance(v_str, str): - return pkg_version_local.parse("0.0.0a0") - try: - return pkg_version_local.parse(v_str) - except pkg_version_local.InvalidVersion: - original_v_str = v_str - v_str_norm = v_str.lower() - base_part = v_str_norm.split('-', 1)[0] if '-' in v_str_norm else v_str_norm - suffix = v_str_norm.split('-', 1)[1] if '-' in v_str_norm else None - if re.match(r'^\d+(\.\d+)*$', base_part): - try: - if suffix in ['dev', 'snapshot', 'ci-build']: return pkg_version_local.parse(f"{base_part}a0") - elif suffix in ['draft', 'ballot', 'preview']: return pkg_version_local.parse(f"{base_part}b0") - elif suffix and suffix.startswith('rc'): return pkg_version_local.parse(f"{base_part}rc{ ''.join(filter(str.isdigit, suffix)) or '0'}") - return pkg_version_local.parse(base_part) - except pkg_version_local.InvalidVersion: - logger_details.warning(f"[DetailsView] Invalid base version '{base_part}' after splitting '{original_v_str}'. 
Treating as alpha.") - return pkg_version_local.parse("0.0.0a0") - except Exception as e: - logger_details.error(f"[DetailsView] Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}") - return pkg_version_local.parse("0.0.0a0") - else: - logger_details.warning(f"[DetailsView] Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha.") - return pkg_version_local.parse("0.0.0a0") - except Exception as e: - logger_details.error(f"[DetailsView] Unexpected error in safe_parse_version_local for '{v_str}': {e}") - return pkg_version_local.parse("0.0.0a0") -# --- End Local Helper Definition --- - -@app.route('/package-details/') -def package_details_view(name): - """Renders package details, using cache/db/fetch.""" - from services import get_package_description - packages = None - source = "Not Found" - - def safe_parse_version_local(v_str): - """ - Local version parser to handle FHIR package versions. - Uses pkg_version from services or falls back to basic comparison. - """ - if not v_str or not isinstance(v_str, str): - logger.warning(f"Invalid version string: {v_str}. Treating as 0.0.0a0.") - return pkg_version.parse("0.0.0a0") - try: - return pkg_version.parse(v_str) - except pkg_version.InvalidVersion: - original_v_str = v_str - v_str_norm = v_str.lower() - base_part = v_str_norm.split('-', 1)[0] if '-' in v_str_norm else v_str_norm - suffix = v_str_norm.split('-', 1)[1] if '-' in v_str_norm else None - if re.match(r'^\d+(\.\d+)+$', base_part): - try: - if suffix in ['dev', 'snapshot', 'ci-build']: - return pkg_version.parse(f"{base_part}a0") - elif suffix in ['draft', 'ballot', 'preview']: - return pkg_version.parse(f"{base_part}b0") - elif suffix and suffix.startswith('rc'): - rc_num = ''.join(filter(str.isdigit, suffix)) or '0' - return pkg_version.parse(f"{base_part}rc{rc_num}") - return pkg_version.parse(base_part) - except pkg_version.InvalidVersion: - logger.warning(f"Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha.") - return pkg_version.parse("0.0.0a0") - except Exception as e: - logger.error(f"Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}") - return pkg_version.parse("0.0.0a0") - else: - logger.warning(f"Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha.") - return pkg_version.parse("0.0.0a0") - except Exception as e: - logger.error(f"Unexpected error in safe_parse_version_local for '{v_str}': {e}") - return pkg_version.parse("0.0.0a0") - - in_memory_cache = app.config.get('MANUAL_PACKAGE_CACHE') - if in_memory_cache: - cached_data = [pkg for pkg in in_memory_cache if isinstance(pkg, dict) and pkg.get('name', '').lower() == name.lower()] - if cached_data: - packages = cached_data - source = "In-Memory Cache" - logger.debug(f"Package '{name}' found in in-memory cache.") - - if packages is None: - logger.debug(f"Package '{name}' not in memory cache. Checking database.") - try: - db_packages = CachedPackage.query.filter(CachedPackage.package_name.ilike(name)).all() - if db_packages: - packages = db_packages - source = "Database (CachedPackage)" - logger.debug(f"Package '{name}' found in CachedPackage DB.") - except Exception as db_err: - logger.error(f"Database error querying package '{name}': {db_err}", exc_info=True) - - if packages is None: - logger.info(f"Package '{name}' not found in cache or DB. 
Fetching from registries.") - source = "Fetched from Registries" - try: - raw_packages = fetch_packages_from_registries(search_term=name) - normalized_packages = normalize_package_data(raw_packages) - packages = [pkg for pkg in normalized_packages if pkg.get('name', '').lower() == name.lower()] - if not packages: - logger.warning(f"Fetch/Normalization for '{name}' resulted in zero packages.") - else: - logger.debug(f"Fetch/Normalization successful for '{name}'. Found {len(packages)} versions.") - except Exception as fetch_err: - logger.error(f"Error fetching/normalizing from registries for '{name}': {fetch_err}", exc_info=True) - flash(f"Error fetching package details for {name} from registries.", "error") - return redirect(url_for('search_and_import')) - - if not packages: - logger.warning(f"Package '{name}' could not be found from any source ({source}).") - flash(f"Package {name} not found.", "error") - return redirect(url_for('search_and_import')) - - is_dict_list = bool(isinstance(packages[0], dict)) - latest_absolute_version_str = None - latest_official_version_str = None - latest_absolute_data = None - all_versions = [] - dependencies = [] - - try: - if is_dict_list: - package = packages[0] - latest_absolute_version_str = package.get('latest_absolute_version') - latest_official_version_str = package.get('latest_official_version') - latest_absolute_data = package - all_versions = package.get('all_versions', []) - dependencies = package.get('dependencies', []) - else: - package = packages[0] - latest_absolute_version_str = getattr(package, 'version', None) - latest_official_version_str = getattr(package, 'latest_official_version', None) - latest_absolute_data = package - all_versions = getattr(package, 'all_versions', []) - dependencies = getattr(package, 'dependencies', []) - - if not all_versions: - logger.error(f"No versions found for package '{name}'. Package data: {package}") - flash(f"No versions found for package {name}.", "error") - return redirect(url_for('search_and_import')) - - except Exception as e: - logger.error(f"Error processing versions for {name}: {e}", exc_info=True) - flash(f"Error determining latest versions for {name}.", "error") - return redirect(url_for('search_and_import')) - - if not latest_absolute_data or not latest_absolute_version_str: - logger.error(f"Failed to determine latest version for '{name}'. 
Latest data: {latest_absolute_data}, Version: {latest_absolute_version_str}") - flash(f"Could not determine latest version details for {name}.", "error") - return redirect(url_for('search_and_import')) - - actual_package_name = None - package_json = {} - if isinstance(latest_absolute_data, dict): - actual_package_name = latest_absolute_data.get('name', name) - package_json = { - 'name': actual_package_name, - 'version': latest_absolute_version_str, - 'author': latest_absolute_data.get('author'), - 'fhir_version': latest_absolute_data.get('fhir_version'), - 'canonical': latest_absolute_data.get('canonical', ''), - 'dependencies': latest_absolute_data.get('dependencies', []), - 'url': latest_absolute_data.get('url'), - 'registry': latest_absolute_data.get('registry', 'https://packages.simplifier.net'), - 'description': get_package_description(actual_package_name, latest_absolute_version_str, app.config['FHIR_PACKAGES_DIR']) - } - else: - actual_package_name = getattr(latest_absolute_data, 'package_name', getattr(latest_absolute_data, 'name', name)) - package_json = { - 'name': actual_package_name, - 'version': latest_absolute_version_str, - 'author': getattr(latest_absolute_data, 'author', None), - 'fhir_version': getattr(latest_absolute_data, 'fhir_version', None), - 'canonical': getattr(latest_absolute_data, 'canonical', ''), - 'dependencies': getattr(latest_absolute_data, 'dependencies', []), - 'url': getattr(latest_absolute_data, 'url', None), - 'registry': getattr(latest_absolute_data, 'registry', 'https://packages.simplifier.net'), - 'description': get_package_description(actual_package_name, latest_absolute_version_str, app.config['FHIR_PACKAGES_DIR']) - } - - # Since all_versions now contains dictionaries with version and pubDate, extract just the version for display - versions_sorted = [] - try: - versions_sorted = sorted(all_versions, key=lambda x: safe_parse_version_local(x['version']), reverse=True) - except Exception as sort_err: - logger.warning(f"Version sorting failed for {name}: {sort_err}. Using basic reverse sort.") - versions_sorted = sorted(all_versions, key=lambda x: x['pubDate'], reverse=True) - - logger.info(f"Rendering details for package '{package_json.get('name')}' (Source: {source}). 
Latest: {latest_absolute_version_str}, Official: {latest_official_version_str}") - return render_template('package_details.html', - package_json=package_json, - dependencies=dependencies, - versions=[v['version'] for v in versions_sorted], - package_name=actual_package_name, - latest_official_version=latest_official_version_str) - - - -@app.route('/favicon.ico') -def favicon(): - return send_file(os.path.join(app.static_folder, 'favicon.ico'), mimetype='image/x-icon') - - -if __name__ == '__main__': - with app.app_context(): - logger.debug(f"Instance path configuration: {app.instance_path}") - logger.debug(f"Database URI: {app.config['SQLALCHEMY_DATABASE_URI']}") - logger.debug(f"Packages path: {app.config['FHIR_PACKAGES_DIR']}") - logger.debug(f"Flask instance folder path: {app.instance_path}") - logger.debug(f"Directories created/verified: Instance: {app.instance_path}, Packages: {app.config['FHIR_PACKAGES_DIR']}") - logger.debug(f"Attempting to create database tables for URI: {app.config['SQLALCHEMY_DATABASE_URI']}") - db.create_all() - logger.info("Database tables created successfully (if they didn't exist).") - app.run(host='0.0.0.0', port=5000, debug=False) diff --git a/assets/css/style.css b/assets/css/style.css new file mode 100644 index 0000000..ab5049f --- /dev/null +++ b/assets/css/style.css @@ -0,0 +1,137 @@ +/* generated by rouge http://rouge.jneen.net/ original base16 by Chris Kempson (https://github.com/chriskempson/base16) +*/ +@import url("https://fonts.googleapis.com/css?family=Lato:300italic,700italic,300,700"); +.highlight table td { padding: 5px; } + +.highlight table pre { margin: 0; } + +.highlight, .highlight .w { color: #d0d0d0; } + +.highlight .err { color: #151515; background-color: #ac4142; } + +.highlight .c, .highlight .cd, .highlight .cm, .highlight .c1, .highlight .cs { color: #888; } + +.highlight .cp { color: #f4bf75; } + +.highlight .nt { color: #f4bf75; } + +.highlight .o, .highlight .ow { color: #d0d0d0; } + +.highlight .p, .highlight .pi { color: #d0d0d0; } + +.highlight .gi { color: #90a959; } + +.highlight .gd { color: #ac4142; } + +.highlight .gh { color: #6a9fb5; font-weight: bold; } + +.highlight .k, .highlight .kn, .highlight .kp, .highlight .kr, .highlight .kv { color: #aa759f; } + +.highlight .kc { color: #d28445; } + +.highlight .kt { color: #d28445; } + +.highlight .kd { color: #d28445; } + +.highlight .s, .highlight .sb, .highlight .sc, .highlight .sd, .highlight .s2, .highlight .sh, .highlight .sx, .highlight .s1 { color: #90a959; } + +.highlight .sr { color: #75b5aa; } + +.highlight .si { color: #8f5536; } + +.highlight .se { color: #8f5536; } + +.highlight .nn { color: #f4bf75; } + +.highlight .nc { color: #f4bf75; } + +.highlight .no { color: #f4bf75; } + +.highlight .na { color: #6a9fb5; } + +.highlight .m, .highlight .mf, .highlight .mh, .highlight .mi, .highlight .il, .highlight .mo, .highlight .mb, .highlight .mx { color: #90a959; } + +.highlight .ss { color: #90a959; } + +html { background: #6C7989; background: #6C7989 linear-gradient(#6C7989, #434B55) fixed; height: 100%; } + +body { padding: 50px 0; margin: 0; font: 14px/1.5 Lato, "Helvetica Neue", Helvetica, Arial, sans-serif; color: #555; font-weight: 300; background: url("../images/checker.png") fixed; min-height: calc(100% - 100px); } + +.wrapper { width: 740px; margin: 0 auto; background: #DEDEDE; border-radius: 8px; box-shadow: rgba(0, 0, 0, 0.2) 0 0 0 1px, rgba(0, 0, 0, 0.45) 0 3px 10px; } + +header, section, footer { display: block; } + +a { color: #069; text-decoration: 
none; } + +p { margin: 0 0 20px; padding: 0; } + +strong { color: #222; font-weight: 700; } + +header { border-radius: 8px 8px 0 0; background: #C6EAFA; background: linear-gradient(#DDFBFC, #C6EAFA); position: relative; padding: 15px 20px; border-bottom: 1px solid #B2D2E1; } +header h1 { margin: 0; padding: 0; font-size: 24px; line-height: 1.2; color: #069; text-shadow: rgba(255, 255, 255, 0.9) 0 1px 0; } +header.without-description h1 { margin: 10px 0; } +header p { margin: 0; color: #61778B; width: 300px; font-size: 13px; } +header p.view { display: none; font-weight: 700; text-shadow: rgba(255, 255, 255, 0.9) 0 1px 0; -webkit-font-smoothing: antialiased; } +header p.view a { color: #06c; } +header p.view small { font-weight: 400; } +header ul { margin: 0; padding: 0; list-style: none; position: absolute; z-index: 1; right: 20px; top: 20px; height: 38px; padding: 1px 0; background: #5198DF; background: linear-gradient(#77B9FB, #3782CD); border-radius: 5px; box-shadow: inset rgba(255, 255, 255, 0.45) 0 1px 0, inset rgba(0, 0, 0, 0.2) 0 -1px 0; width: auto; } +header ul:before { content: ''; position: absolute; z-index: -1; left: -5px; top: -4px; right: -5px; bottom: -6px; background: rgba(0, 0, 0, 0.1); border-radius: 8px; box-shadow: rgba(0, 0, 0, 0.2) 0 -1px 0, inset rgba(255, 255, 255, 0.7) 0 -1px 0; } +header ul li { width: 79px; float: left; border-right: 1px solid #3A7CBE; height: 38px; } +header ul li.single { border: none; } +header ul li + li { width: 78px; border-left: 1px solid #8BBEF3; } +header ul li + li + li { border-right: none; width: 79px; } +header ul a { line-height: 1; font-size: 11px; color: #fff; color: rgba(255, 255, 255, 0.8); display: block; text-align: center; font-weight: 400; padding-top: 6px; height: 40px; text-shadow: rgba(0, 0, 0, 0.4) 0 -1px 0; } +header ul a strong { font-size: 14px; display: block; color: #fff; -webkit-font-smoothing: antialiased; } + +section { padding: 15px 20px; font-size: 15px; border-top: 1px solid #fff; background: linear-gradient(#fafafa, #DEDEDE 700px); border-radius: 0 0 8px 8px; position: relative; } + +h1, h2, h3, h4, h5, h6 { color: #222; padding: 0; margin: 0 0 20px; line-height: 1.2; } + +p, ul, ol, table, pre, dl { margin: 0 0 20px; } + +h1, h2, h3 { line-height: 1.1; } + +h1 { font-size: 28px; } + +h2 { color: #393939; } + +h3, h4, h5, h6 { color: #494949; } + +blockquote { margin: 0 -20px 20px; padding: 15px 20px 1px 40px; font-style: italic; background: #ccc; background: rgba(0, 0, 0, 0.06); color: #222; } + +img { max-width: 100%; } + +code, pre { font-family: Monaco, Bitstream Vera Sans Mono, Lucida Console, Terminal; color: #333; font-size: 12px; overflow-x: auto; } + +pre { padding: 20px; background: #3A3C42; color: #f8f8f2; margin: 0 -20px 20px; } +pre code { color: #f8f8f2; } +li pre { margin-left: -60px; padding-left: 60px; } + +table { width: 100%; border-collapse: collapse; } + +th, td { text-align: left; padding: 5px 10px; border-bottom: 1px solid #aaa; } + +dt { color: #222; font-weight: 700; } + +th { color: #222; } + +small { font-size: 11px; } + +hr { border: 0; background: #aaa; height: 1px; margin: 0 0 20px; } + +kbd { background-color: #fafbfc; border: 1px solid #c6cbd1; border-bottom-color: #959da5; border-radius: 3px; box-shadow: inset 0 -1px 0 #959da5; color: #444d56; display: inline-block; font-size: 11px; line-height: 10px; padding: 3px 5px; vertical-align: middle; } + +footer { width: 640px; margin: 0 auto; padding: 20px 0 0; color: #ccc; overflow: hidden; } +footer a { color: #fff; font-weight: 
bold; } +footer p { float: left; } +footer p + p { float: right; } + +@media print, screen and (max-width: 740px) { body { padding: 0; } + .wrapper { border-radius: 0; box-shadow: none; width: 100%; } + footer { border-radius: 0; padding: 20px; width: auto; } + footer p { float: none; margin: 0; } + footer p + p { float: none; } } +@media print, screen and (max-width: 580px) { header ul { display: none; } + header p.view { display: block; } + header p { width: 100%; } } +@media print { header p.view a small:before { content: 'at https://github.com/'; } } diff --git a/assets/images/checker.png b/assets/images/checker.png new file mode 100644 index 0000000..7a65b23 Binary files /dev/null and b/assets/images/checker.png differ diff --git a/assets/js/scale.fix.js b/assets/js/scale.fix.js new file mode 100644 index 0000000..08716c0 --- /dev/null +++ b/assets/js/scale.fix.js @@ -0,0 +1,20 @@ +fixScale = function(doc) { + + var addEvent = 'addEventListener', + type = 'gesturestart', + qsa = 'querySelectorAll', + scales = [1, 1], + meta = qsa in doc ? doc[qsa]('meta[name=viewport]') : []; + + function fix() { + meta.content = 'width=device-width,minimum-scale=' + scales[0] + ',maximum-scale=' + scales[1]; + doc.removeEventListener(type, fix, true); + } + + if ((meta = meta[meta.length - 1]) && addEvent in doc) { + fix(); + scales = [.25, 1.6]; + doc[addEvent](type, fix, true); + } + +}; \ No newline at end of file diff --git a/charts/.gitignore b/charts/.gitignore deleted file mode 100644 index 7368961..0000000 --- a/charts/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/hapi-fhir-jpaserver-0.20.0.tgz diff --git a/charts/fhirflare-ig-toolkit/.gitignore b/charts/fhirflare-ig-toolkit/.gitignore deleted file mode 100644 index 10d10c5..0000000 --- a/charts/fhirflare-ig-toolkit/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/rendered/ diff --git a/charts/fhirflare-ig-toolkit/Chart.yaml b/charts/fhirflare-ig-toolkit/Chart.yaml deleted file mode 100644 index 57c9cc2..0000000 --- a/charts/fhirflare-ig-toolkit/Chart.yaml +++ /dev/null @@ -1,16 +0,0 @@ -apiVersion: v2 -name: fhirflare-ig-toolkit -version: 0.4.0 -description: Helm chart for deploying the fhirflare-ig-toolkit application -type: application -appVersion: "latest" -icon: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit/raw/main/static/FHIRFLARE.png -keywords: - - fhir - - healthcare - - ig-toolkit - - implementation-guide -home: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit -maintainers: - - name: Jörn Guy Süß - email: jgsuess@gmail.com \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/templates/_helpers.tpl b/charts/fhirflare-ig-toolkit/templates/_helpers.tpl deleted file mode 100644 index 6383d80..0000000 --- a/charts/fhirflare-ig-toolkit/templates/_helpers.tpl +++ /dev/null @@ -1,152 +0,0 @@ -{{/* -Expand the name of the chart. -*/}} -{{- define "fhirflare-ig-toolkit.name" -}} -{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Create a default fully qualified app name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -If release name contains chart name it will be used as a full name. 
-*/}} -{{- define "fhirflare-ig-toolkit.fullname" -}} -{{- if .Values.fullnameOverride }} -{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- $name := default .Chart.Name .Values.nameOverride }} -{{- if contains $name .Release.Name }} -{{- .Release.Name | trunc 63 | trimSuffix "-" }} -{{- else }} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} -{{- end }} -{{- end }} -{{- end }} - -{{/* -Create chart name and version as used by the chart label. -*/}} -{{- define "fhirflare-ig-toolkit.chart" -}} -{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} -{{- end }} - -{{/* -Common labels -*/}} -{{- define "fhirflare-ig-toolkit.labels" -}} -helm.sh/chart: {{ include "fhirflare-ig-toolkit.chart" . }} -{{ include "fhirflare-ig-toolkit.selectorLabels" . }} -{{- if .Chart.AppVersion }} -app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} -{{- end }} -app.kubernetes.io/managed-by: {{ .Release.Service }} -{{- end }} - -{{/* -Selector labels -*/}} -{{- define "fhirflare-ig-toolkit.selectorLabels" -}} -app.kubernetes.io/name: {{ include "fhirflare-ig-toolkit.name" . }} -app.kubernetes.io/instance: {{ .Release.Name }} -{{- end }} - -{{/* -Create the name of the service account to use -*/}} -{{- define "hapi-fhir-jpaserver.serviceAccountName" -}} -{{- if .Values.serviceAccount.create }} -{{- default (include "hapi-fhir-jpaserver.fullname" .) .Values.serviceAccount.name }} -{{- else }} -{{- default "default" .Values.serviceAccount.name }} -{{- end }} -{{- end }} - -{{/* -Create a default fully qualified postgresql name. -We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). -*/}} -{{- define "hapi-fhir-jpaserver.postgresql.fullname" -}} -{{- $name := default "postgresql" .Values.postgresql.nameOverride -}} -{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} -{{- end -}} - -{{/* -Get the Postgresql credentials secret name. -*/}} -{{- define "hapi-fhir-jpaserver.postgresql.secretName" -}} -{{- if .Values.postgresql.enabled -}} - {{- if .Values.postgresql.auth.existingSecret -}} - {{- printf "%s" .Values.postgresql.auth.existingSecret -}} - {{- else -}} - {{- printf "%s" (include "hapi-fhir-jpaserver.postgresql.fullname" .) -}} - {{- end -}} -{{- else }} - {{- if .Values.externalDatabase.existingSecret -}} - {{- printf "%s" .Values.externalDatabase.existingSecret -}} - {{- else -}} - {{ printf "%s-%s" (include "hapi-fhir-jpaserver.fullname" .) "external-db" }} - {{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Get the Postgresql credentials secret key. -*/}} -{{- define "hapi-fhir-jpaserver.postgresql.secretKey" -}} -{{- if .Values.postgresql.enabled -}} - {{- if .Values.postgresql.auth.username -}} - {{- printf "%s" .Values.postgresql.auth.secretKeys.userPasswordKey -}} - {{- else -}} - {{- printf "%s" .Values.postgresql.auth.secretKeys.adminPasswordKey -}} - {{- end -}} -{{- else }} - {{- if .Values.externalDatabase.existingSecret -}} - {{- printf "%s" .Values.externalDatabase.existingSecretKey -}} - {{- else -}} - {{- printf "postgres-password" -}} - {{- end -}} -{{- end -}} -{{- end -}} - -{{/* -Add environment variables to configure database values -*/}} -{{- define "hapi-fhir-jpaserver.database.host" -}} -{{- ternary (include "hapi-fhir-jpaserver.postgresql.fullname" .) 
.Values.externalDatabase.host .Values.postgresql.enabled -}} -{{- end -}} - -{{/* -Add environment variables to configure database values -*/}} -{{- define "hapi-fhir-jpaserver.database.user" -}} -{{- if .Values.postgresql.enabled -}} - {{- printf "%s" .Values.postgresql.auth.username | default "postgres" -}} -{{- else -}} - {{- printf "%s" .Values.externalDatabase.user -}} -{{- end -}} -{{- end -}} - -{{/* -Add environment variables to configure database values -*/}} -{{- define "hapi-fhir-jpaserver.database.name" -}} -{{- ternary .Values.postgresql.auth.database .Values.externalDatabase.database .Values.postgresql.enabled -}} -{{- end -}} - -{{/* -Add environment variables to configure database values -*/}} -{{- define "hapi-fhir-jpaserver.database.port" -}} -{{- ternary "5432" .Values.externalDatabase.port .Values.postgresql.enabled -}} -{{- end -}} - -{{/* -Create the JDBC URL from the host, port and database name. -*/}} -{{- define "hapi-fhir-jpaserver.database.jdbcUrl" -}} -{{- $host := (include "hapi-fhir-jpaserver.database.host" .) -}} -{{- $port := (include "hapi-fhir-jpaserver.database.port" .) -}} -{{- $name := (include "hapi-fhir-jpaserver.database.name" .) -}} -{{- $appName := .Release.Name -}} -{{ printf "jdbc:postgresql://%s:%d/%s?ApplicationName=%s" $host (int $port) $name $appName }} -{{- end -}} \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/templates/deployment.yaml b/charts/fhirflare-ig-toolkit/templates/deployment.yaml deleted file mode 100644 index a4ceae0..0000000 --- a/charts/fhirflare-ig-toolkit/templates/deployment.yaml +++ /dev/null @@ -1,91 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "fhirflare-ig-toolkit.fullname" . }} - labels: - {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }} -spec: - replicas: {{ .Values.replicaCount | default 1 }} - selector: - matchLabels: - {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 6 }} - strategy: - type: Recreate - template: - metadata: - labels: - {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 8 }} - {{- with .Values.podAnnotations }} - annotations: - {{- toYaml . | nindent 8 }} - {{- end }} - spec: - {{- with .Values.imagePullSecrets }} - imagePullSecrets: - {{- toYaml . 
| nindent 8 }} - {{- end }} - securityContext: - {{- toYaml .Values.podSecurityContext | nindent 8 }} - containers: - - name: {{ .Chart.Name }} - securityContext: - {{- toYaml .Values.securityContext | nindent 12 }} - image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" - imagePullPolicy: {{ .Values.image.pullPolicy }} - args: ["supervisord", "-c", "/etc/supervisord.conf"] - env: - - name: APP_BASE_URL - value: {{ .Values.config.appBaseUrl | default "http://localhost:5000" | quote }} - - name: APP_MODE - value: {{ .Values.config.appMode | default "lite" | quote }} - - name: FLASK_APP - value: {{ .Values.config.flaskApp | default "app.py" | quote }} - - name: FLASK_ENV - value: {{ .Values.config.flaskEnv | default "development" | quote }} - - name: HAPI_FHIR_URL - value: {{ .Values.config.externalHapiServerUrl | default "http://external-hapi-fhir:8080/fhir" | quote }} - - name: NODE_PATH - value: {{ .Values.config.nodePath | default "/usr/lib/node_modules" | quote }} - - name: TMPDIR - value: "/tmp-dir" - ports: - - name: http - containerPort: {{ .Values.service.port | default 5000 }} - protocol: TCP - volumeMounts: - - name: logs - mountPath: /app/logs - - name: tmp-dir - mountPath: /tmp-dir - {{- with .Values.resources }} - resources: - {{- toYaml . | nindent 12 }} - {{- end }} - {{- with .Values.livenessProbe }} - livenessProbe: - {{- toYaml . | nindent 12 }} - {{- end }} - {{- with .Values.readinessProbe }} - readinessProbe: - {{- toYaml . | nindent 12 }} - {{- end }} - volumes: - - name: logs - emptyDir: {} - - name: tmp-dir - emptyDir: {} - # Always require Intel 64-bit architecture nodes - nodeSelector: - kubernetes.io/arch: amd64 - {{- with .Values.nodeSelector }} - # Merge with user-defined nodeSelectors if any - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.affinity }} - affinity: - {{- toYaml . | nindent 8 }} - {{- end }} - {{- with .Values.tolerations }} - tolerations: - {{- toYaml . | nindent 8 }} - {{- end }} \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/templates/ingress.yaml b/charts/fhirflare-ig-toolkit/templates/ingress.yaml deleted file mode 100644 index a2286ba..0000000 --- a/charts/fhirflare-ig-toolkit/templates/ingress.yaml +++ /dev/null @@ -1,36 +0,0 @@ -{{- if .Values.ingress.enabled -}} -{{- $fullName := include "fhirflare-ig-toolkit.fullname" . -}} -{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion }} -apiVersion: networking.k8s.io/v1 -{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion }} -apiVersion: networking.k8s.io/v1beta1 -{{- else }} -apiVersion: extensions/v1beta1 -{{- end }} -kind: Ingress -metadata: - name: {{ $fullName }} - labels: - {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }} - {{- with .Values.ingress.annotations }} - annotations: - {{- toYaml . 
| nindent 4 }} - {{- end }} -spec: - rules: - - http: - paths: - - path: / - {{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion }} - pathType: Prefix - backend: - service: - name: {{ $fullName }} - port: - number: {{ .Values.service.port | default 5000 }} - {{- else }} - backend: - serviceName: {{ $fullName }} - servicePort: {{ .Values.service.port | default 5000 }} - {{- end }} -{{- end }} \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/templates/service.yaml b/charts/fhirflare-ig-toolkit/templates/service.yaml deleted file mode 100644 index 467d861..0000000 --- a/charts/fhirflare-ig-toolkit/templates/service.yaml +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ include "fhirflare-ig-toolkit.fullname" . }} - labels: - {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }} -spec: - type: {{ .Values.service.type | default "ClusterIP" }} - ports: - - name: http - port: {{ .Values.service.port | default 5000 }} - targetPort: {{ .Values.service.port | default 5000 }} - protocol: TCP - {{- if and (eq .Values.service.type "NodePort") .Values.service.nodePort }} - nodePort: {{ .Values.service.nodePort }} - {{- end }} - selector: - {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 4 }} \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/templates/tests/test-endpoints.yaml b/charts/fhirflare-ig-toolkit/templates/tests/test-endpoints.yaml deleted file mode 100644 index 024b446..0000000 --- a/charts/fhirflare-ig-toolkit/templates/tests/test-endpoints.yaml +++ /dev/null @@ -1,41 +0,0 @@ -apiVersion: v1 -kind: Pod -metadata: - name: "{{ .Release.Name }}-fhirflare-test-endpoint" - labels: - helm.sh/chart: "{{ .Chart.Name }}-{{ .Chart.Version }}" - app.kubernetes.io/name: {{ .Chart.Name }} - app.kubernetes.io/instance: {{ .Release.Name }} - app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} - app.kubernetes.io/managed-by: {{ .Release.Service }} - app.kubernetes.io/component: tests - annotations: - "helm.sh/hook": test -spec: - restartPolicy: Never - containers: - - name: test-fhirflare-endpoint - image: curlimages/curl:8.12.1 - command: ["curl", "--fail-with-body", "--retry", "5", "--retry-delay", "10"] - args: ["http://fhirflare:5000"] - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - privileged: false - readOnlyRootFilesystem: true - runAsGroup: 65534 - runAsNonRoot: true - runAsUser: 65534 - seccompProfile: - type: RuntimeDefault - resources: - limits: - cpu: 150m - ephemeral-storage: 2Gi - memory: 192Mi - requests: - cpu: 100m - ephemeral-storage: 50Mi - memory: 128Mi \ No newline at end of file diff --git a/charts/fhirflare-ig-toolkit/values.yaml b/charts/fhirflare-ig-toolkit/values.yaml deleted file mode 100644 index 5283cf3..0000000 --- a/charts/fhirflare-ig-toolkit/values.yaml +++ /dev/null @@ -1,89 +0,0 @@ -# Default values for fhirflare-ig-toolkit -replicaCount: 1 - -image: - repository: ghcr.io/jgsuess/fhirflare-ig-toolkit - pullPolicy: Always - tag: "latest" - -imagePullSecrets: [] -nameOverride: "" -fullnameOverride: "" - -# FHIRflare specific configuration -config: - # Application mode: "lite" means using external HAPI server, "standalone" means running with embedded HAPI server - appMode: "lite" - # URL for the external HAPI FHIR server when in lite mode - externalHapiServerUrl: "http://external-hapi-fhir:8080/fhir" - appBaseUrl: "http://localhost:5000" - flaskApp: "app.py" - flaskEnv: "development" - nodePath: "/usr/lib/node_modules" - 
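The `config` block above is what the (now-removed) deployment template turns into container environment variables such as APP_MODE, HAPI_FHIR_URL and APP_BASE_URL. A minimal sketch of overriding those values at install time, assuming a local checkout of the chart; the release name, namespace and server URL below are illustrative placeholders rather than values from this repository:

```sh
# Sketch only: install the chart from a local checkout and point lite mode at
# an existing HAPI FHIR server. Release name, namespace and server URL are
# placeholders.
helm install flare-demo ./charts/fhirflare-ig-toolkit \
  --namespace flare --create-namespace \
  --set config.appMode=lite \
  --set config.externalHapiServerUrl=http://my-hapi-fhir:8080/fhir \
  --set config.appBaseUrl=http://localhost:5000
```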
-service: - type: ClusterIP - port: 5000 - nodePort: null - -podAnnotations: {} - -# podSecurityContext: -# fsGroup: 65532 -# fsGroupChangePolicy: OnRootMismatch -# runAsNonRoot: true -# runAsGroup: 65532 -# runAsUser: 65532 -# seccompProfile: -# type: RuntimeDefault - -# securityContext: -# allowPrivilegeEscalation: false -# capabilities: -# drop: -# - ALL -# privileged: false -# readOnlyRootFilesystem: true -# runAsGroup: 65532 -# runAsNonRoot: true -# runAsUser: 65532 -# seccompProfile: -# type: RuntimeDefault - -resources: - limits: - cpu: 500m - memory: 512Mi - ephemeral-storage: 1Gi - requests: - cpu: 100m - memory: 128Mi - ephemeral-storage: 100Mi - -livenessProbe: - httpGet: - path: / - port: http - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 5 - failureThreshold: 6 - successThreshold: 1 - -readinessProbe: - httpGet: - path: / - port: http - initialDelaySeconds: 5 - periodSeconds: 10 - timeoutSeconds: 5 - failureThreshold: 6 - successThreshold: 1 - -nodeSelector: {} -tolerations: [] -affinity: {} - -ingress: - # -- whether to create a primitive Ingress to expose the FHIR server HTTP endpoint - enabled: false \ No newline at end of file diff --git a/charts/install.sh b/charts/install.sh deleted file mode 100755 index de29f80..0000000 --- a/charts/install.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -# -# FHIRFLARE-IG-Toolkit Installation Script -# -# Description: -# This script installs the FHIRFLARE-IG-Toolkit Helm chart into a Kubernetes cluster. -# It adds the FHIRFLARE-IG-Toolkit Helm repository and then installs the chart -# in the 'flare' namespace, creating the namespace if it doesn't exist. -# -# Usage: -# ./install.sh -# -# Requirements: -# - Helm (v3+) -# - kubectl configured with access to your Kubernetes cluster -# - -# Add the FHIRFLARE-IG-Toolkit Helm repository -helm repo add flare https://jgsuess.github.io/FHIRFLARE-IG-Toolkit/ - -# Install the FHIRFLARE-IG-Toolkit chart in the 'flare' namespace - -helm install flare/fhirflare-ig-toolkit --namespace flare --create-namespace --generate-name --set hapi-fhir-jpaserver.postgresql.primary.persistence.storageClass=gp2 --atomic \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 0c1823f..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,22 +0,0 @@ -version: '3.8' -services: - fhirflare: - build: - context: . - dockerfile: Dockerfile - ports: - - "5000:5000" - - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite - volumes: - - ./instance:/app/instance - - ./static/uploads:/app/static/uploads - - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent - - ./logs:/app/logs - environment: - - FLASK_APP=app.py - - FLASK_ENV=development - - NODE_PATH=/usr/lib/node_modules - - APP_MODE=standalone - - APP_BASE_URL=http://localhost:5000 - - HAPI_FHIR_URL=http://localhost:8080/fhir - command: supervisord -c /etc/supervisord.conf diff --git a/docker-compose/all-in-one/docker-compose.yml b/docker-compose/all-in-one/docker-compose.yml deleted file mode 100644 index 73bcc63..0000000 --- a/docker-compose/all-in-one/docker-compose.yml +++ /dev/null @@ -1,22 +0,0 @@ -# This docker-compose file uses ephemeral Docker named volumes for all data storage. -# These volumes persist only as long as the Docker volumes exist and are deleted if you run `docker-compose down -v`. -# No data is stored on the host filesystem. If you want persistent storage, replace these with host mounts. 
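The comment above describes the whole lifecycle of these named volumes; the repository's own up.sh and down.sh scripts wrap the relevant commands. Spelled out, assuming the commands are run from the directory containing this docker-compose.yml:

```sh
docker compose up --detach      # creates the named volumes on first run
docker compose down             # stops the containers; named volumes are kept
docker compose down --volumes   # also removes the named volumes, deleting all stored data
```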
-services: - fhirflare-standalone: - image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest} - container_name: fhirflare-standalone - ports: - - "5000:5000" - - "8080:8080" - volumes: - - fhirflare-instance:/app/instance - - fhirflare-uploads:/app/static/uploads - - fhirflare-h2-data:/app/h2-data - - fhirflare-logs:/app/logs - restart: unless-stopped - -volumes: - fhirflare-instance: - fhirflare-uploads: - fhirflare-h2-data: - fhirflare-logs: \ No newline at end of file diff --git a/docker-compose/all-in-one/down.sh b/docker-compose/all-in-one/down.sh deleted file mode 100755 index 672f4de..0000000 --- a/docker-compose/all-in-one/down.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Stop and remove all containers defined in the Docker Compose file, -# along with any anonymous volumes attached to them. -docker compose down --volumes \ No newline at end of file diff --git a/docker-compose/all-in-one/up.sh b/docker-compose/all-in-one/up.sh deleted file mode 100755 index 2e87977..0000000 --- a/docker-compose/all-in-one/up.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Run Docker Compose - -docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps diff --git a/docker-compose/lite/local/application.yaml b/docker-compose/lite/local/application.yaml deleted file mode 100644 index 7e3fc4c..0000000 --- a/docker-compose/lite/local/application.yaml +++ /dev/null @@ -1,18 +0,0 @@ -hapi.fhir: - ig_runtime_upload_enabled: false - narrative_enabled: true - logical_urls: - - http://terminology.hl7.org/* - - https://terminology.hl7.org/* - - http://snomed.info/* - - https://snomed.info/* - - http://unitsofmeasure.org/* - - https://unitsofmeasure.org/* - - http://loinc.org/* - - https://loinc.org/* - cors: - allow_Credentials: true - allowed_origin: - - '*' - tester.home.name: FHIRFLARE Tester - inline_resource_storage_below_size: 4000 diff --git a/docker-compose/lite/local/docker-compose.yml b/docker-compose/lite/local/docker-compose.yml deleted file mode 100644 index 625279a..0000000 --- a/docker-compose/lite/local/docker-compose.yml +++ /dev/null @@ -1,50 +0,0 @@ -services: - fhirflare: - image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest} - ports: - - "5000:5000" - # Ephemeral Docker named volumes for all data storage. No data is stored on the host filesystem. 
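Once this lite/local stack is started with the up.sh script in this directory, a quick smoke test is to hit the two published ports noted in the accompanying readme. The check below is a sketch rather than part of the stack itself; /metadata is the standard FHIR capability endpoint:

```sh
# Smoke test for the lite/local stack (ports as documented in the readme).
curl -fsS http://localhost:5000/ -o /dev/null && echo "FHIRFLARE UI is up"
curl -fsS http://localhost:8080/fhir/metadata -o /dev/null && echo "HAPI FHIR server is up"
```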
- volumes: - - fhirflare-instance:/app/instance - - fhirflare-uploads:/app/static/uploads - - fhirflare-h2-data:/app/h2-data - - fhirflare-logs:/app/logs - environment: - - FLASK_APP=app.py - - FLASK_ENV=development - - NODE_PATH=/usr/lib/node_modules - - APP_MODE=lite - - APP_BASE_URL=http://localhost:5000 - - HAPI_FHIR_URL=http://fhir:8080/fhir - command: supervisord -c /etc/supervisord.conf - - fhir: - container_name: hapi - image: "hapiproject/hapi:v8.2.0-1" - ports: - - "8080:8080" - configs: - - source: hapi - target: /app/config/application.yaml - depends_on: - - db - - db: - image: "postgres:17.2-bookworm" - restart: always - environment: - POSTGRES_PASSWORD: admin - POSTGRES_USER: admin - POSTGRES_DB: hapi - volumes: - - ./hapi.postgress.data:/var/lib/postgresql/data - -configs: - hapi: - file: ./application.yaml - -volumes: - fhirflare-instance: - fhirflare-uploads: - fhirflare-h2-data: - fhirflare-logs: \ No newline at end of file diff --git a/docker-compose/lite/local/down.sh b/docker-compose/lite/local/down.sh deleted file mode 100755 index 672f4de..0000000 --- a/docker-compose/lite/local/down.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Stop and remove all containers defined in the Docker Compose file, -# along with any anonymous volumes attached to them. -docker compose down --volumes \ No newline at end of file diff --git a/docker-compose/lite/local/readme.md b/docker-compose/lite/local/readme.md deleted file mode 100644 index 6051cd5..0000000 --- a/docker-compose/lite/local/readme.md +++ /dev/null @@ -1,19 +0,0 @@ -# FHIRFLARE IG Toolkit - -This directory provides scripts and configuration to start and stop a FHIRFLARE instance with an attached HAPI FHIR server using Docker Compose. - -## Usage - -- To start the FHIRFLARE toolkit and HAPI server: - ```sh - ./docker-compose/up.sh - ``` - -- To stop and remove the containers and volumes: - ```sh - ./docker-compose/down.sh - ``` - -The web interface will be available at [http://localhost:5000](http://localhost:5000) and the HAPI FHIR server at [http://localhost:8080/fhir](http://localhost:8080/fhir). - -For more details, see the configuration files in this directory. \ No newline at end of file diff --git a/docker-compose/lite/local/up.sh b/docker-compose/lite/local/up.sh deleted file mode 100755 index 2e87977..0000000 --- a/docker-compose/lite/local/up.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Run Docker Compose - -docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps diff --git a/docker-compose/lite/remote/docker-compose.yml b/docker-compose/lite/remote/docker-compose.yml deleted file mode 100644 index 4e7bf3b..0000000 --- a/docker-compose/lite/remote/docker-compose.yml +++ /dev/null @@ -1,25 +0,0 @@ -services: - fhirflare: - image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest} - ports: - - "5000:5000" - # Ephemeral Docker named volumes for all data storage. No data is stored on the host filesystem. 
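This remote variant has no bundled HAPI server and relies entirely on the endpoint configured in HAPI_FHIR_URL below, so a reasonable pre-flight check (a sketch using the standard FHIR capability request, not a command from this repository) is:

```sh
# Confirm the remote FHIR server answers a capability request, then start the toolkit.
curl -fsS https://cdr.fhirlab.net/fhir/metadata -o /dev/null && echo "remote FHIR server reachable"
docker compose up --detach
```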
- volumes: - - fhirflare-instance:/app/instance - - fhirflare-uploads:/app/static/uploads - - fhirflare-h2-data:/app/h2-data - - fhirflare-logs:/app/logs - environment: - - FLASK_APP=app.py - - FLASK_ENV=development - - NODE_PATH=/usr/lib/node_modules - - APP_MODE=lite - - APP_BASE_URL=http://localhost:5000 - - HAPI_FHIR_URL=https://cdr.fhirlab.net/fhir - command: supervisord -c /etc/supervisord.conf - -volumes: - fhirflare-instance: - fhirflare-uploads: - fhirflare-h2-data: - fhirflare-logs: \ No newline at end of file diff --git a/docker-compose/lite/remote/down.sh b/docker-compose/lite/remote/down.sh deleted file mode 100755 index 672f4de..0000000 --- a/docker-compose/lite/remote/down.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Stop and remove all containers defined in the Docker Compose file, -# along with any anonymous volumes attached to them. -docker compose down --volumes \ No newline at end of file diff --git a/docker-compose/lite/remote/readme.md b/docker-compose/lite/remote/readme.md deleted file mode 100644 index 6051cd5..0000000 --- a/docker-compose/lite/remote/readme.md +++ /dev/null @@ -1,19 +0,0 @@ -# FHIRFLARE IG Toolkit - -This directory provides scripts and configuration to start and stop a FHIRFLARE instance with an attached HAPI FHIR server using Docker Compose. - -## Usage - -- To start the FHIRFLARE toolkit and HAPI server: - ```sh - ./docker-compose/up.sh - ``` - -- To stop and remove the containers and volumes: - ```sh - ./docker-compose/down.sh - ``` - -The web interface will be available at [http://localhost:5000](http://localhost:5000) and the HAPI FHIR server at [http://localhost:8080/fhir](http://localhost:8080/fhir). - -For more details, see the configuration files in this directory. \ No newline at end of file diff --git a/docker-compose/lite/remote/up.sh b/docker-compose/lite/remote/up.sh deleted file mode 100755 index 2e87977..0000000 --- a/docker-compose/lite/remote/up.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# Run Docker Compose - -docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100644 index 0978d89..0000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,66 +0,0 @@ -# ------------------------------------------------------------------------------ -# Dockerfile for FHIRFLARE-IG-Toolkit (Optimized for Python/Flask) -# -# This Dockerfile builds a container for the FHIRFLARE-IG-Toolkit application. -# -# Key Features: -# - Uses python:3.11-slim as the base image for a minimal, secure Python runtime. -# - Installs Node.js and global NPM packages (gofsh, fsh-sushi) for FHIR IG tooling. -# - Sets up a Python virtual environment and installs all Python dependencies. -# - Installs and configures Supervisor to manage the Flask app and related processes. -# - Copies all necessary application code, templates, static files, and configuration. -# - Exposes ports 5000 (Flask) and 8080 (optional, for compatibility). -# - Entrypoint runs Supervisor for process management. -# -# Notes: -# - The Dockerfile is optimized for Python. Tomcat/Java is not included. -# - Node.js is only installed if needed for FHIR IG tooling. -# - The image is suitable for development and production with minimal changes. 
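The notes above describe an image that runs supervisord and exposes ports 5000 and 8080 but does not include Tomcat/Java. As a rough sketch (not part of this Dockerfile), an image built with docker/build-docker.sh could be started in lite mode against an external HAPI server; the tag comes from build-docker.sh and the server URL is the chart's placeholder default:

```sh
# Sketch: run the locally built Python-only image in lite mode against an
# external HAPI FHIR server (server URL is a placeholder).
docker run --rm -p 5000:5000 \
  -e APP_MODE=lite \
  -e APP_BASE_URL=http://localhost:5000 \
  -e HAPI_FHIR_URL=http://external-hapi-fhir:8080/fhir \
  fhirflare-ig-toolkit:latest
```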
-# ------------------------------------------------------------------------------ - -# Optimized Dockerfile for Python (Flask) -FROM python:3.11-slim AS base - -# Install system dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - curl \ - coreutils \ - && rm -rf /var/lib/apt/lists/* - -# Optional: Install Node.js if needed for GoFSH/SUSHI -RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \ - && apt-get install -y --no-install-recommends nodejs \ - && npm install -g gofsh fsh-sushi \ - && rm -rf /var/lib/apt/lists/* - -# Set workdir -WORKDIR /app - -# Copy requirements and install Python dependencies -COPY requirements.txt . -RUN python -m venv /app/venv \ - && . /app/venv/bin/activate \ - && pip install --upgrade pip \ - && pip install --no-cache-dir -r requirements.txt \ - && pip uninstall -y fhirpath || true \ - && pip install --no-cache-dir fhirpathpy \ - && pip install supervisor - -# Copy application files -COPY app.py . -COPY services.py . -COPY forms.py . -COPY package.py . -COPY templates/ templates/ -COPY static/ static/ -COPY tests/ tests/ -COPY supervisord.conf /etc/supervisord.conf - -# Expose ports -EXPOSE 5000 8080 - -# Set environment -ENV PATH="/app/venv/bin:$PATH" - -# Start supervisord -CMD ["supervisord", "-c", "/etc/supervisord.conf"] \ No newline at end of file diff --git a/docker/build-docker.sh b/docker/build-docker.sh deleted file mode 100755 index f0eea0b..0000000 --- a/docker/build-docker.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -# Build FHIRFLARE-IG-Toolkit Docker image - -# Build the image using the Dockerfile in the docker directory -docker build -f Dockerfile -t fhirflare-ig-toolkit:latest .. - -echo "Docker image built successfully" \ No newline at end of file diff --git a/forms.py b/forms.py deleted file mode 100644 index 71b26e8..0000000 --- a/forms.py +++ /dev/null @@ -1,309 +0,0 @@ -# forms.py -from flask_wtf import FlaskForm -from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField, FileField, PasswordField -from wtforms.validators import DataRequired, Regexp, ValidationError, URL, Optional, InputRequired -from flask import request -import json -import xml.etree.ElementTree as ET -import re -import logging -import os - -logger = logging.getLogger(__name__) - -class RetrieveSplitDataForm(FlaskForm): - """Form for retrieving FHIR bundles and splitting them into individual resources.""" - fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()], - render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'}) - auth_type = SelectField('Authentication Type (for Custom URL)', choices=[ - ('none', 'None'), - ('bearerToken', 'Bearer Token'), - ('basicAuth', 'Basic Authentication') - ], default='none', validators=[Optional()]) - auth_token = StringField('Bearer Token', validators=[Optional()], - render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'}) - basic_auth_username = StringField('Username', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Username'}) - basic_auth_password = PasswordField('Password', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Password'}) - validate_references = BooleanField('Fetch Referenced Resources', default=False, - description="If checked, fetches resources referenced by the initial bundles.") - fetch_reference_bundles = BooleanField('Fetch Full Reference Bundles (instead of individual resources)', default=False, - description="Requires 'Fetch Referenced Resources'. 
Fetches e.g. /Patient instead of Patient/id for each reference.", - render_kw={'data-dependency': 'validate_references'}) - split_bundle_zip = FileField('Upload Bundles to Split (ZIP)', validators=[Optional()], - render_kw={'accept': '.zip'}) - submit_retrieve = SubmitField('Retrieve Bundles') - submit_split = SubmitField('Split Bundles') - - def validate(self, extra_validators=None): - if not super().validate(extra_validators): - return False - if self.fetch_reference_bundles.data and not self.validate_references.data: - self.fetch_reference_bundles.errors.append('Cannot fetch full reference bundles unless "Fetch Referenced Resources" is also checked.') - return False - if self.auth_type.data == 'bearerToken' and self.submit_retrieve.data and not self.auth_token.data: - self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.') - return False - if self.auth_type.data == 'basicAuth' and self.submit_retrieve.data: - if not self.basic_auth_username.data: - self.basic_auth_username.errors.append('Username is required for Basic Authentication.') - return False - if not self.basic_auth_password.data: - self.basic_auth_password.errors.append('Password is required for Basic Authentication.') - return False - if self.split_bundle_zip.data: - if not self.split_bundle_zip.data.filename.lower().endswith('.zip'): - self.split_bundle_zip.errors.append('File must be a ZIP file.') - return False - return True - -class IgImportForm(FlaskForm): - """Form for importing Implementation Guides.""" - package_name = StringField('Package Name', validators=[ - DataRequired(), - Regexp(r'^[a-zA-Z0-9][a-zA-Z0-9\-\.]*[a-zA-Z0-9]$', message="Invalid package name format.") - ], render_kw={'placeholder': 'e.g., hl7.fhir.au.core'}) - package_version = StringField('Package Version', validators=[ - DataRequired(), - Regexp(r'^[a-zA-Z0-9\.\-]+$', message="Invalid version format. 
Use alphanumeric characters, dots, or hyphens (e.g., 1.2.3, 1.1.0-preview, current).") - ], render_kw={'placeholder': 'e.g., 1.1.0-preview'}) - dependency_mode = SelectField('Dependency Mode', choices=[ - ('recursive', 'Current Recursive'), - ('patch-canonical', 'Patch Canonical Versions'), - ('tree-shaking', 'Tree Shaking (Only Used Dependencies)') - ], default='recursive') - submit = SubmitField('Import') - -class ManualIgImportForm(FlaskForm): - """Form for manual importing Implementation Guides via file or URL.""" - import_mode = SelectField('Import Mode', choices=[ - ('file', 'Upload File'), - ('url', 'From URL') - ], default='file', validators=[DataRequired()]) - tgz_file = FileField('IG Package File (.tgz)', validators=[Optional()], - render_kw={'accept': '.tgz'}) - tgz_url = StringField('IG Package URL', validators=[Optional(), URL()], - render_kw={'placeholder': 'e.g., https://example.com/hl7.fhir.au.core-1.1.0-preview.tgz'}) - dependency_mode = SelectField('Dependency Mode', choices=[ - ('recursive', 'Current Recursive'), - ('patch-canonical', 'Patch Canonical Versions'), - ('tree-shaking', 'Tree Shaking (Only Used Dependencies)') - ], default='recursive') - resolve_dependencies = BooleanField('Resolve Dependencies', default=True, - render_kw={'class': 'form-check-input'}) - submit = SubmitField('Import') - - def validate(self, extra_validators=None): - if not super().validate(extra_validators): - return False - mode = self.import_mode.data - has_file = request and request.files and self.tgz_file.name in request.files and request.files[self.tgz_file.name].filename != '' - has_url = bool(self.tgz_url.data) # Convert to boolean: True if non-empty string - - # Ensure exactly one input method is used - inputs_provided = sum([has_file, has_url]) - if inputs_provided != 1: - if inputs_provided == 0: - self.import_mode.errors.append('Please provide input for one import method (File or URL).') - else: - self.import_mode.errors.append('Please use only one import method at a time.') - return False - - # Validate based on import mode - if mode == 'file': - if not has_file: - self.tgz_file.errors.append('A .tgz file is required for File import.') - return False - if not self.tgz_file.data.filename.lower().endswith('.tgz'): - self.tgz_file.errors.append('File must be a .tgz file.') - return False - elif mode == 'url': - if not has_url: - self.tgz_url.errors.append('A valid URL is required for URL import.') - return False - if not self.tgz_url.data.lower().endswith('.tgz'): - self.tgz_url.errors.append('URL must point to a .tgz file.') - return False - else: - self.import_mode.errors.append('Invalid import mode selected.') - return False - - return True - -class ValidationForm(FlaskForm): - """Form for validating FHIR samples.""" - package_name = StringField('Package Name', validators=[DataRequired()]) - version = StringField('Package Version', validators=[DataRequired()]) - include_dependencies = BooleanField('Include Dependencies', default=True) - mode = SelectField('Validation Mode', choices=[ - ('single', 'Single Resource'), - ('bundle', 'Bundle') - ], default='single') - sample_input = TextAreaField('Sample Input', validators=[ - DataRequired(), - ]) - submit = SubmitField('Validate') - -class FSHConverterForm(FlaskForm): - """Form for converting FHIR resources to FSH.""" - package = SelectField('FHIR Package (Optional)', choices=[('', 'None')], validators=[Optional()]) - input_mode = SelectField('Input Mode', choices=[ - ('file', 'Upload File'), - ('text', 'Paste Text') - ], 
validators=[DataRequired()]) - fhir_file = FileField('FHIR Resource File (JSON/XML)', validators=[Optional()]) - fhir_text = TextAreaField('FHIR Resource Text (JSON/XML)', validators=[Optional()]) - output_style = SelectField('Output Style', choices=[ - ('file-per-definition', 'File per Definition'), - ('group-by-fsh-type', 'Group by FSH Type'), - ('group-by-profile', 'Group by Profile'), - ('single-file', 'Single File') - ], validators=[DataRequired()]) - log_level = SelectField('Log Level', choices=[ - ('error', 'Error'), - ('warn', 'Warn'), - ('info', 'Info'), - ('debug', 'Debug') - ], validators=[DataRequired()]) - fhir_version = SelectField('FHIR Version', choices=[ - ('', 'Auto-detect'), - ('4.0.1', 'R4'), - ('4.3.0', 'R4B'), - ('5.0.0', 'R5') - ], validators=[Optional()]) - fishing_trip = BooleanField('Run Fishing Trip (Round-Trip Validation with SUSHI)', default=False) - dependencies = TextAreaField('Dependencies (e.g., hl7.fhir.us.core@6.1.0)', validators=[Optional()]) - indent_rules = BooleanField('Indent Rules with Context Paths', default=False) - meta_profile = SelectField('Meta Profile Handling', choices=[ - ('only-one', 'Only One Profile (Default)'), - ('first', 'First Profile'), - ('none', 'Ignore Profiles') - ], validators=[DataRequired()]) - alias_file = FileField('Alias FSH File', validators=[Optional()]) - no_alias = BooleanField('Disable Alias Generation', default=False) - submit = SubmitField('Convert to FSH') - - def validate(self, extra_validators=None): - if not super().validate(extra_validators): - return False - has_file_in_request = request and request.files and self.fhir_file.name in request.files and request.files[self.fhir_file.name].filename != '' - if self.input_mode.data == 'file' and not has_file_in_request: - if not self.fhir_file.data: - self.fhir_file.errors.append('File is required when input mode is Upload File.') - return False - if self.input_mode.data == 'text' and not self.fhir_text.data: - self.fhir_text.errors.append('Text input is required when input mode is Paste Text.') - return False - if self.input_mode.data == 'text' and self.fhir_text.data: - try: - content = self.fhir_text.data.strip() - if not content: pass - elif content.startswith('{'): json.loads(content) - elif content.startswith('<'): ET.fromstring(content) - else: - self.fhir_text.errors.append('Text input must be valid JSON or XML.') - return False - except (json.JSONDecodeError, ET.ParseError): - self.fhir_text.errors.append('Invalid JSON or XML format.') - return False - if self.dependencies.data: - for dep in self.dependencies.data.splitlines(): - dep = dep.strip() - if dep and not re.match(r'^[a-zA-Z0-9\-\.]+@[a-zA-Z0-9\.\-]+$', dep): - self.dependencies.errors.append(f'Invalid dependency format: "{dep}". 
Use package@version (e.g., hl7.fhir.us.core@6.1.0).') - return False - has_alias_file_in_request = request and request.files and self.alias_file.name in request.files and request.files[self.alias_file.name].filename != '' - alias_file_data = self.alias_file.data or (request.files.get(self.alias_file.name) if request else None) - if alias_file_data and alias_file_data.filename: - if not alias_file_data.filename.lower().endswith('.fsh'): - self.alias_file.errors.append('Alias file should have a .fsh extension.') - return True - -class TestDataUploadForm(FlaskForm): - """Form for uploading FHIR test data.""" - fhir_server_url = StringField('Target FHIR Server URL', validators=[DataRequired(), URL()], - render_kw={'placeholder': 'e.g., http://localhost:8080/fhir'}) - auth_type = SelectField('Authentication Type', choices=[ - ('none', 'None'), - ('bearerToken', 'Bearer Token'), - ('basic', 'Basic Authentication') - ], default='none') - auth_token = StringField('Bearer Token', validators=[Optional()], - render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'}) - username = StringField('Username', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Username'}) - password = PasswordField('Password', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Password'}) - test_data_file = FileField('Select Test Data File(s)', validators=[InputRequired("Please select at least one file.")], - render_kw={'multiple': True, 'accept': '.json,.xml,.zip'}) - validate_before_upload = BooleanField('Validate Resources Before Upload?', default=False, - description="Validate resources against selected package profile before uploading.") - validation_package_id = SelectField('Validation Profile Package (Optional)', - choices=[('', '-- Select Package for Validation --')], - validators=[Optional()], - description="Select the processed IG package to use for validation.") - upload_mode = SelectField('Upload Mode', choices=[ - ('individual', 'Individual Resources'), - ('transaction', 'Transaction Bundle') - ], default='individual') - use_conditional_uploads = BooleanField('Use Conditional Upload (Individual Mode Only)?', default=True, - description="If checked, checks resource existence (GET) and uses If-Match (PUT) or creates (PUT). 
If unchecked, uses simple PUT for all.") - error_handling = SelectField('Error Handling', choices=[ - ('stop', 'Stop on First Error'), - ('continue', 'Continue on Error') - ], default='stop') - submit = SubmitField('Upload and Process') - - def validate(self, extra_validators=None): - if not super().validate(extra_validators): - return False - if self.validate_before_upload.data and not self.validation_package_id.data: - self.validation_package_id.errors.append('Please select a package to validate against when pre-upload validation is enabled.') - return False - if self.use_conditional_uploads.data and self.upload_mode.data == 'transaction': - self.use_conditional_uploads.errors.append('Conditional Uploads only apply to the "Individual Resources" mode.') - return False - if self.auth_type.data == 'bearerToken' and not self.auth_token.data: - self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.') - return False - if self.auth_type.data == 'basic': - if not self.username.data: - self.username.errors.append('Username is required for Basic Authentication.') - return False - if not self.password.data: - self.password.errors.append('Password is required for Basic Authentication.') - return False - return True - -class FhirRequestForm(FlaskForm): - fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()], - render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'}) - auth_type = SelectField('Authentication Type (for Custom URL)', choices=[ - ('none', 'None'), - ('bearerToken', 'Bearer Token'), - ('basicAuth', 'Basic Authentication') - ], default='none', validators=[Optional()]) - auth_token = StringField('Bearer Token', validators=[Optional()], - render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'}) - basic_auth_username = StringField('Username', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Username'}) - basic_auth_password = PasswordField('Password', validators=[Optional()], - render_kw={'placeholder': 'Enter Basic Auth Password'}) - submit = SubmitField('Send Request') - - def validate(self, extra_validators=None): - if not super().validate(extra_validators): - return False - if self.fhir_server_url.data: - if self.auth_type.data == 'bearerToken' and not self.auth_token.data: - self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected for a custom URL.') - return False - if self.auth_type.data == 'basicAuth': - if not self.basic_auth_username.data: - self.basic_auth_username.errors.append('Username is required for Basic Authentication with a custom URL.') - return False - if not self.basic_auth_password.data: - self.basic_auth_password.errors.append('Password is required for Basic Authentication with a custom URL.') - return False - return True \ No newline at end of file diff --git a/hapi-fhir-Setup/README.md b/hapi-fhir-Setup/README.md deleted file mode 100644 index ea613b1..0000000 --- a/hapi-fhir-Setup/README.md +++ /dev/null @@ -1,111 +0,0 @@ -# Application Build and Run Guide - MANUAL STEPS - -This guide outlines the steps to set up, build, and run the application, including the HAPI FHIR server component and the rest of the application managed via Docker Compose. 
-
-## Prerequisites
-
-Before you begin, ensure you have the following installed on your system:
-
-* [Git](https://git-scm.com/)
-* [Maven](https://maven.apache.org/)
-* [Java Development Kit (JDK)](https://www.oracle.com/java/technologies/downloads/) (Ensure compatibility with the HAPI FHIR version)
-* [Docker](https://www.docker.com/products/docker-desktop/)
-* [Docker Compose](https://docs.docker.com/compose/install/) (Often included with Docker Desktop)
-
-## Setup and Build
-
-Follow these steps to clone the necessary repository and build the components.
-
-### 1. Clone and Build the HAPI FHIR Server
-
-First, clone the HAPI FHIR JPA Server Starter project and build the server application.
-
-# Step 1: Clone the repository
-git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver
-
-# Navigate into the cloned directory
-cd hapi-fhir-jpaserver
-
-Copy the file hapi-fhir-setup/target/classes/application.yaml to hapi-fhir-jpaserver/target/classes/application.yaml in the directory created above.
-
-# Step 2: Build the HAPI server package (skipping tests, using 'boot' profile)
-# This creates the runnable WAR file in the 'target/' directory
-mvn clean package -DskipTests=true -Pboot
-
-# Return to the parent directory (or your project root)
-cd ..
-
-### 2. Build the Rest of the Application (Docker)
-
-Next, build the Docker images for the remaining parts of the application as defined in your docker-compose.yml file. Run this command from the root directory where your docker-compose.yml file is located.
-
-# Step 3: Build Docker images without using cache
-docker-compose build --no-cache
-
-## Running the Application
-
-### Option A: Running the Full Application (Recommended)
-
-Use Docker Compose to start all services, including the HAPI FHIR server if it is configured in your docker-compose.yml. Run this from the root directory containing your docker-compose.yml.
-
-# Step 4: Start all services defined in docker-compose.yml in detached mode
-docker-compose up -d
-
-### Option B: Running the HAPI FHIR Server Standalone (Debugging Only)
-
-This method runs only the HAPI FHIR server directly using the built WAR file. Use this primarily for debugging the server in isolation.
-
-# Navigate into the HAPI server directory where you built it
-cd hapi-fhir-jpaserver
-
-# Run the WAR file directly using Java
-java -jar target/ROOT.war
-
-# Note: You might need to configure ports or database connections
-# separately when running this way, depending on the application's needs.
-
-# Remember to navigate back when done
-# cd ..
-
-## Useful Docker Commands
-
-Here are some helpful commands for interacting with your running Docker containers:
-
-Copying files from a container:
-To copy a file from a running container to your local machine's current directory:
-
-# Syntax: docker cp <container_id_or_name>:<path_in_container> <destination_on_host>
-docker cp <container_id_or_name>:/app/PATH/Filename.ext .
-(Replace <container_id_or_name> and /app/PATH/Filename.ext with actual values. The trailing . refers to the current directory on your host machine.)
-
-Accessing a container's shell:
-To get an interactive bash shell inside a running container:
-
-# Syntax: docker exec -it <container_id_or_name> bash
-docker exec -it <container_id_or_name> bash
-(Replace <container_id_or_name> with the actual container ID or name. You can find this using docker ps.)
- -Viewing running containers: - - - -docker ps -Viewing application logs: - - - -# Follow logs for all services -docker-compose logs -f - -# Follow logs for a specific service -docker-compose logs -f -(Replace with the name defined in your docker-compose.yml) - -Stopping the application: -To stop the services started with docker-compose up -d: - - - -docker-compose down diff --git a/hapi-fhir-Setup/target/classes/application.yaml b/hapi-fhir-Setup/target/classes/application.yaml deleted file mode 100644 index a79d07e..0000000 --- a/hapi-fhir-Setup/target/classes/application.yaml +++ /dev/null @@ -1,342 +0,0 @@ -#Uncomment the "servlet" and "context-path" lines below to make the fhir endpoint available at /example/path/fhir instead of the default value of /fhir -server: - # servlet: - # context-path: /example/path - port: 8080 -#Adds the option to go to eg. http://localhost:8080/actuator/health for seeing the running configuration -#see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints -management: - #The following configuration will enable the actuator endpoints at /actuator/health, /actuator/info, /actuator/prometheus, /actuator/metrics. For security purposes, only /actuator/health is enabled by default. - endpoints: - enabled-by-default: false - web: - exposure: - include: 'health' # or e.g. 'info,health,prometheus,metrics' or '*' for all' - endpoint: - info: - enabled: true - metrics: - enabled: true - health: - enabled: true - probes: - enabled: true - group: - liveness: - include: - - livenessState - - readinessState - prometheus: - enabled: true - prometheus: - metrics: - export: - enabled: true -spring: - main: - allow-circular-references: true - flyway: - enabled: false - baselineOnMigrate: true - fail-on-missing-locations: false - datasource: - #url: 'jdbc:h2:file:./target/database/h2' - url: jdbc:h2:file:/app/h2-data/fhir;DB_CLOSE_DELAY=-1;AUTO_SERVER=TRUE - #url: jdbc:h2:mem:test_mem - username: sa - password: null - driverClassName: org.h2.Driver - max-active: 15 - - # database connection pool size - hikari: - maximum-pool-size: 10 - jpa: - properties: - hibernate.format_sql: false - hibernate.show_sql: false - - #Hibernate dialect is automatically detected except Postgres and H2. 
- #If using H2, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect - #If using postgres, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect - hibernate.dialect: ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect - # hibernate.hbm2ddl.auto: update - # hibernate.jdbc.batch_size: 20 - # hibernate.cache.use_query_cache: false - # hibernate.cache.use_second_level_cache: false - # hibernate.cache.use_structured_entries: false - # hibernate.cache.use_minimal_puts: false - - ### These settings will enable fulltext search with lucene or elastic - hibernate.search.enabled: false - ### lucene parameters -# hibernate.search.backend.type: lucene -# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiLuceneAnalysisConfigurer -# hibernate.search.backend.directory.type: local-filesystem -# hibernate.search.backend.directory.root: target/lucenefiles -# hibernate.search.backend.lucene_version: lucene_current - ### elastic parameters ===> see also elasticsearch section below <=== -# hibernate.search.backend.type: elasticsearch -# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiElasticAnalysisConfigurer -hapi: - fhir: - ### This flag when enabled to true, will avail evaluate measure operations from CR Module. - ### Flag is false by default, can be passed as command line argument to override. - cr: - enabled: false - caregaps: - reporter: "default" - section_author: "default" - cql: - use_embedded_libraries: true - compiler: - ### These are low-level compiler options. - ### They are not typically needed by most users. - # validate_units: true - # verify_only: false - # compatibility_level: "1.5" - error_level: Info - signature_level: All - # analyze_data_requirements: false - # collapse_data_requirements: false - # translator_format: JSON - # enable_date_range_optimization: true - enable_annotations: true - enable_locators: true - enable_results_type: true - enable_detailed_errors: true - # disable_list_traversal: false - # disable_list_demotion: false - # enable_interval_demotion: false - # enable_interval_promotion: false - # disable_method_invocation: false - # require_from_keyword: false - # disable_default_model_info_load: false - runtime: - debug_logging_enabled: false - # enable_validation: false - # enable_expression_caching: true - terminology: - valueset_preexpansion_mode: REQUIRE # USE_IF_PRESENT, REQUIRE, IGNORE - valueset_expansion_mode: PERFORM_NAIVE_EXPANSION # AUTO, USE_EXPANSION_OPERATION, PERFORM_NAIVE_EXPANSION - valueset_membership_mode: USE_EXPANSION # AUTO, USE_VALIDATE_CODE_OPERATION, USE_EXPANSION - code_lookup_mode: USE_VALIDATE_CODE_OPERATION # AUTO, USE_VALIDATE_CODE_OPERATION, USE_CODESYSTEM_URL - data: - search_parameter_mode: USE_SEARCH_PARAMETERS # AUTO, USE_SEARCH_PARAMETERS, FILTER_IN_MEMORY - terminology_parameter_mode: FILTER_IN_MEMORY # AUTO, USE_VALUE_SET_URL, USE_INLINE_CODES, FILTER_IN_MEMORY - profile_mode: DECLARED # ENFORCED, DECLARED, OPTIONAL, TRUST, OFF - - cdshooks: - enabled: false - clientIdHeaderName: client_id - - ### This enables the swagger-ui at /fhir/swagger-ui/index.html as well as the /fhir/api-docs (see https://hapifhir.io/hapi-fhir/docs/server_plain/openapi.html) - openapi_enabled: true - ### This is the FHIR version. Choose between, DSTU2, DSTU3, R4 or R5 - fhir_version: R4 - ### Flag is false by default. This flag enables runtime installation of IG's. 
- ig_runtime_upload_enabled: false - ### This flag when enabled to true, will avail evaluate measure operations from CR Module. - - ### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers - ### to determine the FHIR server address - # use_apache_address_strategy: false - ### forces the use of the https:// protocol for the returned server address. - ### alternatively, it may be set using the X-Forwarded-Proto header. - # use_apache_address_strategy_https: false - ### enables the server to overwrite defaults on HTML, css, etc. under the url pattern of eg. /content/custom ** - ### Folder with custom content MUST be named custom. If omitted then default content applies - custom_content_path: ./custom - ### enables the server host custom content. If e.g. the value ./configs/app is supplied then the content - ### will be served under /web/app - #app_content_path: ./configs/app - ### enable to set the Server URL - # server_address: http://hapi.fhir.org/baseR4 - # defer_indexing_for_codesystems_of_size: 101 - ### Flag is true by default. This flag filters resources during package installation, allowing only those resources with a valid status (e.g. active) to be installed. - # validate_resource_status_for_package_upload: false - # install_transitive_ig_dependencies: true - #implementationguides: - ### example from registry (packages.fhir.org) - # swiss: - # name: swiss.mednet.fhir - # version: 0.8.0 - # reloadExisting: false - # installMode: STORE_AND_INSTALL - # example not from registry - # ips_1_0_0: - # packageUrl: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz - # name: hl7.fhir.uv.ips - # version: 1.0.0 - # supported_resource_types: - # - Patient - # - Observation - ################################################## - # Allowed Bundle Types for persistence (defaults are: COLLECTION,DOCUMENT,MESSAGE) - ################################################## - # allowed_bundle_types: COLLECTION,DOCUMENT,MESSAGE,TRANSACTION,TRANSACTIONRESPONSE,BATCH,BATCHRESPONSE,HISTORY,SEARCHSET - # allow_cascading_deletes: true - # allow_contains_searches: true - # allow_external_references: true - # allow_multiple_delete: true - # allow_override_default_search_params: true - # auto_create_placeholder_reference_targets: false - # mass_ingestion_mode_enabled: false - ### tells the server to automatically append the current version of the target resource to references at these paths - # auto_version_reference_at_paths: Device.patient, Device.location, Device.parent, DeviceMetric.parent, DeviceMetric.source, Observation.device, Observation.subject - # ips_enabled: false - # default_encoding: JSON - # default_pretty_print: true - # default_page_size: 20 - # delete_expunge_enabled: true - # enable_repository_validating_interceptor: true - # enable_index_missing_fields: false - # enable_index_of_type: true - # enable_index_contained_resource: false - # upliftedRefchains_enabled: true - # resource_dbhistory_enabled: false - ### !!Extended Lucene/Elasticsearch Indexing is still a experimental feature, expect some features (e.g. _total=accurate) to not work as expected!! - ### more information here: https://hapifhir.io/hapi-fhir/docs/server_jpa/elastic.html - advanced_lucene_indexing: false - bulk_export_enabled: false - bulk_import_enabled: false - # language_search_parameter_enabled: true - # enforce_referential_integrity_on_delete: false - # This is an experimental feature, and does not fully support _total and other FHIR features. 
- # enforce_referential_integrity_on_delete: false - # enforce_referential_integrity_on_write: false - # etag_support_enabled: true - # expunge_enabled: true - # client_id_strategy: ALPHANUMERIC - # server_id_strategy: SEQUENTIAL_NUMERIC - # fhirpath_interceptor_enabled: false - # filter_search_enabled: true - # graphql_enabled: true - narrative_enabled: true - mdm_enabled: false - mdm_rules_json_location: "mdm-rules.json" - ## see: https://hapifhir.io/hapi-fhir/docs/interceptors/built_in_server_interceptors.html#jpa-server-retry-on-version-conflicts - # userRequestRetryVersionConflictsInterceptorEnabled : false - # local_base_urls: - # - https://hapi.fhir.org/baseR4 - # pre_expand_value_sets: true - # enable_task_pre_expand_value_sets: true - # pre_expand_value_sets_default_count: 1000 - # pre_expand_value_sets_max_count: 1000 - # maximum_expansion_size: 1000 - - logical_urls: - - http://terminology.hl7.org/* - - https://terminology.hl7.org/* - - http://snomed.info/* - - https://snomed.info/* - - http://unitsofmeasure.org/* - - https://unitsofmeasure.org/* - - http://loinc.org/* - - https://loinc.org/* - # partitioning: - # allow_references_across_partitions: false - # partitioning_include_in_search_hashes: false - # conditional_create_duplicate_identifiers_enabled: false - cors: - allow_Credentials: true - # These are allowed_origin patterns, see: https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/cors/CorsConfiguration.html#setAllowedOriginPatterns-java.util.List- - allowed_origin: - - '*' - - # Search coordinator thread pool sizes - search-coord-core-pool-size: 20 - search-coord-max-pool-size: 100 - search-coord-queue-capacity: 200 - - # Search Prefetch Thresholds. - - # This setting sets the number of search results to prefetch. For example, if this list - # is set to [100, 1000, -1] then the server will initially load 100 results and not - # attempt to load more. If the user requests subsequent page(s) of results and goes - # past 100 results, the system will load the next 900 (up to the following threshold of 1000). - # The system will progressively work through these thresholds. - # A threshold of -1 means to load all results. Note that if the final threshold is a - # number other than -1, the system will never prefetch more than the given number. - search_prefetch_thresholds: 13,503,2003,-1 - - # comma-separated package names, will be @ComponentScan'ed by Spring to allow for creating custom Spring beans - #custom-bean-packages: - - # comma-separated list of fully qualified interceptor classes. - # classes listed here will be fetched from the Spring context when combined with 'custom-bean-packages', - # or will be instantiated via reflection using an no-arg contructor; then registered with the server - #custom-interceptor-classes: - - # comma-separated list of fully qualified provider classes. - # classes listed here will be fetched from the Spring context when combined with 'custom-bean-packages', - # or will be instantiated via reflection using an no-arg contructor; then registered with the server - #custom-provider-classes: - - # Threadpool size for BATCH'ed GETs in a bundle. 
- # bundle_batch_pool_size: 10 - # bundle_batch_pool_max_size: 50 - - # logger: - # error_format: 'ERROR - ${requestVerb} ${requestUrl}' - # format: >- - # Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] - # Operation[${operationType} ${operationName} ${idOrResourceName}] - # UA[${requestHeader.user-agent}] Params[${requestParameters}] - # ResponseEncoding[${responseEncodingNoDefault}] - # log_exceptions: true - # name: fhirtest.access - # max_binary_size: 104857600 - # max_page_size: 200 - # retain_cached_searches_mins: 60 - # reuse_cached_search_results_millis: 60000 - tester: - home: - name: FHIRFLARE Tester - server_address: http://localhost:8080/fhir - refuse_to_fetch_third_party_urls: false - fhir_version: R4 - global: - name: Global Tester - server_address: "http://hapi.fhir.org/baseR4" - refuse_to_fetch_third_party_urls: false - fhir_version: R4 - # validation: - # requests_enabled: true - # responses_enabled: true - # binary_storage_enabled: true - inline_resource_storage_below_size: 4000 -# bulk_export_enabled: true -# subscription: -# resthook_enabled: true -# websocket_enabled: false -# polling_interval_ms: 5000 -# immediately_queued: false -# email: -# from: some@test.com -# host: google.com -# port: -# username: -# password: -# auth: -# startTlsEnable: -# startTlsRequired: -# quitWait: -# lastn_enabled: true -# store_resource_in_lucene_index_enabled: true -### This is configuration for normalized quantity search level default is 0 -### 0: NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED - default -### 1: NORMALIZED_QUANTITY_STORAGE_SUPPORTED -### 2: NORMALIZED_QUANTITY_SEARCH_SUPPORTED -# normalized_quantity_search_level: 2 -#elasticsearch: -# debug: -# pretty_print_json_log: false -# refresh_after_write: false -# enabled: false -# password: SomePassword -# required_index_status: YELLOW -# rest_url: 'localhost:9200' -# protocol: 'http' -# schema_management_strategy: CREATE -# username: SomeUsername diff --git a/index.html b/index.html new file mode 100644 index 0000000..71210ec --- /dev/null +++ b/index.html @@ -0,0 +1,92 @@ + + + + + + + +FHIRFLARE IG Toolkit | Helm chart for deploying the fhirflare-ig-toolkit application + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+FHIRFLARE IG Toolkit
+Helm chart for deploying the fhirflare-ig-toolkit application
+View the Project on GitHub
+
+Overview
+FHIRFLARE-IG-Toolkit is a comprehensive solution for working with FHIR Implementation Guides.
+
+Features
+• Helm chart deployment
+• FHIR resources management
+• Implementation Guide toolkit
+
+Getting Started
+Check out the documentation to get started with FHIRFLARE IG Toolkit.
+ + + + diff --git a/index.yaml b/index.yaml deleted file mode 100644 index 1ddaa4c..0000000 --- a/index.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: v1 -entries: - fhirflare-ig-toolkit: - - apiVersion: v2 - appVersion: latest - created: "2025-08-04T05:53:00.152693988Z" - description: Helm chart for deploying the fhirflare-ig-toolkit application - digest: faef7991101501ae64e368fd2fb8021ec623d73e8cc808ea8c3df9920dcefb6a - home: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit - icon: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit/raw/main/static/FHIRFLARE.png - keywords: - - fhir - - healthcare - - ig-toolkit - - implementation-guide - maintainers: - - email: jgsuess@gmail.com - name: Jörn Guy Süß - name: fhirflare-ig-toolkit - type: application - urls: - - https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/releases/download/helm-v0.5.0/fhirflare-ig-toolkit-0.5.0.tgz - version: 0.5.0 -generated: "2025-08-04T05:53:00.152703546Z" diff --git a/migrations/README b/migrations/README deleted file mode 100644 index 0e04844..0000000 --- a/migrations/README +++ /dev/null @@ -1 +0,0 @@ -Single-database configuration for Flask. diff --git a/migrations/__pycache__/env.cpython-312.pyc b/migrations/__pycache__/env.cpython-312.pyc deleted file mode 100644 index 5c294b5..0000000 Binary files a/migrations/__pycache__/env.cpython-312.pyc and /dev/null differ diff --git a/migrations/alembic.ini b/migrations/alembic.ini deleted file mode 100644 index ec9d45c..0000000 --- a/migrations/alembic.ini +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py deleted file mode 100644 index 4c97092..0000000 --- a/migrations/env.py +++ /dev/null @@ -1,113 +0,0 @@ -import logging -from logging.config import fileConfig - -from flask import current_app - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine() - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engine - - -def get_engine_url(): - try: - return get_engine().url.render_as_string(hide_password=False).replace( - '%', '%%') - except AttributeError: - return str(get_engine().url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -target_db = current_app.extensions['migrate'].db - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(): - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[None] - return target_db.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=get_metadata(), literal_binds=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - connectable = get_engine() - - with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=get_metadata(), - **conf_args - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako deleted file mode 100644 index 2c01563..0000000 --- a/migrations/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/package.py b/package.py deleted file mode 100644 index acd96d9..0000000 --- a/package.py +++ /dev/null @@ -1,123 +0,0 @@ -from flask import Blueprint, jsonify, current_app, render_template -import os -import tarfile -import json -from datetime import datetime -import time -from services import pkg_version, safe_parse_version - -package_bp = Blueprint('package', __name__) - -@package_bp.route('/logs/') -def logs(name): - """ - Fetch logs for a package, listing each version with its publication date. - - Args: - name (str): The name of the package. - - Returns: - Rendered template with logs or an error message. - """ - try: - in_memory_cache = current_app.config.get('MANUAL_PACKAGE_CACHE', []) - if not in_memory_cache: - current_app.logger.error(f"No in-memory cache found for package logs: {name}") - return "

Package cache not found.

" - - package_data = next((pkg for pkg in in_memory_cache if isinstance(pkg, dict) and pkg.get('name', '').lower() == name.lower()), None) - if not package_data: - current_app.logger.error(f"Package not found in cache: {name}") - return "

Package not found.

" - - # Get the versions list with pubDate - versions = package_data.get('all_versions', []) - if not versions: - current_app.logger.warning(f"No versions found for package: {name}. Package data: {package_data}") - return "

No version history found for this package.

" - - current_app.logger.debug(f"Found {len(versions)} versions for package {name}: {versions[:5]}...") - - logs = [] - now = time.time() - for version_info in versions: - if not isinstance(version_info, dict): - current_app.logger.warning(f"Invalid version info for {name}: {version_info}") - continue - version = version_info.get('version', '') - pub_date_str = version_info.get('pubDate', '') - if not version or not pub_date_str: - current_app.logger.warning(f"Skipping version info with missing version or pubDate: {version_info}") - continue - - # Parse pubDate and calculate "when" - when = "Unknown" - try: - pub_date = datetime.strptime(pub_date_str, "%a, %d %b %Y %H:%M:%S %Z") - pub_time = pub_date.timestamp() - time_diff = now - pub_time - days_ago = int(time_diff / 86400) - if days_ago < 1: - hours_ago = int(time_diff / 3600) - if hours_ago < 1: - minutes_ago = int(time_diff / 60) - when = f"{minutes_ago} minute{'s' if minutes_ago != 1 else ''} ago" - else: - when = f"{hours_ago} hour{'s' if hours_ago != 1 else ''} ago" - else: - when = f"{days_ago} day{'s' if days_ago != 1 else ''} ago" - except ValueError as e: - current_app.logger.warning(f"Failed to parse pubDate '{pub_date_str}' for version {version}: {e}") - - logs.append({ - "version": version, - "pubDate": pub_date_str, - "when": when - }) - - if not logs: - current_app.logger.warning(f"No valid version entries with pubDate for package: {name}") - return "

No version history found for this package.

" - - # Sort logs by version number (newest first) - logs.sort(key=lambda x: safe_parse_version(x.get('version', '0.0.0a0')), reverse=True) - - current_app.logger.debug(f"Rendering logs for {name} with {len(logs)} entries") - return render_template('package.logs.html', logs=logs) - - except Exception as e: - current_app.logger.error(f"Error in logs endpoint for {name}: {str(e)}", exc_info=True) - return "

Error loading version history.

", 500 - -@package_bp.route('/dependents/') -def dependents(name): - """ - HTMX endpoint to fetch packages that depend on the current package. - Returns an HTML fragment with a table of dependent packages. - """ - in_memory_cache = current_app.config.get('MANUAL_PACKAGE_CACHE', []) - package_data = next((pkg for pkg in in_memory_cache if isinstance(pkg, dict) and pkg.get('name', '').lower() == name.lower()), None) - - if not package_data: - return "

Package not found.

" - - # Find dependents: packages whose dependencies include the current package - dependents = [] - for pkg in in_memory_cache: - if not isinstance(pkg, dict): - continue - dependencies = pkg.get('dependencies', []) - for dep in dependencies: - dep_name = dep.get('name', '') - if dep_name.lower() == name.lower(): - dependents.append({ - "name": pkg.get('name', 'Unknown'), - "version": pkg.get('latest_absolute_version', 'N/A'), - "author": pkg.get('author', 'N/A'), - "fhir_version": pkg.get('fhir_version', 'N/A'), - "version_count": pkg.get('version_count', 0), - "canonical": pkg.get('canonical', 'N/A') - }) - break - - return render_template('package.dependents.html', dependents=dependents) \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index cec64a5..0000000 --- a/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -Flask==2.3.3 -Flask-SQLAlchemy==3.0.5 -Werkzeug==2.3.7 -requests==2.31.0 -Flask-WTF==1.2.1 -WTForms==3.1.2 -Pytest -pyyaml==6.0.1 -fhir.resources==8.0.0 -Flask-Migrate==4.1.0 -cachetools -beautifulsoup4 -feedparser==6.0.11 -flasgger \ No newline at end of file diff --git a/services.py b/services.py deleted file mode 100644 index 8685550..0000000 --- a/services.py +++ /dev/null @@ -1,4985 +0,0 @@ -import requests -import os -import tarfile -import json -import re -import logging -import shutil -import sqlite3 -import feedparser -from flask import current_app, Blueprint, request, jsonify -from fhirpathpy import evaluate -from collections import defaultdict, deque -from pathlib import Path -from urllib.parse import quote, urlparse -from types import SimpleNamespace -import datetime -import subprocess -import tempfile -import zipfile -import xml.etree.ElementTree as ET -from flasgger import swag_from # Import swag_from here - -# Define Blueprint -services_bp = Blueprint('services', __name__) - -# Configure logging -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') -else: - pass -logger = logging.getLogger(__name__) - -# --- ADD fhir.resources imports --- -try: - from fhir.resources import get_fhir_model_class - from fhir.resources.fhirtypesvalidators import FHIRValidationError # Updated import path - FHIR_RESOURCES_AVAILABLE = True - logger.info("fhir.resources library found. XML parsing will use this.") -except ImportError as e: - FHIR_RESOURCES_AVAILABLE = False - logger.warning(f"fhir.resources library failed to import. XML parsing will be basic and dependency analysis for XML may be incomplete. Error: {str(e)}") - # Define dummy classes if library not found to avoid NameErrors later - class FHIRValidationError(Exception): pass - def get_fhir_model_class(resource_type): raise NotImplementedError("fhir.resources not installed") -except Exception as e: - FHIR_RESOURCES_AVAILABLE = False - logger.error(f"Unexpected error importing fhir.resources library: {str(e)}") - class FHIRValidationError(Exception): pass - def get_fhir_model_class(resource_type): raise NotImplementedError("fhir.resources not installed") -# --- END fhir.resources imports --- - -# --- Check for optional 'packaging' library --- -try: - import packaging.version as pkg_version - HAS_PACKAGING_LIB = True - logger.info("Optional 'packaging' library found. Using for robust version comparison.") -except ImportError: - HAS_PACKAGING_LIB = False - logger.warning("Optional 'packaging' library not found. 
Using basic string comparison for versions.") - # Define a simple fallback class if packaging is missing - class BasicVersion: - def __init__(self, v_str): self.v_str = str(v_str) - # Define comparison methods for sorting compatibility - def __lt__(self, other): return self.v_str < str(other) - def __gt__(self, other): return self.v_str > str(other) - def __eq__(self, other): return self.v_str == str(other) - def __le__(self, other): return self.v_str <= str(other) - def __ge__(self, other): return self.v_str >= str(other) - def __ne__(self, other): return self.v_str != str(other) - def __str__(self): return self.v_str - pkg_version = SimpleNamespace(parse=BasicVersion, InvalidVersion=ValueError) # Mock parse and InvalidVersion - -# --- Constants --- -FHIR_REGISTRY_BASE_URL = "https://packages.fhir.org" -DOWNLOAD_DIR_NAME = "fhir_packages" -CANONICAL_PACKAGE = ("hl7.fhir.r4.core", "4.0.1") -CANONICAL_PACKAGE_ID = f"{CANONICAL_PACKAGE[0]}#{CANONICAL_PACKAGE[1]}" - -# --- Define Canonical Types --- -CANONICAL_RESOURCE_TYPES = { - "StructureDefinition", "ValueSet", "CodeSystem", "SearchParameter", - "CapabilityStatement", "ImplementationGuide", "ConceptMap", "NamingSystem", - "OperationDefinition", "MessageDefinition", "CompartmentDefinition", - "GraphDefinition", "StructureMap", "Questionnaire" -} -# ----------------------------- - -# Define standard FHIR R4 base types -FHIR_R4_BASE_TYPES = { - "Account", "ActivityDefinition", "AdministrableProductDefinition", "AdverseEvent", "AllergyIntolerance", - "Appointment", "AppointmentResponse", "AuditEvent", "Basic", "Binary", "BiologicallyDerivedProduct", - "BodyStructure", "Bundle", "CapabilityStatement", "CarePlan", "CareTeam", "CatalogEntry", "ChargeItem", - "ChargeItemDefinition", "Claim", "ClaimResponse", "ClinicalImpression", "CodeSystem", "Communication", - "CommunicationRequest", "CompartmentDefinition", "Composition", "ConceptMap", "Condition", "Consent", - "Contract", "Coverage", "CoverageEligibilityRequest", "CoverageEligibilityResponse", "DetectedIssue", - "Device", "DeviceDefinition", "DeviceMetric", "DeviceRequest", "DeviceUseStatement", "DiagnosticReport", - "DocumentManifest", "DocumentReference", "DomainResource", "EffectEvidenceSynthesis", "Encounter", - "Endpoint", "EnrollmentRequest", "EnrollmentResponse", "EpisodeOfCare", "EventDefinition", "Evidence", - "EvidenceVariable", "ExampleScenario", "ExplanationOfBenefit", "FamilyMemberHistory", "Flag", "Goal", - "GraphDefinition", "Group", "GuidanceResponse", "HealthcareService", "ImagingStudy", "Immunization", - "ImmunizationEvaluation", "ImmunizationRecommendation", "ImplementationGuide", "InsurancePlan", - "Invoice", "Library", "Linkage", "List", "Location", "Measure", "MeasureReport", "Media", "Medication", - "MedicationAdministration", "MedicationDispense", "MedicationKnowledge", "MedicationRequest", - "MedicationStatement", "MedicinalProduct", "MedicinalProductAuthorization", "MedicinalProductContraindication", - "MedicinalProductIndication", "MedicinalProductIngredient", "MedicinalProductInteraction", - "MedicinalProductManufactured", "MedicinalProductPackaged", "MedicinalProductPharmaceutical", - "MedicinalProductUndesirableEffect", "MessageDefinition", "MessageHeader", "MolecularSequence", - "NamingSystem", "NutritionOrder", "Observation", "ObservationDefinition", "OperationDefinition", - "OperationOutcome", "Organization", "OrganizationAffiliation", "Patient", "PaymentNotice", - "PaymentReconciliation", "Person", "PlanDefinition", "Practitioner", "PractitionerRole", 
"Procedure", - "Provenance", "Questionnaire", "QuestionnaireResponse", "RelatedPerson", "RequestGroup", "ResearchDefinition", - "ResearchElementDefinition", "ResearchStudy", "ResearchSubject", "Resource", "RiskAssessment", - "RiskEvidenceSynthesis", "Schedule", "SearchParameter", "ServiceRequest", "Slot", "Specimen", - "SpecimenDefinition", "StructureDefinition", "StructureMap", "Subscription", "Substance", - "SubstanceNucleicAcid", "SubstancePolymer", "SubstanceProtein", "SubstanceReferenceInformation", - "SubstanceSourceMaterial", "SubstanceSpecification", "SupplyDelivery", "SupplyRequest", "Task", - "TerminologyCapabilities", "TestReport", "TestScript", "ValueSet", "VerificationResult", "VisionPrescription" -} - - -# ------------------------------------------------------------------- -#Helper function to support normalize: - -def safe_parse_version(v_str): - """ - Attempts to parse a version string using packaging.version. - Handles common FHIR suffixes like -dev, -ballot, -draft, -preview - by treating them as standard pre-releases (-a0, -b0, -rc0) for comparison. - Returns a comparable Version object or a fallback for unparseable strings. - """ - if not v_str or not isinstance(v_str, str): - # Handle None or non-string input, treat as lowest possible version - return pkg_version.parse("0.0.0a0") # Use alpha pre-release - - # Try standard parsing first - try: - return pkg_version.parse(v_str) - except pkg_version.InvalidVersion: - # Handle common FHIR suffixes if standard parsing fails - original_v_str = v_str # Keep original for logging - v_str_norm = v_str.lower() - # Split into base version and suffix - base_part = v_str_norm - suffix = None - if '-' in v_str_norm: - parts = v_str_norm.split('-', 1) - base_part = parts[0] - suffix = parts[1] - - # Check if base looks like a version number - if re.match(r'^\d+(\.\d+)*$', base_part): - try: - # Map FHIR suffixes to PEP 440 pre-release types for sorting - if suffix in ['dev', 'snapshot', 'ci-build']: - # Treat as alpha (earliest pre-release) - return pkg_version.parse(f"{base_part}a0") - elif suffix in ['draft', 'ballot', 'preview']: - # Treat as beta (after alpha) - return pkg_version.parse(f"{base_part}b0") - # Add more mappings if needed (e.g., -rc -> rc0) - elif suffix and suffix.startswith('rc'): - rc_num = ''.join(filter(str.isdigit, suffix)) or '0' - return pkg_version.parse(f"{base_part}rc{rc_num}") - - # If suffix isn't recognized, still try parsing base as final/post - # This might happen for odd suffixes like -final (though unlikely) - # If base itself parses, use that (treats unknown suffix as > pre-release) - return pkg_version.parse(base_part) - - except pkg_version.InvalidVersion: - # If base_part itself is invalid after splitting - logger.warning(f"Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha.") - return pkg_version.parse("0.0.0a0") - except Exception as e: - logger.error(f"Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}") - return pkg_version.parse("0.0.0a0") - else: - # Base part doesn't look like numbers/dots (e.g., "current", "dev") - logger.warning(f"Unparseable version '{original_v_str}' (base '{base_part}' not standard). 
Treating as alpha.") - return pkg_version.parse("0.0.0a0") # Treat fully non-standard versions as very early - - except Exception as e: - # Catch any other unexpected parsing errors - logger.error(f"Unexpected error in safe_parse_version for '{v_str}': {e}") - return pkg_version.parse("0.0.0a0") # Fallback - -# --- MODIFIED FUNCTION with Enhanced Logging --- -def get_additional_registries(): - """Fetches the list of additional FHIR IG registries from the master feed.""" - logger.debug("Entering get_additional_registries function") - feed_registry_url = 'https://raw.githubusercontent.com/FHIR/ig-registry/master/package-feeds.json' - feeds = [] # Default to empty list - try: - logger.info(f"Attempting to fetch feed registry from {feed_registry_url}") - # Use a reasonable timeout - response = requests.get(feed_registry_url, timeout=15) - logger.debug(f"Feed registry request to {feed_registry_url} returned status code: {response.status_code}") - # Raise HTTPError for bad responses (4xx or 5xx) - response.raise_for_status() - - # Log successful fetch - logger.debug(f"Successfully fetched feed registry. Response text (first 500 chars): {response.text[:500]}...") - - try: - # Attempt to parse JSON - data = json.loads(response.text) - feeds_raw = data.get('feeds', []) - # Ensure structure is as expected before adding - feeds = [{'name': feed['name'], 'url': feed['url']} - for feed in feeds_raw - if isinstance(feed, dict) and 'name' in feed and 'url' in feed] - logger.info(f"Successfully parsed {len(feeds)} valid feeds from {feed_registry_url}") - - except json.JSONDecodeError as e: - # Log JSON parsing errors specifically - logger.error(f"JSON decoding error for feed registry from {feed_registry_url}: {e}") - # Log the problematic text snippet to help diagnose - logger.error(f"Problematic JSON text snippet: {response.text[:500]}...") - # feeds remains [] - - # --- Specific Exception Handling --- - except requests.exceptions.HTTPError as e: - logger.error(f"HTTP error fetching feed registry from {feed_registry_url}: {e}", exc_info=True) - # feeds remains [] - except requests.exceptions.ConnectionError as e: - logger.error(f"Connection error fetching feed registry from {feed_registry_url}: {e}", exc_info=True) - # feeds remains [] - except requests.exceptions.Timeout as e: - logger.error(f"Timeout fetching feed registry from {feed_registry_url}: {e}", exc_info=True) - # feeds remains [] - except requests.exceptions.RequestException as e: - # Catch other potential request-related errors - logger.error(f"General request error fetching feed registry from {feed_registry_url}: {e}", exc_info=True) - # feeds remains [] - except Exception as e: - # Catch any other unexpected errors during the process - logger.error(f"Unexpected error fetching feed registry from {feed_registry_url}: {e}", exc_info=True) - # feeds remains [] - - logger.debug(f"Exiting get_additional_registries function, returning {len(feeds)} feeds.") - return feeds -# --- END MODIFIED FUNCTION --- - -def import_manual_package_and_dependencies(input_source, version=None, dependency_mode='recursive', is_file=False, is_url=False, resolve_dependencies=True): - """ - Import a FHIR Implementation Guide package manually, cloning import_package_and_dependencies. - Supports registry, file, or URL inputs with dependency handling. - - Args: - input_source (str): Package name (for registry), file path (for file), or URL (for URL). - version (str, optional): Package version for registry imports. 
- dependency_mode (str): Dependency import mode ('recursive', 'patch-canonical', 'tree-shaking'). - is_file (bool): True if input_source is a file path. - is_url (bool): True if input_source is a URL. - resolve_dependencies (bool): Whether to resolve and import dependencies. - - Returns: - dict: Import results with 'requested', 'downloaded', 'dependencies', and 'errors'. - """ - logger.info(f"Starting manual import for {input_source} (mode={dependency_mode}, resolve_deps={resolve_dependencies})") - download_dir = _get_download_dir() - if not download_dir: - return { - "requested": input_source, - "downloaded": {}, - "dependencies": [], - "errors": ["Failed to get download directory."] - } - - results = { - "requested": input_source, - "downloaded": {}, - "dependencies": [], - "errors": [] - } - - try: - if is_file: - tgz_path = input_source - if not os.path.exists(tgz_path): - results['errors'].append(f"File not found: {tgz_path}") - return results - name, version = parse_package_filename(os.path.basename(tgz_path)) - if not name: - name = os.path.splitext(os.path.basename(tgz_path))[0] - version = "unknown" - target_filename = construct_tgz_filename(name, version) - target_path = os.path.join(download_dir, target_filename) - shutil.copy(tgz_path, target_path) - results['downloaded'][name, version] = target_path - elif is_url: - tgz_path = download_manual_package_from_url(input_source, download_dir) - if not tgz_path: - results['errors'].append(f"Failed to download package from URL: {input_source}") - return results - name, version = parse_package_filename(os.path.basename(tgz_path)) - if not name: - name = os.path.splitext(os.path.basename(tgz_path))[0] - version = "unknown" - results['downloaded'][name, version] = tgz_path - else: - tgz_path = download_manual_package(input_source, version, download_dir) - if not tgz_path: - results['errors'].append(f"Failed to download {input_source}#{version}") - return results - results['downloaded'][input_source, version] = tgz_path - name = input_source - - if resolve_dependencies: - pkg_info = process_manual_package_file(tgz_path) - if pkg_info.get('errors'): - results['errors'].extend(pkg_info['errors']) - dependencies = pkg_info.get('dependencies', []) - results['dependencies'] = dependencies - - if dependencies and dependency_mode != 'tree-shaking': - for dep in dependencies: - dep_name = dep.get('name') - dep_version = dep.get('version', 'latest') - if not dep_name: - continue - logger.info(f"Processing dependency {dep_name}#{dep_version}") - dep_result = import_manual_package_and_dependencies( - dep_name, - dep_version, - dependency_mode=dependency_mode, - resolve_dependencies=True - ) - results['downloaded'].update(dep_result['downloaded']) - results['dependencies'].extend(dep_result['dependencies']) - results['errors'].extend(dep_result['errors']) - - save_package_metadata(name, version, dependency_mode, results['dependencies']) - return results - except Exception as e: - logger.error(f"Error during manual import of {input_source}: {str(e)}", exc_info=True) - results['errors'].append(f"Unexpected error: {str(e)}") - return results - -def download_manual_package(package_name, version, download_dir): - """ - Download a FHIR package from the registry, cloning download_package. - - Args: - package_name (str): Package name. - version (str): Package version. - download_dir (str): Directory to save the package. - - Returns: - str: Path to the downloaded file, or None if failed. 
- """ - logger.info(f"Attempting manual download of {package_name}#{version}") - tgz_filename = construct_tgz_filename(package_name, version) - if not tgz_filename: - logger.error(f"Invalid filename constructed for {package_name}#{version}") - return None - target_path = os.path.join(download_dir, tgz_filename) - if os.path.exists(target_path): - logger.info(f"Manual package {package_name}#{version} already exists at {target_path}") - return target_path - - url = f"{FHIR_REGISTRY_BASE_URL}/{package_name}/{version}" - try: - response = requests.get(url, stream=True, timeout=30) - response.raise_for_status() - with open(target_path, 'wb') as f: - for chunk in response.iter_content(chunk_size=8192): - f.write(chunk) - logger.info(f"Manually downloaded {package_name}#{version} to {target_path}") - return target_path - except requests.exceptions.HTTPError as e: - logger.error(f"HTTP error downloading {package_name}#{version}: {e}") - return None - except requests.exceptions.RequestException as e: - logger.error(f"Request error downloading {package_name}#{version}: {e}") - return None - except Exception as e: - logger.error(f"Unexpected error downloading {package_name}#{version}: {e}", exc_info=True) - return None - -def download_manual_package_from_url(url, download_dir): - """ - Download a FHIR package from a URL, cloning download_package logic. - - Args: - url (str): URL to the .tgz file. - download_dir (str): Directory to save the package. - - Returns: - str: Path to the downloaded file, or None if failed. - """ - logger.info(f"Attempting manual download from URL: {url}") - parsed_url = urlparse(url) - filename = os.path.basename(parsed_url.path) - if not filename.endswith('.tgz'): - logger.error(f"URL does not point to a .tgz file: {filename}") - return None - target_path = os.path.join(download_dir, filename) - if os.path.exists(target_path): - logger.info(f"Package from {url} already exists at {target_path}") - return target_path - - try: - response = requests.get(url, stream=True, timeout=30) - response.raise_for_status() - with open(target_path, 'wb') as f: - for chunk in response.iter_content(chunk_size=8192): - f.write(chunk) - logger.info(f"Manually downloaded package from {url} to {target_path}") - return target_path - except requests.exceptions.HTTPError as e: - logger.error(f"HTTP error downloading from {url}: {e}") - return None - except requests.exceptions.RequestException as e: - logger.error(f"Request error downloading from {url}: {e}") - return None - except Exception as e: - logger.error(f"Unexpected error downloading from {url}: {e}", exc_info=True) - return None - -def process_manual_package_file(tgz_path): - """ - Process a .tgz package file to extract metadata, cloning process_package_file. - - Args: - tgz_path (str): Path to the .tgz file. - - Returns: - dict: Package metadata including dependencies and errors. 
- """ - if not tgz_path or not os.path.exists(tgz_path): - logger.error(f"Package file not found for manual processing: {tgz_path}") - return {'errors': [f"Package file not found: {tgz_path}"], 'dependencies': []} - - pkg_basename = os.path.basename(tgz_path) - name, version = parse_package_filename(tgz_path) - logger.info(f"Manually processing package: {pkg_basename} ({name}#{version})") - - results = { - 'dependencies': [], - 'errors': [] - } - - try: - with tarfile.open(tgz_path, "r:gz") as tar: - pkg_json_member = next((m for m in tar if m.name == 'package/package.json'), None) - if pkg_json_member: - with tar.extractfile(pkg_json_member) as f: - pkg_data = json.load(f) - dependencies = pkg_data.get('dependencies', {}) - results['dependencies'] = [ - {'name': dep_name, 'version': dep_version} - for dep_name, dep_version in dependencies.items() - ] - else: - results['errors'].append("package.json not found in archive") - except Exception as e: - logger.error(f"Error manually processing {tgz_path}: {e}", exc_info=True) - results['errors'].append(f"Error processing package: {str(e)}") - - return results - -def fetch_packages_from_registries(search_term=''): - logger.debug("Entering fetch_packages_from_registries function with search_term: %s", search_term) - packages_dict = defaultdict(list) - - try: - logger.debug("Calling get_additional_registries") - feed_registries = get_additional_registries() - logger.debug("Returned from get_additional_registries with %d registries: %s", len(feed_registries), feed_registries) - - if not feed_registries: - logger.warning("No feed registries available. Cannot fetch packages.") - return [] - - logger.info(f"Processing {len(feed_registries)} feed registries") - for feed in feed_registries: - try: - logger.info(f"Fetching feed: {feed['name']} from {feed['url']}") - response = requests.get(feed['url'], timeout=30) - response.raise_for_status() - - # Log the raw response content for debugging - response_text = response.text[:500] # Limit to first 500 chars for logging - logger.debug(f"Raw response from {feed['url']}: {response_text}") - - try: - data = json.loads(response.text) - num_feed_packages = len(data.get('packages', [])) - logger.info(f"Fetched from feed {feed['name']}: {num_feed_packages} packages (JSON)") - for pkg in data.get('packages', []): - if not isinstance(pkg, dict): - continue - pkg_name = pkg.get('name', '') - if not pkg_name: - continue - packages_dict[pkg_name].append(pkg) - except json.JSONDecodeError: - feed_data = feedparser.parse(response.text) - if not feed_data.entries: - logger.warning(f"No entries found in feed {feed['name']}") - continue - num_rss_packages = len(feed_data.entries) - logger.info(f"Fetched from feed {feed['name']}: {num_rss_packages} packages (Atom/RSS)") - logger.info(f"Sample feed entries from {feed['name']}: {feed_data.entries[:2]}") - for entry in feed_data.entries: - try: - # Extract package name and version from title (e.g., "hl7.fhir.au.ereq#0.3.0-preview") - title = entry.get('title', '') - if '#' in title: - pkg_name, version = title.split('#', 1) - else: - pkg_name = title - version = entry.get('version', '') - if not pkg_name: - pkg_name = entry.get('id', '') or entry.get('summary', '') - if not pkg_name: - continue - - package = { - 'name': pkg_name, - 'version': version, - 'author': entry.get('author', ''), - 'fhirVersion': entry.get('fhir_version', [''])[0] or '', - 'url': entry.get('link', ''), - 'canonical': entry.get('canonical', ''), - 'dependencies': entry.get('dependencies', []), - 
'pubDate': entry.get('published', entry.get('pubdate', '')), - 'registry': feed['url'] - } - if search_term and package['name'] and search_term.lower() not in package['name'].lower(): - continue - packages_dict[pkg_name].append(package) - except Exception as entry_error: - logger.error(f"Error processing entry in feed {feed['name']}: {entry_error}") - logger.info(f"Problematic entry: {entry}") - except requests.exceptions.HTTPError as e: - if e.response.status_code == 404: - logger.warning(f"Feed endpoint not found for {feed['name']}: {feed['url']} - 404 Not Found") - else: - logger.error(f"HTTP error fetching from feed {feed['name']}: {e}") - except requests.exceptions.RequestException as e: - logger.error(f"Request error fetching from feed {feed['name']}: {e}") - except Exception as error: - logger.error(f"Unexpected error fetching from feed {feed['name']}: {error}") - except Exception as e: - logger.error(f"Unexpected error in fetch_packages_from_registries: {e}") - - # Convert packages_dict to a list of packages with aggregated versions - packages = [] - for pkg_name, entries in packages_dict.items(): - # Aggregate versions with their publication dates - versions = [ - { - "version": entry.get('version', ''), - "pubDate": entry.get('pubDate', '') - } - for entry in entries - if entry.get('version', '') - ] - # Sort versions by pubDate (newest first) - versions.sort(key=lambda x: x.get('pubDate', ''), reverse=True) - if not versions: - continue - - # Take the latest entry for the main package fields - latest_entry = entries[0] - package = { - 'name': pkg_name, - 'version': latest_entry.get('version', ''), - 'latestVersion': latest_entry.get('version', ''), - 'author': latest_entry.get('author', ''), - 'fhirVersion': latest_entry.get('fhirVersion', ''), - 'url': latest_entry.get('url', ''), - 'canonical': latest_entry.get('canonical', ''), - 'dependencies': latest_entry.get('dependencies', []), - 'versions': versions, # List of versions with pubDate - 'registry': latest_entry.get('registry', '') - } - packages.append(package) - - logger.info(f"Total packages fetched: {len(packages)}") - return packages - -def normalize_package_data(raw_packages): - """ - Normalizes package data, identifying latest absolute and latest official versions. - Uses safe_parse_version for robust comparison. - """ - packages_grouped = defaultdict(list) - skipped_raw_count = 0 - for entry in raw_packages: - if not isinstance(entry, dict): - skipped_raw_count += 1 - logger.warning(f"Skipping raw package entry, not a dict: {entry}") - continue - raw_name = entry.get('name') or entry.get('title') or '' - if not isinstance(raw_name, str): - raw_name = str(raw_name) - name_part = raw_name.split('#', 1)[0].strip().lower() - if name_part: - packages_grouped[name_part].append(entry) - else: - if not entry.get('id'): - skipped_raw_count += 1 - logger.warning(f"Skipping raw package entry, no name or id: {entry}") - logger.info(f"Initial grouping: {len(packages_grouped)} unique package names found. 
Skipped {skipped_raw_count} raw entries.") - - normalized_list = [] - skipped_norm_count = 0 - total_entries_considered = 0 - - for name_key, entries in packages_grouped.items(): - total_entries_considered += len(entries) - latest_absolute_data = None - latest_official_data = None - latest_absolute_ver_for_comp = safe_parse_version("0.0.0a0") - latest_official_ver_for_comp = safe_parse_version("0.0.0a0") - all_versions = [] - package_name_display = name_key - - # Aggregate all versions from entries - processed_versions = set() - for package_entry in entries: - versions_list = package_entry.get('versions', []) - for version_info in versions_list: - if isinstance(version_info, dict) and 'version' in version_info: - version_str = version_info.get('version', '') - if version_str and version_str not in processed_versions: - all_versions.append(version_info) - processed_versions.add(version_str) - - processed_entries = [] - for package_entry in entries: - version_str = None - raw_name_entry = package_entry.get('name') or package_entry.get('title') or '' - if not isinstance(raw_name_entry, str): - raw_name_entry = str(raw_name_entry) - version_keys = ['version', 'latestVersion'] - for key in version_keys: - val = package_entry.get(key) - if isinstance(val, str) and val: - version_str = val.strip() - break - elif isinstance(val, list) and val and isinstance(val[0], str) and val[0]: - version_str = val[0].strip() - break - if not version_str and '#' in raw_name_entry: - parts = raw_name_entry.split('#', 1) - if len(parts) == 2 and parts[1]: - version_str = parts[1].strip() - - if not version_str: - logger.warning(f"Skipping entry for {raw_name_entry}: no valid version found. Entry: {package_entry}") - skipped_norm_count += 1 - continue - - version_str = version_str.strip() - current_display_name = str(raw_name_entry).split('#')[0].strip() - if current_display_name and current_display_name != name_key: - package_name_display = current_display_name - - entry_with_version = package_entry.copy() - entry_with_version['version'] = version_str - processed_entries.append(entry_with_version) - - try: - current_ver_obj_for_comp = safe_parse_version(version_str) - if latest_absolute_data is None or current_ver_obj_for_comp > latest_absolute_ver_for_comp: - latest_absolute_ver_for_comp = current_ver_obj_for_comp - latest_absolute_data = entry_with_version - - if re.match(r'^\d+\.\d+\.\d+(?:-[a-zA-Z0-9\.]+)?$', version_str): - if latest_official_data is None or current_ver_obj_for_comp > latest_official_ver_for_comp: - latest_official_ver_for_comp = current_ver_obj_for_comp - latest_official_data = entry_with_version - except Exception as comp_err: - logger.error(f"Error comparing version '{version_str}' for package '{package_name_display}': {comp_err}", exc_info=True) - - if latest_absolute_data: - final_absolute_version = latest_absolute_data.get('version', 'unknown') - final_official_version = latest_official_data.get('version') if latest_official_data else None - - author_raw = latest_absolute_data.get('author') or latest_absolute_data.get('publisher') or '' - if isinstance(author_raw, dict): - author = author_raw.get('name', str(author_raw)) - elif not isinstance(author_raw, str): - author = str(author_raw) - else: - author = author_raw - - fhir_version_str = None - fhir_keys = ['fhirVersion', 'fhirVersions', 'fhir_version'] - for key in fhir_keys: - val = latest_absolute_data.get(key) - if isinstance(val, list) and val and isinstance(val[0], str): - fhir_version_str = val[0] - break - elif isinstance(val, 
str) and val: - fhir_version_str = val - break - fhir_version_str = fhir_version_str or 'unknown' - - url_raw = latest_absolute_data.get('url') or latest_absolute_data.get('link') or '' - url = str(url_raw) if not isinstance(url_raw, str) else url_raw - canonical_raw = latest_absolute_data.get('canonical') or url - canonical = str(canonical_raw) if not isinstance(canonical_raw, str) else canonical_raw - - dependencies_raw = latest_absolute_data.get('dependencies', []) - dependencies = [] - if isinstance(dependencies_raw, dict): - dependencies = [{"name": str(dn), "version": str(dv)} for dn, dv in dependencies_raw.items()] - elif isinstance(dependencies_raw, list): - for dep in dependencies_raw: - if isinstance(dep, str): - if '@' in dep: - dep_name, dep_version = dep.split('@', 1) - dependencies.append({"name": dep_name, "version": dep_version}) - else: - dependencies.append({"name": dep, "version": "N/A"}) - elif isinstance(dep, dict) and 'name' in dep and 'version' in dep: - dependencies.append(dep) - - # Sort all_versions by pubDate (newest first) - all_versions.sort(key=lambda x: x.get('pubDate', ''), reverse=True) - - normalized_entry = { - 'name': package_name_display, - 'version': final_absolute_version, - 'latest_absolute_version': final_absolute_version, - 'latest_official_version': final_official_version, - 'author': author.strip(), - 'fhir_version': fhir_version_str.strip(), - 'url': url.strip(), - 'canonical': canonical.strip(), - 'dependencies': dependencies, - 'version_count': len(all_versions), - 'all_versions': all_versions, # Preserve the full versions list with pubDate - 'versions_data': processed_entries, - 'registry': latest_absolute_data.get('registry', '') - } - normalized_list.append(normalized_entry) - if not final_official_version: - logger.warning(f"No official version found for package '{package_name_display}'. Versions: {[v['version'] for v in all_versions]}") - else: - logger.warning(f"No valid entries found to determine details for package name key '{name_key}'. Entries: {entries}") - skipped_norm_count += len(entries) - - logger.info(f"Normalization complete. Entries considered: {total_entries_considered}, Skipped during norm: {skipped_norm_count}, Unique Packages Found: {len(normalized_list)}") - normalized_list.sort(key=lambda x: x.get('name', '').lower()) - return normalized_list - -def cache_packages(normalized_packages, db, CachedPackage): - """ - Cache normalized FHIR Implementation Guide packages in the CachedPackage database. - Updates existing records or adds new ones to improve performance for other routes. - - Args: - normalized_packages (list): List of normalized package dictionaries. - db: The SQLAlchemy database instance. - CachedPackage: The CachedPackage model class. 
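    Example (an illustrative sketch only; assumes an active Flask app context and
    that 'normalized' is a hypothetical variable holding the list produced by the
    normalization step above):

        cache_packages(normalized, db, CachedPackage)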
- """ - try: - for package in normalized_packages: - existing = CachedPackage.query.filter_by(package_name=package['name'], version=package['version']).first() - if existing: - existing.author = package['author'] - existing.fhir_version = package['fhir_version'] - existing.version_count = package['version_count'] - existing.url = package['url'] - existing.all_versions = package['all_versions'] - existing.dependencies = package['dependencies'] - existing.latest_absolute_version = package['latest_absolute_version'] - existing.latest_official_version = package['latest_official_version'] - existing.canonical = package['canonical'] - existing.registry = package.get('registry', '') - else: - new_package = CachedPackage( - package_name=package['name'], - version=package['version'], - author=package['author'], - fhir_version=package['fhir_version'], - version_count=package['version_count'], - url=package['url'], - all_versions=package['all_versions'], - dependencies=package['dependencies'], - latest_absolute_version=package['latest_absolute_version'], - latest_official_version=package['latest_official_version'], - canonical=package['canonical'], - registry=package.get('registry', '') - ) - db.session.add(new_package) - db.session.commit() - logger.info(f"Cached {len(normalized_packages)} packages in CachedPackage.") - except Exception as error: - db.session.rollback() - logger.error(f"Error caching packages: {error}") - raise - -#----------------------------------------------------------------------- - -# --- Helper Functions --- - -def _get_download_dir(): - """Gets the absolute path to the configured FHIR package download directory.""" - packages_dir = None - try: - packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') - if packages_dir: - logger.debug(f"Using FHIR_PACKAGES_DIR from current_app config: {packages_dir}") - else: - logger.warning("FHIR_PACKAGES_DIR not found in current_app config.") - instance_path = current_app.instance_path - packages_dir = os.path.join(instance_path, DOWNLOAD_DIR_NAME) - logger.warning(f"Falling back to default packages path: {packages_dir}") - except RuntimeError: - logger.warning("No app context found. Constructing default relative path for packages.") - script_dir = os.path.dirname(__file__) - instance_path_fallback = os.path.abspath(os.path.join(script_dir, '..', 'instance')) - packages_dir = os.path.join(instance_path_fallback, DOWNLOAD_DIR_NAME) - logger.debug(f"Constructed fallback packages path: {packages_dir}") - if not packages_dir: - logger.error("Fatal Error: Could not determine FHIR packages directory.") - return None - try: - os.makedirs(packages_dir, exist_ok=True) - return packages_dir - except OSError as e: - logger.error(f"Fatal Error creating packages directory {packages_dir}: {e}", exc_info=True) - return None - except Exception as e: - logger.error(f"Unexpected error getting/creating packages directory {packages_dir}: {e}", exc_info=True) - return None - -# --- Helper to get description (Add this to services.py) --- -def get_package_description(package_name, package_version, packages_dir): - """Reads package.json from a tgz and returns the description.""" - tgz_filename = construct_tgz_filename(package_name, package_version) - if not tgz_filename: return "Error: Could not construct filename." - tgz_path = os.path.join(packages_dir, tgz_filename) - if not os.path.exists(tgz_path): - return f"Error: Package file not found ({tgz_filename})." 
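    # Illustrative note: the code below assumes the npm-style layout used by FHIR
    # packages, i.e. a top-level 'package/' folder whose 'package.json' carries the
    # IG metadata (name, version, description, dependencies). A hypothetical member
    # listing might look like:
    #   package/package.json
    #   package/StructureDefinition-au-core-patient.json
    #   package/example/Patient-example.json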
- - try: - with tarfile.open(tgz_path, "r:gz") as tar: - pkg_json_member = next((m for m in tar if m.name == 'package/package.json'), None) - if pkg_json_member: - with tar.extractfile(pkg_json_member) as f: - pkg_data = json.load(f) - return pkg_data.get('description', 'No description found in package.json.') - else: - return "Error: package.json not found in archive." - except (tarfile.TarError, json.JSONDecodeError, KeyError, IOError, Exception) as e: - logger.error(f"Error reading description from {tgz_filename}: {e}") - return f"Error reading package details: {e}" - -def sanitize_filename_part(text): - """Basic sanitization for name/version parts of filename.""" - if not isinstance(text, str): - text = str(text) - safe_text = "".join(c if c.isalnum() or c in ['.', '-'] else '_' for c in text) - safe_text = re.sub(r'_+', '_', safe_text) - safe_text = safe_text.strip('_-.') - return safe_text if safe_text else "invalid_name" - -def construct_tgz_filename(name, version): - """Constructs the standard FHIR package filename using sanitized parts.""" - if not name or not version: - logger.error(f"Cannot construct filename with missing name ('{name}') or version ('{version}')") - return None - return f"{sanitize_filename_part(name)}-{sanitize_filename_part(version)}.tgz" - -def construct_metadata_filename(name, version): - """Constructs the standard metadata filename.""" - if not name or not version: - logger.error(f"Cannot construct metadata filename with missing name ('{name}') or version ('{version}')") - return None - return f"{sanitize_filename_part(name)}-{sanitize_filename_part(version)}.metadata.json" - -# --- Helper Function to Find References (Keep as before) --- -def find_references(element, refs_list): - """ - Recursively finds all 'reference' strings within a FHIR resource element (dict or list). - Appends found reference strings to refs_list. - """ - if isinstance(element, dict): - for key, value in element.items(): - if key == 'reference' and isinstance(value, str): - refs_list.append(value) - elif isinstance(value, (dict, list)): - find_references(value, refs_list) # Recurse - elif isinstance(element, list): - for item in element: - if isinstance(item, (dict, list)): - find_references(item, refs_list) # Recurse - -# --- NEW: Helper Function for Basic FHIR XML to Dict --- -def basic_fhir_xml_to_dict(xml_string): - """ - Very basic conversion of FHIR XML to a dictionary. - Focuses on resourceType, id, and finding reference elements/attributes. - NOTE: This is NOT a complete or robust FHIR XML parser. Use with caution. - Returns a dictionary representation or None if parsing fails. 
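    Illustrative sketch (hypothetical input; the output shape follows the logic below):

        <Patient xmlns="http://hl7.org/fhir">
          <id value="example"/>
          <managingOrganization>
            <reference value="Organization/org1"/>
          </managingOrganization>
        </Patient>

        would yield roughly:

        {"resourceType": "Patient", "id": "example",
         "_extracted_references": [{"reference": "Organization/org1"}]}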
- """ - try: - # Replace namespace prefixes for easier parsing with ElementTree find methods - # This is a common simplification but might fail for complex XML namespaces - xml_string_no_ns = re.sub(r' xmlns="[^"]+"', '', xml_string, count=1) - xml_string_no_ns = re.sub(r' xmlns:[^=]+="[^"]+"', '', xml_string_no_ns) - root = ET.fromstring(xml_string_no_ns) - - resource_dict = {"resourceType": root.tag} - - # Find 'id' element usually directly under the root - id_element = root.find("./id[@value]") - if id_element is not None: - resource_dict["id"] = id_element.get("value") - else: # Check if id is an attribute of the root (less common) - res_id = root.get("id") - if res_id: resource_dict["id"] = res_id - - # Recursively find 'reference' elements and extract their 'value' attribute - references = [] - for ref_element in root.findall(".//reference[@value]"): - ref_value = ref_element.get("value") - if ref_value: - references.append({"reference": ref_value}) # Store in a way find_references can find - - # Find other potential references (e.g., url attributes on certain elements) - # This needs to be expanded based on common FHIR patterns if needed - for url_element in root.findall(".//*[@url]"): # Find any element with a 'url' attribute - url_value = url_element.get("url") - # Basic check if it looks like a resource reference (simplistic) - if url_value and ('/' in url_value or url_value.startswith('urn:')): - # Decide how to store this - maybe add to a specific key? - # For now, let's add it to a generic '_references_from_url' list - if '_references_from_url' not in resource_dict: - resource_dict['_references_from_url'] = [] - resource_dict['_references_from_url'].append({"reference": url_value}) - - - # Add references found into the main dict structure so find_references can process them - if references or '_references_from_url' in resource_dict: - # Combine them - choose a suitable key, e.g., '_extracted_references' - resource_dict['_extracted_references'] = references + resource_dict.get('_references_from_url', []) - - # Include raw XML for debugging or potential later use - # resource_dict["_xml_content"] = xml_string - return resource_dict - - except ET.ParseError as e: - logger.error(f"XML Parse Error during basic conversion: {e}") - return None - except Exception as e: - logger.error(f"Unexpected error during basic_fhir_xml_to_dict: {e}", exc_info=True) - return None - -# def parse_package_filename(filename): -# """Parses a standard FHIR package filename into name and version.""" -# if not filename or not filename.endswith('.tgz'): -# logger.debug(f"Filename '{filename}' does not end with .tgz") -# return None, None -# base_name = filename[:-4] -# last_hyphen_index = base_name.rfind('-') -# while last_hyphen_index != -1: -# potential_name = base_name[:last_hyphen_index] -# potential_version = base_name[last_hyphen_index + 1:] -# if potential_version and (potential_version[0].isdigit() or any(potential_version.startswith(kw) for kw in ['v', 'dev', 'draft', 'preview', 'release', 'alpha', 'beta'])): -# name = potential_name.replace('_', '.') -# version = potential_version -# logger.debug(f"Parsed '{filename}' -> name='{name}', version='{version}'") -# return name, version -# else: -# last_hyphen_index = base_name.rfind('-', 0, last_hyphen_index) -# logger.warning(f"Could not parse version from '{filename}'. 
Treating '{base_name}' as name.") -# name = base_name.replace('_', '.') -# version = "" -# return name, version - -def parse_package_filename(filename): - """ - Parses a standard FHIR package filename into name and version. - Handles various version formats including semantic versions, pre-releases, snapshots, and complex suffixes. - """ - if not filename or not filename.endswith('.tgz'): - logger.debug(f"Filename '{filename}' does not end with .tgz") - return None, None - - base_name = filename[:-4] # Remove '.tgz' - - # Define a comprehensive pattern for FHIR package versions as a single string - # Matches versions like: - # - 1.0.0, 4.0.2 - # - 1.1.0-preview, 0.1.0-draft, 1.0.0-ballot-3 - # - 1.0.0-alpha.1, 1.0.0-RC2, 0.9.0-alpha1.0.8 - # - 1.1.0-snapshot-3, 0.0.1-snapshot - # - 2.3.5-buildnumbersuffix2 - version_pattern = r'(\d+\.\d+\.\d+)(?:-(?:preview|ballot|draft|snapshot|alpha|beta|RC\d*|buildnumbersuffix\d*|alpha\d+\.\d+\.\d+|snapshot-\d+|ballot-\d+|alpha\.\d+))?$' - - # Find the last occurrence of the version pattern in the base_name - match = None - for i in range(len(base_name), 0, -1): - substring = base_name[:i] - if re.search(version_pattern, substring): - match = re.search(version_pattern, base_name[:i]) - if match: - break - - if not match: - logger.warning(f"Could not parse version from '{filename}'. Treating '{base_name}' as name.") - name = base_name.replace('_', '.') - version = "" - return name, version - - # Extract the matched version - version_start_idx = match.start(1) # Start of the version (e.g., start of "1.1.0" in "1.1.0-preview") - name = base_name[:version_start_idx].rstrip('-').replace('_', '.') # Everything before the version - version = base_name[version_start_idx:] # The full version string - - # Validate the name and version - if not name or not version: - logger.warning(f"Invalid parse for '{filename}': name='{name}', version='{version}'. Using fallback.") - name = base_name.replace('_', '.') - version = "" - return name, version - - logger.debug(f"Parsed '{filename}' -> name='{name}', version='{version}'") - return name, version - -def remove_narrative(resource, include_narrative=False): - """Remove narrative text element from a FHIR resource if not including narrative.""" - if isinstance(resource, dict) and not include_narrative: - if 'text' in resource: - logger.debug(f"Removing narrative text from resource: {resource.get('resourceType', 'unknown')}") - del resource['text'] - if resource.get('resourceType') == 'Bundle' and 'entry' in resource: - resource['entry'] = [ - dict(entry, resource=remove_narrative(entry.get('resource'), include_narrative)) - if entry.get('resource') else entry - for entry in resource['entry'] - ] - return resource - -def get_cached_structure(package_name, package_version, resource_type, view): - """Retrieve cached StructureDefinition from SQLite.""" - try: - conn = sqlite3.connect(os.path.join(current_app.instance_path, 'fhir_ig.db')) - cursor = conn.cursor() - cursor.execute(""" - SELECT structure_data FROM structure_cache - WHERE package_name = ? AND package_version = ? AND resource_type = ? AND view = ? 
- """, (package_name, package_version, resource_type, view)) - result = cursor.fetchone() - conn.close() - if result: - logger.debug(f"Cache hit for {package_name}#{package_version}:{resource_type}:{view}") - return json.loads(result[0]) - logger.debug(f"No cache entry for {package_name}#{package_version}:{resource_type}:{view}") - return None - except Exception as e: - logger.error(f"Error accessing structure cache: {e}", exc_info=True) - return None - -def cache_structure(package_name, package_version, resource_type, view, structure_data): - """Cache StructureDefinition in SQLite.""" - try: - conn = sqlite3.connect(os.path.join(current_app.instance_path, 'fhir_ig.db')) - cursor = conn.cursor() - cursor.execute(""" - CREATE TABLE IF NOT EXISTS structure_cache ( - package_name TEXT, - package_version TEXT, - resource_type TEXT, - view TEXT, - structure_data TEXT, - PRIMARY KEY (package_name, package_version, resource_type, view) - ) - """) - cursor.execute(""" - INSERT OR REPLACE INTO structure_cache - (package_name, package_version, resource_type, view, structure_data) - VALUES (?, ?, ?, ?, ?) - """, (package_name, package_version, resource_type, view, json.dumps(structure_data))) - conn.commit() - conn.close() - logger.debug(f"Cached structure for {package_name}#{package_version}:{resource_type}:{view}") - except Exception as e: - logger.error(f"Error caching structure: {e}", exc_info=True) - - -#----OLD CODE HERE -# def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include_narrative=False, raw=False): -# """Helper to find and extract StructureDefinition json from a tgz path, prioritizing profile match.""" -# sd_data = None -# found_path = None -# if not tgz_path or not os.path.exists(tgz_path): -# logger.error(f"File not found in find_and_extract_sd: {tgz_path}") -# return None, None -# try: -# with tarfile.open(tgz_path, "r:gz") as tar: -# logger.debug(f"Searching for SD matching '{resource_identifier}' with profile '{profile_url}' in {os.path.basename(tgz_path)}") -# potential_matches = [] -# for member in tar: -# if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): -# continue -# if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: -# continue -# fileobj = None -# try: -# fileobj = tar.extractfile(member) -# if fileobj: -# content_bytes = fileobj.read() -# content_string = content_bytes.decode('utf-8-sig') -# data = json.loads(content_string) -# if isinstance(data, dict) and data.get('resourceType') == 'StructureDefinition': -# sd_id = data.get('id') -# sd_name = data.get('name') -# sd_type = data.get('type') -# sd_url = data.get('url') -# sd_filename_base = os.path.splitext(os.path.basename(member.name))[0] -# sd_filename_lower = sd_filename_base.lower() -# resource_identifier_lower = resource_identifier.lower() if resource_identifier else None -# match_score = 0 -# if profile_url and sd_url == profile_url: -# match_score = 5 -# sd_data = remove_narrative(data, include_narrative) -# found_path = member.name -# logger.info(f"Found definitive SD matching profile '{profile_url}' at path: {found_path}") -# break -# elif resource_identifier_lower: -# if sd_id and resource_identifier_lower == sd_id.lower(): -# match_score = 4 -# elif sd_name and resource_identifier_lower == sd_name.lower(): -# match_score = 4 -# elif sd_filename_lower == f"structuredefinition-{resource_identifier_lower}": -# match_score = 3 -# elif sd_type and 
resource_identifier_lower == sd_type.lower() and not re.search(r'[-.]', resource_identifier): -# match_score = 2 -# elif resource_identifier_lower in sd_filename_lower: -# match_score = 1 -# elif sd_url and resource_identifier_lower in sd_url.lower(): -# match_score = 1 -# if match_score > 0: -# potential_matches.append((match_score, remove_narrative(data, include_narrative), member.name)) -# if match_score >= 3: -# sd_data = remove_narrative(data, include_narrative) -# found_path = member.name -# break -# except json.JSONDecodeError as e: -# logger.debug(f"Could not parse JSON in {member.name}, skipping: {e}") -# except UnicodeDecodeError as e: -# logger.warning(f"Could not decode UTF-8 in {member.name}, skipping: {e}") -# except tarfile.TarError as e: -# logger.warning(f"Tar error reading member {member.name}, skipping: {e}") -# except Exception as e: -# logger.warning(f"Could not read/parse potential SD {member.name}, skipping: {e}") -# finally: -# if fileobj: -# fileobj.close() -# if not sd_data and potential_matches: -# potential_matches.sort(key=lambda x: x[0], reverse=True) -# best_match = potential_matches[0] -# sd_data = best_match[1] -# found_path = best_match[2] -# logger.info(f"Selected best match for '{resource_identifier}' from potential matches (Score: {best_match[0]}): {found_path}") -# if sd_data is None: -# logger.info(f"SD matching identifier '{resource_identifier}' or profile '{profile_url}' not found within archive {os.path.basename(tgz_path)}") -# elif raw: -# # Return the full, unprocessed StructureDefinition JSON -# with tarfile.open(tgz_path, "r:gz") as tar: -# fileobj = tar.extractfile(found_path) -# content_bytes = fileobj.read() -# content_string = content_bytes.decode('utf-8-sig') -# raw_data = json.loads(content_string) -# return remove_narrative(raw_data, include_narrative), found_path -# except tarfile.ReadError as e: -# logger.error(f"Tar ReadError reading {tgz_path}: {e}") -# return None, None -# except tarfile.TarError as e: -# logger.error(f"TarError reading {tgz_path} in find_and_extract_sd: {e}") -# raise -# except FileNotFoundError: -# logger.error(f"FileNotFoundError reading {tgz_path} in find_and_extract_sd.") -# raise -# except Exception as e: -# logger.error(f"Unexpected error in find_and_extract_sd for {tgz_path}: {e}", exc_info=True) -# raise -# return sd_data, found_path -#--- OLD - -# --- UPDATED: find_and_extract_sd function --- -def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include_narrative=False, raw=False): - """ - Helper to find and extract StructureDefinition json from a tgz path, prioritizing profile match. - - This version includes logic to handle canonical URLs with version numbers (e.g., `|5.2.0`) - and to prioritize a direct profile URL match. 
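    Illustrative example (hypothetical URL): a meta.profile value such as
    'http://hl7.org.au/fhir/core/StructureDefinition/au-core-patient|5.2.0' is
    reduced to 'http://hl7.org.au/fhir/core/StructureDefinition/au-core-patient'
    before comparison, so a version-suffixed canonical still matches the
    StructureDefinition's 'url' element.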
- """ - sd_data = None - found_path = None - if not tgz_path or not os.path.exists(tgz_path): - logger.error(f"File not found in find_and_extract_sd: {tgz_path}") - return None, None - try: - with tarfile.open(tgz_path, "r:gz") as tar: - logger.debug(f"Searching for SD matching '{resource_identifier}' with profile '{profile_url}' in {os.path.basename(tgz_path)}") - potential_matches = [] - - # --- Work Item 3: Sanitize profile URL to strip version --- - clean_profile_url = profile_url.split('|')[0] if profile_url else None - logger.debug(f"Cleaned profile URL for search: '{clean_profile_url}'") - - for member in tar: - if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): - continue - if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - continue - fileobj = None - try: - fileobj = tar.extractfile(member) - if fileobj: - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - if isinstance(data, dict) and data.get('resourceType') == 'StructureDefinition': - sd_id = data.get('id') - sd_name = data.get('name') - sd_type = data.get('type') - sd_url = data.get('url') - sd_filename_base = os.path.splitext(os.path.basename(member.name))[0] - sd_filename_lower = sd_filename_base.lower() - resource_identifier_lower = resource_identifier.lower() if resource_identifier else None - match_score = 0 - - # --- Prioritize exact match on the canonical URL (without version) --- - if clean_profile_url and sd_url == clean_profile_url: - match_score = 5 - sd_data = remove_narrative(data, include_narrative) - found_path = member.name - logger.info(f"Found definitive SD matching profile '{clean_profile_url}' at path: {found_path}") - break - - elif resource_identifier_lower: - if sd_id and resource_identifier_lower == sd_id.lower(): - match_score = 4 - elif sd_name and resource_identifier_lower == sd_name.lower(): - match_score = 4 - elif sd_filename_lower == f"structuredefinition-{resource_identifier_lower}": - match_score = 3 - # --- Work Item 2: Score match on resourceType for fallback logic --- - elif sd_type and resource_identifier_lower == sd_type.lower() and not re.search(r'[-.]', resource_identifier): - match_score = 2 - elif resource_identifier_lower in sd_filename_lower: - match_score = 1 - elif sd_url and resource_identifier_lower in sd_url.lower(): - match_score = 1 - if match_score > 0: - potential_matches.append((match_score, remove_narrative(data, include_narrative), member.name)) - if match_score >= 3: - sd_data = remove_narrative(data, include_narrative) - found_path = member.name - break - except json.JSONDecodeError as e: - logger.debug(f"Could not parse JSON in {member.name}, skipping: {e}") - except UnicodeDecodeError as e: - logger.warning(f"Could not decode UTF-8 in {member.name}, skipping: {e}") - except tarfile.TarError as e: - logger.warning(f"Tar error reading member {member.name}, skipping: {e}") - except Exception as e: - logger.warning(f"Could not read/parse potential SD {member.name}, skipping: {e}") - finally: - if fileobj: - fileobj.close() - if not sd_data and potential_matches: - potential_matches.sort(key=lambda x: x[0], reverse=True) - best_match = potential_matches[0] - sd_data = best_match[1] - found_path = best_match[2] - logger.info(f"Selected best match for '{resource_identifier}' from potential matches (Score: {best_match[0]}): {found_path}") - if sd_data is None: - 
logger.info(f"SD matching identifier '{resource_identifier}' or profile '{profile_url}' not found within archive {os.path.basename(tgz_path)}") - elif raw: - with tarfile.open(tgz_path, "r:gz") as tar: - fileobj = tar.extractfile(found_path) - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - raw_data = json.loads(content_string) - return remove_narrative(raw_data, include_narrative), found_path - except tarfile.ReadError as e: - logger.error(f"Tar ReadError reading {tgz_path}: {e}") - return None, None - except tarfile.TarError as e: - logger.error(f"TarError reading {tgz_path} in find_and_extract_sd: {e}") - raise - except FileNotFoundError: - logger.error(f"FileNotFoundError reading {tgz_path} in find_and_extract_sd.") - raise - except Exception as e: - logger.error(f"Unexpected error in find_and_extract_sd for {tgz_path}: {e}", exc_info=True) - raise - return sd_data, found_path - - - -# --- Metadata Saving/Loading --- -def save_package_metadata(name, version, dependency_mode, dependencies, complies_with_profiles=None, imposed_profiles=None): - """Saves dependency mode, imported dependencies, and profile relationships as metadata.""" - download_dir = _get_download_dir() - if not download_dir: - logger.error("Could not get download directory for metadata saving.") - return False - metadata_filename = construct_metadata_filename(name, version) - if not metadata_filename: return False - metadata_path = os.path.join(download_dir, metadata_filename) - metadata = { - 'package_name': name, - 'version': version, - 'dependency_mode': dependency_mode, - 'imported_dependencies': dependencies or [], - 'complies_with_profiles': complies_with_profiles or [], - 'imposed_profiles': imposed_profiles or [], - 'timestamp': datetime.datetime.now(datetime.timezone.utc).isoformat() - } - try: - with open(metadata_path, 'w', encoding='utf-8') as f: - json.dump(metadata, f, indent=2) - logger.info(f"Saved metadata for {name}#{version} at {metadata_path}") - return True - except IOError as e: - logger.error(f"Failed to write metadata file {metadata_path}: {e}") - return False - except Exception as e: - logger.error(f"Unexpected error saving metadata for {name}#{version}: {e}", exc_info=True) - return False - -def get_package_metadata(name, version): - """Retrieves the metadata for a given package.""" - download_dir = _get_download_dir() - if not download_dir: - logger.error("Could not get download directory for metadata retrieval.") - return None - metadata_filename = construct_metadata_filename(name, version) - if not metadata_filename: return None - metadata_path = os.path.join(download_dir, metadata_filename) - if os.path.exists(metadata_path): - try: - with open(metadata_path, 'r', encoding='utf-8') as f: - return json.load(f) - except (IOError, json.JSONDecodeError) as e: - logger.error(f"Failed to read or parse metadata file {metadata_path}: {e}") - return None - except Exception as e: - logger.error(f"Unexpected error reading metadata for {name}#{version}: {e}", exc_info=True) - return None - else: - logger.debug(f"Metadata file not found: {metadata_path}") - return None - -def process_package_file(tgz_path): - """ - Extracts types, profile status, MS elements, examples, profile relationships, - and search parameter conformance from a downloaded .tgz package. 
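    Illustrative shape of the returned dictionary (keys mirror the 'results'
    initializer below; the concrete values are placeholders):

        {
          'resource_types_info': [{'name': ..., 'type': ..., 'is_profile': ...,
                                   'must_support': ..., 'optional_usage': ...}],
          'must_support_elements': {'<profile or type name>': ['<element id/path>', ...]},
          'examples': {'<profile or type name>': ['package/example/....json', ...]},
          'complies_with_profiles': [...],
          'imposed_profiles': [...],
          'search_param_conformance': {'<ResourceType>': {'<param>': 'SHALL'/'SHOULD'/'MAY'/'Optional'}},
          'errors': []
        }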
- """ - if not tgz_path or not os.path.exists(tgz_path): - logger.error(f"Package file not found for processing: {tgz_path}") - return {'errors': [f"Package file not found: {tgz_path}"], 'resource_types_info': []} - - pkg_basename = os.path.basename(tgz_path) - name, version = parse_package_filename(tgz_path) # Assumes parse_package_filename exists - logger.info(f"Processing package file details: {pkg_basename} ({name}#{version})") - - # Initialize results dictionary - results = { - 'resource_types_info': [], - 'must_support_elements': {}, - 'examples': {}, - 'complies_with_profiles': [], - 'imposed_profiles': [], - 'search_param_conformance': {}, # Dictionary to store conformance - 'errors': [] - } - - # Intermediate storage for processing - resource_info = defaultdict(lambda: { - 'name': None, - 'type': None, - 'is_profile': False, - 'ms_flag': False, - 'ms_paths': set(), - 'examples': set(), - 'sd_processed': False, - 'optional_usage': False - }) - referenced_types = set() - capability_statement_data = None # Store the main CapabilityStatement - - try: - with tarfile.open(tgz_path, "r:gz") as tar: - members = tar.getmembers() - logger.debug(f"Found {len(members)} members in {pkg_basename}") - - # Filter for relevant JSON files once - json_members = [] - for m in members: - if m.isfile() and m.name.startswith('package/') and m.name.lower().endswith('.json'): - # Exclude common metadata files by basename - basename_lower = os.path.basename(m.name).lower() - if basename_lower not in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - json_members.append(m) - logger.debug(f"Found {len(json_members)} potential JSON resource members.") - - # --- Pass 1: Process StructureDefinitions and Find CapabilityStatement --- - logger.debug("Pass 1: Processing StructureDefinitions and finding CapabilityStatement...") - for member in json_members: - fileobj = None - try: - fileobj = tar.extractfile(member) - if not fileobj: continue - - content_bytes = fileobj.read() - # Handle potential BOM (Byte Order Mark) - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - - if not isinstance(data, dict): continue - resourceType = data.get('resourceType') - - # --- Process StructureDefinition --- - if resourceType == 'StructureDefinition': - data = remove_narrative(data) # Assumes remove_narrative exists - profile_id = data.get('id') or data.get('name') - sd_type = data.get('type') - sd_base = data.get('baseDefinition') - is_profile_sd = bool(sd_base) - - if not profile_id or not sd_type: - logger.warning(f"Skipping SD {member.name}: missing ID ('{profile_id}') or Type ('{sd_type}').") - continue - - entry_key = profile_id - entry = resource_info[entry_key] - if entry.get('sd_processed'): continue # Avoid reprocessing - - logger.debug(f"Processing SD: {entry_key} (type={sd_type}, profile={is_profile_sd})") - entry['name'] = entry_key - entry['type'] = sd_type - entry['is_profile'] = is_profile_sd - entry['sd_processed'] = True - referenced_types.add(sd_type) - - # Extract compliesWith/imposed profile URLs - complies_with = [] - imposed = [] - for ext in data.get('extension', []): - ext_url = ext.get('url') - value = ext.get('valueCanonical') - if value: - if ext_url == 'http://hl7.org/fhir/StructureDefinition/structuredefinition-compliesWithProfile': - complies_with.append(value) - elif ext_url == 'http://hl7.org/fhir/StructureDefinition/structuredefinition-imposeProfile': - imposed.append(value) - # Add unique URLs to results - 
results['complies_with_profiles'].extend(c for c in complies_with if c not in results['complies_with_profiles']) - results['imposed_profiles'].extend(i for i in imposed if i not in results['imposed_profiles']) - - # Must Support and Optional Usage Logic - has_ms_in_this_sd = False - ms_paths_in_this_sd = set() - elements = data.get('snapshot', {}).get('element', []) or data.get('differential', {}).get('element', []) - for element in elements: - if not isinstance(element, dict): continue - must_support = element.get('mustSupport') - element_id = element.get('id') - element_path = element.get('path') - slice_name = element.get('sliceName') - if must_support is True: - if element_id and element_path: - # Use element ID as the key for MS paths unless it's a slice - ms_path_key = f"{element_path}[sliceName='{slice_name}']" if slice_name else element_id - ms_paths_in_this_sd.add(ms_path_key) - has_ms_in_this_sd = True - else: - logger.warning(f"MS=true without path/id in {entry_key} ({member.name})") - has_ms_in_this_sd = True - - if has_ms_in_this_sd: - entry['ms_paths'].update(ms_paths_in_this_sd) - entry['ms_flag'] = True - - if sd_type == 'Extension' and has_ms_in_this_sd: - # Check if any MustSupport path is internal to the Extension definition - internal_ms_exists = any(p.startswith('Extension.') or ':' in p for p in entry['ms_paths']) - if internal_ms_exists: - entry['optional_usage'] = True - logger.info(f"Marked Extension {entry_key} as optional_usage") - - # --- Find CapabilityStatement --- - elif resourceType == 'CapabilityStatement': - # Store the first one found. Add logic here if specific selection needed. - if capability_statement_data is None: - capability_statement_data = data - logger.info(f"Found primary CapabilityStatement in: {member.name} (ID: {data.get('id', 'N/A')})") - else: - logger.warning(f"Found multiple CapabilityStatements. Using first found ({capability_statement_data.get('id', 'unknown')}). 
Ignoring {member.name}.") - - # Error handling for individual file processing - except json.JSONDecodeError as e: logger.warning(f"JSON parse error in {member.name}: {e}"); results['errors'].append(f"JSON error in {member.name}") - except UnicodeDecodeError as e: logger.warning(f"Encoding error in {member.name}: {e}"); results['errors'].append(f"Encoding error in {member.name}") - except Exception as e: logger.warning(f"Error processing member {member.name}: {e}", exc_info=False); results['errors'].append(f"Processing error in {member.name}: {e}") - finally: - if fileobj: fileobj.close() - # --- End Pass 1 --- - - # --- Pass 1.5: Process CapabilityStatement for Search Param Conformance --- - if capability_statement_data: - logger.debug("Processing CapabilityStatement for Search Parameter Conformance...") - conformance_map = defaultdict(dict) - # Standard FHIR extension URL for defining expectations - expectation_extension_url = "http://hl7.org/fhir/StructureDefinition/capabilitystatement-expectation" - - for rest_component in capability_statement_data.get('rest', []): - for resource_component in rest_component.get('resource', []): - resource_type = resource_component.get('type') - if not resource_type: continue - - for search_param in resource_component.get('searchParam', []): - param_name = search_param.get('name') - param_doc = search_param.get('documentation', '') - # Default conformance level if not explicitly stated - conformance_level = 'Optional' - - # Check for the standard expectation extension first - extensions = search_param.get('extension', []) - expectation_ext = next((ext for ext in extensions if ext.get('url') == expectation_extension_url), None) - - if expectation_ext and expectation_ext.get('valueCode'): - # Use the value from the standard extension - conformance_code = expectation_ext['valueCode'].upper() - # Map to SHALL, SHOULD, MAY - adjust if other codes are used by the IG - if conformance_code in ['SHALL', 'SHOULD', 'MAY', 'SHOULD-NOT']: # Add more if needed - conformance_level = conformance_code - else: - logger.warning(f"Unknown expectation code '{expectation_ext['valueCode']}' for {resource_type}.{param_name}. Defaulting to Optional.") - logger.debug(f" Conformance for {resource_type}.{param_name} from extension: {conformance_level}") - elif param_doc: - # Fallback: Check documentation string for keywords (less reliable) - doc_lower = param_doc.lower() - if 'shall' in doc_lower: conformance_level = 'SHALL' - elif 'should' in doc_lower: conformance_level = 'SHOULD' - elif 'may' in doc_lower: conformance_level = 'MAY' - if conformance_level != 'Optional': - logger.debug(f" Conformance for {resource_type}.{param_name} from documentation keywords: {conformance_level}") - - if param_name: - conformance_map[resource_type][param_name] = conformance_level - - results['search_param_conformance'] = dict(conformance_map) # Convert back to regular dict - logger.info(f"Extracted Search Parameter conformance rules for {len(conformance_map)} resource types.") - # logger.debug(f"Full Conformance Map: {json.dumps(results['search_param_conformance'], indent=2)}") # Optional detailed logging - else: - logger.warning(f"No CapabilityStatement found in package {pkg_basename}. 
Search parameter conformance data will be unavailable.") - # --- End Pass 1.5 --- - - # --- Pass 2: Process Examples --- - logger.debug("Pass 2: Processing Examples...") - example_members = [m for m in members if m.isfile() and m.name.startswith('package/') and 'example' in m.name.lower()] - - for member in example_members: - # Skip metadata files again just in case - basename_lower = os.path.basename(member.name).lower() - if basename_lower in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: continue - - logger.debug(f"Processing potential example file: {member.name}") - is_json = member.name.lower().endswith('.json') - fileobj = None - associated_key = None - - try: - fileobj = tar.extractfile(member) - if not fileobj: continue - - if is_json: - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - - if not isinstance(data, dict): continue - resource_type_ex = data.get('resourceType') - if not resource_type_ex: continue - - # Find association key (profile or type) - profile_meta = data.get('meta', {}).get('profile', []) - found_profile_match = False - if profile_meta and isinstance(profile_meta, list): - for profile_url in profile_meta: - if profile_url and isinstance(profile_url, str): - # Try matching by ID derived from profile URL first - profile_id_from_meta = profile_url.split('/')[-1] - if profile_id_from_meta in resource_info: - associated_key = profile_id_from_meta - found_profile_match = True - break - # Fallback to matching by full profile URL if needed - elif profile_url in resource_info: - associated_key = profile_url - found_profile_match = True - break - # If no profile match, associate with base resource type - if not found_profile_match: - key_to_use = resource_type_ex - # Ensure the base type exists in resource_info - if key_to_use not in resource_info: - resource_info[key_to_use].update({'name': key_to_use, 'type': resource_type_ex, 'is_profile': False}) - associated_key = key_to_use - - referenced_types.add(resource_type_ex) # Track type even if example has profile - - else: # Guessing for non-JSON examples - guessed_type = basename_lower.split('-')[0].capitalize() - guessed_profile_id = basename_lower.split('-')[0] # Often filename starts with profile ID - key_to_use = None - if guessed_profile_id in resource_info: key_to_use = guessed_profile_id - elif guessed_type in resource_info: key_to_use = guessed_type - else: # Add base type if not seen - key_to_use = guessed_type - resource_info[key_to_use].update({'name': key_to_use, 'type': key_to_use, 'is_profile': False}) - associated_key = key_to_use - referenced_types.add(guessed_type) - - # Add example filename to the associated resource/profile - if associated_key: - resource_info[associated_key]['examples'].add(member.name) - # logger.debug(f"Associated example {member.name} with {associated_key}") - else: - logger.warning(f"Could not associate example {member.name} with any known resource or profile.") - - # --- CORRECTED INDENTATION FOR FINALLY BLOCK --- - except json.JSONDecodeError as e: logger.warning(f"Could not parse JSON example {member.name}: {e}") - except UnicodeDecodeError as e: logger.warning(f"Could not decode example {member.name}: {e}") - except tarfile.TarError as e: logger.warning(f"TarError reading example {member.name}: {e}") - except Exception as e: logger.warning(f"Could not process example member {member.name}: {e}", exc_info=False) - finally: - if fileobj: fileobj.close() - # --- End Pass 2 --- 
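            # Illustrative sketch of a single accumulated resource_info entry at this
            # point (field names come from the defaultdict initializer above; the key
            # 'AUCorePatient' is hypothetical):
            #   resource_info['AUCorePatient'] = {
            #       'name': 'AUCorePatient', 'type': 'Patient', 'is_profile': True,
            #       'ms_flag': True, 'ms_paths': {'Patient.identifier', ...},
            #       'examples': {'package/example/Patient-example.json', ...},
            #       'sd_processed': True, 'optional_usage': False
            #   }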
- - # --- Pass 3: Ensure Relevant Base Types --- - logger.debug("Pass 3: Ensuring relevant base types...") - essential_types = {'CapabilityStatement'} # Add any other types vital for display/logic - for type_name in referenced_types | essential_types: - # Check against a predefined list of valid FHIR types (FHIR_R4_BASE_TYPES) - if type_name in FHIR_R4_BASE_TYPES and type_name not in resource_info: - resource_info[type_name]['name'] = type_name - resource_info[type_name]['type'] = type_name - resource_info[type_name]['is_profile'] = False - logger.debug(f"Added base type entry for {type_name}") - # --- End Pass 3 --- - - # --- Final Consolidation --- - logger.debug(f"Finalizing results from {len(resource_info)} resource_info entries...") - final_list = [] - final_ms_elements = {} - final_examples = {} - for key, info in resource_info.items(): - display_name = info.get('name') or key - base_type = info.get('type') - # Skip entries missing essential info (should be rare now) - if not display_name or not base_type: - logger.warning(f"Skipping final format for incomplete key: {key} - Info: {info}") - continue - # Add to final list for UI display - final_list.append({ - 'name': display_name, - 'type': base_type, - 'is_profile': info.get('is_profile', False), - 'must_support': info.get('ms_flag', False), - 'optional_usage': info.get('optional_usage', False) - }) - # Add Must Support paths if present - if info['ms_paths']: - final_ms_elements[display_name] = sorted(list(info['ms_paths'])) - # Add Examples if present - if info['examples']: - final_examples[display_name] = sorted(list(info['examples'])) - - # Store final lists/dicts in results - results['resource_types_info'] = sorted(final_list, key=lambda x: (not x.get('is_profile', False), x.get('name', ''))) - results['must_support_elements'] = final_ms_elements - results['examples'] = final_examples - logger.debug(f"Final must_support_elements count: {len(final_ms_elements)}") - logger.debug(f"Final examples count: {len(final_examples)}") - # --- End Final Consolidation --- - - # Exception handling for opening/reading the tarfile itself - except tarfile.ReadError as e: err_msg = f"Tar ReadError processing package file {pkg_basename}: {e}"; logger.error(err_msg); results['errors'].append(err_msg) - except tarfile.TarError as e: err_msg = f"TarError processing package file {pkg_basename}: {e}"; logger.error(err_msg); results['errors'].append(err_msg) - except FileNotFoundError: err_msg = f"Package file not found during processing: {tgz_path}"; logger.error(err_msg); results['errors'].append(err_msg) - except Exception as e: err_msg = f"Unexpected error processing package file {pkg_basename}: {e}"; logger.error(err_msg, exc_info=True); results['errors'].append(err_msg) - - # --- Final Summary Logging --- - final_types_count = len(results['resource_types_info']) - ms_count = sum(1 for r in results['resource_types_info'] if r.get('must_support')) - optional_ms_count = sum(1 for r in results['resource_types_info'] if r.get('optional_usage')) - total_ms_paths = sum(len(v) for v in results['must_support_elements'].values()) - total_examples = sum(len(v) for v in results['examples'].values()) - total_conf_types = len(results['search_param_conformance']) - total_conf_params = sum(len(v) for v in results['search_param_conformance'].values()) - - logger.info(f"Package processing finished for {pkg_basename}: " - f"{final_types_count} Res/Profs; {ms_count} MS ({optional_ms_count} OptExt); {total_ms_paths} MS paths; " - f"{total_examples} Exs; 
Comp={len(results['complies_with_profiles'])}; Imp={len(results['imposed_profiles'])}; " - f"ConfParams={total_conf_params} for {total_conf_types} types; Errors={len(results['errors'])}") - - return results # Return the full results dictionary - - -# --- Validation Functions --- - -def _legacy_navigate_fhir_path(resource, path, extension_url=None): - """Navigates a FHIR resource using a FHIRPath-like expression, handling nested structures.""" - logger.debug(f"Navigating FHIR path: {path}") - if not resource or not path: - return None - parts = path.split('.') - current = resource - resource_type = resource.get('resourceType') - for i, part in enumerate(parts): - # Skip resource type prefix (e.g., Patient) - if i == 0 and part == resource_type: - continue - # Handle array indexing (e.g., name[0]) - match = re.match(r'^(\w+)\[(\d+)\]$', part) - if match: - key, index = match.groups() - index = int(index) - if isinstance(current, dict) and key in current: - if isinstance(current[key], list) and index < len(current[key]): - current = current[key][index] - else: - logger.debug(f"Path {part} invalid: key={key}, index={index}, current={current.get(key)}") - return None - elif isinstance(current, list) and index < len(current): - current = current[index] - else: - logger.debug(f"Path {part} not found in current={current}") - return None - else: - # Handle choice types (e.g., onset[x]) - if '[x]' in part: - part = part.replace('[x]', '') - # Try common choice type suffixes - for suffix in ['', 'DateTime', 'Age', 'Period', 'Range', 'String', 'CodeableConcept']: - test_key = part + suffix - if isinstance(current, dict) and test_key in current: - current = current[test_key] - break - else: - logger.debug(f"Choice type {part}[x] not found in current={current}") - return None - elif isinstance(current, dict): - if part in current: - current = current[part] - else: - # Handle FHIR complex types - if part == 'code' and 'coding' in current and isinstance(current['coding'], list) and current['coding']: - current = current['coding'] - elif part == 'patient' and 'reference' in current and current['reference']: - current = current['reference'] - elif part == 'manifestation' and isinstance(current, list) and current and 'coding' in current[0] and current[0]['coding']: - current = current[0]['coding'] - elif part == 'clinicalStatus' and 'coding' in current and isinstance(current['coding'], list) and current['coding']: - current = current['coding'] - else: - logger.debug(f"Path {part} not found in current={current}") - return None - elif isinstance(current, list) and len(current) > 0: - # Try to find the part in list items - found = False - for item in current: - if isinstance(item, dict) and part in item: - current = item[part] - found = True - break - if not found: - # For nested paths like communication.language, return None only if the parent is absent - logger.debug(f"Path {part} not found in list items: {current}") - return None - if extension_url and isinstance(current, list): - current = [item for item in current if item.get('url') == extension_url] - # Return non-None/non-empty values as present - result = current if (current is not None and (not isinstance(current, list) or current)) else None - logger.debug(f"Path {path} resolved to: {result}") - return result - -def navigate_fhir_path(resource, path, extension_url=None): - """Navigates a FHIR resource using FHIRPath expressions.""" - logger.debug(f"Navigating FHIR path: {path}, extension_url={extension_url}") - if not resource or not path: - return 
None - try: - # Adjust path for extension filtering - if extension_url and 'extension' in path: - path = f"{path}[url='{extension_url}']" - result = evaluate(resource, path) - # Return first result if list, None if empty - return result[0] if result else None - except Exception as e: - logger.error(f"FHIRPath evaluation failed for {path}: {e}") - # Fallback to legacy navigation for compatibility - return _legacy_navigate_fhir_path(resource, path, extension_url) - -def _legacy_validate_resource_against_profile(package_name, version, resource, include_dependencies=True): - """Validates a FHIR resource against a StructureDefinition in the specified package.""" - logger.debug(f"Validating resource {resource.get('resourceType')} against {package_name}#{version}, include_dependencies={include_dependencies}") - result = { - 'valid': True, - 'errors': [], - 'warnings': [], - 'details': [], # Enhanced info for future use - 'resource_type': resource.get('resourceType'), - 'resource_id': resource.get('id', 'unknown'), - 'profile': resource.get('meta', {}).get('profile', [None])[0] - } - download_dir = _get_download_dir() - if not download_dir: - result['valid'] = False - result['errors'].append("Could not access download directory") - result['details'].append({ - 'issue': "Could not access download directory", - 'severity': 'error', - 'description': "The server could not locate the directory where FHIR packages are stored." - }) - logger.error("Validation failed: Could not access download directory") - return result - - tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version)) - logger.debug(f"Checking for package file: {tgz_path}") - if not os.path.exists(tgz_path): - result['valid'] = False - result['errors'].append(f"Package file not found: {package_name}#{version}") - result['details'].append({ - 'issue': f"Package file not found: {package_name}#{version}", - 'severity': 'error', - 'description': f"The package {package_name}#{version} is not available in the download directory." - }) - logger.error(f"Validation failed: Package file not found at {tgz_path}") - return result - - # Use profile from meta.profile if available - profile_url = None - meta = resource.get('meta', {}) - profiles = meta.get('profile', []) - if profiles: - profile_url = profiles[0] - logger.debug(f"Using profile from meta.profile: {profile_url}") - - # Find StructureDefinition - sd_data, sd_path = find_and_extract_sd(tgz_path, resource.get('resourceType'), profile_url) - if not sd_data and include_dependencies: - logger.debug(f"SD not found in {package_name}#{version}. 
Checking dependencies.") - try: - with tarfile.open(tgz_path, "r:gz") as tar: - package_json_member = None - for member in tar: - if member.name == 'package/package.json': - package_json_member = member - break - if package_json_member: - fileobj = tar.extractfile(package_json_member) - pkg_data = json.load(fileobj) - fileobj.close() - dependencies = pkg_data.get('dependencies', {}) - logger.debug(f"Found dependencies: {dependencies}") - for dep_name, dep_version in dependencies.items(): - dep_tgz = os.path.join(download_dir, construct_tgz_filename(dep_name, dep_version)) - if os.path.exists(dep_tgz): - logger.debug(f"Searching SD in dependency {dep_name}#{dep_version}") - sd_data, sd_path = find_and_extract_sd(dep_tgz, resource.get('resourceType'), profile_url) - if sd_data: - logger.info(f"Found SD in dependency {dep_name}#{dep_version} at {sd_path}") - break - else: - logger.warning(f"Dependency package {dep_name}#{dep_version} not found at {dep_tgz}") - else: - logger.warning(f"No package.json found in {tgz_path}") - except json.JSONDecodeError as e: - logger.error(f"Failed to parse package.json in {tgz_path}: {e}") - except tarfile.TarError as e: - logger.error(f"Failed to read {tgz_path} while checking dependencies: {e}") - except Exception as e: - logger.error(f"Unexpected error while checking dependencies in {tgz_path}: {e}") - - if not sd_data: - result['valid'] = False - result['errors'].append(f"No StructureDefinition found for {resource.get('resourceType')} with profile {profile_url or 'any'}") - result['details'].append({ - 'issue': f"No StructureDefinition found for {resource.get('resourceType')} with profile {profile_url or 'any'}", - 'severity': 'error', - 'description': f"The package {package_name}#{version} (and dependencies, if checked) does not contain a matching StructureDefinition." - }) - logger.error(f"Validation failed: No SD for {resource.get('resourceType')} in {tgz_path}") - return result - logger.debug(f"Found SD at {sd_path}") - - # Validate required elements (min=1) - errors = [] - warnings = set() # Deduplicate warnings - elements = sd_data.get('snapshot', {}).get('element', []) - for element in elements: - path = element.get('path') - min_val = element.get('min', 0) - must_support = element.get('mustSupport', False) - definition = element.get('definition', 'No definition provided in StructureDefinition.') - - # Check required elements - if min_val > 0 and not '.' in path[1 + path.find('.'):] if path.find('.') != -1 else True: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - error_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Required element {path} missing" - errors.append(error_msg) - result['details'].append({ - 'issue': error_msg, - 'severity': 'error', - 'description': f"{definition} This element is mandatory (min={min_val}) per the profile {profile_url or 'unknown'}." - }) - logger.info(f"Validation error: Required element {path} missing") - - # Check must-support elements - if must_support and not '.' 
in path[1 + path.find('.'):] if path.find('.') != -1 else True: - if '[x]' in path: - base_path = path.replace('[x]', '') - found = False - for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']: - test_path = f"{base_path}{suffix}" - value = navigate_fhir_path(resource, test_path) - if value is not None and (not isinstance(value, list) or any(value)): - found = True - break - if not found: - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)." - }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - else: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - if element.get('min', 0) == 0: - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)." - }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - - # Handle dataAbsentReason for must-support elements - if path.endswith('dataAbsentReason') and must_support: - value_x_path = path.replace('dataAbsentReason', 'value[x]') - value_found = False - for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']: - test_path = path.replace('dataAbsentReason', f'value{suffix}') - value = navigate_fhir_path(resource, test_path) - if value is not None and (not isinstance(value, list) or any(value)): - value_found = True - break - if not value_found: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support and should be used to indicate why the associated value is absent." 
- }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - - result['errors'] = errors - result['warnings'] = list(warnings) - result['valid'] = len(errors) == 0 - result['summary'] = { - 'error_count': len(errors), - 'warning_count': len(warnings) - } - logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}") - return result -# -- OLD -# def validate_resource_against_profile(package_name, version, resource, include_dependencies=True): -# result = { -# 'valid': True, -# 'errors': [], -# 'warnings': [], -# 'details': [], -# 'resource_type': resource.get('resourceType'), -# 'resource_id': resource.get('id', 'unknown'), -# 'profile': resource.get('meta', {}).get('profile', [None])[0] -# } - -# # Attempt HAPI validation if a profile is specified -# if result['profile']: -# try: -# hapi_url = f"{current_app.config['HAPI_FHIR_URL'].rstrip('/')}/{resource['resourceType']}/$validate?profile={result['profile']}" -# response = requests.post( -# hapi_url, -# json=resource, -# headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'}, -# timeout=10 -# ) -# response.raise_for_status() -# outcome = response.json() -# if outcome.get('resourceType') == 'OperationOutcome': -# for issue in outcome.get('issue', []): -# severity = issue.get('severity') -# diagnostics = issue.get('diagnostics', issue.get('details', {}).get('text', 'No details provided')) -# detail = { -# 'issue': diagnostics, -# 'severity': severity, -# 'description': issue.get('details', {}).get('text', diagnostics) -# } -# if severity in ['error', 'fatal']: -# result['valid'] = False -# result['errors'].append(diagnostics) -# elif severity == 'warning': -# result['warnings'].append(diagnostics) -# result['details'].append(detail) -# result['summary'] = { -# 'error_count': len(result['errors']), -# 'warning_count': len(result['warnings']) -# } -# logger.debug(f"HAPI validation for {result['resource_type']}/{result['resource_id']}: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}") -# return result -# else: -# logger.warning(f"HAPI returned non-OperationOutcome: {outcome.get('resourceType')}") -# except requests.RequestException as e: -# logger.error(f"HAPI validation failed for {result['resource_type']}/{result['resource_id']}: {e}") -# result['details'].append({ -# 'issue': f"HAPI validation failed: {str(e)}", -# 'severity': 'warning', -# 'description': 'Falling back to local validation due to HAPI server error.' -# }) - -# # Fallback to local validation -# download_dir = _get_download_dir() -# if not download_dir: -# result['valid'] = False -# result['errors'].append("Could not access download directory") -# result['details'].append({ -# 'issue': "Could not access download directory", -# 'severity': 'error', -# 'description': "The server could not locate the directory where FHIR packages are stored." 
-# }) -# return result - -# tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version)) -# sd_data, sd_path = find_and_extract_sd(tgz_path, resource.get('resourceType'), result['profile']) -# if not sd_data: -# result['valid'] = False -# result['errors'].append(f"No StructureDefinition found for {resource.get('resourceType')}") -# result['details'].append({ -# 'issue': f"No StructureDefinition found for {resource.get('resourceType')}", -# 'severity': 'error', -# 'description': f"The package {package_name}#{version} does not contain a matching StructureDefinition." -# }) -# return result - -# elements = sd_data.get('snapshot', {}).get('element', []) -# for element in elements: -# path = element.get('path') -# min_val = element.get('min', 0) -# must_support = element.get('mustSupport', False) -# slicing = element.get('slicing') -# slice_name = element.get('sliceName') - -# # Check required elements -# if min_val > 0: -# value = navigate_fhir_path(resource, path) -# if value is None or (isinstance(value, list) and not any(value)): -# result['valid'] = False -# result['errors'].append(f"Required element {path} missing") -# result['details'].append({ -# 'issue': f"Required element {path} missing", -# 'severity': 'error', -# 'description': f"Element {path} has min={min_val} in profile {result['profile'] or 'unknown'}" -# }) - -# # Check must-support elements -# if must_support: -# value = navigate_fhir_path(resource, slice_name if slice_name else path) -# if value is None or (isinstance(value, list) and not any(value)): -# result['warnings'].append(f"Must Support element {path} missing or empty") -# result['details'].append({ -# 'issue': f"Must Support element {path} missing or empty", -# 'severity': 'warning', -# 'description': f"Element {path} is marked as Must Support in profile {result['profile'] or 'unknown'}" -# }) - -# # Validate slicing -# if slicing and not slice_name: # Parent slicing element -# discriminator = slicing.get('discriminator', []) -# for d in discriminator: -# d_type = d.get('type') -# d_path = d.get('path') -# if d_type == 'value': -# sliced_elements = navigate_fhir_path(resource, path) -# if isinstance(sliced_elements, list): -# seen_values = set() -# for elem in sliced_elements: -# d_value = navigate_fhir_path(elem, d_path) -# if d_value in seen_values: -# result['valid'] = False -# result['errors'].append(f"Duplicate discriminator value {d_value} for {path}.{d_path}") -# seen_values.add(d_value) -# elif d_type == 'type': -# sliced_elements = navigate_fhir_path(resource, path) -# if isinstance(sliced_elements, list): -# for elem in sliced_elements: -# if not navigate_fhir_path(elem, d_path): -# result['valid'] = False -# result['errors'].append(f"Missing discriminator type {d_path} for {path}") - -# result['summary'] = { -# 'error_count': len(result['errors']), -# 'warning_count': len(result['warnings']) -# } -# return result - -# def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True): -# """Validates a FHIR Bundle against profiles in the specified package.""" -# logger.debug(f"Validating bundle against {package_name}#{version}, include_dependencies={include_dependencies}") -# result = { -# 'valid': True, -# 'errors': [], -# 'warnings': [], -# 'details': [], -# 'results': {}, -# 'summary': { -# 'resource_count': 0, -# 'failed_resources': 0, -# 'profiles_validated': set() -# } -# } -# if not bundle.get('resourceType') == 'Bundle': -# result['valid'] = False -# result['errors'].append("Resource is not a 
Bundle") -# result['details'].append({ -# 'issue': "Resource is not a Bundle", -# 'severity': 'error', -# 'description': "The provided resource must have resourceType 'Bundle' to be validated as a bundle." -# }) -# logger.error("Validation failed: Resource is not a Bundle") -# return result - -# # Track references to validate resolvability -# references = set() -# resolved_references = set() - -# for entry in bundle.get('entry', []): -# resource = entry.get('resource') -# if not resource: -# continue -# resource_type = resource.get('resourceType') -# resource_id = resource.get('id', 'unknown') -# result['summary']['resource_count'] += 1 - -# # Collect references -# for key, value in resource.items(): -# if isinstance(value, dict) and 'reference' in value: -# references.add(value['reference']) -# elif isinstance(value, list): -# for item in value: -# if isinstance(item, dict) and 'reference' in item: -# references.add(item['reference']) - -# # Validate resource -# validation_result = validate_resource_against_profile(package_name, version, resource, include_dependencies) -# result['results'][f"{resource_type}/{resource_id}"] = validation_result -# result['summary']['profiles_validated'].add(validation_result['profile'] or 'unknown') - -# # Aggregate errors and warnings -# if not validation_result['valid']: -# result['valid'] = False -# result['summary']['failed_resources'] += 1 -# result['errors'].extend(validation_result['errors']) -# result['warnings'].extend(validation_result['warnings']) -# result['details'].extend(validation_result['details']) - -# # Mark resource as resolved if it has an ID -# if resource_id != 'unknown': -# resolved_references.add(f"{resource_type}/{resource_id}") - -# # Check for unresolved references -# unresolved = references - resolved_references -# for ref in unresolved: -# warning_msg = f"Unresolved reference: {ref}" -# result['warnings'].append(warning_msg) -# result['details'].append({ -# 'issue': warning_msg, -# 'severity': 'warning', -# 'description': f"The reference {ref} points to a resource not included in the bundle. Ensure the referenced resource is present or resolvable." -# }) -# logger.info(f"Validation warning: Unresolved reference {ref}") - -# # Finalize summary -# result['summary']['profiles_validated'] = list(result['summary']['profiles_validated']) -# result['summary']['error_count'] = len(result['errors']) -# result['summary']['warning_count'] = len(result['warnings']) -# logger.debug(f"Bundle validation result: valid={result['valid']}, errors={result['summary']['error_count']}, warnings={result['summary']['warning_count']}, resources={result['summary']['resource_count']}") -# return result -# -- OLD - - -# --- UPDATED: validate_resource_against_profile function --- -def validate_resource_against_profile(package_name, version, resource, include_dependencies=True): - """ - Validates a FHIR resource against a StructureDefinition in the specified package. - - This version correctly handles the absence of a `meta.profile` by falling back - to the base resource definition. It also sanitizes profile URLs to avoid - version mismatch errors. 
- """ - result = { - 'valid': True, - 'errors': [], - 'warnings': [], - 'details': [], - 'resource_type': resource.get('resourceType'), - 'resource_id': resource.get('id', 'unknown'), - 'profile': resource.get('meta', {}).get('profile', [None])[0] - } - - download_dir = _get_download_dir() - if not download_dir: - result['valid'] = False - result['errors'].append("Could not access download directory") - result['details'].append({ - 'issue': "Could not access download directory", - 'severity': 'error', - 'description': "The server could not locate the directory where FHIR packages are stored." - }) - logger.error("Validation failed: Could not access download directory") - return result - - # --- Work Item 3 & 2: Get profile URL or fallback to resourceType --- - profile_url = result['profile'] - resource_identifier = resource.get('resourceType') - - if profile_url: - # Sanitize profile URL to remove version - clean_profile_url = profile_url.split('|')[0] - logger.debug(f"Using provided profile: {profile_url}. Cleaned to: {clean_profile_url}") - resource_identifier = profile_url - else: - # No profile provided, fallback to resource type - logger.debug(f"No profile in resource, using base type as identifier: {resource_identifier}") - clean_profile_url = None - - tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version)) - logger.debug(f"Checking for package file: {tgz_path}") - - # Find StructureDefinition - sd_data, sd_path = find_and_extract_sd(tgz_path, resource_identifier, clean_profile_url) - - if not sd_data and include_dependencies: - logger.debug(f"SD not found in {package_name}#{version}. Checking dependencies.") - try: - with tarfile.open(tgz_path, "r:gz") as tar: - package_json_member = None - for member in tar: - if member.name == 'package/package.json': - package_json_member = member - break - if package_json_member: - fileobj = tar.extractfile(package_json_member) - pkg_data = json.load(fileobj) - fileobj.close() - dependencies = pkg_data.get('dependencies', {}) - logger.debug(f"Found dependencies: {dependencies}") - for dep_name, dep_version in dependencies.items(): - dep_tgz = os.path.join(download_dir, construct_tgz_filename(dep_name, dep_version)) - if os.path.exists(dep_tgz): - logger.debug(f"Searching SD in dependency {dep_name}#{dep_version}") - sd_data, sd_path = find_and_extract_sd(dep_tgz, resource_identifier, clean_profile_url) - if sd_data: - logger.info(f"Found SD in dependency {dep_name}#{dep_version} at {sd_path}") - break - else: - logger.warning(f"Dependency package {dep_name}#{dep_version} not found at {dep_tgz}") - else: - logger.warning(f"No package.json found in {tgz_path}") - except json.JSONDecodeError as e: - logger.error(f"Failed to parse package.json in {tgz_path}: {e}") - except tarfile.TarError as e: - logger.error(f"Failed to read {tgz_path} while checking dependencies: {e}") - except Exception as e: - logger.error(f"Unexpected error while checking dependencies in {tgz_path}: {e}") - - if not sd_data: - result['valid'] = False - result['errors'].append(f"No StructureDefinition found for {resource_identifier} with profile {clean_profile_url or 'any'}") - result['details'].append({ - 'issue': f"No StructureDefinition found for {resource_identifier} with profile {clean_profile_url or 'any'}", - 'severity': 'error', - 'description': f"The package {package_name}#{version} (and dependencies, if checked) does not contain a matching StructureDefinition." 
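(Editor's note: the dependency fallback above opens the package tarball and reads package/package.json to learn which other packages might hold the StructureDefinition. A standalone sketch of that extraction step, using only the standard library; the function name is illustrative.)

import json
import tarfile

def read_package_dependencies(tgz_path):
    """Return the dependencies dict from package/package.json inside a FHIR package .tgz."""
    with tarfile.open(tgz_path, "r:gz") as tar:
        try:
            member = tar.getmember('package/package.json')
        except KeyError:
            return {}  # no package.json in this archive
        fileobj = tar.extractfile(member)
        if fileobj is None:
            return {}
        with fileobj:
            pkg = json.load(fileobj)
    return pkg.get('dependencies', {})

# A typical IG package returns something like:
# {'hl7.fhir.r4.core': '4.0.1', 'hl7.terminology.r4': '5.0.0'}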
- }) - logger.error(f"Validation failed: No SD for {resource_identifier} in {tgz_path}") - return result - logger.debug(f"Found SD at {sd_path}") - - # Validate required elements (min=1) - errors = [] - warnings = set() - elements = sd_data.get('snapshot', {}).get('element', []) - for element in elements: - path = element.get('path') - min_val = element.get('min', 0) - must_support = element.get('mustSupport', False) - definition = element.get('definition', 'No definition provided in StructureDefinition.') - - # Check required elements - if min_val > 0 and not '.' in path[1 + path.find('.'):] if path.find('.') != -1 else True: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - error_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Required element {path} missing" - errors.append(error_msg) - result['details'].append({ - 'issue': error_msg, - 'severity': 'error', - 'description': f"{definition} This element is mandatory (min={min_val}) per the profile {profile_url or 'unknown'}." - }) - logger.info(f"Validation error: Required element {path} missing") - - # Check must-support elements - if must_support and not '.' in path[1 + path.find('.'):] if path.find('.') != -1 else True: - if '[x]' in path: - base_path = path.replace('[x]', '') - found = False - for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']: - test_path = f"{base_path}{suffix}" - value = navigate_fhir_path(resource, test_path) - if value is not None and (not isinstance(value, list) or any(value)): - found = True - break - if not found: - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)." - }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - else: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - if element.get('min', 0) == 0: - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)." 
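(Editor's note: the guard `min_val > 0 and not '.' in path[1 + path.find('.'):] if path.find('.') != -1 else True` is easy to misread. In Python the conditional expression binds more loosely than `and`, so it parses as `(min_val > 0 and ...) if path.find('.') != -1 else True`, which means dot-less paths always pass the check regardless of min_val. If the intent is "only inspect elements at most one level below the resource type", an explicit helper states that unambiguously; this is a hedged reading of the intent, not a drop-in change.)

def is_top_level_path(path):
    """True for 'Patient' or 'Patient.name'; False for deeper paths such as 'Patient.name.given'."""
    first_dot = path.find('.')
    if first_dot == -1:
        return True
    return '.' not in path[first_dot + 1:]

assert is_top_level_path('Patient.name')
assert not is_top_level_path('Patient.name.given')

# With the helper the required-element guard would read:
# if min_val > 0 and is_top_level_path(path): ...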
- }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - - # Handle dataAbsentReason for must-support elements - if path.endswith('dataAbsentReason') and must_support: - value_x_path = path.replace('dataAbsentReason', 'value[x]') - value_found = False - for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']: - test_path = path.replace('dataAbsentReason', f'value{suffix}') - value = navigate_fhir_path(resource, test_path) - if value is not None and (not isinstance(value, list) or any(value)): - value_found = True - break - if not value_found: - value = navigate_fhir_path(resource, path) - if value is None or (isinstance(value, list) and not any(value)): - warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty" - warnings.add(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"{definition} This element is marked as Must Support and should be used to indicate why the associated value is absent." - }) - logger.info(f"Validation warning: Must Support element {path} missing or empty") - - result['errors'] = errors - result['warnings'] = list(warnings) - result['valid'] = len(errors) == 0 - result['summary'] = { - 'error_count': len(errors), - 'warning_count': len(warnings) - } - logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}") - return result - -# --- UPDATED: validate_bundle_against_profile function --- -def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True): - """ - Validates a FHIR Bundle against profiles in the specified package. - - This version adds a new two-pass process to correctly resolve `urn:uuid` - references within the bundle before flagging them as unresolved. - """ - logger.debug(f"Validating bundle against {package_name}#{version}, include_dependencies={include_dependencies}") - result = { - 'valid': True, - 'errors': [], - 'warnings': [], - 'details': [], - 'results': {}, - 'summary': { - 'resource_count': 0, - 'failed_resources': 0, - 'profiles_validated': set() - } - } - if not bundle.get('resourceType') == 'Bundle': - result['valid'] = False - result['errors'].append("Resource is not a Bundle") - result['details'].append({ - 'issue': "Resource is not a Bundle", - 'severity': 'error', - 'description': "The provided resource must have resourceType 'Bundle' to be validated as a bundle." 
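(Editor's note: the dataAbsentReason branch above treats a populated value[x] and a populated dataAbsentReason as alternatives: one of the two should be present for a Must Support value. A small illustrative pair of Observation fragments, with purely made-up content, shows the two shapes the check is looking for.)

# Observation with an actual value: value[x] is populated, no dataAbsentReason is needed.
obs_with_value = {
    'resourceType': 'Observation',
    'status': 'final',
    'code': {'text': 'Heart rate'},
    'valueQuantity': {'value': 72, 'unit': '/min'},
}

# Observation where the value could not be obtained: dataAbsentReason explains the gap.
obs_without_value = {
    'resourceType': 'Observation',
    'status': 'final',
    'code': {'text': 'Heart rate'},
    'dataAbsentReason': {
        'coding': [{'system': 'http://terminology.hl7.org/CodeSystem/data-absent-reason',
                    'code': 'not-performed'}]
    },
}

def has_value_or_absent_reason(obs):
    """True when either a value[x] element or a dataAbsentReason is present."""
    return any(key.startswith('value') for key in obs) or 'dataAbsentReason' in obs

assert has_value_or_absent_reason(obs_with_value)
assert has_value_or_absent_reason(obs_without_value)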
- }) - logger.error("Validation failed: Resource is not a Bundle") - return result - - # --- Work Item 1: First pass to collect all local references --- - local_references = set() - for entry in bundle.get('entry', []): - fullUrl = entry.get('fullUrl') - resource = entry.get('resource') - if fullUrl: - local_references.add(fullUrl) - if resource and resource.get('resourceType') and resource.get('id'): - local_references.add(f"{resource['resourceType']}/{resource['id']}") - logger.debug(f"Found {len(local_references)} local references in the bundle.") - - # Track references and resolved references for external check - all_references_found = set() - - # Second pass for validation and reference checking - for entry in bundle.get('entry', []): - resource = entry.get('resource') - if not resource: - continue - resource_type = resource.get('resourceType') - resource_id = resource.get('id', 'unknown') - result['summary']['resource_count'] += 1 - - # Collect references - current_refs = [] - find_references(resource, current_refs) - for ref_str in current_refs: - if isinstance(ref_str, str): - all_references_found.add(ref_str) - - # Validate resource - validation_result = validate_resource_against_profile(package_name, version, resource, include_dependencies) - result['results'][f"{resource_type}/{resource_id}"] = validation_result - result['summary']['profiles_validated'].add(validation_result['profile'] or 'unknown') - - # Aggregate errors and warnings - if not validation_result['valid']: - result['valid'] = False - result['summary']['failed_resources'] += 1 - result['errors'].extend(validation_result['errors']) - result['warnings'].extend(validation_result['warnings']) - result['details'].extend(validation_result['details']) - - # --- Work Item 1: Check for unresolved references *after* processing all local resources --- - for ref in all_references_found: - if ref not in local_references: - warning_msg = f"Unresolved reference: {ref}" - result['warnings'].append(warning_msg) - result['details'].append({ - 'issue': warning_msg, - 'severity': 'warning', - 'description': f"The reference {ref} points to a resource not included in the bundle. Ensure the referenced resource is present or resolvable." 
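(Editor's note: the two passes above exist because a bundle entry can be referenced either by its entry fullUrl, often a urn:uuid: value in transaction bundles, or by ResourceType/id, so both forms have to be collected before any reference can be declared unresolved. A compact sketch of the same idea follows; `collect_references` is a simplified stand-in for the module's find_references helper.)

def collect_references(node, found):
    """Recursively collect every 'reference' string found in a FHIR structure."""
    if isinstance(node, dict):
        for key, value in node.items():
            if key == 'reference' and isinstance(value, str):
                found.append(value)
            else:
                collect_references(value, found)
    elif isinstance(node, list):
        for item in node:
            collect_references(item, found)

def unresolved_references(bundle):
    local, used = set(), []
    # Pass 1: register every way an entry can be addressed locally.
    for entry in bundle.get('entry', []):
        if entry.get('fullUrl'):
            local.add(entry['fullUrl'])
        res = entry.get('resource') or {}
        if res.get('resourceType') and res.get('id'):
            local.add(f"{res['resourceType']}/{res['id']}")
    # Pass 2: gather all references and report the ones not addressable locally.
    for entry in bundle.get('entry', []):
        collect_references(entry.get('resource') or {}, used)
    return sorted(set(used) - local)

bundle = {'resourceType': 'Bundle', 'type': 'transaction', 'entry': [
    {'fullUrl': 'urn:uuid:1111', 'resource': {'resourceType': 'Patient', 'id': 'p1'}},
    {'resource': {'resourceType': 'Observation', 'id': 'o1',
                  'subject': {'reference': 'urn:uuid:1111'},
                  'performer': [{'reference': 'Practitioner/unknown'}]}},
]}
print(unresolved_references(bundle))  # ['Practitioner/unknown']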
- }) - logger.info(f"Validation warning: Unresolved reference {ref}") - - # Finalize summary - result['summary']['profiles_validated'] = list(result['summary']['profiles_validated']) - result['summary']['error_count'] = len(result['errors']) - result['summary']['warning_count'] = len(result['warnings']) - logger.debug(f"Bundle validation result: valid={result['valid']}, errors={result['summary']['error_count']}, warnings={result['summary']['warning_count']}, resources={result['summary']['resource_count']}") - return result - - -# --- Structure Definition Retrieval --- -def get_structure_definition(package_name, version, resource_type): - """Fetches StructureDefinition with slicing support.""" - download_dir = _get_download_dir() - if not download_dir: - logger.error("Could not get download directory.") - return {'error': 'Download directory not accessible'} - - tgz_filename = construct_tgz_filename(package_name, version) - tgz_path = os.path.join(download_dir, tgz_filename) - sd_data, sd_path = find_and_extract_sd(tgz_path, resource_type) - - if not sd_data: - # Fallback to canonical package - canonical_tgz = construct_tgz_filename(*CANONICAL_PACKAGE) - canonical_path = os.path.join(download_dir, canonical_tgz) - sd_data, sd_path = find_and_extract_sd(canonical_path, resource_type) - if sd_data: - logger.info(f"Using canonical SD for {resource_type} from {canonical_path}") - elements = sd_data.get('snapshot', {}).get('element', []) - return { - 'elements': elements, - 'must_support_paths': [el['path'] for el in elements if el.get('mustSupport', False)], - 'slices': [], - 'fallback_used': True, - 'source_package': f"{CANONICAL_PACKAGE[0]}#{CANONICAL_PACKAGE[1]}" - } - logger.error(f"No StructureDefinition found for {resource_type} in {package_name}#{version} or canonical package") - return {'error': f"No StructureDefinition for {resource_type}"} - - elements = sd_data.get('snapshot', {}).get('element', []) - must_support_paths = [] - slices = [] - - # Process elements for must-support and slicing - for element in elements: - path = element.get('path', '') - element_id = element.get('id', '') - slice_name = element.get('sliceName') - if element.get('mustSupport', False): - ms_path = f"{path}[sliceName='{slice_name}']" if slice_name else element_id - must_support_paths.append(ms_path) - if 'slicing' in element: - slice_info = { - 'path': path, - 'sliceName': slice_name, - 'discriminator': element.get('slicing', {}).get('discriminator', []), - 'nested_slices': [] - } - # Find nested slices - for sub_element in elements: - if sub_element['path'].startswith(path + '.') and 'slicing' in sub_element: - sub_slice_name = sub_element.get('sliceName') - slice_info['nested_slices'].append({ - 'path': sub_element['path'], - 'sliceName': sub_slice_name, - 'discriminator': sub_element.get('slicing', {}).get('discriminator', []) - }) - slices.append(slice_info) - - logger.debug(f"StructureDefinition for {resource_type}: {len(elements)} elements, {len(must_support_paths)} must-support paths, {len(slices)} slices") - return { - 'elements': elements, - 'must_support_paths': sorted(list(set(must_support_paths))), - 'slices': slices, - 'fallback_used': False - } - -# --- Other Service Functions --- -def _build_package_index(download_dir): - """Builds an index of canonical URLs to package details from .index.json files.""" - index = {} - try: - for tgz_file in os.listdir(download_dir): - if not tgz_file.endswith('.tgz'): - continue - tgz_path = os.path.join(download_dir, tgz_file) - try: - with 
tarfile.open(tgz_path, "r:gz") as tar: - index_file = next((m for m in tar.getmembers() if m.name == 'package/.index.json'), None) - if index_file: - fileobj = tar.extractfile(index_file) - if fileobj: - content = json.loads(fileobj.read().decode('utf-8-sig')) - package_name = content.get('package-id', '') - package_version = content.get('version', '') - for file_entry in content.get('files', []): - canonical = file_entry.get('canonical') - filename = file_entry.get('filename') - if canonical and filename: - index[canonical] = { - 'package_name': package_name, - 'package_version': package_version, - 'filename': filename - } - fileobj.close() - except Exception as e: - logger.warning(f"Failed to index {tgz_file}: {e}") - except Exception as e: - logger.error(f"Error building package index: {e}") - return index - -def _find_definition_details(url, download_dir): - """Finds package details for a canonical URL.""" - index = current_app.config.get('PACKAGE_INDEX') - if index is None: - index = _build_package_index(download_dir) - current_app.config['PACKAGE_INDEX'] = index - return index.get(url) - -def _load_definition(details, download_dir): - """Loads a StructureDefinition from package details.""" - if not details: - return None - tgz_path = os.path.join(download_dir, construct_tgz_filename(details['package_name'], details['package_version'])) - try: - with tarfile.open(tgz_path, "r:gz") as tar: - member_path = f"package/{details['filename']}" - member = next((m for m in tar.getmembers() if m.name == member_path), None) - if member: - fileobj = tar.extractfile(member) - if fileobj: - data = json.loads(fileobj.read().decode('utf-8-sig')) - fileobj.close() - return data - except Exception as e: - logger.error(f"Failed to load definition {details['filename']} from {tgz_path}: {e}") - return None - -# def download_package(name, version): -# """Downloads a single FHIR package.""" -# download_dir = _get_download_dir() -# if not download_dir: return None, "Download dir error" -# filename = construct_tgz_filename(name, version) -# if not filename: return None, "Filename construction error" -# save_path = os.path.join(download_dir, filename) -# if os.path.exists(save_path): -# logger.info(f"Package already exists: {save_path}") -# return save_path, None -# package_url = f"{FHIR_REGISTRY_BASE_URL}/{name}/{version}" -# try: -# with requests.get(package_url, stream=True, timeout=60) as r: -# r.raise_for_status() -# with open(save_path, 'wb') as f: -# for chunk in r.iter_content(chunk_size=8192): f.write(chunk) -# logger.info(f"Downloaded {filename}") -# return save_path, None -# except requests.exceptions.RequestException as e: -# logger.error(f"Download failed for {name}#{version}: {e}") -# return None, f"Download error: {e}" -# except IOError as e: -# logger.error(f"File write error for {save_path}: {e}") -# return None, f"File write error: {e}" - -def download_package(name, version, dependency_mode='none'): - """Downloads a FHIR package by name and version to the configured directory.""" - download_dir = _get_download_dir() - if not download_dir: - return None, ["Could not determine download directory"] - tgz_filename = construct_tgz_filename(name, version) - if not tgz_filename: - return None, [f"Could not construct filename for {name}#{version}"] - download_path = os.path.join(download_dir, tgz_filename) - errors = [] - - # Check if already downloaded - if os.path.exists(download_path): - logger.info(f"Package {name}#{version} already downloaded at {download_path}") - return download_path, [] - - # 
Primary download URL - primary_url = f"{FHIR_REGISTRY_BASE_URL}/{name}/{version}" - logger.info(f"Attempting download of {name}#{version} from {primary_url}") - - try: - response = requests.get(primary_url, timeout=30) - response.raise_for_status() - with open(download_path, 'wb') as f: - f.write(response.content) - logger.info(f"Successfully downloaded {name}#{version} to {download_path}") - save_package_metadata(name, version, dependency_mode, []) - return download_path, [] - except requests.exceptions.HTTPError as e: - if e.response.status_code == 404: - logger.warning(f"Primary download failed (404) for {name}#{version} at {primary_url}. Attempting fallback URL.") - else: - error_msg = f"Download error for {name}#{version}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - except requests.exceptions.RequestException as e: - error_msg = f"Download error for {name}#{version}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - except Exception as e: - error_msg = f"Unexpected error downloading {name}#{version}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - - # Fallback: Try the package's URL from the normalized package data - if errors and "404" in errors[0]: - logger.info(f"Looking up alternative download URL for {name}#{version}") - try: - # Access the in-memory cache from the Flask app config - normalized_packages = current_app.config.get('MANUAL_PACKAGE_CACHE', []) - package_data = next((pkg for pkg in normalized_packages if pkg.get('name') == name), None) - if not package_data: - error_msg = f"Package {name} not found in cache for fallback download." - logger.error(error_msg) - errors.append(error_msg) - return None, errors - - package_url = package_data.get('url') - if not package_url: - error_msg = f"No alternative URL found for {name}#{version}." 
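(Editor's note: the fallback branch above builds a second download URL from the package's registry entry when the primary registry returns 404. A small sketch of the overall pattern, primary URL then fallback, is shown below; the URLs are illustrative and the fallback URL shape is an assumption, as the code itself notes it may need adjustment. The earlier commented-out downloader streamed the body with iter_content, which avoids holding a large package in memory; the sketch, like the current code, reads the whole response at once for simplicity.)

import requests

def download_tgz(primary_url, fallback_url, save_path, timeout=30):
    """Try the primary URL first; on HTTP 404 fall back to the second URL."""
    for url in (primary_url, fallback_url):
        try:
            response = requests.get(url, timeout=timeout)
            response.raise_for_status()
        except requests.exceptions.HTTPError as exc:
            if exc.response is not None and exc.response.status_code == 404:
                continue  # try the next URL, or fall through to the error below
            raise
        with open(save_path, 'wb') as f:
            f.write(response.content)
        return save_path
    raise FileNotFoundError(f"Package not available at {primary_url} or {fallback_url}")

# Illustrative call; the real code derives both URLs from the registry and cached package metadata.
# download_tgz('https://packages.fhir.org/example.pkg/1.0.0',
#              'https://packages.simplifier.net/example.pkg/1.0.0.tgz',
#              '/tmp/example.pkg-1.0.0.tgz')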
- logger.error(error_msg) - errors.append(error_msg) - return None, errors - - # Construct a download URL using the package's URL - # Assuming the URL is a base (e.g., https://packages.simplifier.net/fhir.ieb.core) - # and we append the version to form the download URL - # This may need adjustment based on the actual format of 'url' - fallback_url = f"{package_url.rstrip('/')}/{version}.tgz" - logger.info(f"Attempting fallback download of {name}#{version} from {fallback_url}") - - response = requests.get(fallback_url, timeout=30) - response.raise_for_status() - with open(download_path, 'wb') as f: - f.write(response.content) - logger.info(f"Successfully downloaded {name}#{version} using fallback URL to {download_path}") - save_package_metadata(name, version, dependency_mode, []) - return download_path, [] - except requests.exceptions.HTTPError as e: - error_msg = f"Fallback download error for {name}#{version} at {fallback_url}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - except requests.exceptions.RequestException as e: - error_msg = f"Fallback download network error for {name}#{version}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - except Exception as e: - error_msg = f"Unexpected error during fallback download of {name}#{version}: {str(e)}" - logger.error(error_msg, exc_info=True) - errors.append(error_msg) - return None, errors - - return None, errors - -def extract_dependencies(tgz_path): - """Extracts dependencies from package.json.""" - package_json_path = "package/package.json" - dependencies = {} - error_message = None - if not tgz_path or not os.path.exists(tgz_path): return None, "File not found" - try: - with tarfile.open(tgz_path, "r:gz") as tar: - try: - pkg_member = tar.getmember(package_json_path) - with tar.extractfile(pkg_member) as f: - pkg_data = json.load(f) - dependencies = pkg_data.get('dependencies', {}) - except KeyError: error_message = "package.json not found" - except (json.JSONDecodeError, tarfile.TarError) as e: error_message = f"Error reading package.json: {e}" - except tarfile.TarError as e: error_message = f"Error opening tarfile: {e}" - except Exception as e: error_message = f"Unexpected error: {e}" - return dependencies, error_message - -def extract_used_types(tgz_path): - """Extracts all resource types and referenced types from the package resources.""" - used_types = set() - if not tgz_path or not os.path.exists(tgz_path): - logger.error(f"Cannot extract used types: File not found at {tgz_path}") - return used_types - try: - with tarfile.open(tgz_path, "r:gz") as tar: - for member in tar: - if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): - continue - if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - continue - fileobj = None - try: - fileobj = tar.extractfile(member) - if fileobj: - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - if not isinstance(data, dict): continue - resource_type = data.get('resourceType') - if not resource_type: continue - used_types.add(resource_type) - if resource_type == 'StructureDefinition': - sd_type = data.get('type') - if sd_type: used_types.add(sd_type) - base_def = data.get('baseDefinition') - if base_def: - base_type = base_def.split('/')[-1] - if base_type and base_type[0].isupper(): 
used_types.add(base_type) - elements = data.get('snapshot', {}).get('element', []) or data.get('differential', {}).get('element', []) - for element in elements: - if isinstance(element, dict) and 'type' in element: - for t in element.get('type', []): - code = t.get('code') - if code and code[0].isupper(): used_types.add(code) - for profile_uri in t.get('targetProfile', []): - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - else: - profiles = data.get('meta', {}).get('profile', []) - for profile_uri in profiles: - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - if resource_type == 'ValueSet': - for include in data.get('compose', {}).get('include', []): - system = include.get('system') - if system and system.startswith('http://hl7.org/fhir/'): - type_name = system.split('/')[-1] - if type_name and type_name[0].isupper() and not type_name.startswith('sid'): - used_types.add(type_name) - if resource_type == 'CapabilityStatement': - for rest_item in data.get('rest', []): - for resource_item in rest_item.get('resource', []): - res_type = resource_item.get('type') - if res_type and res_type[0].isupper(): used_types.add(res_type) - profile_uri = resource_item.get('profile') - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - except json.JSONDecodeError as e: - logger.warning(f"Could not parse JSON in {member.name}: {e}") - except UnicodeDecodeError as e: - logger.warning(f"Could not decode {member.name}: {e}") - except Exception as e: - logger.warning(f"Could not process member {member.name}: {e}") - finally: - if fileobj: - fileobj.close() - except tarfile.ReadError as e: - logger.error(f"Tar ReadError extracting used types from {tgz_path}: {e}") - except tarfile.TarError as e: - logger.error(f"TarError extracting used types from {tgz_path}: {e}") - except FileNotFoundError: - logger.error(f"Package file not found: {tgz_path}") - except Exception as e: - logger.error(f"Error extracting used types from {tgz_path}: {e}", exc_info=True) - core_non_resource_types = { - 'string', 'boolean', 'integer', 'decimal', 'uri', 'url', 'canonical', 'base64Binary', 'instant', - 'date', 'dateTime', 'time', 'code', 'oid', 'id', 'markdown', 'unsignedInt', 'positiveInt', 'xhtml', - 'Element', 'BackboneElement', 'Resource', 'DomainResource', 'DataType' - } - final_used_types = {t for t in used_types if t not in core_non_resource_types and t[0].isupper()} - logger.debug(f"Extracted used types from {os.path.basename(tgz_path)}: {final_used_types}") - return final_used_types - -def map_types_to_packages(used_types, all_dependencies, download_dir): - """Maps used types to packages by checking .index.json files.""" - type_to_package = {} - processed_types = set() - for (pkg_name, pkg_version), _ in all_dependencies.items(): - tgz_filename = construct_tgz_filename(pkg_name, pkg_version) - tgz_path = os.path.join(download_dir, tgz_filename) - if not os.path.exists(tgz_path): - logger.warning(f"Package {tgz_filename} not found for type mapping") - continue - try: - with tarfile.open(tgz_path, "r:gz") as tar: - index_file = next((m for m in tar.getmembers() if m.name == 'package/.index.json'), None) - if index_file: - fileobj = tar.extractfile(index_file) - if fileobj: - content = json.loads(fileobj.read().decode('utf-8-sig')) - for file_entry in 
content.get('files', []): - resource_type = file_entry.get('resourceType') - filename = file_entry.get('filename') - if resource_type == 'StructureDefinition' and filename.endswith('.json'): - sd_name = os.path.splitext(os.path.basename(filename))[0] - if sd_name in used_types: - type_to_package[sd_name] = (pkg_name, pkg_version) - processed_types.add(sd_name) - logger.debug(f"Mapped type '{sd_name}' to package '{pkg_name}#{pkg_version}'") - except Exception as e: - logger.warning(f"Failed to process .index.json for {pkg_name}#{pkg_version}: {e}") - for t in used_types - processed_types: - for (pkg_name, pkg_version), _ in all_dependencies.items(): - if t.lower() in pkg_name.lower(): - type_to_package[t] = (pkg_name, pkg_version) - processed_types.add(t) - logger.debug(f"Fallback: Mapped type '{t}' to package '{pkg_name}#{pkg_version}'") - break - canonical_name, canonical_version = CANONICAL_PACKAGE - for t in used_types - processed_types: - type_to_package[t] = CANONICAL_PACKAGE - logger.debug(f"Fallback: Mapped type '{t}' to canonical package {canonical_name}#{canonical_version}") - logger.debug(f"Final type-to-package mapping: {type_to_package}") - return type_to_package - -def import_package_and_dependencies(initial_name, initial_version, dependency_mode='recursive'): - """Orchestrates recursive download and dependency extraction.""" - logger.info(f"Starting import of {initial_name}#{initial_version} with mode {dependency_mode}") - download_dir = _get_download_dir() - if not download_dir: - logger.error("Download directory not accessible") - return { - 'requested': (initial_name, initial_version), - 'processed': set(), - 'downloaded': {}, - 'all_dependencies': {}, - 'dependencies': [], - 'errors': ['Download directory not accessible'] - } - - results = { - 'requested': (initial_name, initial_version), - 'processed': set(), - 'downloaded': {}, - 'all_dependencies': {}, - 'dependencies': [], - 'errors': [] - } - pending_queue = [(initial_name, initial_version)] - queued_or_processed_lookup = set([(initial_name, initial_version)]) - all_found_dependencies = set() - - while pending_queue: - name, version = pending_queue.pop(0) - package_id_tuple = (name, version) - if package_id_tuple in results['processed']: - logger.debug(f"Skipping already processed package: {name}#{version}") - continue - logger.info(f"Processing package {name}#{version}") - save_path, dl_error = download_package(name, version) - if dl_error: - logger.error(f"Download failed for {name}#{version}: {dl_error}") - results['errors'].append(f"Download failed for {name}#{version}: {dl_error}") - continue - tgz_filename = os.path.basename(save_path) - logger.info(f"Downloaded {tgz_filename}") - results['downloaded'][package_id_tuple] = save_path - logger.info(f"Extracting dependencies from {tgz_filename}") - dependencies, dep_error = extract_dependencies(save_path) - if dep_error: - logger.error(f"Dependency extraction failed for {name}#{version}: {dep_error}") - results['errors'].append(f"Dependency extraction failed for {name}#{version}: {dep_error}") - results['processed'].add(package_id_tuple) - continue - elif dependencies is None: - logger.error(f"Critical error in dependency extraction for {name}#{version}") - results['errors'].append(f"Dependency extraction returned critical error for {name}#{version}.") - results['processed'].add(package_id_tuple) - continue - results['all_dependencies'][package_id_tuple] = dependencies - results['processed'].add(package_id_tuple) - current_package_deps = [] - for dep_name, 
dep_version in dependencies.items(): - if isinstance(dep_name, str) and isinstance(dep_version, str) and dep_name and dep_version: - dep_tuple = (dep_name, dep_version) - current_package_deps.append({"name": dep_name, "version": dep_version}) - if dep_tuple not in all_found_dependencies: - all_found_dependencies.add(dep_tuple) - results['dependencies'].append({"name": dep_name, "version": dep_version}) - if dep_tuple not in queued_or_processed_lookup: - should_queue = False - if dependency_mode == 'recursive': - should_queue = True - logger.info(f"Queueing dependency {dep_name}#{dep_version} (recursive mode)") - elif dependency_mode == 'patch-canonical' and dep_tuple == CANONICAL_PACKAGE: - should_queue = True - logger.info(f"Queueing canonical dependency {dep_name}#{dep_version} (patch-canonical mode)") - if should_queue: - logger.debug(f"Adding dependency to queue ({dependency_mode}): {dep_name}#{dep_version}") - pending_queue.append(dep_tuple) - queued_or_processed_lookup.add(dep_tuple) - logger.info(f"Saving metadata for {name}#{version}") - save_package_metadata(name, version, dependency_mode, current_package_deps) - if dependency_mode == 'tree-shaking' and package_id_tuple == (initial_name, initial_version): - logger.info(f"Performing tree-shaking for {initial_name}#{initial_version}") - used_types = extract_used_types(save_path) - if used_types: - type_to_package = map_types_to_packages(used_types, results['all_dependencies'], download_dir) - tree_shaken_deps = set(type_to_package.values()) - {package_id_tuple} - if CANONICAL_PACKAGE not in tree_shaken_deps: - tree_shaken_deps.add(CANONICAL_PACKAGE) - logger.info(f"Ensuring canonical package {CANONICAL_PACKAGE[0]}#{CANONICAL_PACKAGE[1]} for tree-shaking") - for dep_tuple in tree_shaken_deps: - if dep_tuple not in queued_or_processed_lookup: - logger.info(f"Queueing tree-shaken dependency {dep_tuple[0]}#{dep_tuple[1]}") - pending_queue.append(dep_tuple) - queued_or_processed_lookup.add(dep_tuple) - results['dependencies'] = [{"name": d[0], "version": d[1]} for d in all_found_dependencies] - logger.info(f"Completed import of {initial_name}#{initial_version}. Processed {len(results['processed'])} packages, downloaded {len(results['downloaded'])}, with {len(results['errors'])} errors") - return results - -# --- Validation Route --- -@services_bp.route('/validate-sample', methods=['POST']) -@swag_from({ - 'tags': ['Validation'], - 'summary': 'Validate a FHIR resource or bundle.', - 'description': 'Validates a given FHIR resource or bundle against profiles defined in a specified FHIR package. 
Uses HAPI FHIR for validation if a profile is specified, otherwise falls back to local StructureDefinition checks.', - 'security': [{'ApiKeyAuth': []}], # Assuming API key is desired - 'consumes': ['application/json'], - 'parameters': [ - { - 'name': 'validation_payload', # Changed name - 'in': 'body', - 'required': True, - 'schema': { - 'type': 'object', - 'required': ['package_name', 'version', 'sample_data'], - 'properties': { - 'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'}, - 'version': {'type': 'string', 'example': '6.1.0'}, - 'sample_data': {'type': 'string', 'description': 'A JSON string of the FHIR resource or Bundle to validate.'}, - # 'include_dependencies': {'type': 'boolean', 'default': True} # This seems to be a server-side decision now - } - } - } - ], - 'responses': { - '200': { - 'description': 'Validation result.', - 'schema': { # Define the schema of the validation_result dictionary - 'type': 'object', - 'properties': { - 'valid': {'type': 'boolean'}, - 'errors': {'type': 'array', 'items': {'type': 'string'}}, - 'warnings': {'type': 'array', 'items': {'type': 'string'}}, - 'details': {'type': 'array', 'items': {'type': 'object'}}, # more specific if known - 'resource_type': {'type': 'string'}, - 'resource_id': {'type': 'string'}, - 'profile': {'type': 'string', 'nullable': True}, - 'summary': {'type': 'object'} - } - } - }, - '400': {'description': 'Invalid request (e.g., missing fields, invalid JSON).'}, - '404': {'description': 'Specified package for validation not found.'}, - '500': {'description': 'Server error during validation.'} - } -}) -def validate_sample(): - """Validates a FHIR sample against a package profile.""" - logger.debug("Received validate-sample request") - data = request.get_json(silent=True) - if not data: - logger.error("No JSON data provided or invalid JSON in validate-sample request") - return jsonify({ - 'valid': False, - 'errors': ["No JSON data provided or invalid JSON"], - 'warnings': [], - 'results': {} - }), 400 - - package_name = data.get('package_name') - version = data.get('version') - sample_data = data.get('sample_data') - - logger.debug(f"Request params: package_name={package_name}, version={version}, sample_data_length={len(sample_data) if sample_data else 0}") - if not package_name or not version or not sample_data: - logger.error(f"Missing required fields: package_name={package_name}, version={version}, sample_data={'provided' if sample_data else 'missing'}") - return jsonify({ - 'valid': False, - 'errors': ["Missing required fields: package_name, version, or sample_data"], - 'warnings': [], - 'results': {} - }), 400 - - # Verify download directory access - download_dir = _get_download_dir() - if not download_dir: - logger.error("Cannot access download directory") - return jsonify({ - 'valid': False, - 'errors': ["Server configuration error: cannot access package directory"], - 'warnings': [], - 'results': {} - }), 500 - - # Verify package file exists - tgz_filename = construct_tgz_filename(package_name, version) - tgz_path = os.path.join(download_dir, tgz_filename) - logger.debug(f"Checking package file: {tgz_path}") - if not os.path.exists(tgz_path): - logger.error(f"Package file not found: {tgz_path}") - return jsonify({ - 'valid': False, - 'errors': [f"Package not found: {package_name}#{version}. 
Please import the package first."], - 'warnings': [], - 'results': {} - }), 400 - - try: - # Parse JSON sample - sample = json.loads(sample_data) - resource_type = sample.get('resourceType') - if not resource_type: - logger.error("Sample JSON missing resourceType") - return jsonify({ - 'valid': False, - 'errors': ["Sample JSON missing resourceType"], - 'warnings': [], - 'results': {} - }), 400 - - logger.debug(f"Validating {resource_type} against {package_name}#{version}") - # Validate resource or bundle - if resource_type == 'Bundle': - result = validate_bundle_against_profile(package_name, version, sample) - else: - result = validate_resource_against_profile(package_name, version, sample) - - logger.info(f"Validation result for {resource_type} against {package_name}#{version}: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}") - return jsonify(result) - except json.JSONDecodeError as e: - logger.error(f"Invalid JSON in sample_data: {e}") - return jsonify({ - 'valid': False, - 'errors': [f"Invalid JSON: {str(e)}"], - 'warnings': [], - 'results': {} - }), 400 - except Exception as e: - logger.error(f"Validation failed: {e}", exc_info=True) - return jsonify({ - 'valid': False, - 'errors': [f"Validation failed: {str(e)}"], - 'warnings': [], - 'results': {} - }), 500 - -def run_gofsh(input_path, output_dir, output_style, log_level, fhir_version=None, fishing_trip=False, dependencies=None, indent_rules=False, meta_profile='only-one', alias_file=None, no_alias=False): - """Run GoFSH with advanced options and return FSH output and optional comparison report.""" - # Use a temporary output directory for initial GoFSH run - temp_output_dir = tempfile.mkdtemp() - os.chmod(temp_output_dir, 0o777) - - cmd = ["gofsh", input_path, "-o", temp_output_dir, "-s", output_style, "-l", log_level] - if fhir_version: - cmd.extend(["-u", fhir_version]) - if dependencies: - for dep in dependencies: - cmd.extend(["--dependency", dep.strip()]) - if indent_rules: - cmd.append("--indent") - if no_alias: - cmd.append("--no-alias") - if alias_file: - cmd.extend(["--alias-file", alias_file]) - if meta_profile != 'only-one': - cmd.extend(["--meta-profile", meta_profile]) - - # Set environment to disable TTY interactions - env = os.environ.copy() - env["NODE_NO_READLINE"] = "1" - env["NODE_NO_INTERACTIVE"] = "1" - env["TERM"] = "dumb" - env["CI"] = "true" - env["FORCE_COLOR"] = "0" - env["NODE_ENV"] = "production" - - # Create a wrapper script in /tmp - wrapper_script = "/tmp/gofsh_wrapper.sh" - output_file = "/tmp/gofsh_output.log" - try: - with open(wrapper_script, 'w') as f: - f.write("#!/bin/bash\n") - # Redirect /dev/tty writes to /dev/null - f.write("exec 3>/dev/null\n") - f.write(" ".join([f'"{arg}"' for arg in cmd]) + f" {output_file} 2>&1\n") - os.chmod(wrapper_script, 0o755) - - # Log the wrapper script contents for debugging - with open(wrapper_script, 'r') as f: - logger.debug(f"Wrapper script contents:\n{f.read()}") - except Exception as e: - logger.error(f"Failed to create wrapper script {wrapper_script}: {str(e)}", exc_info=True) - return None, None, f"Failed to create wrapper script: {str(e)}" - - try: - # Log directory contents before execution - logger.debug(f"Temp output directory contents before GoFSH: {os.listdir(temp_output_dir)}") - - result = subprocess.run( - [wrapper_script], - check=True, - env=env - ) - # Read output from the log file - with open(output_file, 'r', encoding='utf-8') as f: - output = f.read() - logger.debug(f"GoFSH output:\n{output}") - - 
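(Editor's note: the wrapper-script dance above exists to keep GoFSH from touching a TTY inside the container; the command itself is an ordinary CLI call. Below is a stripped-down sketch that invokes gofsh directly and captures its output, reusing only the flags and environment variables that already appear in this function. Whether plain capture_output is sufficient in this container environment is exactly what the wrapper script works around, so treat this as a simplification, not a replacement.)

import os
import subprocess

def run_gofsh_simple(input_path, output_dir, output_style, log_level='info',
                     fhir_version=None, dependencies=None):
    """Run GoFSH once and return its stdout; raises on a non-zero exit code."""
    cmd = ['gofsh', input_path, '-o', output_dir, '-s', output_style, '-l', log_level]
    if fhir_version:
        cmd += ['-u', fhir_version]
    for dep in dependencies or []:
        cmd += ['--dependency', dep.strip()]
    # Discourage interactive/TTY behaviour without resorting to a wrapper script.
    env = dict(os.environ, TERM='dumb', CI='true', FORCE_COLOR='0', NODE_NO_READLINE='1')
    result = subprocess.run(cmd, env=env, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"GoFSH failed ({result.returncode}): {result.stderr or result.stdout}")
    return result.stdout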
# Prepare final output directory - if os.path.exists(output_dir): - shutil.rmtree(output_dir) - os.makedirs(output_dir, exist_ok=True) - os.chmod(output_dir, 0o777) - - # Copy .fsh files, sushi-config.yaml, and input JSON to final output directory - copied_files = [] - for root, _, files in os.walk(temp_output_dir): - for file in files: - src_path = os.path.join(root, file) - if file.endswith(".fsh") or file == "sushi-config.yaml": - relative_path = os.path.relpath(src_path, temp_output_dir) - dst_path = os.path.join(output_dir, relative_path) - os.makedirs(os.path.dirname(dst_path), exist_ok=True) - shutil.copy2(src_path, dst_path) - copied_files.append(relative_path) - - # Copy input JSON to final directory - input_filename = os.path.basename(input_path) - dst_input_path = os.path.join(output_dir, "input", input_filename) - os.makedirs(os.path.dirname(dst_input_path), exist_ok=True) - shutil.copy2(input_path, dst_input_path) - copied_files.append(os.path.join("input", input_filename)) - - # Create a minimal sushi-config.yaml if missing - sushi_config_path = os.path.join(output_dir, "sushi-config.yaml") - if not os.path.exists(sushi_config_path): - minimal_config = { - "id": "fhirflare.temp", - "canonical": "http://fhirflare.org", - "name": "FHIRFLARETempIG", - "version": "0.1.0", - "fhirVersion": fhir_version or "4.0.1", - "FSHOnly": True, - "dependencies": dependencies or [] - } - with open(sushi_config_path, 'w') as f: - json.dump(minimal_config, f, indent=2) - copied_files.append("sushi-config.yaml") - - # Run GoFSH with --fshing-trip in a fresh temporary directory - comparison_report = None - if fishing_trip: - fishing_temp_dir = tempfile.mkdtemp() - os.chmod(fishing_temp_dir, 0o777) - gofsh_fishing_cmd = ["gofsh", input_path, "-o", fishing_temp_dir, "-s", output_style, "-l", log_level, "--fshing-trip"] - if fhir_version: - gofsh_fishing_cmd.extend(["-u", fhir_version]) - if dependencies: - for dep in dependencies: - gofsh_fishing_cmd.extend(["--dependency", dep.strip()]) - if indent_rules: - gofsh_fishing_cmd.append("--indent") - if no_alias: - gofsh_fishing_cmd.append("--no-alias") - if alias_file: - gofsh_fishing_cmd.extend(["--alias-file", alias_file]) - if meta_profile != 'only-one': - gofsh_fishing_cmd.extend(["--meta-profile", meta_profile]) - - try: - with open(wrapper_script, 'w') as f: - f.write("#!/bin/bash\n") - f.write("exec 3>/dev/null\n") - f.write("exec >/dev/null 2>&1\n") # Suppress all output to /dev/tty - f.write(" ".join([f'"{arg}"' for arg in gofsh_fishing_cmd]) + f" {output_file} 2>&1\n") - os.chmod(wrapper_script, 0o755) - - logger.debug(f"GoFSH fishing-trip wrapper script contents:\n{open(wrapper_script, 'r').read()}") - - result = subprocess.run( - [wrapper_script], - check=True, - env=env - ) - with open(output_file, 'r', encoding='utf-8') as f: - fishing_output = f.read() - logger.debug(f"GoFSH fishing-trip output:\n{fishing_output}") - - # Copy fshing-trip-comparison.html to final directory - for root, _, files in os.walk(fishing_temp_dir): - for file in files: - if file.endswith(".html") and "fshing-trip-comparison" in file.lower(): - src_path = os.path.join(root, file) - dst_path = os.path.join(output_dir, file) - shutil.copy2(src_path, dst_path) - copied_files.append(file) - with open(dst_path, 'r', encoding='utf-8') as f: - comparison_report = f.read() - except subprocess.CalledProcessError as e: - error_output = "" - if os.path.exists(output_file): - with open(output_file, 'r', encoding='utf-8') as f: - error_output = f.read() - logger.error(f"GoFSH 
fishing-trip failed: {error_output}") - return None, None, f"GoFSH fishing-trip failed: {error_output}" - finally: - if os.path.exists(fishing_temp_dir): - shutil.rmtree(fishing_temp_dir, ignore_errors=True) - - # Read FSH files from final output directory - fsh_content = [] - for root, _, files in os.walk(output_dir): - for file in files: - if file.endswith(".fsh"): - with open(os.path.join(root, file), 'r', encoding='utf-8') as f: - fsh_content.append(f.read()) - fsh_output = "\n\n".join(fsh_content) - - # Log copied files - logger.debug(f"Copied files to final output directory: {copied_files}") - - logger.info(f"GoFSH executed successfully for {input_path}") - return fsh_output, comparison_report, None - except subprocess.CalledProcessError as e: - error_output = "" - if os.path.exists(output_file): - with open(output_file, 'r', encoding='utf-8') as f: - error_output = f.read() - logger.error(f"GoFSH failed: {error_output}") - return None, None, f"GoFSH failed: {error_output}" - except Exception as e: - logger.error(f"Error running GoFSH: {str(e)}", exc_info=True) - return None, None, f"Error running GoFSH: {str(e)}" - finally: - # Clean up temporary files - if os.path.exists(wrapper_script): - os.remove(wrapper_script) - if os.path.exists(output_file): - os.remove(output_file) - if os.path.exists(temp_output_dir): - shutil.rmtree(temp_output_dir, ignore_errors=True) - -def process_fhir_input(input_mode, fhir_file, fhir_text, alias_file=None): - """Process user input (file or text) and save to temporary files.""" - temp_dir = tempfile.mkdtemp() - input_file = None - alias_path = None - - try: - if input_mode == 'file' and fhir_file: - content = fhir_file.read().decode('utf-8') - file_type = 'json' if content.strip().startswith('{') else 'xml' - input_file = os.path.join(temp_dir, f"input.{file_type}") - with open(input_file, 'w') as f: - f.write(content) - elif input_mode == 'text' and fhir_text: - content = fhir_text.strip() - file_type = 'json' if content.strip().startswith('{') else 'xml' - input_file = os.path.join(temp_dir, f"input.{file_type}") - with open(input_file, 'w') as f: - f.write(content) - else: - return None, None, None, "No input provided" - - # Basic validation - if file_type == 'json': - try: - json.loads(content) - except json.JSONDecodeError: - return None, None, None, "Invalid JSON format" - elif file_type == 'xml': - try: - ET.fromstring(content) - except ET.ParseError: - return None, None, None, "Invalid XML format" - - # Process alias file if provided - if alias_file: - alias_content = alias_file.read().decode('utf-8') - alias_path = os.path.join(temp_dir, "aliases.fsh") - with open(alias_path, 'w') as f: - f.write(alias_content) - - logger.debug(f"Processed input: {(input_file, alias_path)}") - return input_file, temp_dir, alias_path, None - except Exception as e: - logger.error(f"Error processing input: {str(e)}", exc_info=True) - return None, None, None, f"Error processing input: {str(e)}" - -# --- ADD THIS NEW FUNCTION TO services.py --- -def find_and_extract_search_params(tgz_path, base_resource_type): - """Finds and extracts SearchParameter resources relevant to a given base resource type from a FHIR package tgz file.""" - search_params = [] - if not tgz_path or not os.path.exists(tgz_path): - logger.error(f"Package file not found for SearchParameter extraction: {tgz_path}") - return search_params - logger.debug(f"Searching for SearchParameters based on '{base_resource_type}' in {os.path.basename(tgz_path)}") - try: - with tarfile.open(tgz_path, "r:gz") 
as tar: - for member in tar: - if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): - continue - if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - continue - fileobj = None - try: - fileobj = tar.extractfile(member) - if fileobj: - content_bytes = fileobj.read() - content_string = content_bytes.decode('utf-8-sig') - data = json.loads(content_string) - if isinstance(data, dict) and data.get('resourceType') == 'SearchParameter': - sp_bases = data.get('base', []) - if base_resource_type in sp_bases: - param_info = { - 'id': data.get('id'), - 'url': data.get('url'), - 'name': data.get('name'), - 'description': data.get('description'), - 'code': data.get('code'), - 'type': data.get('type'), - 'expression': data.get('expression'), - 'base': sp_bases, - 'conformance': 'N/A', - 'is_mandatory': False - } - search_params.append(param_info) - logger.debug(f"Found relevant SearchParameter: {param_info.get('name')} (ID: {param_info.get('id')}) for base {base_resource_type}") - except json.JSONDecodeError as e: - logger.debug(f"Could not parse JSON for SearchParameter in {member.name}, skipping: {e}") - except UnicodeDecodeError as e: - logger.warning(f"Could not decode UTF-8 for SearchParameter in {member.name}, skipping: {e}") - except tarfile.TarError as e: - logger.warning(f"Tar error reading member {member.name} for SearchParameter, skipping: {e}") - except Exception as e: - logger.warning(f"Could not read/parse potential SearchParameter {member.name}, skipping: {e}", exc_info=False) - finally: - if fileobj: - fileobj.close() - except tarfile.ReadError as e: - logger.error(f"Tar ReadError extracting SearchParameters from {tgz_path}: {e}") - except tarfile.TarError as e: - logger.error(f"TarError extracting SearchParameters from {tgz_path}: {e}") - except FileNotFoundError: - logger.error(f"Package file not found during SearchParameter extraction: {tgz_path}") - except Exception as e: - logger.error(f"Unexpected error extracting SearchParameters from {tgz_path}: {e}", exc_info=True) - logger.info(f"Found {len(search_params)} SearchParameters relevant to '{base_resource_type}' in {os.path.basename(tgz_path)}") - return search_params -# --- END OF NEW FUNCTION --- - -# --- Full Replacement Function (Corrected Prefix Definitions & Unabbreviated) --- - -def generate_push_stream(package_name, version, fhir_server_url, include_dependencies, - auth_type, auth_token, resource_types_filter, skip_files, - dry_run, verbose, force_upload, packages_dir): - """ - Generates NDJSON stream for the push IG operation. - Handles canonical resources (search by URL, POST/PUT), - skips identical resources (unless force_upload is true), and specified files. 
- """ - # --- Variable Initializations --- - pushed_packages_info = [] - success_count = 0 - failure_count = 0 - skipped_count = 0 - post_count = 0 - put_count = 0 - total_resources_attempted = 0 - processed_resources = set() - failed_uploads_details = [] - skipped_resources_details = [] - filter_set = set(resource_types_filter) if resource_types_filter else None - skip_files_set = set(skip_files) if skip_files else set() - - try: - # --- Start Messages --- - operation_mode = " (DRY RUN)" if dry_run else "" - force_mode = " (FORCE UPLOAD)" if force_upload else "" - yield json.dumps({"type": "start", "message": f"Starting push{operation_mode}{force_mode} for {package_name}#{version} to {fhir_server_url}"}) + "\n" - if filter_set: - yield json.dumps({"type": "info", "message": f"Filtering for resource types: {', '.join(sorted(list(filter_set)))}"}) + "\n" - if skip_files_set: - yield json.dumps({"type": "info", "message": f"Skipping {len(skip_files_set)} specific files."}) + "\n" - yield json.dumps({"type": "info", "message": f"Include Dependencies: {'Yes' if include_dependencies else 'No'}"}) + "\n" - - # --- Define packages_to_push --- - packages_to_push = [] - primary_tgz_filename = construct_tgz_filename(package_name, version) - primary_tgz_path = os.path.join(packages_dir, primary_tgz_filename) - - if not os.path.exists(primary_tgz_path): - yield json.dumps({"type": "error", "message": f"Primary package file not found: {primary_tgz_filename}"}) + "\n" - raise FileNotFoundError(f"Primary package file not found: {primary_tgz_path}") - - packages_to_push.append((package_name, version, primary_tgz_path)) - logger.debug(f"Added primary package to push list: {package_name}#{version}") - - if include_dependencies: - yield json.dumps({"type": "info", "message": "Including dependencies based on import metadata..."}) + "\n" - metadata = get_package_metadata(package_name, version) - if metadata and metadata.get("imported_dependencies"): - dependencies_to_include = metadata["imported_dependencies"] - logger.info(f"Found {len(dependencies_to_include)} dependencies in metadata to potentially include.") - for dep in dependencies_to_include: - dep_name = dep.get("name") - dep_version = dep.get("version") - if dep_name and dep_version: - dep_tgz_filename = construct_tgz_filename(dep_name, dep_version) - dep_tgz_path = os.path.join(packages_dir, dep_tgz_filename) - if os.path.exists(dep_tgz_path): - if (dep_name, dep_version, dep_tgz_path) not in packages_to_push: - packages_to_push.append((dep_name, dep_version, dep_tgz_path)) - logger.debug(f"Added dependency package to push list: {dep_name}#{dep_version}") - else: - yield json.dumps({"type": "warning", "message": f"Dependency package file not found, cannot include: {dep_tgz_filename}"}) + "\n" - logger.warning(f"Dependency package file listed in metadata but not found locally: {dep_tgz_path}") - else: - yield json.dumps({"type": "warning", "message": "Include Dependencies checked, but no dependency metadata found. 
Only pushing primary."}) + "\n" - logger.warning(f"No dependency metadata found for {package_name}#{version} despite include_dependencies=True") - - # --- Resource Extraction & Filtering --- - resources_to_upload = [] - seen_resource_files = set() - - for pkg_name, pkg_version, pkg_path in packages_to_push: - yield json.dumps({"type": "progress", "message": f"Extracting resources from: {pkg_name}#{pkg_version}..."}) + "\n" - try: - with tarfile.open(pkg_path, "r:gz") as tar: - for member in tar.getmembers(): - if not (member.isfile() and member.name.startswith("package/") and member.name.lower().endswith(".json")): - continue - basename_lower = os.path.basename(member.name).lower() - if basename_lower in ["package.json", ".index.json", "validation-summary.json", "validation-oo.json"]: - continue - - normalized_member_name = member.name.replace("\\", "/") - if normalized_member_name in skip_files_set or member.name in skip_files_set: - if verbose: - yield json.dumps({"type": "info", "message": f"Skipping file due to filter: {member.name}"}) + "\n" - continue - - if member.name in seen_resource_files: - if verbose: - yield json.dumps({"type": "info", "message": f"Skipping already seen file: {member.name}"}) + "\n" - continue - seen_resource_files.add(member.name) - - try: - with tar.extractfile(member) as f: - resource_content = f.read().decode("utf-8-sig") - resource_data = json.loads(resource_content) - - if isinstance(resource_data, dict) and "resourceType" in resource_data and "id" in resource_data: - resource_type_val = resource_data.get("resourceType") - if filter_set and resource_type_val not in filter_set: - if verbose: - yield json.dumps({"type": "info", "message": f"Skipping resource type {resource_type_val} due to filter: {member.name}"}) + "\n" - continue - resources_to_upload.append({ - "data": resource_data, - "source_package": f"{pkg_name}#{pkg_version}", - "source_filename": member.name - }) - else: - yield json.dumps({"type": "warning", "message": f"Skipping invalid/incomplete resource structure in file: {member.name}"}) + "\n" - except json.JSONDecodeError as json_e: - yield json.dumps({"type": "warning", "message": f"JSON parse error in file {member.name}: {json_e}"}) + "\n" - except UnicodeDecodeError as uni_e: - yield json.dumps({"type": "warning", "message": f"Encoding error in file {member.name}: {uni_e}"}) + "\n" - except KeyError: - yield json.dumps({"type": "warning", "message": f"File not found within archive: {member.name}"}) + "\n" - except Exception as extract_e: - yield json.dumps({"type": "warning", "message": f"Error processing file {member.name}: {extract_e}"}) + "\n" - except tarfile.ReadError as tar_read_e: - error_msg = f"Tar ReadError reading package {pkg_name}#{pkg_version}: {tar_read_e}. Skipping package." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Read Error: {tar_read_e}"}) - continue - except tarfile.TarError as tar_e: - error_msg = f"TarError reading package {pkg_name}#{pkg_version}: {tar_e}. Skipping package." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Tar Error: {tar_e}"}) - continue - except Exception as pkg_e: - error_msg = f"Unexpected error reading package {pkg_name}#{pkg_version}: {pkg_e}. Skipping package." 
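For reference, the extraction step above boils down to walking the package/*.json members of a FHIR package tarball and keeping entries that look like resources; a minimal sketch of just that core loop, under the assumption of a local .tgz path (the helper name and path are illustrative, and the real code additionally applies skip-file, duplicate, and resource-type filters and streams warnings instead of raising):

import json
import os
import tarfile

def list_package_resources(tgz_path):
    """Collect FHIR resources from the package/*.json members of a package tarball."""
    resources = []
    with tarfile.open(tgz_path, "r:gz") as tar:
        for member in tar.getmembers():
            name = member.name
            if not (member.isfile() and name.startswith("package/") and name.lower().endswith(".json")):
                continue
            if os.path.basename(name).lower() in {"package.json", ".index.json"}:
                continue  # package metadata, not a resource
            with tar.extractfile(member) as fh:
                data = json.loads(fh.read().decode("utf-8-sig"))
            if isinstance(data, dict) and "resourceType" in data and "id" in data:
                resources.append(data)
    return resources

# Hypothetical usage (path is illustrative only):
# for res in list_package_resources("instance/fhir_packages/example.package-1.0.0.tgz"):
#     print(res["resourceType"], res["id"])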
- yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Unexpected: {pkg_e}"}) - logger.error(f"Error reading package {pkg_path}: {pkg_e}", exc_info=True) - continue - - total_resources_attempted = len(resources_to_upload) - yield json.dumps({"type": "info", "message": f"Found {total_resources_attempted} resources matching filters across selected packages."}) + "\n" - - if total_resources_attempted == 0: - yield json.dumps({"type": "warning", "message": "No resources found to upload after filtering."}) + "\n" - else: - # --- Resource Upload Loop Setup --- - session = requests.Session() - base_url = fhir_server_url.rstrip("/") - headers = {"Content-Type": "application/fhir+json", "Accept": "application/fhir+json"} - # MODIFIED: Enhanced authentication handling - if auth_type in ["bearerToken", "basic"] and auth_token: - # Log the Authorization header (mask sensitive data) - auth_display = "Basic " if auth_type == "basic" else (auth_token[:10] + "..." if len(auth_token) > 10 else auth_token) - yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n" - headers["Authorization"] = auth_token # Use auth_token for both Bearer and Basic - elif auth_type == "apiKey": - internal_api_key = None - try: - internal_api_key = current_app.config.get("API_KEY") - except RuntimeError: - logger.warning("Cannot access current_app config outside of request context for API Key.") - if internal_api_key: - headers["X-API-Key"] = internal_api_key - yield json.dumps({"type": "info", "message": "Using internal API Key authentication."}) + "\n" - else: - yield json.dumps({"type": "warning", "message": "API Key auth selected, but no internal key configured/accessible."}) + "\n" - else: - yield json.dumps({"type": "info", "message": "Using no authentication."}) + "\n" - - # --- Main Upload Loop --- - for i, resource_info in enumerate(resources_to_upload, 1): - local_resource = resource_info["data"] - source_pkg = resource_info["source_package"] - resource_type = local_resource.get("resourceType") - resource_id = local_resource.get("id") - resource_log_id = f"{resource_type}/{resource_id}" - canonical_url = local_resource.get("url") - canonical_version = local_resource.get("version") - is_canonical_type = resource_type in CANONICAL_RESOURCE_TYPES - - if resource_log_id in processed_resources: - if verbose: - yield json.dumps({"type": "info", "message": f"Skipping duplicate ID in processing list: {resource_log_id}"}) + "\n" - continue - processed_resources.add(resource_log_id) - - if dry_run: - dry_run_action = "check/PUT" - if is_canonical_type and canonical_url: - dry_run_action = "search/POST/PUT" - yield json.dumps({"type": "progress", "message": f"[DRY RUN] Would {dry_run_action} {resource_log_id} ({i}/{total_resources_attempted}) from {source_pkg}"}) + "\n" - success_count += 1 - pkg_found = False - for p in pushed_packages_info: - if p["id"] == source_pkg: - p["resource_count"] += 1 - pkg_found = True - break - if not pkg_found: - pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) - continue - - existing_resource_id = None - existing_resource_data = None - action = "PUT" - target_url = f"{base_url}/{resource_type}/{resource_id}" - skip_resource = False - - if is_canonical_type and canonical_url: - action = "SEARCH_POST_PUT" - search_params = {"url": canonical_url} - if canonical_version: - 
search_params["version"] = canonical_version - search_url = f"{base_url}/{resource_type}" - if verbose: - yield json.dumps({"type": "info", "message": f"Canonical Type: Searching {search_url} with params {search_params}"}) + "\n" - - try: - search_response = session.get(search_url, params=search_params, headers=headers, timeout=20) - search_response.raise_for_status() - search_bundle = search_response.json() - - if search_bundle.get("resourceType") == "Bundle" and "entry" in search_bundle: - entries = search_bundle.get("entry", []) - if len(entries) == 1: - existing_resource_data = entries[0].get("resource") - if existing_resource_data: - existing_resource_id = existing_resource_data.get("id") - if existing_resource_id: - action = "PUT" - target_url = f"{base_url}/{resource_type}/{existing_resource_id}" - if verbose: - yield json.dumps({"type": "info", "message": f"Found existing canonical resource ID: {existing_resource_id}"}) + "\n" - else: - yield json.dumps({"type": "warning", "message": f"Found canonical {canonical_url}|{canonical_version} but lacks ID. Skipping update."}) + "\n" - action = "SKIP" - skip_resource = True - skipped_count += 1 - skipped_resources_details.append({"resource": resource_log_id, "reason": "Found canonical match without ID"}) - else: - yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} entry lacks resource data. Assuming not found."}) + "\n" - action = "POST" - target_url = f"{base_url}/{resource_type}" - elif len(entries) == 0: - action = "POST" - target_url = f"{base_url}/{resource_type}" - if verbose: - yield json.dumps({"type": "info", "message": f"Canonical not found by URL/Version. Planning POST."}) + "\n" - else: - ids_found = [e.get("resource", {}).get("id", "unknown") for e in entries] - yield json.dumps({"type": "error", "message": f"Conflict: Found {len(entries)} matches for {canonical_url}|{canonical_version} (IDs: {', '.join(ids_found)}). Skipping."}) + "\n" - action = "SKIP" - skip_resource = True - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": f"Conflict: Multiple matches ({len(entries)}) for canonical URL/Version"}) - else: - yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} returned non-Bundle/empty. Assuming not found."}) + "\n" - action = "POST" - target_url = f"{base_url}/{resource_type}" - - except requests.exceptions.RequestException as search_err: - yield json.dumps({"type": "warning", "message": f"Search failed for {resource_log_id}: {search_err}. Defaulting to PUT by ID."}) + "\n" - action = "PUT" - target_url = f"{base_url}/{resource_type}/{resource_id}" - except json.JSONDecodeError as json_err: - yield json.dumps({"type": "warning", "message": f"Failed parse search result for {resource_log_id}: {json_err}. Defaulting PUT by ID."}) + "\n" - action = "PUT" - target_url = f"{base_url}/{resource_type}/{resource_id}" - except Exception as e: - yield json.dumps({"type": "warning", "message": f"Unexpected canonical search error for {resource_log_id}: {e}. 
Defaulting PUT by ID."}) + "\n" - action = "PUT" - target_url = f"{base_url}/{resource_type}/{resource_id}" - - if action == "PUT" and not force_upload and not skip_resource: - resource_to_compare = existing_resource_data - if not resource_to_compare: - try: - if verbose: - yield json.dumps({"type": "info", "message": f"Checking existing (PUT target): {target_url}"}) + "\n" - get_response = session.get(target_url, headers=headers, timeout=15) - if get_response.status_code == 200: - resource_to_compare = get_response.json() - if verbose: - yield json.dumps({"type": "info", "message": f"Found resource by ID for comparison."}) + "\n" - elif get_response.status_code == 404: - if verbose: - yield json.dumps({"type": "info", "message": f"Resource {resource_log_id} not found by ID ({target_url}). Proceeding with PUT create."}) + "\n" - else: - yield json.dumps({"type": "warning", "message": f"Comparison check failed (GET {get_response.status_code}). Attempting PUT."}) + "\n" - except Exception as get_err: - yield json.dumps({"type": "warning", "message": f"Comparison check failed (Error during GET by ID: {get_err}). Attempting PUT."}) + "\n" - - if resource_to_compare: - try: - if are_resources_semantically_equal(local_resource, resource_to_compare): - yield json.dumps({"type": "info", "message": f"Skipping {resource_log_id} (Identical content)"}) + "\n" - skip_resource = True - skipped_count += 1 - skipped_resources_details.append({"resource": resource_log_id, "reason": "Identical content"}) - elif verbose: - yield json.dumps({"type": "info", "message": f"{resource_log_id} exists but differs. Updating."}) + "\n" - except Exception as comp_err: - yield json.dumps({"type": "warning", "message": f"Comparison failed for {resource_log_id}: {comp_err}. Proceeding with PUT."}) + "\n" - - elif action == "PUT" and force_upload: - if verbose: - yield json.dumps({"type": "info", "message": f"Force Upload enabled, skipping comparison for {resource_log_id}."}) + "\n" - - if not skip_resource: - http_method = action if action in ["POST", "PUT"] else "PUT" - log_action = f"{http_method}ing" - yield json.dumps({"type": "progress", "message": f"{log_action} {resource_log_id} ({i}/{total_resources_attempted}) to {target_url}..."}) + "\n" - - try: - if http_method == "POST": - response = session.post(target_url, json=local_resource, headers=headers, timeout=30) - post_count += 1 - else: - response = session.put(target_url, json=local_resource, headers=headers, timeout=30) - put_count += 1 - - response.raise_for_status() - - success_msg = f"{http_method} successful for {resource_log_id} (Status: {response.status_code})" - if http_method == "POST" and response.status_code == 201: - location = response.headers.get("Location") - if location: - match = re.search(f"{resource_type}/([^/]+)/_history", location) - new_id = match.group(1) if match else "unknown" - success_msg += f" -> New ID: {new_id}" - else: - success_msg += " (No Location header)" - yield json.dumps({"type": "success", "message": success_msg}) + "\n" - success_count += 1 - pkg_found_success = False - for p in pushed_packages_info: - if p["id"] == source_pkg: - p["resource_count"] += 1 - pkg_found_success = True - break - if not pkg_found_success: - pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) - - except requests.exceptions.HTTPError as http_err: - outcome_text = "" - status_code = http_err.response.status_code if http_err.response is not None else "N/A" - try: - outcome = http_err.response.json() - if outcome and 
outcome.get("resourceType") == "OperationOutcome": - issues = outcome.get("issue", []) - outcome_text = "; ".join([f"{i.get('severity', 'info')}: {i.get('diagnostics', i.get('details', {}).get('text', 'No details'))}" for i in issues]) if issues else "OperationOutcome with no issues." - else: - outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body" - except ValueError: - outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body (or not JSON)" - error_msg = f"Failed {http_method} {resource_log_id} (Status: {status_code}): {outcome_text or str(http_err)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": error_msg}) - except requests.exceptions.Timeout: - error_msg = f"Timeout during {http_method} {resource_log_id}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": "Timeout"}) - except requests.exceptions.ConnectionError as conn_err: - error_msg = f"Connection error during {http_method} {resource_log_id}: {conn_err}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": f"Connection Error: {conn_err}"}) - except requests.exceptions.RequestException as req_err: - error_msg = f"Request error during {http_method} {resource_log_id}: {str(req_err)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": f"Request Error: {req_err}"}) - except Exception as e: - error_msg = f"Unexpected error during {http_method} {resource_log_id}: {str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({"resource": resource_log_id, "error": f"Unexpected: {e}"}) - logger.error(f"[API Push Stream] Upload error for {resource_log_id}: {e}", exc_info=True) - else: - pkg_found_skipped = False - for p in pushed_packages_info: - if p["id"] == source_pkg: - pkg_found_skipped = True - break - if not pkg_found_skipped: - pushed_packages_info.append({"id": source_pkg, "resource_count": 0}) - - # --- Final Summary --- - final_status = "success" if failure_count == 0 else "partial" if success_count > 0 else "failure" - dry_run_prefix = "[DRY RUN] " if dry_run else "" - force_prefix = "[FORCE UPLOAD] " if force_upload else "" - if total_resources_attempted == 0 and failure_count == 0: - summary_message = f"{dry_run_prefix}Push finished: No matching resources found to process." - final_status = "success" - else: - summary_message = f"{dry_run_prefix}{force_prefix}Push finished: {post_count} POSTed, {put_count} PUT, {failure_count} failed, {skipped_count} skipped ({total_resources_attempted} resources attempted)." 
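Every line yielded above is one self-contained JSON object with a type field (start, info, progress, success, warning, error, and a final complete event carrying the summary), so callers can consume the stream incrementally. A minimal consumer sketch, assuming a hypothetical HTTP endpoint that relays this generator as NDJSON (the URL and payload keys below are illustrative only):

import json
import requests

def follow_push_stream(url, payload):
    """Read an NDJSON progress stream and return the final summary, if any."""
    summary = None
    with requests.post(url, json=payload, stream=True, timeout=300) as resp:
        resp.raise_for_status()
        for line in resp.iter_lines(decode_unicode=True):
            if not line:
                continue  # skip keep-alive blank lines
            event = json.loads(line)
            if event.get("type") == "complete":
                summary = event.get("data")
            else:
                print(f"[{event.get('type')}] {event.get('message')}")
    return summary

# Hypothetical usage (endpoint and payload shape are illustrative, not part of this module):
# follow_push_stream("http://localhost:5000/api/push-ig", {"package_name": "...", "version": "..."})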
-
-        summary = {
-            "status": final_status,
-            "message": summary_message,
-            "target_server": fhir_server_url,
-            "package_name": package_name,
-            "version": version,
-            "included_dependencies": include_dependencies,
-            "resources_attempted": total_resources_attempted,
-            "success_count": success_count,
-            "post_count": post_count,
-            "put_count": put_count,
-            "failure_count": failure_count,
-            "skipped_count": skipped_count,
-            "validation_failure_count": 0,
-            "failed_details": failed_uploads_details,
-            "skipped_details": skipped_resources_details,
-            "pushed_packages_summary": pushed_packages_info,
-            "dry_run": dry_run,
-            "force_upload": force_upload,
-            "resource_types_filter": resource_types_filter,
-            "skip_files_filter": sorted(list(skip_files_set)) if skip_files_set else None
-        }
-        yield json.dumps({"type": "complete", "data": summary}) + "\n"
-        logger.info(f"[API Push Stream] Completed {package_name}#{version}. Status: {final_status}. {summary_message}")
-
-    except FileNotFoundError as fnf_err:
-        logger.error(f"[API Push Stream] Setup error: {str(fnf_err)}", exc_info=False)
-        error_response = {"status": "error", "message": f"Setup error: {str(fnf_err)}"}
-        try:
-            yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n"
-            yield json.dumps({"type": "complete", "data": error_response}) + "\n"
-        except Exception as yield_e:
-            logger.error(f"Error yielding final setup error: {yield_e}")
-    except Exception as e:
-        logger.error(f"[API Push Stream] Critical error during setup or stream generation: {str(e)}", exc_info=True)
-        error_response = {"status": "error", "message": f"Server error during push setup: {str(e)}"}
-        try:
-            yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n"
-            yield json.dumps({"type": "complete", "data": error_response}) + "\n"
-        except Exception as yield_e:
-            logger.error(f"Error yielding final critical error: {yield_e}")
-
-# --- END generate_push_stream FUNCTION ---
-
-def are_resources_semantically_equal(resource1, resource2):
-    """
-    Compares two FHIR resources, ignoring metadata like versionId, lastUpdated,
-    source, and the text narrative.
-    Logs differing JSON strings if comparison fails and DeepDiff is unavailable.
-    Returns True if they are semantically equal, False otherwise.
- """ - if not isinstance(resource1, dict) or not isinstance(resource2, dict): - return False - if resource1.get('resourceType') != resource2.get('resourceType'): - # Log difference if needed, or just return False - # logger.debug(f"Resource types differ: {resource1.get('resourceType')} vs {resource2.get('resourceType')}") - return False - - # Create deep copies to avoid modifying the originals - try: - copy1 = json.loads(json.dumps(resource1)) - copy2 = json.loads(json.dumps(resource2)) - except Exception as e: - logger.error(f"Compare Error: Failed deep copy: {e}") - return False # Cannot compare if copying fails - - # Keys to ignore within the 'meta' tag during comparison - # --- UPDATED: Added 'source' to the list --- - keys_to_ignore_in_meta = ['versionId', 'lastUpdated', 'source'] - # --- END UPDATE --- - - # Remove meta fields to ignore from copy1 - if 'meta' in copy1: - for key in keys_to_ignore_in_meta: - copy1['meta'].pop(key, None) - # Remove meta tag entirely if it's now empty - if not copy1['meta']: - copy1.pop('meta', None) - - # Remove meta fields to ignore from copy2 - if 'meta' in copy2: - for key in keys_to_ignore_in_meta: - copy2['meta'].pop(key, None) - # Remove meta tag entirely if it's now empty - if not copy2['meta']: - copy2.pop('meta', None) - - # Remove narrative text element from both copies - copy1.pop('text', None) - copy2.pop('text', None) - - # --- Comparison --- - try: - # Convert cleaned copies to sorted, indented JSON strings for comparison & logging - # Using indent=2 helps readability when logging the strings. - json_str1 = json.dumps(copy1, sort_keys=True, indent=2) - json_str2 = json.dumps(copy2, sort_keys=True, indent=2) - - # Perform the comparison - are_equal = (json_str1 == json_str2) - - # --- Debug Logging if Comparison Fails --- - if not are_equal: - resource_id = resource1.get('id', 'UNKNOWN_ID') # Get ID safely - resource_type = resource1.get('resourceType', 'UNKNOWN_TYPE') # Get Type safely - log_prefix = f"Comparison Failed for {resource_type}/{resource_id} (after ignoring meta.source)" - logger.debug(log_prefix) - - # Attempt to use DeepDiff for a structured difference report - try: - from deepdiff import DeepDiff - # Configure DeepDiff for potentially better comparison - # ignore_order=True is important for lists/arrays - # significant_digits might help with float issues if needed - # report_repetition=True might help spot array differences - diff = DeepDiff(copy1, copy2, ignore_order=True, report_repetition=True, verbose_level=0) - # Only log if diff is not empty - if diff: - logger.debug(f"DeepDiff details: {diff}") - else: - # This case suggests deepdiff found them equal but string comparison failed - odd. - logger.debug(f"JSON strings differed, but DeepDiff found no differences.") - # Log JSON strings if deepdiff shows no difference (or isn't available) - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- START ---") - logger.debug(json_str1) - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- END ---") - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- START ---") - logger.debug(json_str2) - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- END ---") - - except ImportError: - # DeepDiff not available, log the differing JSON strings - logger.debug(f"DeepDiff not available. 
Logging differing JSON strings.") - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- START ---") - logger.debug(json_str1) - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- END ---") - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- START ---") - logger.debug(json_str2) - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- END ---") - except Exception as diff_err: - # Error during deepdiff itself - logger.error(f"Error during deepdiff calculation for {resource_type}/{resource_id}: {diff_err}") - # Fallback to logging JSON strings - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- START ---") - logger.debug(json_str1) - logger.debug(f"--- {resource_type}/{resource_id} Resource 1 (Local/Cleaned) --- END ---") - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- START ---") - logger.debug(json_str2) - logger.debug(f"--- {resource_type}/{resource_id} Resource 2 (Server/Cleaned) --- END ---") - - # --- END DEBUG LOGGING --- - - return are_equal - - except Exception as e: - # Catch errors during JSON dumping or final comparison steps - resource_id_err = resource1.get('id', 'UNKNOWN_ID') - resource_type_err = resource1.get('resourceType', 'UNKNOWN_TYPE') - logger.error(f"Error during final comparison step for {resource_type_err}/{resource_id_err}: {e}", exc_info=True) - return False # Treat comparison errors as 'not equal' to be safe -# --- END FUNCTION --- - -# --- Service Function for Test Data Upload (with Conditional Upload) --- -def process_and_upload_test_data(server_info, options, temp_file_dir): - """ - Parses test data files, optionally validates, builds dependency graph, - sorts, and uploads resources individually (conditionally or simple PUT) or as a transaction bundle. - Yields NDJSON progress updates. - """ - files_processed_count = 0 - resource_map = {} - error_count = 0 - errors = [] - processed_filenames = set() - verbose = True - resources_uploaded_count = 0 - resources_parsed_list = [] - sorted_resources_ids = [] - validation_errors_count = 0 - validation_warnings_count = 0 - validation_failed_resources = set() - adj = defaultdict(list) - rev_adj = defaultdict(list) - in_degree = defaultdict(int) - nodes = set() - - try: - yield json.dumps({"type": "progress", "message": f"Scanning upload directory..."}) + "\n" - - # --- 1. 
List and Process Files --- - files_to_parse = [] - initial_files = [os.path.join(temp_file_dir, f) for f in os.listdir(temp_file_dir) if os.path.isfile(os.path.join(temp_file_dir, f))] - files_processed_count = len(initial_files) - for file_path in initial_files: - filename = os.path.basename(file_path) - if filename.lower().endswith('.zip'): - yield json.dumps({"type": "progress", "message": f"Extracting ZIP: {filename}..."}) + "\n" - try: - with zipfile.ZipFile(file_path, 'r') as zip_ref: - extracted_count = 0 - for member in zip_ref.namelist(): - if member.endswith('/') or member.startswith('__MACOSX') or member.startswith('.'): continue - member_filename = os.path.basename(member) - if not member_filename: continue - if member_filename.lower().endswith(('.json', '.xml')): - target_path = os.path.join(temp_file_dir, member_filename) - if not os.path.exists(target_path): - with zip_ref.open(member) as source, open(target_path, "wb") as target: - shutil.copyfileobj(source, target) - files_to_parse.append(target_path) - extracted_count += 1 - else: - yield json.dumps({"type": "warning", "message": f"Skipped extracting '{member_filename}' from ZIP, file exists."}) + "\n" - yield json.dumps({"type": "info", "message": f"Extracted {extracted_count} JSON/XML files from {filename}."}) + "\n" - processed_filenames.add(filename) - except zipfile.BadZipFile: - error_msg = f"Invalid ZIP: {filename}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += 1 - except Exception as e: - error_msg = f"Error extracting ZIP {filename}: {e}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += 1 - elif filename.lower().endswith(('.json', '.xml')): - files_to_parse.append(file_path) - yield json.dumps({"type": "info", "message": f"Found {len(files_to_parse)} JSON/XML files to parse."}) + "\n" - - # --- 2. 
Parse JSON/XML Files --- - temp_resources_parsed = [] - for file_path in files_to_parse: - filename = os.path.basename(file_path) - if filename in processed_filenames: - continue - processed_filenames.add(filename) - yield json.dumps({"type": "progress", "message": f"Parsing {filename}..."}) + "\n" - try: - with open(file_path, 'r', encoding='utf-8-sig') as f: - content = f.read() - parsed_content_list = [] - if filename.lower().endswith('.json'): - try: - parsed_json = json.loads(content) - if isinstance(parsed_json, dict) and parsed_json.get('resourceType') == 'Bundle': - for entry_idx, entry in enumerate(parsed_json.get('entry', [])): - resource = entry.get('resource') - if isinstance(resource, dict) and 'resourceType' in resource and 'id' in resource: - parsed_content_list.append(resource) - elif resource: - yield json.dumps({"type": "warning", "message": f"Skipping invalid resource #{entry_idx+1} in Bundle {filename}."}) + "\n" - elif isinstance(parsed_json, dict) and 'resourceType' in parsed_json and 'id' in parsed_json: - parsed_content_list.append(parsed_json) - elif isinstance(parsed_json, list): - yield json.dumps({"type": "warning", "message": f"File {filename} contains JSON array."}) + "\n" - for item_idx, item in enumerate(parsed_json): - if isinstance(item, dict) and 'resourceType' in item and 'id' in item: - parsed_content_list.append(item) - else: - yield json.dumps({"type": "warning", "message": f"Skipping invalid item #{item_idx+1} in JSON array {filename}."}) + "\n" - else: - raise ValueError("Not valid FHIR Resource/Bundle.") - except json.JSONDecodeError as e: - raise ValueError(f"Invalid JSON: {e}") - elif filename.lower().endswith('.xml'): - if FHIR_RESOURCES_AVAILABLE: - try: - root = ET.fromstring(content) - resource_type = root.tag - if not resource_type: - raise ValueError("XML root tag missing.") - temp_dict = basic_fhir_xml_to_dict(content) - if temp_dict: - model_class = get_fhir_model_class(resource_type) - fhir_resource = model_class(**temp_dict) - resource_dict = fhir_resource.dict(exclude_none=True) - if 'id' in resource_dict: - parsed_content_list.append(resource_dict) - yield json.dumps({"type": "info", "message": f"Parsed/validated XML: {filename}"}) + "\n" - else: - yield json.dumps({"type": "warning", "message": f"Parsed XML {filename} missing 'id'. Skipping."}) + "\n" - else: - raise ValueError("Basic XML to Dict failed.") - except (ET.ParseError, FHIRValidationError, ValueError, NotImplementedError, Exception) as e: - raise ValueError(f"Invalid/Unsupported FHIR XML: {e}") - else: - parsed_content = basic_fhir_xml_to_dict(content) - if parsed_content and parsed_content.get("resourceType") and parsed_content.get("id"): - yield json.dumps({"type": "warning", "message": f"Parsed basic XML (no validation): {filename}"}) + "\n" - parsed_content_list.append(parsed_content) - else: - yield json.dumps({"type": "warning", "message": f"Basic XML parse failed or missing type/id: {filename}. 
Skipping."}) + "\n" - continue - if parsed_content_list: - temp_resources_parsed.extend(parsed_content_list) - else: - yield json.dumps({"type": "warning", "message": f"Skipping {filename}: No valid content."}) + "\n" - except (IOError, ValueError, Exception) as e: - error_msg = f"Error processing file {filename}: {e}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += 1 - logger.error(f"Error processing file {filename}", exc_info=True) - - # Populate Resource Map - for resource in temp_resources_parsed: - res_type = resource.get('resourceType') - res_id = resource.get('id') - if res_type and res_id: - full_id = f"{res_type}/{res_id}" - if full_id not in resource_map: - resource_map[full_id] = resource - else: - yield json.dumps({"type": "warning", "message": f"Duplicate ID: {full_id}. Using first."}) + "\n" - else: - yield json.dumps({"type": "warning", "message": f"Parsed resource missing type/id: {str(resource)[:100]}..."}) + "\n" - resources_parsed_list = list(resource_map.values()) - yield json.dumps({"type": "info", "message": f"Parsed {len(resources_parsed_list)} unique resources."}) + "\n" - - # --- 2.5 Pre-Upload Validation Step --- - if options.get('validate_before_upload'): - validation_package_id = options.get('validation_package_id') - if not validation_package_id or '#' not in validation_package_id: - raise ValueError("Validation package ID missing/invalid.") - val_pkg_name, val_pkg_version = validation_package_id.split('#', 1) - yield json.dumps({"type": "progress", "message": f"Starting validation against {val_pkg_name}#{val_pkg_version}..."}) + "\n" - validated_resources_map = {} - for resource in resources_parsed_list: - full_id = f"{resource.get('resourceType')}/{resource.get('id')}" - yield json.dumps({"type": "validation_info", "message": f"Validating {full_id}..."}) + "\n" - try: - validation_report = validate_resource_against_profile(val_pkg_name, val_pkg_version, resource, include_dependencies=False) - for warning in validation_report.get('warnings', []): - yield json.dumps({"type": "validation_warning", "message": f"{full_id}: {warning}"}) + "\n" - validation_warnings_count += 1 - if not validation_report.get('valid', False): - validation_failed_resources.add(full_id) - validation_errors_count += 1 - for error in validation_report.get('errors', []): - error_detail = f"Validation Error ({full_id}): {error}" - yield json.dumps({"type": "validation_error", "message": error_detail}) + "\n" - errors.append(error_detail) - if options.get('error_handling', 'stop') == 'stop': - raise ValueError(f"Validation failed for {full_id} (stop on error).") - else: - validated_resources_map[full_id] = resource - except Exception as val_err: - error_msg = f"Validation error {full_id}: {val_err}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += 1 - validation_failed_resources.add(full_id) - validation_errors_count += 1 - logger.error(f"Validation exception {full_id}", exc_info=True) - if options.get('error_handling', 'stop') == 'stop': - raise ValueError(f"Validation exception for {full_id} (stop on error).") - yield json.dumps({"type": "info", "message": f"Validation complete. 
Errors: {validation_errors_count}, Warnings: {validation_warnings_count}."}) + "\n" - resource_map = validated_resources_map - nodes = set(resource_map.keys()) - yield json.dumps({"type": "info", "message": f"Proceeding with {len(nodes)} valid resources."}) + "\n" - else: - yield json.dumps({"type": "info", "message": "Pre-upload validation skipped."}) + "\n" - nodes = set(resource_map.keys()) - - # --- 3. Build Dependency Graph --- - yield json.dumps({"type": "progress", "message": "Building dependency graph..."}) + "\n" - dependency_count = 0 - external_refs = defaultdict(list) - for full_id, resource in resource_map.items(): - refs_list = [] - find_references(resource, refs_list) - if refs_list: - if verbose: - yield json.dumps({"type": "info", "message": f"Processing {len(refs_list)} refs in {full_id}"}) + "\n" - for ref_str in refs_list: - target_full_id = None - if isinstance(ref_str, str) and '/' in ref_str and not ref_str.startswith('#'): - parts = ref_str.split('/') - if len(parts) == 2 and parts[0] and parts[1]: - target_full_id = ref_str - elif len(parts) > 2: - try: - parsed_url = urlparse(ref_str) - if parsed_url.path: - path_parts = parsed_url.path.strip('/').split('/') - if len(path_parts) >= 2 and path_parts[-2] and path_parts[-1]: - target_full_id = f"{path_parts[-2]}/{path_parts[-1]}" - except: - pass - if target_full_id and target_full_id != full_id: - if target_full_id in resource_map: - if target_full_id not in adj[full_id]: - adj[full_id].append(target_full_id) - rev_adj[target_full_id].append(full_id) - in_degree[full_id] += 1 - dependency_count += 1 - if verbose: - yield json.dumps({"type": "info", "message": f" Dep Added: {full_id} -> {target_full_id}"}) + "\n" - else: - target_failed_validation = options.get('validate_before_upload') and target_full_id in validation_failed_resources - if not target_failed_validation and verbose: - yield json.dumps({"type": "warning", "message": f"Ref '{ref_str}' in {full_id} points outside processed set ({target_full_id})."}) + "\n" - external_refs[full_id].append(ref_str) - yield json.dumps({"type": "info", "message": f"Graph built for {len(nodes)} resources. Internal Deps: {dependency_count}."}) + "\n" - - # --- 4. Perform Topological Sort --- - yield json.dumps({"type": "progress", "message": "Sorting resources by dependency..."}) + "\n" - sorted_resources_ids = [] - queue = deque([node for node in nodes if in_degree[node] == 0]) - processed_count = 0 - while queue: - u = queue.popleft() - sorted_resources_ids.append(u) - processed_count += 1 - if u in rev_adj: - for v in rev_adj[u]: - in_degree[v] -= 1 - if in_degree[v] == 0: - queue.append(v) - if processed_count != len(nodes): - cycle_nodes = sorted([node for node in nodes if in_degree[node] > 0]) - error_msg = f"Circular dependency detected. Involved: {', '.join(cycle_nodes[:10])}{'...' if len(cycle_nodes) > 10 else ''}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += 1 - raise ValueError("Circular dependency detected") - yield json.dumps({"type": "info", "message": f"Topological sort successful. Order determined for {len(sorted_resources_ids)} resources."}) + "\n" - - # --- 5. 
Upload Sorted Resources --- - if not sorted_resources_ids: - yield json.dumps({"type": "info", "message": "No valid resources remaining to upload."}) + "\n" - else: - upload_mode = options.get('upload_mode', 'individual') - error_handling_mode = options.get('error_handling', 'stop') - use_conditional = options.get('use_conditional_uploads', False) and upload_mode == 'individual' - session = requests.Session() - base_url = server_info['url'].rstrip('/') - upload_headers = {'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'} - if server_info['auth_type'] in ['bearerToken', 'basic'] and server_info.get('auth_token'): - # Log the Authorization header (mask sensitive data) - auth_header = server_info['auth_token'] - if auth_header.startswith('Basic '): - auth_display = 'Basic ' - else: - auth_display = auth_header[:10] + '...' if len(auth_header) > 10 else auth_header - yield json.dumps({"type": "info", "message": f"Using {server_info['auth_type']} auth with header: Authorization: {auth_display}"}) + "\n" - upload_headers['Authorization'] = server_info['auth_token'] # FIXED: Use server_info['auth_token'] - else: - yield json.dumps({"type": "info", "message": "Using no auth."}) + "\n" - - if upload_mode == 'transaction': - # --- Transaction Bundle Upload --- - yield json.dumps({"type": "progress", "message": f"Preparing transaction bundle for {len(sorted_resources_ids)} resources..."}) + "\n" - transaction_bundle = {"resourceType": "Bundle", "type": "transaction", "entry": []} - for full_id in sorted_resources_ids: - resource = resource_map.get(full_id) - if resource: - res_type = resource.get('resourceType') - res_id = resource.get('id') - entry = { - "fullUrl": f"{base_url}/{res_type}/{res_id}", - "resource": resource, - "request": {"method": "PUT", "url": f"{res_type}/{res_id}"} - } - transaction_bundle["entry"].append(entry) - if not transaction_bundle["entry"]: - yield json.dumps({"type": "warning", "message": "No valid entries for transaction."}) + "\n" - else: - yield json.dumps({"type": "progress", "message": f"Uploading transaction bundle ({len(transaction_bundle['entry'])} entries)..."}) + "\n" - try: - response = session.post(base_url, json=transaction_bundle, headers=upload_headers, timeout=120) - response.raise_for_status() - response_bundle = response.json() - current_bundle_success = 0 - current_bundle_errors = 0 - for entry in response_bundle.get("entry", []): - entry_response = entry.get("response", {}) - status = entry_response.get("status", "") - location = entry_response.get("location", "N/A") - resource_ref = location.split('/')[-3] + '/' + location.split('/')[-1] if status.startswith("201") and '/_history/' in location else location - if status.startswith("200") or status.startswith("201"): - current_bundle_success += 1 - else: - current_bundle_errors += 1 - outcome = entry.get("resource") - outcome_text = f"Status: {status}" - if outcome and outcome.get('resourceType') == 'OperationOutcome': - try: - issue_texts = [] - for issue in outcome.get('issue', []): - severity = issue.get('severity', 'info') - diag = issue.get('diagnostics') or issue.get('details', {}).get('text', 'No details') - issue_texts.append(f"{severity}: {diag}") - if issue_texts: - outcome_text += "; " + "; ".join(issue_texts) - except Exception as parse_err: - logger.warning(f"Could not parse OperationOutcome details: {parse_err}") - error_msg = f"Txn entry failed for '{resource_ref}'. 
{outcome_text}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - if error_handling_mode == 'stop': - break - resources_uploaded_count += current_bundle_success - error_count += current_bundle_errors - yield json.dumps({"type": "success", "message": f"Txn processed. Success: {current_bundle_success}, Errors: {current_bundle_errors}."}) + "\n" - if current_bundle_errors > 0 and error_handling_mode == 'stop': - raise ValueError("Stopping due to transaction error.") - except requests.exceptions.HTTPError as e: - outcome_text = "" - if e.response is not None: - try: - outcome = e.response.json() - if outcome and outcome.get('resourceType') == 'OperationOutcome': - issue_texts = [] - for issue in outcome.get('issue', []): - severity = issue.get('severity', 'info') - diag = issue.get('diagnostics') or issue.get('details', {}).get('text', 'No details') - issue_texts.append(f"{severity}: {diag}") - if issue_texts: - outcome_text = "; ".join(issue_texts) - else: - outcome_text = e.response.text[:300] - else: - outcome_text = e.response.text[:300] - except ValueError: - outcome_text = e.response.text[:300] - else: - outcome_text = "No response body." - error_msg = f"Txn POST failed (Status: {e.response.status_code if e.response is not None else 'N/A'}): {outcome_text or str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += len(transaction_bundle["entry"]) - raise ValueError("Stopping due to transaction POST error.") - except requests.exceptions.RequestException as e: - error_msg = f"Network error posting txn: {e}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += len(transaction_bundle["entry"]) - raise ValueError("Stopping due to transaction network error.") - except Exception as e: - error_msg = f"Error processing txn response: {e}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(error_msg) - error_count += len(transaction_bundle["entry"]) - logger.error("Txn response error", exc_info=True) - raise ValueError("Stopping due to txn response error.") - - else: - # --- Individual Resource Upload --- - yield json.dumps({"type": "progress", "message": f"Starting individual upload ({'conditional' if use_conditional else 'simple PUT'})..."}) + "\n" - for i, full_id in enumerate(sorted_resources_ids): - resource_to_upload = resource_map.get(full_id) - if not resource_to_upload: - continue - res_type = resource_to_upload.get('resourceType') - res_id = resource_to_upload.get('id') - target_url_put = f"{base_url}/{res_type}/{res_id}" - target_url_post = f"{base_url}/{res_type}" - - current_headers = upload_headers.copy() - action_log_prefix = f"Uploading {full_id} ({i+1}/{len(sorted_resources_ids)})" - etag = None - resource_exists = False - method = "PUT" - target_url = target_url_put - log_action = "Uploading (PUT)" # Defaults for simple PUT - - # --- Conditional Logic --- - if use_conditional: - yield json.dumps({"type": "progress", "message": f"{action_log_prefix}: Checking existence..."}) + "\n" - try: - get_response = session.get(target_url_put, headers=current_headers, timeout=15) - if get_response.status_code == 200: - resource_exists = True - etag = get_response.headers.get('ETag') - if etag: - current_headers['If-Match'] = etag - log_action = "Updating (conditional)" - yield json.dumps({"type": "info", "message": f" Resource exists. ETag: {etag}. 
Will use conditional PUT."}) + "\n" - else: - log_action = "Updating (no ETag)" - yield json.dumps({"type": "warning", "message": f" Resource exists but no ETag found. Will use simple PUT."}) + "\n" - method = "PUT" - target_url = target_url_put - elif get_response.status_code == 404: - resource_exists = False - method = "PUT" - target_url = target_url_put # Use PUT for creation with specific ID - log_action = "Creating (PUT)" - yield json.dumps({"type": "info", "message": f" Resource not found. Will use PUT to create."}) + "\n" - else: - get_response.raise_for_status() - except requests.exceptions.HTTPError as http_err: - error_msg = f"Error checking existence for {full_id} (Status: {http_err.response.status_code}). Cannot proceed conditionally." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError("Stopping due to existence check error.") - continue - except requests.exceptions.RequestException as req_err: - error_msg = f"Network error checking existence for {full_id}: {req_err}. Cannot proceed conditionally." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError("Stopping due to existence check network error.") - continue - - # --- Perform Upload Action --- - try: - yield json.dumps({"type": "progress", "message": f"{action_log_prefix}: {log_action}..."}) + "\n" - if method == "POST": - response = session.post(target_url, json=resource_to_upload, headers=current_headers, timeout=30) - else: - response = session.put(target_url, json=resource_to_upload, headers=current_headers, timeout=30) - response.raise_for_status() - - status_code = response.status_code - success_msg = f"{log_action} successful for {full_id} (Status: {status_code})" - if method == "POST" and status_code == 201: - location = response.headers.get('Location') - success_msg += f" Loc: {location}" if location else "" - yield json.dumps({"type": "success", "message": success_msg}) + "\n" - resources_uploaded_count += 1 - - except requests.exceptions.HTTPError as e: - status_code = e.response.status_code if e.response is not None else 'N/A' - outcome_text = "" - if e.response is not None: - try: - outcome = e.response.json() - if outcome and outcome.get('resourceType') == 'OperationOutcome': - issue_texts = [] - for issue in outcome.get('issue', []): - severity = issue.get('severity', 'info') - diag = issue.get('diagnostics') or issue.get('details', {}).get('text', 'No details') - issue_texts.append(f"{severity}: {diag}") - if issue_texts: - outcome_text = "; ".join(issue_texts) - else: - outcome_text = e.response.text[:200] - else: - outcome_text = e.response.text[:200] - except ValueError: - outcome_text = e.response.text[:200] - else: - outcome_text = "No response body." 
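The conditional branch above follows the usual optimistic-locking pattern: GET the current resource, echo its ETag back in an If-Match header on the PUT, and treat HTTP 412 as a version conflict. A standalone sketch of that round trip against a generic FHIR endpoint (the helper name is illustrative and not part of this module):

import requests

def conditional_put(base_url, resource, session=None, timeout=30):
    """PUT a resource, adding If-Match when the server already holds a version of it."""
    session = session or requests.Session()
    headers = {"Content-Type": "application/fhir+json", "Accept": "application/fhir+json"}
    target = f"{base_url.rstrip('/')}/{resource['resourceType']}/{resource['id']}"

    check = session.get(target, headers=headers, timeout=timeout)
    if check.status_code == 200 and check.headers.get("ETag"):
        headers["If-Match"] = check.headers["ETag"]  # e.g. W/"3"

    resp = session.put(target, json=resource, headers=headers, timeout=timeout)
    if resp.status_code == 412:
        # Version conflict: the resource changed between our GET and PUT.
        raise RuntimeError(f"Conditional update failed for {target} (412 Precondition Failed)")
    resp.raise_for_status()
    return resp

# Hypothetical usage against a local HAPI server:
# conditional_put("http://localhost:8080/fhir", {"resourceType": "Patient", "id": "example"})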
- error_prefix = "Conditional update failed" if status_code == 412 else f"{method} failed" - error_msg = f"{error_prefix} for {full_id} (Status: {status_code}): {outcome_text or str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError(f"Stopping due to {method} error.") - except requests.exceptions.Timeout: - error_msg = f"Timeout during {method} for {full_id}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError("Stopping due to upload timeout.") - except requests.exceptions.ConnectionError as e: - error_msg = f"Connection error during {method} for {full_id}: {e}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError("Stopping due to connection error.") - except requests.exceptions.RequestException as e: - error_msg = f"Request error during {method} for {full_id}: {str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - if error_handling_mode == 'stop': - raise ValueError("Stopping due to request error.") - except Exception as e: - error_msg = f"Unexpected error during {method} for {full_id}: {str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - errors.append(f"{full_id}: {error_msg}") - error_count += 1 - logger.error(f"Upload error for {full_id}", exc_info=True) - if error_handling_mode == 'stop': - raise ValueError("Stopping due to unexpected upload error.") - - yield json.dumps({"type": "info", "message": f"Individual upload loop finished."}) + "\n" - - except ValueError as ve: - logger.error(f"Processing stopped: {ve}") - except Exception as e: - logger.error(f"Critical error: {e}", exc_info=True) - error_count += 1 - errors.append(f"Critical Error: {str(e)}") - yield json.dumps({"type": "error", "message": f"Critical error: {str(e)}"}) + "\n" - - # --- Final Summary --- - final_status = "unknown" - total_errors = error_count + validation_errors_count - if total_errors > 0: - final_status = "failure" if resources_uploaded_count == 0 else "partial" - elif resource_map or resources_parsed_list: - final_status = "success" - elif files_processed_count > 0: - final_status = "success" - else: - final_status = "success" - summary_message = f"Processing finished. Status: {final_status}. Files: {files_processed_count}, Parsed: {len(resources_parsed_list)}, Validation Errors: {validation_errors_count}, Validation Warnings: {validation_warnings_count}, Uploaded: {resources_uploaded_count}, Upload Errors: {error_count}." - summary = { - "status": final_status, - "message": summary_message, - "files_processed": files_processed_count, - "resources_parsed": len(resources_parsed_list), - "validation_errors": validation_errors_count, - "validation_warnings": validation_warnings_count, - "resources_uploaded": resources_uploaded_count, - "error_count": error_count, - "errors": errors - } - yield json.dumps({"type": "complete", "data": summary}) + "\n" - logger.info(f"[Upload Test Data] Completed. Status: {final_status}. 
{summary_message}") - -# --- END Service Function --- - -# --- CORRECTED retrieve_bundles function with NEW logic --- -def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references=False, fetch_reference_bundles=False, auth_type='none', auth_token=None): - """ - Retrieve FHIR bundles and save to a ZIP file. - Optionally fetches referenced resources, either individually by ID or as full bundles by type. - Supports authentication for custom FHIR servers. - Yields NDJSON progress updates. - """ - temp_dir = None - try: - total_initial_bundles = 0 - fetched_individual_references = 0 - fetched_type_bundles = 0 - retrieved_references_or_types = set() - - temp_dir = tempfile.mkdtemp(prefix="fhir_retrieve_") - logger.debug(f"Created temporary directory for bundle retrieval: {temp_dir}") - yield json.dumps({"type": "progress", "message": f"Starting bundle retrieval for {len(resources)} resource types"}) + "\n" - if validate_references: - yield json.dumps({"type": "info", "message": f"Reference fetching ON (Mode: {'Full Type Bundles' if fetch_reference_bundles else 'Individual Resources'})"}) + "\n" - else: - yield json.dumps({"type": "info", "message": "Reference fetching OFF"}) + "\n" - - # Determine Base URL and Headers for Proxy - base_proxy_url = f"{current_app.config['APP_BASE_URL'].rstrip('/')}/fhir" - headers = {'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8'} - is_custom_url = fhir_server_url != '/fhir' and fhir_server_url is not None and fhir_server_url.startswith('http') - if is_custom_url: - headers['X-Target-FHIR-Server'] = fhir_server_url.rstrip('/') - if auth_type in ['bearer', 'basic'] and auth_token: - auth_display = 'Basic ' if auth_type == 'basic' else (auth_token[:10] + '...' if len(auth_token) > 10 else auth_token) - yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n" - headers['Authorization'] = auth_token - else: - yield json.dumps({"type": "info", "message": "Using no authentication for custom URL"}) + "\n" - logger.debug(f"Will use proxy with X-Target-FHIR-Server: {headers['X-Target-FHIR-Server']}") - else: - yield json.dumps({"type": "info", "message": "Using no authentication for local HAPI server"}) + "\n" - logger.debug("Will use proxy targeting local HAPI server") - - # Fetch Initial Bundles - initial_bundle_files = [] - for resource_type in resources: - url = f"{base_proxy_url}/{quote(resource_type)}" - yield json.dumps({"type": "progress", "message": f"Fetching bundle for {resource_type} via proxy..."}) + "\n" - logger.debug(f"Sending GET request to proxy {url} with headers: {json.dumps(headers)}") - try: - response = requests.get(url, headers=headers, timeout=60) - logger.debug(f"Proxy response for {resource_type}: HTTP {response.status_code}") - if response.status_code != 200: - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." 
- except: pass - yield json.dumps({"type": "error", "message": f"Failed to fetch {resource_type}: {error_detail}"}) + "\n" - logger.error(f"Failed to fetch {resource_type} via proxy {url}: {error_detail}") - continue - try: - bundle = response.json() - except ValueError as e: - yield json.dumps({"type": "error", "message": f"Invalid JSON response for {resource_type}: {str(e)}"}) + "\n" - logger.error(f"Invalid JSON from proxy for {resource_type} at {url}: {e}, Response: {response.text[:500]}") - continue - if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': - yield json.dumps({"type": "error", "message": f"Expected Bundle for {resource_type}, got {bundle.get('resourceType', 'unknown')}"}) + "\n" - logger.error(f"Expected Bundle for {resource_type}, got {bundle.get('resourceType', 'unknown')}") - continue - if not bundle.get('entry'): - yield json.dumps({"type": "warning", "message": f"No entries found in bundle for {resource_type}"}) + "\n" - - # Save the bundle - output_file = os.path.join(temp_dir, f"{resource_type}_bundle.json") - try: - with open(output_file, 'w', encoding='utf-8') as f: - json.dump(bundle, f, indent=2) - logger.debug(f"Wrote bundle to {output_file}") - initial_bundle_files.append(output_file) - total_initial_bundles += 1 - yield json.dumps({"type": "success", "message": f"Saved bundle for {resource_type}"}) + "\n" - except IOError as e: - yield json.dumps({"type": "error", "message": f"Failed to save bundle file for {resource_type}: {e}"}) + "\n" - logger.error(f"Failed to write bundle file {output_file}: {e}") - continue - except requests.RequestException as e: - yield json.dumps({"type": "error", "message": f"Error connecting to proxy for {resource_type}: {str(e)}"}) + "\n" - logger.error(f"Error retrieving bundle for {resource_type} via proxy {url}: {e}") - continue - except Exception as e: - yield json.dumps({"type": "error", "message": f"Unexpected error fetching {resource_type}: {str(e)}"}) + "\n" - logger.error(f"Unexpected error during initial fetch for {resource_type} at {url}: {e}", exc_info=True) - continue - - # Fetch Referenced Resources (Conditionally) - if validate_references and initial_bundle_files: - yield json.dumps({"type": "progress", "message": "Scanning retrieved bundles for references..."}) + "\n" - all_references = set() - references_by_type = defaultdict(set) - - # Scan for References - for bundle_file_path in initial_bundle_files: - try: - with open(bundle_file_path, 'r', encoding='utf-8') as f: - bundle = json.load(f) - for entry in bundle.get('entry', []): - resource = entry.get('resource') - if resource: - current_refs = [] - find_references(resource, current_refs) - for ref_str in current_refs: - if isinstance(ref_str, str) and '/' in ref_str and not ref_str.startswith('#'): - all_references.add(ref_str) - try: - ref_type = ref_str.split('/')[0] - if ref_type: - references_by_type[ref_type].add(ref_str) - except Exception: - pass - except Exception as e: - yield json.dumps({"type": "warning", "message": f"Could not scan references in {os.path.basename(bundle_file_path)}: {e}"}) + "\n" - logger.warning(f"Error processing references in {bundle_file_path}: {e}") - - # Fetch Logic - if not all_references: - yield json.dumps({"type": "info", "message": "No references found to fetch."}) + "\n" - else: - if fetch_reference_bundles: - # Fetch Full Bundles by Type - unique_ref_types = sorted(list(references_by_type.keys())) - yield json.dumps({"type": "progress", "message": f"Fetching full bundles for 
{len(unique_ref_types)} referenced types..."}) + "\n" - logger.info(f"Fetching full bundles for referenced types: {unique_ref_types}") - - for ref_type in unique_ref_types: - if ref_type in retrieved_references_or_types: - continue - - url = f"{base_proxy_url}/{quote(ref_type)}" - yield json.dumps({"type": "progress", "message": f"Fetching full bundle for type {ref_type} via proxy..."}) + "\n" - logger.debug(f"Sending GET request for full type bundle {ref_type} to proxy {url} with headers: {json.dumps(headers)}") - try: - response = requests.get(url, headers=headers, timeout=180) - logger.debug(f"Proxy response for {ref_type} bundle: HTTP {response.status_code}") - if response.status_code != 200: - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." - except: pass - yield json.dumps({"type": "warning", "message": f"Failed to fetch full bundle for {ref_type}: {error_detail}"}) + "\n" - logger.warning(f"Failed to fetch full bundle {ref_type} via proxy {url}: {error_detail}") - retrieved_references_or_types.add(ref_type) - continue - - try: - bundle = response.json() - except ValueError as e: - yield json.dumps({"type": "warning", "message": f"Invalid JSON for full {ref_type} bundle: {str(e)}"}) + "\n" - logger.warning(f"Invalid JSON response from proxy for full {ref_type} bundle at {url}: {e}") - retrieved_references_or_types.add(ref_type) - continue - - if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': - yield json.dumps({"type": "warning", "message": f"Expected Bundle for full {ref_type} fetch, got {bundle.get('resourceType', 'unknown')}"}) + "\n" - logger.warning(f"Expected Bundle for full {ref_type} fetch, got {bundle.get('resourceType', 'unknown')}") - retrieved_references_or_types.add(ref_type) - continue - - # Save the full type bundle - output_file = os.path.join(temp_dir, f"ref_{ref_type}_BUNDLE.json") - try: - with open(output_file, 'w', encoding='utf-8') as f: - json.dump(bundle, f, indent=2) - logger.debug(f"Wrote full type bundle to {output_file}") - fetched_type_bundles += 1 - retrieved_references_or_types.add(ref_type) - yield json.dumps({"type": "success", "message": f"Saved full bundle for type {ref_type}"}) + "\n" - except IOError as e: - yield json.dumps({"type": "warning", "message": f"Failed to save full bundle file for {ref_type}: {e}"}) + "\n" - logger.error(f"Failed to write full bundle file {output_file}: {e}") - retrieved_references_or_types.add(ref_type) - except requests.RequestException as e: - yield json.dumps({"type": "warning", "message": f"Error connecting to proxy for full {ref_type} bundle: {str(e)}"}) + "\n" - logger.warning(f"Error retrieving full {ref_type} bundle via proxy: {e}") - retrieved_references_or_types.add(ref_type) - except Exception as e: - yield json.dumps({"type": "warning", "message": f"Unexpected error fetching full {ref_type} bundle: {str(e)}"}) + "\n" - logger.warning(f"Unexpected error during full {ref_type} bundle fetch: {e}", exc_info=True) - retrieved_references_or_types.add(ref_type) - else: - # Fetch Individual Referenced Resources - yield json.dumps({"type": "progress", "message": f"Fetching {len(all_references)} unique referenced resources individually..."}) + "\n" - logger.info(f"Fetching {len(all_references)} unique referenced resources by ID.") - for ref in sorted(list(all_references)): - if ref in retrieved_references_or_types: - continue - - try: - ref_parts = ref.split('/') - if len(ref_parts) != 2 or not ref_parts[0] or not 
ref_parts[1]: - logger.warning(f"Skipping invalid reference format: {ref}") - continue - ref_type, ref_id = ref_parts - - search_param = quote(f"_id={ref_id}") - url = f"{base_proxy_url}/{quote(ref_type)}?{search_param}" - yield json.dumps({"type": "progress", "message": f"Fetching referenced {ref_type}/{ref_id} via proxy..."}) + "\n" - logger.debug(f"Sending GET request for referenced {ref} to proxy {url} with headers: {json.dumps(headers)}") - - response = requests.get(url, headers=headers, timeout=60) - logger.debug(f"Proxy response for referenced {ref}: HTTP {response.status_code}") - - if response.status_code != 200: - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." - except: pass - yield json.dumps({"type": "warning", "message": f"Failed to fetch referenced {ref}: {error_detail}"}) + "\n" - logger.warning(f"Failed to fetch referenced {ref} via proxy {url}: {error_detail}") - retrieved_references_or_types.add(ref) - continue - - try: - bundle = response.json() - except ValueError as e: - yield json.dumps({"type": "warning", "message": f"Invalid JSON for referenced {ref}: {str(e)}"}) + "\n" - logger.warning(f"Invalid JSON from proxy for ref {ref} at {url}: {e}") - retrieved_references_or_types.add(ref) - continue - - if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': - yield json.dumps({"type": "warning", "message": f"Expected Bundle for referenced {ref}, got {bundle.get('resourceType', 'unknown')}"}) + "\n" - retrieved_references_or_types.add(ref) - continue - - if not bundle.get('entry'): - yield json.dumps({"type": "info", "message": f"Referenced resource {ref} not found on server."}) + "\n" - logger.info(f"Referenced resource {ref} not found via search {url}") - retrieved_references_or_types.add(ref) - continue - - # Save the bundle containing the single referenced resource - output_file = os.path.join(temp_dir, f"ref_{ref_type}_{ref_id}.json") - try: - with open(output_file, 'w', encoding='utf-8') as f: - json.dump(bundle, f, indent=2) - logger.debug(f"Wrote referenced resource bundle to {output_file}") - fetched_individual_references += 1 - retrieved_references_or_types.add(ref) - yield json.dumps({"type": "success", "message": f"Saved referenced resource {ref}"}) + "\n" - except IOError as e: - yield json.dumps({"type": "warning", "message": f"Failed to save file for referenced {ref}: {e}"}) + "\n" - logger.error(f"Failed to write file {output_file}: {e}") - retrieved_references_or_types.add(ref) - except requests.RequestException as e: - yield json.dumps({"type": "warning", "message": f"Network error fetching referenced {ref}: {str(e)}"}) + "\n" - logger.warning(f"Network error retrieving referenced {ref} via proxy: {e}") - retrieved_references_or_types.add(ref) - except Exception as e: - yield json.dumps({"type": "warning", "message": f"Unexpected error fetching referenced {ref}: {str(e)}"}) + "\n" - logger.warning(f"Unexpected error during reference fetch for {ref}: {e}", exc_info=True) - retrieved_references_or_types.add(ref) - - # Create Final ZIP File - yield json.dumps({"type": "progress", "message": f"Creating ZIP file {os.path.basename(output_zip)}..."}) + "\n" - files_to_zip = [f for f in os.listdir(temp_dir) if f.endswith('.json')] - if not files_to_zip: - yield json.dumps({"type": "warning", "message": "No bundle files were successfully retrieved to include in ZIP."}) + "\n" - logger.warning(f"No JSON files found in {temp_dir} to include in ZIP.") - else: - 
logger.debug(f"Found {len(files_to_zip)} JSON files to include in ZIP: {files_to_zip}") - try: - with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf: - for filename in files_to_zip: - file_path = os.path.join(temp_dir, filename) - if os.path.exists(file_path): - zipf.write(file_path, filename) - else: - logger.error(f"File {file_path} disappeared before adding to ZIP.") - yield json.dumps({"type": "success", "message": f"ZIP file created: {os.path.basename(output_zip)} with {len(files_to_zip)} files."}) + "\n" - except Exception as e: - yield json.dumps({"type": "error", "message": f"Failed to create ZIP file: {e}"}) + "\n" - logger.error(f"Error creating ZIP file {output_zip}: {e}", exc_info=True) - - # Final Completion Message - completion_message = ( - f"Bundle retrieval finished. Initial bundles: {total_initial_bundles}, " - f"Referenced items fetched: {fetched_individual_references if not fetch_reference_bundles else fetched_type_bundles} " - f"({'individual resources' if not fetch_reference_bundles else 'full type bundles'})" - ) - yield json.dumps({ - "type": "complete", - "message": completion_message, - "data": { - "total_initial_bundles": total_initial_bundles, - "fetched_individual_references": fetched_individual_references, - "fetched_type_bundles": fetched_type_bundles, - "reference_mode": "individual" if validate_references and not fetch_reference_bundles else "type_bundle" if validate_references and fetch_reference_bundles else "off" - } - }) + "\n" - - except Exception as e: - yield json.dumps({"type": "error", "message": f"Critical error during retrieval setup: {str(e)}"}) + "\n" - logger.error(f"Unexpected error in retrieve_bundles setup: {e}", exc_info=True) - yield json.dumps({"type": "complete", "message": f"Retrieval failed: {str(e)}", "data": {"total_initial_bundles": 0, "fetched_individual_references": 0, "fetched_type_bundles": 0}}) + "\n" - finally: - if temp_dir and os.path.exists(temp_dir): - try: - shutil.rmtree(temp_dir) - logger.debug(f"Successfully removed temporary directory: {temp_dir}") - except Exception as cleanup_e: - logger.error(f"Error removing temporary directory {temp_dir}: {cleanup_e}", exc_info=True) -# --- End corrected retrieve_bundles function --- - -def split_bundles(input_zip_path, output_zip): - """Split FHIR bundles from a ZIP file into individual resource JSON files and save to a ZIP.""" - try: - total_resources = 0 - temp_dir = tempfile.mkdtemp() - yield json.dumps({"type": "progress", "message": f"Starting bundle splitting from ZIP"}) + "\n" - - # Extract input ZIP - with zipfile.ZipFile(input_zip_path, 'r') as zip_ref: - zip_ref.extractall(temp_dir) - yield json.dumps({"type": "progress", "message": f"Extracted input ZIP to temporary directory"}) + "\n" - - # Process JSON files - for filename in os.listdir(temp_dir): - if not filename.endswith('.json'): - continue - input_file = os.path.join(temp_dir, filename) - try: - with open(input_file, 'r', encoding='utf-8') as f: - bundle = json.load(f) - if bundle.get('resourceType') != 'Bundle': - yield json.dumps({"type": "error", "message": f"Skipping {filename}: Not a Bundle"}) + "\n" - continue - yield json.dumps({"type": "progress", "message": f"Processing bundle {filename}"}) + "\n" - index = 1 - for entry in bundle.get('entry', []): - resource = entry.get('resource') - if not resource or not resource.get('resourceType'): - yield json.dumps({"type": "error", "message": f"Invalid resource in {filename} at entry {index}"}) + "\n" - continue - resource_type = 
resource['resourceType'] - output_file = os.path.join(temp_dir, f"{resource_type}-{index}.json") - with open(output_file, 'w', encoding='utf-8') as f: - json.dump(resource, f, indent=2) - total_resources += 1 - yield json.dumps({"type": "success", "message": f"Saved {resource_type}-{index}.json"}) + "\n" - index += 1 - except Exception as e: - yield json.dumps({"type": "error", "message": f"Error processing {filename}: {str(e)}"}) + "\n" - logger.error(f"Error splitting bundle {filename}: {e}", exc_info=True) - - # Create output ZIP - with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf: - for filename in os.listdir(temp_dir): - if filename.endswith('.json') and '-' in filename: - zipf.write(os.path.join(temp_dir, filename), filename) - yield json.dumps({ - "type": "complete", - "message": f"Bundle splitting completed. Extracted {total_resources} resources.", - "data": {"total_resources": total_resources} - }) + "\n" - except Exception as e: - yield json.dumps({"type": "error", "message": f"Unexpected error during splitting: {str(e)}"}) + "\n" - logger.error(f"Unexpected error in split_bundles: {e}", exc_info=True) - finally: - if os.path.exists(temp_dir): - for filename in os.listdir(temp_dir): - os.remove(os.path.join(temp_dir, filename)) - os.rmdir(temp_dir) - - -# --- Standalone Test --- -if __name__ == '__main__': - logger.info("Running services.py directly for testing.") - class MockFlask: - class Config(dict): - pass - config = Config() - instance_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'instance')) - mock_app = MockFlask() - test_download_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'instance', DOWNLOAD_DIR_NAME)) - mock_app.config['FHIR_PACKAGES_DIR'] = test_download_dir - os.makedirs(test_download_dir, exist_ok=True) - logger.info(f"Using test download directory: {test_download_dir}") - print("\n--- Testing Filename Parsing ---") - test_files = [ - "hl7.fhir.r4.core-4.0.1.tgz", - "hl7.fhir.us.core-6.1.0.tgz", - "fhir.myig.patient-1.2.3-beta.tgz", - "my.company.fhir.Terminologies-0.1.0.tgz", - "package-with-hyphens-in-name-1.0.tgz", - "noversion.tgz", - "badformat-1.0", - "hl7.fhir.au.core-1.1.0-preview.tgz", - ] - for tf in test_files: - p_name, p_ver = parse_package_filename(tf) - print(f"'{tf}' -> Name: '{p_name}', Version: '{p_ver}'") - pkg_name_to_test = "hl7.fhir.au.core" - pkg_version_to_test = "1.1.0-preview" - print(f"\n--- Testing Import: {pkg_name_to_test}#{pkg_version_to_test} ---") - import_results = import_package_and_dependencies(pkg_name_to_test, pkg_version_to_test, dependency_mode='recursive') - print("\nImport Results:") - print(f" Requested: {import_results['requested']}") - print(f" Downloaded Count: {len(import_results['downloaded'])}") - print(f" Unique Dependencies Found: {len(import_results['dependencies'])}") - print(f" Errors: {len(import_results['errors'])}") - for error in import_results['errors']: - print(f" - {error}") - if (pkg_name_to_test, pkg_version_to_test) in import_results['downloaded']: - test_tgz_path = import_results['downloaded'][(pkg_name_to_test, pkg_version_to_test)] - print(f"\n--- Testing Processing: {test_tgz_path} ---") - processing_results = process_package_file(test_tgz_path) - print("\nProcessing Results:") - print(f" Resource Types Info Count: {len(processing_results.get('resource_types_info', []))}") - print(f" Profiles with MS Elements: {sum(1 for r in processing_results.get('resource_types_info', []) if r.get('must_support'))}") - print(f" Optional Extensions 
w/ MS: {sum(1 for r in processing_results.get('resource_types_info', []) if r.get('optional_usage'))}") - print(f" Must Support Elements Dict Count: {len(processing_results.get('must_support_elements', {}))}") - print(f" Examples Dict Count: {len(processing_results.get('examples', {}))}") - print(f" Complies With Profiles: {processing_results.get('complies_with_profiles', [])}") - print(f" Imposed Profiles: {processing_results.get('imposed_profiles', [])}") - print(f" Processing Errors: {processing_results.get('errors', [])}") - else: - print(f"\n--- Skipping Processing Test (Import failed for {pkg_name_to_test}#{pkg_version_to_test}) ---") - diff --git a/setup_linux.sh b/setup_linux.sh deleted file mode 100644 index 2c05ae7..0000000 --- a/setup_linux.sh +++ /dev/null @@ -1,233 +0,0 @@ -#!/bin/bash - -# --- Configuration --- -REPO_URL="https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git" -CLONE_DIR="hapi-fhir-jpaserver" -SOURCE_CONFIG_DIR="hapi-fhir-Setup" # Assuming this is relative to the script's parent -CONFIG_FILE="application.yaml" - -# --- Define Paths --- -# Note: Adjust SOURCE_CONFIG_PATH if SOURCE_CONFIG_DIR is not a sibling directory -# This assumes the script is run from a directory, and hapi-fhir-setup is at the same level -SOURCE_CONFIG_PATH="../${SOURCE_CONFIG_DIR}/target/classes/${CONFIG_FILE}" -DEST_CONFIG_PATH="${CLONE_DIR}/target/classes/${CONFIG_FILE}" - -APP_MODE="" - -# --- Error Handling Function --- -handle_error() { - echo "------------------------------------" - echo "An error occurred: $1" - echo "Script aborted." - echo "------------------------------------" - # Removed 'read -p "Press Enter to exit..."' as it's not typical for non-interactive CI/CD - exit 1 -} - -# === Prompt for Installation Mode === -get_mode_choice() { - echo "Select Installation Mode:" - echo "1. Standalone (Includes local HAPI FHIR Server - Requires Git & Maven)" - echo "2. Lite (Excludes local HAPI FHIR Server - No Git/Maven needed)" - - while true; do - read -r -p "Enter your choice (1 or 2): " choice - case "$choice" in - 1) - APP_MODE="standalone" - break - ;; - 2) - APP_MODE="lite" - break - ;; - *) - echo "Invalid input. Please try again." - ;; - esac - done - echo "Selected Mode: $APP_MODE" - echo -} - -# Call the function to get mode choice -get_mode_choice - -# === Conditionally Execute HAPI Setup === -if [ "$APP_MODE" = "standalone" ]; then - echo "Running Standalone setup including HAPI FHIR..." - echo - - # --- Step 0: Clean up previous clone (optional) --- - echo "Checking for existing directory: $CLONE_DIR" - if [ -d "$CLONE_DIR" ]; then - echo "Found existing directory, removing it..." - rm -rf "$CLONE_DIR" - if [ $? -ne 0 ]; then - handle_error "Failed to remove existing directory: $CLONE_DIR" - fi - echo "Existing directory removed." - else - echo "Directory does not exist, proceeding with clone." - fi - echo - - # --- Step 1: Clone the HAPI FHIR server repository --- - echo "Cloning repository: $REPO_URL into $CLONE_DIR..." - git clone "$REPO_URL" "$CLONE_DIR" - if [ $? -ne 0 ]; then - handle_error "Failed to clone repository. Check Git installation and network connection." - fi - echo "Repository cloned successfully." - echo - - # --- Step 2: Navigate into the cloned directory --- - echo "Changing directory to $CLONE_DIR..." - cd "$CLONE_DIR" || handle_error "Failed to change directory to $CLONE_DIR." - echo "Current directory: $(pwd)" - echo - - # --- Step 3: Build the HAPI server using Maven --- - echo "===> Starting Maven build (Step 3)..." 
- mvn clean package -DskipTests=true -Pboot - if [ $? -ne 0 ]; then - echo "ERROR: Maven build failed." - cd .. - handle_error "Maven build process resulted in an error." - fi - echo "Maven build completed successfully." - echo - - # --- Step 4: Copy the configuration file --- - echo "===> Starting file copy (Step 4)..." - echo "Copying configuration file..." - # Corrected SOURCE_CONFIG_PATH to be relative to the new current directory ($CLONE_DIR) - # This assumes the original script's SOURCE_CONFIG_PATH was relative to its execution location - # If SOURCE_CONFIG_DIR is ../hapi-fhir-setup relative to script's original location: - # Then from within CLONE_DIR, it becomes ../../hapi-fhir-setup - # We defined SOURCE_CONFIG_PATH earlier relative to the script start. - # So, when inside CLONE_DIR, the path from original script location should be used. - # The original script had: set SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE% - # And then: xcopy "%SOURCE_CONFIG_PATH%" "target\classes\" - # This implies SOURCE_CONFIG_PATH is relative to the original script's location, not the $CLONE_DIR - # Therefore, we need to construct the correct relative path from *within* $CLONE_DIR back to the source. - # Assuming the script is in dir X, and SOURCE_CONFIG_DIR is ../hapi-fhir-setup from X. - # So, hapi-fhir-setup is a sibling of X's parent. - # If CLONE_DIR is also in X, then from within CLONE_DIR, the path is ../ + original SOURCE_CONFIG_PATH - # For simplicity and robustness, let's use an absolute path or a more clearly defined relative path from the start. - # The original `SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%` implies - # that `hapi-fhir-setup` is a sibling of the directory where the script *is being run from*. - - # Let's assume the script is run from the root of FHIRFLARE-IG-Toolkit. - # And hapi-fhir-setup is also in the root, next to this script. - # Then SOURCE_CONFIG_PATH would be ./hapi-fhir-setup/target/classes/application.yaml - # And from within ./hapi-fhir-jpaserver/, the path would be ../hapi-fhir-setup/target/classes/application.yaml - - # The original batch file sets SOURCE_CONFIG_PATH as "..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%" - # And COPIES it to "target\classes\" *while inside CLONE_DIR*. - # This means the source path is relative to where the *cd %CLONE_DIR%* happened from. - # Let's make it relative to the script's initial execution directory. - INITIAL_SCRIPT_DIR=$(pwd) - ABSOLUTE_SOURCE_CONFIG_PATH="${INITIAL_SCRIPT_DIR}/../${SOURCE_CONFIG_DIR}/target/classes/${CONFIG_FILE}" # This matches the ..\ logic - - echo "Source: $ABSOLUTE_SOURCE_CONFIG_PATH" - echo "Destination: target/classes/$CONFIG_FILE" - - if [ ! -f "$ABSOLUTE_SOURCE_CONFIG_PATH" ]; then - echo "WARNING: Source configuration file not found at $ABSOLUTE_SOURCE_CONFIG_PATH." - echo "The script will continue, but the server might use default configuration." - else - cp "$ABSOLUTE_SOURCE_CONFIG_PATH" "target/classes/" - if [ $? -ne 0 ]; then - echo "WARNING: Failed to copy configuration file. Check if the source file exists and permissions." - echo "The script will continue, but the server might use default configuration." - else - echo "Configuration file copied successfully." - fi - fi - echo - - # --- Step 5: Navigate back to the parent directory --- - echo "===> Changing directory back (Step 5)..." - cd .. || handle_error "Failed to change back to the parent directory." 
- echo "Current directory: $(pwd)" - echo - -else # APP_MODE is "lite" - echo "Running Lite setup, skipping HAPI FHIR build..." - # Ensure the hapi-fhir-jpaserver directory doesn't exist or is empty if Lite mode is chosen - if [ -d "$CLONE_DIR" ]; then - echo "Found existing HAPI directory ($CLONE_DIR) in Lite mode. Removing it..." - rm -rf "$CLONE_DIR" - fi - # Create empty target directories expected by Dockerfile COPY, even if not used - mkdir -p "${CLONE_DIR}/target/classes" - mkdir -p "${CLONE_DIR}/custom" # This was in the original batch, ensure it's here - # Create a placeholder empty WAR file and application.yaml to satisfy Dockerfile COPY - touch "${CLONE_DIR}/target/ROOT.war" - touch "${CLONE_DIR}/target/classes/application.yaml" - echo "Placeholder files and directories created for Lite mode build in $CLONE_DIR." - echo -fi - -# === Modify docker-compose.yml to set APP_MODE === -echo "Updating docker-compose.yml with APP_MODE=$APP_MODE..." -DOCKER_COMPOSE_TMP="docker-compose.yml.tmp" -DOCKER_COMPOSE_ORIG="docker-compose.yml" - -cat << EOF > "$DOCKER_COMPOSE_TMP" -version: '3.8' -services: - fhirflare: - build: - context: . - dockerfile: Dockerfile - ports: - - "5000:5000" - - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite - volumes: - - ./instance:/app/instance - - ./static/uploads:/app/static/uploads - - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent - - ./logs:/app/logs - environment: - - FLASK_APP=app.py - - FLASK_ENV=development - - NODE_PATH=/usr/lib/node_modules - - APP_MODE=${APP_MODE} - - APP_BASE_URL=http://localhost:5000 - - HAPI_FHIR_URL=http://localhost:8080/fhir - command: supervisord -c /etc/supervisord.conf -EOF - -if [ ! -f "$DOCKER_COMPOSE_TMP" ]; then - handle_error "Failed to create temporary docker-compose file ($DOCKER_COMPOSE_TMP)." -fi - -# Replace the original docker-compose.yml -mv "$DOCKER_COMPOSE_TMP" "$DOCKER_COMPOSE_ORIG" -echo "docker-compose.yml updated successfully." -echo - -# --- Step 6: Build Docker images --- -echo "===> Starting Docker build (Step 6)..." -docker-compose build --no-cache -if [ $? -ne 0 ]; then - handle_error "Docker Compose build failed. Check Docker installation and docker-compose.yml file." -fi -echo "Docker images built successfully." -echo - -# --- Step 7: Start Docker containers --- -echo "===> Starting Docker containers (Step 7)..." -docker-compose up -d -if [ $? -ne 0 ]; then - handle_error "Docker Compose up failed. Check Docker installation and container configurations." -fi -echo "Docker containers started successfully." -echo - -echo "====================================" -echo "Script finished successfully! 
(Mode: $APP_MODE)" -echo "====================================" -exit 0 diff --git a/supervisord.conf b/supervisord.conf deleted file mode 100644 index cdf08db..0000000 --- a/supervisord.conf +++ /dev/null @@ -1,36 +0,0 @@ -[supervisord] -nodaemon=true -logfile=/app/logs/supervisord.log -logfile_maxbytes=50MB -logfile_backups=10 -pidfile=/app/logs/supervisord.pid - -[program:flask] -command=/app/venv/bin/python /app/app.py -directory=/app -environment=FLASK_APP="app.py",FLASK_ENV="development",NODE_PATH="/usr/lib/node_modules" -autostart=true -autorestart=true -startsecs=10 -stopwaitsecs=10 -stdout_logfile=/app/logs/flask.log -stdout_logfile_maxbytes=10MB -stdout_logfile_backups=5 -stderr_logfile=/app/logs/flask_err.log -stderr_logfile_maxbytes=10MB -stderr_logfile_backups=5 - -[program:tomcat] -command=/usr/local/tomcat/bin/catalina.sh run -directory=/usr/local/tomcat -environment=SPRING_CONFIG_LOCATION="file:/usr/local/tomcat/conf/application.yaml",NODE_PATH="/usr/lib/node_modules" -autostart=false -autorestart=false -startsecs=30 -stopwaitsecs=30 -stdout_logfile=/app/logs/tomcat.log -stdout_logfile_maxbytes=10MB -stdout_logfile_backups=5 -stderr_logfile=/app/logs/tomcat_err.log -stderr_logfile_maxbytes=10MB -stderr_logfile_backups=5 \ No newline at end of file diff --git a/templates/_flash_messages.html b/templates/_flash_messages.html deleted file mode 100644 index c1f0288..0000000 --- a/templates/_flash_messages.html +++ /dev/null @@ -1,24 +0,0 @@ -{# templates/_flash_messages.html #} - -{# Check if there are any flashed messages #} -{% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} - {# Loop through messages and display them as Bootstrap alerts #} - {% for category, message in messages %} - {# Map Flask message categories (e.g., 'error', 'success', 'warning') to Bootstrap alert classes #} - {% set alert_class = 'alert-info' %} {# Default class #} - {% if category == 'error' or category == 'danger' %} - {% set alert_class = 'alert-danger' %} - {% elif category == 'success' %} - {% set alert_class = 'alert-success' %} - {% elif category == 'warning' %} - {% set alert_class = 'alert-warning' %} - {% endif %} - - - {% endfor %} - {% endif %} -{% endwith %} diff --git a/templates/_form_helpers.html b/templates/_form_helpers.html deleted file mode 100644 index f3534a4..0000000 --- a/templates/_form_helpers.html +++ /dev/null @@ -1,42 +0,0 @@ -{# app/templates/_form_helpers.html #} -{% macro render_field(field, label_visible=true) %} -
- {% if field.type == "BooleanField" %} -
- {{ field(class="form-check-input" + (" is-invalid" if field.errors else ""), **kwargs) }} - {% if label_visible and field.label %} - - {% endif %} - {% if field.description %} - {{ field.description }} - {% endif %} - {% if field.errors %} -
- {% for error in field.errors %} - {{ error }}
- {% endfor %} -
- {% endif %} -
- {% else %} - {% if label_visible and field.label %} - {{ field.label(class="form-label") }} - {% endif %} - {% set css_class = 'form-control ' + kwargs.pop('class', '') %} - {% if field.errors %} - {% set css_class = css_class + ' is-invalid' %} - {% endif %} - {{ field(class=css_class, **kwargs) }} - {% if field.description %} - {{ field.description }} - {% endif %} - {% if field.errors %} -
- {% for error in field.errors %} - {{ error }}
- {% endfor %} -
- {% endif %} - {% endif %} -
-{% endmacro %} \ No newline at end of file diff --git a/templates/_fsh_output.html b/templates/_fsh_output.html deleted file mode 100644 index a3927fe..0000000 --- a/templates/_fsh_output.html +++ /dev/null @@ -1,60 +0,0 @@ -{% from "_form_helpers.html" import render_field %} -
-
-
-
-
- {{ form.hidden_tag() }} - {{ render_field(form.package) }} - {{ render_field(form.input_mode) }} - - - {{ render_field(form.output_style) }} - {{ render_field(form.log_level) }} - {{ render_field(form.fhir_version) }} - {{ render_field(form.fishing_trip) }} - {{ render_field(form.dependencies, placeholder="One per line, e.g., hl7.fhir.us.core@6.1.0") }} - {{ render_field(form.indent_rules) }} - {{ render_field(form.meta_profile) }} - {{ render_field(form.alias_file) }} - {{ render_field(form.no_alias) }} -
- {{ form.submit(class="btn btn-success", id="submit-btn") }} - Back -
-
-
-
-
-
-{% if error %} -
{{ error }}
-{% endif %} -{% if fsh_output %} -
Conversion successful!
-

FSH Output

-
{{ fsh_output }}
-Download FSH -{% if comparison_report %} -

Fishing Trip Comparison Report

-Click here for SUSHI Validation -
-
- {% if comparison_report.differences %} -

Differences found in round-trip validation:

-
    - {% for diff in comparison_report.differences %} -
  • {{ diff.path }}: {{ diff.description }}
  • - {% endfor %} -
- {% else %} -

No differences found in round-trip validation.

- {% endif %} -
-
-{% endif %} -{% endif %} \ No newline at end of file diff --git a/templates/_search_results_table.html b/templates/_search_results_table.html deleted file mode 100644 index 91dbe6d..0000000 --- a/templates/_search_results_table.html +++ /dev/null @@ -1,113 +0,0 @@ -{# templates/_search_results_table.html #} -{# This partial template renders the search results table and pagination #} - -{% if packages %} - - - - - - - - - - {# Add Dependencies header if needed based on your data #} - {# #} - - - - {% for pkg in packages %} - - - - - - - - {# Add Dependencies data if needed #} - {# #} - - {% endfor %} - -
PackageLatestAuthorFHIRVersionsDependencies
- - {# Link to package details page - adjust if endpoint name is different #} - - {{ pkg.name }} - - {{ pkg.display_version }}{{ pkg.author or '' }}{{ pkg.fhir_version or '' }}{{ pkg.version_count }}{{ pkg.dependencies | join(', ') if pkg.dependencies else '' }}
- - {# Pagination Controls - Ensure 'pagination' object is passed from the route #} - {% if pagination and pagination.pages > 1 %} - - {% endif %} {# End pagination nav #} - -{% elif request and request.args.get('search') %} - {# Message when search term is present but no results found #} -

No packages found matching your search term.

-{% else %} - {# Initial message before any search #} -

Start typing in the search box above to find packages.

-{% endif %} diff --git a/templates/about.html b/templates/about.html deleted file mode 100644 index 8eb644f..0000000 --- a/templates/about.html +++ /dev/null @@ -1,66 +0,0 @@ -{% extends "base.html" %} - -{% block content %} -
- -

About FHIRFLARE IG Toolkit{% if app_mode == 'lite' %} (Lite Version){% endif %}

-
-

- A comprehensive toolkit designed for developers and implementers working with FHIR® Implementation Guides (IGs). -

-
-
- -
-
-
-

Overview

-

The FHIRFLARE IG Toolkit is a web application built to simplify the lifecycle of managing, processing, validating, and deploying FHIR Implementation Guides. It provides a central, user-friendly interface to handle common tasks associated with FHIR IGs, streamlining workflows and improving productivity.

-

Whether you're downloading the latest IG versions, checking compliance, converting resources to FHIR Shorthand (FSH), or pushing guides to a test server, this toolkit aims to be an essential companion.

- {% if app_mode == 'lite' %} - - {% else %} - - {% endif %} - - -

Core Features

-
    -
  • IG Package Management: Import FHIR IG packages directly from the registry using various version formats (e.g., `1.1.0-preview`, `current`), with flexible dependency handling (Recursive, Patch Canonical, Tree Shaking). View, process, unload, or delete downloaded packages, with detection of duplicate dependencies.
  • -
  • IG Processing & Viewing: Extract and display key information from processed IGs, including defined profiles, referenced resource types, must-support elements, and examples. Visualize profile relationships like `compliesWithProfile` and `imposeProfile`.
  • -
  • FHIR Validation: Validate individual FHIR resources or entire Bundles against the profiles defined within a selected IG. Provides detailed error and warning feedback. (Note: Validation accuracy is still under development, especially for complex constraints).
  • -
  • FHIR Server Interaction: -
      -
    • Push processed IGs (including dependencies) to a target FHIR server with real-time console feedback.
    • -
    • Explore FHIR server capabilities and interact with resources using the "FHIR API Explorer" (GET/POST/PUT/DELETE) and "FHIR UI Operations" pages, supporting both the local HAPI server (in Standalone mode) and external servers.
    • -
    -
  • -
  • FHIR Shorthand (FSH) Conversion: Convert FHIR JSON or XML resources to FSH using the integrated GoFSH tool. Offers advanced options like context package selection, various output styles, FHIR version selection, dependency loading, alias file usage, and round-trip validation ("Fishing Trip") with SUSHI. Includes a loading indicator during conversion.
  • -
  • API Support: Provides basic API endpoints for programmatic import and push operations.
  • -
- -

Technology

-

The toolkit leverages a combination of technologies:

-
    -
  • Backend: Python with the Flask web framework and SQLAlchemy for database interaction (SQLite).
  • -
  • Frontend: HTML, Bootstrap 5 for styling, and JavaScript for interactivity and dynamic content loading. Uses Lottie-Web for animations.
  • -
  • FHIR Tooling: Integrates GoFSH and SUSHI (via Node.js) for FSH conversion and validation. Utilizes the HAPI FHIR server (in Standalone mode) for robust FHIR validation and operations.
  • -
  • Deployment: Runs within a Docker container managed by Docker Compose and Supervisor, ensuring a consistent environment.
  • -
- -

Get Involved

-

This is an open-source project. Contributions, feedback, and bug reports are welcome!

- - -
-
-
-{% endblock %} \ No newline at end of file diff --git a/templates/base.html b/templates/base.html deleted file mode 100644 index 4d1d846..0000000 --- a/templates/base.html +++ /dev/null @@ -1,905 +0,0 @@ - - - - - - - - - - - - - - {% if app_mode == 'lite' %}(Lite Version) {% endif %}{% if title %}{{ title }} - {% endif %}{{ site_name }} - - - - - -
-
- {% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} -
- {% for category, message in messages %} - - {% endfor %} -
- {% endif %} - {% endwith %} - {% block content %}{% endblock %} -
-
- - - - - - - - {% block scripts %}{% endblock %} - - diff --git a/templates/config_hapi.html b/templates/config_hapi.html deleted file mode 100644 index 2a3e6c5..0000000 --- a/templates/config_hapi.html +++ /dev/null @@ -1,234 +0,0 @@ -{% extends "base.html" %} -{% block content %} -
-

HAPI FHIR Configuration Manager

- {% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} - {% for category, message in messages %} - - {% endfor %} - {% endif %} - {% endwith %} -
-

Loading configuration...

-
- Loading... -
-
- -
-
- - - -
-
-
-
- - -{% endblock %} \ No newline at end of file diff --git a/templates/cp_downloaded_igs.html b/templates/cp_downloaded_igs.html deleted file mode 100644 index 18e8e18..0000000 --- a/templates/cp_downloaded_igs.html +++ /dev/null @@ -1,168 +0,0 @@ -{% extends "base.html" %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

Manage & Process FHIR Packages

-
-

- This is the starting point for your journey through the IGs -

- -
-
- -
- - -
-

Manage FHIR Packages

- -
- -
- -
-
-
Downloaded Packages ({{ packages|length }})
-
- {% if packages %} -
-

Risk: = Duplicate Dependencies

- - - - - - {% for pkg in packages %} - {% set is_processed = (pkg.name, pkg.version) in processed_ids %} - {% set is_duplicate = pkg.name in duplicate_groups %} - {% set group_color = group_colors[pkg.name] if (is_duplicate and pkg.name in group_colors) else 'bg-warning' if is_duplicate else '' %} - - - - - - {% endfor %} - -
Package NameVersionActions
- {{ pkg.name }} - {% if is_duplicate %} - Duplicate - {% endif %} - {{ pkg.version }} -
- {% if is_processed %} - Processed - {% else %} -
- {{ form.csrf_token }} - - -
- {% endif %} -
- {{ form.csrf_token }} - - -
-
-
-
- {% if duplicate_groups %} -

Duplicate dependencies detected: - {% for name, versions in duplicate_groups.items() %} - {% set group_color = group_colors[name] if name in group_colors else 'bg-warning' %} - {{ name }} ({{ versions|join(', ') }}) - {% endfor %} -

- {% else %} -

No duplicates detected.

- {% endif %} - {% else %} -

No downloaded FHIR packages found.

- {% endif %} -
-
-
- - -
-
-
Processed Packages ({{ processed_list|length }})
-
- {% if processed_list %} -

- MS = Contains Must Support Elements
- Optional MS Ext = Optional Extension with Must Support Sub-Elements -

-

Resource types in the list include both profiles and base resource types:

-
- - - - - - {% for processed_ig in processed_list %} - - - - - - - {% endfor %} - -
Package NameVersionResource TypesActions
{{ processed_ig.package_name }}{{ processed_ig.version }} - {% set types_info = processed_ig.resource_types_info %} - {% if types_info %} -
- {% for type_info in types_info %} - {{ type_info.name }} - {% endfor %} -
- {% else %} - N/A - {% endif %} -
-
- View -
- {{ form.csrf_token }} - - -
-
-
-
- {% else %} -

No packages recorded as processed yet.

- {% endif %} -
-
-
-
-
-{% endblock %} - -{% block scripts %} -{{ super() }} -{% endblock %} \ No newline at end of file diff --git a/templates/cp_push_igs.html b/templates/cp_push_igs.html deleted file mode 100644 index 9af92bf..0000000 --- a/templates/cp_push_igs.html +++ /dev/null @@ -1,501 +0,0 @@ -{% extends "base.html" %} - -{# Import form helpers for CSRF token and field rendering #} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
-
- {# Left Column: Downloaded IGs List & Report Area #} -
-

Downloaded IGs

- {% if packages %} -
- - - - - - - - - {% for pkg in packages %} - {% set name = pkg.name %} - {% set version = pkg.version %} - {% set duplicate_group = (duplicate_groups or {}).get(name) %} - {% set color_class = group_colors[name] if (duplicate_group and group_colors and name in group_colors) else '' %} - - - - - {% endfor %} - -
Package NameVersion
{{ name }}{{ version }}
-
- {% if duplicate_groups %} - - {% endif %} - {% else %} -

No packages downloaded yet. Use the "Import IG" tab.

- {% endif %} - - {# Push Response Area #} -
-
-

Push Report

- -
-
- Report summary will appear here after pushing... - {% with messages = get_flashed_messages(with_categories=true) %} - {% if messages %} - {% for category, message in messages %} - - {% endfor %} - {% endif %} - {% endwith %} -
-
-
{# End Left Column #} - - {# Right Column: Push IGs Form and Console #} -
-

Push IGs to FHIR Server

-
- {{ form.csrf_token if form else '' }} - - {# Package Selection #} -
- - -
- - {# Dependency Mode Display #} -
- - -
- - {# FHIR Server URL #} -
- - -
- - {# Authentication Section #} -
-
- - -
- -
- - {# Checkboxes Row #} -
-
-
- - -
-
-
-
- - - Force upload all resources. -
-
-
-
- - - Simulate only. -
-
-
-
- - - Show detailed log. -
-
-
- - {# Resource Type Filter #} -
- - -
- - {# Skip Files Filter #} -
- - -
- - -
- - {# Live Console #} -
-

Live Console

-
- Console output will appear here... -
-
-
{# End Right Column #} -
{# End row #} -
{# End container-fluid #} - - -{% endblock %} \ No newline at end of file diff --git a/templates/cp_view_processed_ig.html b/templates/cp_view_processed_ig.html deleted file mode 100644 index b5c8faf..0000000 --- a/templates/cp_view_processed_ig.html +++ /dev/null @@ -1,1338 +0,0 @@ -{% extends "base.html" %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

{{ title }}

-
-

- View details of the processed FHIR Implementation Guide. -

-
-
- -
-
-

{{ title }}

- Back to Package List -
- - {% if processed_ig %} -
-
Package Details
-
-
-
Package Name
-
{{ processed_ig.package_name }}
-
Package Version
-
{{ processed_ig.version }}
-
Processed At
-
{{ processed_ig.processed_date.strftime('%Y-%m-%d %H:%M:%S UTC') }}
-
-
-
- - {% if config.DISPLAY_PROFILE_RELATIONSHIPS %} -
-
Profile Relationships
-
-
Complies With
- {% if complies_with_profiles %} -
    - {% for profile in complies_with_profiles %} -
  • {{ profile }}
  • - {% endfor %} -
- {% else %} -

No profiles declared as compatible.

- {% endif %} - -
Required Dependent Profiles (Must Also Validate Against)
- {% if imposed_profiles %} -
    - {% for profile in imposed_profiles %} -
  • {{ profile }}
  • - {% endfor %} -
- {% else %} -

No imposed profiles.

- {% endif %} -
-
- {% endif %} - -
-
Resource Types Found / Defined
-
- {% if profile_list or base_list %} -

- - MS = Contains Must Support Elements
- Optional MS Ext = Optional Extension with Must Support Sub-Elements -
-

- {% if profile_list %} -

Examples = Examples will be displayed when selecting profile types, if contained in the IG

-
Profiles Defined ({{ profile_list|length }}):
- - {% else %} -

No profiles defined.

- {% endif %} - {% if base_list %} -
Base Resource Types Referenced ({{ base_list|length }}):
- - {% else %} -

No base resource types referenced.

- {% endif %} - {% else %} -

No resource type information extracted or stored.

- {% endif %} -
-
- - - - - - {% else %} - - {% endif %} -
- - - - - - - - -{% endblock %} \ No newline at end of file diff --git a/templates/fhir_ui.html b/templates/fhir_ui.html deleted file mode 100644 index bd33b58..0000000 --- a/templates/fhir_ui.html +++ /dev/null @@ -1,473 +0,0 @@ -{% extends "base.html" %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

FHIR API Explorer

-
-

- Interact with FHIR servers using GET, POST, PUT, or DELETE requests. Toggle between the local HAPI server and a custom server to explore resources or perform searches. -
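For orientation, a minimal sketch of the kind of request this page issues, using Python's requests library against the local HAPI base URL mentioned in the help text below; the resource path and parameters are illustrative only.

```python
import requests

# Local HAPI base URL as referenced by this page's help text; a custom
# server URL can be substituted when the toggle is switched off.
FHIR_BASE = "http://localhost:8080/fhir"

# Simple GET search: fetch up to five Patient resources as a searchset Bundle.
response = requests.get(
    f"{FHIR_BASE}/Patient",
    params={"_count": 5},
    headers={"Accept": "application/fhir+json"},
    timeout=30,
)
response.raise_for_status()
bundle = response.json()
print(bundle.get("resourceType"), len(bundle.get("entry", [])))
```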

-
-
- -
-
-
Send FHIR Request
-
-
- {{ form.hidden_tag() }} -
- -
- - -
- Toggle to use local HAPI (http://localhost:8080/fhir) or enter a custom FHIR server URL. -
- -
- - - Enter a resource path (e.g., Patient, Observation/example) or '_search' for search queries. -
-
- -
- - - - - - - - -
-
- - -
-
-
- - -
- - -{% endblock %} \ No newline at end of file diff --git a/templates/fhir_ui_operations.html b/templates/fhir_ui_operations.html deleted file mode 100644 index c8d1425..0000000 --- a/templates/fhir_ui_operations.html +++ /dev/null @@ -1,1957 +0,0 @@ -{% extends "base.html" %} - -{% block content %} - - -
- FHIRFLARE IG Toolkit -

FHIR UI Operations

-
-

- Explore FHIR server operations by selecting resource types or system operations. Toggle between the local HAPI server and a custom server to interact with FHIR metadata, resources, and server-wide operations. -
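As a companion sketch, fetching the server's CapabilityStatement (the metadata this page builds its operation list from); it assumes the same local HAPI base URL used elsewhere in this repository.

```python
import requests

# Assumed local HAPI base URL; substitute a custom server if needed.
FHIR_BASE = "http://localhost:8080/fhir"

capability = requests.get(
    f"{FHIR_BASE}/metadata",
    headers={"Accept": "application/fhir+json"},
    timeout=30,
).json()

# List the resource types and interactions the server advertises.
for rest in capability.get("rest", []):
    for resource in rest.get("resource", []):
        interactions = [i.get("code") for i in resource.get("interaction", [])]
        print(resource.get("type"), interactions)
```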

- -
-
- -
-
-
FHIR Operations Configuration
-
-
- {{ form.hidden_tag() }} -
- -
- - -
- Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL. -
- - -
- - - - -
-
-
- - -{% endblock %} \ No newline at end of file diff --git a/templates/fsh_converter.html b/templates/fsh_converter.html deleted file mode 100644 index 1e08299..0000000 --- a/templates/fsh_converter.html +++ /dev/null @@ -1,225 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

FSH Converter

-
-

- Convert FHIR JSON or XML resources to FHIR Shorthand (FSH) using GoFSH. -

- -
-
- - -
-
-
-

Please wait, don't leave this page...

-
-
- -
-

Convert FHIR to FSH

-
-
-
-
-
-
- {{ form.hidden_tag() }} - {{ render_field(form.package) }} - {{ render_field(form.input_mode) }} - - - {{ render_field(form.output_style) }} - {{ render_field(form.log_level) }} - {{ render_field(form.fhir_version) }} - {{ render_field(form.fishing_trip) }} - {{ render_field(form.dependencies, placeholder="One per line, e.g., hl7.fhir.us.core@6.1.0") }} - {{ render_field(form.indent_rules) }} - {{ render_field(form.meta_profile) }} - {{ render_field(form.alias_file) }} - {{ render_field(form.no_alias) }} -
- {{ form.submit(class="btn btn-success", id="submit-btn") }} - Back -
-
-
-
-
-
-
-
- - - -{% endblock %} \ No newline at end of file diff --git a/templates/import_ig.html b/templates/import_ig.html deleted file mode 100644 index adc9e4a..0000000 --- a/templates/import_ig.html +++ /dev/null @@ -1,145 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

Import FHIR Implementation Guides

-
-

- Import new FHIR Implementation Guides to the system for viewing. -
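Behind this form, packages are pulled with services.import_package_and_dependencies; its signature and result keys appear in the standalone test in services.py earlier in this diff. A minimal programmatic sketch using the same example package:

```python
# Minimal sketch mirroring the standalone test in services.py; the package
# name, version, and dependency mode are the ones used in that test.
import services

results = services.import_package_and_dependencies(
    "hl7.fhir.au.core", "1.1.0-preview", dependency_mode="recursive"
)
print(f"Downloaded: {len(results['downloaded'])}, "
      f"dependencies: {len(results['dependencies'])}, "
      f"errors: {len(results['errors'])}")
```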

-
-
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-

Importing Implementation Guide... Please wait.

-

-
-
- -
-

Import a New IG

-
-
-
-
-
- {{ form.hidden_tag() }} - {{ render_field(form.package_name) }} - {{ render_field(form.package_version) }} - {{ render_field(form.dependency_mode) }} -
- {{ form.submit(class="btn btn-success", id="submit-btn") }} - Back -
-
-
-
-
-
-
- - -{% endblock %} \ No newline at end of file diff --git a/templates/index.html b/templates/index.html deleted file mode 100644 index 5a6a1ad..0000000 --- a/templates/index.html +++ /dev/null @@ -1,168 +0,0 @@ -{% extends "base.html" %} - -{% block body_class %}fire-animation-page{% endblock %} - -{% block content %} -
-
- FHIRFLARE IG Toolkit -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- - - -
-
- - - - - - - - - - -
-
-
-

Welcome to {{ site_name }}

-
-

Simple tool for importing, viewing, and validating FHIR Implementation Guides.

-

Streamline Your FHIR Workflow

-
-
- -
-
- -
-
-
-
IG Management
-

Import and manage FHIR Implementation Guides.

- -
-
-
- -
-
-
-
Validation & Testing
-

Validate and test FHIR resources.

- -
-
-
- -
-
-
-
API & Tools
-

Explore FHIR APIs and convert resources.

- -
-
-
-
- -
- - - - -{% endblock %} \ No newline at end of file diff --git a/templates/manual_import_ig.html b/templates/manual_import_ig.html deleted file mode 100644 index 466d3b0..0000000 --- a/templates/manual_import_ig.html +++ /dev/null @@ -1,194 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
- {% include "_flash_messages.html" %} - -

Import FHIR Implementation Guides

-
-
-

Import new FHIR Implementation Guides to the system via file or URL.

-
-
- -
-
-

Import a New IG

-
- {{ form.hidden_tag() }} -
- {{ render_field(form.import_mode, class="form-select") }} -
-
-
- -
- - No file selected -
- {% if form.tgz_file.errors %} -
- {% for error in form.tgz_file.errors %} -

{{ error }}

- {% endfor %} -
- {% endif %} -
-
-
-
- {{ render_field(form.tgz_url, class="form-control url-input") }} -
-
-
- {{ render_field(form.dependency_mode, class="form-select") }} -
-
- {{ form.resolve_dependencies(type="checkbox", class="form-check-input") }} - - {% if form.resolve_dependencies.errors %} -
- {% for error in form.resolve_dependencies.errors %} -

{{ error }}

- {% endfor %} -
- {% endif %} -
-
- {{ form.submit(class="btn btn-success", id="submit-btn") }} - Back -
-
-
-
-
- - - - -{% endblock %} \ No newline at end of file diff --git a/templates/package.canonicals.html b/templates/package.canonicals.html deleted file mode 100644 index 5cccc47..0000000 --- a/templates/package.canonicals.html +++ /dev/null @@ -1,7 +0,0 @@ -
    - {% for canonical in canonicals %} -
  • - {{ canonical }} -
  • - {% endfor %} -
\ No newline at end of file diff --git a/templates/package.dependents.html b/templates/package.dependents.html deleted file mode 100644 index b240ce2..0000000 --- a/templates/package.dependents.html +++ /dev/null @@ -1,31 +0,0 @@ -{% if dependents %} - - - - - - - - - - - - - {% for dep in dependents %} - - - - - - - - - {% endfor %} - -
PackageLatestAuthorFHIRVersionsCanonical
- - {{ dep.name }} - {{ dep.version }}{{ dep.author }}{{ dep.fhir_version }}{{ dep.version_count }}{{ dep.canonical }}
-{% else %} -

No dependent packages found for this package.

-{% endif %} \ No newline at end of file diff --git a/templates/package.logs.html b/templates/package.logs.html deleted file mode 100644 index 6cf0354..0000000 --- a/templates/package.logs.html +++ /dev/null @@ -1,22 +0,0 @@ -{% if logs %} - - - - - - - - - - {% for log in logs %} - - - - - - {% endfor %} - -
VersionPublication DateWhen
{{ log.version }}{{ log.pubDate }}{{ log.when }}
-{% else %} -

No version history found for this package.

-{% endif %} \ No newline at end of file diff --git a/templates/package.problems.html b/templates/package.problems.html deleted file mode 100644 index 3714532..0000000 --- a/templates/package.problems.html +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - {% for problem in problems %} - - - - - {% endfor %} - -
PackageDependency
{{ problem.package }}{{ problem.dependency }}
\ No newline at end of file diff --git a/templates/package_details.html b/templates/package_details.html deleted file mode 100644 index 1f6d817..0000000 --- a/templates/package_details.html +++ /dev/null @@ -1,124 +0,0 @@ -{% extends "base.html" %} - -{% block content %} -
-
-
- {# Package Header #} -

{{ package_json.name }} v{{ package_json.version }}

-

- Latest Version: {{ package_json.version }} - {% if latest_official_version and latest_official_version != package_json.version %} - | Latest Official: {{ latest_official_version }} - {% endif %} -

- - {# Install Commands #} -
- -
- {% set registry_base = package_json.registry | default('https://packages.simplifier.net') %} - {% set registry_url = registry_base | replace('/rssfeed', '') %} - - -
-
-
- -
- {% set registry_base = package_json.registry | default('https://packages.simplifier.net') %} - {% set registry_url = registry_base | replace('/rssfeed', '') %} - - -
-
-
- -
- {% set registry_base = package_json.registry | default('https://packages.simplifier.net') %} - {% set registry_url = registry_base | replace('/rssfeed', '') %} - - -
-
- - {# Description #} -
Description
-

{{ package_json.description | default('No description provided.', true) }}

- - {# Dependencies #} -
Dependencies
- {% if dependencies %} - - - - - - - - - {% for dep in dependencies %} - - - - - {% endfor %} - -
PackageVersion
{{ dep.name }}{{ dep.version }}
- {% else %} -

No dependencies found for this package.

- {% endif %} - - {# Dependents #} -
Dependents
-
- Loading... -
- - {# Logs #} -
Logs
-
- Loading... -
-
- -
-

Versions ({{ versions | length }})

-
    - {% for version in versions %} -
  • - {{ version }} - {% if version == latest_official_version %} - - {% endif %} -
  • - {% else %} -
  • No versions found.
  • - {% endfor %} -
-
-
-
- - -{% endblock %} \ No newline at end of file diff --git a/templates/retrieve_split_data.html b/templates/retrieve_split_data.html deleted file mode 100644 index c930322..0000000 --- a/templates/retrieve_split_data.html +++ /dev/null @@ -1,690 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
-

Retrieve & Split Data

-
-

- Retrieve FHIR bundles from a server and download them as a ZIP file. Split uploaded or retrieved bundle ZIPs into individual resources, also downloaded as a ZIP file. -
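The retrieval and splitting generators in services.py stream newline-delimited JSON progress messages (objects whose type is progress, success, warning, error, or complete). A minimal sketch of consuming such a stream; the endpoint path and form field names below are assumptions for illustration, not routes confirmed by this diff.

```python
import json
import requests

# Hypothetical endpoint and field names, shown only to illustrate how the
# NDJSON stream produced by retrieve_bundles() could be consumed.
resp = requests.post(
    "http://localhost:5000/api/retrieve-bundles",       # assumed route
    data={"fhir_server_url": "http://localhost:8080/fhir",
          "resources": "Patient,Observation"},           # assumed fields
    stream=True,
    timeout=300,
)
for line in resp.iter_lines():
    if not line:
        continue
    msg = json.loads(line)
    print(f"[{msg['type']}] {msg['message']}")
    if msg["type"] == "complete":
        print("Summary:", msg.get("data"))
```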

-
-
- -
-
-
-

Retrieve Bundles

-
-
- {% if form.errors %} -
-

Please correct the following errors:

-
    - {% for field, errors in form.errors.items() %} -
  • {{ form[field].label.text }}: {{ errors|join(', ') }}
  • - {% endfor %} -
-
- {% endif %} -
- {{ form.hidden_tag() }} -
- -
- - {{ form.fhir_server_url(class="form-control", id="fhirServerUrl", style="display: none;", placeholder="e.g., https://fhir.hl7.org.au/aucore/fhir/DEFAULT", **{'aria-describedby': 'fhirServerHelp'}) }} -
- Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL. -
- - {# Authentication Section (Shown for Custom URL) #} - - - {# Checkbox Row #} -
-
- {{ render_field(form.validate_references, id='validate_references_checkbox') }} -
- -
- - - - - -
-
-
Retrieval Log
-
-
- Retrieval output will appear here... -
-
-
-
-
- -
-
-

Split Bundles

-
-
-
- {{ form.hidden_tag() }} -
- -
- - -
-
- - -
-
- {{ render_field(form.split_bundle_zip, class="form-control") }} - - -
-
-
Splitting Log
-
-
- Splitting output will appear here... -
-
-
-
-
-
- - -{% endblock %} \ No newline at end of file diff --git a/templates/search_and_import_ig.html b/templates/search_and_import_ig.html deleted file mode 100644 index 5ff6914..0000000 --- a/templates/search_and_import_ig.html +++ /dev/null @@ -1,461 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block extra_head %} {# Assuming base.html has an 'extra_head' block for additional head elements #} - -{% endblock %} - -{% block content %} -{# Main page content for searching and importing packages #} - -
- {# Flash messages area - Ensure _flash_messages.html exists and is included in base.html or rendered here #} - {% include "_flash_messages.html" %} - - {# Display warning if package fetching failed on initial load #} - {% if fetch_failed %} -
- Unable to fetch packages from registries. Showing a fallback list. Please try again later or contact support. -
- {% endif %} - -
- {# Left Column: Search Packages Area #} -
-
-
- {# Header with Refresh Button #} -
-

Search Packages

- -
- - {# Cache Status Timestamp #} -
- {% if last_cached_timestamp %} - Package list last fetched: {{ last_cached_timestamp.strftime('%Y-%m-%d %H:%M:%S %Z') if last_cached_timestamp else 'Never' }} - {% if fetch_failed %} (Fetch Failed){% endif %} - {% elif is_fetching %} {# Show text spinner specifically during initial fetch state triggered by backend #} - Fetching package list... - {% else %} - Never fetched or cache cleared. - {% endif %} -
- - {# Search Input with HTMX #} -
- - - -
-
- - {# Search Results Area (populated by HTMX) #} -
- {% include '_search_results_table.html' %} {# Includes the initial table state or updated results #} -
-
-
-
- - {# Right Column: Import Form, Log Window, Animation, Warning #} -
-
-
- {# Import Form #} -

Import a New IG

-
{# Form ID used by JS #} - {{ form.hidden_tag() }} {# Include CSRF token if using Flask-WTF #} - {{ render_field(form.package_name, class="form-control") }} - {{ render_field(form.package_version, class="form-control") }} - {{ render_field(form.dependency_mode, class="form-select") }} -
- {# Import Button triggers HTMX POST #} - - - Back {# Simple link back #} -
-
- - {# Live Log Output Window #} -
-
Live Log Output
-
-

Logs from caching or import actions will appear here.

-
- {# Indicator shown while connecting to SSE #} - -
- - {# Animation Window (Hidden by default) #} - - - {# Warning Text (Hidden by default) #} - - -
{# End card-body #} -
{# End card #} -
{# End col-md-3 #} -
{# End row #} -
{# End container #} - - - - -{% endblock %} \ No newline at end of file diff --git a/templates/upload_test_data.html b/templates/upload_test_data.html deleted file mode 100644 index 02f5d68..0000000 --- a/templates/upload_test_data.html +++ /dev/null @@ -1,302 +0,0 @@ -{% extends "base.html" %} -{% from "_form_helpers.html" import render_field %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

{{ title }}

-
-

- Upload FHIR test data (JSON, XML, or ZIP containing JSON/XML) to a target server. The tool will attempt to parse resources, determine dependencies based on references, and upload them in the correct order. Optionally validate resources before uploading. -
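A minimal sketch of the reference-based ordering idea described above (resources that are referenced get uploaded before the resources that reference them); this illustrates the approach under simplified assumptions and is not the toolkit's actual implementation.

```python
def upload_order(resources):
    """Order resources so referenced resources come before their referrers.
    Simplified illustration: only literal ResourceType/id references inside
    the uploaded set are considered; cycles fall back to original order."""
    ids = {f"{r['resourceType']}/{r['id']}": r for r in resources}

    def collect_refs(node, found):
        # Walk the resource looking for {"reference": "Type/id"} values.
        if isinstance(node, dict):
            ref = node.get("reference")
            if isinstance(ref, str) and ref in ids:
                found.add(ref)
            for value in node.values():
                collect_refs(value, found)
        elif isinstance(node, list):
            for value in node:
                collect_refs(value, found)

    deps = {}
    for key, res in ids.items():
        found = set()
        collect_refs(res, found)
        found.discard(key)
        deps[key] = found

    ordered, placed = [], set()
    while deps:
        ready = [k for k, d in deps.items() if d <= placed] or list(deps)
        for key in ready:
            ordered.append(ids[key])
            placed.add(key)
            del deps[key]
    return ordered

# The Patient is ordered before the Observation that references it.
sample = [
    {"resourceType": "Observation", "id": "obs1",
     "subject": {"reference": "Patient/pat1"}},
    {"resourceType": "Patient", "id": "pat1"},
]
print([f"{r['resourceType']}/{r['id']}" for r in upload_order(sample)])
```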

-
-
- -
-
-
-
-
-

Upload Configuration

-
-
- {% if form.errors %} -
-

Please correct the following errors:

-
    - {% for field, errors in form.errors.items() %} -
  • {{ form[field].label.text }}: {{ errors|join(', ') }}
  • - {% endfor %} -
-
- {% endif %} - -
- {{ form.csrf_token }} - - {{ render_field(form.fhir_server_url, class="form-control form-control-lg") }} - -
-
- {{ render_field(form.auth_type, class="form-select") }} -
- -
- - {{ render_field(form.test_data_file, class="form-control") }} - Select one or more .json, .xml files, or a single .zip file containing them. - -
-
- {{ render_field(form.validate_before_upload) }} - It is suggested not to validate more than 500 files at a time -
- -
- -
-
- {{ render_field(form.upload_mode, class="form-select") }} -
-
- {{ render_field(form.use_conditional_uploads) }} -
-
- {{ render_field(form.error_handling, class="form-select") }} -
-
- - -
-
-
- -
-
-

Processing Log & Results

-
-
-
- Processing output will appear here... -
-
-
-
-
-
-
-
- -{% endblock %} - -{% block scripts %} -{{ super() }} - -{% endblock %} \ No newline at end of file diff --git a/templates/validate_sample.html b/templates/validate_sample.html deleted file mode 100644 index e85034d..0000000 --- a/templates/validate_sample.html +++ /dev/null @@ -1,376 +0,0 @@ - -{% extends "base.html" %} - -{% block content %} -
- FHIRFLARE IG Toolkit -

Validate FHIR Sample

-
-

- Validate a FHIR resource or bundle against a selected Implementation Guide. (ALPHA/TEST: FHIR pathing is complex and the logic is a work in progress; please report any anomalies you find in GitHub issues alongside your sample JSON, with all PHI removed.) -
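For context, a minimal example of the kind of sample a user might paste, written as a Python dict for consistency with the rest of this document. The canonical profile URL is a placeholder: resources commonly declare the profile they claim to conform to in meta.profile, while validation on this page runs against the package and version selected in the form.

```python
# Illustrative sample only; the profile canonical below is a placeholder.
sample_resource = {
    "resourceType": "Patient",
    "id": "example",
    "meta": {
        "profile": ["http://example.org/fhir/StructureDefinition/example-patient"]
    },
    "name": [{"family": "Example", "given": ["Pat"]}],
    "gender": "female",
}
```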

- -
-
- -
-
-
Validation Form
-
-
- {{ form.hidden_tag() }} -
- - Select a package from the list below or enter a new one (e.g., hl7.fhir.us.core). - -
- -
- - {{ form.package_name(class="form-control", id=form.package_name.id) }} - {% for error in form.package_name.errors %} -
{{ error }}
- {% endfor %} -
-
- - {{ form.version(class="form-control", id=form.version.id) }} - {% for error in form.version.errors %} -
{{ error }}
- {% endfor %} -
-
- -
- {{ form.include_dependencies(class="form-check-input") }} - {{ form.include_dependencies.label(class="form-check-label") }} -
-
-
- - {{ form.mode(class="form-select") }} -
-
- - {{ form.sample_input(class="form-control", rows=10, placeholder="Paste your FHIR JSON here...") }} - {% for error in form.sample_input.errors %} -
{{ error }}
- {% endfor %} -
- -
-
-
- - -
- - -{% endblock %} \ No newline at end of file diff --git a/tests/test_app.py b/tests/test_app.py deleted file mode 100644 index 996d1e9..0000000 --- a/tests/test_app.py +++ /dev/null @@ -1,648 +0,0 @@ -import unittest -import os -import sys -import json -import tarfile -import shutil -import io -import requests -import time -import subprocess -from unittest.mock import patch, MagicMock, mock_open, call -from flask import Flask, session -from flask.testing import FlaskClient -from datetime import datetime, timezone - -# Add the parent directory (/app) to sys.path -APP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) -if APP_DIR not in sys.path: - sys.path.insert(0, APP_DIR) - -from app import app, db, ProcessedIg -import services - -# Helper function to parse NDJSON stream -def parse_ndjson(byte_stream): - decoded_stream = byte_stream.decode('utf-8').strip() - if not decoded_stream: - return [] - lines = decoded_stream.split('\n') - return [json.loads(line) for line in lines if line.strip()] - -class DockerComposeContainer: - """ - A class that follows the Testcontainers pattern for managing Docker Compose environments. - This implementation uses subprocess to call docker-compose directly since we're not - installing the testcontainers-python package. - """ - - def __init__(self, compose_file_path): - """ - Initialize with the path to the docker-compose.yml file - - Args: - compose_file_path: Path to the docker-compose.yml file - """ - self.compose_file = compose_file_path - self.compose_dir = os.path.dirname(os.path.abspath(compose_file_path)) - self.containers_up = False - self.service_ports = {} - self._container_ids = {} - - def __enter__(self): - """Start containers when entering context""" - self.start() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Stop containers when exiting context""" - self.stop() - - def with_service_port(self, service_name, port): - """ - Map a service port (following the testcontainers builder pattern) - - Args: - service_name: Name of the service in docker-compose.yml - port: Port number to expose - - Returns: - self for chaining - """ - self.service_ports[service_name] = port - return self - - def start(self): - """Start the Docker Compose environment""" - if self.containers_up: - return self - - print("Starting Docker Compose environment...") - result = subprocess.run( - ['docker-compose', '-f', self.compose_file, 'up', '-d'], - cwd=self.compose_dir, - capture_output=True, - text=True - ) - - if result.returncode != 0: - error_msg = f"Failed to start Docker Compose environment: {result.stderr}" - print(error_msg) - raise RuntimeError(error_msg) - - # Store container IDs for later use - self._get_container_ids() - - self.containers_up = True - self._wait_for_services() - return self - - def _get_container_ids(self): - """Get the container IDs for all services""" - result = subprocess.run( - ['docker-compose', '-f', self.compose_file, 'ps', '-q'], - cwd=self.compose_dir, - capture_output=True, - text=True - ) - - if result.returncode != 0: - return - - container_ids = result.stdout.strip().split('\n') - if not container_ids: - return - - # Get service names for each container - for container_id in container_ids: - if not container_id: - continue - - inspect_result = subprocess.run( - ['docker', 'inspect', '--format', '{{index .Config.Labels "com.docker.compose.service"}}', container_id], - capture_output=True, - text=True - ) - - if inspect_result.returncode == 0: - service_name = inspect_result.stdout.strip() - 
self._container_ids[service_name] = container_id - - def get_container_id(self, service_name): - """ - Get the container ID for a specific service - - Args: - service_name: Name of the service in docker-compose.yml - - Returns: - Container ID as string or None if not found - """ - return self._container_ids.get(service_name) - - def get_service_host(self, service_name): - """ - Get the host for a specific service - for Docker Compose we just use localhost - - Args: - service_name: Name of the service in docker-compose.yml - - Returns: - Host as string (usually localhost) - """ - return "localhost" - - def get_service_url(self, service_name, path=""): - """ - Get the URL for a specific service - - Args: - service_name: Name of the service in docker-compose.yml - path: Optional path to append to the URL - - Returns: - URL as string - """ - port = self.service_ports.get(service_name) - if not port: - raise ValueError(f"No port mapping defined for service {service_name}") - - url = f"http://{self.get_service_host(service_name)}:{port}" - if path: - # Ensure path starts with / - if not path.startswith('/'): - path = f"/{path}" - url = f"{url}{path}" - - return url - - def get_logs(self, service_name): - """ - Get logs for a specific service - - Args: - service_name: Name of the service in docker-compose.yml - - Returns: - Logs as string - """ - container_id = self.get_container_id(service_name) - if not container_id: - return f"No container found for service {service_name}" - - result = subprocess.run( - ['docker', 'logs', container_id], - capture_output=True, - text=True - ) - - return result.stdout - - def stop(self): - """Stop the Docker Compose environment""" - if not self.containers_up: - return - - print("Stopping Docker Compose environment...") - result = subprocess.run( - ['docker-compose', '-f', self.compose_file, 'down'], - cwd=self.compose_dir, - capture_output=True, - text=True - ) - - if result.returncode != 0: - print(f"Warning: Error stopping Docker Compose: {result.stderr}") - - self.containers_up = False - - def _wait_for_services(self): - """Wait for all services to be ready""" - print("Waiting for services to be ready...") - - # Wait for HAPI FHIR server - if 'fhir' in self.service_ports: - self._wait_for_http_service( - self.get_service_url('fhir', 'fhir/metadata'), - "HAPI FHIR server" - ) - - # Wait for FHIRFLARE application - if 'fhirflare' in self.service_ports: - self._wait_for_http_service( - self.get_service_url('fhirflare'), - "FHIRFLARE application" - ) - - # Give additional time for services to stabilize - time.sleep(5) - - def _wait_for_http_service(self, url, service_name, max_retries=30, retry_interval=2): - """ - Wait for an HTTP service to be ready - - Args: - url: URL to check - service_name: Name of the service for logging - max_retries: Maximum number of retries - retry_interval: Interval between retries in seconds - """ - for attempt in range(max_retries): - try: - response = requests.get(url, timeout=5) - if response.status_code == 200: - print(f"{service_name} is ready after {attempt + 1} attempts") - return True - except requests.RequestException: - pass - - print(f"Waiting for {service_name} (attempt {attempt + 1}/{max_retries})...") - time.sleep(retry_interval) - - print(f"Warning: {service_name} did not become ready in time") - return False - -class TestFHIRFlareIGToolkit(unittest.TestCase): - @classmethod - def setUpClass(cls): - # Define the Docker Compose container - compose_file_path = os.path.join(os.path.dirname(__file__), 'docker-compose.yml') 
- cls.container = DockerComposeContainer(compose_file_path) \ - .with_service_port('fhir', 8080) \ - .with_service_port('fhirflare', 5000) - - # Start the containers - cls.container.start() - - # Configure app for testing - app.config['TESTING'] = True - app.config['WTF_CSRF_ENABLED'] = False - app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:' - cls.test_packages_dir = os.path.join(os.path.dirname(__file__), 'test_fhir_packages_temp') - app.config['FHIR_PACKAGES_DIR'] = cls.test_packages_dir - app.config['SECRET_KEY'] = 'test-secret-key' - app.config['API_KEY'] = 'test-api-key' - app.config['VALIDATE_IMPOSED_PROFILES'] = True - app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True - app.config['HAPI_FHIR_URL'] = cls.container.get_service_url('fhir', 'fhir') # Point to containerized HAPI FHIR - - cls.app_context = app.app_context() - cls.app_context.push() - db.create_all() - cls.client = app.test_client() - - @classmethod - def tearDownClass(cls): - cls.app_context.pop() - if os.path.exists(cls.test_packages_dir): - shutil.rmtree(cls.test_packages_dir) - - # Stop Docker Compose environment - cls.container.stop() - - def setUp(self): - if os.path.exists(self.test_packages_dir): - shutil.rmtree(self.test_packages_dir) - os.makedirs(self.test_packages_dir, exist_ok=True) - with self.app_context: - for item in db.session.query(ProcessedIg).all(): - db.session.delete(item) - db.session.commit() - - def tearDown(self): - pass - - # Helper Method - def create_mock_tgz(self, filename, files_content): - tgz_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename) - with tarfile.open(tgz_path, "w:gz") as tar: - for name, content in files_content.items(): - if isinstance(content, (dict, list)): - data_bytes = json.dumps(content).encode('utf-8') - elif isinstance(content, str): - data_bytes = content.encode('utf-8') - else: - raise TypeError(f"Unsupported type for mock file '{name}': {type(content)}") - file_io = io.BytesIO(data_bytes) - tarinfo = tarfile.TarInfo(name=name) - tarinfo.size = len(data_bytes) - tarinfo.mtime = int(datetime.now(timezone.utc).timestamp()) - tar.addfile(tarinfo, file_io) - return tgz_path - - # --- Phase 1 Tests --- - - def test_01_navigate_fhir_path(self): - resource = { - "resourceType": "Patient", - "name": [{"given": ["John"]}], - "identifier": [{"system": "http://hl7.org/fhir/sid/us-ssn", "sliceName": "us-ssn"}], - "extension": [{"url": "http://hl7.org/fhir/StructureDefinition/patient-birthPlace", "valueAddress": {"city": "Boston"}}] - } - self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"]) - self.assertEqual(services.navigate_fhir_path(resource, "Patient.identifier:us-ssn.system"), "http://hl7.org/fhir/sid/us-ssn") - self.assertEqual(services.navigate_fhir_path(resource, "Patient.extension", extension_url="http://hl7.org/fhir/StructureDefinition/patient-birthPlace")["valueAddress"]["city"], "Boston") - with patch('fhirpath.evaluate', side_effect=Exception("fhirpath error")): - self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"]) - - # --- Basic Page Rendering Tests --- - - def test_03_homepage(self): - # Connect to the containerized application - response = requests.get(self.container.get_service_url('fhirflare')) - self.assertEqual(response.status_code, 200) - self.assertIn('FHIRFLARE IG Toolkit', response.text) - - def test_04_import_ig_page(self): - response = requests.get(self.container.get_service_url('fhirflare', 'import-ig')) - self.assertEqual(response.status_code, 200) 
- self.assertIn('Import IG', response.text) - self.assertIn('Package Name', response.text) - self.assertIn('Package Version', response.text) - self.assertIn('name="dependency_mode"', response.text) - - # --- API Integration Tests --- - - def test_30_load_ig_to_hapi_integration(self): - """Test loading an IG to the containerized HAPI FHIR server""" - pkg_name = 'hl7.fhir.us.core' - pkg_version = '6.1.0' - filename = f'{pkg_name}-{pkg_version}.tgz' - self.create_mock_tgz(filename, { - 'package/package.json': {'name': pkg_name, 'version': pkg_version}, - 'package/StructureDefinition-us-core-patient.json': { - 'resourceType': 'StructureDefinition', - 'id': 'us-core-patient', - 'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', - 'name': 'USCorePatientProfile', - 'type': 'Patient', - 'status': 'active' - } - }) - - # Load IG to HAPI - response = self.client.post( - '/api/load-ig-to-hapi', - data=json.dumps({'package_name': pkg_name, 'version': pkg_version}), - content_type='application/json', - headers={'X-API-Key': 'test-api-key'} - ) - - self.assertEqual(response.status_code, 200) - data = json.loads(response.data) - self.assertEqual(data['status'], 'success') - - # Verify the resource was loaded by querying the HAPI FHIR server directly - hapi_response = requests.get(self.container.get_service_url('fhir', 'fhir/StructureDefinition/us-core-patient')) - self.assertEqual(hapi_response.status_code, 200) - resource = hapi_response.json() - self.assertEqual(resource['resourceType'], 'StructureDefinition') - self.assertEqual(resource['id'], 'us-core-patient') - - def test_31_validate_sample_with_hapi_integration(self): - """Test validating a sample against the containerized HAPI FHIR server""" - # First, load the necessary StructureDefinition - pkg_name = 'hl7.fhir.us.core' - pkg_version = '6.1.0' - filename = f'{pkg_name}-{pkg_version}.tgz' - self.create_mock_tgz(filename, { - 'package/package.json': {'name': pkg_name, 'version': pkg_version}, - 'package/StructureDefinition-us-core-patient.json': { - 'resourceType': 'StructureDefinition', - 'id': 'us-core-patient', - 'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient', - 'name': 'USCorePatientProfile', - 'type': 'Patient', - 'status': 'active', - 'snapshot': { - 'element': [ - {'path': 'Patient', 'min': 1, 'max': '1'}, - {'path': 'Patient.name', 'min': 1, 'max': '*'}, - {'path': 'Patient.identifier', 'min': 0, 'max': '*', 'mustSupport': True} - ] - } - } - }) - - # Load IG to HAPI - self.client.post( - '/api/load-ig-to-hapi', - data=json.dumps({'package_name': pkg_name, 'version': pkg_version}), - content_type='application/json', - headers={'X-API-Key': 'test-api-key'} - ) - - # Validate a sample that's missing a required element - sample_resource = { - 'resourceType': 'Patient', - 'id': 'test-patient', - 'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']} - # Missing required 'name' element - } - - response = self.client.post( - '/api/validate-sample', - data=json.dumps({ - 'package_name': pkg_name, - 'version': pkg_version, - 'sample_data': json.dumps(sample_resource), - 'mode': 'single', - 'include_dependencies': True - }), - content_type='application/json', - headers={'X-API-Key': 'test-api-key'} - ) - - self.assertEqual(response.status_code, 200) - data = json.loads(response.data) - self.assertFalse(data['valid']) - # Check for validation error related to missing name - found_name_error = any('name' in error for error in data['errors']) - 
self.assertTrue(found_name_error, f"Expected error about missing name element, got: {data['errors']}") - - def test_32_push_ig_to_hapi_integration(self): - """Test pushing multiple resources from an IG to the containerized HAPI FHIR server""" - pkg_name = 'test.push.pkg' - pkg_version = '1.0.0' - filename = f'{pkg_name}-{pkg_version}.tgz' - - # Create a test package with multiple resources - self.create_mock_tgz(filename, { - 'package/package.json': {'name': pkg_name, 'version': pkg_version}, - 'package/Patient-test1.json': { - 'resourceType': 'Patient', - 'id': 'test1', - 'name': [{'family': 'Test', 'given': ['Patient']}] - }, - 'package/Observation-test1.json': { - 'resourceType': 'Observation', - 'id': 'test1', - 'status': 'final', - 'code': {'coding': [{'system': 'http://loinc.org', 'code': '12345-6'}]} - } - }) - - # Push the IG to HAPI - response = self.client.post( - '/api/push-ig', - data=json.dumps({ - 'package_name': pkg_name, - 'version': pkg_version, - 'fhir_server_url': self.container.get_service_url('fhir', 'fhir'), - 'include_dependencies': False - }), - content_type='application/json', - headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'} - ) - - self.assertEqual(response.status_code, 200) - streamed_data = parse_ndjson(response.data) - complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None) - self.assertIsNotNone(complete_msg, "Complete message not found in streamed response") - summary = complete_msg.get('data', {}) - self.assertTrue(summary.get('success_count') >= 2, f"Expected at least 2 successful resources, got {summary.get('success_count')}") - - # Verify resources were loaded by querying the HAPI FHIR server directly - patient_response = requests.get(self.container.get_service_url('fhir', 'fhir/Patient/test1')) - self.assertEqual(patient_response.status_code, 200) - patient = patient_response.json() - self.assertEqual(patient['resourceType'], 'Patient') - self.assertEqual(patient['id'], 'test1') - - observation_response = requests.get(self.container.get_service_url('fhir', 'fhir/Observation/test1')) - self.assertEqual(observation_response.status_code, 200) - observation = observation_response.json() - self.assertEqual(observation['resourceType'], 'Observation') - self.assertEqual(observation['id'], 'test1') - - # --- Existing API Tests --- - - @patch('app.list_downloaded_packages') - @patch('app.services.process_package_file') - @patch('app.services.import_package_and_dependencies') - @patch('os.path.exists') - def test_40_api_import_ig_success(self, mock_os_exists, mock_import, mock_process, mock_list_pkgs): - pkg_name = 'api.test.pkg' - pkg_version = '1.2.3' - filename = f'{pkg_name}-{pkg_version}.tgz' - pkg_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename) - mock_import.return_value = {'requested': (pkg_name, pkg_version), 'processed': {(pkg_name, pkg_version)}, 'downloaded': {(pkg_name, pkg_version): pkg_path}, 'all_dependencies': {}, 'dependencies': [], 'errors': []} - mock_process.return_value = {'resource_types_info': [], 'must_support_elements': {}, 'examples': {}, 'complies_with_profiles': ['http://prof.com/a'], 'imposed_profiles': [], 'errors': []} - mock_os_exists.return_value = True - mock_list_pkgs.return_value = ([{'name': pkg_name, 'version': pkg_version, 'filename': filename}], [], {}) - response = self.client.post( - '/api/import-ig', - data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'dependency_mode': 'direct', 'api_key': 'test-api-key'}), - 
content_type='application/json' - ) - self.assertEqual(response.status_code, 200) - data = json.loads(response.data) - self.assertEqual(data['status'], 'success') - self.assertEqual(data['complies_with_profiles'], ['http://prof.com/a']) - - @patch('app.services.import_package_and_dependencies') - def test_41_api_import_ig_failure(self, mock_import): - mock_import.return_value = {'requested': ('bad.pkg', '1.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error: 404 Not Found']} - response = self.client.post( - '/api/import-ig', - data=json.dumps({'package_name': 'bad.pkg', 'version': '1.0', 'api_key': 'test-api-key'}), - content_type='application/json' - ) - self.assertEqual(response.status_code, 404) - data = json.loads(response.data) - self.assertIn('Failed to import bad.pkg#1.0: HTTP error: 404 Not Found', data['message']) - - def test_42_api_import_ig_invalid_key(self): - response = self.client.post( - '/api/import-ig', - data=json.dumps({'package_name': 'a', 'version': '1', 'api_key': 'wrong'}), - content_type='application/json' - ) - self.assertEqual(response.status_code, 401) - - def test_43_api_import_ig_missing_key(self): - response = self.client.post( - '/api/import-ig', - data=json.dumps({'package_name': 'a', 'version': '1'}), - content_type='application/json' - ) - self.assertEqual(response.status_code, 401) - - # --- API Push Tests --- - - @patch('os.path.exists', return_value=True) - @patch('app.services.get_package_metadata') - @patch('tarfile.open') - @patch('requests.Session') - def test_50_api_push_ig_success(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists): - pkg_name = 'push.test.pkg' - pkg_version = '1.0.0' - filename = f'{pkg_name}-{pkg_version}.tgz' - fhir_server_url = self.container.get_service_url('fhir', 'fhir') - mock_get_metadata.return_value = {'imported_dependencies': []} - mock_tar = MagicMock() - mock_patient = {'resourceType': 'Patient', 'id': 'pat1'} - mock_obs = {'resourceType': 'Observation', 'id': 'obs1', 'status': 'final'} - patient_member = MagicMock(spec=tarfile.TarInfo) - patient_member.name = 'package/Patient-pat1.json' - patient_member.isfile.return_value = True - obs_member = MagicMock(spec=tarfile.TarInfo) - obs_member.name = 'package/Observation-obs1.json' - obs_member.isfile.return_value = True - mock_tar.getmembers.return_value = [patient_member, obs_member] - def mock_extractfile(member): - if member.name == 'package/Patient-pat1.json': - return io.BytesIO(json.dumps(mock_patient).encode('utf-8')) - if member.name == 'package/Observation-obs1.json': - return io.BytesIO(json.dumps(mock_obs).encode('utf-8')) - return None - mock_tar.extractfile.side_effect = mock_extractfile - mock_tarfile_open.return_value.__enter__.return_value = mock_tar - mock_session_instance = MagicMock() - mock_put_response = MagicMock(status_code=200) - mock_put_response.raise_for_status.return_value = None - mock_session_instance.put.return_value = mock_put_response - mock_session.return_value = mock_session_instance - self.create_mock_tgz(filename, {'package/dummy.txt': 'content'}) - response = self.client.post( - '/api/push-ig', - data=json.dumps({ - 'package_name': pkg_name, - 'version': pkg_version, - 'fhir_server_url': fhir_server_url, - 'include_dependencies': False, - 'api_key': 'test-api-key' - }), - content_type='application/json', - headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'} - ) - self.assertEqual(response.status_code, 200) - 
self.assertEqual(response.mimetype, 'application/x-ndjson') - streamed_data = parse_ndjson(response.data) - complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None) - self.assertIsNotNone(complete_msg) - summary = complete_msg.get('data', {}) - self.assertEqual(summary.get('status'), 'success') - self.assertEqual(summary.get('success_count'), 2) - self.assertEqual(len(summary.get('failed_details')), 0) - mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename)) - - # --- Helper method to debug container issues --- - - def test_99_print_container_logs_on_failure(self): - """Helper test that prints container logs in case of failures""" - # This test should always pass but will print logs if other tests fail - try: - if hasattr(self, 'container') and self.container.containers_up: - for service_name in ['fhir', 'db', 'fhirflare']: - if service_name in self.container._container_ids: - print(f"\n=== Logs for {service_name} ===") - print(self.container.get_logs(service_name)) - except Exception as e: - print(f"Error getting container logs: {e}") - - # This assertion always passes - this test is just for debug info - self.assertTrue(True) - -if __name__ == '__main__': - unittest.main() \ No newline at end of file diff --git a/tests/upload_samples/Bundle-transaction-ex.json b/tests/upload_samples/Bundle-transaction-ex.json deleted file mode 100644 index 22432af..0000000 --- a/tests/upload_samples/Bundle-transaction-ex.json +++ /dev/null @@ -1,431 +0,0 @@ -{ - "resourceType" : "Bundle", - "id" : "transaction-ex", - "type" : "transaction", - "entry" : [{ - "fullUrl" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b", - "resource" : { - "resourceType" : "Patient", - "id" : "example-patient", - "meta" : { - "profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-patient"] - }, - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Juan Dela Cruz is a male patient born on 1 January 1980, residing in Manila, NCR, Philippines.</div>
" - }, - "extension" : [{ - "extension" : [{ - "url" : "code", - "valueCodeableConcept" : { - "coding" : [{ - "system" : "urn:iso:std:iso:3166", - "code" : "PH", - "display" : "Philippines" - }] - } - }, - { - "url" : "period", - "valuePeriod" : { - "start" : "2020-01-01", - "end" : "2023-01-01" - } - }], - "url" : "http://hl7.org/fhir/StructureDefinition/patient-nationality" - }, - { - "url" : "http://hl7.org/fhir/StructureDefinition/patient-religion", - "valueCodeableConcept" : { - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/v3-ReligiousAffiliation", - "code" : "1007", - "display" : "Atheism" - }] - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/indigenous-people", - "valueBoolean" : true - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/indigenous-group", - "valueCodeableConcept" : { - "coding" : [{ - "system" : "urn://example.com/ph-core/fhir/CodeSystem/indigenous-groups", - "code" : "Ilongots", - "display" : "Ilongots" - }] - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/race", - "valueCodeableConcept" : { - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/v3-Race", - "code" : "2036-2", - "display" : "Filipino" - }] - } - }], - "identifier" : [{ - "system" : "http://philhealth.gov.ph/fhir/Identifier/philhealth-id", - "value" : "63-584789845-5" - }], - "active" : true, - "name" : [{ - "family" : "Dela Cruz", - "given" : ["Juan Jane", - "Dela Fuente"] - }], - "gender" : "male", - "birthDate" : "1985-06-15", - "address" : [{ - "extension" : [{ - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/city-municipality", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "1380200000", - "display" : "City of Las Piñas" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/city-municipality", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "1380100000", - "display" : "City of Caloocan" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "0402100000", - "display" : "Cavite" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "0403400000", - "display" : "Laguna" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "0405800000", - "display" : "Rizal" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "1704000000", - "display" : "Marinduque" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "0402100000", - "display" : "Cavite" - } - }, - { - "url" : "urn://example.com/ph-core/fhir/StructureDefinition/province", - "valueCoding" : { - "system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC", - "code" : "1705100000", - "display" : "Occidental Mindoro" - } - }], - "line" : ["123 Mabini Street", - "Barangay Malinis"], - "city" : "Quezon City", - "district" : "NCR", - "postalCode" : "1100", - "country" : "PH" - }] - }, - "request" : { - "method" 
: "POST", - "url" : "Patient" - } - }, - { - "fullUrl" : "urn:uuid:60b7132e-7cfd-44bc-83c2-de140dc8aaae", - "resource" : { - "resourceType" : "Encounter", - "id" : "example-encounter", - "meta" : { - "profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-encounter"] - }, - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">An ambulatory encounter for Juan Dela Cruz that has been completed.</div>
" - }, - "status" : "finished", - "class" : { - "system" : "http://terminology.hl7.org/CodeSystem/v3-ActCode", - "code" : "AMB", - "display" : "ambulatory" - }, - "subject" : { - "reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b" - } - }, - "request" : { - "method" : "POST", - "url" : "Encounter" - } - }, - { - "fullUrl" : "urn:uuid:1a391d1e-a068-479a-88e3-e3d52c3a6f64", - "resource" : { - "resourceType" : "Condition", - "id" : "example-condition", - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Juan Dela Cruz has an active diagnosis of Type 2 Diabetes Mellitus.</div>
" - }, - "clinicalStatus" : { - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/condition-clinical", - "code" : "active", - "display" : "Active" - }] - }, - "code" : { - "coding" : [{ - "system" : "http://snomed.info/sct", - "code" : "44054006", - "display" : "Diabetes mellitus type 2" - }] - }, - "subject" : { - "reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b" - }, - "encounter" : { - "reference" : "urn:uuid:60b7132e-7cfd-44bc-83c2-de140dc8aaae" - } - }, - "request" : { - "method" : "POST", - "url" : "Condition" - } - }, - { - "fullUrl" : "urn:uuid:024dcb47-cc23-407a-839b-b4634e95abae", - "resource" : { - "resourceType" : "Medication", - "id" : "example-medication", - "meta" : { - "profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-medication"] - }, - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">A medication resource has been created, but no specific details are provided.</div>
" - } - }, - "request" : { - "method" : "POST", - "url" : "Medication" - } - }, - { - "fullUrl" : "urn:uuid:013f46df-f245-4a2f-beaf-9eb2c47fb1a3", - "resource" : { - "resourceType" : "Observation", - "id" : "blood-pressure", - "meta" : { - "profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-observation", - "http://hl7.org/fhir/StructureDefinition/vitalsigns", - "http://hl7.org/fhir/StructureDefinition/bp"] - }, - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">On 17 September 2012, a blood pressure observation was recorded for Juan Dela Cruz. The systolic pressure was 107 mmHg (Normal), and the diastolic pressure was 60 mmHg (Below low normal). The measurement was taken from the right arm and performed by a practitioner.</div>
" - }, - "identifier" : [{ - "system" : "urn:ietf:rfc:3986", - "value" : "urn:uuid:187e0c12-8dd2-67e2-99b2-bf273c878281" - }], - "basedOn" : [{ - "identifier" : { - "system" : "https://acme.org/identifiers", - "value" : "1234" - } - }], - "status" : "final", - "category" : [{ - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/observation-category", - "code" : "vital-signs", - "display" : "Vital Signs" - }] - }], - "code" : { - "coding" : [{ - "system" : "http://loinc.org", - "code" : "85354-9", - "display" : "Blood pressure panel with all children optional" - }], - "text" : "Blood pressure systolic & diastolic" - }, - "subject" : { - "reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b" - }, - "effectiveDateTime" : "2012-09-17", - "performer" : [{ - "reference" : "urn:uuid:a036fd4c-c950-497b-8905-0d2c5ec6f1d4" - }], - "interpretation" : [{ - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation", - "code" : "L", - "display" : "Low" - }], - "text" : "Below low normal" - }], - "bodySite" : { - "coding" : [{ - "system" : "http://snomed.info/sct", - "code" : "85050009", - "display" : "Bone structure of humerus" - }] - }, - "component" : [{ - "code" : { - "coding" : [{ - "system" : "http://loinc.org", - "code" : "8480-6", - "display" : "Systolic blood pressure" - }] - }, - "valueQuantity" : { - "value" : 107, - "unit" : "mmHg", - "system" : "http://unitsofmeasure.org", - "code" : "mm[Hg]" - }, - "interpretation" : [{ - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation", - "code" : "N", - "display" : "Normal" - }], - "text" : "Normal" - }] - }, - { - "code" : { - "coding" : [{ - "system" : "http://loinc.org", - "code" : "8462-4", - "display" : "Diastolic blood pressure" - }] - }, - "valueQuantity" : { - "value" : 60, - "unit" : "mmHg", - "system" : "http://unitsofmeasure.org", - "code" : "mm[Hg]" - }, - "interpretation" : [{ - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation", - "code" : "L", - "display" : "Low" - }], - "text" : "Below low normal" - }] - }] - }, - "request" : { - "method" : "POST", - "url" : "Observation" - } - }, - { - "fullUrl" : "urn:uuid:b43c67e7-d9c4-48bb-a1b4-55769eeb9066", - "resource" : { - "resourceType" : "AllergyIntolerance", - "id" : "example-allergy", - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Juan Dela Cruz has a high criticality, active allergy to Benethamine penicillin.</div>
" - }, - "clinicalStatus" : { - "coding" : [{ - "system" : "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical", - "code" : "active", - "display" : "Active" - }] - }, - "criticality" : "high", - "code" : { - "coding" : [{ - "system" : "http://snomed.info/sct", - "code" : "294494002", - "display" : "Benethamine penicillin allergy" - }] - }, - "patient" : { - "reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b" - } - }, - "request" : { - "method" : "POST", - "url" : "AllergyIntolerance" - } - }, - { - "fullUrl" : "urn:uuid:a036fd4c-c950-497b-8905-0d2c5ec6f1d4", - "resource" : { - "resourceType" : "Practitioner", - "id" : "example-practitioner", - "meta" : { - "profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-practitioner"] - }, - "text" : { - "status" : "generated", - "div" : "
<div xmlns=\"http://www.w3.org/1999/xhtml\">Dr. Maria Clara Santos is a female practitioner born on May 15, 1985. She resides at 1234 Mabini Street, Manila, NCR, 1000, Philippines. She can be contacted via mobile at +63-912-345-6789 or by email at maria.santos@example.ph.</div>
" - }, - "name" : [{ - "family" : "Santos", - "given" : ["Maria", - "Clara"] - }], - "telecom" : [{ - "system" : "phone", - "value" : "+63-912-345-6789", - "use" : "mobile" - }, - { - "system" : "email", - "value" : "maria.santos@example.ph", - "use" : "work" - }], - "address" : [{ - "use" : "home", - "line" : ["1234 Mabini Street"], - "city" : "Manila", - "state" : "NCR", - "postalCode" : "1000", - "country" : "PH" - }], - "gender" : "female", - "birthDate" : "1985-05-15" - }, - "request" : { - "method" : "POST", - "url" : "Practitioner" - } - }] -} \ No newline at end of file diff --git a/tests/upload_samples/PHCDI.r4-0.1.0.tgz b/tests/upload_samples/PHCDI.r4-0.1.0.tgz deleted file mode 100644 index beea6f9..0000000 Binary files a/tests/upload_samples/PHCDI.r4-0.1.0.tgz and /dev/null differ diff --git a/tests/upload_samples/example.fhir.ph.core.r4-0.1.0.tgz b/tests/upload_samples/example.fhir.ph.core.r4-0.1.0.tgz deleted file mode 100644 index 2a1c844..0000000 Binary files a/tests/upload_samples/example.fhir.ph.core.r4-0.1.0.tgz and /dev/null differ diff --git a/tests/upload_samples/validation.log b/tests/upload_samples/validation.log deleted file mode 100644 index 0bc8823..0000000 --- a/tests/upload_samples/validation.log +++ /dev/null @@ -1,25 +0,0 @@ -2025-07-31 12:34:21,943 - services - DEBUG - Received validate-sample request -2025-07-31 12:34:21,944 - services - DEBUG - Request params: package_name=example.fhir.ph.core.r4, version=0.1.0, sample_data_length=713 -2025-07-31 12:34:21,944 - services - DEBUG - Using FHIR_PACKAGES_DIR from current_app config: /app/instance/fhir_packages -2025-07-31 12:34:21,944 - services - DEBUG - Checking package file: /app/instance/fhir_packages/example.fhir.ph.core.r4-0.1.0.tgz -2025-07-31 12:34:21,944 - services - DEBUG - Validating AllergyIntolerance against example.fhir.ph.core.r4#0.1.0 -2025-07-31 12:34:21,944 - services - DEBUG - Using FHIR_PACKAGES_DIR from current_app config: /app/instance/fhir_packages -2025-07-31 12:34:21,945 - services - DEBUG - Searching for SD matching 'AllergyIntolerance' with profile 'None' in example.fhir.ph.core.r4-0.1.0.tgz -2025-07-31 12:34:21,956 - services - INFO - SD matching identifier 'AllergyIntolerance' or profile 'None' not found within archive example.fhir.ph.core.r4-0.1.0.tgz -2025-07-31 12:34:21,956 - services - INFO - Validation result for AllergyIntolerance against example.fhir.ph.core.r4#0.1.0: valid=False, errors=1, warnings=0 -2025-07-31 12:34:21,957 - werkzeug - INFO - 10.0.0.102 - - [31/Jul/2025 12:34:21] "POST /api/validate-sample HTTP/1.1" 200 - -2025-07-31 12:34:24,510 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:24] "GET / HTTP/1.1" 200 - -2025-07-31 12:34:27,378 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:27] "GET / HTTP/1.1" 200 - -2025-07-31 12:34:34,510 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:34] "GET / HTTP/1.1" 200 - -2025-07-31 12:34:36,799 - __main__ - DEBUG - Scanning packages directory: /app/instance/fhir_packages -2025-07-31 12:34:36,800 - __main__ - DEBUG - Found 8 .tgz files: ['PHCDI.r4-0.1.0.tgz', 'hl7.fhir.uv.ips-1.1.0.tgz', 'hl7.fhir.r4.core-4.0.1.tgz', 'fhir.dicom-2022.4.20221006.tgz', 'hl7.terminology.r4-5.0.0.tgz', 'example.fhir.ph.core.r4-0.1.0.tgz', 'hl7.terminology.r4-6.4.0.tgz', 'hl7.fhir.uv.extensions.r4-5.2.0.tgz'] -2025-07-31 12:34:36,813 - __main__ - DEBUG - Added package: PHCDI.r4#0.1.0 -2025-07-31 12:34:36,837 - __main__ - DEBUG - Added package: hl7.fhir.uv.ips#1.1.0 -2025-07-31 12:34:37,378 - werkzeug - INFO - 10.0.2.245 - - 
[31/Jul/2025 12:34:37] "GET / HTTP/1.1" 200 - -2025-07-31 12:34:37,514 - __main__ - DEBUG - Added package: hl7.fhir.r4.core#4.0.1 -2025-07-31 12:34:37,622 - __main__ - DEBUG - Added package: fhir.dicom#2022.4.20221006 -2025-07-31 12:34:38,008 - __main__ - DEBUG - Added package: hl7.terminology.r4#5.0.0 -2025-07-31 12:34:38,015 - __main__ - DEBUG - Added package: example.fhir.ph.core.r4#0.1.0 -2025-07-31 12:34:38,413 - __main__ - DEBUG - Added package: hl7.terminology.r4#6.4.0 -2025-07-31 12:34:38,524 - __main__ - DEBUG - Added package: hl7.fhir.uv.extensions.r4#5.2.0 -2025-07-31 12:34:38,525 - __main__ - DEBUG - Set package choices: [('', 'None'), ('PHCDI.r4#0.1.0', 'PHCDI.r4#0.1.0'), ('example.fhir.ph.core.r4#0.1.0', 'example.fhir.ph.core.r4#0.1.0'), ('fhir.dicom#2022.4.20221006', 'fhir.dicom#2022.4.20221006'), ('hl7.fhir.r4.core#4.0.1', 'hl7.fhir.r4.core#4.0.1'), ('hl7.fhir.uv.extensions.r4#5.2.0', 'hl7.fhir.uv.extensions.r4#5.2.0'), ('hl7.fhir.uv.ips#1.1.0', 'hl7.fhir.uv.ips#1.1.0'), ('hl7.terminology.r4#5.0.0', 'hl7.terminology.r4#5.0.0'), ('hl7.terminology.r4#6.4.0', 'hl7.terminology.r4#6.4.0')] \ No newline at end of file
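
Note: the deleted validation.log above records a POST to /api/validate-sample for an AllergyIntolerance sample against example.fhir.ph.core.r4#0.1.0. A minimal sketch of reproducing that call against a locally running toolkit instance is shown below; the host/port, API key, and sample resource are assumptions for illustration, not values taken from the removed files.

import json
import requests

# Hypothetical sample resource; any AllergyIntolerance instance would do here.
sample = {
    "resourceType": "AllergyIntolerance",
    "id": "example-allergy",
    "patient": {"reference": "Patient/example-patient"},
}

# Endpoint, payload fields, and X-API-Key header mirror the calls made in the
# removed tests/test_app.py; localhost:5000 and the key value are assumed.
resp = requests.post(
    "http://localhost:5000/api/validate-sample",
    json={
        "package_name": "example.fhir.ph.core.r4",
        "version": "0.1.0",
        "sample_data": json.dumps(sample),
        "mode": "single",
        "include_dependencies": True,
    },
    headers={"X-API-Key": "test-api-key"},
    timeout=30,
)
result = resp.json()
print(result.get("valid"), result.get("errors"))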