Mirror of https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit.git (synced 2025-11-05 13:35:15 +00:00)

Merge pull request #21 from Sudo-JHare/Validation_PH_2Pass
2 Pass Validation and Helm Charts

This change is contained in commit ff366fa6ba.

.github/ct/chart-schema.yaml (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
name: str()
home: str()
version: str()
apiVersion: str()
appVersion: any(str(), num(), required=False)
type: str()
dependencies: any(required=False)
description: str()
keywords: list(str(), required=False)
sources: list(str(), required=False)
maintainers: list(include('maintainer'), required=False)
icon: str(required=False)
engine: str(required=False)
condition: str(required=False)
tags: str(required=False)
deprecated: bool(required=False)
kubeVersion: str(required=False)
annotations: map(str(), str(), required=False)
---
maintainer:
  name: str()
  email: str(required=False)
  url: str(required=False)

.github/ct/config.yaml (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
debug: true
remote: origin
chart-yaml-schema: .github/ct/chart-schema.yaml
validate-maintainers: false
validate-chart-schema: true
validate-yaml: true
check-version-increment: true
chart-dirs:
  - charts
helm-extra-args: --timeout 300s
upgrade: true
skip-missing-values: true
release-label: release
release-name-template: "helm-v{{ .Version }}"
target-branch: master

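The checks the CI workflows run against this configuration can also be reproduced locally. A minimal sketch, assuming the chart-testing (ct) CLI and Helm are installed on the workstation (the repository itself does not require them):

# Lint the charts with the schema and version-increment rules configured above.
ct lint --config .github/ct/config.yaml
# List charts that changed relative to the configured target-branch (master).
ct list-changed --config .github/ct/config.yaml
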
.github/workflows/build-images.yaml (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
name: Build Container Images

on:
  push:
    tags:
      - "image/v*"
    paths-ignore:
      - "charts/**"
  pull_request:
    branches: [master]
    paths-ignore:
      - "charts/**"

env:
  IMAGES: docker.io/hapiproject/hapi
  PLATFORMS: linux/amd64,linux/arm64/v8

jobs:
  build:
    name: Build
    runs-on: ubuntu-22.04
    steps:
      - name: Container meta for default (distroless) image
        id: docker_meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGES }}
          tags: |
            type=match,pattern=image/(.*),group=1,enable=${{github.event_name != 'pull_request'}}

      - name: Container meta for tomcat image
        id: docker_tomcat_meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.IMAGES }}
          tags: |
            type=match,pattern=image/(.*),group=1,enable=${{github.event_name != 'pull_request'}}
          flavor: |
            suffix=-tomcat,onlatest=true

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        if: github.event_name != 'pull_request'
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Cache Docker layers
        uses: actions/cache@v3
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Build and push default (distroless) image
        id: docker_build
        uses: docker/build-push-action@v5
        with:
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}
          platforms: ${{ env.PLATFORMS }}
          target: default

      - name: Build and push tomcat image
        id: docker_build_tomcat
        uses: docker/build-push-action@v5
        with:
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.docker_tomcat_meta.outputs.tags }}
          labels: ${{ steps.docker_tomcat_meta.outputs.labels }}
          platforms: ${{ env.PLATFORMS }}
          target: tomcat

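The tags rule type=match,pattern=image/(.*),group=1 derives the published image tag from the pushed git tag, and pushing is only enabled outside pull requests. A small sketch of how a release tag would be cut (the version number is illustrative):

# Pushing a tag under the image/ prefix triggers this workflow;
# docker/metadata-action extracts the part after "image/" (here v1.2.3) as the image tag.
git tag image/v1.2.3
git push origin image/v1.2.3
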
.github/workflows/chart-release.yaml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
name: Release Charts

on:
  push:
    branches:
      - main
    paths:
      - "charts/**"

jobs:
  release:
    runs-on: ubuntu-22.04
    steps:
      - name: Add workspace as safe directory
        run: |
          git config --global --add safe.directory /__w/FHIRFLARE-IG-Toolkit/FHIRFLARE-IG-Toolkit

      - name: Checkout
        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
        with:
          fetch-depth: 0

      - name: Configure Git
        run: |
          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"

      - name: Update dependencies
        run: find charts/ ! -path charts/ -maxdepth 1 -type d -exec helm dependency update {} \;

      - name: Add Helm Repositories
        run: |
          helm repo add hapifhir https://hapifhir.github.io/hapi-fhir-jpaserver-starter/
          helm repo update

      - name: Run chart-releaser
        uses: helm/chart-releaser-action@be16258da8010256c6e82849661221415f031968 # v1.5.0
        with:
          config: .github/ct/config.yaml
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

.github/workflows/chart-test.yaml (vendored, new file, 73 lines)
@@ -0,0 +1,73 @@
name: Lint and Test Charts

on:
  pull_request:
    branches:
      - master
    paths:
      - "charts/**"

jobs:
  lint:
    runs-on: ubuntu-22.04
    container: quay.io/helmpack/chart-testing:v3.11.0@sha256:f2fd21d30b64411105c7eafb1862783236a219d29f2292219a09fe94ca78ad2a
    steps:
      - name: Install helm-docs
        working-directory: /tmp
        env:
          HELM_DOCS_URL: https://github.com/norwoodj/helm-docs/releases/download/v1.14.2/helm-docs_1.14.2_Linux_x86_64.tar.gz
        run: |
          curl -LSs $HELM_DOCS_URL | tar xz && \
          mv ./helm-docs /usr/local/bin/helm-docs && \
          chmod +x /usr/local/bin/helm-docs && \
          helm-docs --version

      - name: Add workspace as safe directory
        run: |
          git config --global --add safe.directory /__w/hapi-fhir-jpaserver-starter/hapi-fhir-jpaserver-starter

      - name: Checkout
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          fetch-depth: 0

      - name: Check if documentation is up-to-date
        run: helm-docs && git diff --exit-code HEAD

      - name: Run chart-testing (lint)
        run: ct lint --config .github/ct/config.yaml

  test:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        k8s-version: [1.30.8, 1.31.4, 1.32.0]
    needs:
      - lint
    steps:
      - name: Checkout
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
        with:
          fetch-depth: 0

      - name: Set up chart-testing
        uses: helm/chart-testing-action@e6669bcd63d7cb57cb4380c33043eebe5d111992 # v2.6.1

      - name: Run chart-testing (list-changed)
        id: list-changed
        run: |
          changed=$(ct list-changed --config .github/ct/config.yaml)
          if [[ -n "$changed" ]]; then
            echo "::set-output name=changed::true"
          fi

      - name: Create k8s Kind Cluster
        uses: helm/kind-action@dda0770415bac9fc20092cacbc54aa298604d140 # v1.8.0
        if: ${{ steps.list-changed.outputs.changed == 'true' }}
        with:
          cluster_name: kind-cluster-k8s-${{ matrix.k8s-version }}
          node_image: kindest/node:v${{ matrix.k8s-version }}

      - name: Run chart-testing (install)
        run: ct install --config .github/ct/config.yaml
        if: ${{ steps.list-changed.outputs.changed == 'true' }}

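The install job spins up a kind cluster for each Kubernetes version in the matrix. A rough local equivalent, assuming kind, Helm and the ct CLI are installed (cluster name and node image version are illustrative):

# Create a disposable cluster, run the same lint/install checks, then tear it down.
kind create cluster --name ct-local --image kindest/node:v1.32.0
ct lint --config .github/ct/config.yaml
ct install --config .github/ct/config.yaml
kind delete cluster --name ct-local
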
.github/workflows/docker-publish.yml (vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
# This workflow builds and pushes a multi-architecture Docker image to GitHub Container Registry (ghcr.io).
#
# The Docker meta step is required because GitHub repository names can contain uppercase letters, but Docker image tags must be lowercase.
# The docker/metadata-action@v5 normalizes the repository name to lowercase, ensuring the build and push steps use a valid image tag.
#
# This workflow builds for both AMD64 and ARM64 architectures using Docker Buildx and QEMU emulation.

name: Build and Push Docker image

on:
  push:
    branches:
      - main
      - '*' # This will run the workflow on any branch
  workflow_dispatch: # This enables manual triggering

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}

      - name: Set normalized image name
        run: |
          if [[ "${{ github.ref_name }}" == "main" ]]; then
            echo "IMAGE_NAME=$(echo ${{ steps.meta.outputs.tags }} | sed 's/:main/:latest/')" >> $GITHUB_ENV
          else
            echo "IMAGE_NAME=${{ steps.meta.outputs.tags }}" >> $GITHUB_ENV
          fi

      - name: Build and push multi-architecture Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./docker/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ env.IMAGE_NAME }}

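The "Set normalized image name" step only rewrites a :main tag to :latest; the lowercasing itself is done by docker/metadata-action, as the header comments explain. A standalone sketch of the same normalization, useful for reproducing the resulting tag outside CI (repository and branch names are illustrative):

# Docker references must be lowercase, so normalize the repository path first,
# then map the main branch to the conventional "latest" tag.
REPO="ghcr.io/Sudo-JHare/FHIRFLARE-IG-Toolkit"
REF_NAME="main"
IMAGE="$(echo "$REPO" | tr '[:upper:]' '[:lower:]'):${REF_NAME}"
if [ "$REF_NAME" = "main" ]; then
  IMAGE="$(echo "$IMAGE" | sed 's/:main$/:latest/')"
fi
echo "$IMAGE"   # ghcr.io/sudo-jhare/fhirflare-ig-toolkit:latest
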
.gitignore (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
/instance/
/logs/
/.pydevproject
/__pycache__/
/myenv/
/tmp/

.project (new file, 23 lines)
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>FHIRFLARE-IG-Toolkit</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.python.pydev.PyDevBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.wst.validation.validationbuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.wst.jsdt.core.jsNature</nature>
        <nature>org.python.pydev.pythonNature</nature>
    </natures>
</projectDescription>

.settings/.jsdtscope (new file, 7 lines)
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.baseBrowserLibrary"/>
    <classpathentry kind="src" path=""/>
    <classpathentry kind="output" path=""/>
</classpath>

.settings/org.eclipse.wst.jsdt.ui.superType.container (new file, 1 line)
@@ -0,0 +1 @@
org.eclipse.wst.jsdt.launching.JRE_CONTAINER

.settings/org.eclipse.wst.jsdt.ui.superType.name (new file, 1 line)
@@ -0,0 +1 @@
Global

@@ -1,211 +0,0 @@
@echo off
setlocal enabledelayedexpansion

REM --- Configuration ---
set REPO_URL=https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git
set CLONE_DIR=hapi-fhir-jpaserver
set SOURCE_CONFIG_DIR=hapi-fhir-setup
set CONFIG_FILE=application.yaml

REM --- Define Paths ---
set SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%
set DEST_CONFIG_PATH=%CLONE_DIR%\target\classes\%CONFIG_FILE%

REM === CORRECTED: Prompt for Version ===
:GetModeChoice
SET "APP_MODE=" REM Clear the variable first
echo Select Installation Mode:
echo 1. Standalone (Includes local HAPI FHIR Server - Requires Git & Maven)
echo 2. Lite (Excludes local HAPI FHIR Server - No Git/Maven needed)
CHOICE /C 12 /N /M "Enter your choice (1 or 2):"

IF ERRORLEVEL 2 (
    SET APP_MODE=lite
    goto :ModeSet
)
IF ERRORLEVEL 1 (
    SET APP_MODE=standalone
    goto :ModeSet
)
REM If somehow neither was chosen (e.g., Ctrl+C), loop back
echo Invalid input. Please try again.
goto :GetModeChoice

:ModeSet
IF "%APP_MODE%"=="" (
    echo Invalid choice detected after checks. Exiting.
    goto :eof
)
echo Selected Mode: %APP_MODE%
echo.
REM === END CORRECTION ===


REM === Conditionally Execute HAPI Setup ===
IF "%APP_MODE%"=="standalone" (
    echo Running Standalone setup including HAPI FHIR...
    echo.

    REM --- Step 0: Clean up previous clone (optional) ---
    echo Checking for existing directory: %CLONE_DIR%
    if exist "%CLONE_DIR%" (
        echo Found existing directory, removing it...
        rmdir /s /q "%CLONE_DIR%"
        if errorlevel 1 (
            echo ERROR: Failed to remove existing directory: %CLONE_DIR%
            goto :error
        )
        echo Existing directory removed.
    ) else (
        echo Directory does not exist, proceeding with clone.
    )
    echo.

    REM --- Step 1: Clone the HAPI FHIR server repository ---
    echo Cloning repository: %REPO_URL% into %CLONE_DIR%...
    git clone "%REPO_URL%" "%CLONE_DIR%"
    if errorlevel 1 (
        echo ERROR: Failed to clone repository. Check Git installation and network connection.
        goto :error
    )
    echo Repository cloned successfully.
    echo.

    REM --- Step 2: Navigate into the cloned directory ---
    echo Changing directory to %CLONE_DIR%...
    cd "%CLONE_DIR%"
    if errorlevel 1 (
        echo ERROR: Failed to change directory to %CLONE_DIR%.
        goto :error
    )
    echo Current directory: %CD%
    echo.

    REM --- Step 3: Build the HAPI server using Maven ---
    echo ===> "Starting Maven build (Step 3)...""
    cmd /c "mvn clean package -DskipTests=true -Pboot"
    echo ===> Maven command finished. Checking error level...
    if errorlevel 1 (
        echo ERROR: Maven build failed or cmd /c failed
        cd ..
        goto :error
    )
    echo Maven build completed successfully. ErrorLevel: %errorlevel%
    echo.

    REM --- Step 4: Copy the configuration file ---
    echo ===> "Starting file copy (Step 4)..."
    echo Copying configuration file...
    echo Source: %SOURCE_CONFIG_PATH%
    echo Destination: target\classes\%CONFIG_FILE%
    xcopy "%SOURCE_CONFIG_PATH%" "target\classes\" /Y /I
    echo ===> xcopy command finished. Checking error level...
    if errorlevel 1 (
        echo WARNING: Failed to copy configuration file. Check if the source file exists.
        echo The script will continue, but the server might use default configuration.
    ) else (
        echo Configuration file copied successfully. ErrorLevel: %errorlevel%
    )
    echo.

    REM --- Step 5: Navigate back to the parent directory ---
    echo ===> "Changing directory back (Step 5)..."
    cd ..
    if errorlevel 1 (
        echo ERROR: Failed to change back to the parent directory. ErrorLevel: %errorlevel%
        goto :error
    )
    echo Current directory: %CD%
    echo.

) ELSE (
    echo Running Lite setup, skipping HAPI FHIR build...
    REM Ensure the hapi-fhir-jpaserver directory doesn't exist or is empty if Lite mode is chosen after a standalone attempt
    if exist "%CLONE_DIR%" (
        echo Found existing HAPI directory in Lite mode. Removing it to avoid build issues...
        rmdir /s /q "%CLONE_DIR%"
    )
    REM Create empty target directories expected by Dockerfile COPY, even if not used
    mkdir "%CLONE_DIR%\target\classes" 2> nul
    mkdir "%CLONE_DIR%\custom" 2> nul
    REM Create a placeholder empty WAR file to satisfy Dockerfile COPY
    echo. > "%CLONE_DIR%\target\ROOT.war"
    echo. > "%CLONE_DIR%\target\classes\application.yaml"
    echo Placeholder files created for Lite mode build.
    echo.
)

REM === Modify docker-compose.yml to set APP_MODE ===
echo Updating docker-compose.yml with APP_MODE=%APP_MODE%...
(
echo version: '3.8'
echo services:
echo   fhirflare:
echo     build:
echo       context: .
echo       dockerfile: Dockerfile
echo     ports:
echo       - "5000:5000"
echo       - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite
echo     volumes:
echo       - ./instance:/app/instance
echo       - ./static/uploads:/app/static/uploads
echo       - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent
echo       - ./logs:/app/logs
echo     environment:
echo       - FLASK_APP=app.py
echo       - FLASK_ENV=development
echo       - NODE_PATH=/usr/lib/node_modules
echo       - APP_MODE=%APP_MODE%
echo       - APP_BASE_URL=http://localhost:5000
echo       - HAPI_FHIR_URL=http://localhost:8080/fhir
echo     command: supervisord -c /etc/supervisord.conf
) > docker-compose.yml.tmp

REM Check if docker-compose.yml.tmp was created successfully
if not exist docker-compose.yml.tmp (
    echo ERROR: Failed to create temporary docker-compose file.
    goto :error
)

REM Replace the original docker-compose.yml
del docker-compose.yml /Q > nul 2>&1
ren docker-compose.yml.tmp docker-compose.yml
echo docker-compose.yml updated successfully.
echo.

REM --- Step 6: Build Docker images ---
echo ===> Starting Docker build (Step 6)...
docker-compose build --no-cache
if errorlevel 1 (
    echo ERROR: Docker Compose build failed. Check Docker installation and docker-compose.yml file. ErrorLevel: %errorlevel%
    goto :error
)
echo Docker images built successfully. ErrorLevel: %errorlevel%
echo.

REM --- Step 7: Start Docker containers ---
echo ===> Starting Docker containers (Step 7)...
docker-compose up -d
if errorlevel 1 (
    echo ERROR: Docker Compose up failed. Check Docker installation and container configurations. ErrorLevel: %errorlevel%
    goto :error
)
echo Docker containers started successfully. ErrorLevel: %errorlevel%
echo.

echo ====================================
echo Script finished successfully! (Mode: %APP_MODE%)
echo ====================================
goto :eof

:error
echo ------------------------------------
echo An error occurred. Script aborted.
echo ------------------------------------
pause
exit /b 1

:eof
echo Script execution finished.
pause

@@ -1,26 +0,0 @@
Docker Commands.MD


<HAPI-server.>
to pull and clone:
git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver

to build:
mvn clean package -DskipTests=true -Pboot

to run:
java -jar target/ROOT.war


<rest-of-the-app:>

docker-compose build --no-cache
docker-compose up -d



<useful-stuff:>

cp <CONTAINERID>:/app/PATH/Filename.ext . - . copies to the root folder you ran it from

docker exec -it <CONTAINERID> bash - to get a bash - session in the container -

Dockerfile (deleted, 57 lines)
@@ -1,57 +0,0 @@
# Base image with Python and Java
FROM tomcat:10.1-jdk17

# Install build dependencies, Node.js 18, and coreutils (for stdbuf)
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 python3-pip python3-venv curl coreutils \
    && curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \
    && apt-get install -y --no-install-recommends nodejs \
    && rm -rf /var/lib/apt/lists/*

# Install specific versions of GoFSH and SUSHI
# REMOVED pip install fhirpath from this line
RUN npm install -g gofsh fsh-sushi

# Set up Python environment
WORKDIR /app
RUN python3 -m venv /app/venv
ENV PATH="/app/venv/bin:$PATH"

# ADDED: Uninstall old fhirpath just in case it's in requirements.txt
RUN pip uninstall -y fhirpath || true
# ADDED: Install the new fhirpathpy library
RUN pip install --no-cache-dir fhirpathpy

# Copy Flask files
COPY requirements.txt .
# Install requirements (including Pydantic - check version compatibility if needed)
RUN pip install --no-cache-dir -r requirements.txt
COPY app.py .
COPY services.py .
COPY forms.py .
COPY package.py .
COPY templates/ templates/
COPY static/ static/
COPY tests/ tests/

# Ensure /tmp, /app/h2-data, /app/static/uploads, and /app/logs are writable
RUN mkdir -p /tmp /app/h2-data /app/static/uploads /app/logs && chmod 777 /tmp /app/h2-data /app/static/uploads /app/logs

# Copy pre-built HAPI WAR and configuration
COPY hapi-fhir-jpaserver/target/ROOT.war /usr/local/tomcat/webapps/
COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/conf/
COPY hapi-fhir-jpaserver/target/classes/application.yaml /app/config/application.yaml
COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/webapps/app/config/application.yaml
COPY hapi-fhir-jpaserver/custom/ /usr/local/tomcat/webapps/custom/

# Install supervisord
RUN pip install supervisor

# Configure supervisord
COPY supervisord.conf /etc/supervisord.conf

# Expose ports
EXPOSE 5000 8080

# Start supervisord
CMD ["supervisord", "-c", "/etc/supervisord.conf"]

Run.bat (deleted, 25 lines)
@@ -1,25 +0,0 @@
REM --- Step 1: Start Docker containers ---
echo ===> Starting Docker containers (Step 7)...
docker-compose up -d
if errorlevel 1 (
    echo ERROR: Docker Compose up failed. Check Docker installation and container configurations. ErrorLevel: %errorlevel%
    goto :error
)
echo Docker containers started successfully. ErrorLevel: %errorlevel%
echo.

echo ====================================
echo Script finished successfully!
echo ====================================
goto :eof

:error
echo ------------------------------------
echo An error occurred. Script aborted.
echo ------------------------------------
pause
exit /b 1

:eof
echo Script execution finished.
pause

app.py (47 changed lines)
@@ -1,6 +1,10 @@
 import sys
 import os
-sys.path.append(os.path.abspath(os.path.dirname(__file__)))
+# Make paths relative to the current directory instead of absolute '/app' paths
+CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
+# Introduce app_dir variable that can be overridden by environment
+app_dir = os.environ.get('APP_DIR', CURRENT_DIR)
+sys.path.append(CURRENT_DIR)
 import datetime
 import shutil
 import queue
@@ -52,16 +56,19 @@ from logging.handlers import RotatingFileHandler
 #app setup
 app = Flask(__name__)
 app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'your-fallback-secret-key-here')
-app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:////app/instance/fhir_ig.db')
+# Update paths to be relative to current directory
+instance_path = os.path.join(CURRENT_DIR, 'instance')
+app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', f'sqlite:///{os.path.join(instance_path, "fhir_ig.db")}')
 app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
-app.config['FHIR_PACKAGES_DIR'] = '/app/instance/fhir_packages'
+app.config['FHIR_PACKAGES_DIR'] = os.path.join(instance_path, 'fhir_packages')
 app.config['API_KEY'] = os.environ.get('API_KEY', 'your-fallback-api-key-here')
 app.config['VALIDATE_IMPOSED_PROFILES'] = True
 app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True
-app.config['UPLOAD_FOLDER'] = '/app/static/uploads' # For GoFSH output
+app.config['UPLOAD_FOLDER'] = os.path.join(CURRENT_DIR, 'static', 'uploads') # For GoFSH output
 app.config['APP_BASE_URL'] = os.environ.get('APP_BASE_URL', 'http://localhost:5000')
 app.config['HAPI_FHIR_URL'] = os.environ.get('HAPI_FHIR_URL', 'http://localhost:8080/fhir')
-CONFIG_PATH = '/usr/local/tomcat/conf/application.yaml'
+CONFIG_PATH = os.environ.get('CONFIG_PATH', '/usr/local/tomcat/conf/application.yaml')

 # Basic Swagger configuration
 app.config['SWAGGER'] = {
@@ -228,6 +235,11 @@ db = SQLAlchemy(app)
 csrf = CSRFProtect(app)
 migrate = Migrate(app, db)

+# Add a global application state dictionary for sharing state between threads
+app_state = {
+    'fetch_failed': False
+}
+
 # @app.route('/clear-cache')
 # def clear_cache():
 #     """Clears the in-memory package cache, the DB timestamp, and the CachedPackage table."""
@@ -695,7 +707,7 @@ def perform_cache_refresh_and_log():
 now_ts = datetime.datetime.now(datetime.timezone.utc)
 app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages
 app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts
-session['fetch_failed'] = fetch_failed # Update session flag reflecting fetch outcome
+app_state['fetch_failed'] = fetch_failed # Update app_state instead of session
 logger.info(f"Updated in-memory cache with {len(normalized_packages)} packages. Fetch failed: {fetch_failed}")

 # 6. Cache in Database (if successful fetch)
@@ -2194,7 +2206,7 @@ def api_upload_test_data():
 if auth_type not in ['none', 'bearerToken', 'basic']:
     return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
 if auth_type == 'bearerToken' and not auth_token:
-    return jsonify({"status": "error", "message": "Bearer Token required."}), 400
+    return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400
 if auth_type == 'basic' and (not username or not password):
     return jsonify({"status": "error", "message": "Username and Password required for Basic Authentication."}), 400
 if upload_mode not in ['individual', 'transaction']:
@@ -2238,7 +2250,7 @@ def api_upload_test_data():

 # --- Prepare Server Info and Options ---
 server_info = {'url': fhir_server_url, 'auth_type': auth_type}
-if auth_type == 'bearerToken':
+if auth_type == 'bearer':
     server_info['auth_token'] = auth_token
 elif auth_type == 'basic':
     credentials = f"{username}:{password}"
@@ -2658,7 +2670,7 @@ def search_and_import():
 raw_packages = fetch_packages_from_registries(search_term='')
 logger.debug(f"fetch_packages_from_registries returned {len(raw_packages)} raw packages.")
 if not raw_packages:
-    logger.warning("fetch_packages_from_registries returned no packages. Handling fallback or empty list.")
+    logger.warning("No packages returned from registries during refresh.")
     normalized_packages = []
     fetch_failed_flag = True
     session['fetch_failed'] = True
@@ -2672,6 +2684,7 @@ def search_and_import():
 now_ts = datetime.datetime.now(datetime.timezone.utc)
 app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages
 app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts
+app_state['fetch_failed'] = False
 logger.info(f"Stored {len(normalized_packages)} packages in manual cache (memory).")

 # Save to CachedPackage table
@@ -2881,10 +2894,18 @@ def safe_parse_version_local(v_str): # Use different name
 elif suffix in ['draft', 'ballot', 'preview']: return pkg_version_local.parse(f"{base_part}b0")
 elif suffix and suffix.startswith('rc'): return pkg_version_local.parse(f"{base_part}rc{ ''.join(filter(str.isdigit, suffix)) or '0'}")
 return pkg_version_local.parse(base_part)
-except pkg_version_local.InvalidVersion: logger_details.warning(f"[DetailsView] Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha."); return pkg_version_local.parse("0.0.0a0")
-except Exception as e: logger_details.error(f"[DetailsView] Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}"); return pkg_version_local.parse("0.0.0a0")
-else: logger_details.warning(f"[DetailsView] Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha."); return pkg_version_local.parse("0.0.0a0")
-except Exception as e: logger_details.error(f"[DetailsView] Unexpected error in safe_parse_version_local for '{v_str}': {e}"); return pkg_version_local.parse("0.0.0a0")
+except pkg_version_local.InvalidVersion:
+    logger_details.warning(f"[DetailsView] Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha.")
+    return pkg_version_local.parse("0.0.0a0")
+except Exception as e:
+    logger_details.error(f"[DetailsView] Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}")
+    return pkg_version_local.parse("0.0.0a0")
+else:
+    logger_details.warning(f"[DetailsView] Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha.")
+    return pkg_version_local.parse("0.0.0a0")
+except Exception as e:
+    logger_details.error(f"[DetailsView] Unexpected error in safe_parse_version_local for '{v_str}': {e}")
+    return pkg_version_local.parse("0.0.0a0")
 # --- End Local Helper Definition ---

 @app.route('/package-details/<name>')

charts/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
/hapi-fhir-jpaserver-0.20.0.tgz

charts/fhirflare-ig-toolkit/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
/rendered/

charts/fhirflare-ig-toolkit/Chart.lock (new file, 3 lines)
@@ -0,0 +1,3 @@
dependencies: []
digest: sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
generated: "2025-08-04T14:30:00.000000000Z"

charts/fhirflare-ig-toolkit/Chart.yaml (new file, 16 lines)
@@ -0,0 +1,16 @@
apiVersion: v2
name: fhirflare-ig-toolkit
version: 0.3.0
description: Helm chart for deploying the fhirflare-ig-toolkit application
type: application
appVersion: "latest"
icon: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit/raw/main/static/FHIRFLARE.png
keywords:
  - fhir
  - healthcare
  - ig-toolkit
  - implementation-guide
home: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit
maintainers:
  - name: Jörn Guy Süß
    email: jgsuess@gmail.com

charts/fhirflare-ig-toolkit/templates/_helpers.tpl (new file, 152 lines)
@@ -0,0 +1,152 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "fhirflare-ig-toolkit.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "fhirflare-ig-toolkit.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}

{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "fhirflare-ig-toolkit.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}

{{/*
Common labels
*/}}
{{- define "fhirflare-ig-toolkit.labels" -}}
helm.sh/chart: {{ include "fhirflare-ig-toolkit.chart" . }}
{{ include "fhirflare-ig-toolkit.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}

{{/*
Selector labels
*/}}
{{- define "fhirflare-ig-toolkit.selectorLabels" -}}
app.kubernetes.io/name: {{ include "fhirflare-ig-toolkit.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}

{{/*
Create the name of the service account to use
*/}}
{{- define "hapi-fhir-jpaserver.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "hapi-fhir-jpaserver.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

{{/*
Create a default fully qualified postgresql name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
*/}}
{{- define "hapi-fhir-jpaserver.postgresql.fullname" -}}
{{- $name := default "postgresql" .Values.postgresql.nameOverride -}}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}}
{{- end -}}

{{/*
Get the Postgresql credentials secret name.
*/}}
{{- define "hapi-fhir-jpaserver.postgresql.secretName" -}}
{{- if .Values.postgresql.enabled -}}
{{- if .Values.postgresql.auth.existingSecret -}}
{{- printf "%s" .Values.postgresql.auth.existingSecret -}}
{{- else -}}
{{- printf "%s" (include "hapi-fhir-jpaserver.postgresql.fullname" .) -}}
{{- end -}}
{{- else }}
{{- if .Values.externalDatabase.existingSecret -}}
{{- printf "%s" .Values.externalDatabase.existingSecret -}}
{{- else -}}
{{ printf "%s-%s" (include "hapi-fhir-jpaserver.fullname" .) "external-db" }}
{{- end -}}
{{- end -}}
{{- end -}}

{{/*
Get the Postgresql credentials secret key.
*/}}
{{- define "hapi-fhir-jpaserver.postgresql.secretKey" -}}
{{- if .Values.postgresql.enabled -}}
{{- if .Values.postgresql.auth.username -}}
{{- printf "%s" .Values.postgresql.auth.secretKeys.userPasswordKey -}}
{{- else -}}
{{- printf "%s" .Values.postgresql.auth.secretKeys.adminPasswordKey -}}
{{- end -}}
{{- else }}
{{- if .Values.externalDatabase.existingSecret -}}
{{- printf "%s" .Values.externalDatabase.existingSecretKey -}}
{{- else -}}
{{- printf "postgres-password" -}}
{{- end -}}
{{- end -}}
{{- end -}}

{{/*
Add environment variables to configure database values
*/}}
{{- define "hapi-fhir-jpaserver.database.host" -}}
{{- ternary (include "hapi-fhir-jpaserver.postgresql.fullname" .) .Values.externalDatabase.host .Values.postgresql.enabled -}}
{{- end -}}

{{/*
Add environment variables to configure database values
*/}}
{{- define "hapi-fhir-jpaserver.database.user" -}}
{{- if .Values.postgresql.enabled -}}
{{- printf "%s" .Values.postgresql.auth.username | default "postgres" -}}
{{- else -}}
{{- printf "%s" .Values.externalDatabase.user -}}
{{- end -}}
{{- end -}}

{{/*
Add environment variables to configure database values
*/}}
{{- define "hapi-fhir-jpaserver.database.name" -}}
{{- ternary .Values.postgresql.auth.database .Values.externalDatabase.database .Values.postgresql.enabled -}}
{{- end -}}

{{/*
Add environment variables to configure database values
*/}}
{{- define "hapi-fhir-jpaserver.database.port" -}}
{{- ternary "5432" .Values.externalDatabase.port .Values.postgresql.enabled -}}
{{- end -}}

{{/*
Create the JDBC URL from the host, port and database name.
*/}}
{{- define "hapi-fhir-jpaserver.database.jdbcUrl" -}}
{{- $host := (include "hapi-fhir-jpaserver.database.host" .) -}}
{{- $port := (include "hapi-fhir-jpaserver.database.port" .) -}}
{{- $name := (include "hapi-fhir-jpaserver.database.name" .) -}}
{{- $appName := .Release.Name -}}
{{ printf "jdbc:postgresql://%s:%d/%s?ApplicationName=%s" $host (int $port) $name $appName }}
{{- end -}}

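To inspect what these helpers and the templates below actually produce, the chart can be rendered without a cluster. A minimal sketch, assuming Helm 3 is available and the command is run from the repository root (the release name is illustrative):

# Render all manifests to stdout and check the generated names, labels and env block.
helm template example charts/fhirflare-ig-toolkit
# Or write the rendered manifests into the chart's (gitignored) rendered/ directory.
helm template example charts/fhirflare-ig-toolkit --output-dir charts/fhirflare-ig-toolkit/rendered
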
charts/fhirflare-ig-toolkit/templates/deployment.yaml (new file, 91 lines)
@@ -0,0 +1,91 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "fhirflare-ig-toolkit.fullname" . }}
  labels:
    {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }}
spec:
  replicas: {{ .Values.replicaCount | default 1 }}
  selector:
    matchLabels:
      {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 6 }}
  strategy:
    type: Recreate
  template:
    metadata:
      labels:
        {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 8 }}
      {{- with .Values.podAnnotations }}
      annotations:
        {{- toYaml . | nindent 8 }}
      {{- end }}
    spec:
      {{- with .Values.imagePullSecrets }}
      imagePullSecrets:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      securityContext:
        {{- toYaml .Values.podSecurityContext | nindent 8 }}
      containers:
        - name: {{ .Chart.Name }}
          securityContext:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          args: ["supervisord", "-c", "/etc/supervisord.conf"]
          env:
            - name: APP_BASE_URL
              value: {{ .Values.config.appBaseUrl | default "http://localhost:5000" | quote }}
            - name: APP_MODE
              value: {{ .Values.config.appMode | default "lite" | quote }}
            - name: FLASK_APP
              value: {{ .Values.config.flaskApp | default "app.py" | quote }}
            - name: FLASK_ENV
              value: {{ .Values.config.flaskEnv | default "development" | quote }}
            - name: HAPI_FHIR_URL
              value: {{ .Values.config.externalHapiServerUrl | default "http://external-hapi-fhir:8080/fhir" | quote }}
            - name: NODE_PATH
              value: {{ .Values.config.nodePath | default "/usr/lib/node_modules" | quote }}
            - name: TMPDIR
              value: "/tmp-dir"
          ports:
            - name: http
              containerPort: {{ .Values.service.port | default 5000 }}
              protocol: TCP
          volumeMounts:
            - name: logs
              mountPath: /app/logs
            - name: tmp-dir
              mountPath: /tmp-dir
          {{- with .Values.resources }}
          resources:
            {{- toYaml . | nindent 12 }}
          {{- end }}
          {{- with .Values.livenessProbe }}
          livenessProbe:
            {{- toYaml . | nindent 12 }}
          {{- end }}
          {{- with .Values.readinessProbe }}
          readinessProbe:
            {{- toYaml . | nindent 12 }}
          {{- end }}
      volumes:
        - name: logs
          emptyDir: {}
        - name: tmp-dir
          emptyDir: {}
      # Always require Intel 64-bit architecture nodes
      nodeSelector:
        kubernetes.io/arch: amd64
        {{- with .Values.nodeSelector }}
        # Merge with user-defined nodeSelectors if any
        {{- toYaml . | nindent 8 }}
        {{- end }}
      {{- with .Values.affinity }}
      affinity:
        {{- toYaml . | nindent 8 }}
      {{- end }}
      {{- with .Values.tolerations }}
      tolerations:
        {{- toYaml . | nindent 8 }}
      {{- end }}

charts/fhirflare-ig-toolkit/templates/ingress.yaml (new file, 36 lines)
@@ -0,0 +1,36 @@
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "fhirflare-ig-toolkit.fullname" . -}}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion }}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion }}
apiVersion: networking.k8s.io/v1beta1
{{- else }}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  rules:
    - http:
        paths:
          - path: /
            {{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion }}
            pathType: Prefix
            backend:
              service:
                name: {{ $fullName }}
                port:
                  number: {{ .Values.service.port | default 5000 }}
            {{- else }}
            backend:
              serviceName: {{ $fullName }}
              servicePort: {{ .Values.service.port | default 5000 }}
            {{- end }}
{{- end }}

charts/fhirflare-ig-toolkit/templates/service.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
apiVersion: v1
kind: Service
metadata:
  name: {{ include "fhirflare-ig-toolkit.fullname" . }}
  labels:
    {{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }}
spec:
  type: {{ .Values.service.type | default "ClusterIP" }}
  ports:
    - name: http
      port: {{ .Values.service.port | default 5000 }}
      targetPort: {{ .Values.service.port | default 5000 }}
      protocol: TCP
      {{- if and (eq .Values.service.type "NodePort") .Values.service.nodePort }}
      nodePort: {{ .Values.service.nodePort }}
      {{- end }}
  selector:
    {{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 4 }}

@@ -0,0 +1,41 @@
apiVersion: v1
kind: Pod
metadata:
  name: "{{ .Release.Name }}-fhirflare-test-endpoint"
  labels:
    helm.sh/chart: "{{ .Chart.Name }}-{{ .Chart.Version }}"
    app.kubernetes.io/name: {{ .Chart.Name }}
    app.kubernetes.io/instance: {{ .Release.Name }}
    app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
    app.kubernetes.io/managed-by: {{ .Release.Service }}
    app.kubernetes.io/component: tests
  annotations:
    "helm.sh/hook": test
spec:
  restartPolicy: Never
  containers:
    - name: test-fhirflare-endpoint
      image: curlimages/curl:8.12.1
      command: ["curl", "--fail-with-body", "--retry", "5", "--retry-delay", "10"]
      args: ["http://fhirflare:5000"]
      securityContext:
        allowPrivilegeEscalation: false
        capabilities:
          drop:
            - ALL
        privileged: false
        readOnlyRootFilesystem: true
        runAsGroup: 65534
        runAsNonRoot: true
        runAsUser: 65534
        seccompProfile:
          type: RuntimeDefault
      resources:
        limits:
          cpu: 150m
          ephemeral-storage: 2Gi
          memory: 192Mi
        requests:
          cpu: 100m
          ephemeral-storage: 50Mi
          memory: 128Mi

charts/fhirflare-ig-toolkit/values.yaml (new file, 89 lines)
@@ -0,0 +1,89 @@
# Default values for fhirflare-ig-toolkit
replicaCount: 1

image:
  repository: ghcr.io/jgsuess/fhirflare-ig-toolkit
  pullPolicy: Always
  tag: "latest"

imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""

# FHIRflare specific configuration
config:
  # Application mode: "lite" means using external HAPI server, "standalone" means running with embedded HAPI server
  appMode: "lite"
  # URL for the external HAPI FHIR server when in lite mode
  externalHapiServerUrl: "http://external-hapi-fhir:8080/fhir"
  appBaseUrl: "http://localhost:5000"
  flaskApp: "app.py"
  flaskEnv: "development"
  nodePath: "/usr/lib/node_modules"

service:
  type: ClusterIP
  port: 5000
  nodePort: null

podAnnotations: {}

# podSecurityContext:
#   fsGroup: 65532
#   fsGroupChangePolicy: OnRootMismatch
#   runAsNonRoot: true
#   runAsGroup: 65532
#   runAsUser: 65532
#   seccompProfile:
#     type: RuntimeDefault

# securityContext:
#   allowPrivilegeEscalation: false
#   capabilities:
#     drop:
#       - ALL
#   privileged: false
#   readOnlyRootFilesystem: true
#   runAsGroup: 65532
#   runAsNonRoot: true
#   runAsUser: 65532
#   seccompProfile:
#     type: RuntimeDefault

resources:
  limits:
    cpu: 500m
    memory: 512Mi
    ephemeral-storage: 1Gi
  requests:
    cpu: 100m
    memory: 128Mi
    ephemeral-storage: 100Mi

livenessProbe:
  httpGet:
    path: /
    port: http
  initialDelaySeconds: 30
  periodSeconds: 10
  timeoutSeconds: 5
  failureThreshold: 6
  successThreshold: 1

readinessProbe:
  httpGet:
    path: /
    port: http
  initialDelaySeconds: 5
  periodSeconds: 10
  timeoutSeconds: 5
  failureThreshold: 6
  successThreshold: 1

nodeSelector: {}
tolerations: []
affinity: {}

ingress:
  # -- whether to create a primitive Ingress to expose the FHIR server HTTP endpoint
  enabled: false

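Any of these defaults can be overridden per install or render without editing the file. A brief sketch (the release name and override values are illustrative):

# Point the toolkit at a different external HAPI FHIR server and expose it via NodePort.
helm template example charts/fhirflare-ig-toolkit \
  --set config.externalHapiServerUrl=http://my-hapi:8080/fhir \
  --set service.type=NodePort \
  --set service.nodePort=30500
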
charts/install.sh (new executable file, 23 lines)
@@ -0,0 +1,23 @@
#!/bin/bash
#
# FHIRFLARE-IG-Toolkit Installation Script
#
# Description:
# This script installs the FHIRFLARE-IG-Toolkit Helm chart into a Kubernetes cluster.
# It adds the FHIRFLARE-IG-Toolkit Helm repository and then installs the chart
# in the 'flare' namespace, creating the namespace if it doesn't exist.
#
# Usage:
# ./install.sh
#
# Requirements:
# - Helm (v3+)
# - kubectl configured with access to your Kubernetes cluster
#

# Add the FHIRFLARE-IG-Toolkit Helm repository
helm repo add flare https://jgsuess.github.io/FHIRFLARE-IG-Toolkit/

# Install the FHIRFLARE-IG-Toolkit chart in the 'flare' namespace

helm install flare/fhirflare-ig-toolkit --namespace flare --create-namespace --generate-name --set hapi-fhir-jpaserver.postgresql.primary.persistence.storageClass=gp2 --atomic

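Once install.sh has run, the release can be checked and exercised with the chart's test hook. A sketch, where <release-name> stands for whatever name --generate-name produced:

# Inspect the generated release, run the bundled helm test pod, and remove the release when done.
helm list --namespace flare
helm test <release-name> --namespace flare
helm uninstall <release-name> --namespace flare
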
@@ -1,22 +0,0 @@
version: '3.8'
services:
  fhirflare:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "5000:5000"
      - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite
    volumes:
      - ./instance:/app/instance
      - ./static/uploads:/app/static/uploads
      - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent
      - ./logs:/app/logs
    environment:
      - FLASK_APP=app.py
      - FLASK_ENV=development
      - NODE_PATH=/usr/lib/node_modules
      - APP_MODE=lite
      - APP_BASE_URL=http://localhost:5000
      - HAPI_FHIR_URL=http://localhost:8080/fhir
    command: supervisord -c /etc/supervisord.conf

22 docker-compose/all-in-one/docker-compose.yml Normal file
@ -0,0 +1,22 @@
# This docker-compose file uses ephemeral Docker named volumes for all data storage.
# These volumes persist only as long as the Docker volumes exist and are deleted if you run `docker-compose down -v`.
# No data is stored on the host filesystem. If you want persistent storage, replace these with host mounts.
services:
  fhirflare-standalone:
    image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest}
    container_name: fhirflare-standalone
    ports:
      - "5000:5000"
      - "8080:8080"
    volumes:
      - fhirflare-instance:/app/instance
      - fhirflare-uploads:/app/static/uploads
      - fhirflare-h2-data:/app/h2-data
      - fhirflare-logs:/app/logs
    restart: unless-stopped

volumes:
  fhirflare-instance:
  fhirflare-uploads:
  fhirflare-h2-data:
  fhirflare-logs:
5 docker-compose/all-in-one/down.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Stop and remove all containers defined in the Docker Compose file,
# along with any anonymous volumes attached to them.
docker compose down --volumes

5 docker-compose/all-in-one/up.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Run Docker Compose

docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps
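Because the image reference in the compose file above is parameterised as `${FHIRFLARE_IMAGE:-...}`, a different image can be supplied without editing the file. A small sketch (the tag shown is illustrative):

```sh
# Pin a specific standalone image for this run (example tag)
FHIRFLARE_IMAGE=ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest ./up.sh
```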
18 docker-compose/lite/local/application.yaml Normal file
@ -0,0 +1,18 @@
hapi.fhir:
  ig_runtime_upload_enabled: false
  narrative_enabled: true
  logical_urls:
    - http://terminology.hl7.org/*
    - https://terminology.hl7.org/*
    - http://snomed.info/*
    - https://snomed.info/*
    - http://unitsofmeasure.org/*
    - https://unitsofmeasure.org/*
    - http://loinc.org/*
    - https://loinc.org/*
  cors:
    allow_Credentials: true
    allowed_origin:
      - '*'
  tester.home.name: FHIRFLARE Tester
  inline_resource_storage_below_size: 4000
50 docker-compose/lite/local/docker-compose.yml Normal file
@ -0,0 +1,50 @@
services:
  fhirflare:
    image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest}
    ports:
      - "5000:5000"
    # Ephemeral Docker named volumes for all data storage. No data is stored on the host filesystem.
    volumes:
      - fhirflare-instance:/app/instance
      - fhirflare-uploads:/app/static/uploads
      - fhirflare-h2-data:/app/h2-data
      - fhirflare-logs:/app/logs
    environment:
      - FLASK_APP=app.py
      - FLASK_ENV=development
      - NODE_PATH=/usr/lib/node_modules
      - APP_MODE=lite
      - APP_BASE_URL=http://localhost:5000
      - HAPI_FHIR_URL=http://fhir:8080/fhir
    command: supervisord -c /etc/supervisord.conf

  fhir:
    container_name: hapi
    image: "hapiproject/hapi:v8.2.0-1"
    ports:
      - "8080:8080"
    configs:
      - source: hapi
        target: /app/config/application.yaml
    depends_on:
      - db

  db:
    image: "postgres:17.2-bookworm"
    restart: always
    environment:
      POSTGRES_PASSWORD: admin
      POSTGRES_USER: admin
      POSTGRES_DB: hapi
    volumes:
      - ./hapi.postgress.data:/var/lib/postgresql/data

configs:
  hapi:
    file: ./application.yaml

volumes:
  fhirflare-instance:
  fhirflare-uploads:
  fhirflare-h2-data:
  fhirflare-logs:
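One way to watch this stack come up, assuming the service names `fhirflare`, `fhir` and `db` from the compose file above and running from this directory:

```sh
# Tail logs for the HAPI FHIR container while it initialises
docker compose logs -f fhir

# Check that all three services are running
docker compose ps
```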
5 docker-compose/lite/local/down.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Stop and remove all containers defined in the Docker Compose file,
# along with any anonymous volumes attached to them.
docker compose down --volumes

19 docker-compose/lite/local/readme.md Normal file
@ -0,0 +1,19 @@
# FHIRFLARE IG Toolkit

This directory provides scripts and configuration to start and stop a FHIRFLARE instance with an attached HAPI FHIR server using Docker Compose.

## Usage

- To start the FHIRFLARE toolkit and HAPI server:
  ```sh
  ./up.sh
  ```

- To stop and remove the containers and volumes:
  ```sh
  ./down.sh
  ```

The web interface will be available at [http://localhost:5000](http://localhost:5000) and the HAPI FHIR server at [http://localhost:8080/fhir](http://localhost:8080/fhir).

For more details, see the configuration files in this directory.
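A quick way to confirm both endpoints respond once the containers are up; the curl calls are illustrative, and `/metadata` is the standard FHIR capability-statement endpoint:

```sh
# Flask web UI
curl -s -o /dev/null -w "%{http_code}\n" http://localhost:5000/

# HAPI FHIR capability statement
curl -s http://localhost:8080/fhir/metadata | head
```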
5 docker-compose/lite/local/up.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Run Docker Compose

docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps
25 docker-compose/lite/remote/docker-compose.yml Normal file
@ -0,0 +1,25 @@
services:
  fhirflare:
    image: ${FHIRFLARE_IMAGE:-ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest}
    ports:
      - "5000:5000"
    # Ephemeral Docker named volumes for all data storage. No data is stored on the host filesystem.
    volumes:
      - fhirflare-instance:/app/instance
      - fhirflare-uploads:/app/static/uploads
      - fhirflare-h2-data:/app/h2-data
      - fhirflare-logs:/app/logs
    environment:
      - FLASK_APP=app.py
      - FLASK_ENV=development
      - NODE_PATH=/usr/lib/node_modules
      - APP_MODE=lite
      - APP_BASE_URL=http://localhost:5000
      - HAPI_FHIR_URL=https://cdr.fhirlab.net/fhir
    command: supervisord -c /etc/supervisord.conf

volumes:
  fhirflare-instance:
  fhirflare-uploads:
  fhirflare-h2-data:
  fhirflare-logs:
5 docker-compose/lite/remote/down.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Stop and remove all containers defined in the Docker Compose file,
# along with any anonymous volumes attached to them.
docker compose down --volumes
19 docker-compose/lite/remote/readme.md Normal file
@ -0,0 +1,19 @@
# FHIRFLARE IG Toolkit

This directory provides scripts and configuration to start and stop a FHIRFLARE instance that uses a remote HAPI FHIR server, using Docker Compose.

## Usage

- To start the FHIRFLARE toolkit:
  ```sh
  ./up.sh
  ```

- To stop and remove the containers and volumes:
  ```sh
  ./down.sh
  ```

The web interface will be available at [http://localhost:5000](http://localhost:5000); the toolkit is configured to use the remote HAPI FHIR server at [https://cdr.fhirlab.net/fhir](https://cdr.fhirlab.net/fhir).

For more details, see the configuration files in this directory.
5 docker-compose/lite/remote/up.sh Executable file
@ -0,0 +1,5 @@
#!/bin/bash

# Run Docker Compose

docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps
66 docker/Dockerfile Normal file
@ -0,0 +1,66 @@
# ------------------------------------------------------------------------------
# Dockerfile for FHIRFLARE-IG-Toolkit (Optimized for Python/Flask)
#
# This Dockerfile builds a container for the FHIRFLARE-IG-Toolkit application.
#
# Key Features:
# - Uses python:3.11-slim as the base image for a minimal, secure Python runtime.
# - Installs Node.js and global NPM packages (gofsh, fsh-sushi) for FHIR IG tooling.
# - Sets up a Python virtual environment and installs all Python dependencies.
# - Installs and configures Supervisor to manage the Flask app and related processes.
# - Copies all necessary application code, templates, static files, and configuration.
# - Exposes ports 5000 (Flask) and 8080 (optional, for compatibility).
# - Entrypoint runs Supervisor for process management.
#
# Notes:
# - The Dockerfile is optimized for Python. Tomcat/Java is not included.
# - Node.js is only installed if needed for FHIR IG tooling.
# - The image is suitable for development and production with minimal changes.
# ------------------------------------------------------------------------------

# Optimized Dockerfile for Python (Flask)
FROM python:3.11-slim AS base

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    coreutils \
    && rm -rf /var/lib/apt/lists/*

# Optional: Install Node.js if needed for GoFSH/SUSHI
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - \
    && apt-get install -y --no-install-recommends nodejs \
    && npm install -g gofsh fsh-sushi \
    && rm -rf /var/lib/apt/lists/*

# Set workdir
WORKDIR /app

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN python -m venv /app/venv \
    && . /app/venv/bin/activate \
    && pip install --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt \
    && pip uninstall -y fhirpath || true \
    && pip install --no-cache-dir fhirpathpy \
    && pip install supervisor

# Copy application files
COPY app.py .
COPY services.py .
COPY forms.py .
COPY package.py .
COPY templates/ templates/
COPY static/ static/
COPY tests/ tests/
COPY supervisord.conf /etc/supervisord.conf

# Expose ports
EXPOSE 5000 8080

# Set environment
ENV PATH="/app/venv/bin:$PATH"

# Start supervisord
CMD ["supervisord", "-c", "/etc/supervisord.conf"]
7 docker/build-docker.sh Executable file
@ -0,0 +1,7 @@
#!/bin/bash
# Build FHIRFLARE-IG-Toolkit Docker image

# Build the image using the Dockerfile in the docker directory
docker build -f Dockerfile -t fhirflare-ig-toolkit:latest ..

echo "Docker image built successfully"
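A sketch of running the freshly built image locally, assuming the script is invoked from the docker/ directory and using the ports exposed by the Dockerfile above:

```sh
# Build from the docker/ directory, then run the image
./build-docker.sh
docker run --rm -p 5000:5000 -p 8080:8080 fhirflare-ig-toolkit:latest
```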
@ -1,111 +0,0 @@
# Application Build and Run Guide - MANUAL STEPS

This guide outlines the steps to set up, build, and run the application, including the HAPI FHIR server component and the rest of the application managed via Docker Compose.

## Prerequisites

Before you begin, ensure you have the following installed on your system:

* [Git](https://git-scm.com/)
* [Maven](https://maven.apache.org/)
* [Java Development Kit (JDK)](https://www.oracle.com/java/technologies/downloads/) (Ensure compatibility with the HAPI FHIR version)
* [Docker](https://www.docker.com/products/docker-desktop/)
* [Docker Compose](https://docs.docker.com/compose/install/) (Often included with Docker Desktop)

## Setup and Build

Follow these steps to clone the necessary repository and build the components.

### 1. Clone and Build the HAPI FHIR Server

First, clone the HAPI FHIR JPA Server Starter project and build the server application.

# Step 1: Clone the repository
git clone https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git hapi-fhir-jpaserver

# Navigate into the cloned directory
cd hapi-fhir-jpaserver

Copy the file hapi-fhir-setup/target/classes/application.yaml into the hapi-fhir-jpaserver/target/classes/ directory created above.

# Step 2: Build the HAPI server package (skipping tests, using 'boot' profile)
# This creates the runnable WAR file in the 'target/' directory
mvn clean package -DskipTests=true -Pboot

# Return to the parent directory (or your project root)
cd ..

### 2. Build the Rest of the Application (Docker)

Next, build the Docker images for the remaining parts of the application as defined in your docker-compose.yml file. Run this command from the root directory where your docker-compose.yml file is located.

# Step 3: Build Docker images without using cache
docker-compose build --no-cache

## Running the Application

### Option A: Running the Full Application (Recommended)

Use Docker Compose to start all services, including (presumably) the HAPI FHIR server if it's configured in your docker-compose.yml. Run this from the root directory containing your docker-compose.yml.

# Step 4: Start all services defined in docker-compose.yml in detached mode
docker-compose up -d

### Option B: Running the HAPI FHIR Server Standalone (Debugging Only)

This method runs only the HAPI FHIR server directly using the built WAR file. Use this primarily for debugging the server in isolation.

# Navigate into the HAPI server directory where you built it
cd hapi-fhir-jpaserver

# Run the WAR file directly using Java
java -jar target/ROOT.war

# Note: You might need to configure ports or database connections
# separately when running this way, depending on the application's needs.

# Remember to navigate back when done
# cd ..

## Useful Docker Commands

Here are some helpful commands for interacting with your running Docker containers:

Copying files from a container:
To copy a file from a running container to your local machine's current directory:

# Syntax: docker cp <CONTAINER_ID_OR_NAME>:<PATH_IN_CONTAINER> <LOCAL_DESTINATION_PATH>
docker cp <CONTAINER_ID>:/app/PATH/Filename.ext .

(Replace <CONTAINER_ID> and /app/PATH/Filename.ext with actual values. `.` refers to the current directory on your host machine.)

Accessing a container's shell:
To get an interactive bash shell inside a running container:

# Syntax: docker exec -it <CONTAINER_ID_OR_NAME> bash
docker exec -it <CONTAINER_ID> bash

(Replace <CONTAINER_ID> with the actual container ID or name. You can find this using docker ps.)

Viewing running containers:

docker ps

Viewing application logs:

# Follow logs for all services
docker-compose logs -f

# Follow logs for a specific service
docker-compose logs -f <SERVICE_NAME>

(Replace <SERVICE_NAME> with the name defined in your docker-compose.yml.)

Stopping the application:
To stop the services started with docker-compose up -d:

docker-compose down
@ -1,342 +0,0 @@
#Uncomment the "servlet" and "context-path" lines below to make the fhir endpoint available at /example/path/fhir instead of the default value of /fhir
server:
  # servlet:
  #   context-path: /example/path
  port: 8080
#Adds the option to go to eg. http://localhost:8080/actuator/health for seeing the running configuration
#see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints
management:
  #The following configuration will enable the actuator endpoints at /actuator/health, /actuator/info, /actuator/prometheus, /actuator/metrics. For security purposes, only /actuator/health is enabled by default.
  endpoints:
    enabled-by-default: false
    web:
      exposure:
        include: 'health' # or e.g. 'info,health,prometheus,metrics' or '*' for all'
  endpoint:
    info:
      enabled: true
    metrics:
      enabled: true
    health:
      enabled: true
      probes:
        enabled: true
      group:
        liveness:
          include:
            - livenessState
            - readinessState
    prometheus:
      enabled: true
  prometheus:
    metrics:
      export:
        enabled: true
spring:
  main:
    allow-circular-references: true
  flyway:
    enabled: false
    baselineOnMigrate: true
    fail-on-missing-locations: false
  datasource:
    #url: 'jdbc:h2:file:./target/database/h2'
    url: jdbc:h2:file:/app/h2-data/fhir;DB_CLOSE_DELAY=-1;AUTO_SERVER=TRUE
    #url: jdbc:h2:mem:test_mem
    username: sa
    password: null
    driverClassName: org.h2.Driver
    max-active: 15

    # database connection pool size
    hikari:
      maximum-pool-size: 10
  jpa:
    properties:
      hibernate.format_sql: false
      hibernate.show_sql: false

      #Hibernate dialect is automatically detected except Postgres and H2.
      #If using H2, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect
      #If using postgres, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgresDialect
      hibernate.dialect: ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect
      # hibernate.hbm2ddl.auto: update
      # hibernate.jdbc.batch_size: 20
      # hibernate.cache.use_query_cache: false
      # hibernate.cache.use_second_level_cache: false
      # hibernate.cache.use_structured_entries: false
      # hibernate.cache.use_minimal_puts: false

      ### These settings will enable fulltext search with lucene or elastic
      hibernate.search.enabled: false
      ### lucene parameters
      # hibernate.search.backend.type: lucene
      # hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiLuceneAnalysisConfigurer
      # hibernate.search.backend.directory.type: local-filesystem
      # hibernate.search.backend.directory.root: target/lucenefiles
      # hibernate.search.backend.lucene_version: lucene_current
      ### elastic parameters ===> see also elasticsearch section below <===
      # hibernate.search.backend.type: elasticsearch
      # hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiElasticAnalysisConfigurer
hapi:
  fhir:
    ### This flag when enabled to true, will avail evaluate measure operations from CR Module.
    ### Flag is false by default, can be passed as command line argument to override.
    cr:
      enabled: false
      caregaps:
        reporter: "default"
        section_author: "default"
      cql:
        use_embedded_libraries: true
        compiler:
          ### These are low-level compiler options.
          ### They are not typically needed by most users.
          # validate_units: true
          # verify_only: false
          # compatibility_level: "1.5"
          error_level: Info
          signature_level: All
          # analyze_data_requirements: false
          # collapse_data_requirements: false
          # translator_format: JSON
          # enable_date_range_optimization: true
          enable_annotations: true
          enable_locators: true
          enable_results_type: true
          enable_detailed_errors: true
          # disable_list_traversal: false
          # disable_list_demotion: false
          # enable_interval_demotion: false
          # enable_interval_promotion: false
          # disable_method_invocation: false
          # require_from_keyword: false
          # disable_default_model_info_load: false
        runtime:
          debug_logging_enabled: false
          # enable_validation: false
          # enable_expression_caching: true
        terminology:
          valueset_preexpansion_mode: REQUIRE # USE_IF_PRESENT, REQUIRE, IGNORE
          valueset_expansion_mode: PERFORM_NAIVE_EXPANSION # AUTO, USE_EXPANSION_OPERATION, PERFORM_NAIVE_EXPANSION
          valueset_membership_mode: USE_EXPANSION # AUTO, USE_VALIDATE_CODE_OPERATION, USE_EXPANSION
          code_lookup_mode: USE_VALIDATE_CODE_OPERATION # AUTO, USE_VALIDATE_CODE_OPERATION, USE_CODESYSTEM_URL
        data:
          search_parameter_mode: USE_SEARCH_PARAMETERS # AUTO, USE_SEARCH_PARAMETERS, FILTER_IN_MEMORY
          terminology_parameter_mode: FILTER_IN_MEMORY # AUTO, USE_VALUE_SET_URL, USE_INLINE_CODES, FILTER_IN_MEMORY
          profile_mode: DECLARED # ENFORCED, DECLARED, OPTIONAL, TRUST, OFF

    cdshooks:
      enabled: false
      clientIdHeaderName: client_id

    ### This enables the swagger-ui at /fhir/swagger-ui/index.html as well as the /fhir/api-docs (see https://hapifhir.io/hapi-fhir/docs/server_plain/openapi.html)
    openapi_enabled: true
    ### This is the FHIR version. Choose between, DSTU2, DSTU3, R4 or R5
    fhir_version: R4
    ### Flag is false by default. This flag enables runtime installation of IG's.
    ig_runtime_upload_enabled: false
    ### This flag when enabled to true, will avail evaluate measure operations from CR Module.

    ### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers
    ### to determine the FHIR server address
    # use_apache_address_strategy: false
    ### forces the use of the https:// protocol for the returned server address.
    ### alternatively, it may be set using the X-Forwarded-Proto header.
    # use_apache_address_strategy_https: false
    ### enables the server to overwrite defaults on HTML, css, etc. under the url pattern of eg. /content/custom **
    ### Folder with custom content MUST be named custom. If omitted then default content applies
    custom_content_path: ./custom
    ### enables the server host custom content. If e.g. the value ./configs/app is supplied then the content
    ### will be served under /web/app
    #app_content_path: ./configs/app
    ### enable to set the Server URL
    # server_address: http://hapi.fhir.org/baseR4
    # defer_indexing_for_codesystems_of_size: 101
    ### Flag is true by default. This flag filters resources during package installation, allowing only those resources with a valid status (e.g. active) to be installed.
    # validate_resource_status_for_package_upload: false
    # install_transitive_ig_dependencies: true
    #implementationguides:
      ### example from registry (packages.fhir.org)
      # swiss:
      #   name: swiss.mednet.fhir
      #   version: 0.8.0
      #   reloadExisting: false
      #   installMode: STORE_AND_INSTALL
      # example not from registry
      # ips_1_0_0:
      #   packageUrl: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz
      #   name: hl7.fhir.uv.ips
      #   version: 1.0.0
    # supported_resource_types:
    #   - Patient
    #   - Observation
    ##################################################
    # Allowed Bundle Types for persistence (defaults are: COLLECTION,DOCUMENT,MESSAGE)
    ##################################################
    # allowed_bundle_types: COLLECTION,DOCUMENT,MESSAGE,TRANSACTION,TRANSACTIONRESPONSE,BATCH,BATCHRESPONSE,HISTORY,SEARCHSET
    # allow_cascading_deletes: true
    # allow_contains_searches: true
    # allow_external_references: true
    # allow_multiple_delete: true
    # allow_override_default_search_params: true
    # auto_create_placeholder_reference_targets: false
    # mass_ingestion_mode_enabled: false
    ### tells the server to automatically append the current version of the target resource to references at these paths
    # auto_version_reference_at_paths: Device.patient, Device.location, Device.parent, DeviceMetric.parent, DeviceMetric.source, Observation.device, Observation.subject
    # ips_enabled: false
    # default_encoding: JSON
    # default_pretty_print: true
    # default_page_size: 20
    # delete_expunge_enabled: true
    # enable_repository_validating_interceptor: true
    # enable_index_missing_fields: false
    # enable_index_of_type: true
    # enable_index_contained_resource: false
    # upliftedRefchains_enabled: true
    # resource_dbhistory_enabled: false
    ### !!Extended Lucene/Elasticsearch Indexing is still a experimental feature, expect some features (e.g. _total=accurate) to not work as expected!!
    ### more information here: https://hapifhir.io/hapi-fhir/docs/server_jpa/elastic.html
    advanced_lucene_indexing: false
    bulk_export_enabled: false
    bulk_import_enabled: false
    # language_search_parameter_enabled: true
    # enforce_referential_integrity_on_delete: false
    # This is an experimental feature, and does not fully support _total and other FHIR features.
    # enforce_referential_integrity_on_delete: false
    # enforce_referential_integrity_on_write: false
    # etag_support_enabled: true
    # expunge_enabled: true
    # client_id_strategy: ALPHANUMERIC
    # server_id_strategy: SEQUENTIAL_NUMERIC
    # fhirpath_interceptor_enabled: false
    # filter_search_enabled: true
    # graphql_enabled: true
    narrative_enabled: true
    mdm_enabled: false
    mdm_rules_json_location: "mdm-rules.json"
    ## see: https://hapifhir.io/hapi-fhir/docs/interceptors/built_in_server_interceptors.html#jpa-server-retry-on-version-conflicts
    # userRequestRetryVersionConflictsInterceptorEnabled : false
    # local_base_urls:
    #   - https://hapi.fhir.org/baseR4
    # pre_expand_value_sets: true
    # enable_task_pre_expand_value_sets: true
    # pre_expand_value_sets_default_count: 1000
    # pre_expand_value_sets_max_count: 1000
    # maximum_expansion_size: 1000

    logical_urls:
      - http://terminology.hl7.org/*
      - https://terminology.hl7.org/*
      - http://snomed.info/*
      - https://snomed.info/*
      - http://unitsofmeasure.org/*
      - https://unitsofmeasure.org/*
      - http://loinc.org/*
      - https://loinc.org/*
    # partitioning:
    #   allow_references_across_partitions: false
    #   partitioning_include_in_search_hashes: false
    #   conditional_create_duplicate_identifiers_enabled: false
    cors:
      allow_Credentials: true
      # These are allowed_origin patterns, see: https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/cors/CorsConfiguration.html#setAllowedOriginPatterns-java.util.List-
      allowed_origin:
        - '*'

    # Search coordinator thread pool sizes
    search-coord-core-pool-size: 20
    search-coord-max-pool-size: 100
    search-coord-queue-capacity: 200

    # Search Prefetch Thresholds.

    # This setting sets the number of search results to prefetch. For example, if this list
    # is set to [100, 1000, -1] then the server will initially load 100 results and not
    # attempt to load more. If the user requests subsequent page(s) of results and goes
    # past 100 results, the system will load the next 900 (up to the following threshold of 1000).
    # The system will progressively work through these thresholds.
    # A threshold of -1 means to load all results. Note that if the final threshold is a
    # number other than -1, the system will never prefetch more than the given number.
    search_prefetch_thresholds: 13,503,2003,-1

    # comma-separated package names, will be @ComponentScan'ed by Spring to allow for creating custom Spring beans
    #custom-bean-packages:

    # comma-separated list of fully qualified interceptor classes.
    # classes listed here will be fetched from the Spring context when combined with 'custom-bean-packages',
    # or will be instantiated via reflection using an no-arg contructor; then registered with the server
    #custom-interceptor-classes:

    # comma-separated list of fully qualified provider classes.
    # classes listed here will be fetched from the Spring context when combined with 'custom-bean-packages',
    # or will be instantiated via reflection using an no-arg contructor; then registered with the server
    #custom-provider-classes:

    # Threadpool size for BATCH'ed GETs in a bundle.
    # bundle_batch_pool_size: 10
    # bundle_batch_pool_max_size: 50

    # logger:
    #   error_format: 'ERROR - ${requestVerb} ${requestUrl}'
    #   format: >-
    #     Path[${servletPath}] Source[${requestHeader.x-forwarded-for}]
    #     Operation[${operationType} ${operationName} ${idOrResourceName}]
    #     UA[${requestHeader.user-agent}] Params[${requestParameters}]
    #     ResponseEncoding[${responseEncodingNoDefault}]
    #   log_exceptions: true
    #   name: fhirtest.access
    # max_binary_size: 104857600
    # max_page_size: 200
    # retain_cached_searches_mins: 60
    # reuse_cached_search_results_millis: 60000
    tester:
      home:
        name: FHIRFLARE Tester
        server_address: http://localhost:8080/fhir
        refuse_to_fetch_third_party_urls: false
        fhir_version: R4
      global:
        name: Global Tester
        server_address: "http://hapi.fhir.org/baseR4"
        refuse_to_fetch_third_party_urls: false
        fhir_version: R4
    # validation:
    #   requests_enabled: true
    #   responses_enabled: true
    # binary_storage_enabled: true
    inline_resource_storage_below_size: 4000
    # bulk_export_enabled: true
    # subscription:
    #   resthook_enabled: true
    #   websocket_enabled: false
    #   polling_interval_ms: 5000
    #   immediately_queued: false
    #   email:
    #     from: some@test.com
    #     host: google.com
    #     port:
    #     username:
    #     password:
    #     auth:
    #     startTlsEnable:
    #     startTlsRequired:
    #     quitWait:
    # lastn_enabled: true
    # store_resource_in_lucene_index_enabled: true
    ### This is configuration for normalized quantity search level default is 0
    ###  0: NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED - default
    ###  1: NORMALIZED_QUANTITY_STORAGE_SUPPORTED
    ###  2: NORMALIZED_QUANTITY_SEARCH_SUPPORTED
    # normalized_quantity_search_level: 2
#elasticsearch:
#  debug:
#    pretty_print_json_log: false
#    refresh_after_write: false
#  enabled: false
#  password: SomePassword
#  required_index_status: YELLOW
#  rest_url: 'localhost:9200'
#  protocol: 'http'
#  schema_management_strategy: CREATE
#  username: SomeUsername
Binary file not shown.
Binary file not shown.
@ -1,22 +0,0 @@
{
    "package_name": "hl7.fhir.au.base",
    "version": "5.1.0-preview",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        },
        {
            "name": "hl7.terminology.r4",
            "version": "6.2.0"
        },
        {
            "name": "hl7.fhir.uv.extensions.r4",
            "version": "5.2.0"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:17.475734+00:00"
}
Binary file not shown.
@ -1,34 +0,0 @@
{
    "package_name": "hl7.fhir.au.core",
    "version": "1.1.0-preview",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        },
        {
            "name": "hl7.terminology.r4",
            "version": "6.2.0"
        },
        {
            "name": "hl7.fhir.uv.extensions.r4",
            "version": "5.2.0"
        },
        {
            "name": "hl7.fhir.au.base",
            "version": "5.1.0-preview"
        },
        {
            "name": "hl7.fhir.uv.smart-app-launch",
            "version": "2.1.0"
        },
        {
            "name": "hl7.fhir.uv.ipa",
            "version": "1.0.0"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:15.067826+00:00"
}
Binary file not shown.
@ -1,9 +0,0 @@
{
    "package_name": "hl7.fhir.r4.core",
    "version": "4.0.1",
    "dependency_mode": "recursive",
    "imported_dependencies": [],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:16.477868+00:00"
}
Binary file not shown.
@ -1,14 +0,0 @@
{
    "package_name": "hl7.fhir.uv.extensions.r4",
    "version": "5.2.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:17.363719+00:00"
}
Binary file not shown.
@ -1,22 +0,0 @@
{
    "package_name": "hl7.fhir.uv.ipa",
    "version": "1.0.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        },
        {
            "name": "hl7.terminology.r4",
            "version": "5.0.0"
        },
        {
            "name": "hl7.fhir.uv.smart-app-launch",
            "version": "2.0.0"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:17.590266+00:00"
}
Binary file not shown.
@ -1,14 +0,0 @@
{
    "package_name": "hl7.fhir.uv.smart-app-launch",
    "version": "2.0.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:18.256800+00:00"
}
Binary file not shown.
@ -1,18 +0,0 @@
{
    "package_name": "hl7.fhir.uv.smart-app-launch",
    "version": "2.1.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        },
        {
            "name": "hl7.terminology.r4",
            "version": "5.0.0"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:17.529611+00:00"
}
Binary file not shown.
@ -1,14 +0,0 @@
{
    "package_name": "hl7.terminology.r4",
    "version": "5.0.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:18.216757+00:00"
}
Binary file not shown.
@ -1,14 +0,0 @@
{
    "package_name": "hl7.terminology.r4",
    "version": "6.2.0",
    "dependency_mode": "recursive",
    "imported_dependencies": [
        {
            "name": "hl7.fhir.r4.core",
            "version": "4.0.1"
        }
    ],
    "complies_with_profiles": [],
    "imposed_profiles": [],
    "timestamp": "2025-05-04T12:29:17.148041+00:00"
}
Binary file not shown.
@ -1,6 +0,0 @@
#FileLock
#Sun May 04 12:29:20 UTC 2025
server=172.18.0.2\:34351
hostName=1913c9e2ec9b
method=file
id=1969b45b76c42f20115290bfabb203a60dc75365e9d
Binary file not shown.
File diff suppressed because it is too large
637 services.py
@ -1087,8 +1087,115 @@ def cache_structure(package_name, package_version, resource_type, view, structur
    except Exception as e:
        logger.error(f"Error caching structure: {e}", exc_info=True)


#----OLD CODE HERE
# def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include_narrative=False, raw=False):
#     """Helper to find and extract StructureDefinition json from a tgz path, prioritizing profile match."""
#     sd_data = None
#     found_path = None
#     if not tgz_path or not os.path.exists(tgz_path):
#         logger.error(f"File not found in find_and_extract_sd: {tgz_path}")
#         return None, None
#     try:
#         with tarfile.open(tgz_path, "r:gz") as tar:
#             logger.debug(f"Searching for SD matching '{resource_identifier}' with profile '{profile_url}' in {os.path.basename(tgz_path)}")
#             potential_matches = []
#             for member in tar:
#                 if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')):
#                     continue
#                 if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']:
#                     continue
#                 fileobj = None
#                 try:
#                     fileobj = tar.extractfile(member)
#                     if fileobj:
#                         content_bytes = fileobj.read()
#                         content_string = content_bytes.decode('utf-8-sig')
#                         data = json.loads(content_string)
#                         if isinstance(data, dict) and data.get('resourceType') == 'StructureDefinition':
#                             sd_id = data.get('id')
#                             sd_name = data.get('name')
#                             sd_type = data.get('type')
#                             sd_url = data.get('url')
#                             sd_filename_base = os.path.splitext(os.path.basename(member.name))[0]
#                             sd_filename_lower = sd_filename_base.lower()
#                             resource_identifier_lower = resource_identifier.lower() if resource_identifier else None
#                             match_score = 0
#                             if profile_url and sd_url == profile_url:
#                                 match_score = 5
#                                 sd_data = remove_narrative(data, include_narrative)
#                                 found_path = member.name
#                                 logger.info(f"Found definitive SD matching profile '{profile_url}' at path: {found_path}")
#                                 break
#                             elif resource_identifier_lower:
#                                 if sd_id and resource_identifier_lower == sd_id.lower():
#                                     match_score = 4
#                                 elif sd_name and resource_identifier_lower == sd_name.lower():
#                                     match_score = 4
#                                 elif sd_filename_lower == f"structuredefinition-{resource_identifier_lower}":
#                                     match_score = 3
#                                 elif sd_type and resource_identifier_lower == sd_type.lower() and not re.search(r'[-.]', resource_identifier):
#                                     match_score = 2
#                                 elif resource_identifier_lower in sd_filename_lower:
#                                     match_score = 1
#                                 elif sd_url and resource_identifier_lower in sd_url.lower():
#                                     match_score = 1
#                             if match_score > 0:
#                                 potential_matches.append((match_score, remove_narrative(data, include_narrative), member.name))
#                                 if match_score >= 3:
#                                     sd_data = remove_narrative(data, include_narrative)
#                                     found_path = member.name
#                                     break
#                 except json.JSONDecodeError as e:
#                     logger.debug(f"Could not parse JSON in {member.name}, skipping: {e}")
#                 except UnicodeDecodeError as e:
#                     logger.warning(f"Could not decode UTF-8 in {member.name}, skipping: {e}")
#                 except tarfile.TarError as e:
#                     logger.warning(f"Tar error reading member {member.name}, skipping: {e}")
#                 except Exception as e:
#                     logger.warning(f"Could not read/parse potential SD {member.name}, skipping: {e}")
#                 finally:
#                     if fileobj:
#                         fileobj.close()
#             if not sd_data and potential_matches:
#                 potential_matches.sort(key=lambda x: x[0], reverse=True)
#                 best_match = potential_matches[0]
#                 sd_data = best_match[1]
#                 found_path = best_match[2]
#                 logger.info(f"Selected best match for '{resource_identifier}' from potential matches (Score: {best_match[0]}): {found_path}")
#         if sd_data is None:
#             logger.info(f"SD matching identifier '{resource_identifier}' or profile '{profile_url}' not found within archive {os.path.basename(tgz_path)}")
#         elif raw:
#             # Return the full, unprocessed StructureDefinition JSON
#             with tarfile.open(tgz_path, "r:gz") as tar:
#                 fileobj = tar.extractfile(found_path)
#                 content_bytes = fileobj.read()
#                 content_string = content_bytes.decode('utf-8-sig')
#                 raw_data = json.loads(content_string)
#                 return remove_narrative(raw_data, include_narrative), found_path
#     except tarfile.ReadError as e:
#         logger.error(f"Tar ReadError reading {tgz_path}: {e}")
#         return None, None
#     except tarfile.TarError as e:
#         logger.error(f"TarError reading {tgz_path} in find_and_extract_sd: {e}")
#         raise
#     except FileNotFoundError:
#         logger.error(f"FileNotFoundError reading {tgz_path} in find_and_extract_sd.")
#         raise
#     except Exception as e:
#         logger.error(f"Unexpected error in find_and_extract_sd for {tgz_path}: {e}", exc_info=True)
#         raise
#     return sd_data, found_path
#--- OLD

# --- UPDATED: find_and_extract_sd function ---
def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include_narrative=False, raw=False):
    """
    Helper to find and extract StructureDefinition json from a tgz path, prioritizing profile match.

    This version includes logic to handle canonical URLs with version numbers (e.g., `|5.2.0`)
    and to prioritize a direct profile URL match.
    """
    sd_data = None
    found_path = None
    if not tgz_path or not os.path.exists(tgz_path):
@ -1098,6 +1205,11 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include
        with tarfile.open(tgz_path, "r:gz") as tar:
            logger.debug(f"Searching for SD matching '{resource_identifier}' with profile '{profile_url}' in {os.path.basename(tgz_path)}")
            potential_matches = []

            # --- Work Item 3: Sanitize profile URL to strip version ---
            clean_profile_url = profile_url.split('|')[0] if profile_url else None
            logger.debug(f"Cleaned profile URL for search: '{clean_profile_url}'")

            for member in tar:
                if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')):
                    continue
@ -1119,12 +1231,15 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include
                            sd_filename_lower = sd_filename_base.lower()
                            resource_identifier_lower = resource_identifier.lower() if resource_identifier else None
                            match_score = 0
                            # --- Prioritize exact match on the canonical URL (without version) ---
                            if clean_profile_url and sd_url == clean_profile_url:
                                match_score = 5
                                sd_data = remove_narrative(data, include_narrative)
                                found_path = member.name
                                logger.info(f"Found definitive SD matching profile '{clean_profile_url}' at path: {found_path}")
                                break

                            elif resource_identifier_lower:
                                if sd_id and resource_identifier_lower == sd_id.lower():
                                    match_score = 4
@ -1132,6 +1247,7 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include
                                    match_score = 4
                                elif sd_filename_lower == f"structuredefinition-{resource_identifier_lower}":
                                    match_score = 3
                                # --- Work Item 2: Score match on resourceType for fallback logic ---
                                elif sd_type and resource_identifier_lower == sd_type.lower() and not re.search(r'[-.]', resource_identifier):
                                    match_score = 2
                                elif resource_identifier_lower in sd_filename_lower:
@ -1164,7 +1280,6 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include
        if sd_data is None:
            logger.info(f"SD matching identifier '{resource_identifier}' or profile '{profile_url}' not found within archive {os.path.basename(tgz_path)}")
        elif raw:
            with tarfile.open(tgz_path, "r:gz") as tar:
                fileobj = tar.extractfile(found_path)
                content_bytes = fileobj.read()
@ -1185,6 +1300,8 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include
        raise
    return sd_data, found_path


# --- Metadata Saving/Loading ---
def save_package_metadata(name, version, dependency_mode, dependencies, complies_with_profiles=None, imposed_profiles=None):
    """Saves dependency mode, imported dependencies, and profile relationships as metadata."""
@ -1871,8 +1988,240 @@ def _legacy_validate_resource_against_profile(package_name, version, resource, i
    }
    logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
    return result

# -- OLD
# def validate_resource_against_profile(package_name, version, resource, include_dependencies=True):
#     result = {
#         'valid': True,
#         'errors': [],
#         'warnings': [],
#         'details': [],
#         'resource_type': resource.get('resourceType'),
#         'resource_id': resource.get('id', 'unknown'),
#         'profile': resource.get('meta', {}).get('profile', [None])[0]
#     }

#     # Attempt HAPI validation if a profile is specified
#     if result['profile']:
#         try:
#             hapi_url = f"{current_app.config['HAPI_FHIR_URL'].rstrip('/')}/{resource['resourceType']}/$validate?profile={result['profile']}"
#             response = requests.post(
#                 hapi_url,
#                 json=resource,
#                 headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
#                 timeout=10
#             )
#             response.raise_for_status()
#             outcome = response.json()
#             if outcome.get('resourceType') == 'OperationOutcome':
#                 for issue in outcome.get('issue', []):
#                     severity = issue.get('severity')
#                     diagnostics = issue.get('diagnostics', issue.get('details', {}).get('text', 'No details provided'))
#                     detail = {
#                         'issue': diagnostics,
#                         'severity': severity,
#                         'description': issue.get('details', {}).get('text', diagnostics)
#                     }
#                     if severity in ['error', 'fatal']:
#                         result['valid'] = False
#                         result['errors'].append(diagnostics)
#                     elif severity == 'warning':
#                         result['warnings'].append(diagnostics)
#                     result['details'].append(detail)
#                 result['summary'] = {
#                     'error_count': len(result['errors']),
#                     'warning_count': len(result['warnings'])
#                 }
#                 logger.debug(f"HAPI validation for {result['resource_type']}/{result['resource_id']}: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
#                 return result
#             else:
#                 logger.warning(f"HAPI returned non-OperationOutcome: {outcome.get('resourceType')}")
#         except requests.RequestException as e:
#             logger.error(f"HAPI validation failed for {result['resource_type']}/{result['resource_id']}: {e}")
#             result['details'].append({
#                 'issue': f"HAPI validation failed: {str(e)}",
#                 'severity': 'warning',
#                 'description': 'Falling back to local validation due to HAPI server error.'
#             })

#     # Fallback to local validation
#     download_dir = _get_download_dir()
#     if not download_dir:
#         result['valid'] = False
#         result['errors'].append("Could not access download directory")
#         result['details'].append({
#             'issue': "Could not access download directory",
#             'severity': 'error',
#             'description': "The server could not locate the directory where FHIR packages are stored."
#         })
#         return result

#     tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version))
#     sd_data, sd_path = find_and_extract_sd(tgz_path, resource.get('resourceType'), result['profile'])
#     if not sd_data:
#         result['valid'] = False
#         result['errors'].append(f"No StructureDefinition found for {resource.get('resourceType')}")
#         result['details'].append({
#             'issue': f"No StructureDefinition found for {resource.get('resourceType')}",
#             'severity': 'error',
#             'description': f"The package {package_name}#{version} does not contain a matching StructureDefinition."
#         })
#         return result

#     elements = sd_data.get('snapshot', {}).get('element', [])
#     for element in elements:
#         path = element.get('path')
#         min_val = element.get('min', 0)
#         must_support = element.get('mustSupport', False)
#         slicing = element.get('slicing')
#         slice_name = element.get('sliceName')

#         # Check required elements
#         if min_val > 0:
#             value = navigate_fhir_path(resource, path)
#             if value is None or (isinstance(value, list) and not any(value)):
#                 result['valid'] = False
#                 result['errors'].append(f"Required element {path} missing")
#                 result['details'].append({
#                     'issue': f"Required element {path} missing",
#                     'severity': 'error',
#                     'description': f"Element {path} has min={min_val} in profile {result['profile'] or 'unknown'}"
#                 })

#         # Check must-support elements
#         if must_support:
#             value = navigate_fhir_path(resource, slice_name if slice_name else path)
#             if value is None or (isinstance(value, list) and not any(value)):
#                 result['warnings'].append(f"Must Support element {path} missing or empty")
#                 result['details'].append({
#                     'issue': f"Must Support element {path} missing or empty",
#                     'severity': 'warning',
#                     'description': f"Element {path} is marked as Must Support in profile {result['profile'] or 'unknown'}"
#                 })

#         # Validate slicing
#         if slicing and not slice_name: # Parent slicing element
#             discriminator = slicing.get('discriminator', [])
#             for d in discriminator:
#                 d_type = d.get('type')
#                 d_path = d.get('path')
#                 if d_type == 'value':
#                     sliced_elements = navigate_fhir_path(resource, path)
#                     if isinstance(sliced_elements, list):
#                         seen_values = set()
#                         for elem in sliced_elements:
#                             d_value = navigate_fhir_path(elem, d_path)
#                             if d_value in seen_values:
#                                 result['valid'] = False
#                                 result['errors'].append(f"Duplicate discriminator value {d_value} for {path}.{d_path}")
#                             seen_values.add(d_value)
#                 elif d_type == 'type':
#                     sliced_elements = navigate_fhir_path(resource, path)
#                     if isinstance(sliced_elements, list):
#                         for elem in sliced_elements:
#                             if not navigate_fhir_path(elem, d_path):
#                                 result['valid'] = False
#                                 result['errors'].append(f"Missing discriminator type {d_path} for {path}")

#     result['summary'] = {
#         'error_count': len(result['errors']),
#         'warning_count': len(result['warnings'])
#     }
#     return result

# def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True):
#     """Validates a FHIR Bundle against profiles in the specified package."""
#     logger.debug(f"Validating bundle against {package_name}#{version}, include_dependencies={include_dependencies}")
#     result = {
#         'valid': True,
#         'errors': [],
#         'warnings': [],
#         'details': [],
#         'results': {},
#         'summary': {
#             'resource_count': 0,
#             'failed_resources': 0,
#             'profiles_validated': set()
#         }
#     }
#     if not bundle.get('resourceType') == 'Bundle':
#         result['valid'] = False
#         result['errors'].append("Resource is not a Bundle")
#         result['details'].append({
#             'issue': "Resource is not a Bundle",
#             'severity': 'error',
#             'description': "The provided resource must have resourceType 'Bundle' to be validated as a bundle."
#         })
#         logger.error("Validation failed: Resource is not a Bundle")
#         return result

#     # Track references to validate resolvability
#     references = set()
|
||||||
|
# resolved_references = set()
|
||||||
|
|
||||||
|
# for entry in bundle.get('entry', []):
|
||||||
|
# resource = entry.get('resource')
|
||||||
|
# if not resource:
|
||||||
|
# continue
|
||||||
|
# resource_type = resource.get('resourceType')
|
||||||
|
# resource_id = resource.get('id', 'unknown')
|
||||||
|
# result['summary']['resource_count'] += 1
|
||||||
|
|
||||||
|
# # Collect references
|
||||||
|
# for key, value in resource.items():
|
||||||
|
# if isinstance(value, dict) and 'reference' in value:
|
||||||
|
# references.add(value['reference'])
|
||||||
|
# elif isinstance(value, list):
|
||||||
|
# for item in value:
|
||||||
|
# if isinstance(item, dict) and 'reference' in item:
|
||||||
|
# references.add(item['reference'])
|
||||||
|
|
||||||
|
# # Validate resource
|
||||||
|
# validation_result = validate_resource_against_profile(package_name, version, resource, include_dependencies)
|
||||||
|
# result['results'][f"{resource_type}/{resource_id}"] = validation_result
|
||||||
|
# result['summary']['profiles_validated'].add(validation_result['profile'] or 'unknown')
|
||||||
|
|
||||||
|
# # Aggregate errors and warnings
|
||||||
|
# if not validation_result['valid']:
|
||||||
|
# result['valid'] = False
|
||||||
|
# result['summary']['failed_resources'] += 1
|
||||||
|
# result['errors'].extend(validation_result['errors'])
|
||||||
|
# result['warnings'].extend(validation_result['warnings'])
|
||||||
|
# result['details'].extend(validation_result['details'])
|
||||||
|
|
||||||
|
# # Mark resource as resolved if it has an ID
|
||||||
|
# if resource_id != 'unknown':
|
||||||
|
# resolved_references.add(f"{resource_type}/{resource_id}")
|
||||||
|
|
||||||
|
# # Check for unresolved references
|
||||||
|
# unresolved = references - resolved_references
|
||||||
|
# for ref in unresolved:
|
||||||
|
# warning_msg = f"Unresolved reference: {ref}"
|
||||||
|
# result['warnings'].append(warning_msg)
|
||||||
|
# result['details'].append({
|
||||||
|
# 'issue': warning_msg,
|
||||||
|
# 'severity': 'warning',
|
||||||
|
# 'description': f"The reference {ref} points to a resource not included in the bundle. Ensure the referenced resource is present or resolvable."
|
||||||
|
# })
|
||||||
|
# logger.info(f"Validation warning: Unresolved reference {ref}")
|
||||||
|
|
||||||
|
# # Finalize summary
|
||||||
|
# result['summary']['profiles_validated'] = list(result['summary']['profiles_validated'])
|
||||||
|
# result['summary']['error_count'] = len(result['errors'])
|
||||||
|
# result['summary']['warning_count'] = len(result['warnings'])
|
||||||
|
# logger.debug(f"Bundle validation result: valid={result['valid']}, errors={result['summary']['error_count']}, warnings={result['summary']['warning_count']}, resources={result['summary']['resource_count']}")
|
||||||
|
# return result
|
||||||
|
# -- OLD
|
||||||
|
|
||||||
|
|
||||||
|
# --- UPDATED: validate_resource_against_profile function ---
|
||||||
def validate_resource_against_profile(package_name, version, resource, include_dependencies=True):
    """
    Validates a FHIR resource against a StructureDefinition in the specified package.

    This version correctly handles the absence of a `meta.profile` by falling back
    to the base resource definition. It also sanitizes profile URLs to avoid
    version mismatch errors.
    """
    result = {
        'valid': True,
        'errors': [],
@@ -1883,50 +2232,6 @@ def validate_resource_against_profile(package_name, version, resource, include_d
        'profile': resource.get('meta', {}).get('profile', [None])[0]
    }
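The profile handling described in the docstring strips a `|version` suffix before the StructureDefinition lookup and falls back to the bare resourceType when `meta.profile` is absent. A minimal standalone sketch of that selection logic (the function name here is chosen only for illustration):

def select_validation_target(resource):
    """Pick the identifier and cleaned profile URL used for the SD lookup."""
    profile_url = resource.get('meta', {}).get('profile', [None])[0]
    if profile_url:
        # 'http://example.org/StructureDefinition/my-patient|1.0.0' becomes
        # '.../my-patient', so a version suffix cannot cause a lookup mismatch.
        return profile_url, profile_url.split('|')[0]
    # No declared profile: validate against the base resource definition.
    return resource.get('resourceType'), None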
    # Attempt HAPI validation if a profile is specified
    if result['profile']:
        try:
            hapi_url = f"{current_app.config['HAPI_FHIR_URL'].rstrip('/')}/{resource['resourceType']}/$validate?profile={result['profile']}"
            response = requests.post(
                hapi_url,
                json=resource,
                headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
                timeout=10
            )
            response.raise_for_status()
            outcome = response.json()
            if outcome.get('resourceType') == 'OperationOutcome':
                for issue in outcome.get('issue', []):
                    severity = issue.get('severity')
                    diagnostics = issue.get('diagnostics', issue.get('details', {}).get('text', 'No details provided'))
                    detail = {
                        'issue': diagnostics,
                        'severity': severity,
                        'description': issue.get('details', {}).get('text', diagnostics)
                    }
                    if severity in ['error', 'fatal']:
                        result['valid'] = False
                        result['errors'].append(diagnostics)
                    elif severity == 'warning':
                        result['warnings'].append(diagnostics)
                    result['details'].append(detail)
                result['summary'] = {
                    'error_count': len(result['errors']),
                    'warning_count': len(result['warnings'])
                }
                logger.debug(f"HAPI validation for {result['resource_type']}/{result['resource_id']}: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
                return result
            else:
                logger.warning(f"HAPI returned non-OperationOutcome: {outcome.get('resourceType')}")
        except requests.RequestException as e:
            logger.error(f"HAPI validation failed for {result['resource_type']}/{result['resource_id']}: {e}")
            result['details'].append({
                'issue': f"HAPI validation failed: {str(e)}",
                'severity': 'warning',
                'description': 'Falling back to local validation due to HAPI server error.'
            })

    # Fallback to local validation
    download_dir = _get_download_dir()
    if not download_dir:
        result['valid'] = False
@@ -1936,83 +2241,171 @@ def validate_resource_against_profile(package_name, version, resource, include_d
            'severity': 'error',
            'description': "The server could not locate the directory where FHIR packages are stored."
        })
        logger.error("Validation failed: Could not access download directory")
        return result

    # --- Work Item 3 & 2: Get profile URL or fallback to resourceType ---
    profile_url = result['profile']
    resource_identifier = resource.get('resourceType')

    if profile_url:
        # Sanitize profile URL to remove version
        clean_profile_url = profile_url.split('|')[0]
        logger.debug(f"Using provided profile: {profile_url}. Cleaned to: {clean_profile_url}")
        resource_identifier = profile_url
    else:
        # No profile provided, fallback to resource type
        logger.debug(f"No profile in resource, using base type as identifier: {resource_identifier}")
        clean_profile_url = None
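The package archive is located on disk by a conventional `<name>-<version>.tgz` filename (the test suite later in this diff creates fixtures such as hl7.fhir.us.core-6.1.0.tgz), so `construct_tgz_filename` is assumed to behave roughly like this sketch; the directory shown is illustrative only:

import os

def construct_tgz_filename(package_name, version):
    # Assumed naming convention, matching fixtures like 'hl7.fhir.us.core-6.1.0.tgz'.
    return f"{package_name}-{version}.tgz"

# Illustrative download directory; the real value comes from _get_download_dir().
tgz_path = os.path.join('/app/instance/fhir_packages', construct_tgz_filename('hl7.fhir.us.core', '6.1.0'))
# -> /app/instance/fhir_packages/hl7.fhir.us.core-6.1.0.tgz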
    tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version))
    logger.debug(f"Checking for package file: {tgz_path}")

    # Find StructureDefinition
    sd_data, sd_path = find_and_extract_sd(tgz_path, resource_identifier, clean_profile_url)

    if not sd_data and include_dependencies:
        logger.debug(f"SD not found in {package_name}#{version}. Checking dependencies.")
        try:
            with tarfile.open(tgz_path, "r:gz") as tar:
                package_json_member = None
                for member in tar:
                    if member.name == 'package/package.json':
                        package_json_member = member
                        break
                if package_json_member:
                    fileobj = tar.extractfile(package_json_member)
                    pkg_data = json.load(fileobj)
                    fileobj.close()
                    dependencies = pkg_data.get('dependencies', {})
                    logger.debug(f"Found dependencies: {dependencies}")
                    for dep_name, dep_version in dependencies.items():
                        dep_tgz = os.path.join(download_dir, construct_tgz_filename(dep_name, dep_version))
                        if os.path.exists(dep_tgz):
                            logger.debug(f"Searching SD in dependency {dep_name}#{dep_version}")
                            sd_data, sd_path = find_and_extract_sd(dep_tgz, resource_identifier, clean_profile_url)
                            if sd_data:
                                logger.info(f"Found SD in dependency {dep_name}#{dep_version} at {sd_path}")
                                break
                        else:
                            logger.warning(f"Dependency package {dep_name}#{dep_version} not found at {dep_tgz}")
                else:
                    logger.warning(f"No package.json found in {tgz_path}")
        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse package.json in {tgz_path}: {e}")
        except tarfile.TarError as e:
            logger.error(f"Failed to read {tgz_path} while checking dependencies: {e}")
        except Exception as e:
            logger.error(f"Unexpected error while checking dependencies in {tgz_path}: {e}")
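The dependency walk above relies on the `package/package.json` manifest that FHIR NPM-style packages carry; its `dependencies` map is keyed by package name with a version string. A trimmed example of its shape, with illustrative values:

# Shape of package/package.json inside a FHIR package archive (illustrative values).
pkg_data = {
    "name": "hl7.fhir.au.core",
    "version": "1.0.0",
    "dependencies": {
        "hl7.fhir.r4.core": "4.0.1",
        "hl7.terminology.r4": "5.0.0",
    },
}
# The fallback search turns each entry into '<name>-<version>.tgz' and retries
# find_and_extract_sd against that archive.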
    if not sd_data:
        result['valid'] = False
        result['errors'].append(f"No StructureDefinition found for {resource_identifier} with profile {clean_profile_url or 'any'}")
        result['details'].append({
            'issue': f"No StructureDefinition found for {resource_identifier} with profile {clean_profile_url or 'any'}",
            'severity': 'error',
            'description': f"The package {package_name}#{version} (and dependencies, if checked) does not contain a matching StructureDefinition."
        })
        logger.error(f"Validation failed: No SD for {resource_identifier} in {tgz_path}")
        return result
    logger.debug(f"Found SD at {sd_path}")

    # Validate required elements (min=1)
    errors = []
    warnings = set()
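The loop below walks the StructureDefinition's `snapshot.element` entries; each entry carries the path, cardinality and Must Support flag that drive the checks. One such element, abbreviated:

# One entry from StructureDefinition.snapshot.element (abbreviated).
element = {
    "path": "Patient.identifier",
    "min": 1,                # cardinality lower bound -> required-element check
    "max": "*",
    "mustSupport": True,     # -> Must Support warning when missing/empty
    "definition": "An identifier for this patient.",
}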
    elements = sd_data.get('snapshot', {}).get('element', [])
    for element in elements:
        path = element.get('path')
        min_val = element.get('min', 0)
        must_support = element.get('mustSupport', False)
        definition = element.get('definition', 'No definition provided in StructureDefinition.')

        # Check required elements (top-level paths only)
        if min_val > 0 and ('.' not in path[1 + path.find('.'):] if path.find('.') != -1 else True):
            value = navigate_fhir_path(resource, path)
            if value is None or (isinstance(value, list) and not any(value)):
                error_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Required element {path} missing"
                errors.append(error_msg)
                result['details'].append({
                    'issue': error_msg,
                    'severity': 'error',
                    'description': f"{definition} This element is mandatory (min={min_val}) per the profile {profile_url or 'unknown'}."
                })
                logger.info(f"Validation error: Required element {path} missing")

        # Check must-support elements (top-level paths only)
        if must_support and ('.' not in path[1 + path.find('.'):] if path.find('.') != -1 else True):
            if '[x]' in path:
                base_path = path.replace('[x]', '')
                found = False
                for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']:
                    test_path = f"{base_path}{suffix}"
                    value = navigate_fhir_path(resource, test_path)
                    if value is not None and (not isinstance(value, list) or any(value)):
                        found = True
                        break
                if not found:
                    warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty"
                    warnings.add(warning_msg)
                    result['details'].append({
                        'issue': warning_msg,
                        'severity': 'warning',
                        'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)."
                    })
                    logger.info(f"Validation warning: Must Support element {path} missing or empty")
            else:
                value = navigate_fhir_path(resource, path)
                if value is None or (isinstance(value, list) and not any(value)):
                    if element.get('min', 0) == 0:
                        warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty"
                        warnings.add(warning_msg)
                        result['details'].append({
                            'issue': warning_msg,
                            'severity': 'warning',
                            'description': f"{definition} This element is marked as Must Support in AU Core, meaning it should be populated if the data is available (e.g., phone or email for Patient.telecom)."
                        })
                        logger.info(f"Validation warning: Must Support element {path} missing or empty")
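For choice-type Must Support elements the profile path uses the `[x]` placeholder, so the check above probes a fixed set of concrete suffixes rather than the literal path. Roughly, for an Observation value:

# 'Observation.value[x]' is probed as the concrete choice-type paths:
base_path = 'Observation.value[x]'.replace('[x]', '')   # 'Observation.value'
candidates = [f"{base_path}{s}" for s in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']]
# ['Observation.valueQuantity', 'Observation.valueCodeableConcept', ...]
# The element counts as present if any candidate resolves to a non-empty value.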
        # Handle dataAbsentReason for must-support elements
        if path.endswith('dataAbsentReason') and must_support:
            value_x_path = path.replace('dataAbsentReason', 'value[x]')
            value_found = False
            for suffix in ['Quantity', 'CodeableConcept', 'String', 'DateTime', 'Period', 'Range']:
                test_path = path.replace('dataAbsentReason', f'value{suffix}')
                value = navigate_fhir_path(resource, test_path)
                if value is not None and (not isinstance(value, list) or any(value)):
                    value_found = True
                    break
            if not value_found:
                value = navigate_fhir_path(resource, path)
                if value is None or (isinstance(value, list) and not any(value)):
                    warning_msg = f"{resource.get('resourceType')}/{resource.get('id', 'unknown')}: Must Support element {path} missing or empty"
                    warnings.add(warning_msg)
                    result['details'].append({
                        'issue': warning_msg,
                        'severity': 'warning',
                        'description': f"{definition} This element is marked as Must Support and should be used to indicate why the associated value is absent."
                    })
                    logger.info(f"Validation warning: Must Support element {path} missing or empty")

    result['errors'] = errors
    result['warnings'] = list(warnings)
    result['valid'] = len(errors) == 0
    result['summary'] = {
        'error_count': len(errors),
        'warning_count': len(warnings)
    }
    logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
    return result
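The dataAbsentReason branch only warns when neither a concrete value[x] nor the dataAbsentReason itself is populated. An illustrative Observation fragment that satisfies that check even though it carries no value:

# Illustrative Observation fragment: no value[x], but the absence is explained.
observation = {
    "resourceType": "Observation",
    "id": "example",
    "status": "final",
    "code": {"text": "Body weight"},
    "dataAbsentReason": {
        "coding": [{"system": "http://terminology.hl7.org/CodeSystem/data-absent-reason", "code": "not-performed"}]
    },
}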
# --- UPDATED: validate_bundle_against_profile function ---
def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True):
    """
    Validates a FHIR Bundle against profiles in the specified package.

    This version adds a new two-pass process to correctly resolve `urn:uuid`
    references within the bundle before flagging them as unresolved.
    """
    logger.debug(f"Validating bundle against {package_name}#{version}, include_dependencies={include_dependencies}")
    result = {
        'valid': True,
@@ -2037,10 +2430,21 @@ def validate_bundle_against_profile(package_name, version, bundle, include_depen
        logger.error("Validation failed: Resource is not a Bundle")
        return result

    # --- Work Item 1: First pass to collect all local references ---
    local_references = set()
    for entry in bundle.get('entry', []):
        fullUrl = entry.get('fullUrl')
        resource = entry.get('resource')
        if fullUrl:
            local_references.add(fullUrl)
        if resource and resource.get('resourceType') and resource.get('id'):
            local_references.add(f"{resource['resourceType']}/{resource['id']}")
    logger.debug(f"Found {len(local_references)} local references in the bundle.")

    # Track references and resolved references for external check
    all_references_found = set()

    # Second pass for validation and reference checking
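In a transaction Bundle, entries are typically addressed by `urn:uuid` fullUrls, so the first pass records both the fullUrl and the `ResourceType/id` form. In the illustrative bundle below, both keys land in local_references and the Observation's subject reference is therefore treated as resolved:

# Illustrative transaction Bundle: the Observation points at the Patient via its fullUrl.
bundle = {
    "resourceType": "Bundle",
    "type": "transaction",
    "entry": [
        {
            "fullUrl": "urn:uuid:61ebe359-bfdc-4613-8bf2-c5e300945f0a",
            "resource": {"resourceType": "Patient", "id": "pat1"},
        },
        {
            "fullUrl": "urn:uuid:88f151c0-a954-468a-88bd-5ae15c08e059",
            "resource": {
                "resourceType": "Observation",
                "id": "obs1",
                "subject": {"reference": "urn:uuid:61ebe359-bfdc-4613-8bf2-c5e300945f0a"},
            },
        },
    ],
}
# First pass collects: {'urn:uuid:61eb...', 'Patient/pat1', 'urn:uuid:88f1...', 'Observation/obs1'}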
    for entry in bundle.get('entry', []):
        resource = entry.get('resource')
        if not resource:
@@ -2050,13 +2454,11 @@ def validate_bundle_against_profile(package_name, version, bundle, include_depen
        result['summary']['resource_count'] += 1

        # Collect references
        current_refs = []
        find_references(resource, current_refs)
        for ref_str in current_refs:
            if isinstance(ref_str, str):
                all_references_found.add(ref_str)
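`find_references` itself is not shown in this diff; it is assumed to walk the resource recursively and append every `reference` string it encounters. A minimal sketch of such a helper, under that assumption:

def find_references(node, out):
    """Recursively collect Reference.reference strings from a FHIR resource dict."""
    if isinstance(node, dict):
        ref = node.get('reference')
        if isinstance(ref, str):
            out.append(ref)
        for value in node.values():
            find_references(value, out)
    elif isinstance(node, list):
        for item in node:
            find_references(item, out)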
        # Validate resource
        validation_result = validate_resource_against_profile(package_name, version, resource, include_dependencies)
@@ -2071,21 +2473,17 @@ def validate_bundle_against_profile(package_name, version, bundle, include_depen
        result['warnings'].extend(validation_result['warnings'])
        result['details'].extend(validation_result['details'])

    # --- Work Item 1: Check for unresolved references *after* processing all local resources ---
    for ref in all_references_found:
        if ref not in local_references:
            warning_msg = f"Unresolved reference: {ref}"
            result['warnings'].append(warning_msg)
            result['details'].append({
                'issue': warning_msg,
                'severity': 'warning',
                'description': f"The reference {ref} points to a resource not included in the bundle. Ensure the referenced resource is present or resolvable."
            })
            logger.info(f"Validation warning: Unresolved reference {ref}")

    # Finalize summary
    result['summary']['profiles_validated'] = list(result['summary']['profiles_validated'])
@@ -2094,6 +2492,7 @@ def validate_bundle_against_profile(package_name, version, bundle, include_depen
    logger.debug(f"Bundle validation result: valid={result['valid']}, errors={result['summary']['error_count']}, warnings={result['summary']['warning_count']}, resources={result['summary']['resource_count']}")
    return result


# --- Structure Definition Retrieval ---
def get_structure_definition(package_name, version, resource_type):
    """Fetches StructureDefinition with slicing support."""
233  setup_linux.sh
@@ -1,233 +0,0 @@
#!/bin/bash

# --- Configuration ---
REPO_URL="https://github.com/hapifhir/hapi-fhir-jpaserver-starter.git"
CLONE_DIR="hapi-fhir-jpaserver"
SOURCE_CONFIG_DIR="hapi-fhir-Setup" # Assuming this is relative to the script's parent
CONFIG_FILE="application.yaml"

# --- Define Paths ---
# Note: Adjust SOURCE_CONFIG_PATH if SOURCE_CONFIG_DIR is not a sibling directory
# This assumes the script is run from a directory, and hapi-fhir-setup is at the same level
SOURCE_CONFIG_PATH="../${SOURCE_CONFIG_DIR}/target/classes/${CONFIG_FILE}"
DEST_CONFIG_PATH="${CLONE_DIR}/target/classes/${CONFIG_FILE}"

APP_MODE=""

# --- Error Handling Function ---
handle_error() {
    echo "------------------------------------"
    echo "An error occurred: $1"
    echo "Script aborted."
    echo "------------------------------------"
    # Removed 'read -p "Press Enter to exit..."' as it's not typical for non-interactive CI/CD
    exit 1
}

# === Prompt for Installation Mode ===
get_mode_choice() {
    echo "Select Installation Mode:"
    echo "1. Standalone (Includes local HAPI FHIR Server - Requires Git & Maven)"
    echo "2. Lite (Excludes local HAPI FHIR Server - No Git/Maven needed)"

    while true; do
        read -r -p "Enter your choice (1 or 2): " choice
        case "$choice" in
            1)
                APP_MODE="standalone"
                break
                ;;
            2)
                APP_MODE="lite"
                break
                ;;
            *)
                echo "Invalid input. Please try again."
                ;;
        esac
    done
    echo "Selected Mode: $APP_MODE"
    echo
}

# Call the function to get mode choice
get_mode_choice

# === Conditionally Execute HAPI Setup ===
if [ "$APP_MODE" = "standalone" ]; then
    echo "Running Standalone setup including HAPI FHIR..."
    echo

    # --- Step 0: Clean up previous clone (optional) ---
    echo "Checking for existing directory: $CLONE_DIR"
    if [ -d "$CLONE_DIR" ]; then
        echo "Found existing directory, removing it..."
        rm -rf "$CLONE_DIR"
        if [ $? -ne 0 ]; then
            handle_error "Failed to remove existing directory: $CLONE_DIR"
        fi
        echo "Existing directory removed."
    else
        echo "Directory does not exist, proceeding with clone."
    fi
    echo

    # --- Step 1: Clone the HAPI FHIR server repository ---
    echo "Cloning repository: $REPO_URL into $CLONE_DIR..."
    git clone "$REPO_URL" "$CLONE_DIR"
    if [ $? -ne 0 ]; then
        handle_error "Failed to clone repository. Check Git installation and network connection."
    fi
    echo "Repository cloned successfully."
    echo

    # --- Step 2: Navigate into the cloned directory ---
    echo "Changing directory to $CLONE_DIR..."
    cd "$CLONE_DIR" || handle_error "Failed to change directory to $CLONE_DIR."
    echo "Current directory: $(pwd)"
    echo

    # --- Step 3: Build the HAPI server using Maven ---
    echo "===> Starting Maven build (Step 3)..."
    mvn clean package -DskipTests=true -Pboot
    if [ $? -ne 0 ]; then
        echo "ERROR: Maven build failed."
        cd ..
        handle_error "Maven build process resulted in an error."
    fi
    echo "Maven build completed successfully."
    echo

    # --- Step 4: Copy the configuration file ---
    echo "===> Starting file copy (Step 4)..."
    echo "Copying configuration file..."
    # Corrected SOURCE_CONFIG_PATH to be relative to the new current directory ($CLONE_DIR)
    # This assumes the original script's SOURCE_CONFIG_PATH was relative to its execution location
    # If SOURCE_CONFIG_DIR is ../hapi-fhir-setup relative to script's original location:
    # Then from within CLONE_DIR, it becomes ../../hapi-fhir-setup
    # We defined SOURCE_CONFIG_PATH earlier relative to the script start.
    # So, when inside CLONE_DIR, the path from original script location should be used.
    # The original script had: set SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%
    # And then: xcopy "%SOURCE_CONFIG_PATH%" "target\classes\"
    # This implies SOURCE_CONFIG_PATH is relative to the original script's location, not the $CLONE_DIR
    # Therefore, we need to construct the correct relative path from *within* $CLONE_DIR back to the source.
    # Assuming the script is in dir X, and SOURCE_CONFIG_DIR is ../hapi-fhir-setup from X.
    # So, hapi-fhir-setup is a sibling of X's parent.
    # If CLONE_DIR is also in X, then from within CLONE_DIR, the path is ../ + original SOURCE_CONFIG_PATH
    # For simplicity and robustness, let's use an absolute path or a more clearly defined relative path from the start.
    # The original `SOURCE_CONFIG_PATH=..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%` implies
    # that `hapi-fhir-setup` is a sibling of the directory where the script *is being run from*.

    # Let's assume the script is run from the root of FHIRFLARE-IG-Toolkit.
    # And hapi-fhir-setup is also in the root, next to this script.
    # Then SOURCE_CONFIG_PATH would be ./hapi-fhir-setup/target/classes/application.yaml
    # And from within ./hapi-fhir-jpaserver/, the path would be ../hapi-fhir-setup/target/classes/application.yaml

    # The original batch file sets SOURCE_CONFIG_PATH as "..\%SOURCE_CONFIG_DIR%\target\classes\%CONFIG_FILE%"
    # And COPIES it to "target\classes\" *while inside CLONE_DIR*.
    # This means the source path is relative to where the *cd %CLONE_DIR%* happened from.
    # Let's make it relative to the script's initial execution directory.
    INITIAL_SCRIPT_DIR=$(pwd)
    ABSOLUTE_SOURCE_CONFIG_PATH="${INITIAL_SCRIPT_DIR}/../${SOURCE_CONFIG_DIR}/target/classes/${CONFIG_FILE}" # This matches the ..\ logic

    echo "Source: $ABSOLUTE_SOURCE_CONFIG_PATH"
    echo "Destination: target/classes/$CONFIG_FILE"

    if [ ! -f "$ABSOLUTE_SOURCE_CONFIG_PATH" ]; then
        echo "WARNING: Source configuration file not found at $ABSOLUTE_SOURCE_CONFIG_PATH."
        echo "The script will continue, but the server might use default configuration."
    else
        cp "$ABSOLUTE_SOURCE_CONFIG_PATH" "target/classes/"
        if [ $? -ne 0 ]; then
            echo "WARNING: Failed to copy configuration file. Check if the source file exists and permissions."
            echo "The script will continue, but the server might use default configuration."
        else
            echo "Configuration file copied successfully."
        fi
    fi
    echo

    # --- Step 5: Navigate back to the parent directory ---
    echo "===> Changing directory back (Step 5)..."
    cd .. || handle_error "Failed to change back to the parent directory."
    echo "Current directory: $(pwd)"
    echo

else # APP_MODE is "lite"
    echo "Running Lite setup, skipping HAPI FHIR build..."
    # Ensure the hapi-fhir-jpaserver directory doesn't exist or is empty if Lite mode is chosen
    if [ -d "$CLONE_DIR" ]; then
        echo "Found existing HAPI directory ($CLONE_DIR) in Lite mode. Removing it..."
        rm -rf "$CLONE_DIR"
    fi
    # Create empty target directories expected by Dockerfile COPY, even if not used
    mkdir -p "${CLONE_DIR}/target/classes"
    mkdir -p "${CLONE_DIR}/custom" # This was in the original batch, ensure it's here
    # Create a placeholder empty WAR file and application.yaml to satisfy Dockerfile COPY
    touch "${CLONE_DIR}/target/ROOT.war"
    touch "${CLONE_DIR}/target/classes/application.yaml"
    echo "Placeholder files and directories created for Lite mode build in $CLONE_DIR."
    echo
fi

# === Modify docker-compose.yml to set APP_MODE ===
echo "Updating docker-compose.yml with APP_MODE=$APP_MODE..."
DOCKER_COMPOSE_TMP="docker-compose.yml.tmp"
DOCKER_COMPOSE_ORIG="docker-compose.yml"

cat << EOF > "$DOCKER_COMPOSE_TMP"
version: '3.8'
services:
  fhirflare:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "5000:5000"
      - "8080:8080" # Keep port exposed, even if Tomcat isn't running useful stuff in Lite
    volumes:
      - ./instance:/app/instance
      - ./static/uploads:/app/static/uploads
      - ./instance/hapi-h2-data/:/app/h2-data # Keep volume mounts consistent
      - ./logs:/app/logs
    environment:
      - FLASK_APP=app.py
      - FLASK_ENV=development
      - NODE_PATH=/usr/lib/node_modules
      - APP_MODE=${APP_MODE}
      - APP_BASE_URL=http://localhost:5000
      - HAPI_FHIR_URL=http://localhost:8080/fhir
    command: supervisord -c /etc/supervisord.conf
EOF

if [ ! -f "$DOCKER_COMPOSE_TMP" ]; then
    handle_error "Failed to create temporary docker-compose file ($DOCKER_COMPOSE_TMP)."
fi

# Replace the original docker-compose.yml
mv "$DOCKER_COMPOSE_TMP" "$DOCKER_COMPOSE_ORIG"
echo "docker-compose.yml updated successfully."
echo

# --- Step 6: Build Docker images ---
echo "===> Starting Docker build (Step 6)..."
docker-compose build --no-cache
if [ $? -ne 0 ]; then
    handle_error "Docker Compose build failed. Check Docker installation and docker-compose.yml file."
fi
echo "Docker images built successfully."
echo

# --- Step 7: Start Docker containers ---
echo "===> Starting Docker containers (Step 7)..."
docker-compose up -d
if [ $? -ne 0 ]; then
    handle_error "Docker Compose up failed. Check Docker installation and container configurations."
fi
echo "Docker containers started successfully."
echo

echo "===================================="
echo "Script finished successfully! (Mode: $APP_MODE)"
echo "===================================="
exit 0
@@ -24,8 +24,8 @@ stderr_logfile_backups=5
command=/usr/local/tomcat/bin/catalina.sh run
directory=/usr/local/tomcat
environment=SPRING_CONFIG_LOCATION="file:/usr/local/tomcat/conf/application.yaml",NODE_PATH="/usr/lib/node_modules"
-autostart=true
+autostart=false
-autorestart=true
+autorestart=false
startsecs=30
stopwaitsecs=30
stdout_logfile=/app/logs/tomcat.log
@@ -6,6 +6,8 @@ import tarfile
import shutil
import io
import requests
import time
import subprocess
from unittest.mock import patch, MagicMock, mock_open, call
from flask import Flask, session
from flask.testing import FlaskClient
@@ -27,9 +29,252 @@ def parse_ndjson(byte_stream):
    lines = decoded_stream.split('\n')
    return [json.loads(line) for line in lines if line.strip()]

class DockerComposeContainer:
    """
    A class that follows the Testcontainers pattern for managing Docker Compose environments.
    This implementation uses subprocess to call docker-compose directly since we're not
    installing the testcontainers-python package.
    """

    def __init__(self, compose_file_path):
        """
        Initialize with the path to the docker-compose.yml file

        Args:
            compose_file_path: Path to the docker-compose.yml file
        """
        self.compose_file = compose_file_path
        self.compose_dir = os.path.dirname(os.path.abspath(compose_file_path))
        self.containers_up = False
        self.service_ports = {}
        self._container_ids = {}

    def __enter__(self):
        """Start containers when entering context"""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Stop containers when exiting context"""
        self.stop()

    def with_service_port(self, service_name, port):
        """
        Map a service port (following the testcontainers builder pattern)

        Args:
            service_name: Name of the service in docker-compose.yml
            port: Port number to expose

        Returns:
            self for chaining
        """
        self.service_ports[service_name] = port
        return self

    def start(self):
        """Start the Docker Compose environment"""
        if self.containers_up:
            return self

        print("Starting Docker Compose environment...")
        result = subprocess.run(
            ['docker-compose', '-f', self.compose_file, 'up', '-d'],
            cwd=self.compose_dir,
            capture_output=True,
            text=True
        )

        if result.returncode != 0:
            error_msg = f"Failed to start Docker Compose environment: {result.stderr}"
            print(error_msg)
            raise RuntimeError(error_msg)

        # Store container IDs for later use
        self._get_container_ids()

        self.containers_up = True
        self._wait_for_services()
        return self

    def _get_container_ids(self):
        """Get the container IDs for all services"""
        result = subprocess.run(
            ['docker-compose', '-f', self.compose_file, 'ps', '-q'],
            cwd=self.compose_dir,
            capture_output=True,
            text=True
        )

        if result.returncode != 0:
            return

        container_ids = result.stdout.strip().split('\n')
        if not container_ids:
            return

        # Get service names for each container
        for container_id in container_ids:
            if not container_id:
                continue

            inspect_result = subprocess.run(
                ['docker', 'inspect', '--format', '{{index .Config.Labels "com.docker.compose.service"}}', container_id],
                capture_output=True,
                text=True
            )

            if inspect_result.returncode == 0:
                service_name = inspect_result.stdout.strip()
                self._container_ids[service_name] = container_id

    def get_container_id(self, service_name):
        """
        Get the container ID for a specific service

        Args:
            service_name: Name of the service in docker-compose.yml

        Returns:
            Container ID as string or None if not found
        """
        return self._container_ids.get(service_name)

    def get_service_host(self, service_name):
        """
        Get the host for a specific service - for Docker Compose we just use localhost

        Args:
            service_name: Name of the service in docker-compose.yml

        Returns:
            Host as string (usually localhost)
        """
        return "localhost"

    def get_service_url(self, service_name, path=""):
        """
        Get the URL for a specific service

        Args:
            service_name: Name of the service in docker-compose.yml
            path: Optional path to append to the URL

        Returns:
            URL as string
        """
        port = self.service_ports.get(service_name)
        if not port:
            raise ValueError(f"No port mapping defined for service {service_name}")

        url = f"http://{self.get_service_host(service_name)}:{port}"
        if path:
            # Ensure path starts with /
            if not path.startswith('/'):
                path = f"/{path}"
            url = f"{url}{path}"

        return url

    def get_logs(self, service_name):
        """
        Get logs for a specific service

        Args:
            service_name: Name of the service in docker-compose.yml

        Returns:
            Logs as string
        """
        container_id = self.get_container_id(service_name)
        if not container_id:
            return f"No container found for service {service_name}"

        result = subprocess.run(
            ['docker', 'logs', container_id],
            capture_output=True,
            text=True
        )

        return result.stdout

    def stop(self):
        """Stop the Docker Compose environment"""
        if not self.containers_up:
            return

        print("Stopping Docker Compose environment...")
        result = subprocess.run(
            ['docker-compose', '-f', self.compose_file, 'down'],
            cwd=self.compose_dir,
            capture_output=True,
            text=True
        )

        if result.returncode != 0:
            print(f"Warning: Error stopping Docker Compose: {result.stderr}")

        self.containers_up = False

    def _wait_for_services(self):
        """Wait for all services to be ready"""
        print("Waiting for services to be ready...")

        # Wait for HAPI FHIR server
        if 'fhir' in self.service_ports:
            self._wait_for_http_service(
                self.get_service_url('fhir', 'fhir/metadata'),
                "HAPI FHIR server"
            )

        # Wait for FHIRFLARE application
        if 'fhirflare' in self.service_ports:
            self._wait_for_http_service(
                self.get_service_url('fhirflare'),
                "FHIRFLARE application"
            )

        # Give additional time for services to stabilize
        time.sleep(5)

    def _wait_for_http_service(self, url, service_name, max_retries=30, retry_interval=2):
        """
        Wait for an HTTP service to be ready

        Args:
            url: URL to check
            service_name: Name of the service for logging
            max_retries: Maximum number of retries
            retry_interval: Interval between retries in seconds
        """
        for attempt in range(max_retries):
            try:
                response = requests.get(url, timeout=5)
                if response.status_code == 200:
                    print(f"{service_name} is ready after {attempt + 1} attempts")
                    return True
            except requests.RequestException:
                pass

            print(f"Waiting for {service_name} (attempt {attempt + 1}/{max_retries})...")
            time.sleep(retry_interval)

        print(f"Warning: {service_name} did not become ready in time")
        return False

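Outside of unittest's setUpClass/tearDownClass, the same class can be driven as a context manager through __enter__/__exit__. A minimal sketch (the compose file path and service names mirror the test setup below, but are otherwise illustrative):

compose_path = os.path.join(os.path.dirname(__file__), 'docker-compose.yml')

with DockerComposeContainer(compose_path) \
        .with_service_port('fhir', 8080) \
        .with_service_port('fhirflare', 5000) as env:
    # Both services are up and have answered their readiness probes at this point.
    print(env.get_service_url('fhir', 'fhir/metadata'))
    print(env.get_logs('fhirflare')[:200])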
class TestFHIRFlareIGToolkit(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Define the Docker Compose container
        compose_file_path = os.path.join(os.path.dirname(__file__), 'docker-compose.yml')
        cls.container = DockerComposeContainer(compose_file_path) \
            .with_service_port('fhir', 8080) \
            .with_service_port('fhirflare', 5000)

        # Start the containers
        cls.container.start()

        # Configure app for testing
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
@@ -39,6 +284,7 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
        app.config['API_KEY'] = 'test-api-key'
        app.config['VALIDATE_IMPOSED_PROFILES'] = True
        app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True
        app.config['HAPI_FHIR_URL'] = cls.container.get_service_url('fhir', 'fhir')  # Point to containerized HAPI FHIR

        cls.app_context = app.app_context()
        cls.app_context.push()
@@ -51,6 +297,9 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
        if os.path.exists(cls.test_packages_dir):
            shutil.rmtree(cls.test_packages_dir)

        # Stop Docker Compose environment
        cls.container.stop()

    def setUp(self):
        if os.path.exists(self.test_packages_dir):
            shutil.rmtree(self.test_packages_dir)
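Because HAPI_FHIR_URL now points at the containerized server, integration tests can exercise the real FHIR endpoint instead of a mock. An illustrative readiness-style check under that configuration, using the standard FHIR metadata endpoint (this test method is a sketch, not part of the diff):

    def test_hapi_metadata_reachable(self):
        # Illustrative check against the containerized HAPI server's capability endpoint.
        response = requests.get(f"{app.config['HAPI_FHIR_URL']}/metadata", timeout=10)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json().get('resourceType'), 'CapabilityStatement')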
@@ -96,321 +345,63 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
        with patch('fhirpath.evaluate', side_effect=Exception("fhirpath error")):
            self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"])

    def test_02_render_node_as_li(self):
        node = {
            "element": {"path": "Patient.identifier", "id": "Patient.identifier", "sliceName": "us-ssn", "min": 0, "max": "*", "type": [{"code": "Identifier"}]},
            "name": "identifier",
            "children": {}
        }
        must_support_paths = {"Patient.identifier:us-ssn"}
        with app.app_context():
            html = render_template('cp_view_processed_ig.html', processed_ig=MagicMock(must_support_elements={"USCorePatientProfile": ["Patient.identifier:us-ssn"]}), profile_list=[{"name": "USCorePatientProfile"}], base_list=[])
            self.assertIn("identifier:us-ssn", html)
            self.assertIn("list-group-item-warning", html)
            self.assertIn("Must Support (Slice: us-ssn)", html)

    # --- Basic Page Rendering Tests ---

    def test_03_homepage(self):
        # Connect to the containerized application
        response = requests.get(self.container.get_service_url('fhirflare'))
        self.assertEqual(response.status_code, 200)
        self.assertIn('FHIRFLARE IG Toolkit', response.text)

    def test_04_import_ig_page(self):
        response = requests.get(self.container.get_service_url('fhirflare', 'import-ig'))
        self.assertEqual(response.status_code, 200)
        self.assertIn('Import IG', response.text)
        self.assertIn('Package Name', response.text)
        self.assertIn('Package Version', response.text)
        self.assertIn('name="dependency_mode"', response.text)
@patch('app.list_downloaded_packages', return_value=([], [], {}))
|
# --- API Integration Tests ---
|
||||||
def test_05_view_igs_no_packages(self, mock_list_pkgs):
|
|
||||||
response = self.client.get('/view-igs')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertNotIn(b'<th>Package Name</th>', response.data)
|
|
||||||
self.assertIn(b'No packages downloaded yet.', response.data)
|
|
||||||
mock_list_pkgs.assert_called_once()
|
|
||||||
|
|
||||||
def test_06_view_igs_with_packages(self):
|
def test_30_load_ig_to_hapi_integration(self):
|
||||||
self.create_mock_tgz('hl7.fhir.us.core-6.1.0.tgz', {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
|
"""Test loading an IG to the containerized HAPI FHIR server"""
|
||||||
response = self.client.get('/view-igs')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'hl7.fhir.us.core', response.data)
|
|
||||||
self.assertIn(b'6.1.0', response.data)
|
|
||||||
self.assertIn(b'<th>Package Name</th>', response.data)
|
|
||||||
|
|
||||||
@patch('app.render_template')
|
|
||||||
def test_07_push_igs_page(self, mock_render_template):
|
|
||||||
mock_render_template.return_value = "Mock Render"
|
|
||||||
response = self.client.get('/push-igs')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
mock_render_template.assert_called()
|
|
||||||
call_args, call_kwargs = mock_render_template.call_args
|
|
||||||
self.assertEqual(call_args[0], 'cp_push_igs.html')
|
|
||||||
|
|
||||||
# --- UI Form Tests ---
|
|
||||||
|
|
||||||
@patch('app.services.import_package_and_dependencies')
|
|
||||||
def test_10_import_ig_form_success(self, mock_import):
|
|
||||||
mock_import.return_value = {'requested': ('hl7.fhir.us.core', '6.1.0'), 'processed': {('hl7.fhir.us.core', '6.1.0')}, 'downloaded': {('hl7.fhir.us.core', '6.1.0'): 'path/pkg.tgz'}, 'all_dependencies': {}, 'dependencies': [], 'errors': []}
|
|
||||||
response = self.client.post('/import-ig', data={'package_name': 'hl7.fhir.us.core', 'package_version': '6.1.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Successfully downloaded hl7.fhir.us.core#6.1.0 and dependencies! Mode: recursive', response.data)
|
|
||||||
mock_import.assert_called_once_with('hl7.fhir.us.core', '6.1.0', dependency_mode='recursive')
|
|
||||||
|
|
||||||
@patch('app.services.import_package_and_dependencies')
|
|
||||||
def test_11_import_ig_form_failure_404(self, mock_import):
|
|
||||||
mock_import.return_value = {'requested': ('invalid.package', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error fetching package: 404 Client Error: Not Found for url: ...']}
|
|
||||||
response = self.client.post('/import-ig', data={'package_name': 'invalid.package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Package not found on registry (404)', response.data)
|
|
||||||
|
|
||||||
@patch('app.services.import_package_and_dependencies')
|
|
||||||
def test_12_import_ig_form_failure_conn_error(self, mock_import):
|
|
||||||
mock_import.return_value = {'requested': ('conn.error.pkg', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['Connection error: Cannot connect to registry...']}
|
|
||||||
response = self.client.post('/import-ig', data={'package_name': 'conn.error.pkg', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Could not connect to the FHIR package registry', response.data)
|
|
||||||
|
|
||||||
def test_13_import_ig_form_invalid_input(self):
|
|
||||||
response = self.client.post('/import-ig', data={'package_name': 'invalid@package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Error in Package Name: Invalid package name format.', response.data)
|
|
||||||
|
|
||||||
@patch('app.services.process_package_file')
|
|
||||||
@patch('app.services.parse_package_filename')
|
|
||||||
def test_20_process_ig_success(self, mock_parse, mock_process):
|
|
||||||
pkg_name = 'hl7.fhir.us.core'
|
|
||||||
pkg_version = '6.1.0'
|
|
||||||
filename = f'{pkg_name}-{pkg_version}.tgz'
|
|
||||||
mock_parse.return_value = (pkg_name, pkg_version)
|
|
||||||
mock_process.return_value = {
|
|
||||||
'resource_types_info': [{'name': 'Patient', 'type': 'Patient', 'is_profile': False, 'must_support': True, 'optional_usage': False}],
|
|
||||||
'must_support_elements': {'Patient': ['Patient.name', 'Patient.identifier:us-ssn']},
|
|
||||||
'examples': {'Patient': ['package/Patient-example.json']},
|
|
||||||
'complies_with_profiles': [],
|
|
||||||
'imposed_profiles': ['http://hl7.org/fhir/StructureDefinition/Patient'],
|
|
||||||
'errors': []
|
|
||||||
}
|
|
||||||
self.create_mock_tgz(filename, {'package/package.json': {'name': pkg_name, 'version': pkg_version}})
|
|
||||||
response = self.client.post('/process-igs', data={'filename': filename}, follow_redirects=False)
|
|
||||||
self.assertEqual(response.status_code, 302)
|
|
||||||
self.assertTrue(response.location.endswith('/view-igs'))
|
|
||||||
with self.client.session_transaction() as sess:
|
|
||||||
self.assertIn(('success', f'Successfully processed {pkg_name}#{pkg_version}!'), sess.get('_flashes', []))
|
|
||||||
mock_parse.assert_called_once_with(filename)
|
|
||||||
mock_process.assert_called_once_with(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))
|
|
||||||
processed_ig = db.session.query(ProcessedIg).filter_by(package_name=pkg_name, version=pkg_version).first()
|
|
||||||
self.assertIsNotNone(processed_ig)
|
|
||||||
self.assertEqual(processed_ig.package_name, pkg_name)
|
|
||||||
self.assertIn('Patient.name', processed_ig.must_support_elements.get('Patient', []))
|
|
||||||
|
|
||||||
def test_21_process_ig_file_not_found(self):
|
|
||||||
response = self.client.post('/process-igs', data={'filename': 'nonexistent.tgz'}, follow_redirects=True)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Package file not found: nonexistent.tgz', response.data)
|
|
||||||
|
|
||||||
def test_22_delete_ig_success(self):
|
|
||||||
filename = 'hl7.fhir.us.core-6.1.0.tgz'
|
|
||||||
metadata_filename = 'hl7.fhir.us.core-6.1.0.metadata.json'
|
|
||||||
self.create_mock_tgz(filename, {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
|
|
||||||
metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
|
|
||||||
open(metadata_path, 'w').write(json.dumps({'name': 'hl7.fhir.us.core'}))
|
|
||||||
self.assertTrue(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
|
|
||||||
self.assertTrue(os.path.exists(metadata_path))
|
|
||||||
response = self.client.post('/delete-ig', data={'filename': filename}, follow_redirects=True)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(f'Deleted: {filename}, {metadata_filename}'.encode('utf-8'), response.data)
|
|
||||||
self.assertFalse(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
|
|
||||||
self.assertFalse(os.path.exists(metadata_path))
|
|
||||||
|
|
||||||
def test_23_unload_ig_success(self):
|
|
||||||
processed_ig = ProcessedIg(package_name='test.pkg', version='1.0', processed_date=datetime.now(timezone.utc), resource_types_info=[], must_support_elements={}, examples={})
|
|
||||||
db.session.add(processed_ig)
|
|
||||||
db.session.commit()
|
|
||||||
ig_id = processed_ig.id
|
|
||||||
self.assertIsNotNone(db.session.get(ProcessedIg, ig_id))
|
|
||||||
response = self.client.post('/unload-ig', data={'ig_id': str(ig_id)}, follow_redirects=True)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'Unloaded processed data for test.pkg#1.0', response.data)
|
|
||||||
self.assertIsNone(db.session.get(ProcessedIg, ig_id))
|
|
||||||
|
|
||||||
# --- Phase 2 Tests ---
|
|
||||||
|
|
||||||
@patch('os.path.exists', return_value=True)
|
|
||||||
@patch('tarfile.open')
|
|
||||||
@patch('requests.put')
|
|
||||||
def test_30_load_ig_to_hapi_success(self, mock_requests_put, mock_tarfile_open, mock_os_exists):
|
|
||||||
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
|
||||||
self.create_mock_tgz(filename, {
'package/package.json': {'name': pkg_name, 'version': pkg_version},
|
||||||
'package/Patient-profile.json': {'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}
|
'package/StructureDefinition-us-core-patient.json': {
|
||||||
|
'resourceType': 'StructureDefinition',
|
||||||
|
'id': 'us-core-patient',
|
||||||
|
'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
|
||||||
|
'name': 'USCorePatientProfile',
|
||||||
|
'type': 'Patient',
|
||||||
|
'status': 'active'
|
||||||
|
}
|
||||||
})
|
||||||
mock_tar = MagicMock()
|
|
||||||
profile_member = MagicMock(spec=tarfile.TarInfo)
|
# Load IG to HAPI
|
||||||
profile_member.name = 'package/Patient-profile.json'
|
|
||||||
profile_member.isfile.return_value = True
|
|
||||||
mock_tar.getmembers.return_value = [profile_member]
|
|
||||||
mock_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}).encode('utf-8'))
|
|
||||||
mock_tarfile_open.return_value.__enter__.return_value = mock_tar
|
|
||||||
mock_requests_put.return_value = MagicMock(status_code=200)
|
|
||||||
response = self.client.post(
'/api/load-ig-to-hapi',
data=json.dumps({'package_name': pkg_name, 'version': pkg_version}),
|
||||||
content_type='application/json'
|
content_type='application/json',
|
||||||
|
headers={'X-API-Key': 'test-api-key'}
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertEqual(data['status'], 'success')
|
||||||
mock_requests_put.assert_called_once_with(
|
|
||||||
'http://localhost:8080/fhir/StructureDefinition/us-core-patient',
|
|
||||||
json={'resourceType': 'StructureDefinition', 'id': 'us-core-patient'},
|
|
||||||
headers={'Content-Type': 'application/fhir+json'}
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_31_load_ig_to_hapi_not_found(self):
|
# Verify the resource was loaded by querying the HAPI FHIR server directly
|
||||||
response = self.client.post(
|
hapi_response = requests.get(self.container.get_service_url('fhir', 'fhir/StructureDefinition/us-core-patient'))
|
||||||
'/api/load-ig-to-hapi',
|
self.assertEqual(hapi_response.status_code, 200)
|
||||||
data=json.dumps({'package_name': 'nonexistent', 'version': '1.0'}),
|
resource = hapi_response.json()
|
||||||
content_type='application/json'
|
self.assertEqual(resource['resourceType'], 'StructureDefinition')
|
||||||
)
|
self.assertEqual(resource['id'], 'us-core-patient')
|
||||||
self.assertEqual(response.status_code, 404)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertEqual(data['error'], 'Package not found')
|
|
||||||
|
|
||||||
@patch('os.path.exists', return_value=True)
|
def test_31_validate_sample_with_hapi_integration(self):
|
||||||
@patch('requests.post')
|
"""Test validating a sample against the containerized HAPI FHIR server"""
|
||||||
def test_32_api_validate_sample_hapi_success(self, mock_requests_post, mock_os_exists):
|
# First, load the necessary StructureDefinition
|
||||||
pkg_name = 'hl7.fhir.us.core'
|
|
||||||
pkg_version = '6.1.0'
|
|
||||||
sample_resource = {
|
|
||||||
'resourceType': 'Patient',
|
|
||||||
'id': 'valid1',
|
|
||||||
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']},
|
|
||||||
'name': [{'given': ['John'], 'family': 'Doe'}]
|
|
||||||
}
|
|
||||||
mock_requests_post.return_value = MagicMock(
|
|
||||||
status_code=200,
|
|
||||||
json=lambda: {
|
|
||||||
'resourceType': 'OperationOutcome',
|
|
||||||
'issue': [{'severity': 'warning', 'diagnostics': 'Must Support element Patient.identifier missing'}]
|
|
||||||
}
|
|
||||||
)
|
|
||||||
response = self.client.post(
|
|
||||||
'/api/validate-sample',
|
|
||||||
data=json.dumps({
|
|
||||||
'package_name': pkg_name,
|
|
||||||
'version': pkg_version,
|
|
||||||
'sample_data': json.dumps(sample_resource),
|
|
||||||
'mode': 'single',
|
|
||||||
'include_dependencies': True
|
|
||||||
}),
|
|
||||||
content_type='application/json',
|
|
||||||
headers={'X-API-Key': 'test-api-key'}
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertTrue(data['valid'])
|
|
||||||
self.assertEqual(data['warnings'], ['Must Support element Patient.identifier missing'])
|
|
||||||
mock_requests_post.assert_called_once_with(
|
|
||||||
'http://localhost:8080/fhir/Patient/$validate?profile=http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
|
|
||||||
json=sample_resource,
|
|
||||||
headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
|
|
||||||
timeout=10
|
|
||||||
)
|
|
||||||
|
|
||||||
@patch('os.path.exists', return_value=True)
|
|
||||||
@patch('requests.post', side_effect=requests.ConnectionError("HAPI down"))
|
|
||||||
@patch('services.navigate_fhir_path')
|
|
||||||
def test_33_api_validate_sample_hapi_fallback(self, mock_navigate_fhir_path, mock_requests_post, mock_os_exists):
|
|
||||||
pkg_name = 'hl7.fhir.us.core'
|
|
||||||
pkg_version = '6.1.0'
|
|
||||||
sample_resource = {
|
|
||||||
'resourceType': 'Patient',
|
|
||||||
'id': 'valid1',
|
|
||||||
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
|
|
||||||
}
|
|
||||||
mock_navigate_fhir_path.return_value = None
|
|
||||||
self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
|
|
||||||
'package/package.json': {'name': pkg_name, 'version': pkg_version},
|
|
||||||
'package/StructureDefinition-us-core-patient.json': {
|
|
||||||
'resourceType': 'StructureDefinition',
|
|
||||||
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
response = self.client.post(
|
|
||||||
'/api/validate-sample',
|
|
||||||
data=json.dumps({
|
|
||||||
'package_name': pkg_name,
|
|
||||||
'version': pkg_version,
|
|
||||||
'sample_data': json.dumps(sample_resource),
|
|
||||||
'mode': 'single',
|
|
||||||
'include_dependencies': True
|
|
||||||
}),
|
|
||||||
content_type='application/json',
|
|
||||||
headers={'X-API-Key': 'test-api-key'}
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertFalse(data['valid'])
|
|
||||||
self.assertIn('Required element Patient.name missing', data['errors'])
|
|
||||||
self.assertIn('HAPI validation failed', [d['issue'] for d in data['details']])
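The two-pass flow these fallback tests exercise — ask the HAPI server to $validate first, then fall back to local StructureDefinition checks when the connection fails — could be sketched roughly as below. The function name validate_with_fallback and the report shape are assumptions for illustration, not the toolkit's actual implementation.

# Minimal sketch of the two-pass validation flow exercised by tests 32/33.
# validate_with_fallback and the report layout are illustrative assumptions;
# the real services module may differ.
import requests

def validate_with_fallback(resource, profile_url, sd, hapi_base='http://localhost:8080/fhir'):
    report = {'valid': True, 'errors': [], 'warnings': [], 'details': []}
    try:
        # First pass: ask HAPI to validate the resource against the declared profile.
        resp = requests.post(
            f"{hapi_base}/{resource['resourceType']}/$validate?profile={profile_url}",
            json=resource,
            headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
            timeout=10,
        )
        for issue in resp.json().get('issue', []):
            severity = issue.get('severity')
            message = issue.get('diagnostics', '')
            if severity in ('error', 'fatal'):
                report['valid'] = False
                report['errors'].append(message)
            elif severity == 'warning':
                report['warnings'].append(message)
    except requests.ConnectionError:
        # Second pass: HAPI is unreachable, so fall back to local checks against
        # the StructureDefinition snapshot (min cardinality and mustSupport flags).
        report['details'].append({'issue': 'HAPI validation failed', 'severity': 'warning'})
        for element in sd.get('snapshot', {}).get('element', []):
            path = element.get('path', '')
            field = path.split('.', 1)[-1]
            if element.get('min', 0) >= 1 and '.' in path and field not in resource:
                report['valid'] = False
                report['errors'].append(f'Required element {path} missing')
            elif element.get('mustSupport') and '.' in path and field not in resource:
                report['warnings'].append(f'Must Support element {path} missing')
    return report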
|
|
||||||
|
|
||||||
# --- Phase 3 Tests ---
|
|
||||||
|
|
||||||
@patch('requests.get')
|
|
||||||
def test_34_hapi_status_check(self, mock_requests_get):
|
|
||||||
mock_requests_get.return_value = MagicMock(status_code=200, json=lambda: {'resourceType': 'CapabilityStatement'})
|
|
||||||
response = self.client.get('/fhir/metadata')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertEqual(data['resourceType'], 'CapabilityStatement')
|
|
||||||
mock_requests_get.side_effect = requests.ConnectionError("HAPI down")
|
|
||||||
response = self.client.get('/fhir/metadata')
|
|
||||||
self.assertEqual(response.status_code, 503)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertIn('Unable to connect to HAPI FHIR server', data['error'])
|
|
||||||
|
|
||||||
def test_35_validate_sample_ui_rendering(self):
|
|
||||||
pkg_name = 'hl7.fhir.us.core'
|
|
||||||
pkg_version = '6.1.0'
|
|
||||||
sample_resource = {
|
|
||||||
'resourceType': 'Patient',
|
|
||||||
'id': 'test',
|
|
||||||
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
|
|
||||||
}
|
|
||||||
self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
|
|
||||||
'package/package.json': {'name': pkg_name, 'version': pkg_version},
|
|
||||||
'package/StructureDefinition-us-core-patient.json': {
|
|
||||||
'resourceType': 'StructureDefinition',
|
|
||||||
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
response = self.client.post(
|
|
||||||
'/api/validate-sample',
|
|
||||||
data=json.dumps({
|
|
||||||
'package_name': pkg_name,
|
|
||||||
'version': pkg_version,
|
|
||||||
'sample_data': json.dumps(sample_resource),
|
|
||||||
'mode': 'single',
|
|
||||||
'include_dependencies': True
|
|
||||||
}),
|
|
||||||
content_type='application/json',
|
|
||||||
headers={'X-API-Key': 'test-api-key'}
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertFalse(data['valid'])
|
|
||||||
self.assertIn('Required element Patient.name missing', data['errors'])
|
|
||||||
self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
|
|
||||||
response = self.client.get('/validate-sample')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
self.assertIn(b'us-core-patient', response.data)
|
|
||||||
|
|
||||||
def test_36_must_support_consistency(self):
|
|
||||||
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
|
||||||
@@ -418,15 +409,37 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
|||||||
'package/package.json': {'name': pkg_name, 'version': pkg_version},
'package/StructureDefinition-us-core-patient.json': {
'resourceType': 'StructureDefinition',
|
||||||
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True, 'sliceName': 'us-ssn'}]}
|
'id': 'us-core-patient',
|
||||||
|
'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
|
||||||
|
'name': 'USCorePatientProfile',
|
||||||
|
'type': 'Patient',
|
||||||
|
'status': 'active',
|
||||||
|
'snapshot': {
|
||||||
|
'element': [
|
||||||
|
{'path': 'Patient', 'min': 1, 'max': '1'},
|
||||||
|
{'path': 'Patient.name', 'min': 1, 'max': '*'},
|
||||||
|
{'path': 'Patient.identifier', 'min': 0, 'max': '*', 'mustSupport': True}
|
||||||
|
]
|
||||||
|
}
|
||||||
}
})
|
||||||
services.process_package_file(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))
|
|
||||||
|
# Load IG to HAPI
|
||||||
|
self.client.post(
|
||||||
|
'/api/load-ig-to-hapi',
|
||||||
|
data=json.dumps({'package_name': pkg_name, 'version': pkg_version}),
|
||||||
|
content_type='application/json',
|
||||||
|
headers={'X-API-Key': 'test-api-key'}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Validate a sample that's missing a required element
|
||||||
sample_resource = {
'resourceType': 'Patient',
|
||||||
'id': 'test',
|
'id': 'test-patient',
|
||||||
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
|
||||||
|
# Missing required 'name' element
|
||||||
}
|
||||||
|
|
||||||
response = self.client.post(
'/api/validate-sample',
data=json.dumps({
|
||||||
@@ -439,18 +452,68 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
|||||||
content_type='application/json',
headers={'X-API-Key': 'test-api-key'}
)
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
|
||||||
self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
|
self.assertFalse(data['valid'])
|
||||||
with self.app_context:
|
# Check for validation error related to missing name
|
||||||
ig = ProcessedIg.query.filter_by(package_name=pkg_name, version=pkg_version).first()
|
found_name_error = any('name' in error for error in data['errors'])
|
||||||
self.assertIsNotNone(ig)
|
self.assertTrue(found_name_error, f"Expected error about missing name element, got: {data['errors']}")
|
||||||
must_support_paths = ig.must_support_elements.get('Patient', [])
|
|
||||||
self.assertIn('Patient.identifier:us-ssn', must_support_paths)
|
def test_32_push_ig_to_hapi_integration(self):
|
||||||
response = self.client.get(f'/view-ig/{ig.id}')
|
"""Test pushing multiple resources from an IG to the containerized HAPI FHIR server"""
|
||||||
|
pkg_name = 'test.push.pkg'
|
||||||
|
pkg_version = '1.0.0'
|
||||||
|
filename = f'{pkg_name}-{pkg_version}.tgz'
|
||||||
|
|
||||||
|
# Create a test package with multiple resources
|
||||||
|
self.create_mock_tgz(filename, {
|
||||||
|
'package/package.json': {'name': pkg_name, 'version': pkg_version},
|
||||||
|
'package/Patient-test1.json': {
|
||||||
|
'resourceType': 'Patient',
|
||||||
|
'id': 'test1',
|
||||||
|
'name': [{'family': 'Test', 'given': ['Patient']}]
|
||||||
|
},
|
||||||
|
'package/Observation-test1.json': {
|
||||||
|
'resourceType': 'Observation',
|
||||||
|
'id': 'test1',
|
||||||
|
'status': 'final',
|
||||||
|
'code': {'coding': [{'system': 'http://loinc.org', 'code': '12345-6'}]}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
# Push the IG to HAPI
|
||||||
|
response = self.client.post(
|
||||||
|
'/api/push-ig',
|
||||||
|
data=json.dumps({
|
||||||
|
'package_name': pkg_name,
|
||||||
|
'version': pkg_version,
|
||||||
|
'fhir_server_url': self.container.get_service_url('fhir', 'fhir'),
|
||||||
|
'include_dependencies': False
|
||||||
|
}),
|
||||||
|
content_type='application/json',
|
||||||
|
headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
|
||||||
|
)
|
||||||
|
|
||||||
self.assertEqual(response.status_code, 200)
|
||||||
self.assertIn(b'Patient.identifier:us-ssn', response.data)
|
streamed_data = parse_ndjson(response.data)
|
||||||
self.assertIn(b'list-group-item-warning', response.data)
|
complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
|
||||||
|
self.assertIsNotNone(complete_msg, "Complete message not found in streamed response")
|
||||||
|
summary = complete_msg.get('data', {})
|
||||||
|
self.assertTrue(summary.get('success_count') >= 2, f"Expected at least 2 successful resources, got {summary.get('success_count')}")
|
||||||
|
|
||||||
|
# Verify resources were loaded by querying the HAPI FHIR server directly
|
||||||
|
patient_response = requests.get(self.container.get_service_url('fhir', 'fhir/Patient/test1'))
|
||||||
|
self.assertEqual(patient_response.status_code, 200)
|
||||||
|
patient = patient_response.json()
|
||||||
|
self.assertEqual(patient['resourceType'], 'Patient')
|
||||||
|
self.assertEqual(patient['id'], 'test1')
|
||||||
|
|
||||||
|
observation_response = requests.get(self.container.get_service_url('fhir', 'fhir/Observation/test1'))
|
||||||
|
self.assertEqual(observation_response.status_code, 200)
|
||||||
|
observation = observation_response.json()
|
||||||
|
self.assertEqual(observation['resourceType'], 'Observation')
|
||||||
|
self.assertEqual(observation['id'], 'test1')
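The integration tests above rely on a self.container helper (get_service_url, get_logs, containers_up, _container_ids) whose definition is not part of this diff. A minimal docker-compose wrapper consistent with those calls might look like the following sketch; the class name, port mappings, and compose commands are assumptions, not the repository's actual harness.

# Rough sketch of the docker-compose test harness the integration tests assume.
# Names and ports are illustrative assumptions only.
import subprocess

class ComposeHarness:
    def __init__(self, compose_file='docker-compose.yml', ports=None):
        self.compose_file = compose_file
        self.ports = ports or {'fhir': 8080, 'fhirflare': 5000, 'db': 5432}
        self.containers_up = False
        self._container_ids = {}

    def start(self):
        # Bring the stack up and record each service's container id.
        subprocess.run(['docker', 'compose', '-f', self.compose_file, 'up', '-d'], check=True)
        for service in self.ports:
            out = subprocess.run(
                ['docker', 'compose', '-f', self.compose_file, 'ps', '-q', service],
                capture_output=True, text=True, check=True)
            if out.stdout.strip():
                self._container_ids[service] = out.stdout.strip()
        self.containers_up = bool(self._container_ids)

    def get_service_url(self, service, path=''):
        # e.g. get_service_url('fhir', 'fhir/Patient/test1') -> http://localhost:8080/fhir/Patient/test1
        return f"http://localhost:{self.ports[service]}/{path}".rstrip('/')

    def get_logs(self, service):
        out = subprocess.run(['docker', 'logs', self._container_ids[service]],
                             capture_output=True, text=True)
        return out.stdout + out.stderr

    def stop(self):
        subprocess.run(['docker', 'compose', '-f', self.compose_file, 'down', '-v'], check=False)
        self.containers_up = False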
|
||||||
|
|
||||||
# --- Existing API Tests ---
|
||||||
|
|
||||||
@@ -515,7 +578,7 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
|||||||
pkg_name = 'push.test.pkg'
pkg_version = '1.0.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
|
||||||
fhir_server_url = 'http://fake-fhir.com/baseR4'
|
fhir_server_url = self.container.get_service_url('fhir', 'fhir')
|
||||||
mock_get_metadata.return_value = {'imported_dependencies': []}
mock_tar = MagicMock()
mock_patient = {'resourceType': 'Patient', 'id': 'pat1'}
|
||||||
@@ -564,225 +627,22 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
|||||||
self.assertEqual(len(summary.get('failed_details')), 0)
mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))
|
||||||
|
|
||||||
@patch('os.path.exists', return_value=True)
|
# --- Helper method to debug container issues ---
|
||||||
@patch('app.services.get_package_metadata')
|
|
||||||
@patch('tarfile.open')
|
|
||||||
@patch('requests.Session')
|
|
||||||
def test_51_api_push_ig_with_failures(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
|
|
||||||
pkg_name = 'push.fail.pkg'
|
|
||||||
pkg_version = '1.0.0'
|
|
||||||
filename = f'{pkg_name}-{pkg_version}.tgz'
|
|
||||||
fhir_server_url = 'http://fail-fhir.com/baseR4'
|
|
||||||
mock_get_metadata.return_value = {'imported_dependencies': []}
|
|
||||||
mock_tar = MagicMock()
|
|
||||||
mock_ok_res = {'resourceType': 'Patient', 'id': 'ok1'}
|
|
||||||
mock_fail_res = {'resourceType': 'Observation', 'id': 'fail1'}
|
|
||||||
ok_member = MagicMock(spec=tarfile.TarInfo)
|
|
||||||
ok_member.name = 'package/Patient-ok1.json'
|
|
||||||
ok_member.isfile.return_value = True
|
|
||||||
fail_member = MagicMock(spec=tarfile.TarInfo)
|
|
||||||
fail_member.name = 'package/Observation-fail1.json'
|
|
||||||
fail_member.isfile.return_value = True
|
|
||||||
mock_tar.getmembers.return_value = [ok_member, fail_member]
|
|
||||||
def mock_extractfile(member):
|
|
||||||
if member.name == 'package/Patient-ok1.json':
|
|
||||||
return io.BytesIO(json.dumps(mock_ok_res).encode('utf-8'))
|
|
||||||
if member.name == 'package/Observation-fail1.json':
|
|
||||||
return io.BytesIO(json.dumps(mock_fail_res).encode('utf-8'))
|
|
||||||
return None
|
|
||||||
mock_tar.extractfile.side_effect = mock_extractfile
|
|
||||||
mock_tarfile_open.return_value.__enter__.return_value = mock_tar
|
|
||||||
mock_session_instance = MagicMock()
|
|
||||||
mock_ok_response = MagicMock(status_code=200)
|
|
||||||
mock_ok_response.raise_for_status.return_value = None
|
|
||||||
mock_fail_http_response = MagicMock(status_code=400)
|
|
||||||
mock_fail_http_response.json.return_value = {'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'diagnostics': 'Validation failed'}]}
|
|
||||||
mock_fail_exception = requests.exceptions.HTTPError(response=mock_fail_http_response)
|
|
||||||
mock_fail_http_response.raise_for_status.side_effect = mock_fail_exception
|
|
||||||
mock_session_instance.put.side_effect = [mock_ok_response, mock_fail_http_response]
|
|
||||||
mock_session.return_value = mock_session_instance
|
|
||||||
self.create_mock_tgz(filename, {'package/dummy.txt': 'content'})
|
|
||||||
response = self.client.post(
|
|
||||||
'/api/push-ig',
|
|
||||||
data=json.dumps({
|
|
||||||
'package_name': pkg_name,
|
|
||||||
'version': pkg_version,
|
|
||||||
'fhir_server_url': fhir_server_url,
|
|
||||||
'include_dependencies': False,
|
|
||||||
'api_key': 'test-api-key'
|
|
||||||
}),
|
|
||||||
content_type='application/json',
|
|
||||||
headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
streamed_data = parse_ndjson(response.data)
|
|
||||||
complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
|
|
||||||
self.assertIsNotNone(complete_msg)
|
|
||||||
summary = complete_msg.get('data', {})
|
|
||||||
self.assertEqual(summary.get('status'), 'partial')
|
|
||||||
self.assertEqual(summary.get('success_count'), 1)
|
|
||||||
self.assertEqual(summary.get('failure_count'), 1)
|
|
||||||
self.assertEqual(len(summary.get('failed_details')), 1)
|
|
||||||
self.assertEqual(summary['failed_details'][0].get('resource'), 'Observation/fail1')
|
|
||||||
self.assertIn('Validation failed', summary['failed_details'][0].get('error', ''))
|
|
||||||
mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))
|
|
||||||
|
|
||||||
@patch('os.path.exists', return_value=True)
|
def test_99_print_container_logs_on_failure(self):
|
||||||
@patch('app.services.get_package_metadata')
|
"""Helper test that prints container logs in case of failures"""
|
||||||
@patch('tarfile.open')
|
# This test should always pass but will print logs if other tests fail
|
||||||
@patch('requests.Session')
|
try:
|
||||||
def test_52_api_push_ig_with_dependency(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
|
if hasattr(self, 'container') and self.container.containers_up:
|
||||||
main_pkg_name = 'main.dep.pkg'
|
for service_name in ['fhir', 'db', 'fhirflare']:
|
||||||
main_pkg_ver = '1.0'
|
if service_name in self.container._container_ids:
|
||||||
main_filename = f'{main_pkg_name}-{main_pkg_ver}.tgz'
|
print(f"\n=== Logs for {service_name} ===")
|
||||||
dep_pkg_name = 'dep.pkg'
|
print(self.container.get_logs(service_name))
|
||||||
dep_pkg_ver = '1.0'
|
except Exception as e:
|
||||||
dep_filename = f'{dep_pkg_name}-{dep_pkg_ver}.tgz'
|
print(f"Error getting container logs: {e}")
|
||||||
fhir_server_url = 'http://dep-fhir.com/baseR4'
|
|
||||||
self.create_mock_tgz(main_filename, {'package/Patient-main.json': {'resourceType': 'Patient', 'id': 'main'}})
|
|
||||||
self.create_mock_tgz(dep_filename, {'package/Observation-dep.json': {'resourceType': 'Observation', 'id': 'dep'}})
|
|
||||||
mock_get_metadata.return_value = {'imported_dependencies': [{'name': dep_pkg_name, 'version': dep_pkg_ver}]}
|
|
||||||
mock_main_tar = MagicMock()
|
|
||||||
main_member = MagicMock(spec=tarfile.TarInfo)
|
|
||||||
main_member.name = 'package/Patient-main.json'
|
|
||||||
main_member.isfile.return_value = True
|
|
||||||
mock_main_tar.getmembers.return_value = [main_member]
|
|
||||||
mock_main_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Patient', 'id': 'main'}).encode('utf-8'))
|
|
||||||
mock_dep_tar = MagicMock()
|
|
||||||
dep_member = MagicMock(spec=tarfile.TarInfo)
|
|
||||||
dep_member.name = 'package/Observation-dep.json'
|
|
||||||
dep_member.isfile.return_value = True
|
|
||||||
mock_dep_tar.getmembers.return_value = [dep_member]
|
|
||||||
mock_dep_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Observation', 'id': 'dep'}).encode('utf-8'))
|
|
||||||
def tar_opener(path, mode):
|
|
||||||
mock_tar_ctx = MagicMock()
|
|
||||||
if main_filename in path:
|
|
||||||
mock_tar_ctx.__enter__.return_value = mock_main_tar
|
|
||||||
elif dep_filename in path:
|
|
||||||
mock_tar_ctx.__enter__.return_value = mock_dep_tar
|
|
||||||
else:
|
|
||||||
empty_mock_tar = MagicMock()
|
|
||||||
empty_mock_tar.getmembers.return_value = []
|
|
||||||
mock_tar_ctx.__enter__.return_value = empty_mock_tar
|
|
||||||
return mock_tar_ctx
|
|
||||||
mock_tarfile_open.side_effect = tar_opener
|
|
||||||
mock_session_instance = MagicMock()
|
|
||||||
mock_put_response = MagicMock(status_code=200)
|
|
||||||
mock_put_response.raise_for_status.return_value = None
|
|
||||||
mock_session_instance.put.return_value = mock_put_response
|
|
||||||
mock_session.return_value = mock_session_instance
|
|
||||||
response = self.client.post(
|
|
||||||
'/api/push-ig',
|
|
||||||
data=json.dumps({
|
|
||||||
'package_name': main_pkg_name,
|
|
||||||
'version': main_pkg_ver,
|
|
||||||
'fhir_server_url': fhir_server_url,
|
|
||||||
'include_dependencies': True,
|
|
||||||
'api_key': 'test-api-key'
|
|
||||||
}),
|
|
||||||
content_type='application/json',
|
|
||||||
headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
|
|
||||||
)
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
streamed_data = parse_ndjson(response.data)
|
|
||||||
complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
|
|
||||||
self.assertIsNotNone(complete_msg)
|
|
||||||
summary = complete_msg.get('data', {})
|
|
||||||
self.assertEqual(summary.get('status'), 'success')
|
|
||||||
self.assertEqual(summary.get('success_count'), 2)
|
|
||||||
self.assertEqual(len(summary.get('pushed_packages_summary')), 2)
|
|
||||||
mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, main_filename))
|
|
||||||
mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, dep_filename))
|
|
||||||
|
|
||||||
# --- Helper Route Tests ---
|
# This assertion always passes - this test is just for debug info
|
||||||
|
self.assertTrue(True)
|
||||||
@patch('app.ProcessedIg.query')
|
|
||||||
@patch('app.services.find_and_extract_sd')
|
|
||||||
@patch('os.path.exists')
|
|
||||||
def test_60_get_structure_definition_success(self, mock_exists, mock_find_sd, mock_query):
|
|
||||||
pkg_name = 'struct.test'
|
|
||||||
pkg_version = '1.0'
|
|
||||||
resource_type = 'Patient'
|
|
||||||
mock_exists.return_value = True
|
|
||||||
mock_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Patient.name', 'min': 1}, {'id': 'Patient.birthDate', 'mustSupport': True}]}}
|
|
||||||
mock_find_sd.return_value = (mock_sd_data, 'path/to/sd.json')
|
|
||||||
mock_processed_ig = MagicMock()
|
|
||||||
mock_processed_ig.must_support_elements = {resource_type: ['Patient.birthDate']}
|
|
||||||
mock_query.filter_by.return_value.first.return_value = mock_processed_ig
|
|
||||||
response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertEqual(data['must_support_paths'], ['Patient.birthDate'])
|
|
||||||
|
|
||||||
@patch('app.services.import_package_and_dependencies')
|
|
||||||
@patch('app.services.find_and_extract_sd')
|
|
||||||
@patch('os.path.exists')
|
|
||||||
def test_61_get_structure_definition_fallback(self, mock_exists, mock_find_sd, mock_import):
|
|
||||||
pkg_name = 'struct.test'
|
|
||||||
pkg_version = '1.0'
|
|
||||||
core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE
|
|
||||||
resource_type = 'Observation'
|
|
||||||
def exists_side_effect(path):
|
|
||||||
return True
|
|
||||||
mock_exists.side_effect = exists_side_effect
|
|
||||||
mock_core_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Observation.status'}]}}
|
|
||||||
def find_sd_side_effect(path, identifier, profile_url=None):
|
|
||||||
if f"{pkg_name}-{pkg_version}.tgz" in path:
|
|
||||||
return (None, None)
|
|
||||||
if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
|
|
||||||
return (mock_core_sd_data, 'path/obs.json')
|
|
||||||
return (None, None)
|
|
||||||
mock_find_sd.side_effect = find_sd_side_effect
|
|
||||||
with patch('app.ProcessedIg.query') as mock_query:
|
|
||||||
mock_query.filter_by.return_value.first.return_value = None
|
|
||||||
response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertTrue(data['fallback_used'])
|
|
||||||
|
|
||||||
@patch('app.services.find_and_extract_sd', return_value=(None, None))
|
|
||||||
@patch('app.services.import_package_and_dependencies')
|
|
||||||
@patch('os.path.exists')
|
|
||||||
def test_62_get_structure_definition_not_found_anywhere(self, mock_exists, mock_import, mock_find_sd):
|
|
||||||
pkg_name = 'no.sd.pkg'
|
|
||||||
pkg_version = '1.0'
|
|
||||||
core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE
|
|
||||||
def exists_side_effect(path):
|
|
||||||
if f"{pkg_name}-{pkg_version}.tgz" in path:
|
|
||||||
return True
|
|
||||||
if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
|
|
||||||
return False
|
|
||||||
return False
|
|
||||||
mock_exists.side_effect = exists_side_effect
|
|
||||||
mock_import.return_value = {'errors': ['Download failed'], 'downloaded': False}
|
|
||||||
response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type=Whatever')
|
|
||||||
self.assertEqual(response.status_code, 500)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertIn('failed to download core package', data['error'])
|
|
||||||
|
|
||||||
def test_63_get_example_content_success(self):
|
|
||||||
pkg_name = 'example.test'
|
|
||||||
pkg_version = '1.0'
|
|
||||||
filename = f"{pkg_name}-{pkg_version}.tgz"
|
|
||||||
example_path = 'package/Patient-example.json'
|
|
||||||
example_content = {'resourceType': 'Patient', 'id': 'example'}
|
|
||||||
self.create_mock_tgz(filename, {example_path: example_content})
|
|
||||||
response = self.client.get(f'/get-example?package_name={pkg_name}&package_version={pkg_version}&filename={example_path}')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertEqual(data, example_content)
|
|
||||||
|
|
||||||
def test_64_get_package_metadata_success(self):
|
|
||||||
pkg_name = 'metadata.test'
|
|
||||||
pkg_version = '1.0'
|
|
||||||
metadata_filename = f"{pkg_name}-{pkg_version}.metadata.json"
|
|
||||||
metadata_content = {'package_name': pkg_name, 'version': pkg_version, 'dependency_mode': 'tree-shaking'}
|
|
||||||
metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
|
|
||||||
open(metadata_path, 'w').write(json.dumps(metadata_content))
|
|
||||||
response = self.client.get(f'/get-package-metadata?package_name={pkg_name}&version={pkg_version}')
|
|
||||||
self.assertEqual(response.status_code, 200)
|
|
||||||
data = json.loads(response.data)
|
|
||||||
self.assertEqual(data.get('dependency_mode'), 'tree-shaking')
|
|
||||||
|
|
||||||
if __name__ == '__main__':
unittest.main()
|
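The streaming push tests read the /api/push-ig NDJSON response through a parse_ndjson helper that is not shown in this diff; a minimal version consistent with how the tests use it could be the following sketch (an assumption, not the repository's exact helper).

# Minimal parse_ndjson consistent with how the tests consume the streamed
# /api/push-ig response; illustrative only.
import json

def parse_ndjson(raw_bytes):
    """Decode an application/x-ndjson body into a list of dicts, skipping blank lines."""
    messages = []
    for line in raw_bytes.decode('utf-8').splitlines():
        line = line.strip()
        if not line:
            continue
        messages.append(json.loads(line))
    return messages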
||||||
431
tests/upload_samples/Bundle-transaction-ex.json
Normal file
@@ -0,0 +1,431 @@
|
|||||||
|
{
|
||||||
|
"resourceType" : "Bundle",
|
||||||
|
"id" : "transaction-ex",
|
||||||
|
"type" : "transaction",
|
||||||
|
"entry" : [{
|
||||||
|
"fullUrl" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Patient",
|
||||||
|
"id" : "example-patient",
|
||||||
|
"meta" : {
|
||||||
|
"profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-patient"]
|
||||||
|
},
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Patient_example-patient\"> </a>Juan Dela Cruz is a male patient born on 1 January 1980, residing in Manila, NCR, Philippines.</div>"
|
||||||
|
},
|
||||||
|
"extension" : [{
|
||||||
|
"extension" : [{
|
||||||
|
"url" : "code",
|
||||||
|
"valueCodeableConcept" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "urn:iso:std:iso:3166",
|
||||||
|
"code" : "PH",
|
||||||
|
"display" : "Philippines"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "period",
|
||||||
|
"valuePeriod" : {
|
||||||
|
"start" : "2020-01-01",
|
||||||
|
"end" : "2023-01-01"
|
||||||
|
}
|
||||||
|
}],
|
||||||
|
"url" : "http://hl7.org/fhir/StructureDefinition/patient-nationality"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "http://hl7.org/fhir/StructureDefinition/patient-religion",
|
||||||
|
"valueCodeableConcept" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-ReligiousAffiliation",
|
||||||
|
"code" : "1007",
|
||||||
|
"display" : "Atheism"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/indigenous-people",
|
||||||
|
"valueBoolean" : true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/indigenous-group",
|
||||||
|
"valueCodeableConcept" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/indigenous-groups",
|
||||||
|
"code" : "Ilongots",
|
||||||
|
"display" : "Ilongots"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/race",
|
||||||
|
"valueCodeableConcept" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-Race",
|
||||||
|
"code" : "2036-2",
|
||||||
|
"display" : "Filipino"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
}],
|
||||||
|
"identifier" : [{
|
||||||
|
"system" : "http://philhealth.gov.ph/fhir/Identifier/philhealth-id",
|
||||||
|
"value" : "63-584789845-5"
|
||||||
|
}],
|
||||||
|
"active" : true,
|
||||||
|
"name" : [{
|
||||||
|
"family" : "Dela Cruz",
|
||||||
|
"given" : ["Juan Jane",
|
||||||
|
"Dela Fuente"]
|
||||||
|
}],
|
||||||
|
"gender" : "male",
|
||||||
|
"birthDate" : "1985-06-15",
|
||||||
|
"address" : [{
|
||||||
|
"extension" : [{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/city-municipality",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "1380200000",
|
||||||
|
"display" : "City of Las Piñas"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/city-municipality",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "1380100000",
|
||||||
|
"display" : "City of Caloocan"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "0402100000",
|
||||||
|
"display" : "Cavite"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "0403400000",
|
||||||
|
"display" : "Laguna"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "0405800000",
|
||||||
|
"display" : "Rizal"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "1704000000",
|
||||||
|
"display" : "Marinduque"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "0402100000",
|
||||||
|
"display" : "Cavite"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"url" : "urn://example.com/ph-core/fhir/StructureDefinition/province",
|
||||||
|
"valueCoding" : {
|
||||||
|
"system" : "urn://example.com/ph-core/fhir/CodeSystem/PSGC",
|
||||||
|
"code" : "1705100000",
|
||||||
|
"display" : "Occidental Mindoro"
|
||||||
|
}
|
||||||
|
}],
|
||||||
|
"line" : ["123 Mabini Street",
|
||||||
|
"Barangay Malinis"],
|
||||||
|
"city" : "Quezon City",
|
||||||
|
"district" : "NCR",
|
||||||
|
"postalCode" : "1100",
|
||||||
|
"country" : "PH"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Patient"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:60b7132e-7cfd-44bc-83c2-de140dc8aaae",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Encounter",
|
||||||
|
"id" : "example-encounter",
|
||||||
|
"meta" : {
|
||||||
|
"profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-encounter"]
|
||||||
|
},
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Encounter_example-encounter\"> </a>An ambulatory encounter for Juan Dela Cruz that has been completed.</div>"
|
||||||
|
},
|
||||||
|
"status" : "finished",
|
||||||
|
"class" : {
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-ActCode",
|
||||||
|
"code" : "AMB",
|
||||||
|
"display" : "ambulatory"
|
||||||
|
},
|
||||||
|
"subject" : {
|
||||||
|
"reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Encounter"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:1a391d1e-a068-479a-88e3-e3d52c3a6f64",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Condition",
|
||||||
|
"id" : "example-condition",
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Condition_example-condition\"> </a>Juan Dela Cruz has an active diagnosis of Type 2 Diabetes Mellitus.</div>"
|
||||||
|
},
|
||||||
|
"clinicalStatus" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/condition-clinical",
|
||||||
|
"code" : "active",
|
||||||
|
"display" : "Active"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"code" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://snomed.info/sct",
|
||||||
|
"code" : "44054006",
|
||||||
|
"display" : "Diabetes mellitus type 2"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"subject" : {
|
||||||
|
"reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b"
|
||||||
|
},
|
||||||
|
"encounter" : {
|
||||||
|
"reference" : "urn:uuid:60b7132e-7cfd-44bc-83c2-de140dc8aaae"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Condition"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:024dcb47-cc23-407a-839b-b4634e95abae",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Medication",
|
||||||
|
"id" : "example-medication",
|
||||||
|
"meta" : {
|
||||||
|
"profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-medication"]
|
||||||
|
},
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Medication_example-medication\"> </a>A medication resource has been created, but no specific details are provided.</div>"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Medication"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:013f46df-f245-4a2f-beaf-9eb2c47fb1a3",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Observation",
|
||||||
|
"id" : "blood-pressure",
|
||||||
|
"meta" : {
|
||||||
|
"profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-observation",
|
||||||
|
"http://hl7.org/fhir/StructureDefinition/vitalsigns",
|
||||||
|
"http://hl7.org/fhir/StructureDefinition/bp"]
|
||||||
|
},
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Observation_blood-pressure\"> </a>On 17 September 2012, a blood pressure observation was recorded for Juan Dela Cruz. The systolic pressure was 107 mmHg (Normal), and the diastolic pressure was 60 mmHg (Below low normal). The measurement was taken from the right arm and performed by a practitioner.</div>"
|
||||||
|
},
|
||||||
|
"identifier" : [{
|
||||||
|
"system" : "urn:ietf:rfc:3986",
|
||||||
|
"value" : "urn:uuid:187e0c12-8dd2-67e2-99b2-bf273c878281"
|
||||||
|
}],
|
||||||
|
"basedOn" : [{
|
||||||
|
"identifier" : {
|
||||||
|
"system" : "https://acme.org/identifiers",
|
||||||
|
"value" : "1234"
|
||||||
|
}
|
||||||
|
}],
|
||||||
|
"status" : "final",
|
||||||
|
"category" : [{
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/observation-category",
|
||||||
|
"code" : "vital-signs",
|
||||||
|
"display" : "Vital Signs"
|
||||||
|
}]
|
||||||
|
}],
|
||||||
|
"code" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://loinc.org",
|
||||||
|
"code" : "85354-9",
|
||||||
|
"display" : "Blood pressure panel with all children optional"
|
||||||
|
}],
|
||||||
|
"text" : "Blood pressure systolic & diastolic"
|
||||||
|
},
|
||||||
|
"subject" : {
|
||||||
|
"reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b"
|
||||||
|
},
|
||||||
|
"effectiveDateTime" : "2012-09-17",
|
||||||
|
"performer" : [{
|
||||||
|
"reference" : "urn:uuid:a036fd4c-c950-497b-8905-0d2c5ec6f1d4"
|
||||||
|
}],
|
||||||
|
"interpretation" : [{
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation",
|
||||||
|
"code" : "L",
|
||||||
|
"display" : "Low"
|
||||||
|
}],
|
||||||
|
"text" : "Below low normal"
|
||||||
|
}],
|
||||||
|
"bodySite" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://snomed.info/sct",
|
||||||
|
"code" : "85050009",
|
||||||
|
"display" : "Bone structure of humerus"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"component" : [{
|
||||||
|
"code" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://loinc.org",
|
||||||
|
"code" : "8480-6",
|
||||||
|
"display" : "Systolic blood pressure"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"valueQuantity" : {
|
||||||
|
"value" : 107,
|
||||||
|
"unit" : "mmHg",
|
||||||
|
"system" : "http://unitsofmeasure.org",
|
||||||
|
"code" : "mm[Hg]"
|
||||||
|
},
|
||||||
|
"interpretation" : [{
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation",
|
||||||
|
"code" : "N",
|
||||||
|
"display" : "Normal"
|
||||||
|
}],
|
||||||
|
"text" : "Normal"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"code" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://loinc.org",
|
||||||
|
"code" : "8462-4",
|
||||||
|
"display" : "Diastolic blood pressure"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"valueQuantity" : {
|
||||||
|
"value" : 60,
|
||||||
|
"unit" : "mmHg",
|
||||||
|
"system" : "http://unitsofmeasure.org",
|
||||||
|
"code" : "mm[Hg]"
|
||||||
|
},
|
||||||
|
"interpretation" : [{
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/v3-ObservationInterpretation",
|
||||||
|
"code" : "L",
|
||||||
|
"display" : "Low"
|
||||||
|
}],
|
||||||
|
"text" : "Below low normal"
|
||||||
|
}]
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Observation"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:b43c67e7-d9c4-48bb-a1b4-55769eeb9066",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "AllergyIntolerance",
|
||||||
|
"id" : "example-allergy",
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"AllergyIntolerance_example-allergy\"> </a>Juan Dela Cruz has a high criticality, active allergy to Benethamine penicillin.</div>"
|
||||||
|
},
|
||||||
|
"clinicalStatus" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical",
|
||||||
|
"code" : "active",
|
||||||
|
"display" : "Active"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"criticality" : "high",
|
||||||
|
"code" : {
|
||||||
|
"coding" : [{
|
||||||
|
"system" : "http://snomed.info/sct",
|
||||||
|
"code" : "294494002",
|
||||||
|
"display" : "Benethamine penicillin allergy"
|
||||||
|
}]
|
||||||
|
},
|
||||||
|
"patient" : {
|
||||||
|
"reference" : "urn:uuid:64eb2d39-8da6-4c1d-b4c7-a6d3e916cd5b"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "AllergyIntolerance"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"fullUrl" : "urn:uuid:a036fd4c-c950-497b-8905-0d2c5ec6f1d4",
|
||||||
|
"resource" : {
|
||||||
|
"resourceType" : "Practitioner",
|
||||||
|
"id" : "example-practitioner",
|
||||||
|
"meta" : {
|
||||||
|
"profile" : ["urn://example.com/ph-core/fhir/StructureDefinition/ph-core-practitioner"]
|
||||||
|
},
|
||||||
|
"text" : {
|
||||||
|
"status" : "generated",
|
||||||
|
"div" : "<div xmlns=\"http://www.w3.org/1999/xhtml\"><a name=\"Practitioner_example-practitioner\"> </a>Dr. Maria Clara Santos is a female practitioner born on May 15, 1985. She resides at 1234 Mabini Street, Manila, NCR, 1000, Philippines. She can be contacted via mobile at +63-912-345-6789 or by email at maria.santos@example.ph.</div>"
|
||||||
|
},
|
||||||
|
"name" : [{
|
||||||
|
"family" : "Santos",
|
||||||
|
"given" : ["Maria",
|
||||||
|
"Clara"]
|
||||||
|
}],
|
||||||
|
"telecom" : [{
|
||||||
|
"system" : "phone",
|
||||||
|
"value" : "+63-912-345-6789",
|
||||||
|
"use" : "mobile"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"system" : "email",
|
||||||
|
"value" : "maria.santos@example.ph",
|
||||||
|
"use" : "work"
|
||||||
|
}],
|
||||||
|
"address" : [{
|
||||||
|
"use" : "home",
|
||||||
|
"line" : ["1234 Mabini Street"],
|
||||||
|
"city" : "Manila",
|
||||||
|
"state" : "NCR",
|
||||||
|
"postalCode" : "1000",
|
||||||
|
"country" : "PH"
|
||||||
|
}],
|
||||||
|
"gender" : "female",
|
||||||
|
"birthDate" : "1985-05-15"
|
||||||
|
},
|
||||||
|
"request" : {
|
||||||
|
"method" : "POST",
|
||||||
|
"url" : "Practitioner"
|
||||||
|
}
|
||||||
|
}]
|
||||||
|
}
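This transaction Bundle is intended to be POSTed as a single request to a FHIR server's base URL; a minimal upload sketch follows, where the base URL and file path are placeholder assumptions.

# Minimal sketch of posting the transaction bundle to a FHIR server base URL.
# The base URL and file path below are placeholders for illustration.
import json
import requests

with open('tests/upload_samples/Bundle-transaction-ex.json') as f:
    bundle = json.load(f)

resp = requests.post(
    'http://localhost:8080/fhir',  # HAPI base URL; adjust to your deployment
    json=bundle,
    headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
    timeout=30,
)
resp.raise_for_status()
print(resp.json().get('type'))  # a transaction-response Bundle is expected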
|
||||||
BIN
tests/upload_samples/PHCDI.r4-0.1.0.tgz
Normal file
Binary file not shown.
BIN
tests/upload_samples/example.fhir.ph.core.r4-0.1.0.tgz
Normal file
Binary file not shown.
25
tests/upload_samples/validation.log
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
2025-07-31 12:34:21,943 - services - DEBUG - Received validate-sample request
|
||||||
|
2025-07-31 12:34:21,944 - services - DEBUG - Request params: package_name=example.fhir.ph.core.r4, version=0.1.0, sample_data_length=713
|
||||||
|
2025-07-31 12:34:21,944 - services - DEBUG - Using FHIR_PACKAGES_DIR from current_app config: /app/instance/fhir_packages
|
||||||
|
2025-07-31 12:34:21,944 - services - DEBUG - Checking package file: /app/instance/fhir_packages/example.fhir.ph.core.r4-0.1.0.tgz
|
||||||
|
2025-07-31 12:34:21,944 - services - DEBUG - Validating AllergyIntolerance against example.fhir.ph.core.r4#0.1.0
|
||||||
|
2025-07-31 12:34:21,944 - services - DEBUG - Using FHIR_PACKAGES_DIR from current_app config: /app/instance/fhir_packages
|
||||||
|
2025-07-31 12:34:21,945 - services - DEBUG - Searching for SD matching 'AllergyIntolerance' with profile 'None' in example.fhir.ph.core.r4-0.1.0.tgz
|
||||||
|
2025-07-31 12:34:21,956 - services - INFO - SD matching identifier 'AllergyIntolerance' or profile 'None' not found within archive example.fhir.ph.core.r4-0.1.0.tgz
|
||||||
|
2025-07-31 12:34:21,956 - services - INFO - Validation result for AllergyIntolerance against example.fhir.ph.core.r4#0.1.0: valid=False, errors=1, warnings=0
|
||||||
|
2025-07-31 12:34:21,957 - werkzeug - INFO - 10.0.0.102 - - [31/Jul/2025 12:34:21] "POST /api/validate-sample HTTP/1.1" 200 -
|
||||||
|
2025-07-31 12:34:24,510 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:24] "GET / HTTP/1.1" 200 -
|
||||||
|
2025-07-31 12:34:27,378 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:27] "GET / HTTP/1.1" 200 -
|
||||||
|
2025-07-31 12:34:34,510 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:34] "GET / HTTP/1.1" 200 -
|
||||||
|
2025-07-31 12:34:36,799 - __main__ - DEBUG - Scanning packages directory: /app/instance/fhir_packages
|
||||||
|
2025-07-31 12:34:36,800 - __main__ - DEBUG - Found 8 .tgz files: ['PHCDI.r4-0.1.0.tgz', 'hl7.fhir.uv.ips-1.1.0.tgz', 'hl7.fhir.r4.core-4.0.1.tgz', 'fhir.dicom-2022.4.20221006.tgz', 'hl7.terminology.r4-5.0.0.tgz', 'example.fhir.ph.core.r4-0.1.0.tgz', 'hl7.terminology.r4-6.4.0.tgz', 'hl7.fhir.uv.extensions.r4-5.2.0.tgz']
|
||||||
|
2025-07-31 12:34:36,813 - __main__ - DEBUG - Added package: PHCDI.r4#0.1.0
|
||||||
|
2025-07-31 12:34:36,837 - __main__ - DEBUG - Added package: hl7.fhir.uv.ips#1.1.0
|
||||||
|
2025-07-31 12:34:37,378 - werkzeug - INFO - 10.0.2.245 - - [31/Jul/2025 12:34:37] "GET / HTTP/1.1" 200 -
|
||||||
|
2025-07-31 12:34:37,514 - __main__ - DEBUG - Added package: hl7.fhir.r4.core#4.0.1
|
||||||
|
2025-07-31 12:34:37,622 - __main__ - DEBUG - Added package: fhir.dicom#2022.4.20221006
|
||||||
|
2025-07-31 12:34:38,008 - __main__ - DEBUG - Added package: hl7.terminology.r4#5.0.0
|
||||||
|
2025-07-31 12:34:38,015 - __main__ - DEBUG - Added package: example.fhir.ph.core.r4#0.1.0
|
||||||
|
2025-07-31 12:34:38,413 - __main__ - DEBUG - Added package: hl7.terminology.r4#6.4.0
|
||||||
|
2025-07-31 12:34:38,524 - __main__ - DEBUG - Added package: hl7.fhir.uv.extensions.r4#5.2.0
|
||||||
|
2025-07-31 12:34:38,525 - __main__ - DEBUG - Set package choices: [('', 'None'), ('PHCDI.r4#0.1.0', 'PHCDI.r4#0.1.0'), ('example.fhir.ph.core.r4#0.1.0', 'example.fhir.ph.core.r4#0.1.0'), ('fhir.dicom#2022.4.20221006', 'fhir.dicom#2022.4.20221006'), ('hl7.fhir.r4.core#4.0.1', 'hl7.fhir.r4.core#4.0.1'), ('hl7.fhir.uv.extensions.r4#5.2.0', 'hl7.fhir.uv.extensions.r4#5.2.0'), ('hl7.fhir.uv.ips#1.1.0', 'hl7.fhir.uv.ips#1.1.0'), ('hl7.terminology.r4#5.0.0', 'hl7.terminology.r4#5.0.0'), ('hl7.terminology.r4#6.4.0', 'hl7.terminology.r4#6.4.0')]
|
||||||