Mirror of https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit.git (synced 2025-09-17 14:25:02 +00:00)

Initial commit

This commit is contained in:
parent 8931e921be
commit d740ac8b9e

.gitignore (vendored): 4 changed lines
@@ -1,2 +1,6 @@
/instance/
/logs/
/.pydevproject
/__pycache__/
/myenv/
/tmp/

.project: 6 changed lines
@@ -5,6 +5,11 @@
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.python.pydev.PyDevBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.wst.validation.validationbuilder</name>
<arguments>
@@ -13,5 +18,6 @@
</buildSpec>
<natures>
<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
<nature>org.python.pydev.pythonNature</nature>
</natures>
</projectDescription>

app.py: 49 changed lines
@@ -1,6 +1,10 @@
import sys
import os
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
# Make paths relative to the current directory instead of absolute '/app' paths
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
# Introduce app_dir variable that can be overridden by environment
app_dir = os.environ.get('APP_DIR', CURRENT_DIR)
sys.path.append(CURRENT_DIR)
import datetime
import shutil
import queue
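The hunk above swaps hard-coded '/app' paths for paths derived from the module's own location, with an APP_DIR environment override. A minimal sketch of the same pattern in isolation (standard library only; the fallback values here are illustrative):

    import os

    # Resolve the application directory from this file's location, but allow an
    # environment override (e.g. APP_DIR=/app inside the Docker image).
    CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
    app_dir = os.environ.get('APP_DIR', CURRENT_DIR)

    # Derived locations follow the resolved directory instead of a fixed /app prefix.
    instance_path = os.path.join(app_dir, 'instance')
    default_db = f"sqlite:///{os.path.join(instance_path, 'fhir_ig.db')}"
    DATABASE_URL = os.environ.get('DATABASE_URL', default_db)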
@@ -52,16 +56,19 @@ from logging.handlers import RotatingFileHandler
#app setup
app = Flask(__name__)
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'your-fallback-secret-key-here')
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:////app/instance/fhir_ig.db')

# Update paths to be relative to current directory
instance_path = os.path.join(CURRENT_DIR, 'instance')
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', f'sqlite:///{os.path.join(instance_path, "fhir_ig.db")}')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['FHIR_PACKAGES_DIR'] = '/app/instance/fhir_packages'
app.config['FHIR_PACKAGES_DIR'] = os.path.join(instance_path, 'fhir_packages')
app.config['API_KEY'] = os.environ.get('API_KEY', 'your-fallback-api-key-here')
app.config['VALIDATE_IMPOSED_PROFILES'] = True
app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True
app.config['UPLOAD_FOLDER'] = '/app/static/uploads' # For GoFSH output
app.config['UPLOAD_FOLDER'] = os.path.join(CURRENT_DIR, 'static', 'uploads') # For GoFSH output
app.config['APP_BASE_URL'] = os.environ.get('APP_BASE_URL', 'http://localhost:5000')
app.config['HAPI_FHIR_URL'] = os.environ.get('HAPI_FHIR_URL', 'http://localhost:8080/fhir')
CONFIG_PATH = '/usr/local/tomcat/conf/application.yaml'
CONFIG_PATH = os.environ.get('CONFIG_PATH', '/usr/local/tomcat/conf/application.yaml')

# Basic Swagger configuration
app.config['SWAGGER'] = {
@@ -228,6 +235,11 @@ db = SQLAlchemy(app)
csrf = CSRFProtect(app)
migrate = Migrate(app, db)

# Add a global application state dictionary for sharing state between threads
app_state = {
'fetch_failed': False
}

# @app.route('/clear-cache')
# def clear_cache():
#     """Clears the in-memory package cache, the DB timestamp, and the CachedPackage table."""
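The module-level app_state dictionary replaces Flask's session for flags that background work needs to publish, since session data is bound to a single request context. A self-contained sketch of that pattern, assuming nothing beyond Flask itself (the /cache-status route is illustrative, not part of the toolkit):

    import threading
    import time
    from flask import Flask, jsonify

    app = Flask(__name__)

    # Shared between the worker thread and request handlers; a session value
    # set inside a background thread would not be visible to later requests.
    app_state = {'fetch_failed': False}

    def refresh_in_background():
        try:
            time.sleep(1)                      # stand-in for the registry fetch
            app_state['fetch_failed'] = False
        except Exception:
            app_state['fetch_failed'] = True

    @app.route('/cache-status')                # illustrative route only
    def cache_status():
        return jsonify(fetch_failed=app_state['fetch_failed'])

    threading.Thread(target=refresh_in_background, daemon=True).start()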
@@ -695,7 +707,7 @@ def perform_cache_refresh_and_log():
now_ts = datetime.datetime.now(datetime.timezone.utc)
app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages
app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts
session['fetch_failed'] = fetch_failed # Update session flag reflecting fetch outcome
app_state['fetch_failed'] = fetch_failed # Update app_state instead of session
logger.info(f"Updated in-memory cache with {len(normalized_packages)} packages. Fetch failed: {fetch_failed}")

# 6. Cache in Database (if successful fetch)
@@ -2194,7 +2206,7 @@ def api_upload_test_data():
if auth_type not in ['none', 'bearerToken', 'basic']:
return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
if auth_type == 'bearerToken' and not auth_token:
return jsonify({"status": "error", "message": "Bearer Token required."}), 400
return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400
if auth_type == 'basic' and (not username or not password):
return jsonify({"status": "error", "message": "Username and Password required for Basic Authentication."}), 400
if upload_mode not in ['individual', 'transaction']:
@@ -2238,7 +2250,7 @@ def api_upload_test_data():

# --- Prepare Server Info and Options ---
server_info = {'url': fhir_server_url, 'auth_type': auth_type}
if auth_type == 'bearerToken':
if auth_type == 'bearer':
server_info['auth_token'] = auth_token
elif auth_type == 'basic':
credentials = f"{username}:{password}"
@@ -2658,7 +2670,7 @@ def search_and_import():
raw_packages = fetch_packages_from_registries(search_term='')
logger.debug(f"fetch_packages_from_registries returned {len(raw_packages)} raw packages.")
if not raw_packages:
logger.warning("fetch_packages_from_registries returned no packages. Handling fallback or empty list.")
logger.warning("No packages returned from registries during refresh.")
normalized_packages = []
fetch_failed_flag = True
session['fetch_failed'] = True
@@ -2672,6 +2684,7 @@ def search_and_import():
now_ts = datetime.datetime.now(datetime.timezone.utc)
app.config['MANUAL_PACKAGE_CACHE'] = normalized_packages
app.config['MANUAL_CACHE_TIMESTAMP'] = now_ts
app_state['fetch_failed'] = False
logger.info(f"Stored {len(normalized_packages)} packages in manual cache (memory).")

# Save to CachedPackage table
@@ -2881,10 +2894,18 @@ def safe_parse_version_local(v_str): # Use different name
elif suffix in ['draft', 'ballot', 'preview']: return pkg_version_local.parse(f"{base_part}b0")
elif suffix and suffix.startswith('rc'): return pkg_version_local.parse(f"{base_part}rc{ ''.join(filter(str.isdigit, suffix)) or '0'}")
return pkg_version_local.parse(base_part)
except pkg_version_local.InvalidVersion: logger_details.warning(f"[DetailsView] Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha."); return pkg_version_local.parse("0.0.0a0")
except Exception as e: logger_details.error(f"[DetailsView] Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}"); return pkg_version_local.parse("0.0.0a0")
else: logger_details.warning(f"[DetailsView] Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha."); return pkg_version_local.parse("0.0.0a0")
except Exception as e: logger_details.error(f"[DetailsView] Unexpected error in safe_parse_version_local for '{v_str}': {e}"); return pkg_version_local.parse("0.0.0a0")
except pkg_version_local.InvalidVersion:
logger_details.warning(f"[DetailsView] Invalid base version '{base_part}' after splitting '{original_v_str}'. Treating as alpha.")
return pkg_version_local.parse("0.0.0a0")
except Exception as e:
logger_details.error(f"[DetailsView] Unexpected error parsing FHIR-suffixed version '{original_v_str}': {e}")
return pkg_version_local.parse("0.0.0a0")
else:
logger_details.warning(f"[DetailsView] Unparseable version '{original_v_str}' (base '{base_part}' not standard). Treating as alpha.")
return pkg_version_local.parse("0.0.0a0")
except Exception as e:
logger_details.error(f"[DetailsView] Unexpected error in safe_parse_version_local for '{v_str}': {e}")
return pkg_version_local.parse("0.0.0a0")
# --- End Local Helper Definition ---
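The expanded helper maps FHIR-style suffixes onto PEP 440 pre-release markers, so packaging's ordering places previews before the final release and the "0.0.0a0" fallback below everything else. A quick check of that behaviour with the packaging library (the same one the code aliases as pkg_version_local):

    from packaging import version as pkg_version

    # "1.1.0-preview" is rewritten to "1.1.0b0", which sorts before the release.
    preview = pkg_version.parse("1.1.0b0")
    final = pkg_version.parse("1.1.0")
    assert preview < final

    # Unparseable inputs fall back to "0.0.0a0" and sort below real versions.
    fallback = pkg_version.parse("0.0.0a0")
    assert fallback < preview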

@app.route('/package-details/<name>')
@@ -3072,4 +3093,4 @@ if __name__ == '__main__':
logger.debug(f"Attempting to create database tables for URI: {app.config['SQLALCHEMY_DATABASE_URI']}")
db.create_all()
logger.info("Database tables created successfully (if they didn't exist).")
app.run(host='0.0.0.0', port=5000, debug=False)
app.run(host='0.0.0.0', port=5000, debug=False)

@@ -1,6 +1,3 @@
dependencies:
- name: hapi-fhir-jpaserver
repository: https://hapifhir.github.io/hapi-fhir-jpaserver-starter/
version: 0.20.0
digest: sha256:0e3b3ee43fdec137a4e61465880c7f437bac52459514674d4ce54aac39f83bde
generated: "2025-07-16T09:42:23.594307042+10:00"
dependencies: []
digest: sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
generated: "2025-08-04T14:30:00.000000000Z"

@@ -13,8 +13,4 @@ keywords:
home: https://github.com/jgsuess/FHIRFLARE-IG-Toolkit
maintainers:
- name: Jörn Guy Süß
email: jgsuess@gmail.com
dependencies:
- name: hapi-fhir-jpaserver
version: 0.20.0
repository: https://hapifhir.github.io/hapi-fhir-jpaserver-starter/
email: jgsuess@gmail.com
Binary file not shown.

@@ -2,19 +2,19 @@ apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "fhirflare-ig-toolkit.fullname" . }}
labels:
{{ include "fhirflare-ig-toolkit.labels" . | indent 4 }}
labels:
{{- include "fhirflare-ig-toolkit.labels" . | nindent 4 }}
spec:
replicas: {{ .Values.replicaCount | default 1 }}
selector:
matchLabels:
{{ include "fhirflare-ig-toolkit.selectorLabels" . | indent 6 }}
matchLabels:
{{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 6 }}
strategy:
type: Recreate
template:
metadata:
labels:
{{ include "fhirflare-ig-toolkit.selectorLabels" . | indent 8 }}
labels:
{{- include "fhirflare-ig-toolkit.selectorLabels" . | nindent 8 }}
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
@@ -43,7 +43,7 @@ spec:
- name: FLASK_ENV
value: {{ .Values.config.flaskEnv | default "development" | quote }}
- name: HAPI_FHIR_URL
value: {{ .Values.config.hapiFhirUrl | default "http://localhost:8080/fhir" | quote }}
value: {{ .Values.config.externalHapiServerUrl | default "http://external-hapi-fhir:8080/fhir" | quote }}
- name: NODE_PATH
value: {{ .Values.config.nodePath | default "/usr/lib/node_modules" | quote }}
- name: TMPDIR

@@ -4,7 +4,7 @@
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion }}
apiVersion: networking.k8s.io/v1beta1
{{ else }}
{{- else }}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
@@ -28,7 +28,7 @@ spec:
name: {{ $fullName }}
port:
number: {{ .Values.service.port | default 5000 }}
{{ else }}
{{- else }}
backend:
serviceName: {{ $fullName }}
servicePort: {{ .Values.service.port | default 5000 }}

@@ -3,7 +3,7 @@ kind: Pod
metadata:
name: "{{ .Release.Name }}-fhirflare-test-endpoint"
labels:
helm.sh/chart: {{ .Chart.Name }}-{{ .Chart.Version }}
helm.sh/chart: "{{ .Chart.Name }}-{{ .Chart.Version }}"
app.kubernetes.io/name: {{ .Chart.Name }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}

@@ -12,11 +12,13 @@ fullnameOverride: ""

# FHIRflare specific configuration
config:
appBaseUrl: "http://localhost:5000"
# Application mode: "lite" means using external HAPI server, "standalone" means running with embedded HAPI server
appMode: "lite"
# URL for the external HAPI FHIR server when in lite mode
externalHapiServerUrl: "http://external-hapi-fhir:8080/fhir"
appBaseUrl: "http://localhost:5000"
flaskApp: "app.py"
flaskEnv: "development"
hapiFhirUrl: "http://localhost:8080/fhir"
nodePath: "/usr/lib/node_modules"

service:
@@ -84,13 +86,4 @@ affinity: {}

ingress:
# -- whether to create a primitive Ingress to expose the FHIR server HTTP endpoint
enabled: false

# HAPI FHIR server subchart configuration
hapi-fhir-jpaserver:
# Add any HAPI FHIR specific values here to override defaults
enabled: true
postgresql:
enabled: true
auth:
database: "fhir"
enabled: false

charts/install.sh (new executable file, 23 lines)
@@ -0,0 +1,23 @@
#!/bin/bash
#
# FHIRFLARE-IG-Toolkit Installation Script
#
# Description:
# This script installs the FHIRFLARE-IG-Toolkit Helm chart into a Kubernetes cluster.
# It adds the FHIRFLARE-IG-Toolkit Helm repository and then installs the chart
# in the 'flare' namespace, creating the namespace if it doesn't exist.
#
# Usage:
# ./install.sh
#
# Requirements:
# - Helm (v3+)
# - kubectl configured with access to your Kubernetes cluster
#

# Add the FHIRFLARE-IG-Toolkit Helm repository
helm repo add flare https://jgsuess.github.io/FHIRFLARE-IG-Toolkit/

# Install the FHIRFLARE-IG-Toolkit chart in the 'flare' namespace

helm install flare/fhirflare-ig-toolkit --namespace flare --create-namespace --generate-name --set hapi-fhir-jpaserver.postgresql.primary.persistence.storageClass=gp2 --atomic

docker-compose-winnebago/docker-compose.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
services:
fhirflare-standalone:
image: ghcr.io/sudo-jhare/fhirflare-ig-toolkit-standalone:latest
container_name: fhirflare-standalone
ports:
- "5000:5000"
- "8080:8080"
volumes:
- ./instance:/app/instance
- ./static/uploads:/app/static/uploads
- ./instance/hapi-h2-data:/app/h2-data
- ./logs:/app/logs
restart: unless-stopped

docker-compose-winnebago/down.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash

# Stop and remove all containers defined in the Docker Compose file,
# along with any anonymous volumes attached to them.
docker compose down --volumes

docker-compose-winnebago/up.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash

# Run Docker Compose

docker compose up --detach --force-recreate --renew-anon-volumes --always-recreate-deps

@@ -1,6 +1,6 @@
services:
fhirflare:
image: ghcr.io/jgsuess/fhirflare-ig-toolkit:latest
image: ghcr.io/sudo-jhare/fhirflare-ig-toolkit-lite:latest
ports:
- "5000:5000"
volumes:
File diff suppressed because one or more lines are too long

@@ -1,8 +0,0 @@
* Serving Flask app 'app'
* Debug mode: off
* Serving Flask app 'app'
* Debug mode: off
* Serving Flask app 'app'
* Debug mode: off
* Serving Flask app 'app'
* Debug mode: off
File diff suppressed because one or more lines are too long
@ -1,40 +0,0 @@
|
||||
2025-07-15 02:43:35,572 CRIT Supervisor is running as root. Privileges were not dropped because no user is specified in the config file. If you intend to run as root, you can set user=root in the config file to avoid this message.
|
||||
2025-07-15 02:43:35,575 INFO supervisord started with pid 1
|
||||
2025-07-15 02:43:36,577 INFO spawned: 'flask' with pid 7
|
||||
2025-07-15 02:43:36,578 INFO spawned: 'tomcat' with pid 8
|
||||
2025-07-15 02:43:46,682 INFO success: flask entered RUNNING state, process has stayed up for > than 10 seconds (startsecs)
|
||||
2025-07-15 02:44:07,102 INFO success: tomcat entered RUNNING state, process has stayed up for > than 30 seconds (startsecs)
|
||||
2025-07-15 03:33:26,668 WARN received SIGTERM indicating exit request
|
||||
2025-07-15 03:33:26,676 INFO waiting for flask, tomcat to die
|
||||
2025-07-15 03:33:26,787 WARN stopped: tomcat (exit status 143)
|
||||
2025-07-15 03:33:26,798 WARN stopped: flask (terminated by SIGTERM)
|
||||
2025-07-15 03:35:53,949 CRIT Supervisor is running as root. Privileges were not dropped because no user is specified in the config file. If you intend to run as root, you can set user=root in the config file to avoid this message.
|
||||
2025-07-15 03:35:53,950 INFO supervisord started with pid 1
|
||||
2025-07-15 03:35:54,952 INFO spawned: 'flask' with pid 7
|
||||
2025-07-15 03:35:54,954 INFO spawned: 'tomcat' with pid 8
|
||||
2025-07-15 03:36:05,793 INFO success: flask entered RUNNING state, process has stayed up for > than 10 seconds (startsecs)
|
||||
2025-07-15 03:36:25,546 INFO success: tomcat entered RUNNING state, process has stayed up for > than 30 seconds (startsecs)
|
||||
2025-07-15 03:36:32,553 WARN received SIGTERM indicating exit request
|
||||
2025-07-15 03:36:32,555 INFO waiting for flask, tomcat to die
|
||||
2025-07-15 03:36:32,608 WARN stopped: tomcat (exit status 143)
|
||||
2025-07-15 03:36:32,615 WARN stopped: flask (terminated by SIGTERM)
|
||||
2025-07-15 03:36:48,443 CRIT Supervisor is running as root. Privileges were not dropped because no user is specified in the config file. If you intend to run as root, you can set user=root in the config file to avoid this message.
|
||||
2025-07-15 03:36:48,444 INFO supervisord started with pid 1
|
||||
2025-07-15 03:36:49,446 INFO spawned: 'flask' with pid 7
|
||||
2025-07-15 03:36:49,448 INFO spawned: 'tomcat' with pid 8
|
||||
2025-07-15 03:37:00,401 INFO success: flask entered RUNNING state, process has stayed up for > than 10 seconds (startsecs)
|
||||
2025-07-15 03:37:20,421 INFO success: tomcat entered RUNNING state, process has stayed up for > than 30 seconds (startsecs)
|
||||
2025-07-15 03:41:24,314 WARN received SIGTERM indicating exit request
|
||||
2025-07-15 03:41:24,314 INFO waiting for flask, tomcat to die
|
||||
2025-07-15 03:41:24,355 WARN stopped: tomcat (exit status 143)
|
||||
2025-07-15 03:41:24,371 WARN stopped: flask (terminated by SIGTERM)
|
||||
2025-07-15 03:42:33,879 CRIT Supervisor is running as root. Privileges were not dropped because no user is specified in the config file. If you intend to run as root, you can set user=root in the config file to avoid this message.
|
||||
2025-07-15 03:42:33,880 INFO supervisord started with pid 1
|
||||
2025-07-15 03:42:34,883 INFO spawned: 'flask' with pid 7
|
||||
2025-07-15 03:42:34,885 INFO spawned: 'tomcat' with pid 8
|
||||
2025-07-15 03:42:44,901 INFO success: flask entered RUNNING state, process has stayed up for > than 10 seconds (startsecs)
|
||||
2025-07-15 03:43:04,922 INFO success: tomcat entered RUNNING state, process has stayed up for > than 30 seconds (startsecs)
|
||||
2025-07-15 04:54:10,136 WARN received SIGTERM indicating exit request
|
||||
2025-07-15 04:54:10,146 INFO waiting for flask, tomcat to die
|
||||
2025-07-15 04:54:10,310 WARN stopped: tomcat (exit status 143)
|
||||
2025-07-15 04:54:10,320 WARN stopped: flask (terminated by SIGTERM)
|
@ -1,152 +0,0 @@
|
||||
15-Jul-2025 02:43:37.029 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version name: Apache Tomcat/10.1.43
|
||||
15-Jul-2025 02:43:37.034 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server built: Jul 1 2025 21:30:20 UTC
|
||||
15-Jul-2025 02:43:37.034 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version number: 10.1.43.0
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Name: Linux
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Version: 6.11.0-29-generic
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Architecture: amd64
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Java Home: /opt/java/openjdk
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Version: 17.0.15+6
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Vendor: Eclipse Adoptium
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_BASE: /usr/local/tomcat
|
||||
15-Jul-2025 02:43:37.035 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_HOME: /usr/local/tomcat
|
||||
15-Jul-2025 02:43:37.049 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.config.file=/usr/local/tomcat/conf/logging.properties
|
||||
15-Jul-2025 02:43:37.049 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djdk.tls.ephemeralDHKeySize=2048
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.protocol.handler.pkgs=org.apache.catalina.webresources
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dsun.io.useCanonCaches=false
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dorg.apache.catalina.security.SecurityListener.UMASK=0027
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.io=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.050 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.051 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.052 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
|
||||
15-Jul-2025 02:43:37.052 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.base=/usr/local/tomcat
|
||||
15-Jul-2025 02:43:37.052 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.home=/usr/local/tomcat
|
||||
15-Jul-2025 02:43:37.053 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.io.tmpdir=/usr/local/tomcat/temp
|
||||
15-Jul-2025 02:43:37.059 INFO [main] org.apache.catalina.core.AprLifecycleListener.lifecycleEvent Loaded Apache Tomcat Native library [2.0.9] using APR version [1.7.2].
|
||||
15-Jul-2025 02:43:37.063 INFO [main] org.apache.catalina.core.AprLifecycleListener.initializeSSL OpenSSL successfully initialized [OpenSSL 3.0.13 30 Jan 2024]
|
||||
15-Jul-2025 02:43:37.283 INFO [main] org.apache.coyote.AbstractProtocol.init Initializing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 02:43:37.305 INFO [main] org.apache.catalina.startup.Catalina.load Server initialization in [468] milliseconds
|
||||
15-Jul-2025 02:43:37.354 INFO [main] org.apache.catalina.core.StandardService.startInternal Starting service [Catalina]
|
||||
15-Jul-2025 02:43:37.354 INFO [main] org.apache.catalina.core.StandardEngine.startInternal Starting Servlet engine: [Apache Tomcat/10.1.43]
|
||||
15-Jul-2025 02:43:37.370 INFO [main] org.apache.coyote.AbstractProtocol.start Starting ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 02:43:37.382 INFO [main] org.apache.catalina.startup.Catalina.start Server startup in [77] milliseconds
|
||||
15-Jul-2025 03:33:26.694 INFO [Thread-1] org.apache.coyote.AbstractProtocol.pause Pausing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:33:26.730 INFO [Thread-1] org.apache.catalina.core.StandardService.stopInternal Stopping service [Catalina]
|
||||
15-Jul-2025 03:33:26.735 INFO [Thread-1] org.apache.coyote.AbstractProtocol.stop Stopping ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:33:26.748 INFO [Thread-1] org.apache.coyote.AbstractProtocol.destroy Destroying ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:35:55.373 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version name: Apache Tomcat/10.1.43
|
||||
15-Jul-2025 03:35:55.379 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server built: Jul 1 2025 21:30:20 UTC
|
||||
15-Jul-2025 03:35:55.379 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version number: 10.1.43.0
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Name: Linux
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Version: 6.11.0-29-generic
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Architecture: amd64
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Java Home: /opt/java/openjdk
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Version: 17.0.15+6
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Vendor: Eclipse Adoptium
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_BASE: /usr/local/tomcat
|
||||
15-Jul-2025 03:35:55.380 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_HOME: /usr/local/tomcat
|
||||
15-Jul-2025 03:35:55.397 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.config.file=/usr/local/tomcat/conf/logging.properties
|
||||
15-Jul-2025 03:35:55.397 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djdk.tls.ephemeralDHKeySize=2048
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.protocol.handler.pkgs=org.apache.catalina.webresources
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dsun.io.useCanonCaches=false
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dorg.apache.catalina.security.SecurityListener.UMASK=0027
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.io=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.398 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
|
||||
15-Jul-2025 03:35:55.399 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.base=/usr/local/tomcat
|
||||
15-Jul-2025 03:35:55.399 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.home=/usr/local/tomcat
|
||||
15-Jul-2025 03:35:55.399 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.io.tmpdir=/usr/local/tomcat/temp
|
||||
15-Jul-2025 03:35:55.402 INFO [main] org.apache.catalina.core.AprLifecycleListener.lifecycleEvent Loaded Apache Tomcat Native library [2.0.9] using APR version [1.7.2].
|
||||
15-Jul-2025 03:35:55.405 INFO [main] org.apache.catalina.core.AprLifecycleListener.initializeSSL OpenSSL successfully initialized [OpenSSL 3.0.13 30 Jan 2024]
|
||||
15-Jul-2025 03:35:55.684 INFO [main] org.apache.coyote.AbstractProtocol.init Initializing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:35:55.723 INFO [main] org.apache.catalina.startup.Catalina.load Server initialization in [531] milliseconds
|
||||
15-Jul-2025 03:35:55.782 INFO [main] org.apache.catalina.core.StandardService.startInternal Starting service [Catalina]
|
||||
15-Jul-2025 03:35:55.782 INFO [main] org.apache.catalina.core.StandardEngine.startInternal Starting Servlet engine: [Apache Tomcat/10.1.43]
|
||||
15-Jul-2025 03:35:55.804 INFO [main] org.apache.coyote.AbstractProtocol.start Starting ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:35:55.819 INFO [main] org.apache.catalina.startup.Catalina.start Server startup in [96] milliseconds
|
||||
15-Jul-2025 03:36:32.560 INFO [Thread-1] org.apache.coyote.AbstractProtocol.pause Pausing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:36:32.570 INFO [Thread-1] org.apache.catalina.core.StandardService.stopInternal Stopping service [Catalina]
|
||||
15-Jul-2025 03:36:32.571 INFO [Thread-1] org.apache.coyote.AbstractProtocol.stop Stopping ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:36:32.575 INFO [Thread-1] org.apache.coyote.AbstractProtocol.destroy Destroying ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:36:49.853 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version name: Apache Tomcat/10.1.43
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server built: Jul 1 2025 21:30:20 UTC
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version number: 10.1.43.0
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Name: Linux
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Version: 6.11.0-29-generic
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Architecture: amd64
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Java Home: /opt/java/openjdk
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Version: 17.0.15+6
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Vendor: Eclipse Adoptium
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_BASE: /usr/local/tomcat
|
||||
15-Jul-2025 03:36:49.857 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_HOME: /usr/local/tomcat
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.config.file=/usr/local/tomcat/conf/logging.properties
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djdk.tls.ephemeralDHKeySize=2048
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.protocol.handler.pkgs=org.apache.catalina.webresources
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dsun.io.useCanonCaches=false
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dorg.apache.catalina.security.SecurityListener.UMASK=0027
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.864 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.io=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.base=/usr/local/tomcat
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.home=/usr/local/tomcat
|
||||
15-Jul-2025 03:36:49.865 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.io.tmpdir=/usr/local/tomcat/temp
|
||||
15-Jul-2025 03:36:49.868 INFO [main] org.apache.catalina.core.AprLifecycleListener.lifecycleEvent Loaded Apache Tomcat Native library [2.0.9] using APR version [1.7.2].
|
||||
15-Jul-2025 03:36:49.872 INFO [main] org.apache.catalina.core.AprLifecycleListener.initializeSSL OpenSSL successfully initialized [OpenSSL 3.0.13 30 Jan 2024]
|
||||
15-Jul-2025 03:36:50.082 INFO [main] org.apache.coyote.AbstractProtocol.init Initializing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:36:50.100 INFO [main] org.apache.catalina.startup.Catalina.load Server initialization in [398] milliseconds
|
||||
15-Jul-2025 03:36:50.141 INFO [main] org.apache.catalina.core.StandardService.startInternal Starting service [Catalina]
|
||||
15-Jul-2025 03:36:50.141 INFO [main] org.apache.catalina.core.StandardEngine.startInternal Starting Servlet engine: [Apache Tomcat/10.1.43]
|
||||
15-Jul-2025 03:36:50.154 INFO [main] org.apache.coyote.AbstractProtocol.start Starting ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:36:50.163 INFO [main] org.apache.catalina.startup.Catalina.start Server startup in [62] milliseconds
|
||||
15-Jul-2025 03:41:24.321 INFO [Thread-1] org.apache.coyote.AbstractProtocol.pause Pausing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:41:24.328 INFO [Thread-1] org.apache.catalina.core.StandardService.stopInternal Stopping service [Catalina]
|
||||
15-Jul-2025 03:41:24.329 INFO [Thread-1] org.apache.coyote.AbstractProtocol.stop Stopping ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:41:24.334 INFO [Thread-1] org.apache.coyote.AbstractProtocol.destroy Destroying ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:42:35.284 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version name: Apache Tomcat/10.1.43
|
||||
15-Jul-2025 03:42:35.291 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server built: Jul 1 2025 21:30:20 UTC
|
||||
15-Jul-2025 03:42:35.291 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Server version number: 10.1.43.0
|
||||
15-Jul-2025 03:42:35.291 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Name: Linux
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log OS Version: 6.11.0-29-generic
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Architecture: amd64
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Java Home: /opt/java/openjdk
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Version: 17.0.15+6
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log JVM Vendor: Eclipse Adoptium
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_BASE: /usr/local/tomcat
|
||||
15-Jul-2025 03:42:35.292 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log CATALINA_HOME: /usr/local/tomcat
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.config.file=/usr/local/tomcat/conf/logging.properties
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.util.logging.manager=org.apache.juli.ClassLoaderLogManager
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djdk.tls.ephemeralDHKeySize=2048
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.protocol.handler.pkgs=org.apache.catalina.webresources
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dsun.io.useCanonCaches=false
|
||||
15-Jul-2025 03:42:35.304 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dorg.apache.catalina.security.SecurityListener.UMASK=0027
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.io=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.base=/usr/local/tomcat
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Dcatalina.home=/usr/local/tomcat
|
||||
15-Jul-2025 03:42:35.305 INFO [main] org.apache.catalina.startup.VersionLoggerListener.log Command line argument: -Djava.io.tmpdir=/usr/local/tomcat/temp
|
||||
15-Jul-2025 03:42:35.308 INFO [main] org.apache.catalina.core.AprLifecycleListener.lifecycleEvent Loaded Apache Tomcat Native library [2.0.9] using APR version [1.7.2].
|
||||
15-Jul-2025 03:42:35.311 INFO [main] org.apache.catalina.core.AprLifecycleListener.initializeSSL OpenSSL successfully initialized [OpenSSL 3.0.13 30 Jan 2024]
|
||||
15-Jul-2025 03:42:35.488 INFO [main] org.apache.coyote.AbstractProtocol.init Initializing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:42:35.511 INFO [main] org.apache.catalina.startup.Catalina.load Server initialization in [361] milliseconds
|
||||
15-Jul-2025 03:42:35.562 INFO [main] org.apache.catalina.core.StandardService.startInternal Starting service [Catalina]
|
||||
15-Jul-2025 03:42:35.562 INFO [main] org.apache.catalina.core.StandardEngine.startInternal Starting Servlet engine: [Apache Tomcat/10.1.43]
|
||||
15-Jul-2025 03:42:35.581 INFO [main] org.apache.coyote.AbstractProtocol.start Starting ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 03:42:35.596 INFO [main] org.apache.catalina.startup.Catalina.start Server startup in [84] milliseconds
|
||||
15-Jul-2025 04:54:10.164 INFO [Thread-1] org.apache.coyote.AbstractProtocol.pause Pausing ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 04:54:10.213 INFO [Thread-1] org.apache.catalina.core.StandardService.stopInternal Stopping service [Catalina]
|
||||
15-Jul-2025 04:54:10.222 INFO [Thread-1] org.apache.coyote.AbstractProtocol.stop Stopping ProtocolHandler ["http-nio-8080"]
|
||||
15-Jul-2025 04:54:10.240 INFO [Thread-1] org.apache.coyote.AbstractProtocol.destroy Destroying ProtocolHandler ["http-nio-8080"]
|
Binary file not shown.
Binary file not shown.
@ -1,22 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.au.base",
|
||||
"version": "5.1.0-preview",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
},
|
||||
{
|
||||
"name": "hl7.terminology.r4",
|
||||
"version": "6.2.0"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.uv.extensions.r4",
|
||||
"version": "5.2.0"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:17.475734+00:00"
|
||||
}
|
Binary file not shown.
@ -1,34 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.au.core",
|
||||
"version": "1.1.0-preview",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
},
|
||||
{
|
||||
"name": "hl7.terminology.r4",
|
||||
"version": "6.2.0"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.uv.extensions.r4",
|
||||
"version": "5.2.0"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.au.base",
|
||||
"version": "5.1.0-preview"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.uv.smart-app-launch",
|
||||
"version": "2.1.0"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.uv.ipa",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:15.067826+00:00"
|
||||
}
|
Binary file not shown.
@ -1,9 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:16.477868+00:00"
|
||||
}
|
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.uv.extensions.r4",
|
||||
"version": "5.2.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:17.363719+00:00"
|
||||
}
|
Binary file not shown.
@ -1,22 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.uv.ipa",
|
||||
"version": "1.0.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
},
|
||||
{
|
||||
"name": "hl7.terminology.r4",
|
||||
"version": "5.0.0"
|
||||
},
|
||||
{
|
||||
"name": "hl7.fhir.uv.smart-app-launch",
|
||||
"version": "2.0.0"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:17.590266+00:00"
|
||||
}
|
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.uv.smart-app-launch",
|
||||
"version": "2.0.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:18.256800+00:00"
|
||||
}
|
Binary file not shown.
@ -1,18 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.fhir.uv.smart-app-launch",
|
||||
"version": "2.1.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
},
|
||||
{
|
||||
"name": "hl7.terminology.r4",
|
||||
"version": "5.0.0"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:17.529611+00:00"
|
||||
}
|
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.terminology.r4",
|
||||
"version": "5.0.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:18.216757+00:00"
|
||||
}
|
Binary file not shown.
@ -1,14 +0,0 @@
|
||||
{
|
||||
"package_name": "hl7.terminology.r4",
|
||||
"version": "6.2.0",
|
||||
"dependency_mode": "recursive",
|
||||
"imported_dependencies": [
|
||||
{
|
||||
"name": "hl7.fhir.r4.core",
|
||||
"version": "4.0.1"
|
||||
}
|
||||
],
|
||||
"complies_with_profiles": [],
|
||||
"imposed_profiles": [],
|
||||
"timestamp": "2025-05-04T12:29:17.148041+00:00"
|
||||
}
|
Binary file not shown.
@ -1,6 +0,0 @@
|
||||
#FileLock
|
||||
#Sun May 04 12:29:20 UTC 2025
|
||||
server=172.18.0.2\:34351
|
||||
hostName=1913c9e2ec9b
|
||||
method=file
|
||||
id=1969b45b76c42f20115290bfabb203a60dc75365e9d
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
@ -6,6 +6,8 @@ import tarfile
|
||||
import shutil
|
||||
import io
|
||||
import requests
|
||||
import time
|
||||
import subprocess
|
||||
from unittest.mock import patch, MagicMock, mock_open, call
|
||||
from flask import Flask, session
|
||||
from flask.testing import FlaskClient
|
||||
@ -27,9 +29,252 @@ def parse_ndjson(byte_stream):
|
||||
lines = decoded_stream.split('\n')
|
||||
return [json.loads(line) for line in lines if line.strip()]
|
||||
|
||||
class DockerComposeContainer:
|
||||
"""
|
||||
A class that follows the Testcontainers pattern for managing Docker Compose environments.
|
||||
This implementation uses subprocess to call docker-compose directly since we're not
|
||||
installing the testcontainers-python package.
|
||||
"""
|
||||
|
||||
def __init__(self, compose_file_path):
|
||||
"""
|
||||
Initialize with the path to the docker-compose.yml file
|
||||
|
||||
Args:
|
||||
compose_file_path: Path to the docker-compose.yml file
|
||||
"""
|
||||
self.compose_file = compose_file_path
|
||||
self.compose_dir = os.path.dirname(os.path.abspath(compose_file_path))
|
||||
self.containers_up = False
|
||||
self.service_ports = {}
|
||||
self._container_ids = {}
|
||||
|
||||
def __enter__(self):
|
||||
"""Start containers when entering context"""
|
||||
self.start()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Stop containers when exiting context"""
|
||||
self.stop()
|
||||
|
||||
def with_service_port(self, service_name, port):
|
||||
"""
|
||||
Map a service port (following the testcontainers builder pattern)
|
||||
|
||||
Args:
|
||||
service_name: Name of the service in docker-compose.yml
|
||||
port: Port number to expose
|
||||
|
||||
Returns:
|
||||
self for chaining
|
||||
"""
|
||||
self.service_ports[service_name] = port
|
||||
return self
|
||||
|
||||
def start(self):
|
||||
"""Start the Docker Compose environment"""
|
||||
if self.containers_up:
|
||||
return self
|
||||
|
||||
print("Starting Docker Compose environment...")
|
||||
result = subprocess.run(
|
||||
['docker-compose', '-f', self.compose_file, 'up', '-d'],
|
||||
cwd=self.compose_dir,
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
error_msg = f"Failed to start Docker Compose environment: {result.stderr}"
|
||||
print(error_msg)
|
||||
raise RuntimeError(error_msg)
|
||||
|
||||
# Store container IDs for later use
|
||||
self._get_container_ids()
|
||||
|
||||
self.containers_up = True
|
||||
self._wait_for_services()
|
||||
return self
|
||||
|
||||
def _get_container_ids(self):
|
||||
"""Get the container IDs for all services"""
|
||||
result = subprocess.run(
|
||||
['docker-compose', '-f', self.compose_file, 'ps', '-q'],
|
||||
cwd=self.compose_dir,
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
return
|
||||
|
||||
container_ids = result.stdout.strip().split('\n')
|
||||
if not container_ids:
|
||||
return
|
||||
|
||||
# Get service names for each container
|
||||
for container_id in container_ids:
|
||||
if not container_id:
|
||||
continue
|
||||
|
||||
inspect_result = subprocess.run(
|
||||
['docker', 'inspect', '--format', '{{index .Config.Labels "com.docker.compose.service"}}', container_id],
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
if inspect_result.returncode == 0:
|
||||
service_name = inspect_result.stdout.strip()
|
||||
self._container_ids[service_name] = container_id
|
||||
|
||||
def get_container_id(self, service_name):
|
||||
"""
|
||||
Get the container ID for a specific service
|
||||
|
||||
Args:
|
||||
service_name: Name of the service in docker-compose.yml
|
||||
|
||||
Returns:
|
||||
Container ID as string or None if not found
|
||||
"""
|
||||
return self._container_ids.get(service_name)
|
||||
|
||||
def get_service_host(self, service_name):
|
||||
"""
|
||||
Get the host for a specific service - for Docker Compose we just use localhost
|
||||
|
||||
Args:
|
||||
service_name: Name of the service in docker-compose.yml
|
||||
|
||||
Returns:
|
||||
Host as string (usually localhost)
|
||||
"""
|
||||
return "localhost"
|
||||
|
||||
def get_service_url(self, service_name, path=""):
|
||||
"""
|
||||
Get the URL for a specific service
|
||||
|
||||
Args:
|
||||
service_name: Name of the service in docker-compose.yml
|
||||
path: Optional path to append to the URL
|
||||
|
||||
Returns:
|
||||
URL as string
|
||||
"""
|
||||
port = self.service_ports.get(service_name)
|
||||
if not port:
|
||||
raise ValueError(f"No port mapping defined for service {service_name}")
|
||||
|
||||
url = f"http://{self.get_service_host(service_name)}:{port}"
|
||||
if path:
|
||||
# Ensure path starts with /
|
||||
if not path.startswith('/'):
|
||||
path = f"/{path}"
|
||||
url = f"{url}{path}"
|
||||
|
||||
return url
|
||||
|
||||
def get_logs(self, service_name):
|
||||
"""
|
||||
Get logs for a specific service
|
||||
|
||||
Args:
|
||||
service_name: Name of the service in docker-compose.yml
|
||||
|
||||
Returns:
|
||||
Logs as string
|
||||
"""
|
||||
container_id = self.get_container_id(service_name)
|
||||
if not container_id:
|
||||
return f"No container found for service {service_name}"
|
||||
|
||||
result = subprocess.run(
|
||||
['docker', 'logs', container_id],
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
return result.stdout
|
||||
|
||||
def stop(self):
|
||||
"""Stop the Docker Compose environment"""
|
||||
if not self.containers_up:
|
||||
return
|
||||
|
||||
print("Stopping Docker Compose environment...")
|
||||
result = subprocess.run(
|
||||
['docker-compose', '-f', self.compose_file, 'down'],
|
||||
cwd=self.compose_dir,
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
print(f"Warning: Error stopping Docker Compose: {result.stderr}")
|
||||
|
||||
self.containers_up = False
|
||||
|
||||
def _wait_for_services(self):
|
||||
"""Wait for all services to be ready"""
|
||||
print("Waiting for services to be ready...")
|
||||
|
||||
# Wait for HAPI FHIR server
|
||||
if 'fhir' in self.service_ports:
|
||||
self._wait_for_http_service(
|
||||
self.get_service_url('fhir', 'fhir/metadata'),
|
||||
"HAPI FHIR server"
|
||||
)
|
||||
|
||||
# Wait for FHIRFLARE application
|
||||
if 'fhirflare' in self.service_ports:
|
||||
self._wait_for_http_service(
|
||||
self.get_service_url('fhirflare'),
|
||||
"FHIRFLARE application"
|
||||
)
|
||||
|
||||
# Give additional time for services to stabilize
|
||||
time.sleep(5)
|
||||
|
||||
def _wait_for_http_service(self, url, service_name, max_retries=30, retry_interval=2):
|
||||
"""
|
||||
Wait for an HTTP service to be ready
|
||||
|
||||
Args:
|
||||
url: URL to check
|
||||
service_name: Name of the service for logging
|
||||
max_retries: Maximum number of retries
|
||||
retry_interval: Interval between retries in seconds
|
||||
"""
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
response = requests.get(url, timeout=5)
|
||||
if response.status_code == 200:
|
||||
print(f"{service_name} is ready after {attempt + 1} attempts")
|
||||
return True
|
||||
except requests.RequestException:
|
||||
pass
|
||||
|
||||
print(f"Waiting for {service_name} (attempt {attempt + 1}/{max_retries})...")
|
||||
time.sleep(retry_interval)
|
||||
|
||||
print(f"Warning: {service_name} did not become ready in time")
|
||||
return False
|
||||
|
||||
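The DockerComposeContainer defined above is meant to be driven through its builder and context-manager methods. A minimal usage sketch, assuming it runs in the same module as the class and that a docker-compose.yml sits next to the test file with the same service names the tests use ('fhir' and 'fhirflare'):

    import os

    compose_file = os.path.join(os.path.dirname(__file__), 'docker-compose.yml')

    # Start the stack, wait for both services, and tear everything down on exit.
    with DockerComposeContainer(compose_file) \
            .with_service_port('fhir', 8080) \
            .with_service_port('fhirflare', 5000) as env:
        print(env.get_service_url('fhir', 'fhir/metadata'))   # HAPI FHIR capability statement
        print(env.get_service_url('fhirflare'))               # toolkit UI
        print(env.get_logs('fhirflare')[:200])                # first part of the app logs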
class TestFHIRFlareIGToolkit(unittest.TestCase):
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
# Define the Docker Compose container
|
||||
compose_file_path = os.path.join(os.path.dirname(__file__), 'docker-compose.yml')
|
||||
cls.container = DockerComposeContainer(compose_file_path) \
|
||||
.with_service_port('fhir', 8080) \
|
||||
.with_service_port('fhirflare', 5000)
|
||||
|
||||
# Start the containers
|
||||
cls.container.start()
|
||||
|
||||
# Configure app for testing
|
||||
app.config['TESTING'] = True
|
||||
app.config['WTF_CSRF_ENABLED'] = False
|
||||
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
|
||||
@ -39,6 +284,7 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
||||
app.config['API_KEY'] = 'test-api-key'
|
||||
app.config['VALIDATE_IMPOSED_PROFILES'] = True
|
||||
app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True
|
||||
app.config['HAPI_FHIR_URL'] = cls.container.get_service_url('fhir', 'fhir') # Point to containerized HAPI FHIR
|
||||
|
||||
cls.app_context = app.app_context()
|
||||
cls.app_context.push()
|
||||
@ -50,6 +296,9 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
||||
cls.app_context.pop()
|
||||
if os.path.exists(cls.test_packages_dir):
|
||||
shutil.rmtree(cls.test_packages_dir)
|
||||
|
||||
# Stop Docker Compose environment
|
||||
cls.container.stop()
|
||||
|
||||
def setUp(self):
|
||||
if os.path.exists(self.test_packages_dir):
|
||||
@ -96,321 +345,63 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
|
||||
        with patch('fhirpath.evaluate', side_effect=Exception("fhirpath error")):
            self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"])

    def test_02_render_node_as_li(self):
        node = {
            "element": {"path": "Patient.identifier", "id": "Patient.identifier", "sliceName": "us-ssn", "min": 0, "max": "*", "type": [{"code": "Identifier"}]},
            "name": "identifier",
            "children": {}
        }
        must_support_paths = {"Patient.identifier:us-ssn"}
        with app.app_context():  # app_context() must be called to obtain a context manager
            html = render_template('cp_view_processed_ig.html', processed_ig=MagicMock(must_support_elements={"USCorePatientProfile": ["Patient.identifier:us-ssn"]}), profile_list=[{"name": "USCorePatientProfile"}], base_list=[])
            self.assertIn("identifier:us-ssn", html)
            self.assertIn("list-group-item-warning", html)
            self.assertIn("Must Support (Slice: us-ssn)", html)

    # --- Basic Page Rendering Tests ---

    def test_03_homepage(self):
        response = self.client.get('/')
        # Connect to the containerized application
        response = requests.get(self.container.get_service_url('fhirflare'))
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'FHIRFLARE IG Toolkit', response.data)
        self.assertIn('FHIRFLARE IG Toolkit', response.text)

    def test_04_import_ig_page(self):
        response = self.client.get('/import-ig')
        response = requests.get(self.container.get_service_url('fhirflare', 'import-ig'))
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Import IG', response.data)
        self.assertIn(b'Package Name', response.data)
        self.assertIn(b'Package Version', response.data)
        self.assertIn(b'name="dependency_mode"', response.data)
        self.assertIn('Import IG', response.text)
        self.assertIn('Package Name', response.text)
        self.assertIn('Package Version', response.text)
        self.assertIn('name="dependency_mode"', response.text)

    @patch('app.list_downloaded_packages', return_value=([], [], {}))
    def test_05_view_igs_no_packages(self, mock_list_pkgs):
        response = self.client.get('/view-igs')
        self.assertEqual(response.status_code, 200)
        self.assertNotIn(b'<th>Package Name</th>', response.data)
        self.assertIn(b'No packages downloaded yet.', response.data)
        mock_list_pkgs.assert_called_once()
    # --- API Integration Tests ---

    def test_06_view_igs_with_packages(self):
        self.create_mock_tgz('hl7.fhir.us.core-6.1.0.tgz', {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
        response = self.client.get('/view-igs')
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'hl7.fhir.us.core', response.data)
        self.assertIn(b'6.1.0', response.data)
        self.assertIn(b'<th>Package Name</th>', response.data)

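test_06 and most of the tests below call self.create_mock_tgz, a helper defined earlier in the file and not shown in this hunk. A minimal sketch of what such a helper might look like; the method name matches the calls above, but the body, location, and use of os/app from the module imports are assumptions:

    def create_mock_tgz(self, filename, files):
        # Hypothetical sketch: build a small FHIR package tarball for the tests.
        # 'files' maps archive member names (e.g. 'package/package.json') to dicts or strings.
        import io, json, tarfile
        path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with tarfile.open(path, 'w:gz') as tar:
            for member_name, content in files.items():
                payload = (content if isinstance(content, str) else json.dumps(content)).encode('utf-8')
                info = tarfile.TarInfo(name=member_name)
                info.size = len(payload)
                tar.addfile(info, io.BytesIO(payload))
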
    @patch('app.render_template')
    def test_07_push_igs_page(self, mock_render_template):
        mock_render_template.return_value = "Mock Render"
        response = self.client.get('/push-igs')
        self.assertEqual(response.status_code, 200)
        mock_render_template.assert_called()
        call_args, call_kwargs = mock_render_template.call_args
        self.assertEqual(call_args[0], 'cp_push_igs.html')

    # --- UI Form Tests ---

    @patch('app.services.import_package_and_dependencies')
    def test_10_import_ig_form_success(self, mock_import):
        mock_import.return_value = {'requested': ('hl7.fhir.us.core', '6.1.0'), 'processed': {('hl7.fhir.us.core', '6.1.0')}, 'downloaded': {('hl7.fhir.us.core', '6.1.0'): 'path/pkg.tgz'}, 'all_dependencies': {}, 'dependencies': [], 'errors': []}
        response = self.client.post('/import-ig', data={'package_name': 'hl7.fhir.us.core', 'package_version': '6.1.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Successfully downloaded hl7.fhir.us.core#6.1.0 and dependencies! Mode: recursive', response.data)
        mock_import.assert_called_once_with('hl7.fhir.us.core', '6.1.0', dependency_mode='recursive')

    @patch('app.services.import_package_and_dependencies')
    def test_11_import_ig_form_failure_404(self, mock_import):
        mock_import.return_value = {'requested': ('invalid.package', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error fetching package: 404 Client Error: Not Found for url: ...']}
        response = self.client.post('/import-ig', data={'package_name': 'invalid.package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Package not found on registry (404)', response.data)

    @patch('app.services.import_package_and_dependencies')
    def test_12_import_ig_form_failure_conn_error(self, mock_import):
        mock_import.return_value = {'requested': ('conn.error.pkg', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['Connection error: Cannot connect to registry...']}
        response = self.client.post('/import-ig', data={'package_name': 'conn.error.pkg', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Could not connect to the FHIR package registry', response.data)

    def test_13_import_ig_form_invalid_input(self):
        response = self.client.post('/import-ig', data={'package_name': 'invalid@package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Error in Package Name: Invalid package name format.', response.data)

    @patch('app.services.process_package_file')
    @patch('app.services.parse_package_filename')
    def test_20_process_ig_success(self, mock_parse, mock_process):
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'
        mock_parse.return_value = (pkg_name, pkg_version)
        mock_process.return_value = {
            'resource_types_info': [{'name': 'Patient', 'type': 'Patient', 'is_profile': False, 'must_support': True, 'optional_usage': False}],
            'must_support_elements': {'Patient': ['Patient.name', 'Patient.identifier:us-ssn']},
            'examples': {'Patient': ['package/Patient-example.json']},
            'complies_with_profiles': [],
            'imposed_profiles': ['http://hl7.org/fhir/StructureDefinition/Patient'],
            'errors': []
        }
        self.create_mock_tgz(filename, {'package/package.json': {'name': pkg_name, 'version': pkg_version}})
        response = self.client.post('/process-igs', data={'filename': filename}, follow_redirects=False)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.location.endswith('/view-igs'))
        with self.client.session_transaction() as sess:
            self.assertIn(('success', f'Successfully processed {pkg_name}#{pkg_version}!'), sess.get('_flashes', []))
        mock_parse.assert_called_once_with(filename)
        mock_process.assert_called_once_with(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))
        processed_ig = db.session.query(ProcessedIg).filter_by(package_name=pkg_name, version=pkg_version).first()
        self.assertIsNotNone(processed_ig)
        self.assertEqual(processed_ig.package_name, pkg_name)
        self.assertIn('Patient.name', processed_ig.must_support_elements.get('Patient', []))

    def test_21_process_ig_file_not_found(self):
        response = self.client.post('/process-igs', data={'filename': 'nonexistent.tgz'}, follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Package file not found: nonexistent.tgz', response.data)

    def test_22_delete_ig_success(self):
        filename = 'hl7.fhir.us.core-6.1.0.tgz'
        metadata_filename = 'hl7.fhir.us.core-6.1.0.metadata.json'
        self.create_mock_tgz(filename, {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
        metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
        with open(metadata_path, 'w') as f:  # use a context manager so the file handle is closed
            f.write(json.dumps({'name': 'hl7.fhir.us.core'}))
        self.assertTrue(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
        self.assertTrue(os.path.exists(metadata_path))
        response = self.client.post('/delete-ig', data={'filename': filename}, follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(f'Deleted: {filename}, {metadata_filename}'.encode('utf-8'), response.data)
        self.assertFalse(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
        self.assertFalse(os.path.exists(metadata_path))

    def test_23_unload_ig_success(self):
        processed_ig = ProcessedIg(package_name='test.pkg', version='1.0', processed_date=datetime.now(timezone.utc), resource_types_info=[], must_support_elements={}, examples={})
        db.session.add(processed_ig)
        db.session.commit()
        ig_id = processed_ig.id
        self.assertIsNotNone(db.session.get(ProcessedIg, ig_id))
        response = self.client.post('/unload-ig', data={'ig_id': str(ig_id)}, follow_redirects=True)
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Unloaded processed data for test.pkg#1.0', response.data)
        self.assertIsNone(db.session.get(ProcessedIg, ig_id))

    # --- Phase 2 Tests ---

    @patch('os.path.exists', return_value=True)
    @patch('tarfile.open')
    @patch('requests.put')
    def test_30_load_ig_to_hapi_success(self, mock_requests_put, mock_tarfile_open, mock_os_exists):
    def test_30_load_ig_to_hapi_integration(self):
        """Test loading an IG to the containerized HAPI FHIR server"""
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'
        self.create_mock_tgz(filename, {
            'package/package.json': {'name': pkg_name, 'version': pkg_version},
            'package/Patient-profile.json': {'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}
            'package/StructureDefinition-us-core-patient.json': {
                'resourceType': 'StructureDefinition',
                'id': 'us-core-patient',
                'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
                'name': 'USCorePatientProfile',
                'type': 'Patient',
                'status': 'active'
            }
        })
        mock_tar = MagicMock()
        profile_member = MagicMock(spec=tarfile.TarInfo)
        profile_member.name = 'package/Patient-profile.json'
        profile_member.isfile.return_value = True
        mock_tar.getmembers.return_value = [profile_member]
        mock_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}).encode('utf-8'))
        mock_tarfile_open.return_value.__enter__.return_value = mock_tar
        mock_requests_put.return_value = MagicMock(status_code=200)

        # Load IG to HAPI
        response = self.client.post(
            '/api/load-ig-to-hapi',
            data=json.dumps({'package_name': pkg_name, 'version': pkg_version}),
            content_type='application/json'
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )

        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertEqual(data['status'], 'success')
        mock_requests_put.assert_called_once_with(
            'http://localhost:8080/fhir/StructureDefinition/us-core-patient',
            json={'resourceType': 'StructureDefinition', 'id': 'us-core-patient'},
            headers={'Content-Type': 'application/fhir+json'}
        )

        # Verify the resource was loaded by querying the HAPI FHIR server directly
        hapi_response = requests.get(self.container.get_service_url('fhir', 'fhir/StructureDefinition/us-core-patient'))
        self.assertEqual(hapi_response.status_code, 200)
        resource = hapi_response.json()
        self.assertEqual(resource['resourceType'], 'StructureDefinition')
        self.assertEqual(resource['id'], 'us-core-patient')

    def test_31_load_ig_to_hapi_not_found(self):
        response = self.client.post(
            '/api/load-ig-to-hapi',
            data=json.dumps({'package_name': 'nonexistent', 'version': '1.0'}),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 404)
        data = json.loads(response.data)
        self.assertEqual(data['error'], 'Package not found')

    @patch('os.path.exists', return_value=True)
    @patch('requests.post')
    def test_32_api_validate_sample_hapi_success(self, mock_requests_post, mock_os_exists):
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        sample_resource = {
            'resourceType': 'Patient',
            'id': 'valid1',
            'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']},
            'name': [{'given': ['John'], 'family': 'Doe'}]
        }
        mock_requests_post.return_value = MagicMock(
            status_code=200,
            json=lambda: {
                'resourceType': 'OperationOutcome',
                'issue': [{'severity': 'warning', 'diagnostics': 'Must Support element Patient.identifier missing'}]
            }
        )
        response = self.client.post(
            '/api/validate-sample',
            data=json.dumps({
                'package_name': pkg_name,
                'version': pkg_version,
                'sample_data': json.dumps(sample_resource),
                'mode': 'single',
                'include_dependencies': True
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertTrue(data['valid'])
        self.assertEqual(data['warnings'], ['Must Support element Patient.identifier missing'])
        mock_requests_post.assert_called_once_with(
            'http://localhost:8080/fhir/Patient/$validate?profile=http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
            json=sample_resource,
            headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
            timeout=10
        )

    @patch('os.path.exists', return_value=True)
    @patch('requests.post', side_effect=requests.ConnectionError("HAPI down"))
    @patch('services.navigate_fhir_path')
    def test_33_api_validate_sample_hapi_fallback(self, mock_navigate_fhir_path, mock_requests_post, mock_os_exists):
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        sample_resource = {
            'resourceType': 'Patient',
            'id': 'valid1',
            'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
        }
        mock_navigate_fhir_path.return_value = None
        self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
            'package/package.json': {'name': pkg_name, 'version': pkg_version},
            'package/StructureDefinition-us-core-patient.json': {
                'resourceType': 'StructureDefinition',
                'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
            }
        })
        response = self.client.post(
            '/api/validate-sample',
            data=json.dumps({
                'package_name': pkg_name,
                'version': pkg_version,
                'sample_data': json.dumps(sample_resource),
                'mode': 'single',
                'include_dependencies': True
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertFalse(data['valid'])
        self.assertIn('Required element Patient.name missing', data['errors'])
        self.assertIn('HAPI validation failed', [d['issue'] for d in data['details']])

    # --- Phase 3 Tests ---

    @patch('requests.get')
    def test_34_hapi_status_check(self, mock_requests_get):
        mock_requests_get.return_value = MagicMock(status_code=200, json=lambda: {'resourceType': 'CapabilityStatement'})
        response = self.client.get('/fhir/metadata')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertEqual(data['resourceType'], 'CapabilityStatement')
        mock_requests_get.side_effect = requests.ConnectionError("HAPI down")
        response = self.client.get('/fhir/metadata')
        self.assertEqual(response.status_code, 503)
        data = json.loads(response.data)
        self.assertIn('Unable to connect to HAPI FHIR server', data['error'])

    def test_35_validate_sample_ui_rendering(self):
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        sample_resource = {
            'resourceType': 'Patient',
            'id': 'test',
            'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
        }
        self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
            'package/package.json': {'name': pkg_name, 'version': pkg_version},
            'package/StructureDefinition-us-core-patient.json': {
                'resourceType': 'StructureDefinition',
                'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
            }
        })
        response = self.client.post(
            '/api/validate-sample',
            data=json.dumps({
                'package_name': pkg_name,
                'version': pkg_version,
                'sample_data': json.dumps(sample_resource),
                'mode': 'single',
                'include_dependencies': True
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertFalse(data['valid'])
        self.assertIn('Required element Patient.name missing', data['errors'])
        self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
        response = self.client.get('/validate-sample')
        self.assertEqual(response.status_code, 200)
        self.assertIn(b'us-core-patient', response.data)

    def test_36_must_support_consistency(self):
    def test_31_validate_sample_with_hapi_integration(self):
        """Test validating a sample against the containerized HAPI FHIR server"""
        # First, load the necessary StructureDefinition
        pkg_name = 'hl7.fhir.us.core'
        pkg_version = '6.1.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'
@ -418,15 +409,37 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
            'package/package.json': {'name': pkg_name, 'version': pkg_version},
            'package/StructureDefinition-us-core-patient.json': {
                'resourceType': 'StructureDefinition',
                'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True, 'sliceName': 'us-ssn'}]}
                'id': 'us-core-patient',
                'url': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
                'name': 'USCorePatientProfile',
                'type': 'Patient',
                'status': 'active',
                'snapshot': {
                    'element': [
                        {'path': 'Patient', 'min': 1, 'max': '1'},
                        {'path': 'Patient.name', 'min': 1, 'max': '*'},
                        {'path': 'Patient.identifier', 'min': 0, 'max': '*', 'mustSupport': True}
                    ]
                }
            }
        })
        services.process_package_file(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))

        # Load IG to HAPI
        self.client.post(
            '/api/load-ig-to-hapi',
            data=json.dumps({'package_name': pkg_name, 'version': pkg_version}),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )

        # Validate a sample that's missing a required element
        sample_resource = {
            'resourceType': 'Patient',
            'id': 'test',
            'id': 'test-patient',
            'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
            # Missing required 'name' element
        }

        response = self.client.post(
            '/api/validate-sample',
            data=json.dumps({
@ -439,18 +452,68 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key'}
        )

        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
        with self.app_context:
            ig = ProcessedIg.query.filter_by(package_name=pkg_name, version=pkg_version).first()
            self.assertIsNotNone(ig)
            must_support_paths = ig.must_support_elements.get('Patient', [])
            self.assertIn('Patient.identifier:us-ssn', must_support_paths)
            response = self.client.get(f'/view-ig/{ig.id}')
        self.assertFalse(data['valid'])
        # Check for validation error related to missing name
        found_name_error = any('name' in error for error in data['errors'])
        self.assertTrue(found_name_error, f"Expected error about missing name element, got: {data['errors']}")

    def test_32_push_ig_to_hapi_integration(self):
        """Test pushing multiple resources from an IG to the containerized HAPI FHIR server"""
        pkg_name = 'test.push.pkg'
        pkg_version = '1.0.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'

        # Create a test package with multiple resources
        self.create_mock_tgz(filename, {
            'package/package.json': {'name': pkg_name, 'version': pkg_version},
            'package/Patient-test1.json': {
                'resourceType': 'Patient',
                'id': 'test1',
                'name': [{'family': 'Test', 'given': ['Patient']}]
            },
            'package/Observation-test1.json': {
                'resourceType': 'Observation',
                'id': 'test1',
                'status': 'final',
                'code': {'coding': [{'system': 'http://loinc.org', 'code': '12345-6'}]}
            }
        })

        # Push the IG to HAPI
        response = self.client.post(
            '/api/push-ig',
            data=json.dumps({
                'package_name': pkg_name,
                'version': pkg_version,
                'fhir_server_url': self.container.get_service_url('fhir', 'fhir'),
                'include_dependencies': False
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
        )

        self.assertEqual(response.status_code, 200)
        self.assertIn(b'Patient.identifier:us-ssn', response.data)
        self.assertIn(b'list-group-item-warning', response.data)
        streamed_data = parse_ndjson(response.data)
        complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
        self.assertIsNotNone(complete_msg, "Complete message not found in streamed response")
        summary = complete_msg.get('data', {})
        self.assertTrue(summary.get('success_count') >= 2, f"Expected at least 2 successful resources, got {summary.get('success_count')}")

        # Verify resources were loaded by querying the HAPI FHIR server directly
        patient_response = requests.get(self.container.get_service_url('fhir', 'fhir/Patient/test1'))
        self.assertEqual(patient_response.status_code, 200)
        patient = patient_response.json()
        self.assertEqual(patient['resourceType'], 'Patient')
        self.assertEqual(patient['id'], 'test1')

        observation_response = requests.get(self.container.get_service_url('fhir', 'fhir/Observation/test1'))
        self.assertEqual(observation_response.status_code, 200)
        observation = observation_response.json()
        self.assertEqual(observation['resourceType'], 'Observation')
        self.assertEqual(observation['id'], 'test1')

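The streaming push tests above and below call parse_ndjson, a helper defined outside this hunk. A minimal sketch, assuming the /api/push-ig endpoint streams one JSON object per line (application/x-ndjson) and that the module-level json import used throughout these tests is available:

def parse_ndjson(raw_bytes):
    # Hypothetical sketch: decode an NDJSON response body into a list of dicts.
    return [json.loads(line) for line in raw_bytes.decode('utf-8').splitlines() if line.strip()]
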
    # --- Existing API Tests ---

@ -515,7 +578,7 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
        pkg_name = 'push.test.pkg'
        pkg_version = '1.0.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'
        fhir_server_url = 'http://fake-fhir.com/baseR4'
        fhir_server_url = self.container.get_service_url('fhir', 'fhir')
        mock_get_metadata.return_value = {'imported_dependencies': []}
        mock_tar = MagicMock()
        mock_patient = {'resourceType': 'Patient', 'id': 'pat1'}
@ -564,225 +627,22 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
        self.assertEqual(len(summary.get('failed_details')), 0)
        mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))

    @patch('os.path.exists', return_value=True)
    @patch('app.services.get_package_metadata')
    @patch('tarfile.open')
    @patch('requests.Session')
    def test_51_api_push_ig_with_failures(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
        pkg_name = 'push.fail.pkg'
        pkg_version = '1.0.0'
        filename = f'{pkg_name}-{pkg_version}.tgz'
        fhir_server_url = 'http://fail-fhir.com/baseR4'
        mock_get_metadata.return_value = {'imported_dependencies': []}
        mock_tar = MagicMock()
        mock_ok_res = {'resourceType': 'Patient', 'id': 'ok1'}
        mock_fail_res = {'resourceType': 'Observation', 'id': 'fail1'}
        ok_member = MagicMock(spec=tarfile.TarInfo)
        ok_member.name = 'package/Patient-ok1.json'
        ok_member.isfile.return_value = True
        fail_member = MagicMock(spec=tarfile.TarInfo)
        fail_member.name = 'package/Observation-fail1.json'
        fail_member.isfile.return_value = True
        mock_tar.getmembers.return_value = [ok_member, fail_member]
        def mock_extractfile(member):
            if member.name == 'package/Patient-ok1.json':
                return io.BytesIO(json.dumps(mock_ok_res).encode('utf-8'))
            if member.name == 'package/Observation-fail1.json':
                return io.BytesIO(json.dumps(mock_fail_res).encode('utf-8'))
            return None
        mock_tar.extractfile.side_effect = mock_extractfile
        mock_tarfile_open.return_value.__enter__.return_value = mock_tar
        mock_session_instance = MagicMock()
        mock_ok_response = MagicMock(status_code=200)
        mock_ok_response.raise_for_status.return_value = None
        mock_fail_http_response = MagicMock(status_code=400)
        mock_fail_http_response.json.return_value = {'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'diagnostics': 'Validation failed'}]}
        mock_fail_exception = requests.exceptions.HTTPError(response=mock_fail_http_response)
        mock_fail_http_response.raise_for_status.side_effect = mock_fail_exception
        mock_session_instance.put.side_effect = [mock_ok_response, mock_fail_http_response]
        mock_session.return_value = mock_session_instance
        self.create_mock_tgz(filename, {'package/dummy.txt': 'content'})
        response = self.client.post(
            '/api/push-ig',
            data=json.dumps({
                'package_name': pkg_name,
                'version': pkg_version,
                'fhir_server_url': fhir_server_url,
                'include_dependencies': False,
                'api_key': 'test-api-key'
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
        )
        self.assertEqual(response.status_code, 200)
        streamed_data = parse_ndjson(response.data)
        complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
        self.assertIsNotNone(complete_msg)
        summary = complete_msg.get('data', {})
        self.assertEqual(summary.get('status'), 'partial')
        self.assertEqual(summary.get('success_count'), 1)
        self.assertEqual(summary.get('failure_count'), 1)
        self.assertEqual(len(summary.get('failed_details')), 1)
        self.assertEqual(summary['failed_details'][0].get('resource'), 'Observation/fail1')
        self.assertIn('Validation failed', summary['failed_details'][0].get('error', ''))
        mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))

    @patch('os.path.exists', return_value=True)
    @patch('app.services.get_package_metadata')
    @patch('tarfile.open')
    @patch('requests.Session')
    def test_52_api_push_ig_with_dependency(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
        main_pkg_name = 'main.dep.pkg'
        main_pkg_ver = '1.0'
        main_filename = f'{main_pkg_name}-{main_pkg_ver}.tgz'
        dep_pkg_name = 'dep.pkg'
        dep_pkg_ver = '1.0'
        dep_filename = f'{dep_pkg_name}-{dep_pkg_ver}.tgz'
        fhir_server_url = 'http://dep-fhir.com/baseR4'
        self.create_mock_tgz(main_filename, {'package/Patient-main.json': {'resourceType': 'Patient', 'id': 'main'}})
        self.create_mock_tgz(dep_filename, {'package/Observation-dep.json': {'resourceType': 'Observation', 'id': 'dep'}})
        mock_get_metadata.return_value = {'imported_dependencies': [{'name': dep_pkg_name, 'version': dep_pkg_ver}]}
        mock_main_tar = MagicMock()
        main_member = MagicMock(spec=tarfile.TarInfo)
        main_member.name = 'package/Patient-main.json'
        main_member.isfile.return_value = True
        mock_main_tar.getmembers.return_value = [main_member]
        mock_main_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Patient', 'id': 'main'}).encode('utf-8'))
        mock_dep_tar = MagicMock()
        dep_member = MagicMock(spec=tarfile.TarInfo)
        dep_member.name = 'package/Observation-dep.json'
        dep_member.isfile.return_value = True
        mock_dep_tar.getmembers.return_value = [dep_member]
        mock_dep_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Observation', 'id': 'dep'}).encode('utf-8'))
        def tar_opener(path, mode):
            mock_tar_ctx = MagicMock()
            if main_filename in path:
                mock_tar_ctx.__enter__.return_value = mock_main_tar
            elif dep_filename in path:
                mock_tar_ctx.__enter__.return_value = mock_dep_tar
            else:
                empty_mock_tar = MagicMock()
                empty_mock_tar.getmembers.return_value = []
                mock_tar_ctx.__enter__.return_value = empty_mock_tar
            return mock_tar_ctx
        mock_tarfile_open.side_effect = tar_opener
        mock_session_instance = MagicMock()
        mock_put_response = MagicMock(status_code=200)
        mock_put_response.raise_for_status.return_value = None
        mock_session_instance.put.return_value = mock_put_response
        mock_session.return_value = mock_session_instance
        response = self.client.post(
            '/api/push-ig',
            data=json.dumps({
                'package_name': main_pkg_name,
                'version': main_pkg_ver,
                'fhir_server_url': fhir_server_url,
                'include_dependencies': True,
                'api_key': 'test-api-key'
            }),
            content_type='application/json',
            headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
        )
        self.assertEqual(response.status_code, 200)
        streamed_data = parse_ndjson(response.data)
        complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
        self.assertIsNotNone(complete_msg)
        summary = complete_msg.get('data', {})
        self.assertEqual(summary.get('status'), 'success')
        self.assertEqual(summary.get('success_count'), 2)
        self.assertEqual(len(summary.get('pushed_packages_summary')), 2)
        mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, main_filename))
        mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, dep_filename))

    # --- Helper Route Tests ---

    @patch('app.ProcessedIg.query')
    @patch('app.services.find_and_extract_sd')
    @patch('os.path.exists')
    def test_60_get_structure_definition_success(self, mock_exists, mock_find_sd, mock_query):
        pkg_name = 'struct.test'
        pkg_version = '1.0'
        resource_type = 'Patient'
        mock_exists.return_value = True
        mock_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Patient.name', 'min': 1}, {'id': 'Patient.birthDate', 'mustSupport': True}]}}
        mock_find_sd.return_value = (mock_sd_data, 'path/to/sd.json')
        mock_processed_ig = MagicMock()
        mock_processed_ig.must_support_elements = {resource_type: ['Patient.birthDate']}
        mock_query.filter_by.return_value.first.return_value = mock_processed_ig
        response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertEqual(data['must_support_paths'], ['Patient.birthDate'])

    @patch('app.services.import_package_and_dependencies')
    @patch('app.services.find_and_extract_sd')
    @patch('os.path.exists')
    def test_61_get_structure_definition_fallback(self, mock_exists, mock_find_sd, mock_import):
        pkg_name = 'struct.test'
        pkg_version = '1.0'
        core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE
        resource_type = 'Observation'
        def exists_side_effect(path):
            return True
        mock_exists.side_effect = exists_side_effect
        mock_core_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Observation.status'}]}}
        def find_sd_side_effect(path, identifier, profile_url=None):
            if f"{pkg_name}-{pkg_version}.tgz" in path:
                return (None, None)
            if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
                return (mock_core_sd_data, 'path/obs.json')
            return (None, None)
        mock_find_sd.side_effect = find_sd_side_effect
        with patch('app.ProcessedIg.query') as mock_query:
            mock_query.filter_by.return_value.first.return_value = None
            response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertTrue(data['fallback_used'])

    @patch('app.services.find_and_extract_sd', return_value=(None, None))
    @patch('app.services.import_package_and_dependencies')
    @patch('os.path.exists')
    def test_62_get_structure_definition_not_found_anywhere(self, mock_exists, mock_import, mock_find_sd):
        pkg_name = 'no.sd.pkg'
        pkg_version = '1.0'
        core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE
        def exists_side_effect(path):
            if f"{pkg_name}-{pkg_version}.tgz" in path:
                return True
            if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
                return False
            return False
        mock_exists.side_effect = exists_side_effect
        mock_import.return_value = {'errors': ['Download failed'], 'downloaded': False}
        response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type=Whatever')
        self.assertEqual(response.status_code, 500)
        data = json.loads(response.data)
        self.assertIn('failed to download core package', data['error'])

    def test_63_get_example_content_success(self):
        pkg_name = 'example.test'
        pkg_version = '1.0'
        filename = f"{pkg_name}-{pkg_version}.tgz"
        example_path = 'package/Patient-example.json'
        example_content = {'resourceType': 'Patient', 'id': 'example'}
        self.create_mock_tgz(filename, {example_path: example_content})
        response = self.client.get(f'/get-example?package_name={pkg_name}&package_version={pkg_version}&filename={example_path}')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertEqual(data, example_content)

    def test_64_get_package_metadata_success(self):
        pkg_name = 'metadata.test'
        pkg_version = '1.0'
        metadata_filename = f"{pkg_name}-{pkg_version}.metadata.json"
        metadata_content = {'package_name': pkg_name, 'version': pkg_version, 'dependency_mode': 'tree-shaking'}
        metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
        with open(metadata_path, 'w') as f:  # use a context manager so the file handle is closed
            f.write(json.dumps(metadata_content))
        response = self.client.get(f'/get-package-metadata?package_name={pkg_name}&version={pkg_version}')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data)
        self.assertEqual(data.get('dependency_mode'), 'tree-shaking')

    # --- Helper method to debug container issues ---

    def test_99_print_container_logs_on_failure(self):
        """Helper test that prints container logs in case of failures"""
        # This test should always pass but will print logs if other tests fail
        try:
            if hasattr(self, 'container') and self.container.containers_up:
                for service_name in ['fhir', 'db', 'fhirflare']:
                    if service_name in self.container._container_ids:
                        print(f"\n=== Logs for {service_name} ===")
                        print(self.container.get_logs(service_name))
        except Exception as e:
            print(f"Error getting container logs: {e}")

        # This assertion always passes - this test is just for debug info
        self.assertTrue(True)

if __name__ == '__main__':
    unittest.main()
|