incremental fixes

operation UI fixes, CRSF token handling for security internally
Fhirpath added to IG processing view
explicit removal of narrative views in the ig viewer
changes to enforce hapi server config is read.
added Prism for code highlighting in the Base and ui operations
This commit is contained in:
Joshua Hare 2025-04-20 09:44:38 +10:00
parent 22892991f6
commit 8e769bd126
16 changed files with 5666 additions and 710 deletions

View File

@ -9,6 +9,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Install specific versions of GoFSH and SUSHI # Install specific versions of GoFSH and SUSHI
# REMOVED pip install fhirpath from this line
RUN npm install -g gofsh fsh-sushi RUN npm install -g gofsh fsh-sushi
# Set up Python environment # Set up Python environment
@ -16,8 +17,14 @@ WORKDIR /app
RUN python3 -m venv /app/venv RUN python3 -m venv /app/venv
ENV PATH="/app/venv/bin:$PATH" ENV PATH="/app/venv/bin:$PATH"
# ADDED: Uninstall old fhirpath just in case it's in requirements.txt
RUN pip uninstall -y fhirpath || true
# ADDED: Install the new fhirpathpy library
RUN pip install --no-cache-dir fhirpathpy
# Copy Flask files # Copy Flask files
COPY requirements.txt . COPY requirements.txt .
# Install requirements (including Pydantic - check version compatibility if needed)
RUN pip install --no-cache-dir -r requirements.txt RUN pip install --no-cache-dir -r requirements.txt
COPY app.py . COPY app.py .
COPY services.py . COPY services.py .
@ -32,6 +39,8 @@ RUN mkdir -p /tmp /app/h2-data /app/static/uploads /app/logs && chmod 777 /tmp /
# Copy pre-built HAPI WAR and configuration # Copy pre-built HAPI WAR and configuration
COPY hapi-fhir-jpaserver/target/ROOT.war /usr/local/tomcat/webapps/ COPY hapi-fhir-jpaserver/target/ROOT.war /usr/local/tomcat/webapps/
COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/conf/ COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/conf/
COPY hapi-fhir-jpaserver/target/classes/application.yaml /app/config/application.yaml
COPY hapi-fhir-jpaserver/target/classes/application.yaml /usr/local/tomcat/webapps/app/config/application.yaml
COPY hapi-fhir-jpaserver/custom/ /usr/local/tomcat/webapps/custom/ COPY hapi-fhir-jpaserver/custom/ /usr/local/tomcat/webapps/custom/
# Install supervisord # Install supervisord

200
app.py
View File

@ -381,12 +381,12 @@ def view_ig(processed_ig_id):
config=current_app.config) config=current_app.config)
@app.route('/get-structure') @app.route('/get-structure')
def get_structure_definition(): def get_structure():
package_name = request.args.get('package_name') package_name = request.args.get('package_name')
package_version = request.args.get('package_version') package_version = request.args.get('package_version')
resource_identifier = request.args.get('resource_type') resource_type = request.args.get('resource_type')
if not all([package_name, package_version, resource_identifier]): if not all([package_name, package_version, resource_type]):
logger.warning("get_structure_definition: Missing query parameters.") logger.warning("get_structure: Missing query parameters: package_name=%s, package_version=%s, resource_type=%s", package_name, package_version, resource_type)
return jsonify({"error": "Missing required query parameters: package_name, package_version, resource_type"}), 400 return jsonify({"error": "Missing required query parameters: package_name, package_version, resource_type"}), 400
packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
if not packages_dir: if not packages_dir:
@ -397,16 +397,23 @@ def get_structure_definition():
sd_data = None sd_data = None
fallback_used = False fallback_used = False
source_package_id = f"{package_name}#{package_version}" source_package_id = f"{package_name}#{package_version}"
logger.debug(f"Attempting to find SD for '{resource_identifier}' in {tgz_filename}") logger.debug(f"Attempting to find SD for '{resource_type}' in {tgz_filename}")
if os.path.exists(tgz_path): if os.path.exists(tgz_path):
try: try:
sd_data, _ = services.find_and_extract_sd(tgz_path, resource_identifier) sd_data, _ = services.find_and_extract_sd(tgz_path, resource_type)
except json.JSONDecodeError as e:
logger.error(f"JSON parsing error for SD '{resource_type}' in {tgz_path}: {e}")
return jsonify({"error": f"Invalid JSON in StructureDefinition: {str(e)}"}), 500
except tarfile.TarError as e:
logger.error(f"TarError extracting SD '{resource_type}' from {tgz_path}: {e}")
return jsonify({"error": f"Error reading package archive: {str(e)}"}), 500
except Exception as e: except Exception as e:
logger.error(f"Error extracting SD for '{resource_identifier}' from {tgz_path}: {e}", exc_info=True) logger.error(f"Unexpected error extracting SD '{resource_type}' from {tgz_path}: {e}", exc_info=True)
return jsonify({"error": f"Unexpected error reading StructureDefinition: {str(e)}"}), 500
else: else:
logger.warning(f"Package file not found: {tgz_path}") logger.warning(f"Package file not found: {tgz_path}")
if sd_data is None: if sd_data is None:
logger.info(f"SD for '{resource_identifier}' not found in {source_package_id}. Attempting fallback to {services.CANONICAL_PACKAGE_ID}.") logger.info(f"SD for '{resource_type}' not found in {source_package_id}. Attempting fallback to {services.CANONICAL_PACKAGE_ID}.")
core_package_name, core_package_version = services.CANONICAL_PACKAGE core_package_name, core_package_version = services.CANONICAL_PACKAGE
core_tgz_filename = services.construct_tgz_filename(core_package_name, core_package_version) core_tgz_filename = services.construct_tgz_filename(core_package_name, core_package_version)
core_tgz_path = os.path.join(packages_dir, core_tgz_filename) core_tgz_path = os.path.join(packages_dir, core_tgz_filename)
@ -415,104 +422,115 @@ def get_structure_definition():
try: try:
result = services.import_package_and_dependencies(core_package_name, core_package_version, dependency_mode='direct') result = services.import_package_and_dependencies(core_package_name, core_package_version, dependency_mode='direct')
if result['errors'] and not result['downloaded']: if result['errors'] and not result['downloaded']:
err_msg = f"Failed to download fallback core package {services.CANONICAL_PACKAGE_ID}: {result['errors'][0]}" logger.error(f"Failed to download fallback core package {services.CANONICAL_PACKAGE_ID}: {result['errors'][0]}")
logger.error(err_msg) return jsonify({"error": f"SD for '{resource_type}' not found in primary package, and failed to download core package: {result['errors'][0]}"}), 500
return jsonify({"error": f"SD for '{resource_identifier}' not found in primary package, and failed to download core package: {result['errors'][0]}"}), 500
elif not os.path.exists(core_tgz_path): elif not os.path.exists(core_tgz_path):
err_msg = f"Core package download reported success but file {core_tgz_filename} still not found." logger.error(f"Core package download reported success but file {core_tgz_filename} still not found.")
logger.error(err_msg) return jsonify({"error": f"SD for '{resource_type}' not found, and core package download failed unexpectedly."}), 500
return jsonify({"error": f"SD for '{resource_identifier}' not found, and core package download failed unexpectedly."}), 500
else:
logger.info(f"Successfully downloaded core package {services.CANONICAL_PACKAGE_ID}.")
except Exception as e: except Exception as e:
logger.error(f"Error downloading core package {services.CANONICAL_PACKAGE_ID}: {str(e)}", exc_info=True) logger.error(f"Error downloading core package {services.CANONICAL_PACKAGE_ID}: {str(e)}", exc_info=True)
return jsonify({"error": f"SD for '{resource_identifier}' not found, and error downloading core package: {str(e)}"}), 500 return jsonify({"error": f"SD for '{resource_type}' not found, and error downloading core package: {str(e)}"}), 500
if os.path.exists(core_tgz_path): try:
try: sd_data, _ = services.find_and_extract_sd(core_tgz_path, resource_type)
sd_data, _ = services.find_and_extract_sd(core_tgz_path, resource_identifier) if sd_data is not None:
if sd_data is not None: fallback_used = True
fallback_used = True source_package_id = services.CANONICAL_PACKAGE_ID
source_package_id = services.CANONICAL_PACKAGE_ID logger.info(f"Found SD for '{resource_type}' in fallback package {source_package_id}.")
logger.info(f"Found SD for '{resource_identifier}' in fallback package {source_package_id}.") else:
else: logger.error(f"SD for '{resource_type}' not found in primary package OR fallback {services.CANONICAL_PACKAGE_ID}.")
logger.error(f"SD for '{resource_identifier}' not found in primary package OR fallback {services.CANONICAL_PACKAGE_ID}.") return jsonify({"error": f"StructureDefinition for '{resource_type}' not found in {package_name}#{package_version} or in core FHIR package."}), 404
return jsonify({"error": f"StructureDefinition for '{resource_identifier}' not found in {package_name}#{package_version} or in core FHIR package."}), 404 except json.JSONDecodeError as e:
except Exception as e: logger.error(f"JSON parsing error for SD '{resource_type}' in fallback {core_tgz_path}: {e}")
logger.error(f"Error extracting SD for '{resource_identifier}' from fallback {core_tgz_path}: {e}", exc_info=True) return jsonify({"error": f"Invalid JSON in fallback StructureDefinition: {str(e)}"}), 500
return jsonify({"error": f"Error reading core FHIR package: {str(e)}"}), 500 except tarfile.TarError as e:
else: logger.error(f"TarError extracting SD '{resource_type}' from fallback {core_tgz_path}: {e}")
logger.error(f"Core package {core_tgz_path} missing even after download attempt.") return jsonify({"error": f"Error reading fallback package archive: {str(e)}"}), 500
return jsonify({"error": f"SD not found, and core package could not be located/downloaded."}), 500 except Exception as e:
logger.error(f"Unexpected error extracting SD '{resource_type}' from fallback {core_tgz_path}: {e}", exc_info=True)
return jsonify({"error": f"Unexpected error reading fallback StructureDefinition: {str(e)}"}), 500
# Remove narrative text element (ensure applied after fallback)
if sd_data and 'text' in sd_data:
logger.debug(f"Removing narrative text from SD for '{resource_type}'")
del sd_data['text']
if not sd_data:
logger.error(f"SD for '{resource_type}' not found in primary or fallback package.")
return jsonify({"error": f"StructureDefinition for '{resource_type}' not found."}), 404
elements = sd_data.get('snapshot', {}).get('element', []) elements = sd_data.get('snapshot', {}).get('element', [])
if not elements and 'differential' in sd_data: if not elements and 'differential' in sd_data:
logger.debug(f"Using differential elements for {resource_identifier} as snapshot is missing.") logger.debug(f"Using differential elements for {resource_type} as snapshot is missing.")
elements = sd_data.get('differential', {}).get('element', []) elements = sd_data.get('differential', {}).get('element', [])
if not elements: if not elements:
logger.warning(f"No snapshot or differential elements found in the SD for '{resource_identifier}' from {source_package_id}") logger.warning(f"No snapshot or differential elements found in the SD for '{resource_type}' from {source_package_id}")
must_support_paths = [] must_support_paths = []
processed_ig = ProcessedIg.query.filter_by(package_name=package_name, version=package_version).first() processed_ig = ProcessedIg.query.filter_by(package_name=package_name, version=package_version).first()
if processed_ig and processed_ig.must_support_elements: if processed_ig and processed_ig.must_support_elements:
must_support_paths = processed_ig.must_support_elements.get(resource_identifier, []) must_support_paths = processed_ig.must_support_elements.get(resource_type, [])
logger.debug(f"Retrieved {len(must_support_paths)} Must Support paths for '{resource_identifier}' from processed IG {package_name}#{package_version}") logger.debug(f"Retrieved {len(must_support_paths)} Must Support paths for '{resource_type}' from processed IG {package_name}#{package_version}")
# Serialize with indent=4 and sort_keys=False for consistent formatting
response_data = { response_data = {
"elements": elements, 'structure_definition': sd_data,
"must_support_paths": must_support_paths, 'must_support_paths': must_support_paths,
"fallback_used": fallback_used, 'fallback_used': fallback_used,
"source_package": source_package_id, 'source_package': source_package_id
"requested_identifier": resource_identifier,
"original_package": f"{package_name}#{package_version}"
} }
return jsonify(response_data) return Response(json.dumps(response_data, indent=4, sort_keys=False), mimetype='application/json')
@app.route('/get-example') @app.route('/get-example')
def get_example_content(): def get_example():
package_name = request.args.get('package_name') package_name = request.args.get('package_name')
package_version = request.args.get('package_version') version = request.args.get('package_version')
example_member_path = request.args.get('filename') filename = request.args.get('filename')
if not all([package_name, package_version, example_member_path]): if not all([package_name, version, filename]):
logger.warning(f"get_example_content: Missing query parameters: name={package_name}, version={package_version}, path={example_member_path}") logger.warning("get_example: Missing query parameters: package_name=%s, version=%s, filename=%s", package_name, version, filename)
return jsonify({"error": "Missing required query parameters: package_name, package_version, filename"}), 400 return jsonify({"error": "Missing required query parameters: package_name, package_version, filename"}), 400
if not example_member_path.startswith('package/') or '..' in example_member_path: if not filename.startswith('package/') or '..' in filename:
logger.warning(f"Invalid example file path requested: {example_member_path}") logger.warning(f"Invalid example file path requested: {filename}")
return jsonify({"error": "Invalid example file path."}), 400 return jsonify({"error": "Invalid example file path."}), 400
packages_dir = current_app.config.get('FHIR_PACKAGES_DIR') packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
if not packages_dir: if not packages_dir:
logger.error("FHIR_PACKAGES_DIR not configured.") logger.error("FHIR_PACKAGES_DIR not configured.")
return jsonify({"error": "Server configuration error: Package directory not set."}), 500 return jsonify({"error": "Server configuration error: Package directory not set."}), 500
tgz_filename = services.construct_tgz_filename(package_name, package_version) tgz_filename = services.construct_tgz_filename(package_name, version)
tgz_path = os.path.join(packages_dir, tgz_filename) tgz_path = os.path.join(packages_dir, tgz_filename)
if not os.path.exists(tgz_path): if not os.path.exists(tgz_path):
logger.error(f"Package file not found for example extraction: {tgz_path}") logger.error(f"Package file not found: {tgz_path}")
return jsonify({"error": f"Package file not found: {package_name}#{package_version}"}), 404 return jsonify({"error": f"Package {package_name}#{version} not found"}), 404
try: try:
with tarfile.open(tgz_path, "r:gz") as tar: with tarfile.open(tgz_path, "r:gz") as tar:
try: try:
example_member = tar.getmember(example_member_path) example_member = tar.getmember(filename)
with tar.extractfile(example_member) as example_fileobj: with tar.extractfile(example_member) as example_fileobj:
content_bytes = example_fileobj.read() content_bytes = example_fileobj.read()
content_string = content_bytes.decode('utf-8-sig') content_string = content_bytes.decode('utf-8-sig')
content_type = 'application/json' if example_member_path.lower().endswith('.json') else \ # Parse JSON to remove narrative
'application/xml' if example_member_path.lower().endswith('.xml') else \ content = json.loads(content_string)
'text/plain' if 'text' in content:
return Response(content_string, mimetype=content_type) logger.debug(f"Removing narrative text from example '{filename}'")
del content['text']
# Return filtered JSON content as a compact string
filtered_content_string = json.dumps(content, separators=(',', ':'), sort_keys=False)
return Response(filtered_content_string, mimetype='application/json')
except KeyError: except KeyError:
logger.error(f"Example file '{example_member_path}' not found within {tgz_filename}") logger.error(f"Example file '{filename}' not found within {tgz_filename}")
return jsonify({"error": f"Example file '{os.path.basename(example_member_path)}' not found in package."}), 404 return jsonify({"error": f"Example file '{os.path.basename(filename)}' not found in package."}), 404
except tarfile.TarError as e: except json.JSONDecodeError as e:
logger.error(f"TarError reading example {example_member_path} from {tgz_filename}: {e}") logger.error(f"JSON parsing error for example '{filename}' in {tgz_filename}: {e}")
return jsonify({"error": f"Error reading package archive: {e}"}), 500 return jsonify({"error": f"Invalid JSON in example file: {str(e)}"}), 500
except UnicodeDecodeError as e: except UnicodeDecodeError as e:
logger.error(f"Encoding error reading example {example_member_path} from {tgz_filename}: {e}") logger.error(f"Encoding error reading example '{filename}' from {tgz_filename}: {e}")
return jsonify({"error": f"Error decoding example file (invalid UTF-8?): {e}"}), 500 return jsonify({"error": f"Error decoding example file (invalid UTF-8?): {str(e)}"}), 500
except tarfile.TarError as e:
logger.error(f"TarError reading example '{filename}' from {tgz_filename}: {e}")
return jsonify({"error": f"Error reading package archive: {str(e)}"}), 500
except tarfile.TarError as e: except tarfile.TarError as e:
logger.error(f"Error opening package file {tgz_path}: {e}") logger.error(f"Error opening package file {tgz_path}: {e}")
return jsonify({"error": f"Error reading package archive: {e}"}), 500 return jsonify({"error": f"Error reading package archive: {str(e)}"}), 500
except FileNotFoundError: except FileNotFoundError:
logger.error(f"Package file disappeared: {tgz_path}") logger.error(f"Package file disappeared: {tgz_path}")
return jsonify({"error": f"Package file not found: {package_name}#{package_version}"}), 404 return jsonify({"error": f"Package file not found: {package_name}#{version}"}), 404
except Exception as e: except Exception as e:
logger.error(f"Unexpected error getting example {example_member_path} from {tgz_filename}: {e}", exc_info=True) logger.error(f"Unexpected error getting example '{filename}' from {tgz_filename}: {e}", exc_info=True)
return jsonify({"error": f"An unexpected error occurred: {str(e)}"}), 500 return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
@app.route('/get-package-metadata') @app.route('/get-package-metadata')
def get_package_metadata(): def get_package_metadata():
@ -924,7 +942,8 @@ def proxy_hapi(subpath):
headers=headers, headers=headers,
data=request.get_data(), data=request.get_data(),
cookies=request.cookies, cookies=request.cookies,
allow_redirects=False allow_redirects=False,
timeout=5
) )
response.raise_for_status() response.raise_for_status()
# Strip hop-by-hop headers to avoid chunked encoding issues # Strip hop-by-hop headers to avoid chunked encoding issues
@ -936,13 +955,44 @@ def proxy_hapi(subpath):
'proxy-authorization', 'te', 'trailers', 'upgrade' 'proxy-authorization', 'te', 'trailers', 'upgrade'
) )
} }
# Ensure Content-Length matches the actual body
response_headers['Content-Length'] = str(len(response.content)) response_headers['Content-Length'] = str(len(response.content))
logger.debug(f"Response: {response.status_code} {response.reason}") logger.debug(f"HAPI response: {response.status_code} {response.reason}")
return response.content, response.status_code, response_headers.items() return response.content, response.status_code, response_headers.items()
except requests.RequestException as e: except requests.RequestException as e:
logger.error(f"Proxy error: {str(e)}") logger.error(f"HAPI proxy error for {subpath}: {str(e)}")
return jsonify({'error': str(e)}), response.status_code if 'response' in locals() else 500 error_message = "HAPI FHIR server is unavailable. Please check server status."
if clean_subpath == 'metadata':
error_message = "Unable to connect to HAPI FHIR server for status check. Local validation will be used."
return jsonify({'error': error_message, 'details': str(e)}), 503
@app.route('/api/load-ig-to-hapi', methods=['POST'])
def load_ig_to_hapi():
data = request.get_json()
package_name = data.get('package_name')
version = data.get('version')
tgz_path = os.path.join(current_app.config['FHIR_PACKAGES_DIR'], construct_tgz_filename(package_name, version))
if not os.path.exists(tgz_path):
return jsonify({"error": "Package not found"}), 404
try:
with tarfile.open(tgz_path, "r:gz") as tar:
for member in tar.getmembers():
if member.name.endswith('.json') and member.name not in ['package/package.json', 'package/.index.json']:
resource = json.load(tar.extractfile(member))
resource_type = resource.get('resourceType')
resource_id = resource.get('id')
if resource_type and resource_id:
response = requests.put(
f"http://localhost:8080/fhir/{resource_type}/{resource_id}",
json=resource,
headers={'Content-Type': 'application/fhir+json'}
)
response.raise_for_status()
return jsonify({"status": "success", "message": f"Loaded {package_name}#{version} to HAPI"})
except Exception as e:
logger.error(f"Failed to load IG to HAPI: {e}")
return jsonify({"error": str(e)}), 500
# Assuming 'app' and 'logger' are defined, and other necessary imports are present above # Assuming 'app' and 'logger' are defined, and other necessary imports are present above

View File

@ -10,7 +10,8 @@ services:
volumes: volumes:
- ./instance:/app/instance - ./instance:/app/instance
- ./static/uploads:/app/static/uploads - ./static/uploads:/app/static/uploads
- ./hapi-fhir-jpaserver/target/h2-data:/app/h2-data # - ./hapi-fhir-jpaserver/target/h2-data:/app/h2-data
- ./instance/hapi-h2-data/:/app/h2-data
- ./logs:/app/logs - ./logs:/app/logs
environment: environment:
- FLASK_APP=app.py - FLASK_APP=app.py

Binary file not shown.

View File

@ -0,0 +1,6 @@
#FileLock
#Sat Apr 19 22:16:34 UTC 2025
server=172.18.0.2\:46549
hostName=5fcfaca62eed
method=file
id=196501fef4b8f140e4827bc0866757e1c405fd48a82

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@ -7,6 +7,7 @@ import re
import logging import logging
import shutil import shutil
from flask import current_app, Blueprint, request, jsonify from flask import current_app, Blueprint, request, jsonify
from fhirpathpy import evaluate
from collections import defaultdict from collections import defaultdict
from pathlib import Path from pathlib import Path
import datetime import datetime
@ -325,6 +326,10 @@ def process_package_file(tgz_path):
if not isinstance(data, dict) or data.get('resourceType') != 'StructureDefinition': if not isinstance(data, dict) or data.get('resourceType') != 'StructureDefinition':
continue continue
# Remove narrative text element from StructureDefinition
if 'text' in data:
del data['text']
profile_id = data.get('id') or data.get('name') profile_id = data.get('id') or data.get('name')
sd_type = data.get('type') sd_type = data.get('type')
sd_base = data.get('baseDefinition') sd_base = data.get('baseDefinition')
@ -437,6 +442,10 @@ def process_package_file(tgz_path):
resource_type = data.get('resourceType') resource_type = data.get('resourceType')
if not resource_type: continue if not resource_type: continue
# Remove narrative text element from example
if 'text' in data:
del data['text']
profile_meta = data.get('meta', {}).get('profile', []) profile_meta = data.get('meta', {}).get('profile', [])
found_profile_match = False found_profile_match = False
if profile_meta and isinstance(profile_meta, list): if profile_meta and isinstance(profile_meta, list):
@ -574,8 +583,8 @@ def process_package_file(tgz_path):
return results return results
# --- Validation Functions --- # --- Validation Functions ---------------------------------------------------------------------------------------------------------------FHIRPATH CHANGES STARTED
def navigate_fhir_path(resource, path, extension_url=None): def _legacy_navigate_fhir_path(resource, path, extension_url=None):
"""Navigates a FHIR resource using a FHIRPath-like expression, handling nested structures.""" """Navigates a FHIR resource using a FHIRPath-like expression, handling nested structures."""
logger.debug(f"Navigating FHIR path: {path}") logger.debug(f"Navigating FHIR path: {path}")
if not resource or not path: if not resource or not path:
@ -650,8 +659,27 @@ def navigate_fhir_path(resource, path, extension_url=None):
result = current if (current is not None and (not isinstance(current, list) or current)) else None result = current if (current is not None and (not isinstance(current, list) or current)) else None
logger.debug(f"Path {path} resolved to: {result}") logger.debug(f"Path {path} resolved to: {result}")
return result return result
#-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------
def validate_resource_against_profile(package_name, version, resource, include_dependencies=True): def navigate_fhir_path(resource, path, extension_url=None):
"""Navigates a FHIR resource using FHIRPath expressions."""
logger.debug(f"Navigating FHIR path: {path}, extension_url={extension_url}")
if not resource or not path:
return None
try:
# Adjust path for extension filtering
if extension_url and 'extension' in path:
path = f"{path}[url='{extension_url}']"
result = evaluate(resource, path)
# Return first result if list, None if empty
return result[0] if result else None
except Exception as e:
logger.error(f"FHIRPath evaluation failed for {path}: {e}")
# Fallback to legacy navigation for compatibility
return _legacy_navigate_fhir_path(resource, path, extension_url)
##------------------------------------------------------------------------------------------------------------------------------------and fhirpath here
def _legacy_validate_resource_against_profile(package_name, version, resource, include_dependencies=True):
"""Validates a FHIR resource against a StructureDefinition in the specified package.""" """Validates a FHIR resource against a StructureDefinition in the specified package."""
logger.debug(f"Validating resource {resource.get('resourceType')} against {package_name}#{version}, include_dependencies={include_dependencies}") logger.debug(f"Validating resource {resource.get('resourceType')} against {package_name}#{version}, include_dependencies={include_dependencies}")
result = { result = {
@ -831,6 +859,146 @@ def validate_resource_against_profile(package_name, version, resource, include_d
} }
logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}") logger.debug(f"Validation result: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
return result return result
##--------------------------------------------------------------------------------------------------------------------------------------------------------
def validate_resource_against_profile(package_name, version, resource, include_dependencies=True):
result = {
'valid': True,
'errors': [],
'warnings': [],
'details': [],
'resource_type': resource.get('resourceType'),
'resource_id': resource.get('id', 'unknown'),
'profile': resource.get('meta', {}).get('profile', [None])[0]
}
# Attempt HAPI validation if a profile is specified
if result['profile']:
try:
hapi_url = f"http://localhost:8080/fhir/{resource['resourceType']}/$validate?profile={result['profile']}"
response = requests.post(
hapi_url,
json=resource,
headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
timeout=10
)
response.raise_for_status()
outcome = response.json()
if outcome.get('resourceType') == 'OperationOutcome':
for issue in outcome.get('issue', []):
severity = issue.get('severity')
diagnostics = issue.get('diagnostics', issue.get('details', {}).get('text', 'No details provided'))
detail = {
'issue': diagnostics,
'severity': severity,
'description': issue.get('details', {}).get('text', diagnostics)
}
if severity in ['error', 'fatal']:
result['valid'] = False
result['errors'].append(diagnostics)
elif severity == 'warning':
result['warnings'].append(diagnostics)
result['details'].append(detail)
result['summary'] = {
'error_count': len(result['errors']),
'warning_count': len(result['warnings'])
}
logger.debug(f"HAPI validation for {result['resource_type']}/{result['resource_id']}: valid={result['valid']}, errors={len(result['errors'])}, warnings={len(result['warnings'])}")
return result
else:
logger.warning(f"HAPI returned non-OperationOutcome: {outcome.get('resourceType')}")
except requests.RequestException as e:
logger.error(f"HAPI validation failed for {result['resource_type']}/{result['resource_id']}: {e}")
result['details'].append({
'issue': f"HAPI validation failed: {str(e)}",
'severity': 'warning',
'description': 'Falling back to local validation due to HAPI server error.'
})
# Fallback to local validation
download_dir = _get_download_dir()
if not download_dir:
result['valid'] = False
result['errors'].append("Could not access download directory")
result['details'].append({
'issue': "Could not access download directory",
'severity': 'error',
'description': "The server could not locate the directory where FHIR packages are stored."
})
return result
tgz_path = os.path.join(download_dir, construct_tgz_filename(package_name, version))
sd_data, sd_path = find_and_extract_sd(tgz_path, resource.get('resourceType'), result['profile'])
if not sd_data:
result['valid'] = False
result['errors'].append(f"No StructureDefinition found for {resource.get('resourceType')}")
result['details'].append({
'issue': f"No StructureDefinition found for {resource.get('resourceType')}",
'severity': 'error',
'description': f"The package {package_name}#{version} does not contain a matching StructureDefinition."
})
return result
elements = sd_data.get('snapshot', {}).get('element', [])
for element in elements:
path = element.get('path')
min_val = element.get('min', 0)
must_support = element.get('mustSupport', False)
slicing = element.get('slicing')
slice_name = element.get('sliceName')
# Check required elements
if min_val > 0:
value = navigate_fhir_path(resource, path)
if value is None or (isinstance(value, list) and not any(value)):
result['valid'] = False
result['errors'].append(f"Required element {path} missing")
result['details'].append({
'issue': f"Required element {path} missing",
'severity': 'error',
'description': f"Element {path} has min={min_val} in profile {result['profile'] or 'unknown'}"
})
# Check must-support elements
if must_support:
value = navigate_fhir_path(resource, slice_name if slice_name else path)
if value is None or (isinstance(value, list) and not any(value)):
result['warnings'].append(f"Must Support element {path} missing or empty")
result['details'].append({
'issue': f"Must Support element {path} missing or empty",
'severity': 'warning',
'description': f"Element {path} is marked as Must Support in profile {result['profile'] or 'unknown'}"
})
# Validate slicing
if slicing and not slice_name: # Parent slicing element
discriminator = slicing.get('discriminator', [])
for d in discriminator:
d_type = d.get('type')
d_path = d.get('path')
if d_type == 'value':
sliced_elements = navigate_fhir_path(resource, path)
if isinstance(sliced_elements, list):
seen_values = set()
for elem in sliced_elements:
d_value = navigate_fhir_path(elem, d_path)
if d_value in seen_values:
result['valid'] = False
result['errors'].append(f"Duplicate discriminator value {d_value} for {path}.{d_path}")
seen_values.add(d_value)
elif d_type == 'type':
sliced_elements = navigate_fhir_path(resource, path)
if isinstance(sliced_elements, list):
for elem in sliced_elements:
if not navigate_fhir_path(elem, d_path):
result['valid'] = False
result['errors'].append(f"Missing discriminator type {d_path} for {path}")
result['summary'] = {
'error_count': len(result['errors']),
'warning_count': len(result['warnings'])
}
return result
def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True): def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True):
"""Validates a FHIR Bundle against profiles in the specified package.""" """Validates a FHIR Bundle against profiles in the specified package."""

View File

@ -1,3 +1,3 @@
Alias: $condition-clinical = http://terminology.hl7.org/CodeSystem/condition-clinical Alias: $allergyintolerance-clinical = http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical
Alias: $condition-category = http://terminology.hl7.org/CodeSystem/condition-category Alias: $allergyintolerance-verification = http://terminology.hl7.org/CodeSystem/allergyintolerance-verification
Alias: $sct = http://snomed.info/sct Alias: $sct = http://snomed.info/sct

View File

@ -0,0 +1,16 @@
Instance: aspirin
InstanceOf: AllergyIntolerance
Usage: #example
* meta.profile = "http://hl7.org.au/fhir/core/StructureDefinition/au-core-allergyintolerance"
* clinicalStatus = $allergyintolerance-clinical#active
* clinicalStatus.text = "Active"
* verificationStatus = $allergyintolerance-verification#confirmed
* verificationStatus.text = "Confirmed"
* category = #medication
* criticality = #unable-to-assess
* code = $sct#387458008
* code.text = "Aspirin allergy"
* patient = Reference(Patient/hayes-arianne)
* recordedDate = "2024-02-10"
* recorder = Reference(PractitionerRole/specialistphysicians-swanborough-erick)
* asserter = Reference(PractitionerRole/specialistphysicians-swanborough-erick)

View File

@ -1,74 +1,50 @@
{ {
"resourceType": "Condition", "resourceType": "AllergyIntolerance",
"id": "vkc", "id": "aspirin",
"meta": { "meta": {
"profile": [ "profile": [
"http://hl7.org.au/fhir/core/StructureDefinition/au-core-condition" "http://hl7.org.au/fhir/core/StructureDefinition/au-core-allergyintolerance"
] ]
}, },
"clinicalStatus": { "clinicalStatus": {
"coding": [ "coding": [
{ {
"system": "http://terminology.hl7.org/CodeSystem/condition-clinical", "system": "http://terminology.hl7.org/CodeSystem/allergyintolerance-clinical",
"code": "active", "code": "active"
"display": "Active"
} }
] ],
"text": "Active"
}, },
"category": [ "verificationStatus": {
{
"coding": [
{
"system": "http://terminology.hl7.org/CodeSystem/condition-category",
"code": "encounter-diagnosis",
"display": "Encounter Diagnosis"
}
]
}
],
"severity": {
"coding": [ "coding": [
{ {
"system": "http://snomed.info/sct", "system": "http://terminology.hl7.org/CodeSystem/allergyintolerance-verification",
"code": "24484000", "code": "confirmed"
"display": "Severe"
} }
] ],
"text": "Confirmed"
}, },
"category": [
"medication"
],
"criticality": "unable-to-assess",
"code": { "code": {
"coding": [ "coding": [
{ {
"system": "http://snomed.info/sct", "system": "http://snomed.info/sct",
"code": "317349009", "code": "387458008"
"display": "Vernal keratoconjunctivitis"
} }
] ],
"text": "Aspirin allergy"
}, },
"bodySite": [ "patient": {
{ "reference": "Patient/hayes-arianne"
"coding": [
{
"system": "http://snomed.info/sct",
"code": "368601006",
"display": "Entire conjunctiva of left eye"
}
]
}
],
"subject": {
"reference": "Patient/italia-sofia"
}, },
"onsetDateTime": "2023-10-01", "recordedDate": "2024-02-10",
"recordedDate": "2023-10-02",
"recorder": { "recorder": {
"reference": "PractitionerRole/generalpractitioner-guthridge-jarred" "reference": "PractitionerRole/specialistphysicians-swanborough-erick"
}, },
"asserter": { "asserter": {
"reference": "PractitionerRole/generalpractitioner-guthridge-jarred" "reference": "PractitionerRole/specialistphysicians-swanborough-erick"
}, }
"note": [
{
"text": "Itchy and burning eye, foreign body sensation. Mucoid discharge."
}
]
} }

View File

@ -6,6 +6,7 @@
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous"> <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css"> <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css" integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin="anonymous" referrerpolicy="no-referrer" /> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css" integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<link href="https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/themes/prism-okaidia.min.css" rel="stylesheet" />
<link rel="icon" type="image/x-icon" href="{{ url_for('static', filename='favicon.ico') }}"> <link rel="icon" type="image/x-icon" href="{{ url_for('static', filename='favicon.ico') }}">
<title>{% if title %}{{ title }} - {% endif %}{{ site_name }}</title> <title>{% if title %}{{ title }} - {% endif %}{{ site_name }}</title>
<style> <style>
@ -158,6 +159,24 @@
/* --- Unified Button and Badge Styles --- */ /* --- Unified Button and Badge Styles --- */
/* Add some basic adjustments for Prism compatibility if needed */
/* Ensure pre takes full width and code block behaves */
.highlight-code pre, pre[class*="language-"] {
margin: 0.5em 0;
overflow: auto; /* Allow scrolling */
border-radius: 0.3em;
}
code[class*="language-"],
pre[class*="language-"] {
text-shadow: none; /* Adjust if theme adds text shadow */
white-space: pre-wrap; /* Wrap long lines */
word-break: break-all; /* Break long words/tokens */
}
/* Adjust padding if needed based on theme */
:not(pre) > code[class*="language-"], pre[class*="language-"] {
background: inherit; /* Ensure background comes from pre or theme */
}
/* General Button Theme (Primary, Secondary) */ /* General Button Theme (Primary, Secondary) */
.btn-primary { .btn-primary {
background-color: #007bff; background-color: #007bff;
@ -195,6 +214,48 @@
color: white; color: white;
} }
/* --- Prism.js Theme Overrides for Light Mode --- */
/* Target code blocks only when NOT in dark theme */
html:not([data-theme="dark"]) .token.punctuation,
html:not([data-theme="dark"]) .token.operator,
html:not([data-theme="dark"]) .token.entity,
html:not([data-theme="dark"]) .token.url,
html:not([data-theme="dark"]) .token.symbol,
html:not([data-theme="dark"]) .token.number,
html:not([data-theme="dark"]) .token.boolean,
html:not([data-theme="dark"]) .token.variable,
html:not([data-theme="dark"]) .token.constant,
html:not([data-theme="dark"]) .token.property,
html:not([data-theme="dark"]) .token.regex,
html:not([data-theme="dark"]) .token.inserted,
html:not([data-theme="dark"]) .token.null, /* Target null specifically */
html:not([data-theme="dark"]) .language-css .token.string,
html:not([data-theme="dark"]) .style .token.string,
html:not([data-theme="dark"]) .token.important,
html:not([data-theme="dark"]) .token.bold {
/* Choose a darker color that works on your light code background */
/* Example: A dark grey or the default text color */
color: #333 !important; /* You might need !important to override theme */
}
/* You might need to adjust other tokens as well based on the specific theme */
/* Example: Make strings slightly darker too if needed */
html:not([data-theme="dark"]) .token.string {
color: #005cc5 !important; /* Example: A shade of blue */
}
/* Ensure background of highlighted code isn't overridden by theme in light mode */
html:not([data-theme="dark"]) pre[class*="language-"] {
background: #e9ecef !important; /* Match your light theme pre background */
}
html:not([data-theme="dark"]) :not(pre) > code[class*="language-"] {
background: #e9ecef !important; /* Match inline code background */
color: #333 !important; /* Ensure inline code text is dark */
}
/* --- End Prism.js Overrides --- */
/* --- Theme Styles for FSH Code Blocks --- */ /* --- Theme Styles for FSH Code Blocks --- */
pre, pre code { pre, pre code {
font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
@ -822,6 +883,8 @@ html[data-theme="dark"] .structure-tree-root .list-group-item-warning {
tooltips.forEach(t => new bootstrap.Tooltip(t)); tooltips.forEach(t => new bootstrap.Tooltip(t));
}); });
</script> </script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/components/prism-core.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/plugins/autoloader/prism-autoloader.min.js"></script>
{% block scripts %}{% endblock %} {% block scripts %}{% endblock %}
</body> </body>
</html> </html>

View File

@ -157,7 +157,7 @@
<div id="raw-structure-loading" class="text-center py-3" style="display: none;"> <div id="raw-structure-loading" class="text-center py-3" style="display: none;">
<div class="spinner-border text-primary" role="status"><span class="visually-hidden">Loading...</span></div> <div class="spinner-border text-primary" role="status"><span class="visually-hidden">Loading...</span></div>
</div> </div>
<pre><code id="raw-structure-content" class="p-2 d-block border bg-light" style="max-height: 400px; overflow-y: auto;"></code></pre> <pre><code id="raw-structure-content" class="p-2 d-block border bg-light language-json" style="max-height: 400px; overflow-y: auto;"></code></pre>
</div> </div>
</div> </div>
</div> </div>
@ -179,13 +179,13 @@
<div class="col-md-6"> <div class="col-md-6">
<div class="d-flex justify-content-between align-items-center mb-1"> <div class="d-flex justify-content-between align-items-center mb-1">
<h6>Raw Content</h6> <h6>Raw Content</h6>
<button type="button" class="btn btn-sm btn-outline-secondary btn-copy" id="copy-raw-def-button" <button type="button" class="btn btn-sm btn-outline-secondary btn-copy" id="copy-raw-ex-button"
data-bs-toggle="tooltip" data-bs-placement="top" title="Copy Raw Definition JSON"> data-bs-toggle="tooltip" data-bs-placement="top" title="Copy Raw Content">
<i class="bi bi-clipboard" aria-hidden="true"></i> <i class="bi bi-clipboard" aria-hidden="true"></i>
<span class="visually-hidden">Copy Raw Definition JSON to clipboard</span> <span class="visually-hidden">Copy Raw Content to clipboard</span>
</button> </button>
</div> </div>
<pre><code id="example-content-raw" class="p-2 d-block border bg-light" style="max-height: 400px; overflow-y: auto;"></code></pre> <pre><code id="example-content-raw" class="p-2 d-block border bg-light language-json" style="max-height: 400px; overflow-y: auto; white-space: pre;"></code></pre>
</div> </div>
<div class="col-md-6"> <div class="col-md-6">
<div class="d-flex justify-content-between align-items-center mb-1"> <div class="d-flex justify-content-between align-items-center mb-1">
@ -196,7 +196,7 @@
<span class="visually-hidden">Copy JSON example to clipboard</span> <span class="visually-hidden">Copy JSON example to clipboard</span>
</button> </button>
</div> </div>
<pre><code id="example-content-json" class="p-2 d-block border bg-light" style="max-height: 400px; overflow-y: auto;"></code></pre> <pre><code id="example-content-json" class="p-2 d-block border bg-light language-json" style="max-height: 400px; overflow-y: auto;"></code></pre>
</div> </div>
</div> </div>
</div> </div>
@ -208,6 +208,8 @@
{% endif %} {% endif %}
</div> </div>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script> <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github.min.css">
<script>const examplesData = {{ examples_by_type | tojson | safe }};</script> <script>const examplesData = {{ examples_by_type | tojson | safe }};</script>
<script> <script>
document.addEventListener('DOMContentLoaded', function() { document.addEventListener('DOMContentLoaded', function() {
@ -230,76 +232,79 @@ document.addEventListener('DOMContentLoaded', function() {
const rawStructureTitle = document.getElementById('raw-structure-title'); const rawStructureTitle = document.getElementById('raw-structure-title');
const rawStructureContent = document.getElementById('raw-structure-content'); const rawStructureContent = document.getElementById('raw-structure-content');
const rawStructureLoading = document.getElementById('raw-structure-loading'); const rawStructureLoading = document.getElementById('raw-structure-loading');
// --- Added: Buttons & Tooltips Constants (Using Corrected IDs) ---
const copyRawDefButton = document.getElementById('copy-raw-def-button'); const copyRawDefButton = document.getElementById('copy-raw-def-button');
const copyRawExButton = document.getElementById('copy-raw-content-button'); const copyRawExButton = document.getElementById('copy-raw-ex-button');
const copyPrettyJsonButton = document.getElementById('copy-pretty-json-button'); const copyPrettyJsonButton = document.getElementById('copy-pretty-json-button');
let copyRawDefTooltipInstance = null; let copyRawDefTooltipInstance = null;
let copyRawExTooltipInstance = null; let copyRawExTooltipInstance = null;
let copyPrettyJsonTooltipInstance = null; let copyPrettyJsonTooltipInstance = null;
const structureBaseUrl = "{{ url_for('get_structure_definition') }}"; const structureBaseUrl = "{{ url_for('get_structure') }}";
const exampleBaseUrl = "{{ url_for('get_example_content') }}"; const exampleBaseUrl = "{{ url_for('get_example') }}";
// --- Added: Generic Copy Logic Function --- // Initialize Highlight.js
hljs.configure({ languages: ['json'] });
hljs.highlightAll();
// Generic Copy Logic Function
function setupCopyButton(buttonElement, sourceElement, tooltipInstance, successTitle, originalTitle, errorTitle, nothingTitle) { function setupCopyButton(buttonElement, sourceElement, tooltipInstance, successTitle, originalTitle, errorTitle, nothingTitle) {
if (!buttonElement || !sourceElement) { if (!buttonElement || !sourceElement) {
console.warn("Copy button or source element not found for setup:", buttonElement?.id, sourceElement?.id); console.warn("Copy button or source element not found for setup:", buttonElement?.id, sourceElement?.id);
return null; return null;
} }
// Dispose previous instance if exists, then create new one
bootstrap.Tooltip.getInstance(buttonElement)?.dispose(); bootstrap.Tooltip.getInstance(buttonElement)?.dispose();
tooltipInstance = new bootstrap.Tooltip(buttonElement); tooltipInstance = new bootstrap.Tooltip(buttonElement);
// Store handler reference to potentially remove later if needed
const clickHandler = () => { const clickHandler = () => {
const textToCopy = sourceElement.textContent; const textToCopy = sourceElement.textContent;
const copyIcon = buttonElement.querySelector('i'); const copyIcon = buttonElement.querySelector('i');
console.log(`Copy button clicked: ${buttonElement.id}`); console.log(`Copy button clicked: ${buttonElement.id}`);
if (textToCopy && textToCopy.trim() && textToCopy !== '(Not valid JSON)') { if (textToCopy && textToCopy.trim() && textToCopy !== '(Not valid JSON)') {
navigator.clipboard.writeText(textToCopy).then(() => { navigator.clipboard.writeText(textToCopy).then(() => {
console.log(` > Content copied successfully!`); console.log(` > Content copied successfully!`);
buttonElement.setAttribute('data-bs-original-title', successTitle); buttonElement.setAttribute('data-bs-original-title', successTitle);
tooltipInstance.show();
if (copyIcon) copyIcon.classList.replace('bi-clipboard', 'bi-check-lg');
setTimeout(() => {
if (buttonElement.isConnected) {
tooltipInstance.hide();
buttonElement.setAttribute('data-bs-original-title', originalTitle);
if (copyIcon) copyIcon.classList.replace('bi-check-lg', 'bi-clipboard');
}
}, 2000);
}).catch(err => {
console.error(` > Failed to copy content for ${buttonElement.id}:`, err);
buttonElement.setAttribute('data-bs-original-title', errorTitle);
tooltipInstance.show(); tooltipInstance.show();
setTimeout(() => { if (copyIcon) copyIcon.classList.replace('bi-clipboard', 'bi-check-lg');
if (buttonElement.isConnected) { setTimeout(() => {
tooltipInstance.hide(); if (buttonElement.isConnected) {
buttonElement.setAttribute('data-bs-original-title', originalTitle); tooltipInstance.hide();
} buttonElement.setAttribute('data-bs-original-title', originalTitle);
}, 2000); if (copyIcon) copyIcon.classList.replace('bi-check-lg', 'bi-clipboard');
}); }
} else { }, 2000);
console.log(' > No valid content to copy.'); }).catch(err => {
buttonElement.setAttribute('data-bs-original-title', nothingTitle); console.error(` > Failed to copy content for ${buttonElement.id}:`, err);
tooltipInstance.show(); buttonElement.setAttribute('data-bs-original-title', errorTitle);
setTimeout(() => { tooltipInstance.show();
if (buttonElement.isConnected) { setTimeout(() => {
tooltipInstance.hide(); if (buttonElement.isConnected) {
buttonElement.setAttribute('data-bs-original-title', originalTitle); tooltipInstance.hide();
} buttonElement.setAttribute('data-bs-original-title', originalTitle);
}, 1500); }
} }, 2000);
});
} else {
console.log(' > No valid content to copy.');
buttonElement.setAttribute('data-bs-original-title', nothingTitle);
tooltipInstance.show();
setTimeout(() => {
if (buttonElement.isConnected) {
tooltipInstance.hide();
buttonElement.setAttribute('data-bs-original-title', originalTitle);
}
}, 1500);
}
}; };
// Remove potentially old listener - best effort without storing reference buttonElement.addEventListener('click', clickHandler);
// buttonElement.removeEventListener('click', clickHandler); // May not work reliably without named fn
buttonElement.addEventListener('click', clickHandler); // Add the listener
return tooltipInstance; return tooltipInstance;
} }
// --- End Added ---
// Setup Copy Buttons
copyRawDefTooltipInstance = setupCopyButton(copyRawDefButton, rawStructureContent, copyRawDefTooltipInstance, 'Copied Definition!', 'Copy Raw Definition JSON', 'Copy Failed!', 'Nothing to copy');
copyRawExTooltipInstance = setupCopyButton(copyRawExButton, exampleContentRaw, copyRawExTooltipInstance, 'Copied Raw!', 'Copy Raw Content', 'Copy Failed!', 'Nothing to copy');
copyPrettyJsonTooltipInstance = setupCopyButton(copyPrettyJsonButton, exampleContentJson, copyPrettyJsonTooltipInstance, 'Copied JSON!', 'Copy JSON', 'Copy Failed!', 'Nothing to copy');
// Handle Resource Type Link Clicks
document.querySelectorAll('.resource-type-list').forEach(container => { document.querySelectorAll('.resource-type-list').forEach(container => {
container.addEventListener('click', function(event) { container.addEventListener('click', function(event) {
const link = event.target.closest('.resource-type-link'); const link = event.target.closest('.resource-type-link');
@ -312,6 +317,8 @@ document.addEventListener('DOMContentLoaded', function() {
const resourceType = link.dataset.resourceType; const resourceType = link.dataset.resourceType;
if (!pkgName || !pkgVersion || !resourceType) { if (!pkgName || !pkgVersion || !resourceType) {
console.error("Missing data attributes:", { pkgName, pkgVersion, resourceType }); console.error("Missing data attributes:", { pkgName, pkgVersion, resourceType });
structureDisplay.innerHTML = `<div class="alert alert-danger">Error: Missing resource type information</div>`;
rawStructureContent.textContent = `Error: Missing resource type information`;
return; return;
} }
@ -333,28 +340,42 @@ document.addEventListener('DOMContentLoaded', function() {
fetch(structureFetchUrl) fetch(structureFetchUrl)
.then(response => { .then(response => {
console.log("Structure fetch response status:", response.status); console.log("Structure fetch response status:", response.status);
return response.json().then(data => ({ ok: response.ok, status: response.status, data })); if (!response.ok) {
}) return response.text().then(text => {
.then(result => { try {
if (!result.ok) { const errData = JSON.parse(text);
throw new Error(result.data.error || `HTTP error ${result.status}`); throw new Error(errData.error || `HTTP error ${response.status}: Server error`);
} catch (e) {
throw new Error(`HTTP error ${response.status}: Failed to load StructureDefinition for ${resourceType} in ${pkgName}#{pkgVersion}. Please check server logs or try re-importing the package.`);
}
});
} }
console.log("Structure data received:", result.data); return response.json();
if (result.data.fallback_used) { })
.then(data => {
console.log("Structure data received:", data);
if (data.fallback_used) {
structureFallbackMessage.style.display = 'block'; structureFallbackMessage.style.display = 'block';
structureFallbackMessage.textContent = `Structure definition not found in this IG; using base FHIR definition from ${result.data.source_package}.`; structureFallbackMessage.textContent = `Structure definition not found in this IG; using base FHIR definition from ${data.source_package || 'FHIR core'}.`;
} else { } else {
structureFallbackMessage.style.display = 'none'; structureFallbackMessage.style.display = 'none';
} }
renderStructureTree(result.data.elements, result.data.must_support_paths || [], resourceType); renderStructureTree(data.structure_definition.snapshot.element, data.must_support_paths || [], resourceType);
rawStructureContent.textContent = JSON.stringify(result.data, null, 2); // Client-side fallback to remove narrative
if (data.structure_definition && ('text' in data.structure_definition || 'Text' in data.structure_definition)) {
console.warn(`Narrative text found in StructureDefinition for ${resourceType}, removing client-side`);
delete data.structure_definition.text;
delete data.structure_definition.Text; // Handle case sensitivity
}
rawStructureContent.textContent = JSON.stringify(data.structure_definition, null, 4);
hljs.highlightElement(rawStructureContent);
populateExampleSelector(resourceType); populateExampleSelector(resourceType);
}) })
.catch(error => { .catch(error => {
console.error("Error fetching structure:", error); console.error("Error fetching structure:", error);
structureFallbackMessage.style.display = 'none'; structureFallbackMessage.style.display = 'none';
structureDisplay.innerHTML = `<div class="alert alert-danger">Error loading structure: ${error.message}</div>`; structureDisplay.innerHTML = `<div class="alert alert-danger">Error loading structure: ${error.message}</div>`;
rawStructureContent.textContent = `Error loading structure: ${error.message}`; rawStructureContent.textContent = `Error: ${error.message}`;
populateExampleSelector(resourceType); populateExampleSelector(resourceType);
}) })
.finally(() => { .finally(() => {
@ -364,6 +385,7 @@ document.addEventListener('DOMContentLoaded', function() {
}); });
}); });
// Populate Example Selector
function populateExampleSelector(resourceOrProfileIdentifier) { function populateExampleSelector(resourceOrProfileIdentifier) {
console.log("Populating examples for:", resourceOrProfileIdentifier); console.log("Populating examples for:", resourceOrProfileIdentifier);
exampleResourceTypeTitle.textContent = resourceOrProfileIdentifier; exampleResourceTypeTitle.textContent = resourceOrProfileIdentifier;
@ -391,6 +413,7 @@ document.addEventListener('DOMContentLoaded', function() {
} }
} }
// Handle Example Selection
if (exampleSelect) { if (exampleSelect) {
exampleSelect.addEventListener('change', function(event) { exampleSelect.addEventListener('change', function(event) {
const selectedFilePath = this.value; const selectedFilePath = this.value;
@ -400,14 +423,16 @@ document.addEventListener('DOMContentLoaded', function() {
exampleFilename.textContent = ''; exampleFilename.textContent = '';
exampleContentWrapper.style.display = 'none'; exampleContentWrapper.style.display = 'none';
// --- ADDED: Reset Example Copy Button Tooltips ---
if (copyRawExTooltipInstance && copyRawExButton?.isConnected) {
    copyRawExTooltipInstance.hide();
    copyRawExButton.setAttribute('data-bs-original-title', 'Copy Raw Content');
    copyRawExButton.querySelector('i')?.classList.replace('bi-check-lg', 'bi-clipboard');
}
if (copyPrettyJsonTooltipInstance && copyPrettyJsonButton?.isConnected) {
    copyPrettyJsonTooltipInstance.hide();
    copyPrettyJsonButton.setAttribute('data-bs-original-title', 'Copy JSON');
    copyPrettyJsonButton.querySelector('i')?.classList.replace('bi-check-lg', 'bi-clipboard');
}
if (!selectedFilePath) return; if (!selectedFilePath) return;
@@ -424,21 +449,37 @@ document.addEventListener('DOMContentLoaded', function() {
.then(response => { .then(response => {
console.log("Example fetch response status:", response.status); console.log("Example fetch response status:", response.status);
if (!response.ok) { if (!response.ok) {
    return response.text().then(text => {
        try {
            const errData = JSON.parse(text);
            throw new Error(errData.error || `HTTP error ${response.status}: Server error`);
        } catch (e) {
            throw new Error(`HTTP error ${response.status}: Failed to load example ${selectedFilePath.split('/').pop()} in {{ processed_ig.package_name }}#{{ processed_ig.version }}. Please check server logs or try re-importing the package.`);
        }
    });
}
return response.text(); // Get raw JSON string
})
.then(data => {
    console.log("Example content received.");
    exampleFilename.textContent = `Source: ${selectedFilePath.split('/').pop()}`;
    // Raw JSON: flat string
    exampleContentRaw.textContent = data;
    // Pretty JSON: 4-space indented, remove narrative
    try {
        const parsedData = JSON.parse(data);
        if (parsedData && ('text' in parsedData || 'Text' in parsedData)) {
            console.warn(`Narrative text found in example ${selectedFilePath}, removing client-side`);
            delete parsedData.text;
            delete parsedData.Text; // Handle case sensitivity
        }
        exampleContentJson.textContent = JSON.stringify(parsedData, null, 4);
    } catch (e) {
        console.error("Error parsing JSON for pretty display:", e);
        exampleContentJson.textContent = `Error: Invalid JSON - ${e.message}`;
    }
    hljs.highlightElement(exampleContentRaw);
    hljs.highlightElement(exampleContentJson);
    exampleContentWrapper.style.display = 'block';
}) })
.catch(error => { .catch(error => {
@@ -454,12 +495,6 @@ document.addEventListener('DOMContentLoaded', function() {
}); });
} }
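Note: the narrative removal above is a client-side fallback; per the commit notes the narrative is also removed explicitly in the IG viewer. A minimal server-side counterpart, as a sketch only (the helper name is illustrative and not taken from app.py or services.py):

    # Hedged sketch: strip the generated narrative before a resource or
    # StructureDefinition is returned to the IG viewer. Illustrative only.
    def strip_narrative(resource):
        if isinstance(resource, dict):
            resource.pop('text', None)
            resource.pop('Text', None)  # tolerate the same capitalisation quirk the JS handles
        return resource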
// --- ADDED: Setup Copy Buttons Calls---
copyRawDefTooltipInstance = setupCopyButton(copyRawDefButton, rawStructureContent, copyRawDefTooltipInstance, 'Copied Definition!', 'Copy Raw Definition JSON', 'Copy Failed!', 'Nothing to copy');
copyRawExTooltipInstance = setupCopyButton(copyRawExButton, exampleContentRaw, copyRawExTooltipInstance, 'Copied Raw!', 'Copy Raw Content', 'Copy Failed!', 'Nothing to copy');
copyPrettyJsonTooltipInstance = setupCopyButton(copyPrettyJsonButton, exampleContentJson, copyPrettyJsonTooltipInstance, 'Copied JSON!', 'Copy JSON', 'Copy Failed!', 'Nothing to copy');
// --- END ADDED ---
function buildTreeData(elements) { function buildTreeData(elements) {
const treeRoot = { children: {}, element: null, name: 'Root' }; const treeRoot = { children: {}, element: null, name: 'Root' };
const nodeMap = { 'Root': treeRoot }; const nodeMap = { 'Root': treeRoot };
@@ -488,7 +523,6 @@ document.addEventListener('DOMContentLoaded', function() {
currentPath = i === 0 ? cleanPart : `${currentPath}.${cleanPart}`; currentPath = i === 0 ? cleanPart : `${currentPath}.${cleanPart}`;
let nodeKey = isChoiceElement ? part : cleanPart; let nodeKey = isChoiceElement ? part : cleanPart;
// Handle slices using id for accurate parent lookup
if (sliceName && i === parts.length - 1) { if (sliceName && i === parts.length - 1) {
nodeKey = sliceName; nodeKey = sliceName;
currentPath = id || currentPath; currentPath = id || currentPath;
@@ -502,7 +536,6 @@ document.addEventListener('DOMContentLoaded', function() {
path: currentPath path: currentPath
}; };
let parentPath = i === 0 ? 'Root' : parts.slice(0, i).join('.').replace(/\[\d+\]/g, '').replace('[x]', ''); let parentPath = i === 0 ? 'Root' : parts.slice(0, i).join('.').replace(/\[\d+\]/g, '').replace('[x]', '');
// Adjust parentPath for slices using id
if (id && id.includes(':')) { if (id && id.includes(':')) {
const idParts = id.split('.'); const idParts = id.split('.');
const sliceIndex = idParts.findIndex(p => p.includes(':')); const sliceIndex = idParts.findIndex(p => p.includes(':'));
@@ -519,7 +552,6 @@ document.addEventListener('DOMContentLoaded', function() {
nodeMap[currentPath] = newNode; nodeMap[currentPath] = newNode;
console.log(`Created node: path=${currentPath}, name=${nodeKey}, parentPath=${parentPath}`); console.log(`Created node: path=${currentPath}, name=${nodeKey}, parentPath=${parentPath}`);
// Add modifierExtension for DomainResource, BackboneElement, or explicitly defined
if (el.path === cleanPart + '.modifierExtension' || if (el.path === cleanPart + '.modifierExtension' ||
(el.type && el.type.some(t => t.code === 'DomainResource' || t.code === 'BackboneElement'))) { (el.type && el.type.some(t => t.code === 'DomainResource' || t.code === 'BackboneElement'))) {
const modExtPath = `${currentPath}.modifierExtension`; const modExtPath = `${currentPath}.modifierExtension`;
@@ -549,10 +581,9 @@ document.addEventListener('DOMContentLoaded', function() {
targetNode.element = el; targetNode.element = el;
console.log(`Assigned element to node: path=${currentPath}, id=${id}, sliceName=${sliceName}`); console.log(`Assigned element to node: path=${currentPath}, id=${id}, sliceName=${sliceName}`);
// Handle choice elements ([x])
if (isChoiceElement && el.type && el.type.length > 1) { if (isChoiceElement && el.type && el.type.length > 1) {
el.type.forEach(type => { el.type.forEach(type => {
if (type.code === 'dateTime') return; // Skip dateTime as per FHIR convention if (type.code === 'dateTime') return;
const typeName = `onset${type.code.charAt(0).toUpperCase() + type.code.slice(1)}`; const typeName = `onset${type.code.charAt(0).toUpperCase() + type.code.slice(1)}`;
const typePath = `${currentPath}.${typeName}`; const typePath = `${currentPath}.${typeName}`;
const typeNode = { const typeNode = {
@@ -603,9 +634,33 @@ document.addEventListener('DOMContentLoaded', function() {
console.log(`Rendering node: path=${path}, id=${id}, sliceName=${sliceName}`); console.log(`Rendering node: path=${path}, id=${id}, sliceName=${sliceName}`);
let isMustSupport = mustSupportPathsSet.has(path) || (id && mustSupportPathsSet.has(id)); let isMustSupport = mustSupportPathsSet.has(path) || (id && mustSupportPathsSet.has(id));
let mustSupportTitle = isMustSupport ? 'Must Support' : '';
let fhirPathExpression = '';
if (sliceName) {
const slicePath = `${path}:${sliceName}`;
if (mustSupportPathsSet.has(slicePath)) {
isMustSupport = true;
mustSupportTitle = `Must Support (Slice: ${sliceName})`;
fhirPathExpression = `${path}[sliceName='${sliceName}']`;
}
}
mustSupportPathsSet.forEach(msPath => {
if (msPath.includes('where') && msPath.startsWith(path)) {
isMustSupport = true;
mustSupportTitle = `Must Support (FHIRPath: ${msPath})`;
fhirPathExpression = msPath;
}
});
let isOptional = min == 0 && isMustSupport && el.type && el.type.some(t => t.code === 'Extension') && path.includes('extension'); let isOptional = min == 0 && isMustSupport && el.type && el.type.some(t => t.code === 'Extension') && path.includes('extension');
if (isOptional) {
mustSupportTitle = `Optional Must Support Extension${sliceName ? ` (Slice: ${sliceName})` : ''}`;
fhirPathExpression = sliceName ? `${path}[sliceName='${sliceName}']` : path;
}
mustSupportTitle += isMustSupport ? ' (Validated by HAPI FHIR Server)' : '';
const liClass = isMustSupport ? 'list-group-item py-1 px-2 list-group-item-warning' : 'list-group-item py-1 px-2'; const liClass = isMustSupport ? 'list-group-item py-1 px-2 list-group-item-warning' : 'list-group-item py-1 px-2';
const mustSupportDisplay = isMustSupport ? `<i class="bi bi-check-circle-fill ${isOptional ? 'text-info' : 'text-warning'} ms-1" title="${isOptional ? 'Optional Must Support Extension' : 'Must Support'}"></i>` : ''; const mustSupportDisplay = isMustSupport ? `<i class="bi bi-check-circle-fill ${isOptional ? 'text-info' : 'text-warning'} ms-1" title="${mustSupportTitle}" data-bs-toggle="tooltip"></i>` : '';
const hasChildren = Object.keys(node.children).length > 0; const hasChildren = Object.keys(node.children).length > 0;
const collapseId = `collapse-${path.replace(/[\.\:\/\[\]\(\)]/g, '-')}`; const collapseId = `collapse-${path.replace(/[\.\:\/\[\]\(\)]/g, '-')}`;
const padding = level * 20; const padding = level * 20;
@@ -643,7 +698,9 @@ document.addEventListener('DOMContentLoaded', function() {
itemHtml += `<i class="bi bi-chevron-right small toggle-icon"></i>`; itemHtml += `<i class="bi bi-chevron-right small toggle-icon"></i>`;
} }
itemHtml += `</span>`; itemHtml += `</span>`;
const displayName = sliceName ? `<i>${node.name}:${sliceName}</i>` : node.name;
const pathTitle = fhirPathExpression ? `${path} (FHIRPath: ${fhirPathExpression})` : path + (sliceName ? ` (Slice: ${sliceName})` : '');
itemHtml += `<code class="fw-bold ms-1" title="${pathTitle}" data-bs-toggle="tooltip">${displayName}</code>`;
itemHtml += `</div>`; itemHtml += `</div>`;
itemHtml += `<div class="col-lg-1 col-md-1 text-center text-muted small"><code>${min}..${max}</code></div>`; itemHtml += `<div class="col-lg-1 col-md-1 text-center text-muted small"><code>${min}..${max}</code></div>`;
itemHtml += `<div class="col-lg-3 col-md-3 text-truncate small">${typeString}</div>`; itemHtml += `<div class="col-lg-3 col-md-3 text-truncate small">${typeString}</div>`;
@@ -651,11 +708,11 @@ document.addEventListener('DOMContentLoaded', function() {
let descriptionTooltipAttrs = ''; let descriptionTooltipAttrs = '';
if (definition) { if (definition) {
const escapedDefinition = definition const escapedDefinition = definition
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#39;')
.replace(/\n/g, ' '); .replace(/\n/g, ' ');
descriptionTooltipAttrs = `data-bs-toggle="tooltip" data-bs-placement="top" title="${escapedDefinition}"`; descriptionTooltipAttrs = `data-bs-toggle="tooltip" data-bs-placement="top" title="${escapedDefinition}"`;
} }
@@ -697,9 +754,8 @@ document.addEventListener('DOMContentLoaded', function() {
} }
}); });
// Initialize collapse with manual click handling and debouncing
let lastClickTime = 0; let lastClickTime = 0;
const debounceDelay = 200; // ms const debounceDelay = 200;
structureDisplay.querySelectorAll('.collapse-toggle').forEach(toggleEl => { structureDisplay.querySelectorAll('.collapse-toggle').forEach(toggleEl => {
const collapseId = toggleEl.getAttribute('data-collapse-id'); const collapseId = toggleEl.getAttribute('data-collapse-id');
if (!collapseId) { if (!collapseId) {
@@ -718,12 +774,10 @@ document.addEventListener('DOMContentLoaded', function() {
} }
console.log("Initializing collapse for:", collapseId); console.log("Initializing collapse for:", collapseId);
// Initialize Bootstrap Collapse
const bsCollapse = new bootstrap.Collapse(collapseEl, { const bsCollapse = new bootstrap.Collapse(collapseEl, {
toggle: false toggle: false
}); });
// Custom click handler with debounce
toggleEl.addEventListener('click', event => { toggleEl.addEventListener('click', event => {
event.preventDefault(); event.preventDefault();
event.stopPropagation(); event.stopPropagation();
@@ -741,7 +795,6 @@ document.addEventListener('DOMContentLoaded', function() {
} }
}); });
// Update icon and log state
collapseEl.addEventListener('show.bs.collapse', event => { collapseEl.addEventListener('show.bs.collapse', event => {
console.log("Show collapse triggered for:", event.target.id, "Show class:", event.target.classList.contains('show')); console.log("Show collapse triggered for:", event.target.id, "Show class:", event.target.classList.contains('show'));
if (toggleIcon) { if (toggleIcon) {
@@ -756,12 +809,6 @@ document.addEventListener('DOMContentLoaded', function() {
toggleIcon.classList.add('bi-chevron-right'); toggleIcon.classList.add('bi-chevron-right');
} }
}); });
collapseEl.addEventListener('shown.bs.collapse', event => {
console.log("Shown collapse completed for:", event.target.id, "Show class:", event.target.classList.contains('show'));
});
collapseEl.addEventListener('hidden.bs.collapse', event => {
console.log("Hidden collapse completed for:", event.target.id, "Show class:", event.target.classList.contains('show'));
});
}); });
} }
} catch (e) { } catch (e) {

File diff suppressed because it is too large

View File

@@ -101,12 +101,10 @@ document.addEventListener('DOMContentLoaded', function() {
function updatePackageFields(select) { function updatePackageFields(select) {
const value = select.value; const value = select.value;
console.log('Selected package:', value); console.log('Selected package:', value);
if (!packageNameInput || !versionInput) { if (!packageNameInput || !versionInput) {
console.error('Input fields not found'); console.error('Input fields not found');
return; return;
} }
if (value) { if (value) {
const [name, version] = value.split('#'); const [name, version] = value.split('#');
if (name && version) { if (name && version) {
@@ -162,6 +160,29 @@ document.addEventListener('DOMContentLoaded', function() {
}); });
} }
// Check HAPI server status
function checkHapiStatus() {
return fetch('/fhir/metadata', { method: 'GET' })
.then(response => {
if (!response.ok) {
throw new Error('HAPI server unavailable');
}
return true;
})
.catch(error => {
console.warn('HAPI status check failed:', error.message);
validationContent.innerHTML = '<div class="alert alert-warning">HAPI server unavailable; using local validation.</div>';
return false;
});
}
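Note: checkHapiStatus() assumes the Flask app proxies /fhir/metadata to the embedded HAPI server (the behaviour exercised by test_34 further down). A minimal sketch of such a proxy, assuming Flask and requests; the actual route in app.py may differ:

    # Hedged sketch of a /fhir/metadata proxy: 200 passthrough, 503 when HAPI is unreachable.
    import requests
    from flask import Flask, jsonify

    app = Flask(__name__)  # stand-in for the real app object in app.py

    @app.route('/fhir/metadata')
    def fhir_metadata_proxy():
        try:
            resp = requests.get('http://localhost:8080/fhir/metadata', timeout=5)
            return jsonify(resp.json()), resp.status_code
        except requests.RequestException:
            return jsonify({'error': 'Unable to connect to HAPI FHIR server'}), 503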
// Sanitize text to prevent XSS
function sanitizeText(text) {
const div = document.createElement('div');
div.textContent = text;
return div.innerHTML;
}
// Validate button handler // Validate button handler
if (validateButton) { if (validateButton) {
validateButton.addEventListener('click', function() { validateButton.addEventListener('click', function() {
@@ -186,106 +207,165 @@ document.addEventListener('DOMContentLoaded', function() {
JSON.parse(sampleData); JSON.parse(sampleData);
} catch (e) { } catch (e) {
validationResult.style.display = 'block'; validationResult.style.display = 'block';
validationContent.innerHTML = '<div class="alert alert-danger">Invalid JSON: ' + e.message + '</div>'; validationContent.innerHTML = '<div class="alert alert-danger">Invalid JSON: ' + sanitizeText(e.message) + '</div>';
return; return;
} }
validationResult.style.display = 'block'; validationResult.style.display = 'block';
validationContent.innerHTML = '<div class="text-center py-3"><div class="spinner-border text-primary" role="status"><span class="visually-hidden">Validating...</span></div></div>'; validationContent.innerHTML = '<div class="text-center py-3"><div class="spinner-border text-primary" role="status"><span class="visually-hidden">Validating...</span></div></div>';
// Check HAPI status before validation
checkHapiStatus().then(() => {
    console.log('Sending request to /api/validate-sample with:', {
        package_name: packageName,
        version: version,
        include_dependencies: includeDependencies,
        sample_data_length: sampleData.length
    });
    fetch('/api/validate-sample', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'X-CSRFToken': '{{ form.csrf_token._value() }}'
        },
        body: JSON.stringify({
            package_name: packageName,
            version: version,
            include_dependencies: includeDependencies,
            sample_data: sampleData
        })
    })
    .then(response => {
        console.log('Server response status:', response.status, response.statusText);
        if (!response.ok) {
            return response.text().then(text => {
                console.error('Server response text:', text.substring(0, 200) + (text.length > 200 ? '...' : ''));
                throw new Error(`HTTP error ${response.status}: ${text.substring(0, 100)}...`);
            });
        }
        return response.json();
    })
    .then(data => {
        console.log('Validation response:', data);
        validationContent.innerHTML = '';
        let hasContent = false;

        // Display profile information
        if (data.results) {
            const profileDiv = document.createElement('div');
            profileDiv.className = 'mb-3';
            const profiles = new Set();
            Object.values(data.results).forEach(result => {
                if (result.profile) profiles.add(result.profile);
            });
            if (profiles.size > 0) {
                profileDiv.innerHTML = `
                    <h5>Validated Against Profile${profiles.size > 1 ? 's' : ''}</h5>
                    <ul>
                        ${Array.from(profiles).map(p => `<li><a href="${sanitizeText(p)}" target="_blank">${sanitizeText(p.split('/').pop())}</a></li>`).join('')}
                    </ul>
                `;
                validationContent.appendChild(profileDiv);
                hasContent = true;
            }
        }

        // Display errors
        if (data.errors && data.errors.length > 0) {
            hasContent = true;
            const errorDiv = document.createElement('div');
            errorDiv.className = 'alert alert-danger';
            errorDiv.innerHTML = `
                <h5>Errors</h5>
                <ul>
                    ${data.errors.map(e => `<li>${sanitizeText(typeof e === 'string' ? e : e.message)}</li>`).join('')}
                </ul>
            `;
            validationContent.appendChild(errorDiv);
        }

        // Display warnings
        if (data.warnings && data.warnings.length > 0) {
            hasContent = true;
            const warningDiv = document.createElement('div');
            warningDiv.className = 'alert alert-warning';
            warningDiv.innerHTML = `
                <h5>Warnings</h5>
                <ul>
                    ${data.warnings.map(w => {
                        const isMustSupport = w.includes('Must Support');
                        return `<li>${sanitizeText(typeof w === 'string' ? w : w.message)}${isMustSupport ? ' <i class="bi bi-info-circle" title="This element is marked as Must Support in the profile."></i>' : ''}</li>`;
                    }).join('')}
                </ul>
            `;
            validationContent.appendChild(warningDiv);
        }

        // Detailed results with collapsible sections
        if (data.results) {
            hasContent = true;
            const resultsDiv = document.createElement('div');
            resultsDiv.innerHTML = '<h5>Detailed Results</h5>';
            let index = 0;
            for (const [resourceId, result] of Object.entries(data.results)) {
                const collapseId = `result-collapse-${index++}`;
                resultsDiv.innerHTML += `
                    <div class="card mb-2">
                        <div class="card-header" role="button" data-bs-toggle="collapse" data-bs-target="#${collapseId}" aria-expanded="false" aria-controls="${collapseId}">
                            <h6 class="mb-0">${sanitizeText(resourceId)}</h6>
                            <p class="mb-0"><strong>Valid:</strong> ${result.valid ? 'Yes' : 'No'}</p>
                        </div>
                        <div id="${collapseId}" class="collapse">
                            <div class="card-body">
                                ${result.details && result.details.length > 0 ? `
                                    <ul>
                                        ${result.details.map(d => `
                                            <li class="${d.severity === 'error' ? 'text-danger' : d.severity === 'warning' ? 'text-warning' : 'text-info'}">
                                                ${sanitizeText(d.issue)}
                                                ${d.description && d.description !== d.issue ? `<br><small class="text-muted">${sanitizeText(d.description)}</small>` : ''}
                                            </li>
                                        `).join('')}
                                    </ul>
                                ` : '<p>No additional details.</p>'}
                            </div>
                        </div>
                    </div>
                `;
            }
            validationContent.appendChild(resultsDiv);
        }

        // Summary
        const summaryDiv = document.createElement('div');
        summaryDiv.className = 'alert alert-info';
        summaryDiv.innerHTML = `
            <h5>Summary</h5>
            <p>Valid: ${data.valid ? 'Yes' : 'No'}</p>
            <p>Errors: ${data.summary.error_count || 0}, Warnings: ${data.summary.warning_count || 0}</p>
        `;
        validationContent.appendChild(summaryDiv);
        hasContent = true;

        if (!hasContent && data.valid) {
            validationContent.innerHTML = '<div class="alert alert-success">Validation passed with no errors or warnings.</div>';
        }

        // Initialize Bootstrap collapse and tooltips
        document.querySelectorAll('[data-bs-toggle="collapse"]').forEach(el => {
            el.addEventListener('click', () => {
                const target = document.querySelector(el.getAttribute('data-bs-target'));
                target.classList.toggle('show');
            });
        });
        document.querySelectorAll('[data-bs-toggle="tooltip"]').forEach(el => {
            new bootstrap.Tooltip(el);
        });
    })
    .catch(error => {
        console.error('Validation error:', error);
        validationContent.innerHTML = `<div class="alert alert-danger">Error during validation: ${sanitizeText(error.message)}</div>`;
    });
}); });
}); });
} }
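Note: for orientation, the response shape the handler above consumes, inferred from the rendering code; the field names are an assumption, not a documented API contract:

    # Assumed /api/validate-sample response shape, inferred from the JavaScript above.
    example_validation_response = {
        'valid': False,
        'errors': ['Required element Patient.name missing'],
        'warnings': ['Must Support element Patient.identifier missing'],
        'results': {
            'Patient/valid1': {
                'valid': False,
                'profile': 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
                'details': [{'severity': 'error', 'issue': 'Required element Patient.name missing', 'description': 'Minimum cardinality 1 not met'}],
            }
        },
        'summary': {'error_count': 1, 'warning_count': 1},
    }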

View File

@@ -5,41 +5,31 @@ import json
import tarfile import tarfile
import shutil import shutil
import io import io
# Use import requests early to ensure it's available for exceptions if needed
import requests import requests
# Added patch, MagicMock etc. Also patch.object
from unittest.mock import patch, MagicMock, mock_open, call from unittest.mock import patch, MagicMock, mock_open, call
# Added session import for flash message checking, timezone for datetime
from flask import Flask, session
from flask import Flask, session, render_template
from flask.testing import FlaskClient from flask.testing import FlaskClient
from datetime import datetime, timezone from datetime import datetime, timezone
# Add the parent directory (/app) to sys.path # Add the parent directory (/app) to sys.path
# Ensure this points correctly to the directory containing 'app.py' and 'services.py'
APP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) APP_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if APP_DIR not in sys.path: if APP_DIR not in sys.path:
sys.path.insert(0, APP_DIR) sys.path.insert(0, APP_DIR)
# Import app and models AFTER potentially modifying sys.path
# Assuming app.py and services.py are in APP_DIR
from app import app, db, ProcessedIg from app import app, db, ProcessedIg
import services # Import the module itself for patch.object import services
# Helper function to parse NDJSON stream # Helper function to parse NDJSON stream
def parse_ndjson(byte_stream): def parse_ndjson(byte_stream):
"""Parses a byte stream of NDJSON into a list of Python objects."""
decoded_stream = byte_stream.decode('utf-8').strip() decoded_stream = byte_stream.decode('utf-8').strip()
if not decoded_stream: if not decoded_stream:
return [] return []
lines = decoded_stream.split('\n') lines = decoded_stream.split('\n')
# Filter out empty lines before parsing
return [json.loads(line) for line in lines if line.strip()] return [json.loads(line) for line in lines if line.strip()]
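For reference, a quick usage example of the helper above (illustrative only):

    # Example: two NDJSON lines parsed into a list of dicts.
    body = b'{"resourceType": "Patient", "id": "p1"}\n{"resourceType": "Observation", "id": "o1"}\n'
    resources = parse_ndjson(body)
    assert [r['id'] for r in resources] == ['p1', 'o1']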
class TestFHIRFlareIGToolkit(unittest.TestCase): class TestFHIRFlareIGToolkit(unittest.TestCase):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
"""Configure the Flask app for testing ONCE for the entire test class."""
app.config['TESTING'] = True app.config['TESTING'] = True
app.config['WTF_CSRF_ENABLED'] = False app.config['WTF_CSRF_ENABLED'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:' app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
@@ -55,16 +45,13 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
db.create_all() db.create_all()
cls.client = app.test_client() cls.client = app.test_client()
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
"""Clean up DB and context after all tests."""
cls.app_context.pop() cls.app_context.pop()
if os.path.exists(cls.test_packages_dir): if os.path.exists(cls.test_packages_dir):
shutil.rmtree(cls.test_packages_dir) shutil.rmtree(cls.test_packages_dir)
def setUp(self): def setUp(self):
"""Set up before each test method."""
if os.path.exists(self.test_packages_dir): if os.path.exists(self.test_packages_dir):
shutil.rmtree(self.test_packages_dir) shutil.rmtree(self.test_packages_dir)
os.makedirs(self.test_packages_dir, exist_ok=True) os.makedirs(self.test_packages_dir, exist_ok=True)
@@ -74,32 +61,62 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
db.session.commit() db.session.commit()
def tearDown(self): def tearDown(self):
"""Clean up after each test method."""
pass pass
# --- Helper Method --- # Helper Method
def create_mock_tgz(self, filename, files_content): def create_mock_tgz(self, filename, files_content):
"""Creates a mock .tgz file with specified contents."""
tgz_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename) tgz_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)
with tarfile.open(tgz_path, "w:gz") as tar: with tarfile.open(tgz_path, "w:gz") as tar:
for name, content in files_content.items(): for name, content in files_content.items():
if isinstance(content, (dict, list)):
    data_bytes = json.dumps(content).encode('utf-8')
elif isinstance(content, str):
    data_bytes = content.encode('utf-8')
else:
    raise TypeError(f"Unsupported type for mock file '{name}': {type(content)}")
file_io = io.BytesIO(data_bytes)
tarinfo = tarfile.TarInfo(name=name)
tarinfo.size = len(data_bytes)
tarinfo.mtime = int(datetime.now(timezone.utc).timestamp())
tar.addfile(tarinfo, file_io)
return tgz_path
# --- Phase 1 Tests ---
def test_01_navigate_fhir_path(self):
resource = {
"resourceType": "Patient",
"name": [{"given": ["John"]}],
"identifier": [{"system": "http://hl7.org/fhir/sid/us-ssn", "sliceName": "us-ssn"}],
"extension": [{"url": "http://hl7.org/fhir/StructureDefinition/patient-birthPlace", "valueAddress": {"city": "Boston"}}]
}
self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"])
self.assertEqual(services.navigate_fhir_path(resource, "Patient.identifier:us-ssn.system"), "http://hl7.org/fhir/sid/us-ssn")
self.assertEqual(services.navigate_fhir_path(resource, "Patient.extension", extension_url="http://hl7.org/fhir/StructureDefinition/patient-birthPlace")["valueAddress"]["city"], "Boston")
with patch('fhirpath.evaluate', side_effect=Exception("fhirpath error")):
self.assertEqual(services.navigate_fhir_path(resource, "Patient.name[0].given"), ["John"])
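Note: test_01 exercises services.navigate_fhir_path, which (per the Dockerfile change) now sits on top of fhirpathpy. A minimal sketch of the underlying library call for orientation only; the wrapper's fallback and slice handling are assumptions not shown here:

    # Hedged sketch: evaluating a FHIRPath expression with fhirpathpy.
    # services.navigate_fhir_path itself may differ; this only shows the library call.
    import fhirpathpy

    patient = {"resourceType": "Patient", "name": [{"given": ["John"]}]}
    given_names = fhirpathpy.evaluate(patient, "Patient.name.given", [])
    print(given_names)  # ['John']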
def test_02_render_node_as_li(self):
node = {
"element": {"path": "Patient.identifier", "id": "Patient.identifier", "sliceName": "us-ssn", "min": 0, "max": "*", "type": [{"code": "Identifier"}]},
"name": "identifier",
"children": {}
}
must_support_paths = {"Patient.identifier:us-ssn"}
with app.app_context():
    from flask import render_template  # local import: render_template was dropped from the module-level imports above
    html = render_template('cp_view_processed_ig.html', processed_ig=MagicMock(must_support_elements={"USCorePatientProfile": ["Patient.identifier:us-ssn"]}), profile_list=[{"name": "USCorePatientProfile"}], base_list=[])
self.assertIn("identifier:us-ssn", html)
self.assertIn("list-group-item-warning", html)
self.assertIn("Must Support (Slice: us-ssn)", html)
# --- Basic Page Rendering Tests --- # --- Basic Page Rendering Tests ---
def test_01_homepage(self): def test_03_homepage(self):
response = self.client.get('/') response = self.client.get('/')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertIn(b'FHIRFLARE IG Toolkit', response.data) self.assertIn(b'FHIRFLARE IG Toolkit', response.data)
def test_02_import_ig_page(self): def test_04_import_ig_page(self):
response = self.client.get('/import-ig') response = self.client.get('/import-ig')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertIn(b'Import IG', response.data) self.assertIn(b'Import IG', response.data)
@@ -108,23 +125,23 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
self.assertIn(b'name="dependency_mode"', response.data) self.assertIn(b'name="dependency_mode"', response.data)
@patch('app.list_downloaded_packages', return_value=([], [], {})) @patch('app.list_downloaded_packages', return_value=([], [], {}))
def test_03_view_igs_no_packages(self, mock_list_pkgs): def test_05_view_igs_no_packages(self, mock_list_pkgs):
response = self.client.get('/view-igs') response = self.client.get('/view-igs')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertNotIn(b'<th>Package Name</th>', response.data) self.assertNotIn(b'<th>Package Name</th>', response.data)
self.assertIn(b'No packages downloaded yet.', response.data) self.assertIn(b'No packages downloaded yet.', response.data)
mock_list_pkgs.assert_called_once() mock_list_pkgs.assert_called_once()
def test_04_view_igs_with_packages(self): def test_06_view_igs_with_packages(self):
self.create_mock_tgz('hl7.fhir.us.core-3.1.1.tgz', {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '3.1.1'}}) self.create_mock_tgz('hl7.fhir.us.core-6.1.0.tgz', {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
response = self.client.get('/view-igs') response = self.client.get('/view-igs')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertIn(b'hl7.fhir.us.core', response.data) self.assertIn(b'hl7.fhir.us.core', response.data)
self.assertIn(b'3.1.1', response.data) self.assertIn(b'6.1.0', response.data)
self.assertIn(b'<th>Package Name</th>', response.data) self.assertIn(b'<th>Package Name</th>', response.data)
@patch('app.render_template') @patch('app.render_template')
def test_05_push_igs_page(self, mock_render_template): def test_07_push_igs_page(self, mock_render_template):
mock_render_template.return_value = "Mock Render" mock_render_template.return_value = "Mock Render"
response = self.client.get('/push-igs') response = self.client.get('/push-igs')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
@@ -136,285 +153,636 @@ class TestFHIRFlareIGToolkit(unittest.TestCase):
@patch('app.services.import_package_and_dependencies') @patch('app.services.import_package_and_dependencies')
def test_10_import_ig_form_success(self, mock_import): def test_10_import_ig_form_success(self, mock_import):
mock_import.return_value = { 'requested': ('hl7.fhir.us.core', '3.1.1'), 'processed': {('hl7.fhir.us.core', '3.1.1')}, 'downloaded': {('hl7.fhir.us.core', '3.1.1'): 'path/pkg.tgz'}, 'all_dependencies': {}, 'dependencies': [], 'errors': [] } mock_import.return_value = {'requested': ('hl7.fhir.us.core', '6.1.0'), 'processed': {('hl7.fhir.us.core', '6.1.0')}, 'downloaded': {('hl7.fhir.us.core', '6.1.0'): 'path/pkg.tgz'}, 'all_dependencies': {}, 'dependencies': [], 'errors': []}
response = self.client.post('/import-ig', data={'package_name': 'hl7.fhir.us.core', 'package_version': '3.1.1', 'dependency_mode': 'recursive'}, follow_redirects=True) response = self.client.post('/import-ig', data={'package_name': 'hl7.fhir.us.core', 'package_version': '6.1.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b'Successfully downloaded hl7.fhir.us.core#6.1.0 and dependencies! Mode: recursive', response.data)
mock_import.assert_called_once_with('hl7.fhir.us.core', '6.1.0', dependency_mode='recursive')
@patch('app.services.import_package_and_dependencies') @patch('app.services.import_package_and_dependencies')
def test_11_import_ig_form_failure_404(self, mock_import): def test_11_import_ig_form_failure_404(self, mock_import):
mock_import.return_value = { 'requested': ('invalid.package', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error fetching package: 404 Client Error: Not Found for url: ...'] } mock_import.return_value = {'requested': ('invalid.package', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error fetching package: 404 Client Error: Not Found for url: ...']}
response = self.client.post('/import-ig', data={'package_name': 'invalid.package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False) response = self.client.post('/import-ig', data={'package_name': 'invalid.package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
self.assertEqual(response.status_code, 200)
self.assertIn(b'Package not found on registry (404)', response.data)
@patch('app.services.import_package_and_dependencies') @patch('app.services.import_package_and_dependencies')
def test_12_import_ig_form_failure_conn_error(self, mock_import): def test_12_import_ig_form_failure_conn_error(self, mock_import):
mock_import.return_value = { 'requested': ('conn.error.pkg', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['Connection error: Cannot connect to registry...'] } mock_import.return_value = {'requested': ('conn.error.pkg', '1.0.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['Connection error: Cannot connect to registry...']}
response = self.client.post('/import-ig', data={'package_name': 'conn.error.pkg', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False) response = self.client.post('/import-ig', data={'package_name': 'conn.error.pkg', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=False)
self.assertEqual(response.status_code, 200)
self.assertIn(b'Could not connect to the FHIR package registry', response.data)
def test_13_import_ig_form_invalid_input(self): def test_13_import_ig_form_invalid_input(self):
response = self.client.post('/import-ig', data={'package_name': 'invalid@package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=True) response = self.client.post('/import-ig', data={'package_name': 'invalid@package', 'package_version': '1.0.0', 'dependency_mode': 'recursive'}, follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b'Error in Package Name: Invalid package name format.', response.data)
@patch('app.services.process_package_file') @patch('app.services.process_package_file')
@patch('app.services.parse_package_filename') @patch('app.services.parse_package_filename')
def test_20_process_ig_success(self, mock_parse, mock_process): def test_20_process_ig_success(self, mock_parse, mock_process):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
mock_parse.return_value = (pkg_name, pkg_version)
mock_process.return_value = {
    'resource_types_info': [{'name': 'Patient', 'type': 'Patient', 'is_profile': False, 'must_support': True, 'optional_usage': False}],
    'must_support_elements': {'Patient': ['Patient.name', 'Patient.identifier:us-ssn']},
    'examples': {'Patient': ['package/Patient-example.json']},
    'complies_with_profiles': [],
    'imposed_profiles': ['http://hl7.org/fhir/StructureDefinition/Patient'],
    'errors': []
}
self.create_mock_tgz(filename, {'package/package.json': {'name': pkg_name, 'version': pkg_version}})
response = self.client.post('/process-igs', data={'filename': filename}, follow_redirects=False)
self.assertEqual(response.status_code, 302)
self.assertTrue(response.location.endswith('/view-igs'))
with self.client.session_transaction() as sess:
    self.assertIn(('success', f'Successfully processed {pkg_name}#{pkg_version}!'), sess.get('_flashes', []))
mock_parse.assert_called_once_with(filename)
mock_process.assert_called_once_with(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))
processed_ig = db.session.query(ProcessedIg).filter_by(package_name=pkg_name, version=pkg_version).first()
self.assertIsNotNone(processed_ig)
self.assertEqual(processed_ig.package_name, pkg_name)
self.assertIn('Patient.name', processed_ig.must_support_elements.get('Patient', []))
def test_21_process_ig_file_not_found(self): def test_21_process_ig_file_not_found(self):
response = self.client.post('/process-igs', data={'filename': 'nonexistent.tgz'}, follow_redirects=True) response = self.client.post('/process-igs', data={'filename': 'nonexistent.tgz'}, follow_redirects=True)
self.assertEqual(response.status_code, 200); self.assertIn(b'Package file not found: nonexistent.tgz', response.data) self.assertEqual(response.status_code, 200)
self.assertIn(b'Package file not found: nonexistent.tgz', response.data)
def test_22_delete_ig_success(self): def test_22_delete_ig_success(self):
filename = 'hl7.fhir.us.core-6.1.0.tgz'
metadata_filename = 'hl7.fhir.us.core-6.1.0.metadata.json'
self.create_mock_tgz(filename, {'package/package.json': {'name': 'hl7.fhir.us.core', 'version': '6.1.0'}})
metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
open(metadata_path, 'w').write(json.dumps({'name': 'hl7.fhir.us.core'}))
self.assertTrue(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
self.assertTrue(os.path.exists(metadata_path))
response = self.client.post('/delete-ig', data={'filename': filename}, follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(f'Deleted: {filename}, {metadata_filename}'.encode('utf-8'), response.data)
self.assertFalse(os.path.exists(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)))
self.assertFalse(os.path.exists(metadata_path))
def test_23_unload_ig_success(self): def test_23_unload_ig_success(self):
processed_ig = ProcessedIg(package_name='test.pkg', version='1.0', processed_date=datetime.now(timezone.utc), resource_types_info=[], must_support_elements={}, examples={}) processed_ig = ProcessedIg(package_name='test.pkg', version='1.0', processed_date=datetime.now(timezone.utc), resource_types_info=[], must_support_elements={}, examples={})
db.session.add(processed_ig)
db.session.commit()
ig_id = processed_ig.id
self.assertIsNotNone(db.session.get(ProcessedIg, ig_id))
response = self.client.post('/unload-ig', data={'ig_id': str(ig_id)}, follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b'Unloaded processed data for test.pkg#1.0', response.data)
self.assertIsNone(db.session.get(ProcessedIg, ig_id))
# --- API Tests --- # --- Phase 2 Tests ---
@patch('os.path.exists', return_value=True)
@patch('tarfile.open')
@patch('requests.put')
def test_30_load_ig_to_hapi_success(self, mock_requests_put, mock_tarfile_open, mock_os_exists):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
self.create_mock_tgz(filename, {
'package/package.json': {'name': pkg_name, 'version': pkg_version},
'package/Patient-profile.json': {'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}
})
mock_tar = MagicMock()
profile_member = MagicMock(spec=tarfile.TarInfo)
profile_member.name = 'package/Patient-profile.json'
profile_member.isfile.return_value = True
mock_tar.getmembers.return_value = [profile_member]
mock_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'StructureDefinition', 'id': 'us-core-patient'}).encode('utf-8'))
mock_tarfile_open.return_value.__enter__.return_value = mock_tar
mock_requests_put.return_value = MagicMock(status_code=200)
response = self.client.post(
'/api/load-ig-to-hapi',
data=json.dumps({'package_name': pkg_name, 'version': pkg_version}),
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertEqual(data['status'], 'success')
mock_requests_put.assert_called_once_with(
'http://localhost:8080/fhir/StructureDefinition/us-core-patient',
json={'resourceType': 'StructureDefinition', 'id': 'us-core-patient'},
headers={'Content-Type': 'application/fhir+json'}
)
def test_31_load_ig_to_hapi_not_found(self):
response = self.client.post(
'/api/load-ig-to-hapi',
data=json.dumps({'package_name': 'nonexistent', 'version': '1.0'}),
content_type='application/json'
)
self.assertEqual(response.status_code, 404)
data = json.loads(response.data)
self.assertEqual(data['error'], 'Package not found')
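Note: a rough sketch of the endpoint these two tests exercise, inferred from the mocks (a 404 for a missing package, otherwise one PUT per resource in the tgz); the real /api/load-ig-to-hapi route in app.py may differ:

    # Hedged sketch of /api/load-ig-to-hapi, matching what the mocks above expect.
    import json, os, tarfile
    import requests
    from flask import Flask, jsonify, request

    app = Flask(__name__)  # stand-in for the real app object in app.py
    app.config.setdefault('FHIR_PACKAGES_DIR', '/tmp/fhir_packages')  # placeholder path

    @app.route('/api/load-ig-to-hapi', methods=['POST'])
    def load_ig_to_hapi_sketch():
        data = request.get_json()
        tgz = os.path.join(app.config['FHIR_PACKAGES_DIR'], f"{data['package_name']}-{data['version']}.tgz")
        if not os.path.exists(tgz):
            return jsonify({'error': 'Package not found'}), 404
        with tarfile.open(tgz, 'r:gz') as tar:
            for member in tar.getmembers():
                # the real code presumably filters out package metadata files like package/package.json
                if member.isfile() and member.name.endswith('.json') and member.name != 'package/package.json':
                    resource = json.load(tar.extractfile(member))
                    requests.put(
                        f"http://localhost:8080/fhir/{resource['resourceType']}/{resource['id']}",
                        json=resource,
                        headers={'Content-Type': 'application/fhir+json'}
                    )
        return jsonify({'status': 'success'})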
@patch('os.path.exists', return_value=True)
@patch('requests.post')
def test_32_api_validate_sample_hapi_success(self, mock_requests_post, mock_os_exists):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
sample_resource = {
'resourceType': 'Patient',
'id': 'valid1',
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']},
'name': [{'given': ['John'], 'family': 'Doe'}]
}
mock_requests_post.return_value = MagicMock(
status_code=200,
json=lambda: {
'resourceType': 'OperationOutcome',
'issue': [{'severity': 'warning', 'diagnostics': 'Must Support element Patient.identifier missing'}]
}
)
response = self.client.post(
'/api/validate-sample',
data=json.dumps({
'package_name': pkg_name,
'version': pkg_version,
'sample_data': json.dumps(sample_resource),
'mode': 'single',
'include_dependencies': True
}),
content_type='application/json',
headers={'X-API-Key': 'test-api-key'}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertTrue(data['valid'])
self.assertEqual(data['warnings'], ['Must Support element Patient.identifier missing'])
mock_requests_post.assert_called_once_with(
'http://localhost:8080/fhir/Patient/$validate?profile=http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient',
json=sample_resource,
headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
timeout=10
)
@patch('os.path.exists', return_value=True)
@patch('requests.post', side_effect=requests.ConnectionError("HAPI down"))
@patch('services.navigate_fhir_path')
def test_33_api_validate_sample_hapi_fallback(self, mock_navigate_fhir_path, mock_requests_post, mock_os_exists):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
sample_resource = {
'resourceType': 'Patient',
'id': 'valid1',
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
}
mock_navigate_fhir_path.return_value = None
self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
'package/package.json': {'name': pkg_name, 'version': pkg_version},
'package/StructureDefinition-us-core-patient.json': {
'resourceType': 'StructureDefinition',
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
}
})
response = self.client.post(
'/api/validate-sample',
data=json.dumps({
'package_name': pkg_name,
'version': pkg_version,
'sample_data': json.dumps(sample_resource),
'mode': 'single',
'include_dependencies': True
}),
content_type='application/json',
headers={'X-API-Key': 'test-api-key'}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertFalse(data['valid'])
self.assertIn('Required element Patient.name missing', data['errors'])
self.assertIn('HAPI validation failed', [d['issue'] for d in data['details']])
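Note: tests 32 and 33 assume a $validate round-trip to HAPI with a local StructureDefinition fallback when the server is unreachable. A hedged sketch of that call shape only, not the code in services.py:

    # Hedged sketch: validate a resource against HAPI's $validate, fall back locally if unreachable.
    import requests

    def validate_against_hapi(resource, profile_url=None, hapi_base='http://localhost:8080/fhir'):
        url = f"{hapi_base}/{resource['resourceType']}/$validate"
        if profile_url:
            url += f"?profile={profile_url}"
        try:
            response = requests.post(
                url,
                json=resource,
                headers={'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'},
                timeout=10,
            )
            issues = response.json().get('issue', [])
            errors = [i.get('diagnostics', '') for i in issues if i.get('severity') == 'error']
            warnings = [i.get('diagnostics', '') for i in issues if i.get('severity') == 'warning']
            return {'valid': not errors, 'errors': errors, 'warnings': warnings}
        except requests.RequestException:
            # Fall back to local min-cardinality / Must Support checks, as test_33 expects.
            return None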
# --- Phase 3 Tests ---
@patch('requests.get')
def test_34_hapi_status_check(self, mock_requests_get):
mock_requests_get.return_value = MagicMock(status_code=200, json=lambda: {'resourceType': 'CapabilityStatement'})
response = self.client.get('/fhir/metadata')
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertEqual(data['resourceType'], 'CapabilityStatement')
mock_requests_get.side_effect = requests.ConnectionError("HAPI down")
response = self.client.get('/fhir/metadata')
self.assertEqual(response.status_code, 503)
data = json.loads(response.data)
self.assertIn('Unable to connect to HAPI FHIR server', data['error'])
def test_35_validate_sample_ui_rendering(self):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
sample_resource = {
'resourceType': 'Patient',
'id': 'test',
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
}
self.create_mock_tgz(f'{pkg_name}-{pkg_version}.tgz', {
'package/package.json': {'name': pkg_name, 'version': pkg_version},
'package/StructureDefinition-us-core-patient.json': {
'resourceType': 'StructureDefinition',
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True}]}
}
})
response = self.client.post(
'/api/validate-sample',
data=json.dumps({
'package_name': pkg_name,
'version': pkg_version,
'sample_data': json.dumps(sample_resource),
'mode': 'single',
'include_dependencies': True
}),
content_type='application/json',
headers={'X-API-Key': 'test-api-key'}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertFalse(data['valid'])
self.assertIn('Required element Patient.name missing', data['errors'])
self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
response = self.client.get('/validate-sample')
self.assertEqual(response.status_code, 200)
self.assertIn(b'us-core-patient', response.data)
def test_36_must_support_consistency(self):
pkg_name = 'hl7.fhir.us.core'
pkg_version = '6.1.0'
filename = f'{pkg_name}-{pkg_version}.tgz'
self.create_mock_tgz(filename, {
'package/package.json': {'name': pkg_name, 'version': pkg_version},
'package/StructureDefinition-us-core-patient.json': {
'resourceType': 'StructureDefinition',
'snapshot': {'element': [{'path': 'Patient.name', 'min': 1}, {'path': 'Patient.identifier', 'mustSupport': True, 'sliceName': 'us-ssn'}]}
}
})
services.process_package_file(os.path.join(app.config['FHIR_PACKAGES_DIR'], filename))
sample_resource = {
'resourceType': 'Patient',
'id': 'test',
'meta': {'profile': ['http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient']}
}
response = self.client.post(
'/api/validate-sample',
data=json.dumps({
'package_name': pkg_name,
'version': pkg_version,
'sample_data': json.dumps(sample_resource),
'mode': 'single',
'include_dependencies': True
}),
content_type='application/json',
headers={'X-API-Key': 'test-api-key'}
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertIn('Must Support element Patient.identifier missing', data['warnings'])
with self.app_context:
ig = ProcessedIg.query.filter_by(package_name=pkg_name, version=pkg_version).first()
self.assertIsNotNone(ig)
must_support_paths = ig.must_support_elements.get('Patient', [])
self.assertIn('Patient.identifier:us-ssn', must_support_paths)
response = self.client.get(f'/view-ig/{ig.id}')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Patient.identifier:us-ssn', response.data)
self.assertIn(b'list-group-item-warning', response.data)
# --- Existing API Tests ---
@patch('app.list_downloaded_packages') @patch('app.list_downloaded_packages')
@patch('app.services.process_package_file') @patch('app.services.process_package_file')
@patch('app.services.import_package_and_dependencies') @patch('app.services.import_package_and_dependencies')
@patch('os.path.exists') @patch('os.path.exists')
def test_30_api_import_ig_success(self, mock_os_exists, mock_import, mock_process, mock_list_pkgs): def test_40_api_import_ig_success(self, mock_os_exists, mock_import, mock_process, mock_list_pkgs):
pkg_name = 'api.test.pkg'
pkg_version = '1.2.3'
filename = f'{pkg_name}-{pkg_version}.tgz'
pkg_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], filename)
mock_import.return_value = {'requested': (pkg_name, pkg_version), 'processed': {(pkg_name, pkg_version)}, 'downloaded': {(pkg_name, pkg_version): pkg_path}, 'all_dependencies': {}, 'dependencies': [], 'errors': []}
mock_process.return_value = {'resource_types_info': [], 'must_support_elements': {}, 'examples': {}, 'complies_with_profiles': ['http://prof.com/a'], 'imposed_profiles': [], 'errors': []}
mock_os_exists.return_value = True
mock_list_pkgs.return_value = ([{'name': pkg_name, 'version': pkg_version, 'filename': filename}], [], {})
response = self.client.post(
    '/api/import-ig',
    data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'dependency_mode': 'direct', 'api_key': 'test-api-key'}),
    content_type='application/json'
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.data)
self.assertEqual(data['status'], 'success')
self.assertEqual(data['complies_with_profiles'], ['http://prof.com/a'])
@patch('app.services.import_package_and_dependencies') @patch('app.services.import_package_and_dependencies')
def test_31_api_import_ig_failure(self, mock_import): def test_41_api_import_ig_failure(self, mock_import):
mock_import.return_value = { 'requested': ('bad.pkg', '1.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error: 404 Not Found'] } mock_import.return_value = {'requested': ('bad.pkg', '1.0'), 'processed': set(), 'downloaded': {}, 'all_dependencies': {}, 'dependencies': [], 'errors': ['HTTP error: 404 Not Found']}
response = self.client.post(
    '/api/import-ig',
    data=json.dumps({'package_name': 'bad.pkg', 'version': '1.0', 'api_key': 'test-api-key'}),
    content_type='application/json'
)
self.assertEqual(response.status_code, 404)
data = json.loads(response.data)
self.assertIn('Failed to import bad.pkg#1.0: HTTP error: 404 Not Found', data['message'])
def test_42_api_import_ig_invalid_key(self):
    response = self.client.post(
        '/api/import-ig',
        data=json.dumps({'package_name': 'a', 'version': '1', 'api_key': 'wrong'}),
        content_type='application/json'
    )
    self.assertEqual(response.status_code, 401)

def test_43_api_import_ig_missing_key(self):
    response = self.client.post(
        '/api/import-ig',
        data=json.dumps({'package_name': 'a', 'version': '1'}),
        content_type='application/json'
    )
    self.assertEqual(response.status_code, 401)

# --- API Push Tests ---
@patch('os.path.exists', return_value=True)
@patch('app.services.get_package_metadata')
@patch('tarfile.open')
@patch('requests.Session')
def test_50_api_push_ig_success(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
    pkg_name = 'push.test.pkg'
    pkg_version = '1.0.0'
    filename = f'{pkg_name}-{pkg_version}.tgz'
    fhir_server_url = 'http://fake-fhir.com/baseR4'
    mock_get_metadata.return_value = {'imported_dependencies': []}
    mock_tar = MagicMock()
    mock_patient = {'resourceType': 'Patient', 'id': 'pat1'}
    mock_obs = {'resourceType': 'Observation', 'id': 'obs1', 'status': 'final'}
    patient_member = MagicMock(spec=tarfile.TarInfo)
    patient_member.name = 'package/Patient-pat1.json'
    patient_member.isfile.return_value = True
    obs_member = MagicMock(spec=tarfile.TarInfo)
    obs_member.name = 'package/Observation-obs1.json'
    obs_member.isfile.return_value = True
    mock_tar.getmembers.return_value = [patient_member, obs_member]

    # FIX: Restore full mock_extractfile definition
    def mock_extractfile(member):
        if member.name == 'package/Patient-pat1.json':
            return io.BytesIO(json.dumps(mock_patient).encode('utf-8'))
        if member.name == 'package/Observation-obs1.json':
            return io.BytesIO(json.dumps(mock_obs).encode('utf-8'))
        return None

    mock_tar.extractfile.side_effect = mock_extractfile
    mock_tarfile_open.return_value.__enter__.return_value = mock_tar
    mock_session_instance = MagicMock()
    mock_put_response = MagicMock(status_code=200)
    mock_put_response.raise_for_status.return_value = None
    mock_session_instance.put.return_value = mock_put_response
    mock_session.return_value = mock_session_instance
    self.create_mock_tgz(filename, {'package/dummy.txt': 'content'})
    response = self.client.post(
        '/api/push-ig',
        data=json.dumps({
            'package_name': pkg_name,
            'version': pkg_version,
            'fhir_server_url': fhir_server_url,
            'include_dependencies': False,
            'api_key': 'test-api-key'
        }),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
    )
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.mimetype, 'application/x-ndjson')
    streamed_data = parse_ndjson(response.data)
    complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
    self.assertIsNotNone(complete_msg)
    summary = complete_msg.get('data', {})
    self.assertEqual(summary.get('status'), 'success')
    self.assertEqual(summary.get('success_count'), 2)
    self.assertEqual(len(summary.get('failed_details')), 0)
    mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))
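
# NOTE (illustrative, not part of this change set): the push tests above use a
# parse_ndjson() helper that is defined earlier in this test module and is not
# visible in this excerpt. Assuming the endpoint streams newline-delimited JSON
# objects, a minimal sketch of such a helper could look like this:
def parse_ndjson(raw_bytes):
    # Decode the application/x-ndjson body and parse each non-empty line as JSON.
    lines = raw_bytes.decode('utf-8').strip().splitlines()
    return [json.loads(line) for line in lines if line.strip()]
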
@patch('os.path.exists', return_value=True)
@patch('app.services.get_package_metadata')
@patch('tarfile.open')
@patch('requests.Session')
def test_51_api_push_ig_with_failures(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
    pkg_name = 'push.fail.pkg'
    pkg_version = '1.0.0'
    filename = f'{pkg_name}-{pkg_version}.tgz'
    fhir_server_url = 'http://fail-fhir.com/baseR4'
    mock_get_metadata.return_value = {'imported_dependencies': []}
    mock_tar = MagicMock()
    mock_ok_res = {'resourceType': 'Patient', 'id': 'ok1'}
    mock_fail_res = {'resourceType': 'Observation', 'id': 'fail1'}
    ok_member = MagicMock(spec=tarfile.TarInfo)
    ok_member.name = 'package/Patient-ok1.json'
    ok_member.isfile.return_value = True
    fail_member = MagicMock(spec=tarfile.TarInfo)
    fail_member.name = 'package/Observation-fail1.json'
    fail_member.isfile.return_value = True
    mock_tar.getmembers.return_value = [ok_member, fail_member]

    # FIX: Restore full mock_extractfile definition
    def mock_extractfile(member):
        if member.name == 'package/Patient-ok1.json':
            return io.BytesIO(json.dumps(mock_ok_res).encode('utf-8'))
        if member.name == 'package/Observation-fail1.json':
            return io.BytesIO(json.dumps(mock_fail_res).encode('utf-8'))
        return None

    mock_tar.extractfile.side_effect = mock_extractfile
    mock_tarfile_open.return_value.__enter__.return_value = mock_tar
    mock_session_instance = MagicMock()
    mock_ok_response = MagicMock(status_code=200)
    mock_ok_response.raise_for_status.return_value = None
    mock_fail_http_response = MagicMock(status_code=400)
    mock_fail_http_response.json.return_value = {'resourceType': 'OperationOutcome', 'issue': [{'severity': 'error', 'diagnostics': 'Validation failed'}]}
    mock_fail_exception = requests.exceptions.HTTPError(response=mock_fail_http_response)
    mock_fail_http_response.raise_for_status.side_effect = mock_fail_exception
    mock_session_instance.put.side_effect = [mock_ok_response, mock_fail_http_response]
    mock_session.return_value = mock_session_instance
    self.create_mock_tgz(filename, {'package/dummy.txt': 'content'})
    response = self.client.post(
        '/api/push-ig',
        data=json.dumps({
            'package_name': pkg_name,
            'version': pkg_version,
            'fhir_server_url': fhir_server_url,
            'include_dependencies': False,
            'api_key': 'test-api-key'
        }),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
    )
    self.assertEqual(response.status_code, 200)
    streamed_data = parse_ndjson(response.data)
    complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
    self.assertIsNotNone(complete_msg)
    summary = complete_msg.get('data', {})
    self.assertEqual(summary.get('status'), 'partial')
    self.assertEqual(summary.get('success_count'), 1)
    self.assertEqual(summary.get('failure_count'), 1)
    self.assertEqual(len(summary.get('failed_details')), 1)
    self.assertEqual(summary['failed_details'][0].get('resource'), 'Observation/fail1')
    self.assertIn('Validation failed', summary['failed_details'][0].get('error', ''))
    mock_os_exists.assert_called_with(os.path.join(self.test_packages_dir, filename))

@patch('os.path.exists', return_value=True)
@patch('app.services.get_package_metadata')
@patch('tarfile.open')
@patch('requests.Session')
def test_52_api_push_ig_with_dependency(self, mock_session, mock_tarfile_open, mock_get_metadata, mock_os_exists):
    main_pkg_name = 'main.dep.pkg'
    main_pkg_ver = '1.0'
    main_filename = f'{main_pkg_name}-{main_pkg_ver}.tgz'
    dep_pkg_name = 'dep.pkg'
    dep_pkg_ver = '1.0'
    dep_filename = f'{dep_pkg_name}-{dep_pkg_ver}.tgz'
    fhir_server_url = 'http://dep-fhir.com/baseR4'
    self.create_mock_tgz(main_filename, {'package/Patient-main.json': {'resourceType': 'Patient', 'id': 'main'}})
    self.create_mock_tgz(dep_filename, {'package/Observation-dep.json': {'resourceType': 'Observation', 'id': 'dep'}})
    mock_get_metadata.return_value = {'imported_dependencies': [{'name': dep_pkg_name, 'version': dep_pkg_ver}]}
    mock_main_tar = MagicMock()
    main_member = MagicMock(spec=tarfile.TarInfo)
    main_member.name = 'package/Patient-main.json'
    main_member.isfile.return_value = True
    mock_main_tar.getmembers.return_value = [main_member]
    mock_main_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Patient', 'id': 'main'}).encode('utf-8'))
    mock_dep_tar = MagicMock()
    dep_member = MagicMock(spec=tarfile.TarInfo)
    dep_member.name = 'package/Observation-dep.json'
    dep_member.isfile.return_value = True
    mock_dep_tar.getmembers.return_value = [dep_member]
    mock_dep_tar.extractfile.return_value = io.BytesIO(json.dumps({'resourceType': 'Observation', 'id': 'dep'}).encode('utf-8'))

    # FIX: Restore full tar_opener definition
    def tar_opener(path, mode):
        mock_tar_ctx = MagicMock()
        if main_filename in path:
            mock_tar_ctx.__enter__.return_value = mock_main_tar
        elif dep_filename in path:
            mock_tar_ctx.__enter__.return_value = mock_dep_tar
        else:
            empty_mock_tar = MagicMock()
            empty_mock_tar.getmembers.return_value = []
            mock_tar_ctx.__enter__.return_value = empty_mock_tar
        return mock_tar_ctx

    mock_tarfile_open.side_effect = tar_opener
    mock_session_instance = MagicMock()
    mock_put_response = MagicMock(status_code=200)
    mock_put_response.raise_for_status.return_value = None
    mock_session_instance.put.return_value = mock_put_response
    mock_session.return_value = mock_session_instance
    response = self.client.post(
        '/api/push-ig',
        data=json.dumps({
            'package_name': main_pkg_name,
            'version': main_pkg_ver,
            'fhir_server_url': fhir_server_url,
            'include_dependencies': True,
            'api_key': 'test-api-key'
        }),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key', 'Accept': 'application/x-ndjson'}
    )
    self.assertEqual(response.status_code, 200)
    streamed_data = parse_ndjson(response.data)
    complete_msg = next((item for item in streamed_data if item.get('type') == 'complete'), None)
    self.assertIsNotNone(complete_msg)
    summary = complete_msg.get('data', {})
    self.assertEqual(summary.get('status'), 'success')
    self.assertEqual(summary.get('success_count'), 2)
    self.assertEqual(len(summary.get('pushed_packages_summary')), 2)
    mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, main_filename))
    mock_os_exists.assert_any_call(os.path.join(self.test_packages_dir, dep_filename))
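
# NOTE (illustrative, not part of this change set): create_mock_tgz() is a
# test-class helper defined earlier in this module and not shown here. Assuming
# it writes a .tgz into self.test_packages_dir whose members are either dicts
# (serialized as JSON) or plain strings, a rough sketch might be:
def create_mock_tgz(self, filename, files):
    tgz_path = os.path.join(self.test_packages_dir, filename)
    with tarfile.open(tgz_path, 'w:gz') as tar:
        for member_name, content in files.items():
            # Serialize dict members as JSON; treat anything else as raw text.
            data = json.dumps(content).encode('utf-8') if isinstance(content, dict) else str(content).encode('utf-8')
            info = tarfile.TarInfo(name=member_name)
            info.size = len(data)
            tar.addfile(info, io.BytesIO(data))
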
# --- Helper Route Tests ---
@patch('app.ProcessedIg.query')
@patch('app.services.find_and_extract_sd')
@patch('os.path.exists')
def test_60_get_structure_definition_success(self, mock_exists, mock_find_sd, mock_query):
    pkg_name = 'struct.test'
    pkg_version = '1.0'
    resource_type = 'Patient'
    mock_exists.return_value = True
    mock_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Patient.name', 'min': 1}, {'id': 'Patient.birthDate', 'mustSupport': True}]}}
    mock_find_sd.return_value = (mock_sd_data, 'path/to/sd.json')
    mock_processed_ig = MagicMock()
    mock_processed_ig.must_support_elements = {resource_type: ['Patient.birthDate']}
    mock_query.filter_by.return_value.first.return_value = mock_processed_ig
    response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertEqual(data['must_support_paths'], ['Patient.birthDate'])

@patch('app.services.import_package_and_dependencies')
@patch('app.services.find_and_extract_sd')
@patch('os.path.exists')
def test_61_get_structure_definition_fallback(self, mock_exists, mock_find_sd, mock_import):
    pkg_name = 'struct.test'
    pkg_version = '1.0'
    core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE
    resource_type = 'Observation'

    def exists_side_effect(path):
        return True

    mock_exists.side_effect = exists_side_effect
    mock_core_sd_data = {'resourceType': 'StructureDefinition', 'snapshot': {'element': [{'id': 'Observation.status'}]}}

    def find_sd_side_effect(path, identifier, profile_url=None):
        if f"{pkg_name}-{pkg_version}.tgz" in path:
            return (None, None)
        if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
            return (mock_core_sd_data, 'path/obs.json')
        return (None, None)

    mock_find_sd.side_effect = find_sd_side_effect
    with patch('app.ProcessedIg.query') as mock_query:
        mock_query.filter_by.return_value.first.return_value = None
        response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type={resource_type}')
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertTrue(data['fallback_used'])

@patch('app.services.find_and_extract_sd', return_value=(None, None))
@patch('app.services.import_package_and_dependencies')
@patch('os.path.exists')
def test_62_get_structure_definition_not_found_anywhere(self, mock_exists, mock_import, mock_find_sd):
    pkg_name = 'no.sd.pkg'
    pkg_version = '1.0'
    core_pkg_name, core_pkg_version = services.CANONICAL_PACKAGE

    def exists_side_effect(path):
        if f"{pkg_name}-{pkg_version}.tgz" in path:
            return True
        if f"{core_pkg_name}-{core_pkg_version}.tgz" in path:
            return False
        return False

    mock_exists.side_effect = exists_side_effect
    mock_import.return_value = {'errors': ['Download failed'], 'downloaded': False}
    response = self.client.get(f'/get-structure?package_name={pkg_name}&package_version={pkg_version}&resource_type=Whatever')
    self.assertEqual(response.status_code, 500)
    data = json.loads(response.data)
    self.assertIn('failed to download core package', data['error'])

def test_63_get_example_content_success(self):
    pkg_name = 'example.test'
    pkg_version = '1.0'
    filename = f"{pkg_name}-{pkg_version}.tgz"
    example_path = 'package/Patient-example.json'
    example_content = {'resourceType': 'Patient', 'id': 'example'}
    self.create_mock_tgz(filename, {example_path: example_content})
    response = self.client.get(f'/get-example?package_name={pkg_name}&package_version={pkg_version}&filename={example_path}')
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertEqual(data, example_content)

def test_64_get_package_metadata_success(self):
    pkg_name = 'metadata.test'
    pkg_version = '1.0'
    metadata_filename = f"{pkg_name}-{pkg_version}.metadata.json"
    metadata_content = {'package_name': pkg_name, 'version': pkg_version, 'dependency_mode': 'tree-shaking'}
    metadata_path = os.path.join(app.config['FHIR_PACKAGES_DIR'], metadata_filename)
    with open(metadata_path, 'w') as metadata_file:
        metadata_file.write(json.dumps(metadata_content))
    response = self.client.get(f'/get-package-metadata?package_name={pkg_name}&version={pkg_version}')
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertEqual(data.get('dependency_mode'), 'tree-shaking')

# --- Validation API Tests (/api/validate-sample) ---
# FIX: Use patch.object decorator targeting the imported services module
@patch('os.path.exists', return_value=True)
@patch.object(services, 'validate_resource_against_profile')
def test_60_api_validate_sample_single_success(self, mock_validate, mock_os_exists):  # Note order change
    pkg_name = 'validate.pkg'
    pkg_version = '1.0'
    mock_validate.return_value = {'valid': True, 'errors': [], 'warnings': [], 'details': [], 'resource_type': 'Patient', 'resource_id': 'valid1', 'profile': 'P', 'summary': {'error_count': 0, 'warning_count': 0}}
    sample_resource = {'resourceType': 'Patient', 'id': 'valid1', 'meta': {'profile': ['P']}}
    response = self.client.post(
        '/api/validate-sample',
        data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'sample_data': json.dumps(sample_resource), 'mode': 'single', 'include_dependencies': True}),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key'}
    )
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertTrue(data['valid'])
    mock_validate.assert_called_once_with(pkg_name, pkg_version, sample_resource, include_dependencies=True)

# FIX: Use patch.object decorator
@patch('os.path.exists', return_value=True)
@patch.object(services, 'validate_resource_against_profile')
def test_61_api_validate_sample_single_failure(self, mock_validate, mock_os_exists):  # Note order change
    pkg_name = 'validate.pkg'
    pkg_version = '1.0'
    mock_validate.return_value = {'valid': False, 'errors': ['E1'], 'warnings': ['W1'], 'details': [], 'resource_type': 'Patient', 'resource_id': 'invalid1', 'profile': 'P', 'summary': {'error_count': 1, 'warning_count': 1}}
    sample_resource = {'resourceType': 'Patient', 'id': 'invalid1', 'meta': {'profile': ['P']}}
    response = self.client.post(
        '/api/validate-sample',
        data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'sample_data': json.dumps(sample_resource), 'mode': 'single', 'include_dependencies': False}),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key'}
    )
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertFalse(data['valid'])
    mock_validate.assert_called_once_with(pkg_name, pkg_version, sample_resource, include_dependencies=False)

# FIX: Use patch.object decorator
@patch('os.path.exists', return_value=True)
@patch.object(services, 'validate_bundle_against_profile')
def test_62_api_validate_sample_bundle_success(self, mock_validate_bundle, mock_os_exists):  # Note order change
    pkg_name = 'validate.pkg'
    pkg_version = '1.0'
    mock_validate_bundle.return_value = {'valid': True, 'errors': [], 'warnings': [], 'details': [], 'results': {'Patient/p1': {'valid': True, 'errors': [], 'warnings': []}}, 'summary': {'resource_count': 1, 'failed_resources': 0, 'profiles_validated': ['P'], 'error_count': 0, 'warning_count': 0}}
    sample_bundle = {'resourceType': 'Bundle', 'type': 'collection', 'entry': [{'resource': {'resourceType': 'Patient', 'id': 'p1'}}]}
    response = self.client.post(
        '/api/validate-sample',
        data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'sample_data': json.dumps(sample_bundle), 'mode': 'bundle', 'include_dependencies': True}),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key'}
    )
    self.assertEqual(response.status_code, 200)
    data = json.loads(response.data)
    self.assertTrue(data['valid'])
    mock_validate_bundle.assert_called_once_with(pkg_name, pkg_version, sample_bundle, include_dependencies=True)

def test_63_api_validate_sample_invalid_json(self):
    pkg_name = 'p'
    pkg_version = '1'
    self.create_mock_tgz(f"{pkg_name}-{pkg_version}.tgz", {'package/dummy.txt': 'content'})
    response = self.client.post(
        '/api/validate-sample',
        data=json.dumps({'package_name': pkg_name, 'version': pkg_version, 'sample_data': '{"key": "value", invalid}', 'mode': 'single', 'include_dependencies': True}),
        content_type='application/json',
        headers={'X-API-Key': 'test-api-key'}
    )
    self.assertEqual(response.status_code, 400)
    data = json.loads(response.data)
    self.assertIn('Invalid JSON', data.get('errors', [''])[0])

if __name__ == '__main__':
    unittest.main()
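
# The suite can be run by executing this module directly (which calls
# unittest.main() above), or, assuming the file name matches unittest's default
# test_*.py discovery pattern, via `python -m unittest` from the project root.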