Merge pull request #17 from Sudo-JHare/Patch

QOL Changes
Joshua Hare 2025-05-22 20:08:26 +10:00 committed by GitHub
commit 5c559f9652
10 changed files with 2297 additions and 1319 deletions

app.py

@ -16,6 +16,7 @@ from urllib.parse import urlparse
from cachetools import TTLCache
from types import SimpleNamespace
import tarfile
import base64
import json
import logging
import requests
@ -42,6 +43,7 @@ from services import (
from forms import IgImportForm, ValidationForm, FSHConverterForm, TestDataUploadForm, RetrieveSplitDataForm
from wtforms import SubmitField
from package import package_bp
from flasgger import Swagger, swag_from # Import Flasgger
from copy import deepcopy
import tempfile
from logging.handlers import RotatingFileHandler
@ -60,11 +62,45 @@ app.config['APP_BASE_URL'] = os.environ.get('APP_BASE_URL', 'http://localhost:50
app.config['HAPI_FHIR_URL'] = os.environ.get('HAPI_FHIR_URL', 'http://localhost:8080/fhir')
CONFIG_PATH = '/usr/local/tomcat/conf/application.yaml'
# Basic Swagger configuration
app.config['SWAGGER'] = {
'title': 'FHIRFLARE IG Toolkit API',
'uiversion': 3, # Use Swagger UI 3
'version': '1.0.0',
'description': 'API documentation for the FHIRFLARE IG Toolkit. This provides access to various FHIR IG management and validation functionalities.',
'termsOfService': 'https://example.com/terms', # Replace with your terms
'contact': {
'name': 'FHIRFLARE Support',
'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/issues', # Replace with your support URL
'email': 'xsannz@gmail.com', # Replace with your support email
},
'license': {
'name': 'MIT License', # Or your project's license
'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/blob/main/LICENSE.md', # Link to your license
},
'securityDefinitions': { # Defines how API key security is handled
'ApiKeyAuth': {
'type': 'apiKey',
'name': 'X-API-Key', # The header name for the API key
'in': 'header',
'description': 'API Key for accessing protected endpoints.'
}
},
# 'security': [{'ApiKeyAuth': []}], # Optional: Apply ApiKeyAuth globally to all Flasgger-documented API endpoints by default
# If you set this, individual public endpoints would need 'security': [] in their swag_from spec.
# It's often better to define security per-endpoint in @swag_from.
'specs_route': '/apidocs/' # URL for the Swagger UI. This makes url_for('flasgger.apidocs') work.
}
swagger = Swagger(app) # Initialize Flasgger with the app. This registers its routes.
# Register blueprints immediately after app setup
app.register_blueprint(services_bp, url_prefix='/api')
app.register_blueprint(package_bp)
logging.getLogger(__name__).info("Registered package_bp blueprint")
# Set max upload size (6 MB here; adjust as needed)
app.config['MAX_CONTENT_LENGTH'] = 6 * 1024 * 1024
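
For reference, a minimal client sketch of the ApiKeyAuth scheme defined above: the key travels in the X-API-Key header (enforced on protected endpoints via check_api_key()), and the interactive docs are served from the configured specs_route. The base URL and key value below are placeholders, not values from this commit.

import requests

BASE_URL = "http://localhost:5000"   # assumption: local dev server; adjust to your deployment
API_KEY = "your-api-key"             # assumption: whatever API_KEY the app is configured with

# Call a protected endpoint with the documented header
resp = requests.get(f"{BASE_URL}/api/config", headers={"X-API-Key": API_KEY})
print(resp.status_code)
print(resp.text[:500])  # HAPI application.yaml settings, returned as JSON per the spec

# The Swagger UI itself is served at the configured specs_route
print(f"Interactive docs: {BASE_URL}/apidocs/")
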
@ -364,6 +400,25 @@ def index():
return render_template('index.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now())
@app.route('/debug-routes')
@swag_from({
'tags': ['Debugging'],
'summary': 'List all application routes.',
'description': 'Provides a JSON list of all registered URL rules and their endpoints. Useful for development and debugging.',
'responses': {
'200': {
'description': 'A list of route strings.',
'schema': {
'type': 'array',
'items': {
'type': 'string',
'example': 'Endpoint: my_endpoint, Methods: GET,POST, URL: /my/url'
}
}
}
}
# No API key needed for this one, so you can add:
# 'security': []
})
def debug_routes():
"""
Debug endpoint to list all registered routes and their endpoints.
@ -375,6 +430,19 @@ def debug_routes():
@app.route('/api/config', methods=['GET'])
@csrf.exempt
@swag_from({
'tags': ['HAPI Configuration'],
'summary': 'Get HAPI FHIR server configuration.',
'description': 'Retrieves the current HAPI FHIR server configuration from the application.yaml file.',
'security': [{'ApiKeyAuth': []}], # Requires API Key
'responses': {
'200': {
'description': 'HAPI FHIR configuration.',
'schema': { 'type': 'object' } # You can be more specific if you know the YAML structure
},
'500': {'description': 'Error reading configuration file.'}
}
})
def get_config():
try:
with open(CONFIG_PATH, 'r') as file:
@ -386,6 +454,30 @@ def get_config():
@app.route('/api/config', methods=['POST'])
@csrf.exempt
@swag_from({
'tags': ['HAPI Configuration'],
'summary': 'Save HAPI FHIR server configuration.',
'description': 'Saves the provided HAPI FHIR server configuration to the application.yaml file.',
'security': [{'ApiKeyAuth': []}], # Requires API Key
'parameters': [
{
'name': 'config_payload', # Changed name to avoid conflict with function arg
'in': 'body',
'required': True,
'description': 'The HAPI FHIR configuration object.',
'schema': {
'type': 'object',
# Add example properties if you know them
'example': {'fhir_server': {'base_url': 'http://localhost:8080/fhir'}}
}
}
],
'responses': {
'200': {'description': 'Configuration saved successfully.'},
'400': {'description': 'Invalid request body.'},
'500': {'description': 'Error saving configuration file.'}
}
})
def save_config():
try:
config = request.get_json()
@ -399,6 +491,16 @@ def save_config():
@app.route('/api/restart-tomcat', methods=['POST'])
@csrf.exempt
@swag_from({
'tags': ['HAPI Configuration'],
'summary': 'Restart the Tomcat server.',
'description': 'Attempts to restart the Tomcat server using supervisorctl. Requires appropriate server permissions.',
'security': [{'ApiKeyAuth': []}], # Requires API Key
'responses': {
'200': {'description': 'Tomcat restart initiated successfully.'},
'500': {'description': 'Error restarting Tomcat (e.g., supervisorctl not found or command failed).'}
}
})
def restart_tomcat():
try:
result = subprocess.run(['supervisorctl', 'restart', 'tomcat'], capture_output=True, text=True)
@ -579,6 +681,16 @@ def perform_cache_refresh_and_log():
@app.route('/api/refresh-cache-task', methods=['POST'])
@csrf.exempt # Ensure CSRF is handled if needed, or keep exempt
@swag_from({
'tags': ['Package Management'],
'summary': 'Refresh FHIR package cache.',
'description': 'Triggers an asynchronous background task to clear and refresh the FHIR package cache from configured registries.',
'security': [{'ApiKeyAuth': []}], # Requires API Key
'responses': {
'202': {'description': 'Cache refresh process started in the background.'},
# Consider if other error codes are possible before task starts
}
})
def refresh_cache_task():
"""API endpoint to trigger the background cache refresh."""
# Note: Clearing queue here might interfere if multiple users click concurrently.
@ -598,6 +710,24 @@ def refresh_cache_task():
# Modify stream_import_logs - Simpler version: relies on thread putting [DONE]
@app.route('/stream-import-logs')
@swag_from({
'tags': ['Package Management'],
'summary': 'Stream package import logs.',
'description': 'Provides a Server-Sent Events (SSE) stream of logs generated during package import or cache refresh operations. The client should listen for "data:" events. The stream ends with "data: [DONE]".',
'produces': ['text/event-stream'],
# No API key usually for SSE streams if they are tied to an existing user session/action
# 'security': [],
'responses': {
'200': {
'description': 'An event stream of log messages.',
'schema': {
'type': 'string',
'format': 'text/event-stream',
'example': "data: INFO: Starting import...\ndata: INFO: Package downloaded.\ndata: [DONE]\n\n"
}
}
}
})
def stream_import_logs():
logger.debug("SSE connection established to stream-import-logs")
def generate():
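
A minimal sketch of consuming the SSE stream documented above with plain requests (no extra SSE library), assuming the app runs on a local dev server; the client reads "data:" lines until the "[DONE]" sentinel.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server

with requests.get(f"{BASE_URL}/stream-import-logs", stream=True) as resp:
    for raw_line in resp.iter_lines(decode_unicode=True):
        if not raw_line:
            continue  # skip blank separator lines between events
        if raw_line.startswith("data: "):
            payload = raw_line[len("data: "):]
            if payload == "[DONE]":
                break  # stream is finished
            print(payload)
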
@ -860,6 +990,23 @@ def view_ig(processed_ig_id):
config=current_app.config)
@app.route('/get-example')
@swag_from({
'tags': ['Package Management'],
'summary': 'Get a specific example resource from a package.',
'description': 'Retrieves the content of an example JSON file from a specified FHIR package and version.',
'parameters': [
{'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Name of the FHIR package.'},
{'name': 'version', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Version of the FHIR package.'},
{'name': 'filename', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Path to the example file within the package (e.g., "package/Patient-example.json").'},
{'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'Whether to include the HTML narrative in the response.'}
],
'responses': {
'200': {'description': 'The example FHIR resource in JSON format.', 'schema': {'type': 'object'}},
'400': {'description': 'Missing required query parameters or invalid file path.'},
'404': {'description': 'Package or example file not found.'},
'500': {'description': 'Server error during file retrieval or processing.'}
}
})
def get_example():
package_name = request.args.get('package_name')
version = request.args.get('version')
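
A small sketch of fetching one example per the query parameters documented above; the package name, version, and file path are illustrative values only.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server

params = {
    "package_name": "hl7.fhir.us.core",          # illustrative
    "version": "6.1.0",                          # illustrative
    "filename": "package/Patient-example.json",  # path inside the package, as in the spec example
    "include_narrative": "false",
}
resp = requests.get(f"{BASE_URL}/get-example", params=params)
resp.raise_for_status()
example_resource = resp.json()
print(example_resource.get("resourceType"), example_resource.get("id"))
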
@ -1010,6 +1157,38 @@ def generate_snapshot(structure_def, core_package_path, local_package_path):
return structure_def
@app.route('/get-structure')
@swag_from({
'tags': ['Package Management'],
'summary': 'Get a StructureDefinition from a package.',
'description': 'Retrieves a StructureDefinition, optionally generating or filtering for snapshot/differential views.',
'parameters': [
{'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True},
{'name': 'version', 'in': 'query', 'type': 'string', 'required': True},
{'name': 'resource_type', 'in': 'query', 'type': 'string', 'required': True, 'description': 'The resource type or profile ID.'},
{'name': 'view', 'in': 'query', 'type': 'string', 'required': False, 'default': 'snapshot', 'enum': ['snapshot', 'differential']},
{'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False},
{'name': 'raw', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'If true, returns the raw SD JSON.'},
{'name': 'profile_url', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Canonical URL of the profile to retrieve.'}
],
'responses': {
'200': {
'description': 'The StructureDefinition data.',
'schema': {
'type': 'object',
'properties': {
'elements': {'type': 'array', 'items': {'type': 'object'}},
'must_support_paths': {'type': 'array', 'items': {'type': 'string'}},
'search_parameters': {'type': 'array', 'items': {'type': 'object'}},
'fallback_used': {'type': 'boolean'},
'source_package': {'type': 'string'}
}
}
},
'400': {'description': 'Missing required parameters.'},
'404': {'description': 'StructureDefinition not found.'},
'500': {'description': 'Server error.'}
}
})
def get_structure():
package_name = request.args.get('package_name')
version = request.args.get('version')
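
Likewise for /get-structure, a sketch of requesting a snapshot view and reading the element list and must-support paths from the documented response shape; the profile values are illustrative.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server

params = {
    "package_name": "hl7.fhir.us.core",  # illustrative
    "version": "6.1.0",
    "resource_type": "Patient",
    "view": "snapshot",
}
resp = requests.get(f"{BASE_URL}/get-structure", params=params)
resp.raise_for_status()
data = resp.json()
print(f"{len(data.get('elements', []))} elements, "
      f"{len(data.get('must_support_paths', []))} must-support paths, "
      f"fallback used: {data.get('fallback_used')}")
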
@ -1180,6 +1359,33 @@ def get_structure():
@app.route('/get-package-metadata')
@swag_from({
'tags': ['Package Management'],
'summary': 'Get metadata for a downloaded package.',
'parameters': [
{'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True},
{'name': 'version', 'in': 'query', 'type': 'string', 'required': True}
],
'responses': {
'200': {
'description': 'Package metadata.',
'schema': {
'type': 'object',
'properties': {
'package_name': {'type': 'string'},
'version': {'type': 'string'},
'dependency_mode': {'type': 'string'},
'imported_dependencies': {'type': 'array', 'items': {'type': 'object'}},
'complies_with_profiles': {'type': 'array', 'items': {'type': 'string'}},
'imposed_profiles': {'type': 'array', 'items': {'type': 'string'}}
}
}
},
'400': {'description': 'Missing parameters.'},
'404': {'description': 'Metadata not found.'},
'500': {'description': 'Server error.'}
}
})
def get_package_metadata():
package_name = request.args.get('package_name')
version = request.args.get('version')
@ -1203,6 +1409,38 @@ def get_package_metadata():
return jsonify({'error': f'Error retrieving metadata: {str(e)}'}), 500
@app.route('/api/import-ig', methods=['POST'])
@swag_from({
'tags': ['Package Management'],
'summary': 'Import a FHIR Implementation Guide via API.',
'description': 'Downloads and processes a FHIR IG and its dependencies.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['application/json'],
'parameters': [
{
'name': 'body',
'in': 'body',
'required': True,
'schema': {
'type': 'object',
'required': ['package_name', 'version'],
'properties': {
'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
'version': {'type': 'string', 'example': '6.1.0'},
'dependency_mode': {
'type': 'string', 'enum': ['recursive', 'patch-canonical', 'tree-shaking', 'direct'],
'default': 'recursive'
}
}
}
}
],
'responses': {
'200': {'description': 'Package imported successfully or with warnings.'},
'400': {'description': 'Invalid request (e.g., missing fields, invalid mode).'},
'404': {'description': 'Package not found on registry.'},
'500': {'description': 'Server error during import.'}
}
})
def api_import_ig():
auth_error = check_api_key()
if auth_error:
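
A sketch of the /api/import-ig call described by the spec above: a JSON body with package_name, version, and an optional dependency_mode, authenticated with the X-API-Key header. Values are placeholders.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server
API_KEY = "your-api-key"            # assumption: the app's configured API key

body = {
    "package_name": "hl7.fhir.us.core",
    "version": "6.1.0",
    "dependency_mode": "recursive",  # one of: recursive, patch-canonical, tree-shaking, direct
}
resp = requests.post(f"{BASE_URL}/api/import-ig",
                     json=body,
                     headers={"X-API-Key": API_KEY})
print(resp.status_code, resp.json())
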
@ -1275,6 +1513,48 @@ def api_import_ig():
return jsonify({"status": "error", "message": f"Unexpected server error during import: {str(e)}"}), 500
@app.route('/api/push-ig', methods=['POST'])
@csrf.exempt # Retain CSRF exemption as specified
@swag_from({
'tags': ['Package Management'],
'summary': 'Push a FHIR Implementation Guide to a server via API.',
'description': 'Uploads resources from a specified FHIR IG (and optionally its dependencies) to a target FHIR server. Returns an NDJSON stream of progress.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['application/json'],
'produces': ['application/x-ndjson'],
'parameters': [
{
'name': 'body',
'in': 'body',
'required': True,
'schema': {
'type': 'object',
'required': ['package_name', 'version', 'fhir_server_url'],
'properties': {
'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
'version': {'type': 'string', 'example': '6.1.0'},
'fhir_server_url': {'type': 'string', 'format': 'url', 'example': 'http://localhost:8080/fhir'},
'include_dependencies': {'type': 'boolean', 'default': True},
'auth_type': {'type': 'string', 'enum': ['apiKey', 'bearerToken', 'basic', 'none'], 'default': 'none'},
'auth_token': {'type': 'string', 'description': 'Required if auth_type is bearerToken or basic (for basic, use "Basic <base64_encoded_user:pass>")'},
'username': {'type': 'string', 'description': 'Required if auth_type is basic'},
'password': {'type': 'string', 'format': 'password', 'description': 'Required if auth_type is basic'},
'resource_types_filter': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of resource types to include.'},
'skip_files': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of specific file paths within packages to skip.'},
'dry_run': {'type': 'boolean', 'default': False},
'verbose': {'type': 'boolean', 'default': False},
'force_upload': {'type': 'boolean', 'default': False, 'description': 'If true, uploads resources even if they appear identical to server versions.'}
}
}
}
],
'responses': {
'200': {'description': 'NDJSON stream of push progress and results.'},
'400': {'description': 'Invalid request parameters.'},
'401': {'description': 'Authentication error.'},
'404': {'description': 'Package not found locally.'},
'500': {'description': 'Server error during push operation setup.'}
}
})
def api_push_ig():
auth_error = check_api_key()
if auth_error: return auth_error
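
The push endpoint streams NDJSON progress lines, so a client reads the response incrementally rather than waiting for a single JSON document. A sketch with placeholder server and credentials:

import json
import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server
API_KEY = "your-api-key"            # assumption

body = {
    "package_name": "hl7.fhir.us.core",
    "version": "6.1.0",
    "fhir_server_url": "http://localhost:8080/fhir",
    "include_dependencies": True,
    "auth_type": "none",
    "dry_run": True,  # inspect what would be pushed without writing to the server
}
with requests.post(f"{BASE_URL}/api/push-ig", json=body,
                   headers={"X-API-Key": API_KEY, "Accept": "application/x-ndjson"},
                   stream=True) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line:
            event = json.loads(line)  # one JSON object per line: "start", "info", "progress", "warning", "error", ...
            print(event.get("type"), event.get("message"))
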
@ -1287,20 +1567,23 @@ def api_push_ig():
include_dependencies = data.get('include_dependencies', True)
auth_type = data.get('auth_type', 'none')
auth_token = data.get('auth_token')
username = data.get('username') # ADD: Extract username
password = data.get('password') # ADD: Extract password
resource_types_filter_raw = data.get('resource_types_filter')
skip_files_raw = data.get('skip_files')
dry_run = data.get('dry_run', False)
verbose = data.get('verbose', False)
force_upload = data.get('force_upload', False) # <<< ADD: Extract force_upload
force_upload = data.get('force_upload', False)
# --- Input Validation (Assume previous validation is sufficient) ---
# --- Input Validation ---
if not all([package_name, version, fhir_server_url]): return jsonify({"status": "error", "message": "Missing required fields"}), 400
# ... (Keep other specific validations as needed) ...
valid_auth_types = ['apiKey', 'bearerToken', 'none'];
valid_auth_types = ['apiKey', 'bearerToken', 'basic', 'none'] # ADD: 'basic' to valid auth types
if auth_type not in valid_auth_types: return jsonify({"status": "error", "message": f"Invalid auth_type."}), 400
if auth_type == 'bearerToken' and not auth_token: return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400
if auth_type == 'basic' and (not username or not password): # ADD: Validate Basic Auth inputs
return jsonify({"status": "error", "message": "Username and password required for Basic Authentication."}), 400
# Parse filters (same as before)
# Parse filters (unchanged)
resource_types_filter = None
if resource_types_filter_raw:
if isinstance(resource_types_filter_raw, list): resource_types_filter = [s for s in resource_types_filter_raw if isinstance(s, str)]
@ -1312,14 +1595,17 @@ def api_push_ig():
elif isinstance(skip_files_raw, str): skip_files = [s.strip().replace('\\', '/') for s in re.split(r'[,\n]', skip_files_raw) if s.strip()]
else: return jsonify({"status": "error", "message": "Invalid skip_files format."}), 400
# --- File Path Setup (Same as before) ---
# --- File Path Setup (unchanged) ---
packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
if not packages_dir: return jsonify({"status": "error", "message": "Server config error: Package dir missing."}), 500
# ... (check if package tgz exists - same as before) ...
tgz_filename = services.construct_tgz_filename(package_name, version)
tgz_path = os.path.join(packages_dir, tgz_filename)
if not os.path.exists(tgz_path): return jsonify({"status": "error", "message": f"Package not found locally: {package_name}#{version}"}), 404
# ADD: Handle Basic Authentication
if auth_type == 'basic':
credentials = f"{username}:{password}"
auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}"
# --- Streaming Response ---
def generate_stream_wrapper():
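
For clarity, the Basic auth branch above simply base64-encodes "username:password" and prefixes it with "Basic ". A quick round-trip check (the credentials are made up):

import base64

credentials = "alice:secret"  # made-up example credentials
header_value = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}"
print(header_value)  # -> Basic YWxpY2U6c2VjcmV0

# and the reverse, to confirm what the receiving server decodes
decoded = base64.b64decode(header_value.split(" ", 1)[1]).decode("utf-8")
assert decoded == credentials
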
@ -1328,8 +1614,7 @@ def api_push_ig():
include_dependencies=include_dependencies, auth_type=auth_type,
auth_token=auth_token, resource_types_filter=resource_types_filter,
skip_files=skip_files, dry_run=dry_run, verbose=verbose,
force_upload=force_upload, # <<< ADD: Pass force_upload
packages_dir=packages_dir
force_upload=force_upload, packages_dir=packages_dir
)
return Response(generate_stream_wrapper(), mimetype='application/x-ndjson')
@ -1529,6 +1814,34 @@ def proxy_hapi(subpath):
@app.route('/api/load-ig-to-hapi', methods=['POST'])
@swag_from({
'tags': ['HAPI Integration'],
'summary': 'Load an IG into the local HAPI FHIR server.',
'description': 'Extracts all resources from a specified IG package and PUTs them to the configured HAPI FHIR server.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['application/json'],
'parameters': [
{
'name': 'body',
'in': 'body',
'required': True,
'schema': {
'type': 'object',
'required': ['package_name', 'version'],
'properties': {
'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
'version': {'type': 'string', 'example': '6.1.0'}
}
}
}
],
'responses': {
'200': {'description': 'Package loaded to HAPI successfully.'},
'400': {'description': 'Invalid request (e.g., missing package_name/version).'},
'404': {'description': 'Package not found locally.'},
'500': {'description': 'Error loading IG to HAPI (e.g., HAPI server connection issue, resource upload failure).'}
}
})
def load_ig_to_hapi():
data = request.get_json()
package_name = data.get('package_name')
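
A sketch of the corresponding client call, assuming a local dev server and a package that has already been downloaded:

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server
API_KEY = "your-api-key"            # assumption

resp = requests.post(f"{BASE_URL}/api/load-ig-to-hapi",
                     json={"package_name": "hl7.fhir.us.core", "version": "6.1.0"},
                     headers={"X-API-Key": API_KEY})
print(resp.status_code, resp.text)
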
@ -1744,41 +2057,58 @@ def upload_test_data():
return render_template('upload_test_data.html', title="Upload Test Data", form=form, api_key=api_key)
# --- Updated /api/upload-test-data Endpoint ---
@app.route('/api/upload-test-data', methods=['POST'])
@csrf.exempt
@swag_from({
'tags': ['Test Data Management'],
'summary': 'Upload and process FHIR test data.',
'description': 'Handles multipart/form-data uploads of FHIR resources (JSON, XML, or ZIP containing these) for processing and uploading to a target FHIR server. Returns an NDJSON stream of progress.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['multipart/form-data'],
'produces': ['application/x-ndjson'],
'parameters': [
{'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': True, 'format': 'url', 'description': 'Target FHIR server URL.'},
{'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearerToken', 'basic'], 'default': 'none'},
{'name': 'auth_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearerToken.'},
{'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'},
{'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'},
{'name': 'test_data_files', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'One or more FHIR resource files (JSON, XML) or ZIP archives containing them.'},
{'name': 'validate_before_upload', 'in': 'formData', 'type': 'boolean', 'default': False},
{'name': 'validation_package_id', 'in': 'formData', 'type': 'string', 'description': 'Package ID (name#version) for validation, if validate_before_upload is true.'},
{'name': 'upload_mode', 'in': 'formData', 'type': 'string', 'enum': ['individual', 'transaction'], 'default': 'individual'},
{'name': 'use_conditional_uploads', 'in': 'formData', 'type': 'boolean', 'default': True, 'description': 'For individual mode, use conditional logic (GET then PUT/POST).'},
{'name': 'error_handling', 'in': 'formData', 'type': 'string', 'enum': ['stop', 'continue'], 'default': 'stop'}
],
'responses': {
'200': {'description': 'NDJSON stream of upload progress and results.'},
'400': {'description': 'Invalid request parameters or file types.'},
'401': {'description': 'Authentication error.'},
'413': {'description': 'Request entity too large.'},
'500': {'description': 'Server error during upload processing.'}
}
})
def api_upload_test_data():
"""API endpoint to handle test data upload and processing, using custom parser."""
auth_error = check_api_key();
auth_error = check_api_key()
if auth_error: return auth_error
temp_dir = None # Initialize temp_dir to ensure cleanup happens
temp_dir = None
try:
# --- Use Custom Form Parser ---
# Instantiate the custom parser with the desired limit
parser = CustomFormDataParser()
#parser = CustomFormDataParser(max_form_parts=2000) # Match the class definition or set higher if needed
# Parse the request using the custom parser
# We need the stream, mimetype, content_length, and options from the request
# Note: Accessing request.stream consumes it, do this first.
stream = request.stream
mimetype = request.mimetype
content_length = request.content_length
options = request.mimetype_params
# The parse method returns (stream, form_dict, files_dict)
# stream: A wrapper around the original stream
# form_dict: A MultiDict containing non-file form fields
# files_dict: A MultiDict containing FileStorage objects for uploaded files
_, form_data, files_data = parser.parse(stream, mimetype, content_length, options)
logger.debug(f"Form parsed using CustomFormDataParser. Form fields: {len(form_data)}, Files: {len(files_data)}")
# --- END Custom Form Parser Usage ---
# --- Extract Form Data (using parsed data) ---
# --- Extract Form Data ---
fhir_server_url = form_data.get('fhir_server_url')
auth_type = form_data.get('auth_type', 'none')
auth_token = form_data.get('auth_token')
username = form_data.get('username')
password = form_data.get('password')
upload_mode = form_data.get('upload_mode', 'individual')
error_handling = form_data.get('error_handling', 'stop')
validate_before_upload_str = form_data.get('validate_before_upload', 'false')
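
Because this endpoint consumes multipart/form-data and streams NDJSON back, a client builds a files/form payload rather than a JSON body. A sketch with placeholder paths and server values:

import json
import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server
API_KEY = "your-api-key"            # assumption

form = {
    "fhir_server_url": "http://localhost:8080/fhir",
    "auth_type": "none",
    "upload_mode": "individual",
    "use_conditional_uploads": "true",
    "error_handling": "continue",
    "validate_before_upload": "false",
}
with open("patients.zip", "rb") as fh:  # placeholder path; JSON, XML, or ZIP files are accepted
    files = [("test_data_files", ("patients.zip", fh, "application/zip"))]
    with requests.post(f"{BASE_URL}/api/upload-test-data",
                       data=form, files=files,
                       headers={"X-API-Key": API_KEY}, stream=True) as resp:
        for line in resp.iter_lines(decode_unicode=True):
            if line:
                print(json.loads(line))
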
@ -1789,42 +2119,62 @@ def api_upload_test_data():
logger.debug(f"API Upload Request Params: validate={validate_before_upload}, pkg_id={validation_package_id}, conditional={use_conditional_uploads}")
# --- Basic Validation (using parsed data) ---
if not fhir_server_url or not fhir_server_url.startswith(('http://', 'https://')): return jsonify({"status": "error", "message": "Invalid Target FHIR Server URL."}), 400
if auth_type not in ['none', 'bearerToken']: return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
if auth_type == 'bearerToken' and not auth_token: return jsonify({"status": "error", "message": "Bearer Token required."}), 400
if upload_mode not in ['individual', 'transaction']: return jsonify({"status": "error", "message": "Invalid Upload Mode."}), 400
if error_handling not in ['stop', 'continue']: return jsonify({"status": "error", "message": "Invalid Error Handling mode."}), 400
if validate_before_upload and not validation_package_id: return jsonify({"status": "error", "message": "Validation Package ID required."}), 400
# --- Basic Validation ---
if not fhir_server_url or not fhir_server_url.startswith(('http://', 'https://')):
return jsonify({"status": "error", "message": "Invalid Target FHIR Server URL."}), 400
if auth_type not in ['none', 'bearerToken', 'basic']:
return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
if auth_type == 'bearerToken' and not auth_token:
return jsonify({"status": "error", "message": "Bearer Token required."}), 400
if auth_type == 'basic' and (not username or not password):
return jsonify({"status": "error", "message": "Username and Password required for Basic Authentication."}), 400
if upload_mode not in ['individual', 'transaction']:
return jsonify({"status": "error", "message": "Invalid Upload Mode."}), 400
if error_handling not in ['stop', 'continue']:
return jsonify({"status": "error", "message": "Invalid Error Handling mode."}), 400
if validate_before_upload and not validation_package_id:
return jsonify({"status": "error", "message": "Validation Package ID required."}), 400
# --- Handle File Uploads (using parsed data) ---
# Use files_data obtained from the custom parser
# --- Handle File Uploads ---
uploaded_files = files_data.getlist('test_data_files')
if not uploaded_files or all(f.filename == '' for f in uploaded_files): return jsonify({"status": "error", "message": "No files selected."}), 400
if not uploaded_files or all(f.filename == '' for f in uploaded_files):
return jsonify({"status": "error", "message": "No files selected."}), 400
temp_dir = tempfile.mkdtemp(prefix='fhirflare_upload_')
saved_file_paths = []
allowed_extensions = {'.json', '.xml', '.zip'}
try:
for file_storage in uploaded_files: # Iterate through FileStorage objects
for file_storage in uploaded_files:
if file_storage and file_storage.filename:
filename = secure_filename(file_storage.filename)
file_ext = os.path.splitext(filename)[1].lower()
if file_ext not in allowed_extensions: raise ValueError(f"Invalid file type: '{filename}'. Only JSON, XML, ZIP allowed.")
if file_ext not in allowed_extensions:
raise ValueError(f"Invalid file type: '{filename}'. Only JSON, XML, ZIP allowed.")
save_path = os.path.join(temp_dir, filename)
file_storage.save(save_path) # Use the save method of FileStorage
file_storage.save(save_path)
saved_file_paths.append(save_path)
if not saved_file_paths: raise ValueError("No valid files saved.")
if not saved_file_paths:
raise ValueError("No valid files saved.")
logger.debug(f"Saved {len(saved_file_paths)} files to {temp_dir}")
except ValueError as ve:
if temp_dir and os.path.exists(temp_dir): shutil.rmtree(temp_dir)
logger.warning(f"Upload rejected: {ve}"); return jsonify({"status": "error", "message": str(ve)}), 400
if temp_dir and os.path.exists(temp_dir):
shutil.rmtree(temp_dir)
logger.warning(f"Upload rejected: {ve}")
return jsonify({"status": "error", "message": str(ve)}), 400
except Exception as file_err:
if temp_dir and os.path.exists(temp_dir): shutil.rmtree(temp_dir)
logger.error(f"Error saving uploaded files: {file_err}", exc_info=True); return jsonify({"status": "error", "message": "Error saving uploaded files."}), 500
if temp_dir and os.path.exists(temp_dir):
shutil.rmtree(temp_dir)
logger.error(f"Error saving uploaded files: {file_err}", exc_info=True)
return jsonify({"status": "error", "message": "Error saving uploaded files."}), 500
# --- Prepare Server Info and Options ---
server_info = {'url': fhir_server_url, 'auth_type': auth_type, 'auth_token': auth_token}
server_info = {'url': fhir_server_url, 'auth_type': auth_type}
if auth_type == 'bearerToken':
server_info['auth_token'] = auth_token
elif auth_type == 'basic':
credentials = f"{username}:{password}"
encoded_credentials = base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
server_info['auth_token'] = f"Basic {encoded_credentials}"
options = {
'upload_mode': upload_mode,
'error_handling': error_handling,
@ -1839,25 +2189,30 @@ def api_upload_test_data():
with app.app_context():
yield from services.process_and_upload_test_data(server_info, options, temp_dir)
finally:
try: logger.debug(f"Cleaning up temp dir: {temp_dir}"); shutil.rmtree(temp_dir)
except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir {temp_dir}: {cleanup_e}")
try:
logger.debug(f"Cleaning up temp dir: {temp_dir}")
shutil.rmtree(temp_dir)
except Exception as cleanup_e:
logger.error(f"Error cleaning up temp dir {temp_dir}: {cleanup_e}")
return Response(generate_stream_wrapper(), mimetype='application/x-ndjson')
except RequestEntityTooLarge as e:
# Catch the specific exception if the custom parser still fails (e.g., limit too low)
logger.error(f"RequestEntityTooLarge error in /api/upload-test-data despite custom parser: {e}", exc_info=True)
if temp_dir and os.path.exists(temp_dir):
try: shutil.rmtree(temp_dir)
except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
try:
shutil.rmtree(temp_dir)
except Exception as cleanup_e:
logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
return jsonify({"status": "error", "message": f"Upload failed: Request entity too large. Try increasing parser limit or reducing files/size. ({str(e)})"}), 413
except Exception as e:
# Catch other potential errors during parsing or setup
logger.error(f"Error in /api/upload-test-data: {e}", exc_info=True)
if temp_dir and os.path.exists(temp_dir):
try: shutil.rmtree(temp_dir)
except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
try:
shutil.rmtree(temp_dir)
except Exception as cleanup_e:
logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
return jsonify({"status": "error", "message": f"Unexpected server error: {str(e)}"}), 500
@app.route('/retrieve-split-data', methods=['GET', 'POST'])
@ -1889,6 +2244,36 @@ def retrieve_split_data():
api_key=app.config['API_KEY'])
@app.route('/api/retrieve-bundles', methods=['POST'])
@csrf.exempt
@swag_from({
'tags': ['Test Data Management'],
'summary': 'Retrieve FHIR resource bundles from a server.',
'description': 'Fetches bundles for specified resource types from a FHIR server. Optionally fetches referenced resources. Returns an NDJSON stream and prepares a ZIP file for download.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['application/x-www-form-urlencoded'], # Or multipart/form-data if files are involved
'produces': ['application/x-ndjson'],
'parameters': [
{'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': False, 'format': 'url', 'description': 'Target FHIR server URL. Defaults to local proxy (/fhir).'},
{'name': 'resources', 'in': 'formData', 'type': 'array', 'items': {'type': 'string'}, 'collectionFormat': 'multi', 'required': True, 'description': 'List of resource types to retrieve (e.g., Patient, Observation).'},
{'name': 'validate_references', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'Fetch resources referenced by the initial bundles.'},
{'name': 'fetch_reference_bundles', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'If fetching references, get full bundles for referenced types instead of individual resources.'},
{'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearer', 'basic'], 'default': 'none'},
{'name': 'bearer_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearer.'},
{'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'},
{'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'}
],
'responses': {
'200': {
'description': 'NDJSON stream of retrieval progress. X-Zip-Path header indicates path to the created ZIP file.',
'headers': {
'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file.'}
}
},
'400': {'description': 'Invalid request parameters.'},
'401': {'description': 'Authentication error.'},
'500': {'description': 'Server error during retrieval.'}
}
})
def api_retrieve_bundles():
auth_error = check_api_key()
if auth_error:
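
A client sketch for the retrieve endpoint: resource types are repeated form fields, progress comes back as NDJSON, and the X-Zip-Path header points at the ZIP the server produced. Placeholder values throughout.

import json
import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server
API_KEY = "your-api-key"            # assumption

form = [
    ("fhir_server_url", "/fhir"),   # default: the local HAPI proxy
    ("resources", "Patient"),       # repeated field becomes a list on the server side
    ("resources", "Observation"),
    ("validate_references", "false"),
    ("fetch_reference_bundles", "false"),
    ("auth_type", "none"),
]
with requests.post(f"{BASE_URL}/api/retrieve-bundles",
                   data=form, headers={"X-API-Key": API_KEY}, stream=True) as resp:
    zip_path = resp.headers.get("X-Zip-Path")  # e.g. /tmp/retrieved_bundles_<timestamp>.zip
    for line in resp.iter_lines(decode_unicode=True):
        if line:
            print(json.loads(line))
    print("Bundles written to:", zip_path)
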
@ -1896,50 +2281,90 @@ def api_retrieve_bundles():
# Use request.form for standard form data
params = request.form.to_dict()
resources = request.form.getlist('resources') # Get list of selected resources
# Get boolean flags, converting string 'true' to boolean True
resources = request.form.getlist('resources')
validate_references = params.get('validate_references', 'false').lower() == 'true'
# --- Get NEW flag ---
fetch_reference_bundles = params.get('fetch_reference_bundles', 'false').lower() == 'true'
# --- End NEW flag ---
auth_type = params.get('auth_type', 'none')
bearer_token = params.get('bearer_token')
username = params.get('username')
password = params.get('password')
# Basic validation
if not resources:
return jsonify({"status": "error", "message": "No resources selected."}), 400
# Get FHIR server URL, default to '/fhir' (which targets local proxy)
# Get FHIR server URL, default to '/fhir' (local proxy)
fhir_server_url = params.get('fhir_server_url', '/fhir').strip()
if not fhir_server_url: # Handle empty string case
if not fhir_server_url:
fhir_server_url = '/fhir'
logger.info(f"Retrieve API: Server='{fhir_server_url}', Resources={resources}, ValidateRefs={validate_references}, FetchRefBundles={fetch_reference_bundles}")
# Validation
if not resources:
return jsonify({"status": "error", "message": "No resources selected."}), 400
valid_auth_types = ['none', 'bearer', 'basic']
if auth_type not in valid_auth_types:
return jsonify({"status": "error", "message": f"Invalid auth_type. Must be one of {valid_auth_types}."}), 400
if auth_type == 'bearer' and not bearer_token:
return jsonify({"status": "error", "message": "Bearer token required for bearer authentication."}), 400
if auth_type == 'basic' and (not username or not password):
return jsonify({"status": "error", "message": "Username and password required for basic authentication."}), 400
# Ensure the temp directory exists (use Flask's configured upload folder or system temp)
# Using system temp is generally safer for transient data
# Handle authentication
auth_token = None
if auth_type == 'bearer':
auth_token = f"Bearer {bearer_token}"
elif auth_type == 'basic':
credentials = f"{username}:{password}"
auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}"
logger.info(f"Retrieve API: Server='{fhir_server_url}', Resources={resources}, ValidateRefs={validate_references}, FetchRefBundles={fetch_reference_bundles}, AuthType={auth_type}")
# Ensure the temp directory exists
temp_dir = tempfile.gettempdir()
# Generate a unique filename for the zip in the temp dir
zip_filename = f"retrieved_bundles_{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}.zip"
output_zip = os.path.join(temp_dir, zip_filename)
def generate():
# Pass the NEW flag to the service function
yield from retrieve_bundles(
try:
yield from services.retrieve_bundles(
fhir_server_url=fhir_server_url,
resources=resources,
output_zip=output_zip,
validate_references=validate_references,
fetch_reference_bundles=fetch_reference_bundles # Pass new flag
fetch_reference_bundles=fetch_reference_bundles,
auth_type=auth_type,
auth_token=auth_token
)
except Exception as e:
logger.error(f"Error in retrieve_bundles: {e}", exc_info=True)
yield json.dumps({"type": "error", "message": f"Unexpected error: {str(e)}"}) + "\n"
# Create the response *before* starting the generator
response = Response(generate(), mimetype='application/x-ndjson')
# Send back the *relative* path within the temp dir for download
response.headers['X-Zip-Path'] = os.path.join('/tmp', zip_filename) # Path for the /tmp/<filename> route
response.headers['X-Zip-Path'] = os.path.join('/tmp', zip_filename)
return response
@app.route('/api/split-bundles', methods=['POST'])
@swag_from({
'tags': ['Test Data Management'],
'summary': 'Split FHIR bundles from a ZIP into individual resources.',
'description': 'Takes a ZIP file containing FHIR bundles, extracts individual resources, and creates a new ZIP file with these resources. Returns an NDJSON stream of progress.',
'security': [{'ApiKeyAuth': []}],
'consumes': ['multipart/form-data'], # Assuming split_bundle_zip_path comes from a form that might include a file upload in other contexts, or it's a path string. If it's always a path string from a JSON body, change consumes.
'produces': ['application/x-ndjson'],
'parameters': [
# If split_bundle_zip_path is a path sent in form data:
{'name': 'split_bundle_zip_path', 'in': 'formData', 'type': 'string', 'required': True, 'description': 'Path to the input ZIP file containing bundles (server-side path).'},
# If it's an uploaded file:
# {'name': 'split_bundle_zip_file', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'ZIP file containing bundles to split.'}
],
'responses': {
'200': {
'description': 'NDJSON stream of splitting progress. X-Zip-Path header indicates path to the output ZIP file.',
'headers': {
'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file with split resources.'}
}
},
'400': {'description': 'Invalid request (e.g., missing input ZIP path/file).'},
'401': {'description': 'Authentication error.'},
'500': {'description': 'Server error during splitting.'}
}
})
def api_split_bundles():
auth_error = check_api_key()
if auth_error:
@ -1977,6 +2402,31 @@ def clear_session():
@app.route('/api/package/<name>', methods=['GET'])
@swag_from({
'tags': ['Package Management'],
'summary': 'Get details for a specific FHIR package.',
'description': 'Retrieves details for a FHIR IG package by its name. Data is sourced from ProcessedIg, CachedPackage, or fetched live from registries.',
'parameters': [
{'name': 'name', 'in': 'path', 'type': 'string', 'required': True, 'description': 'The canonical name of the package (e.g., hl7.fhir.us.core).'}
],
'responses': {
'200': {
'description': 'Package details.',
'schema': {
'type': 'object',
'properties': {
'name': {'type': 'string'},
'latest': {'type': 'string', 'description': 'Latest known version.'},
'author': {'type': 'string'},
'fhir_version': {'type': 'string'},
'version_count': {'type': 'integer'},
'url': {'type': 'string', 'format': 'url'}
}
}
},
'404': {'description': 'Package not found.'}
}
})
def package_details(name):
"""
Retrieve details for a specific FHIR Implementation Guide package by name.
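
A sketch of querying package details per the spec above (no API key is declared for this endpoint); the package name is illustrative.

import requests

BASE_URL = "http://localhost:5000"  # assumption: local dev server

resp = requests.get(f"{BASE_URL}/api/package/hl7.fhir.us.core")
if resp.status_code == 200:
    info = resp.json()
    print(info.get("name"), info.get("latest"), info.get("author"), info.get("version_count"))
else:
    print("Package not found:", resp.status_code)
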
@ -2252,6 +2702,19 @@ def search_and_import():
is_fetching=is_fetching)
@app.route('/api/search-packages', methods=['GET'], endpoint='api_search_packages')
@swag_from({
'tags': ['Package Management'],
'summary': 'Search FHIR packages (HTMX).',
'description': 'Searches the in-memory package cache. Returns an HTML fragment for HTMX to display matching packages. Primarily for UI interaction.',
'parameters': [
{'name': 'search', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Search term for package name or author.'},
{'name': 'page', 'in': 'query', 'type': 'integer', 'required': False, 'default': 1}
],
'produces': ['text/html'],
'responses': {
'200': {'description': 'HTML fragment containing the search results table.'}
}
})
def api_search_packages():
"""
Handles HTMX search requests. Filters packages from the in-memory cache.

forms.py

@ -1,69 +1,63 @@
# forms.py
from flask_wtf import FlaskForm
from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField, FileField
from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField, FileField, PasswordField
from wtforms.validators import DataRequired, Regexp, ValidationError, URL, Optional, InputRequired
from flask import request # Import request for file validation in FSHConverterForm
from flask import request
import json
import xml.etree.ElementTree as ET
import re
import logging # Import logging
import logging
import os
logger = logging.getLogger(__name__) # Setup logger if needed elsewhere
logger = logging.getLogger(__name__)
# Existing form classes (IgImportForm, ValidationForm, FSHConverterForm, TestDataUploadForm) remain unchanged
# Only providing RetrieveSplitDataForm
class RetrieveSplitDataForm(FlaskForm):
"""Form for retrieving FHIR bundles and splitting them into individual resources."""
fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()],
render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'})
validate_references = BooleanField('Fetch Referenced Resources', default=False, # Changed label slightly
auth_type = SelectField('Authentication Type (for Custom URL)', choices=[
('none', 'None'),
('bearerToken', 'Bearer Token'),
('basicAuth', 'Basic Authentication')
], default='none', validators=[Optional()])
auth_token = StringField('Bearer Token', validators=[Optional()],
render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'})
basic_auth_username = StringField('Username', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Username'})
basic_auth_password = PasswordField('Password', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Password'})
validate_references = BooleanField('Fetch Referenced Resources', default=False,
description="If checked, fetches resources referenced by the initial bundles.")
# --- NEW FIELD ---
fetch_reference_bundles = BooleanField('Fetch Full Reference Bundles (instead of individual resources)', default=False,
description="Requires 'Fetch Referenced Resources'. Fetches e.g. /Patient instead of Patient/id for each reference.",
render_kw={'data-dependency': 'validate_references'}) # Add data attribute for JS
# --- END NEW FIELD ---
render_kw={'data-dependency': 'validate_references'})
split_bundle_zip = FileField('Upload Bundles to Split (ZIP)', validators=[Optional()],
render_kw={'accept': '.zip'})
submit_retrieve = SubmitField('Retrieve Bundles')
submit_split = SubmitField('Split Bundles')
def validate(self, extra_validators=None):
"""Custom validation for RetrieveSplitDataForm."""
if not super().validate(extra_validators):
return False
# --- NEW VALIDATION LOGIC ---
# Ensure fetch_reference_bundles is only checked if validate_references is also checked
if self.fetch_reference_bundles.data and not self.validate_references.data:
self.fetch_reference_bundles.errors.append('Cannot fetch full reference bundles unless "Fetch Referenced Resources" is also checked.')
return False
# --- END NEW VALIDATION LOGIC ---
# Validate based on which submit button was pressed
if self.submit_retrieve.data:
# No specific validation needed here now, handled by URL validator and JS
pass
elif self.submit_split.data:
# Need to check bundle source radio button selection in backend/JS,
# but validate file if 'upload' is selected.
# This validation might need refinement based on how source is handled.
# Assuming 'split_bundle_zip' is only required if 'upload' source is chosen.
pass # Basic validation done by Optional() and file type checks below
# Validate file uploads (keep existing)
if self.auth_type.data == 'bearerToken' and self.submit_retrieve.data and not self.auth_token.data:
self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.')
return False
if self.auth_type.data == 'basicAuth' and self.submit_retrieve.data:
if not self.basic_auth_username.data:
self.basic_auth_username.errors.append('Username is required for Basic Authentication.')
return False
if not self.basic_auth_password.data:
self.basic_auth_password.errors.append('Password is required for Basic Authentication.')
return False
if self.split_bundle_zip.data:
if not self.split_bundle_zip.data.filename.lower().endswith('.zip'):
self.split_bundle_zip.errors.append('File must be a ZIP file.')
return False
return True
# Existing forms (IgImportForm, ValidationForm) remain unchanged
class IgImportForm(FlaskForm):
"""Form for importing Implementation Guides."""
package_name = StringField('Package Name', validators=[
@ -92,7 +86,6 @@ class ValidationForm(FlaskForm):
], default='single')
sample_input = TextAreaField('Sample Input', validators=[
DataRequired(),
# Removed lambda validator for simplicity, can be added back if needed
])
submit = SubmitField('Validate')
@ -117,7 +110,7 @@ class FSHConverterForm(FlaskForm):
('info', 'Info'),
('debug', 'Debug')
], validators=[DataRequired()])
fhir_version = SelectField('FHIR Version', choices=[ # Corrected label
fhir_version = SelectField('FHIR Version', choices=[
('', 'Auto-detect'),
('4.0.1', 'R4'),
('4.3.0', 'R4B'),
@ -136,116 +129,125 @@ class FSHConverterForm(FlaskForm):
submit = SubmitField('Convert to FSH')
def validate(self, extra_validators=None):
"""Custom validation for FSH Converter Form."""
# Run default validators first
if not super().validate(extra_validators):
return False
# Check file/text input based on mode
# Need to check request.files for file uploads as self.fhir_file.data might be None during initial POST validation
has_file_in_request = request and request.files and self.fhir_file.name in request.files and request.files[self.fhir_file.name].filename != ''
if self.input_mode.data == 'file' and not has_file_in_request:
# If it's not in request.files, check if data is already populated (e.g., on re-render after error)
if not self.fhir_file.data:
self.fhir_file.errors.append('File is required when input mode is Upload File.')
return False
if self.input_mode.data == 'text' and not self.fhir_text.data:
self.fhir_text.errors.append('Text input is required when input mode is Paste Text.')
return False
# Validate text input format
if self.input_mode.data == 'text' and self.fhir_text.data:
try:
content = self.fhir_text.data.strip()
if not content: # Empty text is technically valid but maybe not useful
pass # Allow empty text for now
elif content.startswith('{'):
json.loads(content)
elif content.startswith('<'):
ET.fromstring(content) # Basic XML check
if not content: pass
elif content.startswith('{'): json.loads(content)
elif content.startswith('<'): ET.fromstring(content)
else:
# If content exists but isn't JSON or XML, it's an error
self.fhir_text.errors.append('Text input must be valid JSON or XML.')
return False
except (json.JSONDecodeError, ET.ParseError):
self.fhir_text.errors.append('Invalid JSON or XML format.')
return False
# Validate dependency format
if self.dependencies.data:
for dep in self.dependencies.data.splitlines():
dep = dep.strip()
# Allow versions like 'current', 'dev', etc. but require package@version format
if dep and not re.match(r'^[a-zA-Z0-9\-\.]+@[a-zA-Z0-9\.\-]+$', dep):
self.dependencies.errors.append(f'Invalid dependency format: "{dep}". Use package@version (e.g., hl7.fhir.us.core@6.1.0).')
return False
# Validate alias file extension (optional, basic check)
# Check request.files for alias file as well
has_alias_file_in_request = request and request.files and self.alias_file.name in request.files and request.files[self.alias_file.name].filename != ''
alias_file_data = self.alias_file.data or (request.files.get(self.alias_file.name) if request else None)
if alias_file_data and alias_file_data.filename:
if not alias_file_data.filename.lower().endswith('.fsh'):
self.alias_file.errors.append('Alias file should have a .fsh extension.')
# return False # Might be too strict, maybe just warn?
return True
class TestDataUploadForm(FlaskForm):
"""Form for uploading FHIR test data."""
fhir_server_url = StringField('Target FHIR Server URL', validators=[DataRequired(), URL()],
render_kw={'placeholder': 'e.g., http://localhost:8080/fhir'})
auth_type = SelectField('Authentication Type', choices=[
('none', 'None'),
('bearerToken', 'Bearer Token')
('bearerToken', 'Bearer Token'),
('basic', 'Basic Authentication')
], default='none')
auth_token = StringField('Bearer Token', validators=[Optional()],
render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'})
username = StringField('Username', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Username'})
password = PasswordField('Password', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Password'})
test_data_file = FileField('Select Test Data File(s)', validators=[InputRequired("Please select at least one file.")],
render_kw={'multiple': True, 'accept': '.json,.xml,.zip'})
validate_before_upload = BooleanField('Validate Resources Before Upload?', default=False,
description="Validate resources against selected package profile before uploading.")
validation_package_id = SelectField('Validation Profile Package (Optional)',
choices=[('', '-- Select Package for Validation --')],
validators=[Optional()],
description="Select the processed IG package to use for validation.")
upload_mode = SelectField('Upload Mode', choices=[
('individual', 'Individual Resources'), # Simplified label
('transaction', 'Transaction Bundle') # Simplified label
('individual', 'Individual Resources'),
('transaction', 'Transaction Bundle')
], default='individual')
# --- NEW FIELD for Conditional Upload ---
use_conditional_uploads = BooleanField('Use Conditional Upload (Individual Mode Only)?', default=True,
description="If checked, checks resource existence (GET) and uses If-Match (PUT) or creates (PUT). If unchecked, uses simple PUT for all.")
# --- END NEW FIELD ---
error_handling = SelectField('Error Handling', choices=[
('stop', 'Stop on First Error'),
('continue', 'Continue on Error')
], default='stop')
submit = SubmitField('Upload and Process')
def validate(self, extra_validators=None):
"""Custom validation for Test Data Upload Form."""
if not super().validate(extra_validators): return False
if not super().validate(extra_validators):
return False
if self.validate_before_upload.data and not self.validation_package_id.data:
self.validation_package_id.errors.append('Please select a package to validate against when pre-upload validation is enabled.')
return False
# Add check: Conditional uploads only make sense for individual mode
if self.use_conditional_uploads.data and self.upload_mode.data == 'transaction':
self.use_conditional_uploads.errors.append('Conditional Uploads only apply to the "Individual Resources" mode.')
# We might allow this combination but warn the user it has no effect,
# or enforce it here. Let's enforce for clarity.
# return False # Optional: Make this a hard validation failure
# Or just let it pass and ignore the flag in the backend for transaction mode.
pass # Let it pass for now, backend will ignore if mode is transaction
return False
if self.auth_type.data == 'bearerToken' and not self.auth_token.data:
self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.')
return False
if self.auth_type.data == 'basic':
if not self.username.data:
self.username.errors.append('Username is required for Basic Authentication.')
return False
if not self.password.data:
self.password.errors.append('Password is required for Basic Authentication.')
return False
return True
class FhirRequestForm(FlaskForm):
fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()],
render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'})
auth_type = SelectField('Authentication Type (for Custom URL)', choices=[
('none', 'None'),
('bearerToken', 'Bearer Token'),
('basicAuth', 'Basic Authentication')
], default='none', validators=[Optional()])
auth_token = StringField('Bearer Token', validators=[Optional()],
render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'})
basic_auth_username = StringField('Username', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Username'})
basic_auth_password = PasswordField('Password', validators=[Optional()],
render_kw={'placeholder': 'Enter Basic Auth Password'})
submit = SubmitField('Send Request')
def validate(self, extra_validators=None):
if not super().validate(extra_validators):
return False
if self.fhir_server_url.data:
if self.auth_type.data == 'bearerToken' and not self.auth_token.data:
self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected for a custom URL.')
return False
if self.auth_type.data == 'basicAuth':
if not self.basic_auth_username.data:
self.basic_auth_username.errors.append('Username is required for Basic Authentication with a custom URL.')
return False
if not self.basic_auth_password.data:
self.basic_auth_password.errors.append('Password is required for Basic Authentication with a custom URL.')
return False
return True

requirements.txt

@ -11,3 +11,4 @@ Flask-Migrate==4.1.0
cachetools
beautifulsoup4
feedparser==6.0.11
flasgger

services.py

@ -18,6 +18,7 @@ import subprocess
import tempfile
import zipfile
import xml.etree.ElementTree as ET
from flasgger import swag_from # Import swag_from here
# Define Blueprint
services_bp = Blueprint('services', __name__)
@ -2375,6 +2376,51 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo
# --- Validation Route ---
@services_bp.route('/validate-sample', methods=['POST'])
@swag_from({
'tags': ['Validation'],
'summary': 'Validate a FHIR resource or bundle.',
'description': 'Validates a given FHIR resource or bundle against profiles defined in a specified FHIR package. Uses HAPI FHIR for validation if a profile is specified, otherwise falls back to local StructureDefinition checks.',
'security': [{'ApiKeyAuth': []}], # Assuming API key is desired
'consumes': ['application/json'],
'parameters': [
{
'name': 'validation_payload', # Changed name
'in': 'body',
'required': True,
'schema': {
'type': 'object',
'required': ['package_name', 'version', 'sample_data'],
'properties': {
'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
'version': {'type': 'string', 'example': '6.1.0'},
'sample_data': {'type': 'string', 'description': 'A JSON string of the FHIR resource or Bundle to validate.'},
# 'include_dependencies': {'type': 'boolean', 'default': True} # This seems to be a server-side decision now
}
}
}
],
'responses': {
'200': {
'description': 'Validation result.',
'schema': { # Define the schema of the validation_result dictionary
'type': 'object',
'properties': {
'valid': {'type': 'boolean'},
'errors': {'type': 'array', 'items': {'type': 'string'}},
'warnings': {'type': 'array', 'items': {'type': 'string'}},
'details': {'type': 'array', 'items': {'type': 'object'}}, # more specific if known
'resource_type': {'type': 'string'},
'resource_id': {'type': 'string'},
'profile': {'type': 'string', 'nullable': True},
'summary': {'type': 'object'}
}
}
},
'400': {'description': 'Invalid request (e.g., missing fields, invalid JSON).'},
'404': {'description': 'Specified package for validation not found.'},
'500': {'description': 'Server error during validation.'}
}
})
def validate_sample():
"""Validates a FHIR sample against a package profile."""
logger.debug("Received validate-sample request")
@ -2771,10 +2817,10 @@ def find_and_extract_search_params(tgz_path, base_resource_type):
# --- END OF NEW FUNCTION ---
# --- Full Replacement Function (Corrected Prefix Definitions & Unabbreviated) ---
def generate_push_stream(package_name, version, fhir_server_url, include_dependencies,
auth_type, auth_token, resource_types_filter, skip_files,
dry_run, verbose, force_upload,
packages_dir):
dry_run, verbose, force_upload, packages_dir):
"""
Generates NDJSON stream for the push IG operation.
Handles canonical resources (search by URL, POST/PUT),
@ -2787,8 +2833,8 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende
skipped_count = 0
post_count = 0
put_count = 0
total_resources_attempted = 0 # Calculated after collecting resources
processed_resources = set() # Tracks resource IDs attempted in this run
total_resources_attempted = 0
processed_resources = set()
failed_uploads_details = []
skipped_resources_details = []
filter_set = set(resource_types_filter) if resource_types_filter else None
@ -2797,7 +2843,7 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende
try:
# --- Start Messages ---
operation_mode = " (DRY RUN)" if dry_run else ""
force_mode = " (FORCE UPLOAD)" if force_upload else "" # For initial message
force_mode = " (FORCE UPLOAD)" if force_upload else ""
yield json.dumps({"type": "start", "message": f"Starting push{operation_mode}{force_mode} for {package_name}#{version} to {fhir_server_url}"}) + "\n"
if filter_set:
yield json.dumps({"type": "info", "message": f"Filtering for resource types: {', '.join(sorted(list(filter_set)))}"}) + "\n"
@ -2814,302 +2860,293 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende
yield json.dumps({"type": "error", "message": f"Primary package file not found: {primary_tgz_filename}"}) + "\n"
raise FileNotFoundError(f"Primary package file not found: {primary_tgz_path}")
# Always add the primary package
packages_to_push.append((package_name, version, primary_tgz_path))
logger.debug(f"Added primary package to push list: {package_name}#{version}")
# Handle dependencies IF include_dependencies is true
# NOTE: This uses the simple dependency inclusion based on import metadata.
# Aligning with UploadFIG's --includeReferencedDependencies requires more complex logic here.
if include_dependencies:
yield json.dumps({"type": "info", "message": "Including dependencies based on import metadata..."}) + "\n"
metadata = get_package_metadata(package_name, version) # Assumes this helper exists
if metadata and metadata.get('imported_dependencies'):
dependencies_to_include = metadata['imported_dependencies']
metadata = get_package_metadata(package_name, version)
if metadata and metadata.get("imported_dependencies"):
dependencies_to_include = metadata["imported_dependencies"]
logger.info(f"Found {len(dependencies_to_include)} dependencies in metadata to potentially include.")
for dep in dependencies_to_include:
dep_name = dep.get('name')
dep_version = dep.get('version')
dep_name = dep.get("name")
dep_version = dep.get("version")
if dep_name and dep_version:
dep_tgz_filename = construct_tgz_filename(dep_name, dep_version)
dep_tgz_path = os.path.join(packages_dir, dep_tgz_filename)
if os.path.exists(dep_tgz_path):
# Add dependency package to the list if file exists
if (dep_name, dep_version, dep_tgz_path) not in packages_to_push:
packages_to_push.append((dep_name, dep_version, dep_tgz_path))
logger.debug(f"Added dependency package to push list: {dep_name}#{dep_version}")
else:
# Log a warning if a listed dependency file isn't found
yield json.dumps({"type": "warning", "message": f"Dependency package file not found, cannot include: {dep_tgz_filename}"}) + "\n"
logger.warning(f"Dependency package file listed in metadata but not found locally: {dep_tgz_path}")
else:
yield json.dumps({"type": "warning", "message": "Include Dependencies checked, but no dependency metadata found. Only pushing primary."}) + "\n"
logger.warning(f"No dependency metadata found for {package_name}#{version} despite include_dependencies=True")
# --- End Define packages_to_push ---
# --- Resource Extraction & Filtering ---
resources_to_upload = []
seen_resource_files = set() # Track filenames across all packages being pushed
seen_resource_files = set()
# Iterate through the populated list of packages to push
for pkg_name, pkg_version, pkg_path in packages_to_push:
yield json.dumps({"type": "progress", "message": f"Extracting resources from: {pkg_name}#{pkg_version}..."}) + "\n"
try:
with tarfile.open(pkg_path, "r:gz") as tar:
for member in tar.getmembers():
# Basic file checks: must be a file, in 'package/', end with .json
if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')):
if not (member.isfile() and member.name.startswith("package/") and member.name.lower().endswith(".json")):
continue
# Skip common metadata files by basename
basename_lower = os.path.basename(member.name).lower()
if basename_lower in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']:
if basename_lower in ["package.json", ".index.json", "validation-summary.json", "validation-oo.json"]:
continue
# Check against skip_files list (using normalized paths)
normalized_member_name = member.name.replace('\\', '/')
normalized_member_name = member.name.replace("\\", "/")
if normalized_member_name in skip_files_set or member.name in skip_files_set:
if verbose: yield json.dumps({"type": "info", "message": f"Skipping file due to filter: {member.name}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Skipping file due to filter: {member.name}"}) + "\n"
continue
# Avoid processing the same file path if it appears in multiple packages
if member.name in seen_resource_files:
if verbose: yield json.dumps({"type": "info", "message": f"Skipping already seen file: {member.name}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Skipping already seen file: {member.name}"}) + "\n"
continue
seen_resource_files.add(member.name)
# Extract and parse the resource JSON
try:
with tar.extractfile(member) as f:
resource_content = f.read().decode('utf-8-sig')
resource_content = f.read().decode("utf-8-sig")
resource_data = json.loads(resource_content)
# Basic validation of resource structure
if isinstance(resource_data, dict) and 'resourceType' in resource_data and 'id' in resource_data:
resource_type_val = resource_data.get('resourceType') # Use distinct var name
# Apply resource type filter if provided
if isinstance(resource_data, dict) and "resourceType" in resource_data and "id" in resource_data:
resource_type_val = resource_data.get("resourceType")
if filter_set and resource_type_val not in filter_set:
if verbose: yield json.dumps({"type": "info", "message": f"Skipping resource type {resource_type_val} due to filter: {member.name}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Skipping resource type {resource_type_val} due to filter: {member.name}"}) + "\n"
continue
# Add valid resource to the upload list
resources_to_upload.append({
"data": resource_data,
"source_package": f"{pkg_name}#{pkg_version}",
"source_filename": member.name # Store original filename
"source_filename": member.name
})
else:
yield json.dumps({"type": "warning", "message": f"Skipping invalid/incomplete resource structure in file: {member.name}"}) + "\n"
# Handle errors during extraction/parsing of individual files
except json.JSONDecodeError as json_e: yield json.dumps({"type": "warning", "message": f"JSON parse error in file {member.name}: {json_e}"}) + "\n"
except UnicodeDecodeError as uni_e: yield json.dumps({"type": "warning", "message": f"Encoding error in file {member.name}: {uni_e}"}) + "\n"
except KeyError: yield json.dumps({"type": "warning", "message": f"File not found within archive (should not happen here): {member.name}"}) + "\n"
except Exception as extract_e: yield json.dumps({"type": "warning", "message": f"Error processing file {member.name}: {extract_e}"}) + "\n"
# Handle errors opening/reading the tarfile itself
except json.JSONDecodeError as json_e:
yield json.dumps({"type": "warning", "message": f"JSON parse error in file {member.name}: {json_e}"}) + "\n"
except UnicodeDecodeError as uni_e:
yield json.dumps({"type": "warning", "message": f"Encoding error in file {member.name}: {uni_e}"}) + "\n"
except KeyError:
yield json.dumps({"type": "warning", "message": f"File not found within archive: {member.name}"}) + "\n"
except Exception as extract_e:
yield json.dumps({"type": "warning", "message": f"Error processing file {member.name}: {extract_e}"}) + "\n"
except tarfile.ReadError as tar_read_e:
error_msg = f"Tar ReadError reading package {pkg_name}#{pkg_version}: {tar_read_e}. Skipping package."
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1 # Count as failure for summary
failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Read Error: {tar_read_e}"})
continue # Skip to next package in packages_to_push
failure_count += 1
failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Read Error: {tar_read_e}"})
continue
except tarfile.TarError as tar_e:
error_msg = f"TarError reading package {pkg_name}#{pkg_version}: {tar_e}. Skipping package."
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Tar Error: {tar_e}"})
failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Tar Error: {tar_e}"})
continue
except Exception as pkg_e: # Catch other potential errors reading package
except Exception as pkg_e:
error_msg = f"Unexpected error reading package {pkg_name}#{pkg_version}: {pkg_e}. Skipping package."
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Unexpected: {pkg_e}"})
failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Unexpected: {pkg_e}"})
logger.error(f"Error reading package {pkg_path}: {pkg_e}", exc_info=True)
continue
# --- End Resource Extraction ---
total_resources_attempted = len(resources_to_upload)
yield json.dumps({"type": "info", "message": f"Found {total_resources_attempted} resources matching filters across selected packages."}) + "\n"
if total_resources_attempted == 0:
yield json.dumps({"type": "warning", "message": "No resources found to upload after filtering."}) + "\n"
# Go straight to completion summary if nothing to upload
else:
# --- Resource Upload Loop Setup ---
session = requests.Session()
base_url = fhir_server_url.rstrip('/')
headers = {'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'}
# Add Authentication Header
if auth_type == 'bearerToken' and auth_token:
headers['Authorization'] = f'Bearer {auth_token}'
yield json.dumps({"type": "info", "message": "Using Bearer Token authentication."}) + "\n"
elif auth_type == 'apiKey':
# Get internal key from Flask app config if available
base_url = fhir_server_url.rstrip("/")
headers = {"Content-Type": "application/fhir+json", "Accept": "application/fhir+json"}
# MODIFIED: Enhanced authentication handling
if auth_type in ["bearerToken", "basic"] and auth_token:
# Log the Authorization header (mask sensitive data)
auth_display = "Basic <redacted>" if auth_type == "basic" else (auth_token[:10] + "..." if len(auth_token) > 10 else auth_token)
yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n"
headers["Authorization"] = auth_token # Use auth_token for both Bearer and Basic
elif auth_type == "apiKey":
internal_api_key = None
try:
internal_api_key = current_app.config.get('API_KEY')
internal_api_key = current_app.config.get("API_KEY")
except RuntimeError:
logger.warning("Cannot access current_app config outside of request context for API Key.")
if internal_api_key:
headers['X-API-Key'] = internal_api_key
headers["X-API-Key"] = internal_api_key
yield json.dumps({"type": "info", "message": "Using internal API Key authentication."}) + "\n"
else:
yield json.dumps({"type": "warning", "message": "API Key auth selected, but no internal key configured/accessible."}) + "\n"
else: # 'none'
else:
yield json.dumps({"type": "info", "message": "Using no authentication."}) + "\n"
# --- Main Upload Loop ---
for i, resource_info in enumerate(resources_to_upload, 1):
local_resource = resource_info["data"]
source_pkg = resource_info["source_package"]
resource_type = local_resource.get('resourceType')
resource_id = local_resource.get('id')
resource_type = local_resource.get("resourceType")
resource_id = local_resource.get("id")
resource_log_id = f"{resource_type}/{resource_id}"
canonical_url = local_resource.get('url')
canonical_version = local_resource.get('version')
is_canonical_type = resource_type in CANONICAL_RESOURCE_TYPES # Assumes this set is defined
canonical_url = local_resource.get("url")
canonical_version = local_resource.get("version")
is_canonical_type = resource_type in CANONICAL_RESOURCE_TYPES
# Skip duplicates already attempted *in this run* (by ResourceType/Id)
if resource_log_id in processed_resources:
if verbose: yield json.dumps({"type": "info", "message": f"Skipping duplicate ID in processing list: {resource_log_id}"}) + "\n"
# Note: Do not increment skipped_count here as it wasn't an upload attempt failure/skip
if verbose:
yield json.dumps({"type": "info", "message": f"Skipping duplicate ID in processing list: {resource_log_id}"}) + "\n"
continue
processed_resources.add(resource_log_id)
# --- Dry Run Handling ---
if dry_run:
dry_run_action = "check/PUT" # Default action
dry_run_action = "check/PUT"
if is_canonical_type and canonical_url:
dry_run_action = "search/POST/PUT"
yield json.dumps({"type": "progress", "message": f"[DRY RUN] Would {dry_run_action} {resource_log_id} ({i}/{total_resources_attempted}) from {source_pkg}"}) + "\n"
success_count += 1 # Count check/potential action as success in dry run
# Track package info for dry run summary
success_count += 1
pkg_found = False
for p in pushed_packages_info:
if p["id"] == source_pkg:
p["resource_count"] += 1
pkg_found = True
break
if not pkg_found: pushed_packages_info.append({"id": source_pkg, "resource_count": 1})
continue # Skip actual request processing for dry run
if not pkg_found:
pushed_packages_info.append({"id": source_pkg, "resource_count": 1})
continue
# --- Determine Upload Strategy ---
existing_resource_id = None
existing_resource_data = None
action = "PUT" # Default action is PUT by ID
target_url = f"{base_url}/{resource_type}/{resource_id}" # Default target URL for PUT
skip_resource = False # Flag to skip upload altogether
action = "PUT"
target_url = f"{base_url}/{resource_type}/{resource_id}"
skip_resource = False
# 1. Handle Canonical Resources (Search by URL/Version)
if is_canonical_type and canonical_url:
action = "SEARCH_POST_PUT" # Indicate canonical handling path
search_params = {'url': canonical_url}
action = "SEARCH_POST_PUT"
search_params = {"url": canonical_url}
if canonical_version:
search_params['version'] = canonical_version
search_params["version"] = canonical_version
search_url = f"{base_url}/{resource_type}"
if verbose: yield json.dumps({"type": "info", "message": f"Canonical Type: Searching {search_url} with params {search_params}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Canonical Type: Searching {search_url} with params {search_params}"}) + "\n"
try:
search_response = session.get(search_url, params=search_params, headers=headers, timeout=20)
search_response.raise_for_status() # Check for HTTP errors on search
search_response.raise_for_status()
search_bundle = search_response.json()
if search_bundle.get('resourceType') == 'Bundle' and 'entry' in search_bundle:
entries = search_bundle.get('entry', [])
if search_bundle.get("resourceType") == "Bundle" and "entry" in search_bundle:
entries = search_bundle.get("entry", [])
if len(entries) == 1:
existing_resource_data = entries[0].get('resource')
existing_resource_data = entries[0].get("resource")
if existing_resource_data:
existing_resource_id = existing_resource_data.get('id')
existing_resource_id = existing_resource_data.get("id")
if existing_resource_id:
action = "PUT" # Found existing, plan to PUT
action = "PUT"
target_url = f"{base_url}/{resource_type}/{existing_resource_id}"
if verbose: yield json.dumps({"type": "info", "message": f"Found existing canonical resource ID: {existing_resource_id}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Found existing canonical resource ID: {existing_resource_id}"}) + "\n"
else:
yield json.dumps({"type": "warning", "message": f"Found canonical {canonical_url}|{canonical_version} but lacks ID. Skipping update."}) + "\n"
action = "SKIP"; skip_resource = True; skipped_count += 1
skipped_resources_details.append({'resource': resource_log_id, 'reason': 'Found canonical match without ID'})
action = "SKIP"
skip_resource = True
skipped_count += 1
skipped_resources_details.append({"resource": resource_log_id, "reason": "Found canonical match without ID"})
else:
yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} entry lacks resource data. Assuming not found."}) + "\n"
action = "POST"; target_url = f"{base_url}/{resource_type}"
action = "POST"
target_url = f"{base_url}/{resource_type}"
elif len(entries) == 0:
action = "POST"; target_url = f"{base_url}/{resource_type}"
if verbose: yield json.dumps({"type": "info", "message": f"Canonical not found by URL/Version. Planning POST."}) + "\n"
else: # Found multiple matches - conflict!
ids_found = [e.get('resource', {}).get('id', 'unknown') for e in entries]
action = "POST"
target_url = f"{base_url}/{resource_type}"
if verbose:
yield json.dumps({"type": "info", "message": f"Canonical not found by URL/Version. Planning POST."}) + "\n"
else:
ids_found = [e.get("resource", {}).get("id", "unknown") for e in entries]
yield json.dumps({"type": "error", "message": f"Conflict: Found {len(entries)} matches for {canonical_url}|{canonical_version} (IDs: {', '.join(ids_found)}). Skipping."}) + "\n"
action = "SKIP"; skip_resource = True; failure_count += 1 # Count conflict as failure
failed_uploads_details.append({'resource': resource_log_id, 'error': f"Conflict: Multiple matches ({len(entries)}) for canonical URL/Version"})
else: # Search returned non-Bundle or empty Bundle
action = "SKIP"
skip_resource = True
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": f"Conflict: Multiple matches ({len(entries)}) for canonical URL/Version"})
else:
yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} returned non-Bundle/empty. Assuming not found."}) + "\n"
action = "POST"; target_url = f"{base_url}/{resource_type}"
action = "POST"
target_url = f"{base_url}/{resource_type}"
except requests.exceptions.RequestException as search_err:
yield json.dumps({"type": "warning", "message": f"Search failed for {resource_log_id}: {search_err}. Defaulting to PUT by ID."}) + "\n"
action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}" # Fallback
action = "PUT"
target_url = f"{base_url}/{resource_type}/{resource_id}"
except json.JSONDecodeError as json_err:
yield json.dumps({"type": "warning", "message": f"Failed parse search result for {resource_log_id}: {json_err}. Defaulting PUT by ID."}) + "\n"
action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}"
except Exception as e: # Catch other unexpected errors during search
action = "PUT"
target_url = f"{base_url}/{resource_type}/{resource_id}"
except Exception as e:
yield json.dumps({"type": "warning", "message": f"Unexpected canonical search error for {resource_log_id}: {e}. Defaulting PUT by ID."}) + "\n"
action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}"
action = "PUT"
target_url = f"{base_url}/{resource_type}/{resource_id}"
# 2. Semantic Comparison (Only if PUTting an existing resource and NOT force_upload)
if action == "PUT" and not force_upload and not skip_resource:
resource_to_compare = existing_resource_data # Use data from canonical search if available
resource_to_compare = existing_resource_data
if not resource_to_compare:
# If not canonical or search failed/skipped, try GET by ID for comparison
try:
if verbose: yield json.dumps({"type": "info", "message": f"Checking existing (PUT target): {target_url}"}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Checking existing (PUT target): {target_url}"}) + "\n"
get_response = session.get(target_url, headers=headers, timeout=15)
if get_response.status_code == 200:
resource_to_compare = get_response.json()
if verbose: yield json.dumps({"type": "info", "message": f"Found resource by ID for comparison."}) + "\n"
if verbose:
yield json.dumps({"type": "info", "message": f"Found resource by ID for comparison."}) + "\n"
elif get_response.status_code == 404:
if verbose: yield json.dumps({"type": "info", "message": f"Resource {resource_log_id} not found by ID ({target_url}). Proceeding with PUT create."}) + "\n"
# No comparison needed if target doesn't exist
else: # Handle other GET errors - log warning, comparison skipped, proceed with PUT
if verbose:
yield json.dumps({"type": "info", "message": f"Resource {resource_log_id} not found by ID ({target_url}). Proceeding with PUT create."}) + "\n"
else:
yield json.dumps({"type": "warning", "message": f"Comparison check failed (GET {get_response.status_code}). Attempting PUT."}) + "\n"
except Exception as get_err:
yield json.dumps({"type": "warning", "message": f"Comparison check failed (Error during GET by ID: {get_err}). Attempting PUT."}) + "\n"
# Perform comparison if we have fetched an existing resource
if resource_to_compare:
try:
# Assumes are_resources_semantically_equal helper function exists and works
if are_resources_semantically_equal(local_resource, resource_to_compare):
yield json.dumps({"type": "info", "message": f"Skipping {resource_log_id} (Identical content)"}) + "\n"
skip_resource = True; skipped_count += 1
skipped_resources_details.append({'resource': resource_log_id, 'reason': 'Identical content'})
skip_resource = True
skipped_count += 1
skipped_resources_details.append({"resource": resource_log_id, "reason": "Identical content"})
elif verbose:
yield json.dumps({"type": "info", "message": f"{resource_log_id} exists but differs. Updating."}) + "\n"
except Exception as comp_err:
# Log error during comparison but proceed with PUT
yield json.dumps({"type": "warning", "message": f"Comparison failed for {resource_log_id}: {comp_err}. Proceeding with PUT."}) + "\n"
elif action == "PUT" and force_upload: # Force upload enabled, skip comparison
if verbose: yield json.dumps({"type": "info", "message": f"Force Upload enabled, skipping comparison for {resource_log_id}."}) + "\n"
elif action == "PUT" and force_upload:
if verbose:
yield json.dumps({"type": "info", "message": f"Force Upload enabled, skipping comparison for {resource_log_id}."}) + "\n"
# 3. Execute Upload Action (POST or PUT, if not skipped)
if not skip_resource:
http_method = action if action in ["POST", "PUT"] else "PUT" # Ensure valid method
http_method = action if action in ["POST", "PUT"] else "PUT"
log_action = f"{http_method}ing"
yield json.dumps({"type": "progress", "message": f"{log_action} {resource_log_id} ({i}/{total_resources_attempted}) to {target_url}..."}) + "\n"
try:
# Send the request
if http_method == "POST":
response = session.post(target_url, json=local_resource, headers=headers, timeout=30)
post_count += 1
else: # Default to PUT
else:
response = session.put(target_url, json=local_resource, headers=headers, timeout=30)
put_count += 1
response.raise_for_status() # Raises HTTPError for 4xx/5xx responses
response.raise_for_status()
# Log success
success_msg = f"{http_method} successful for {resource_log_id} (Status: {response.status_code})"
if http_method == "POST" and response.status_code == 201:
# Add Location header info if available on create
location = response.headers.get('Location')
location = response.headers.get("Location")
if location:
# Try to extract ID from location header
match = re.search(f"{resource_type}/([^/]+)/_history", location)
new_id = match.group(1) if match else "unknown"
success_msg += f" -> New ID: {new_id}"
@ -3117,7 +3154,6 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende
success_msg += " (No Location header)"
yield json.dumps({"type": "success", "message": success_msg}) + "\n"
success_count += 1
# Track package info for successful upload
pkg_found_success = False
for p in pushed_packages_info:
if p["id"] == source_pkg:
@ -3127,94 +3163,103 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende
if not pkg_found_success:
pushed_packages_info.append({"id": source_pkg, "resource_count": 1})
# --- CORRECTED ERROR HANDLING ---
except requests.exceptions.HTTPError as http_err:
outcome_text = ""
status_code = http_err.response.status_code if http_err.response is not None else 'N/A'
status_code = http_err.response.status_code if http_err.response is not None else "N/A"
try:
outcome = http_err.response.json()
if outcome and outcome.get('resourceType') == 'OperationOutcome':
issues = outcome.get('issue', [])
if outcome and outcome.get("resourceType") == "OperationOutcome":
issues = outcome.get("issue", [])
outcome_text = "; ".join([f"{i.get('severity', 'info')}: {i.get('diagnostics', i.get('details', {}).get('text', 'No details'))}" for i in issues]) if issues else "OperationOutcome with no issues."
else: outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body"
except ValueError: outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body (or not JSON)"
# This block is now correctly indented
else:
outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body"
except ValueError:
outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body (or not JSON)"
error_msg = f"Failed {http_method} {resource_log_id} (Status: {status_code}): {outcome_text or str(http_err)}"
yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': error_msg})
# --- END CORRECTION ---
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": error_msg})
except requests.exceptions.Timeout:
error_msg = f"Timeout during {http_method} {resource_log_id}"
yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': 'Timeout'})
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": "Timeout"})
except requests.exceptions.ConnectionError as conn_err:
error_msg = f"Connection error during {http_method} {resource_log_id}: {conn_err}"
yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Connection Error: {conn_err}'})
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": f"Connection Error: {conn_err}"})
except requests.exceptions.RequestException as req_err:
error_msg = f"Request error during {http_method} {resource_log_id}: {str(req_err)}"
yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Request Error: {req_err}'})
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": f"Request Error: {req_err}"})
except Exception as e:
error_msg = f"Unexpected error during {http_method} {resource_log_id}: {str(e)}"
yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Unexpected: {e}'}); logger.error(f"[API Push Stream] Upload error for {resource_log_id}: {e}", exc_info=True)
# --- End Execute Action ---
else: # Resource was skipped
# Track package info even if skipped
yield json.dumps({"type": "error", "message": error_msg}) + "\n"
failure_count += 1
failed_uploads_details.append({"resource": resource_log_id, "error": f"Unexpected: {e}"})
logger.error(f"[API Push Stream] Upload error for {resource_log_id}: {e}", exc_info=True)
else:
pkg_found_skipped = False
for p in pushed_packages_info:
if p["id"] == source_pkg:
pkg_found_skipped = True; break
pkg_found_skipped = True
break
if not pkg_found_skipped:
pushed_packages_info.append({"id": source_pkg, "resource_count": 0}) # Add pkg with 0 count
# --- End Main Upload Loop ---
pushed_packages_info.append({"id": source_pkg, "resource_count": 0})
# --- Final Summary ---
final_status = "success" if failure_count == 0 else \
"partial" if success_count > 0 else "failure"
# --- Define prefixes before use ---
final_status = "success" if failure_count == 0 else "partial" if success_count > 0 else "failure"
dry_run_prefix = "[DRY RUN] " if dry_run else ""
force_prefix = "[FORCE UPLOAD] " if force_upload else ""
# --- End Define prefixes ---
# Adjust summary message construction
if total_resources_attempted == 0 and failure_count == 0:
summary_message = f"{dry_run_prefix}Push finished: No matching resources found to process."
final_status = "success" # Still success if no errors occurred
final_status = "success"
else:
# Use the defined prefixes
summary_message = f"{dry_run_prefix}{force_prefix}Push finished: {post_count} POSTed, {put_count} PUT, {failure_count} failed, {skipped_count} skipped ({total_resources_attempted} resources attempted)."
# Create summary dictionary
summary = {
"status": final_status, "message": summary_message,
"target_server": fhir_server_url, "package_name": package_name, "version": version,
"included_dependencies": include_dependencies, "resources_attempted": total_resources_attempted,
"success_count": success_count, "post_count": post_count, "put_count": put_count,
"failure_count": failure_count, "skipped_count": skipped_count,
"validation_failure_count": 0, # Placeholder
"failed_details": failed_uploads_details, "skipped_details": skipped_resources_details,
"pushed_packages_summary": pushed_packages_info, "dry_run": dry_run, "force_upload": force_upload,
"status": final_status,
"message": summary_message,
"target_server": fhir_server_url,
"package_name": package_name,
"version": version,
"included_dependencies": include_dependencies,
"resources_attempted": total_resources_attempted,
"success_count": success_count,
"post_count": post_count,
"put_count": put_count,
"failure_count": failure_count,
"skipped_count": skipped_count,
"validation_failure_count": 0,
"failed_details": failed_uploads_details,
"skipped_details": skipped_resources_details,
"pushed_packages_summary": pushed_packages_info,
"dry_run": dry_run,
"force_upload": force_upload,
"resource_types_filter": resource_types_filter,
"skip_files_filter": sorted(list(skip_files_set)) if skip_files_set else None
}
yield json.dumps({"type": "complete", "data": summary}) + "\n"
logger.info(f"[API Push Stream] Completed {package_name}#{version}. Status: {final_status}. {summary_message}")
# --- Final Exception Handling for setup/initialization errors ---
except FileNotFoundError as fnf_err:
logger.error(f"[API Push Stream] Setup error: {str(fnf_err)}", exc_info=False)
error_response = {"status": "error", "message": f"Setup error: {str(fnf_err)}"}
try:
yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n"
yield json.dumps({"type": "complete", "data": error_response}) + "\n"
except Exception as yield_e: logger.error(f"Error yielding final setup error: {yield_e}")
except Exception as yield_e:
logger.error(f"Error yielding final setup error: {yield_e}")
except Exception as e:
logger.error(f"[API Push Stream] Critical error during setup or stream generation: {str(e)}", exc_info=True)
error_response = {"status": "error", "message": f"Server error during push setup: {str(e)}"}
try:
yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n"
yield json.dumps({"type": "complete", "data": error_response}) + "\n"
except Exception as yield_e: logger.error(f"Error yielding final critical error: {yield_e}")
except Exception as yield_e:
logger.error(f"Error yielding final critical error: {yield_e}")
# --- END generate_push_stream FUNCTION ---
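A minimal sketch of how a route could serve this generator as an NDJSON stream; the route name, request parsing, and packages directory below are assumptions, not the project's actual push route:
from flask import Response, request, stream_with_context

@services_bp.route('/stream-push-ig', methods=['POST'])  # hypothetical route name
def stream_push_ig():
    opts = request.get_json(force=True)
    stream = generate_push_stream(
        package_name=opts['package_name'],
        version=opts['version'],
        fhir_server_url=opts['fhir_server_url'],
        include_dependencies=opts.get('include_dependencies', True),
        auth_type=opts.get('auth_type', 'none'),
        auth_token=opts.get('auth_token'),
        resource_types_filter=opts.get('resource_types_filter'),
        skip_files=opts.get('skip_files'),
        dry_run=opts.get('dry_run', False),
        verbose=opts.get('verbose', False),
        force_upload=opts.get('force_upload', False),
        packages_dir=opts.get('packages_dir', '/app/instance/fhir_packages'),  # placeholder path
    )
    # Each yielded line is a self-contained JSON object, so NDJSON is the natural media type.
    return Response(stream_with_context(stream), mimetype='application/x-ndjson')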
@ -3612,9 +3657,15 @@ def process_and_upload_test_data(server_info, options, temp_file_dir):
session = requests.Session()
base_url = server_info['url'].rstrip('/')
upload_headers = {'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'}
if server_info['auth_type'] == 'bearerToken' and server_info['auth_token']:
upload_headers['Authorization'] = f"Bearer {server_info['auth_token']}"
yield json.dumps({"type": "info", "message": "Using Bearer Token auth."}) + "\n"
if server_info['auth_type'] in ['bearerToken', 'basic'] and server_info.get('auth_token'):
# Log the Authorization header (mask sensitive data)
auth_header = server_info['auth_token']
if auth_header.startswith('Basic '):
auth_display = 'Basic <redacted>'
else:
auth_display = auth_header[:10] + '...' if len(auth_header) > 10 else auth_header
yield json.dumps({"type": "info", "message": f"Using {server_info['auth_type']} auth with header: Authorization: {auth_display}"}) + "\n"
upload_headers['Authorization'] = server_info['auth_token'] # FIXED: Use server_info['auth_token']
else:
yield json.dumps({"type": "info", "message": "Using no auth."}) + "\n"
@ -3892,10 +3943,11 @@ def process_and_upload_test_data(server_info, options, temp_file_dir):
# --- END Service Function ---
# --- CORRECTED retrieve_bundles function with NEW logic ---
def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references=False, fetch_reference_bundles=False):
def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references=False, fetch_reference_bundles=False, auth_type='none', auth_token=None):
"""
Retrieve FHIR bundles and save to a ZIP file.
Optionally fetches referenced resources, either individually by ID or as full bundles by type.
Supports authentication for custom FHIR servers.
Yields NDJSON progress updates.
"""
temp_dir = None
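A minimal usage sketch for the updated signature; it must run inside a Flask application context because the helper reads current_app.config, and the output path and resource list here are placeholders:
import json
import os
import tempfile

def run_retrieval_example(app):
    output_zip = os.path.join(tempfile.gettempdir(), "retrieved_bundles.zip")
    with app.app_context():
        for line in retrieve_bundles(
            fhir_server_url="/fhir",             # local HAPI via the proxy
            resources=["Patient", "Observation"],
            output_zip=output_zip,
            validate_references=True,
            fetch_reference_bundles=False,
            auth_type="none",
            auth_token=None,
        ):
            event = json.loads(line)
            print(event["type"], event.get("message", ""))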
@ -3903,7 +3955,7 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
total_initial_bundles = 0
fetched_individual_references = 0
fetched_type_bundles = 0
retrieved_references_or_types = set() # Track fetched items to avoid duplicates
retrieved_references_or_types = set()
temp_dir = tempfile.mkdtemp(prefix="fhir_retrieve_")
logger.debug(f"Created temporary directory for bundle retrieval: {temp_dir}")
@ -3913,19 +3965,25 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
else:
yield json.dumps({"type": "info", "message": "Reference fetching OFF"}) + "\n"
# --- Determine Base URL and Headers for Proxy ---
# Determine Base URL and Headers for Proxy
base_proxy_url = f"{current_app.config['APP_BASE_URL'].rstrip('/')}/fhir"
headers = {'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8'}
is_custom_url = fhir_server_url != '/fhir' and fhir_server_url is not None and fhir_server_url.startswith('http')
if is_custom_url:
headers['X-Target-FHIR-Server'] = fhir_server_url.rstrip('/')
if auth_type in ['bearer', 'basic'] and auth_token:
auth_display = 'Basic <redacted>' if auth_type == 'basic' else (auth_token[:10] + '...' if len(auth_token) > 10 else auth_token)
yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n"
headers['Authorization'] = auth_token
else:
yield json.dumps({"type": "info", "message": "Using no authentication for custom URL"}) + "\n"
logger.debug(f"Will use proxy with X-Target-FHIR-Server: {headers['X-Target-FHIR-Server']}")
else:
yield json.dumps({"type": "info", "message": "Using no authentication for local HAPI server"}) + "\n"
logger.debug("Will use proxy targeting local HAPI server")
# --- Fetch Initial Bundles ---
initial_bundle_files = [] # Store paths for reference scanning
# Fetch Initial Bundles
initial_bundle_files = []
for resource_type in resources:
url = f"{base_proxy_url}/{quote(resource_type)}"
yield json.dumps({"type": "progress", "message": f"Fetching bundle for {resource_type} via proxy..."}) + "\n"
@ -3934,14 +3992,14 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
response = requests.get(url, headers=headers, timeout=60)
logger.debug(f"Proxy response for {resource_type}: HTTP {response.status_code}")
if response.status_code != 200:
# ... (keep existing error handling for initial fetch) ...
error_detail = f"Proxy returned HTTP {response.status_code}."
try: error_detail += f" Body: {response.text[:200]}..."
except: pass
yield json.dumps({"type": "error", "message": f"Failed to fetch {resource_type}: {error_detail}"}) + "\n"
logger.error(f"Failed to fetch {resource_type} via proxy {url}: {error_detail}")
continue
try: bundle = response.json()
try:
bundle = response.json()
except ValueError as e:
yield json.dumps({"type": "error", "message": f"Invalid JSON response for {resource_type}: {str(e)}"}) + "\n"
logger.error(f"Invalid JSON from proxy for {resource_type} at {url}: {e}, Response: {response.text[:500]}")
@ -3956,9 +4014,10 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
# Save the bundle
output_file = os.path.join(temp_dir, f"{resource_type}_bundle.json")
try:
with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2)
with open(output_file, 'w', encoding='utf-8') as f:
json.dump(bundle, f, indent=2)
logger.debug(f"Wrote bundle to {output_file}")
initial_bundle_files.append(output_file) # Add for scanning
initial_bundle_files.append(output_file)
total_initial_bundles += 1
yield json.dumps({"type": "success", "message": f"Saved bundle for {resource_type}"}) + "\n"
except IOError as e:
@ -3974,16 +4033,17 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
logger.error(f"Unexpected error during initial fetch for {resource_type} at {url}: {e}", exc_info=True)
continue
# --- Fetch Referenced Resources (Conditionally) ---
# Fetch Referenced Resources (Conditionally)
if validate_references and initial_bundle_files:
yield json.dumps({"type": "progress", "message": "Scanning retrieved bundles for references..."}) + "\n"
all_references = set()
references_by_type = defaultdict(set) # To store { 'Patient': {'id1', 'id2'}, ... }
references_by_type = defaultdict(set)
# --- Scan for References ---
# Scan for References
for bundle_file_path in initial_bundle_files:
try:
with open(bundle_file_path, 'r', encoding='utf-8') as f: bundle = json.load(f)
with open(bundle_file_path, 'r', encoding='utf-8') as f:
bundle = json.load(f)
for entry in bundle.get('entry', []):
resource = entry.get('resource')
if resource:
@ -3992,33 +4052,35 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
for ref_str in current_refs:
if isinstance(ref_str, str) and '/' in ref_str and not ref_str.startswith('#'):
all_references.add(ref_str)
# Group by type for bundle fetch mode
try:
ref_type = ref_str.split('/')[0]
if ref_type: references_by_type[ref_type].add(ref_str)
except Exception: pass # Ignore parsing errors here
if ref_type:
references_by_type[ref_type].add(ref_str)
except Exception:
pass
except Exception as e:
yield json.dumps({"type": "warning", "message": f"Could not scan references in {os.path.basename(bundle_file_path)}: {e}"}) + "\n"
logger.warning(f"Error processing references in {bundle_file_path}: {e}")
# --- Fetch Logic ---
# Fetch Logic
if not all_references:
yield json.dumps({"type": "info", "message": "No references found to fetch."}) + "\n"
else:
if fetch_reference_bundles:
# --- Fetch Full Bundles by Type ---
# Fetch Full Bundles by Type
unique_ref_types = sorted(list(references_by_type.keys()))
yield json.dumps({"type": "progress", "message": f"Fetching full bundles for {len(unique_ref_types)} referenced types..."}) + "\n"
logger.info(f"Fetching full bundles for referenced types: {unique_ref_types}")
for ref_type in unique_ref_types:
if ref_type in retrieved_references_or_types: continue # Skip if type bundle already fetched
if ref_type in retrieved_references_or_types:
continue
url = f"{base_proxy_url}/{quote(ref_type)}" # Fetch all of this type
url = f"{base_proxy_url}/{quote(ref_type)}"
yield json.dumps({"type": "progress", "message": f"Fetching full bundle for type {ref_type} via proxy..."}) + "\n"
logger.debug(f"Sending GET request for full type bundle {ref_type} to proxy {url} with headers: {json.dumps(headers)}")
try:
response = requests.get(url, headers=headers, timeout=180) # Longer timeout for full bundles
response = requests.get(url, headers=headers, timeout=180)
logger.debug(f"Proxy response for {ref_type} bundle: HTTP {response.status_code}")
if response.status_code != 200:
error_detail = f"Proxy returned HTTP {response.status_code}."
@ -4026,10 +4088,11 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
except: pass
yield json.dumps({"type": "warning", "message": f"Failed to fetch full bundle for {ref_type}: {error_detail}"}) + "\n"
logger.warning(f"Failed to fetch full bundle {ref_type} via proxy {url}: {error_detail}")
retrieved_references_or_types.add(ref_type) # Mark type as attempted
retrieved_references_or_types.add(ref_type)
continue
try: bundle = response.json()
try:
bundle = response.json()
except ValueError as e:
yield json.dumps({"type": "warning", "message": f"Invalid JSON for full {ref_type} bundle: {str(e)}"}) + "\n"
logger.warning(f"Invalid JSON response from proxy for full {ref_type} bundle at {url}: {e}")
@ -4045,7 +4108,8 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
# Save the full type bundle
output_file = os.path.join(temp_dir, f"ref_{ref_type}_BUNDLE.json")
try:
with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2)
with open(output_file, 'w', encoding='utf-8') as f:
json.dump(bundle, f, indent=2)
logger.debug(f"Wrote full type bundle to {output_file}")
fetched_type_bundles += 1
retrieved_references_or_types.add(ref_type)
@ -4053,8 +4117,7 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
except IOError as e:
yield json.dumps({"type": "warning", "message": f"Failed to save full bundle file for {ref_type}: {e}"}) + "\n"
logger.error(f"Failed to write full bundle file {output_file}: {e}")
retrieved_references_or_types.add(ref_type) # Still mark as attempted
retrieved_references_or_types.add(ref_type)
except requests.RequestException as e:
yield json.dumps({"type": "warning", "message": f"Error connecting to proxy for full {ref_type} bundle: {str(e)}"}) + "\n"
logger.warning(f"Error retrieving full {ref_type} bundle via proxy: {e}")
@ -4063,26 +4126,23 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
yield json.dumps({"type": "warning", "message": f"Unexpected error fetching full {ref_type} bundle: {str(e)}"}) + "\n"
logger.warning(f"Unexpected error during full {ref_type} bundle fetch: {e}", exc_info=True)
retrieved_references_or_types.add(ref_type)
# End loop through ref_types
else:
# --- Fetch Individual Referenced Resources ---
# Fetch Individual Referenced Resources
yield json.dumps({"type": "progress", "message": f"Fetching {len(all_references)} unique referenced resources individually..."}) + "\n"
logger.info(f"Fetching {len(all_references)} unique referenced resources by ID.")
for ref in sorted(list(all_references)): # Sort for consistent order
if ref in retrieved_references_or_types: continue # Skip already fetched
for ref in sorted(list(all_references)):
if ref in retrieved_references_or_types:
continue
try:
# Parse reference
ref_parts = ref.split('/')
if len(ref_parts) != 2 or not ref_parts[0] or not ref_parts[1]:
logger.warning(f"Skipping invalid reference format: {ref}")
continue
ref_type, ref_id = ref_parts
# Fetch individual resource using _id search
search_param = quote(f"_id={ref_id}")
url = f"{base_proxy_url}/{quote(ref_type)}?{search_param}"
yield json.dumps({"type": "progress", "message": f"Fetching referenced {ref_type}/{ref_id} via proxy..."}) + "\n"
logger.debug(f"Sending GET request for referenced {ref} to proxy {url} with headers: {json.dumps(headers)}")
@ -4098,7 +4158,8 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
retrieved_references_or_types.add(ref)
continue
try: bundle = response.json()
try:
bundle = response.json()
except ValueError as e:
yield json.dumps({"type": "warning", "message": f"Invalid JSON for referenced {ref}: {str(e)}"}) + "\n"
logger.warning(f"Invalid JSON from proxy for ref {ref} at {url}: {e}")
@ -4117,10 +4178,10 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
continue
# Save the bundle containing the single referenced resource
# Use a filename indicating it's an individual reference fetch
output_file = os.path.join(temp_dir, f"ref_{ref_type}_{ref_id}.json")
try:
with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2)
with open(output_file, 'w', encoding='utf-8') as f:
json.dump(bundle, f, indent=2)
logger.debug(f"Wrote referenced resource bundle to {output_file}")
fetched_individual_references += 1
retrieved_references_or_types.add(ref)
@ -4129,7 +4190,6 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
yield json.dumps({"type": "warning", "message": f"Failed to save file for referenced {ref}: {e}"}) + "\n"
logger.error(f"Failed to write file {output_file}: {e}")
retrieved_references_or_types.add(ref)
except requests.RequestException as e:
yield json.dumps({"type": "warning", "message": f"Network error fetching referenced {ref}: {str(e)}"}) + "\n"
logger.warning(f"Network error retrieving referenced {ref} via proxy: {e}")
@ -4138,10 +4198,8 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
yield json.dumps({"type": "warning", "message": f"Unexpected error fetching referenced {ref}: {str(e)}"}) + "\n"
logger.warning(f"Unexpected error during reference fetch for {ref}: {e}", exc_info=True)
retrieved_references_or_types.add(ref)
# End loop through individual references
# --- End Reference Fetching Logic ---
# --- Create Final ZIP File ---
# Create Final ZIP File
yield json.dumps({"type": "progress", "message": f"Creating ZIP file {os.path.basename(output_zip)}..."}) + "\n"
files_to_zip = [f for f in os.listdir(temp_dir) if f.endswith('.json')]
if not files_to_zip:
@ -4153,18 +4211,20 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf:
for filename in files_to_zip:
file_path = os.path.join(temp_dir, filename)
if os.path.exists(file_path): zipf.write(file_path, filename)
else: logger.error(f"File {file_path} disappeared before adding to ZIP.")
if os.path.exists(file_path):
zipf.write(file_path, filename)
else:
logger.error(f"File {file_path} disappeared before adding to ZIP.")
yield json.dumps({"type": "success", "message": f"ZIP file created: {os.path.basename(output_zip)} with {len(files_to_zip)} files."}) + "\n"
except Exception as e:
yield json.dumps({"type": "error", "message": f"Failed to create ZIP file: {e}"}) + "\n"
logger.error(f"Error creating ZIP file {output_zip}: {e}", exc_info=True)
# --- Final Completion Message ---
# Final Completion Message
completion_message = (
f"Bundle retrieval finished. Initial bundles: {total_initial_bundles}, "
f"Referenced items fetched: {fetched_individual_references if not fetch_reference_bundles else fetched_type_bundles} "
f"({ 'individual resources' if not fetch_reference_bundles else 'full type bundles' })."
f"({'individual resources' if not fetch_reference_bundles else 'full type bundles'})"
)
yield json.dumps({
"type": "complete",
@ -4178,12 +4238,10 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references
}) + "\n"
except Exception as e:
# Catch errors during setup (like temp dir creation)
yield json.dumps({"type": "error", "message": f"Critical error during retrieval setup: {str(e)}"}) + "\n"
logger.error(f"Unexpected error in retrieve_bundles setup: {e}", exc_info=True)
yield json.dumps({"type": "complete", "message": f"Retrieval failed: {str(e)}", "data": {"total_initial_bundles": 0, "fetched_individual_references": 0, "fetched_type_bundles": 0}}) + "\n"
finally:
# --- Cleanup Temporary Directory ---
if temp_dir and os.path.exists(temp_dir):
try:
shutil.rmtree(temp_dir)

View File

@ -5,7 +5,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css" integrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.2/css/all.min.css" xintegrity="sha512-z3gLpd7yknf1YoNbCzqRKc4qyor8gaKU1qmn+CShxbuBusANI9QpRohGBreCFkKxLhei6S9CQXFEbbKuqLg0DA==" crossorigin="anonymous" referrerpolicy="no-referrer" />
<link href="https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/themes/prism-okaidia.min.css" rel="stylesheet" />
<link rel="icon" type="image/x-icon" href="{{ url_for('static', filename='favicon.ico') }}">
<link rel="stylesheet" href="{{ url_for('static', filename='css/fire-animation.css') }}">
@ -769,9 +769,6 @@
<li class="nav-item">
<a class="nav-link {% if request.path == '/search-and-import' %}active{% endif %}" href="{{ url_for('search_and_import') }}"><i class="fas fa-download me-1"></i> Search and Import</a>
</li>
<!-- <li class="nav-item">
<a class="nav-link {{ 'active' if request.endpoint == 'import_ig' else '' }}" href="{{ url_for('import_ig') }}"><i class="fas fa-download me-1"></i> Import IGs</a>
</li> -->
<li class="nav-item">
<a class="nav-link {{ 'active' if request.endpoint == 'view_igs' else '' }}" href="{{ url_for('view_igs') }}"><i class="fas fa-folder-open me-1"></i> Manage FHIR Packages</a>
</li>
@ -817,7 +814,6 @@
<main class="flex-grow-1">
<div class="container mt-4">
<!-- Flashed Messages Section -->
{% with messages = get_flashed_messages(with_categories=true) %}
{% if messages %}
<div class="mt-3">
@ -852,6 +848,7 @@
<a href="https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/issues/new/choose" class="text-danger text-decoration-none" aria-label="FHIRFLARE support"><i class="fas fa-exclamation-circle me-1"></i> Raise an Issue</a>
</div>
<div class="footer-right">
<a class="nav-link {{ 'active' if request.endpoint == 'flasgger.apidocs' else '' }}" href="{{ url_for('flasgger.apidocs') }}" aria-label="API Documentation"><i class="fas fa-book-open me-1"></i> API Docs</a>
<a href="https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/discussions" target="_blank" rel="noreferrer" aria-label="Project Discussion">Project Discussions</a>
<a href="https://github.com/Sudo-JHare" aria-label="Developer">Developer</a>
<a href="https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/blob/main/LICENSE.md" aria-label="License">License</a>

View File

@ -1,6 +1,6 @@
{% extends "base.html" %}
{# Import form helpers if needed, e.g., for CSRF token #}
{# Import form helpers for CSRF token and field rendering #}
{% from "_form_helpers.html" import render_field %}
{% block content %}
@ -48,11 +48,10 @@
<p class="text-muted">No packages downloaded yet. Use the "Import IG" tab.</p>
{% endif %}
{# --- MOVED: Push Response Area --- #}
{# Push Response Area #}
<div class="mt-4">
<div class="d-flex justify-content-between align-items-center mb-2">
<h4><i class="bi bi-file-earmark-text me-2"></i>Push Report</h4>
{# --- NEW: Report Action Buttons --- #}
<div id="reportActions" style="display: none;">
<button id="copyReportBtn" class="btn btn-sm btn-outline-secondary me-2" title="Copy Report Text">
<i class="bi bi-clipboard"></i> Copy
@ -61,11 +60,9 @@
<i class="bi bi-download"></i> Download
</button>
</div>
{# --- END NEW --- #}
</div>
<div id="pushResponse" class="border p-3 rounded bg-light" style="min-height: 100px;">
<span class="text-muted">Report summary will appear here after pushing...</span>
{# Flash messages can still appear here if needed #}
{% with messages = get_flashed_messages(with_categories=true) %}
{% if messages %}
{% for category, message in messages %}
@ -78,15 +75,13 @@
{% endwith %}
</div>
</div>
{# --- END MOVED --- #}
</div>{# End Left Column #}
{# Right Column: Push IGs Form and Console #}
<div class="col-md-6">
<h2><i class="bi bi-upload me-2"></i>Push IGs to FHIR Server</h2>
<form id="pushIgForm">
{{ form.csrf_token if form else '' }} {# Use form passed from route #}
{{ form.csrf_token if form else '' }}
{# Package Selection #}
<div class="mb-3">
@ -111,21 +106,31 @@
<input type="url" class="form-control" id="fhirServerUrl" name="fhir_server_url" placeholder="e.g., http://localhost:8080/fhir" required>
</div>
{# --- RESTRUCTURED: Auth and Checkboxes --- #}
{# Authentication Section #}
<div class="row g-3 mb-3 align-items-end">
{# Authentication Dropdown & Token Input #}
<div class="col-md-5">
<label for="authType" class="form-label">Authentication</label>
<select class="form-select" id="authType" name="auth_type">
<option value="none" selected>None</option>
<option value="apiKey">Toolkit API Key (Internal)</option>
<option value="bearerToken">Bearer Token</option>
<option value="basic">Basic Authentication</option>
</select>
</div>
<div class="col-md-7" id="authTokenGroup" style="display: none;">
<div class="col-md-7" id="authInputsGroup" style="display: none;">
{# Bearer Token Input #}
<div id="bearerTokenInput" style="display: none;">
<label for="authToken" class="form-label">Bearer Token</label>
<input type="password" class="form-control" id="authToken" name="auth_token" placeholder="Enter Bearer Token">
</div>
{# Basic Auth Inputs #}
<div id="basicAuthInputs" style="display: none;">
<label for="username" class="form-label">Username</label>
<input type="text" class="form-control mb-2" id="username" name="username" placeholder="Enter Basic Auth Username">
<label for="password" class="form-label">Password</label>
<input type="password" class="form-control" id="password" name="password" placeholder="Enter Basic Auth Password">
</div>
</div>
</div>
{# Checkboxes Row #}
@ -154,12 +159,10 @@
<div class="form-check">
<input type="checkbox" class="form-check-input" id="verbose" name="verbose">
<label class="form-check-label" for="verbose">Verbose Log</label>
<small class="form-text text-muted d-block">Show detailed Log.</small>
<small class="form-text text-muted d-block">Show detailed log.</small>
</div>
</div>
</div>
{# --- END RESTRUCTURED --- #}
{# Resource Type Filter #}
<div class="mb-3">
@ -185,7 +188,6 @@
<span class="text-muted">Console output will appear here...</span>
</div>
</div>
</div> {# End Right Column #}
</div> {# End row #}
</div> {# End container-fluid #}
@ -198,13 +200,17 @@ document.addEventListener('DOMContentLoaded', function() {
const pushIgForm = document.getElementById('pushIgForm');
const pushButton = document.getElementById('pushButton');
const liveConsole = document.getElementById('liveConsole');
const responseDiv = document.getElementById('pushResponse'); // Area for final report
const reportActions = document.getElementById('reportActions'); // Container for report buttons
const copyReportBtn = document.getElementById('copyReportBtn'); // New copy button
const downloadReportBtn = document.getElementById('downloadReportBtn'); // New download button
const responseDiv = document.getElementById('pushResponse');
const reportActions = document.getElementById('reportActions');
const copyReportBtn = document.getElementById('copyReportBtn');
const downloadReportBtn = document.getElementById('downloadReportBtn');
const authTypeSelect = document.getElementById('authType');
const authTokenGroup = document.getElementById('authTokenGroup');
const authInputsGroup = document.getElementById('authInputsGroup');
const bearerTokenInput = document.getElementById('bearerTokenInput');
const basicAuthInputs = document.getElementById('basicAuthInputs');
const authTokenInput = document.getElementById('authToken');
const usernameInput = document.getElementById('username');
const passwordInput = document.getElementById('password');
const resourceTypesFilterInput = document.getElementById('resourceTypesFilter');
const skipFilesFilterInput = document.getElementById('skipFilesFilter');
const dryRunCheckbox = document.getElementById('dryRun');
@ -226,7 +232,7 @@ document.addEventListener('DOMContentLoaded', function() {
if (packageSelect && dependencyModeField) {
packageSelect.addEventListener('change', function() {
const packageId = this.value;
dependencyModeField.value = ''; // Clear on change
dependencyModeField.value = '';
if (packageId) {
const [packageName, version] = packageId.split('#');
fetch(`/get-package-metadata?package_name=${packageName}&version=${version}`)
@ -240,24 +246,32 @@ document.addEventListener('DOMContentLoaded', function() {
if (packageSelect.value) { packageSelect.dispatchEvent(new Event('change')); }
}
// Show/Hide Bearer Token Input
if (authTypeSelect && authTokenGroup) {
// Show/Hide Auth Inputs
if (authTypeSelect && authInputsGroup && bearerTokenInput && basicAuthInputs) {
authTypeSelect.addEventListener('change', function() {
authTokenGroup.style.display = this.value === 'bearerToken' ? 'block' : 'none';
authInputsGroup.style.display = (this.value === 'bearerToken' || this.value === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = this.value === 'bearerToken' ? 'block' : 'none';
basicAuthInputs.style.display = this.value === 'basic' ? 'block' : 'none';
// Clear inputs when switching
if (this.value !== 'bearerToken' && authTokenInput) authTokenInput.value = '';
if (this.value !== 'basic' && usernameInput) usernameInput.value = '';
if (this.value !== 'basic' && passwordInput) passwordInput.value = '';
});
authTokenGroup.style.display = authTypeSelect.value === 'bearerToken' ? 'block' : 'none';
authInputsGroup.style.display = (authTypeSelect.value === 'bearerToken' || authTypeSelect.value === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = authTypeSelect.value === 'bearerToken' ? 'block' : 'none';
basicAuthInputs.style.display = authTypeSelect.value === 'basic' ? 'block' : 'none';
} else {
console.error("Auth elements not found.");
}
// --- NEW: Report Action Button Listeners ---
// Report Action Button Listeners
if (copyReportBtn && responseDiv) {
copyReportBtn.addEventListener('click', () => {
const reportAlert = responseDiv.querySelector('.alert'); // Get the alert div inside
const reportText = reportAlert ? reportAlert.innerText || reportAlert.textContent : ''; // Get text content
const reportAlert = responseDiv.querySelector('.alert');
const reportText = reportAlert ? reportAlert.innerText || reportAlert.textContent : '';
if (reportText && navigator.clipboard) {
navigator.clipboard.writeText(reportText)
.then(() => {
// Optional: Provide feedback (e.g., change button text/icon)
const originalIcon = copyReportBtn.innerHTML;
copyReportBtn.innerHTML = '<i class="bi bi-check-lg"></i> Copied!';
setTimeout(() => { copyReportBtn.innerHTML = originalIcon; }, 2000);
@ -292,27 +306,27 @@ document.addEventListener('DOMContentLoaded', function() {
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
URL.revokeObjectURL(url); // Clean up
URL.revokeObjectURL(url);
} else {
alert('No report content found to download.');
}
});
}
// --- END NEW ---
// --- Form Submission ---
// Form Submission
if (pushIgForm) {
pushIgForm.addEventListener('submit', async function(event) {
event.preventDefault();
// Get form values (with null checks for elements)
// Get form values
const packageId = packageSelect ? packageSelect.value : null;
const fhirServerUrl = fhirServerUrlInput ? fhirServerUrlInput.value.trim() : null;
if (!packageId || !fhirServerUrl) { alert('Please select package and enter FHIR Server URL.'); return; }
const [packageName, version] = packageId.split('#');
const auth_type = authTypeSelect ? authTypeSelect.value : 'none';
const auth_token = (auth_type === 'bearerToken' && authTokenInput) ? authTokenInput.value : null;
const username = (auth_type === 'basic' && usernameInput) ? usernameInput.value.trim() : null;
const password = (auth_type === 'basic' && passwordInput) ? passwordInput.value : null;
const resource_types_filter_raw = resourceTypesFilterInput ? resourceTypesFilterInput.value.trim() : '';
const resource_types_filter = resource_types_filter_raw ? resource_types_filter_raw.split(',').map(s => s.trim()).filter(s => s) : null;
const skip_files_raw = skipFilesFilterInput ? skipFilesFilterInput.value.trim() : '';
@ -322,12 +336,23 @@ document.addEventListener('DOMContentLoaded', function() {
const include_dependencies = includeDependenciesCheckbox ? includeDependenciesCheckbox.checked : true;
const force_upload = forceUploadCheckbox ? forceUploadCheckbox.checked : false;
// UI Updates & API Key
if (pushButton) { pushButton.disabled = true; pushButton.textContent = 'Processing...'; }
if (liveConsole) { liveConsole.innerHTML = `<div>${new Date().toLocaleTimeString()} [INFO] Starting ${dry_run ? 'DRY RUN ' : ''}${force_upload ? 'FORCE ' : ''}push for ${packageName}#${version}...</div>`; }
if (responseDiv) { responseDiv.innerHTML = '<span class="text-muted">Processing...</span>'; } // Clear previous report
if (reportActions) { reportActions.style.display = 'none'; } // Hide report buttons initially
const internalApiKey = {{ api_key | default("") | tojson }}; // Use tojson filter
// Validate Basic Auth inputs
if (auth_type === 'basic') {
if (!username) { alert('Please enter a username for Basic Authentication.'); return; }
if (!password) { alert('Please enter a password for Basic Authentication.'); return; }
}
// UI Updates
if (pushButton) {
pushButton.disabled = true;
pushButton.innerHTML = '<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true"></span> Processing...';
}
if (liveConsole) {
liveConsole.innerHTML = `<div>${new Date().toLocaleTimeString()} [INFO] Starting ${dry_run ? 'DRY RUN ' : ''}${force_upload ? 'FORCE ' : ''}push for ${packageName}#${version}...</div>`;
}
if (responseDiv) { responseDiv.innerHTML = '<span class="text-muted">Processing...</span>'; }
if (reportActions) { reportActions.style.display = 'none'; }
const internalApiKey = {{ api_key | default("") | tojson }};
try {
// API Fetch
@ -335,10 +360,19 @@ document.addEventListener('DOMContentLoaded', function() {
method: 'POST',
headers: { 'Content-Type': 'application/json', 'Accept': 'application/x-ndjson', 'X-CSRFToken': csrfToken, 'X-API-Key': internalApiKey },
body: JSON.stringify({
package_name: packageName, version: version, fhir_server_url: fhirServerUrl,
include_dependencies: include_dependencies, auth_type: auth_type, auth_token: auth_token,
resource_types_filter: resource_types_filter, skip_files: skip_files,
dry_run: dry_run, verbose: isVerboseChecked, force_upload: force_upload
package_name: packageName,
version: version,
fhir_server_url: fhirServerUrl,
include_dependencies: include_dependencies,
auth_type: auth_type,
auth_token: auth_token,
username: username,
password: password,
resource_types_filter: resource_types_filter,
skip_files: skip_files,
dry_run: dry_run,
verbose: isVerboseChecked,
force_upload: force_upload
})
});
@ -361,25 +395,28 @@ document.addEventListener('DOMContentLoaded', function() {
const data = JSON.parse(line); const timestamp = new Date().toLocaleTimeString();
let messageClass = 'text-light'; let prefix = '[INFO]'; let shouldDisplay = false;
switch (data.type) { // Determine if message should display based on verbose
switch (data.type) {
case 'start': case 'error': case 'complete': shouldDisplay = true; break;
case 'success': case 'warning': case 'info': case 'progress': if (isVerboseChecked) { shouldDisplay = true; } break;
default: if (isVerboseChecked) { shouldDisplay = true; console.warn("Unknown type:", data.type); prefix = '[UNKNOWN]'; } break;
}
if (shouldDisplay && liveConsole) { // Set prefix/class and append to console
if (shouldDisplay && liveConsole) {
if(data.type==='error'){prefix='[ERROR]';messageClass='text-danger';}
else if(data.type==='complete'){const s=data.data?.status||'info';if(s==='success'){prefix='[SUCCESS]';messageClass='text-success';}else if(s==='partial'){prefix='[PARTIAL]';messageClass='text-warning';}else{prefix='[ERROR]';messageClass='text-danger';}}
else if(data.type==='start'){prefix='[START]';messageClass='text-info';}
else if(data.type==='success'){prefix='[SUCCESS]';messageClass='text-success';}else if(data.type==='warning'){prefix='[WARNING]';messageClass='text-warning';}else if(data.type==='info'){prefix='[INFO]';messageClass='text-info';}else{prefix='[PROGRESS]';messageClass='text-light';}
else if(data.type==='success'){prefix='[SUCCESS]';messageClass='text-success';}
else if(data.type==='warning'){prefix='[WARNING]';messageClass='text-warning';}
else if(data.type==='info'){prefix='[INFO]';messageClass='text-info';}
else{prefix='[PROGRESS]';messageClass='text-light';}
const messageDiv = document.createElement('div'); messageDiv.className = messageClass;
const messageText = (data.type === 'complete' && data.data) ? data.data.message : data.message;
messageDiv.textContent = `${timestamp} ${prefix} ${sanitizeText(messageText) || 'Empty message.'}`;
messageDiv.innerHTML = `${timestamp} ${prefix} ${sanitizeText(messageText) || 'Empty message.'}`;
liveConsole.appendChild(messageDiv); liveConsole.scrollTop = liveConsole.scrollHeight;
}
if (data.type === 'complete' && responseDiv) { // Update final summary box
if (data.type === 'complete' && responseDiv) {
const summaryData = data.data || {}; let alertClass = 'alert-info'; let statusText = 'Info'; let pushedPkgs = 'None'; let failHtml = ''; let skipHtml = '';
const isDryRun = summaryData.dry_run || false; const isForceUpload = summaryData.force_upload || false;
const typeFilterUsed = summaryData.resource_types_filter ? summaryData.resource_types_filter.join(', ') : 'All';
@ -391,28 +428,74 @@ document.addEventListener('DOMContentLoaded', function() {
if (summaryData.skipped_details?.length > 0) { skipHtml = '<hr><strong>Skipped:</strong><ul class="list-unstyled" style="font-size: 0.9em; max-height: 150px; overflow-y: auto; padding-left: 1em;">'; summaryData.skipped_details.forEach(s => {skipHtml += `<li><strong>${sanitizeText(s.resource)}:</strong> ${sanitizeText(s.reason)}</li>`;}); skipHtml += '</ul>';}
responseDiv.innerHTML = `<div class="alert ${alertClass} mt-0"><strong>${isDryRun?'[DRY RUN] ':''}${isForceUpload?'[FORCE] ':''}${statusText}:</strong> ${sanitizeText(summaryData.message)||'Complete.'}<hr><strong>Target:</strong> ${sanitizeText(summaryData.target_server)}<br><strong>Package:</strong> ${sanitizeText(summaryData.package_name)}#${sanitizeText(summaryData.version)}<br><strong>Config:</strong> Deps=${summaryData.included_dependencies?'Yes':'No'}, Types=${sanitizeText(typeFilterUsed)}, SkipFiles=${sanitizeText(fileFilterUsed)}, DryRun=${isDryRun?'Yes':'No'}, Force=${isForceUpload?'Yes':'No'}, Verbose=${isVerboseChecked?'Yes':'No'}<br><strong>Stats:</strong> Attempt=${sanitizeText(summaryData.resources_attempted)}, Success=${sanitizeText(summaryData.success_count)}, Fail=${sanitizeText(summaryData.failure_count)}, Skip=${sanitizeText(summaryData.skipped_count)}<br><strong>Pushed Pkgs:</strong><br><div style="padding-left:15px;">${pushedPkgs}</div>${failHtml}${skipHtml}</div>`;
if (reportActions) { reportActions.style.display = 'block'; } // Show report buttons
if (reportActions) { reportActions.style.display = 'block'; }
}
} catch (parseError) {
console.error('Stream parse error:', parseError);
if (liveConsole) {
const errDiv = document.createElement('div');
errDiv.className = 'text-danger';
errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] Stream parse error: ${sanitizeText(parseError.message)}`;
liveConsole.appendChild(errDiv);
liveConsole.scrollTop = liveConsole.scrollHeight;
}
}
}
}
} catch (parseError) { /* (Handle JSON parse errors) */ console.error('Stream parse error:', parseError); if(liveConsole){/*...add error to console...*/} }
} // end for loop
} // end while loop
// Process Final Buffer (if any)
// Process Final Buffer
if (buffer.trim()) {
try { /* (Parsing logic for final buffer, similar to above) */ }
catch (parseError) { /* (Handle final buffer parse error) */ }
try {
const data = JSON.parse(buffer.trim());
if (data.type === 'complete' && responseDiv) {
// Same summary rendering as above
const summaryData = data.data || {};
let alertClass = 'alert-info'; let statusText = 'Info'; let pushedPkgs = 'None'; let failHtml = ''; let skipHtml = '';
const isDryRun = summaryData.dry_run || false; const isForceUpload = summaryData.force_upload || false;
const typeFilterUsed = summaryData.resource_types_filter ? summaryData.resource_types_filter.join(', ') : 'All';
const fileFilterUsed = summaryData.skip_files_filter ? summaryData.skip_files_filter.join(', ') : 'None';
if (summaryData.pushed_packages_summary?.length > 0) { pushedPkgs = summaryData.pushed_packages_summary.map(p => `${sanitizeText(p.id)} (${sanitizeText(p.resource_count)} resources)`).join('<br>'); }
if (summaryData.status === 'success') { alertClass = 'alert-success'; statusText = 'Success';} else if (summaryData.status === 'partial') { alertClass = 'alert-warning'; statusText = 'Partial Success'; } else { alertClass = 'alert-danger'; statusText = 'Error'; }
if (summaryData.failed_details?.length > 0) { failHtml = '<hr><strong>Failures:</strong><ul class="list-unstyled" style="font-size: 0.9em; max-height: 150px; overflow-y: auto; padding-left: 1em;">'; summaryData.failed_details.forEach(f => {failHtml += `<li><strong>${sanitizeText(f.resource)}:</strong> ${sanitizeText(f.error)}</li>`;}); failHtml += '</ul>';}
if (summaryData.skipped_details?.length > 0) { skipHtml = '<hr><strong>Skipped:</strong><ul class="list-unstyled" style="font-size: 0.9em; max-height: 150px; overflow-y: auto; padding-left: 1em;">'; summaryData.skipped_details.forEach(s => {skipHtml += `<li><strong>${sanitizeText(s.resource)}:</strong> ${sanitizeText(s.reason)}</li>`;}); skipHtml += '</ul>';}
responseDiv.innerHTML = `<div class="alert ${alertClass} mt-0"><strong>${isDryRun?'[DRY RUN] ':''}${isForceUpload?'[FORCE] ':''}${statusText}:</strong> ${sanitizeText(summaryData.message)||'Complete.'}<hr><strong>Target:</strong> ${sanitizeText(summaryData.target_server)}<br><strong>Package:</strong> ${sanitizeText(summaryData.package_name)}#${sanitizeText(summaryData.version)}<br><strong>Config:</strong> Deps=${summaryData.included_dependencies?'Yes':'No'}, Types=${sanitizeText(typeFilterUsed)}, SkipFiles=${sanitizeText(fileFilterUsed)}, DryRun=${isDryRun?'Yes':'No'}, Force=${isForceUpload?'Yes':'No'}, Verbose=${isVerboseChecked?'Yes':'No'}<br><strong>Stats:</strong> Attempt=${sanitizeText(summaryData.resources_attempted)}, Success=${sanitizeText(summaryData.success_count)}, Fail=${sanitizeText(summaryData.failure_count)}, Skip=${sanitizeText(summaryData.skipped_count)}<br><strong>Pushed Pkgs:</strong><br><div style="padding-left:15px;">${pushedPkgs}</div>${failHtml}${skipHtml}</div>`;
if (reportActions) { reportActions.style.display = 'block'; }
}
} catch (parseError) {
console.error('Final buffer parse error:', parseError);
if (liveConsole) {
const errDiv = document.createElement('div');
errDiv.className = 'text-danger';
errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] Final buffer parse error: ${sanitizeText(parseError.message)}`;
liveConsole.appendChild(errDiv);
liveConsole.scrollTop = liveConsole.scrollHeight;
}
}
}
} catch (error) { // Handle overall fetch/network errors
} catch (error) {
console.error("Push operation failed:", error);
if (liveConsole) { /* ... add error to console ... */ }
if (responseDiv) { responseDiv.innerHTML = `<div class="alert alert-danger mt-3"><strong>Error:</strong> ${sanitizeText(error.message || error)}</div>`; }
if (reportActions) { reportActions.style.display = 'none'; } // Hide buttons on error
} finally { // Re-enable button
if (pushButton) { pushButton.disabled = false; pushButton.textContent = 'Push to FHIR Server'; }
if (liveConsole) {
const errDiv = document.createElement('div');
errDiv.className = 'text-danger';
errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] ${sanitizeText(error.message || error)}`;
liveConsole.appendChild(errDiv);
liveConsole.scrollTop = liveConsole.scrollHeight;
}
}); // End form submit listener
if (responseDiv) {
responseDiv.innerHTML = `<div class="alert alert-danger mt-3"><strong>Error:</strong> ${sanitizeText(error.message || error)}</div>`;
}
if (reportActions) { reportActions.style.display = 'none'; }
} finally {
if (pushButton) {
pushButton.disabled = false;
pushButton.innerHTML = '<i class="bi bi-cloud-upload me-2"></i>Push to FHIR Server';
}
}
});
} else { console.error("Push IG Form element not found."); }
}); // End DOMContentLoaded listener
});
</script>
{% endblock %}
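For reference, the push form's live console is driven by a line-delimited JSON (NDJSON) stream from the /api/push-ig endpoint, as the Accept: application/x-ndjson header and the per-line JSON.parse calls above indicate. The following is a minimal sketch of that consumption pattern, assuming the response body is a readable stream carrying one JSON object per line; the helper and callback names (readNdjsonStream, handleMessage) are illustrative and not taken from the template.

// Minimal sketch only: assumes the backend streams NDJSON (one JSON object per line).
async function readNdjsonStream(response, handleMessage) {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        buffer = lines.pop(); // keep any partial trailing line for the next chunk
        for (const line of lines) {
            if (!line.trim()) continue;
            try {
                handleMessage(JSON.parse(line)); // e.g. { type: 'progress', message: '...' }
            } catch (e) {
                console.error('Stream parse error:', e);
            }
        }
    }
    if (buffer.trim()) handleMessage(JSON.parse(buffer.trim())); // final buffered object (the 'complete' summary)
}

Consuming the stream line by line is what lets the UI append [PROGRESS] and [ERROR] entries as they arrive instead of waiting for the full report.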

View File

@ -8,13 +8,6 @@
<p class="lead mb-4">
Interact with FHIR servers using GET, POST, PUT, or DELETE requests. Toggle between the local HAPI server and a custom server to explore resources or perform searches.
</p>
<!-----------------------------------------------------------------remove the buttons-----------------------------------------------------
<div class="d-grid gap-2 d-sm-flex justify-content-sm-center">
<a href="{{ url_for('index') }}" class="btn btn-primary btn-lg px-4 gap-3">Back to Home</a>
<a href="{{ url_for('validate_sample') }}" class="btn btn-outline-secondary btn-lg px-4">Validate FHIR Sample</a>
<a href="{{ url_for('fhir_ui_operations') }}" class="btn btn-outline-secondary btn-lg px-4">FHIR UI Operations</a>
</div>
-------------------------------------------------------------------------------------------------------------------------------------------->
</div>
</div>
@ -34,6 +27,31 @@
</div>
<small id="fhirServerHelp" class="form-text text-muted">Toggle to use local HAPI (http://localhost:8080/fhir) or enter a custom FHIR server URL.</small>
</div>
<div class="mb-3" id="authSection" style="display: none;">
<label class="form-label">Authentication</label>
<div class="row g-3 align-items-end">
<div class="col-md-5">
<select class="form-select" id="authType" name="auth_type">
<option value="none" selected>None</option>
<option value="bearer">Bearer Token</option>
<option value="basic">Basic Authentication</option>
</select>
</div>
<div class="col-md-7" id="authInputsGroup" style="display: none;">
<div id="bearerTokenInput" style="display: none;">
<label for="bearerToken" class="form-label">Bearer Token</label>
<input type="password" class="form-control" id="bearerToken" name="bearer_token" placeholder="Enter Bearer Token">
</div>
<div id="basicAuthInputs" style="display: none;">
<label for="username" class="form-label">Username</label>
<input type="text" class="form-control mb-2" id="username" name="username" placeholder="Enter Username">
<label for="password" class="form-label">Password</label>
<input type="password" class="form-control" id="password" name="password" placeholder="Enter Password">
</div>
</div>
</div>
<small class="form-text text-muted">Select authentication method for the custom FHIR server.</small>
</div>
<div class="mb-3">
<label for="fhirPath" class="form-label">FHIR Path</label>
<input type="text" class="form-control" id="fhirPath" name="fhir_path" placeholder="e.g., Patient/wang-li" required aria-describedby="fhirPathHelp">
@ -44,13 +62,10 @@
<div class="d-flex gap-2 flex-wrap">
<input type="radio" class="btn-check" name="method" id="get" value="GET" checked>
<label class="btn btn-outline-success" for="get"><span class="badge bg-success">GET</span></label>
<input type="radio" class="btn-check" name="method" id="post" value="POST">
<label class="btn btn-outline-primary" for="post"><span class="badge bg-primary">POST</span></label>
<input type="radio" class="btn-check" name="method" id="put" value="PUT">
<label class="btn btn-outline-warning" for="put"><span class="badge bg-warning text-dark">PUT</span></label>
<input type="radio" class="btn-check" name="method" id="delete" value="DELETE">
<label class="btn btn-outline-danger" for="delete"><span class="badge bg-danger">DELETE</span></label>
</div>
@ -108,68 +123,66 @@ document.addEventListener('DOMContentLoaded', function() {
const copyRequestBodyButton = document.getElementById('copyRequestBody');
const copyResponseHeadersButton = document.getElementById('copyResponseHeaders');
const copyResponseBodyButton = document.getElementById('copyResponseBody');
const authSection = document.getElementById('authSection');
const authTypeSelect = document.getElementById('authType');
const authInputsGroup = document.getElementById('authInputsGroup');
const bearerTokenInput = document.getElementById('bearerToken');
const basicAuthInputs = document.getElementById('basicAuthInputs');
const usernameInput = document.getElementById('username');
const passwordInput = document.getElementById('password');
// Basic check for critical elements
if (!form || !sendButton || !fhirPathInput || !responseCard || !toggleServerButton || !fhirServerUrlInput || !responseStatus || !responseHeaders || !responseBody || !toggleLabel) {
console.error("One or more critical UI elements could not be found. Script execution halted.");
alert("Error initializing UI components. Please check the console.");
return; // Stop script execution
return;
}
console.log("All critical elements checked/found.");
// --- State Variable ---
// Default assumes standalone, will be forced otherwise by appMode check below
let useLocalHapi = true;
// --- Get App Mode from Flask Context ---
// Ensure this variable is correctly passed from Flask using the context_processor
const appMode = '{{ app_mode | default("standalone") | lower }}';
console.log('App Mode Detected:', appMode);
// --- DEFINE HELPER FUNCTIONS ---
// Validates request body, returns null on error, otherwise returns body string or empty string
// --- Helper Functions ---
function validateRequestBody(method, path) {
if (!requestBodyInput || !jsonError) return (method === 'POST' || method === 'PUT') ? '' : undefined;
const bodyValue = requestBodyInput.value.trim();
requestBodyInput.classList.remove('is-invalid'); // Reset validation
requestBodyInput.classList.remove('is-invalid');
jsonError.style.display = 'none';
if (!bodyValue) return ''; // Empty body is valid for POST/PUT
if (!bodyValue) return '';
const isSearch = path && path.endsWith('_search');
const isJson = bodyValue.startsWith('{') || bodyValue.startsWith('[');
const isXml = bodyValue.startsWith('<');
const isForm = !isJson && !isXml;
if (method === 'POST' && isSearch && isForm) { // POST Search with form params
if (method === 'POST' && isSearch && isForm) {
return bodyValue;
} else if (method === 'POST' || method === 'PUT') { // Other POST/PUT expect JSON/XML
} else if (method === 'POST' || method === 'PUT') {
if (isJson) {
try { JSON.parse(bodyValue); return bodyValue; }
catch (e) { jsonError.textContent = `Invalid JSON: ${e.message}`; }
} else if (isXml) {
// Basic XML check is difficult in JS, accept it for now
// Backend or target server will validate fully
return bodyValue;
} else { // Neither JSON nor XML, and not a POST search form
} else {
jsonError.textContent = 'Request body must be valid JSON or XML for PUT/POST (unless using POST _search with form parameters).';
}
requestBodyInput.classList.add('is-invalid');
jsonError.style.display = 'block';
return null; // Indicate validation error
return null;
}
return undefined; // Indicate no body should be sent for GET/DELETE
return undefined;
}
// Cleans path for proxying
function cleanFhirPath(path) {
if (!path) return '';
// Remove optional leading 'r4/' or 'fhir/', then trim slashes
return path.replace(/^(r4\/|fhir\/)*/i, '').replace(/^\/+|\/+$/g, '');
}
// Copies text to clipboard
async function copyToClipboard(text, button) {
if (text === null || text === undefined || !button || !navigator.clipboard) return;
try {
@ -177,25 +190,23 @@ document.addEventListener('DOMContentLoaded', function() {
const originalIcon = button.innerHTML;
button.innerHTML = '<i class="bi bi-check-lg"></i> Copied!';
setTimeout(() => { if (button.isConnected) button.innerHTML = originalIcon; }, 1500);
} catch (err) { console.error('Copy failed:', err); /* Optionally alert user */ }
} catch (err) { console.error('Copy failed:', err); }
}
// Updates the UI elements related to the server toggle button/input
function updateServerToggleUI() {
if (appMode === 'lite') {
// LITE MODE: Force Custom URL, disable toggle
useLocalHapi = false; // Force state
useLocalHapi = false;
toggleServerButton.disabled = true;
toggleServerButton.classList.add('disabled');
toggleServerButton.style.pointerEvents = 'none'; // Make unclickable
toggleServerButton.style.pointerEvents = 'none';
toggleServerButton.setAttribute('aria-disabled', 'true');
toggleServerButton.title = "Local HAPI server is unavailable in Lite mode";
toggleLabel.textContent = 'Use Custom URL'; // Always show this label
fhirServerUrlInput.style.display = 'block'; // Always show input
toggleLabel.textContent = 'Use Custom URL';
fhirServerUrlInput.style.display = 'block';
fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)";
fhirServerUrlInput.required = true; // Make required in lite mode
fhirServerUrlInput.required = true;
authSection.style.display = 'block';
} else {
// STANDALONE MODE: Allow toggle
toggleServerButton.disabled = false;
toggleServerButton.classList.remove('disabled');
toggleServerButton.style.pointerEvents = 'auto';
@ -204,113 +215,179 @@ document.addEventListener('DOMContentLoaded', function() {
toggleLabel.textContent = useLocalHapi ? 'Using Local HAPI' : 'Using Custom URL';
fhirServerUrlInput.style.display = useLocalHapi ? 'none' : 'block';
fhirServerUrlInput.placeholder = "e.g., https://hapi.fhir.org/baseR4";
fhirServerUrlInput.required = !useLocalHapi; // Required only if custom is selected
fhirServerUrlInput.required = !useLocalHapi;
authSection.style.display = useLocalHapi ? 'none' : 'block';
}
fhirServerUrlInput.classList.remove('is-invalid'); // Clear validation state on toggle
fhirServerUrlInput.classList.remove('is-invalid');
updateAuthInputsUI();
console.log(`UI Updated: useLocalHapi=${useLocalHapi}, Button Disabled=${toggleServerButton.disabled}, Input Visible=${fhirServerUrlInput.style.display !== 'none'}, Input Required=${fhirServerUrlInput.required}`);
}
// Toggles the server selection state (only effective in Standalone mode)
function updateAuthInputsUI() {
if (!authTypeSelect || !authInputsGroup || !bearerTokenInput || !basicAuthInputs) return;
const authType = authTypeSelect.value;
authInputsGroup.style.display = (authType === 'bearer' || authType === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = authType === 'bearer' ? 'block' : 'none';
basicAuthInputs.style.display = authType === 'basic' ? 'block' : 'none';
if (authType !== 'bearer' && bearerTokenInput) bearerTokenInput.value = '';
if (authType !== 'basic' && usernameInput) usernameInput.value = '';
if (authType !== 'basic' && passwordInput) passwordInput.value = '';
}
function toggleServer() {
if (appMode === 'lite') {
console.log("Toggle ignored: Lite mode active.");
return; // Do nothing in lite mode
return;
}
useLocalHapi = !useLocalHapi;
if (useLocalHapi && fhirServerUrlInput) {
fhirServerUrlInput.value = ''; // Clear custom URL when switching to local
fhirServerUrlInput.value = '';
}
updateServerToggleUI(); // Update UI based on new state
updateServerToggleUI();
console.log(`Server toggled: Now using ${useLocalHapi ? 'Local HAPI' : 'Custom URL'}`);
}
// Updates visibility of the Request Body textarea based on selected HTTP method
function updateRequestBodyVisibility() {
const selectedMethod = document.querySelector('input[name="method"]:checked')?.value;
if (!requestBodyGroup || !selectedMethod) return;
const showBody = (selectedMethod === 'POST' || selectedMethod === 'PUT');
requestBodyGroup.style.display = showBody ? 'block' : 'none';
if (!showBody) { // Clear body and errors if body not needed
if (!showBody) {
if (requestBodyInput) requestBodyInput.value = '';
if (jsonError) jsonError.style.display = 'none';
if (requestBodyInput) requestBodyInput.classList.remove('is-invalid');
}
}
// --- INITIAL SETUP & MODE CHECK ---
updateServerToggleUI(); // Set initial UI based on detected mode and default state
updateRequestBodyVisibility(); // Set initial visibility based on default method (GET)
// --- Initial Setup ---
updateServerToggleUI();
updateRequestBodyVisibility();
// --- ATTACH EVENT LISTENERS ---
// --- Event Listeners ---
toggleServerButton.addEventListener('click', toggleServer);
if (methodRadios) { methodRadios.forEach(radio => { radio.addEventListener('change', updateRequestBodyVisibility); }); }
if (requestBodyInput && fhirPathInput) { requestBodyInput.addEventListener('input', () => validateRequestBody(document.querySelector('input[name="method"]:checked')?.value, fhirPathInput.value)); }
if (copyRequestBodyButton && requestBodyInput) { copyRequestBodyButton.addEventListener('click', () => copyToClipboard(requestBodyInput.value, copyRequestBodyButton)); }
if (copyResponseHeadersButton && responseHeaders) { copyResponseHeadersButton.addEventListener('click', () => copyToClipboard(responseHeaders.textContent, copyResponseHeadersButton)); }
if (copyResponseBodyButton && responseBody) { copyResponseBodyButton.addEventListener('click', () => copyToClipboard(responseBody.textContent, copyResponseBodyButton)); }
if (authTypeSelect) { authTypeSelect.addEventListener('change', updateAuthInputsUI); }
// --- Send Request Button Listener ---
sendButton.addEventListener('click', async function() {
console.log("Send Request button clicked.");
// --- UI Reset ---
sendButton.disabled = true; sendButton.textContent = 'Sending...';
sendButton.disabled = true;
sendButton.textContent = 'Sending...';
responseCard.style.display = 'none';
responseStatus.textContent = ''; responseHeaders.textContent = ''; responseBody.textContent = '';
responseStatus.className = 'badge'; // Reset badge class
fhirServerUrlInput.classList.remove('is-invalid'); // Reset validation
responseStatus.textContent = '';
responseHeaders.textContent = '';
responseBody.textContent = '';
responseStatus.className = 'badge';
fhirServerUrlInput.classList.remove('is-invalid');
if (requestBodyInput) requestBodyInput.classList.remove('is-invalid');
if (jsonError) jsonError.style.display = 'none';
if (bearerTokenInput) bearerTokenInput.classList.remove('is-invalid');
if (usernameInput) usernameInput.classList.remove('is-invalid');
if (passwordInput) passwordInput.classList.remove('is-invalid');
// --- Get Values ---
const path = fhirPathInput.value.trim();
const method = document.querySelector('input[name="method"]:checked')?.value;
const customUrl = fhirServerUrlInput.value.trim();
let body = undefined;
const authType = authTypeSelect ? authTypeSelect.value : 'none';
const bearerToken = bearerTokenInput ? bearerTokenInput.value.trim() : '';
const username = usernameInput ? usernameInput.value.trim() : '';
const password = passwordInput ? passwordInput.value : '';
// --- Basic Input Validation ---
if (!path) { alert('Please enter a FHIR Path.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; }
if (!method) { alert('Please select a Request Type.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; }
if (!useLocalHapi && !customUrl) { // Custom URL mode needs a URL
alert('Please enter a custom FHIR Server URL.'); fhirServerUrlInput.classList.add('is-invalid'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return;
if (!path) {
alert('Please enter a FHIR Path.');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
if (!useLocalHapi && customUrl) { // Validate custom URL format
if (!method) {
alert('Please select a Request Type.');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
if (!useLocalHapi && !customUrl) {
alert('Please enter a custom FHIR Server URL.');
fhirServerUrlInput.classList.add('is-invalid');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
if (!useLocalHapi && customUrl) {
try { new URL(customUrl); }
catch (_) { alert('Invalid custom FHIR Server URL format.'); fhirServerUrlInput.classList.add('is-invalid'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; }
catch (_) {
alert('Invalid custom FHIR Server URL format.');
fhirServerUrlInput.classList.add('is-invalid');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
}
// --- Validate & Get Body (if needed) ---
// --- Validate Authentication ---
if (!useLocalHapi) {
if (authType === 'bearer' && !bearerToken) {
alert('Please enter a Bearer Token.');
bearerTokenInput.classList.add('is-invalid');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
if (authType === 'basic' && (!username || !password)) {
alert('Please enter both Username and Password for Basic Authentication.');
if (!username) usernameInput.classList.add('is-invalid');
if (!password) passwordInput.classList.add('is-invalid');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
}
// --- Validate & Get Body ---
if (method === 'POST' || method === 'PUT') {
body = validateRequestBody(method, path);
if (body === null) { // null indicates validation error
alert('Request body contains invalid JSON/Format.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return;
if (body === null) {
alert('Request body contains invalid JSON/Format.');
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
return;
}
// If body is empty string, ensure it's treated as such for fetch
if (body === '') body = '';
}
// --- Determine Fetch URL and Headers ---
const cleanedPath = cleanFhirPath(path);
const finalFetchUrl = '/fhir/' + cleanedPath; // Always send to the backend proxy endpoint
const finalFetchUrl = '/fhir/' + cleanedPath;
const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' };
// Determine Content-Type if body exists
if (body !== undefined) {
if (body.trim().startsWith('{')) { headers['Content-Type'] = 'application/fhir+json'; }
else if (body.trim().startsWith('<')) { headers['Content-Type'] = 'application/fhir+xml'; }
else if (method === 'POST' && path.endsWith('_search') && body && !body.trim().startsWith('{') && !body.trim().startsWith('<')) { headers['Content-Type'] = 'application/x-www-form-urlencoded'; }
else if (body) { headers['Content-Type'] = 'application/fhir+json'; } // Default if unknown but present
else if (body) { headers['Content-Type'] = 'application/fhir+json'; }
}
// Add Custom Target Header if needed
if (!useLocalHapi && customUrl) {
headers['X-Target-FHIR-Server'] = customUrl.replace(/\/+$/, ''); // Send custom URL without trailing slash
headers['X-Target-FHIR-Server'] = customUrl.replace(/\/+$/, '');
console.log("Adding header X-Target-FHIR-Server:", headers['X-Target-FHIR-Server']);
if (authType === 'bearer') {
headers['Authorization'] = `Bearer ${bearerToken}`;
console.log("Adding header Authorization: Bearer <truncated>");
} else if (authType === 'basic') {
const credentials = btoa(`${username}:${password}`);
headers['Authorization'] = `Basic ${credentials}`;
console.log("Adding header Authorization: Basic <redacted>");
}
}
// Add CSRF token ONLY if sending to local proxy (for modifying methods)
const csrfTokenInput = form.querySelector('input[name="csrf_token"]');
const csrfToken = csrfTokenInput ? csrfTokenInput.value : null;
// Include DELETE method for CSRF check
if (useLocalHapi && ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method) && csrfToken) {
headers['X-CSRFToken'] = csrfToken;
console.log("CSRF Token added for local request.");
@ -319,7 +396,7 @@ document.addEventListener('DOMContentLoaded', function() {
}
console.log(`Executing Fetch: Method=${method}, URL=${finalFetchUrl}, LocalHAPI=${useLocalHapi}`);
console.log("Request Headers:", headers);
console.log("Request Headers:", { ...headers, Authorization: headers.Authorization ? '<redacted>' : undefined });
if (body !== undefined) console.log("Request Body (first 300 chars):", (body || '').substring(0, 300) + ((body?.length ?? 0) > 300 ? "..." : ""));
// --- Make the Fetch Request ---
@ -327,10 +404,9 @@ document.addEventListener('DOMContentLoaded', function() {
const response = await fetch(finalFetchUrl, {
method: method,
headers: headers,
body: body // Will be undefined for GET/DELETE
body: body
});
// --- Process Response ---
responseCard.style.display = 'block';
responseStatus.textContent = `${response.status} ${response.statusText}`;
responseStatus.className = `badge ${response.ok ? 'bg-success' : 'bg-danger'}`;
@ -343,13 +419,10 @@ document.addEventListener('DOMContentLoaded', function() {
let responseBodyText = await response.text();
let displayBody = responseBodyText;
// Attempt to pretty-print JSON
if (responseContentType.includes('json') && responseBodyText.trim()) {
try { displayBody = JSON.stringify(JSON.parse(responseBodyText), null, 2); }
catch (e) { console.warn("Failed to pretty-print JSON response:", e); /* Show raw text */ }
}
// Attempt to pretty-print XML (basic indentation)
else if (responseContentType.includes('xml') && responseBodyText.trim()) {
catch (e) { console.warn("Failed to pretty-print JSON response:", e); }
} else if (responseContentType.includes('xml') && responseBodyText.trim()) {
try {
let formattedXml = '';
let indent = 0;
@ -357,24 +430,23 @@ document.addEventListener('DOMContentLoaded', function() {
for (let i = 0; i < xmlLines.length; i++) {
const node = xmlLines[i];
if (!node || node.trim().length === 0) continue;
if (node.match(/^<\/\w/)) indent--; // Closing tag
if (node.match(/^<\/\w/)) indent--;
formattedXml += ' '.repeat(Math.max(0, indent)) + node.trim() + '\n';
if (node.match(/^<\w/) && !node.match(/\/>$/) && !node.match(/^<\?/)) indent++; // Opening tag
if (node.match(/^<\w/) && !node.match(/\/>$/) && !node.match(/^<\?/)) indent++;
}
displayBody = formattedXml.trim();
} catch(e) { console.warn("Basic XML formatting failed:", e); /* Show raw text */ }
} catch (e) { console.warn("Basic XML formatting failed:", e); }
}
responseBody.textContent = displayBody;
// Highlight response body if Prism.js is available
if (typeof Prism !== 'undefined') {
responseBody.className = 'border p-2 bg-light'; // Reset base classes
responseBody.className = 'border p-2 bg-light';
if (responseContentType.includes('json')) {
responseBody.classList.add('language-json');
} else if (responseContentType.includes('xml')) {
responseBody.classList.add('language-xml');
} // Add other languages if needed
}
Prism.highlightElement(responseBody);
}
@ -384,7 +456,6 @@ document.addEventListener('DOMContentLoaded', function() {
responseStatus.textContent = `Network Error`;
responseStatus.className = 'badge bg-danger';
responseHeaders.textContent = 'N/A';
// Provide a more informative error message
let errorDetail = `Error: ${error.message}\n\n`;
if (useLocalHapi) {
errorDetail += `Could not connect to the FHIRFLARE proxy at ${finalFetchUrl}. Ensure the toolkit server and the local HAPI FHIR server (at http://localhost:8080/fhir) are running.`;
@ -393,10 +464,10 @@ document.addEventListener('DOMContentLoaded', function() {
}
responseBody.textContent = errorDetail;
} finally {
sendButton.disabled = false; sendButton.textContent = 'Send Request';
sendButton.disabled = false;
sendButton.textContent = 'Send Request';
}
}); // End sendButton listener
}); // End DOMContentLoaded Listener
});
});
</script>
{% endblock %}
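The request above is always sent to the local /fhir proxy; the target server and credentials travel in headers (X-Target-FHIR-Server plus an Authorization header built from the selected auth type). The sketch below condenses that header logic into one helper, assuming the backend proxy forwards X-Target-FHIR-Server and Authorization to the chosen server; the helper name buildProxyHeaders is illustrative, not part of the template.

// Illustrative helper only; mirrors the header-building steps in the send handler above.
function buildProxyHeaders({ useLocalHapi, customUrl, authType, bearerToken, username, password, csrfToken, method }) {
    const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' };
    if (!useLocalHapi && customUrl) {
        headers['X-Target-FHIR-Server'] = customUrl.replace(/\/+$/, ''); // custom target, trailing slashes stripped
        if (authType === 'bearer') {
            headers['Authorization'] = `Bearer ${bearerToken}`;
        } else if (authType === 'basic') {
            headers['Authorization'] = `Basic ${btoa(`${username}:${password}`)}`; // note: btoa only accepts Latin-1 input
        }
    } else if (useLocalHapi && ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method) && csrfToken) {
        headers['X-CSRFToken'] = csrfToken; // CSRF token is only needed for modifying requests to the local proxy
    }
    return headers;
}

Logging the Authorization value as truncated or redacted, as the handler above does, keeps credentials out of the browser console.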

View File

@ -134,11 +134,37 @@
<label class="form-label fw-bold">FHIR Server</label>
<div class="input-group">
<button type="button" class="btn btn-outline-primary" id="toggleServer">
<span id="toggleLabel">Use Local HAPI</span> </button>
<span id="toggleLabel">Use Local HAPI</span>
</button>
<input type="text" class="form-control" id="fhirServerUrl" name="fhir_server_url" placeholder="Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4" style="display: none;" aria-describedby="fhirServerHelp">
</div>
<small id="fhirServerHelp" class="form-text text-muted">Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL.</small>
</div>
<div class="mb-3" id="authSection" style="display: none;">
<label class="form-label fw-bold">Authentication</label>
<div class="row g-3 align-items-end">
<div class="col-md-5">
<select class="form-select" id="authType" name="auth_type">
<option value="none" selected>None</option>
<option value="bearer">Bearer Token</option>
<option value="basic">Basic Authentication</option>
</select>
</div>
<div class="col-md-7" id="authInputsGroup" style="display: none;">
<div id="bearerTokenInput" style="display: none;">
<label for="bearerToken" class="form-label">Bearer Token</label>
<input type="password" class="form-control" id="bearerToken" name="bearer_token" placeholder="Enter Bearer Token">
</div>
<div id="basicAuthInputs" style="display: none;">
<label for="username" class="form-label">Username</label>
<input type="text" class="form-control mb-2" id="username" name="username" placeholder="Enter Username">
<label for="password" class="form-label">Password</label>
<input type="password" class="form-control" id="password" name="password" placeholder="Enter Password">
</div>
</div>
</div>
<small class="form-text text-muted">Select authentication method for the custom FHIR server.</small>
</div>
<button type="button" class="btn btn-primary mb-3" id="fetchMetadata">Fetch Metadata</button>
</form>
@ -338,9 +364,15 @@ document.addEventListener('DOMContentLoaded', () => {
const swaggerUiContainer = document.getElementById('swagger-ui');
const selectedResourceSpan = document.getElementById('selectedResource');
const queryListContainer = document.getElementById('queryList');
// --- Add checks if desired ---
if (!toggleServerButton || !toggleLabel || !fhirServerUrlInput /* || other critical elements */) {
console.error("Crucial elements missing, stopping script."); return;
const authSection = document.getElementById('authSection');
const authTypeSelect = document.getElementById('authType');
const authInputsGroup = document.getElementById('authInputsGroup');
const bearerTokenInput = document.getElementById('bearerToken');
const usernameInput = document.getElementById('username');
const passwordInput = document.getElementById('password');
if (!toggleServerButton || !toggleLabel || !fhirServerUrlInput || !authSection || !authTypeSelect) {
console.error("Crucial elements missing, stopping script.");
return;
}
// --- State Variables ---
@ -355,48 +387,57 @@ document.addEventListener('DOMContentLoaded', () => {
console.log(`App Mode (Operations): ${appMode}`);
// <<< END ADD >>>
// --- Helper Function to Update Toggle Button/Input UI (MODIFY THIS FUNCTION) ---
function updateServerToggleUI() {
// Keep checks for elements
if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) {
console.error("updateServerToggleUI: Required elements missing!");
// --- Helper Functions ---
function updateAuthInputsUI() {
console.log(`[updateAuthInputsUI] Running, authType: ${authTypeSelect.value}`);
if (!authTypeSelect || !authInputsGroup || !bearerTokenInput || !basicAuthInputs) {
console.error("[updateAuthInputsUI] Missing auth elements");
return;
}
console.log(`updateServerToggleUI: appMode=${appMode}, current isUsingLocalHapi=${isUsingLocalHapi}`); // Debug
const authType = authTypeSelect.value;
authInputsGroup.style.display = (authType === 'bearer' || authType === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = authType === 'bearer' ? 'block' : 'none';
basicAuthInputs.style.display = authType === 'basic' ? 'block' : 'none';
if (authType !== 'bearer' && bearerTokenInput) bearerTokenInput.value = '';
if (authType !== 'basic' && usernameInput) usernameInput.value = '';
if (authType !== 'basic' && passwordInput) passwordInput.value = '';
console.log(`[updateAuthInputsUI] authInputsGroup display: ${authInputsGroup.style.display}, bearer: ${bearerTokenInput.style.display}, basic: ${basicAuthInputs.style.display}`);
}
// <<< MODIFY THIS WHOLE IF/ELSE BLOCK >>>
// --- Helper Function to Update Toggle Button/Input UI (MODIFY THIS FUNCTION) ---
function updateServerToggleUI() {
if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton || !authSection) {
console.error("[updateServerToggleUI] Required elements missing!");
return;
}
console.log(`[updateServerToggleUI] appMode=${appMode}, isUsingLocalHapi=${isUsingLocalHapi}`);
if (appMode === 'lite') {
console.log("-> Applying Lite mode UI settings.");
isUsingLocalHapi = false; // Force state
toggleServerButton.disabled = true; // Set disabled attribute
toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class
// --- ADD !important to pointerEvents ---
console.log("[updateServerToggleUI] Applying Lite mode UI settings");
isUsingLocalHapi = false;
toggleServerButton.disabled = true;
toggleServerButton.classList.add('disabled');
toggleServerButton.style.pointerEvents = 'none !important';
// --- END ADD ---
toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility
toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip
toggleLabel.textContent = 'Use Custom URL'; // Set label text
fhirServerUrlInput.style.display = 'block'; // Show custom URL input
toggleServerButton.setAttribute('aria-disabled', 'true');
toggleServerButton.title = "Local HAPI is not available in Lite mode";
toggleLabel.textContent = 'Use Custom URL';
fhirServerUrlInput.style.display = 'block';
fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)";
authSection.style.display = 'block';
} else {
// Standalone mode
console.log("-> Applying Standalone mode UI settings.");
toggleServerButton.disabled = false; // Ensure enabled
toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class
// --- Ensure pointerEvents is auto in standalone ---
console.log("[updateServerToggleUI] Applying Standalone mode UI settings");
toggleServerButton.disabled = false;
toggleServerButton.classList.remove('disabled');
toggleServerButton.style.pointerEvents = 'auto';
// --- END ---
toggleServerButton.removeAttribute('aria-disabled'); // Accessibility
toggleServerButton.title = ""; // Clear tooltip
// Set text/display based on current standalone state
toggleServerButton.removeAttribute('aria-disabled');
toggleServerButton.title = "";
toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL';
fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block';
fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4";
authSection.style.display = isUsingLocalHapi ? 'none' : 'block';
}
fhirServerUrlInput.classList.remove('is-invalid'); // Clear validation state
console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events
// <<< END MODIFICATION >>>
fhirServerUrlInput.classList.remove('is-invalid');
updateAuthInputsUI();
console.log(`[updateServerToggleUI] Finished. Button disabled: ${toggleServerButton.disabled}, authSection display: ${authSection.style.display}`);
}
// <<< REFINED fetchOperationDefinition >>>
@ -482,47 +523,47 @@ document.addEventListener('DOMContentLoaded', () => {
}
// <<< END REFINED fetchOperationDefinition (v3) >>>
function updateServerToggleUI() {
// Keep checks for elements
if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) {
console.error("updateServerToggleUI: Required elements missing!");
return;
}
console.log(`updateServerToggleUI: appMode=${appMode}, current isUsingLocalHapi=${isUsingLocalHapi}`); // Debug
// function updateServerToggleUI() {
// // Keep checks for elements
// if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) {
// console.error("updateServerToggleUI: Required elements missing!");
// return;
// }
// console.log(`updateServerToggleUI: appMode=${appMode}, current isUsingLocalHapi=${isUsingLocalHapi}`); // Debug
if (appMode === 'lite') {
console.log("-> Applying Lite mode UI settings.");
isUsingLocalHapi = false; // Force state
toggleServerButton.disabled = true; // Set disabled attribute
toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class
// --- ADD !important to pointerEvents ---
toggleServerButton.style.pointerEvents = 'none !important';
// --- END ADD ---
toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility
toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip
toggleLabel.textContent = 'Use Custom URL'; // Set label text
fhirServerUrlInput.style.display = 'block'; // Show custom URL input
fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)";
} else {
// Standalone mode
console.log("-> Applying Standalone mode UI settings.");
toggleServerButton.disabled = false; // Ensure enabled
toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class
// --- Ensure pointerEvents is auto in standalone ---
toggleServerButton.style.pointerEvents = 'auto';
// --- END ---
toggleServerButton.removeAttribute('aria-disabled'); // Accessibility
toggleServerButton.title = ""; // Clear tooltip
// if (appMode === 'lite') {
// console.log("-> Applying Lite mode UI settings.");
// isUsingLocalHapi = false; // Force state
// toggleServerButton.disabled = true; // Set disabled attribute
// toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class
// // --- ADD !important to pointerEvents ---
// toggleServerButton.style.pointerEvents = 'none !important';
// // --- END ADD ---
// toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility
// toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip
// toggleLabel.textContent = 'Use Custom URL'; // Set label text
// fhirServerUrlInput.style.display = 'block'; // Show custom URL input
// fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)";
// } else {
// // Standalone mode
// console.log("-> Applying Standalone mode UI settings.");
// toggleServerButton.disabled = false; // Ensure enabled
// toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class
// // --- Ensure pointerEvents is auto in standalone ---
// toggleServerButton.style.pointerEvents = 'auto';
// // --- END ---
// toggleServerButton.removeAttribute('aria-disabled'); // Accessibility
// toggleServerButton.title = ""; // Clear tooltip
// Set text/display based on current standalone state
toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL';
fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block';
fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4";
}
// Clear potential validation errors regardless of mode
if(fhirServerUrlInput) fhirServerUrlInput.classList.remove('is-invalid'); // Add check for element existence
console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events
}
// // Set text/display based on current standalone state
// toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL';
// fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block';
// fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4";
// }
// // Clear potential validation errors regardless of mode
// if(fhirServerUrlInput) fhirServerUrlInput.classList.remove('is-invalid'); // Add check for element existence
// console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events
// }
// --- Server Toggle Functionality (REVISED - simplified) ---
function toggleServerSelection() {
@ -1271,153 +1312,278 @@ document.addEventListener('DOMContentLoaded', () => {
if (executeButton && executeWrapper && respStatusDiv && reqUrlOutput && curlOutput && respFormatSelect && copyRespButton && downloadRespButton && respOutputCode && respNarrativeDiv && respOutputPre) {
executeButton.addEventListener('click', async () => {
console.log("[LOG 1] Execute button clicked. Starting listener...");
// --- Reset UI and Disable Button ---
executeButton.disabled = true; executeButton.textContent = 'Executing...';
executeButton.disabled = true;
executeButton.textContent = 'Executing...';
executeWrapper.style.display = 'block';
// ... (reset UI elements) ...
if(reqUrlOutput) reqUrlOutput.textContent = 'Building request...'; if(curlOutput) curlOutput.textContent = 'Building request...'; if(respOutputCode) respOutputCode.textContent = ''; if(respNarrativeDiv) respNarrativeDiv.innerHTML = ''; respNarrativeDiv.style.display = 'none'; if(respOutputPre) respOutputPre.style.display = 'block'; if(respStatusDiv) respStatusDiv.textContent = 'Executing request...'; respStatusDiv.style.color = '#6c757d'; if(respFormatSelect) respFormatSelect.style.display = 'none'; respFormatSelect.value = 'json'; if(copyRespButton) copyRespButton.style.display = 'none'; if(downloadRespButton) downloadRespButton.style.display = 'none';
if (reqUrlOutput) reqUrlOutput.textContent = 'Building request...';
if (curlOutput) curlOutput.textContent = 'Building request...';
if (respOutputCode) respOutputCode.textContent = '';
if (respNarrativeDiv) respNarrativeDiv.innerHTML = '';
respNarrativeDiv.style.display = 'none';
if (respOutputPre) respOutputPre.style.display = 'block';
if (respStatusDiv) respStatusDiv.textContent = 'Executing request...';
respStatusDiv.style.color = '#6c757d';
if (respFormatSelect) respFormatSelect.style.display = 'none';
respFormatSelect.value = 'json';
if (copyRespButton) copyRespButton.style.display = 'none';
if (downloadRespButton) downloadRespButton.style.display = 'none';
// --- Get Query Definition and Base URL ---
const queryDef = JSON.parse(block.dataset.queryData); const method = queryDef.method; const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' }; let body; let path = queryDef.path; const baseUrl = isUsingLocalHapi ? '/fhir' : (fhirServerUrlInput.value.trim().replace(/\/+$/, '') || '/fhir'); let url = `${baseUrl}`; let validParams = true; const missingParams = []; const bodyParamsList = [];
const queryDef = JSON.parse(block.dataset.queryData);
const method = queryDef.method;
const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' };
let body;
let path = queryDef.path;
const baseUrl = isUsingLocalHapi ? '/fhir' : (fhirServerUrlInput.value.trim().replace(/\/+$/, '') || '/fhir');
let url = `${baseUrl}`;
let validParams = true;
const missingParams = [];
const bodyParamsList = [];
console.log("[LOG 2] Starting parameter processing...");
if (!isUsingLocalHapi) {
const authType = authTypeSelect.value;
const bearerToken = bearerTokenInput.value.trim();
const username = usernameInput.value.trim();
const password = passwordInput.value;
if (authType === 'bearer') {
headers['Authorization'] = `Bearer ${bearerToken}`;
console.log("Adding header Authorization: Bearer <truncated>");
} else if (authType === 'basic') {
headers['Authorization'] = `Basic ${btoa(`${username}:${password}`)}`;
console.log("Adding header Authorization: Basic <redacted>");
}
}
// --- Process Path Parameters ---
// <<< FIX: Convert NodeList to Array before forEach >>>
Array.from(block.querySelectorAll('.parameters-section tr[data-param-in="path"]')).forEach(row => {
const paramName = row.dataset.paramName; const input = row.querySelector('input'); const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === 'path'); const required = paramDef?.required; const value = input?.value.trim(); if (input) input.classList.remove('is-invalid');
if (!value && required) { validParams = false; missingParams.push(`${paramName} (path)`); if (input) input.classList.add('is-invalid'); }
else if (value) { path = path.replace(`:${paramName}`, encodeURIComponent(value)); }
else { path = path.replace(`/:${paramName}`, ''); }
const paramName = row.dataset.paramName;
const input = row.querySelector('input');
const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === 'path');
const required = paramDef?.required;
const value = input?.value.trim();
if (input) input.classList.remove('is-invalid');
if (!value && required) {
validParams = false;
missingParams.push(`${paramName} (path)`);
if (input) input.classList.add('is-invalid');
} else if (value) {
path = path.replace(`:${paramName}`, encodeURIComponent(value));
} else {
path = path.replace(`/:${paramName}`, '');
}
});
if (path.includes(':')) { const remainingPlaceholders = path.match(/:(\w+)/g) || []; const requiredRemaining = queryDef.parameters.filter(p => p.in === 'path' && remainingPlaceholders.includes(`:${p.name}`) && p.required); if (requiredRemaining.length > 0) { validParams = false; missingParams.push(...requiredRemaining.map(p => `${p.name} (path)`)); requiredRemaining.forEach(p => { const el = block.querySelector(`.parameters-section tr[data-param-name="${p.name}"][data-param-in="path"] input`); if (el) el.classList.add('is-invalid'); }); } }
if (path.includes(':')) {
const remainingPlaceholders = path.match(/:(\w+)/g) || [];
const requiredRemaining = queryDef.parameters.filter(p => p.in === 'path' && remainingPlaceholders.includes(`:${p.name}`) && p.required);
if (requiredRemaining.length > 0) {
validParams = false;
missingParams.push(...requiredRemaining.map(p => `${p.name} (path)`));
requiredRemaining.forEach(p => {
const el = block.querySelector(`.parameters-section tr[data-param-name="${p.name}"][data-param-in="path"] input`);
if (el) el.classList.add('is-invalid');
});
}
}
url += path.startsWith('/') ? path : `/${path}`;
// --- Process Query and Body Parameters ---
const searchParams = new URLSearchParams();
// <<< FIX: Convert NodeList to Array before forEach >>>
Array.from(block.querySelectorAll('.parameters-section tr[data-param-in="query"], .parameters-section tr[data-param-in="body (Parameters)"]')).forEach(row => {
const paramName = row.dataset.paramName;
const paramIn = row.dataset.paramIn;
const inputElement = row.querySelector('input, select');
const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === paramIn);
const required = paramDef?.required;
let value = '';
if (inputElement) inputElement.classList.remove('is-invalid');
if (inputElement?.type === 'checkbox') {
value = inputElement.checked ? (inputElement.value || 'true') : '';
} else if (inputElement) {
value = inputElement.value.trim();
}
if (value) {
if (paramIn === 'query') {
searchParams.set(paramName, value);
} else {
let paramPart = { name: paramName };
const paramType = paramDef?.type || 'string';
try {
switch (paramType) {
case 'boolean': paramPart.valueBoolean = (value === 'true'); break;
case 'integer': case 'positiveInt': case 'unsignedInt': paramPart.valueInteger = parseInt(value, 10); break;
case 'decimal': paramPart.valueDecimal = parseFloat(value); break;
case 'date': paramPart.valueDate = value; break;
case 'dateTime': paramPart.valueDateTime = value; break;
case 'instant': try { paramPart.valueInstant = new Date(value).toISOString(); } catch (dateError) { console.warn(`Instant parse failed: ${dateError}`); paramPart.valueString = value; } break;
default: paramPart[`value${paramType.charAt(0).toUpperCase() + paramType.slice(1)}`] = value;
}
if (Object.keys(paramPart).length > 1) { bodyParamsList.push(paramPart); }
} catch (typeError) {
console.error(`Error processing body param ${paramName}: ${typeError}`);
bodyParamsList.push({ name: paramName, valueString: value });
}
}
} else if (required) {
validParams = false;
missingParams.push(`${paramName} (${paramIn})`);
if (inputElement) inputElement.classList.add('is-invalid');
}
});
console.log("[LOG 3] Parameter processing finished. Valid:", validParams);
// --- Validation Check ---
if (!validParams) {
const errorMsg = `Error: Missing required parameter(s): ${[...new Set(missingParams)].join(', ')}`;
console.error("[LOG 3a] Validation failed:", errorMsg);
if (respStatusDiv) { respStatusDiv.textContent = errorMsg; respStatusDiv.style.color = 'red'; }
if (reqUrlOutput) reqUrlOutput.textContent = 'Error: Invalid parameters';
if (curlOutput) curlOutput.textContent = 'Error: Invalid parameters';
executeButton.disabled = false;
executeButton.textContent = 'Execute';
return;
}
// --- Finalize URL ---
const queryString = searchParams.toString();
if (queryString) url += (url.includes('?') ? '&' : '?') + queryString;
// --- Construct Request Body ---
console.log("[LOG 4] Constructing request body...");
if (queryDef.requestBody) {
const contentType = reqContentTypeSelect ? reqContentTypeSelect.value : 'application/fhir+json';
headers['Content-Type'] = contentType;
if (bodyParamsList.length > 0) {
if (contentType.includes('json')) {
body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2);
} else if (contentType.includes('xml')) {
try { body = jsonToFhirXml({ resourceType: "Parameters", parameter: bodyParamsList }); }
catch (xmlErr) {
console.error("Params->XML failed:", xmlErr);
body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList });
headers['Content-Type'] = 'application/fhir+json';
alert("Failed to create XML body. Sending JSON.");
}
} else {
console.warn(`Unsupported Content-Type ${contentType} for Parameters body. Sending JSON.`);
body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2);
headers['Content-Type'] = 'application/fhir+json';
}
} else if (reqBodyTextarea && reqBodyTextarea.value.trim() !== '') {
body = reqBodyTextarea.value;
if (contentType === 'application/fhir+xml' && body.trim().startsWith('{')) {
try { body = jsonToFhirXml(body); } catch (e) { console.warn("Textarea JSON->XML failed", e); }
} else if (contentType === 'application/fhir+json' && body.trim().startsWith('<')) {
try { body = xmlToFhirJson(body); } catch (e) { console.warn("Textarea XML->JSON failed", e); }
}
} else {
body = '';
}
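// When built from bodyParamsList, the JSON body is a standard FHIR Parameters resource, roughly:
//   { "resourceType": "Parameters", "parameter": [ { "name": "someParam", "valueString": "abc" } ] }
// (parameter names and values here are illustrative; the value[x] key follows the parameter type).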
// Add CSRF token if needed
console.log("[LOG 4d] Current isUsingLocalHapi state:", isUsingLocalHapi); // Log HAPI state
if (isUsingLocalHapi && ['POST', 'PUT', 'PATCH', 'DELETE'].includes(method)) {
console.log("[LOG 4e] Attempting to add CSRF token for local HAPI modifying request.");
// Find the CSRF token input within the form
const csrfTokenInput = fhirOperationsForm.querySelector('input[name="csrf_token"]');
if (csrfTokenInput && csrfTokenInput.value) {
// Add the token value to the request headers object
headers['X-CSRFToken'] = csrfTokenInput.value;
console.log("[LOG 4f] Added X-CSRFToken header:", headers['X-CSRFToken'] ? 'Yes' : 'No'); // Verify it was added
console.log("[LOG 4f] Added X-CSRFToken header:", headers['X-CSRFToken'] ? 'Yes' : 'No');
} else {
// Log an error if the token input wasn't found or was empty
console.error("[LOG 4g] CSRF token input not found or has no value!");
}
} else {
// Log if CSRF is not being added and why
console.log("[LOG 4h] Not adding CSRF token (not local HAPI or not modifying method). isUsingLocalHapi:", isUsingLocalHapi, "Method:", method);
}
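// The X-CSRFToken header added above mirrors the hidden csrf_token form field; Flask-WTF's CSRF
// protection accepts the token from this header by default, which is why it is only attached to
// state-changing (POST/PUT/PATCH/DELETE) requests sent to the local HAPI proxy.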
} else {
// Ensure body is undefined if queryDef.requestBody is false
body = undefined;
}
console.log("[LOG 5] Request body constructed. Body length:", body?.length ?? 'undefined'); // Check body length
// --- Log Request ---
if (reqUrlOutput) reqUrlOutput.textContent = url;
// <<< Log body right before generating cURL >>>
console.log("[LOG 5a] Body variable before generateCurlCommand:", body ? body.substring(0,100)+'...' : body);
if (curlOutput) curlOutput.textContent = generateCurlCommand(method, url, headers, body);
console.log(`Executing: ${method} ${url}`);
console.log("Headers:", { ...headers, Authorization: headers.Authorization ? '<redacted>' : undefined });
if (body !== undefined) console.log("Body:", (body || '').substring(0, 300) + ((body?.length ?? 0) > 300 ? "..." : ""));
// --- Perform Fetch & Process Response ---
let respData = { json: null, xml: null, narrative: null, text: null, status: 0, statusText: '', contentType: '' };
try {
console.log("[LOG 6] Initiating fetch...");
const resp = await fetch(url, { method, headers, body: (body || undefined) });
console.log("[LOG 7] Fetch completed. Status:", resp.status);
console.log("[LOG 8] Reading response text...");
respData.status = resp.status;
respData.statusText = resp.statusText;
respData.contentType = resp.headers.get('Content-Type') || '';
respData.text = await resp.text();
console.log("[LOG 9] Response text read. Length:", respData.text?.length);
if (respStatusDiv) { respStatusDiv.textContent = `Status: ${resp.status} ${resp.statusText}`; respStatusDiv.style.color = resp.ok ? 'green' : 'red'; }
// Process body (includes OperationOutcome check)
console.log("[LOG 10] Processing response body...");
let isOperationOutcome = false;
let operationOutcomeIssuesHtml = '';
if (respData.text) {
if (respData.contentType.includes('json')) {
try {
respData.json = JSON.parse(respData.text);
try { respData.xml = jsonToFhirXml(respData.json); } catch (xmlConvErr) { respData.xml = `<error>XML conversion failed: ${xmlConvErr.message}</error>`; }
if (respData.json.text?.div) respData.narrative = respData.json.text.div;
if (respData.json.resourceType === 'OperationOutcome') {
isOperationOutcome = true;
operationOutcomeIssuesHtml = formatOperationOutcome(respData.json);
}
} catch (e) {
respData.json = { parsingError: e.message, rawText: respData.text };
respData.xml = `<data>${escapeXml(respData.text)}</data>`;
}
} else if (respData.contentType.includes('xml')) {
respData.xml = respData.text;
try {
respData.json = JSON.parse(xmlToFhirJson(respData.xml));
const p = new DOMParser(), xd = p.parseFromString(respData.xml, "application/xml"), pe = xd.querySelector("parsererror");
if (pe) throw new Error(pe.textContent);
if (xd.documentElement && xd.documentElement.tagName === 'OperationOutcome') {
isOperationOutcome = true;
operationOutcomeIssuesHtml = formatOperationOutcome(respData.json);
}
const nn = xd.querySelector("div[xmlns='http://www.w3.org/1999/xhtml']");
if (nn) respData.narrative = nn.outerHTML;
} catch (e) {
respData.json = { parsingError: e.message, rawText: respData.text };
}
} else {
respData.json = { contentType: respData.contentType, content: respData.text };
respData.xml = `<data contentType="${escapeXml(respData.contentType)}">${escapeXml(respData.text)}</data>`;
}
} else if (resp.ok) {
respData.json = { message: "Success (No Content)" };
respData.xml = jsonToFhirXml({});
} else {
respData.json = { error: `Request failed: ${resp.status}`, detail: resp.statusText };
respData.xml = `<error>Request failed: ${resp.status} ${escapeXml(resp.statusText)}</error>`;
}
console.log("[LOG 13] UI update triggered.");
block.dataset.responseData = JSON.stringify(respData);
if (respFormatSelect) { respFormatSelect.style.display = 'inline-block'; respFormatSelect.disabled = false; }
if (copyRespButton) copyRespButton.style.display = 'inline-block';
if (downloadRespButton) downloadRespButton.style.display = 'inline-block';
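// OperationOutcome is FHIR's standard issue/error resource; when a failed request returns one,
// the branch below swaps the raw JSON/XML view for a formatted list of its issues.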
if (!resp.ok && isOperationOutcome && respNarrativeDiv) {
respNarrativeDiv.innerHTML = operationOutcomeIssuesHtml;
respNarrativeDiv.style.display = 'block';
respOutputPre.style.display = 'none';
if (respFormatSelect) {
const narrativeOpt = respFormatSelect.querySelector('option[value="narrative"]');
if (!narrativeOpt) {
const opt = document.createElement('option');
opt.value = 'narrative';
opt.textContent = 'Formatted Issues';
respFormatSelect.insertBefore(opt, respFormatSelect.firstChild);
} else {
narrativeOpt.textContent = 'Formatted Issues';
narrativeOpt.disabled = false;
}
respFormatSelect.value = 'narrative';
respFormatSelect.dispatchEvent(new Event('change'));
}
} else {
const narrativeOption = respFormatSelect?.querySelector('option[value="narrative"]');
if (narrativeOption) { narrativeOption.textContent = 'Narrative'; narrativeOption.disabled = !respData.narrative; }
if (respFormatSelect) respFormatSelect.value = (respData.narrative && respFormatSelect.value === 'narrative') ? 'narrative' : 'json';
respFormatSelect?.dispatchEvent(new Event('change'));
}
} catch (e) {
console.error('[LOG 14] Error during fetch or response processing:', e);
if (respStatusDiv) { respStatusDiv.textContent = `Error: ${e.message}`; respStatusDiv.style.color = 'red'; }
if (respOutputCode) { respOutputCode.textContent = `Request failed: ${e.message}\nURL: ${url}`; respOutputCode.className = 'language-text'; }
if (respFormatSelect) respFormatSelect.style.display = 'none';
if (copyRespButton) copyRespButton.style.display = 'none';
if (downloadRespButton) downloadRespButton.style.display = 'none';
if (respNarrativeDiv) respNarrativeDiv.style.display = 'none';
if (respOutputPre) respOutputPre.style.display = 'block';
} finally {
console.log("[LOG 15] Executing finally block."); // <<< LOG 15 (Finally Block)
console.log("[LOG 15] Executing finally block.");
executeButton.disabled = false;
executeButton.textContent = 'Execute';
}
});
} // End if (executeButton && ...)
// Response Format Change Listener
@ -1545,60 +1711,92 @@ document.addEventListener('DOMContentLoaded', () => {
// --- Fetch Server Metadata (FIXED Local URL Handling) ---
if (fetchMetadataButton) {
fetchMetadataButton.addEventListener('click', async () => {
// Clear previous results immediately
console.log("[fetchMetadata] Button clicked");
if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = '<span class="text-muted">Fetching...</span>';
if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block';
if (swaggerUiContainer) swaggerUiContainer.style.display = 'none';
fetchedMetadataCache = null;
availableSystemOperations = [];
// Determine Base URL - FIXED
const customUrl = fhirServerUrlInput.value.trim().replace(/\/+$/, '');
const baseUrl = isUsingLocalHapi ? '/fhir' : customUrl;
// Validate custom URL only if not using local HAPI
if (!isUsingLocalHapi && !baseUrl) {
console.error("[fetchMetadata] Custom URL required");
fhirServerUrlInput.classList.add('is-invalid');
alert('Please enter a valid FHIR server URL.');
if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `<span class="text-danger">Error: Custom URL required.</span>`;
return;
}
// Basic format check for custom URL
if (!isUsingLocalHapi) {
try { new URL(baseUrl); } catch (_) {
console.error("[fetchMetadata] Invalid custom URL format");
fhirServerUrlInput.classList.add('is-invalid');
alert('Invalid custom URL format. Please enter a valid URL (e.g., https://example.com/fhir).');
if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `<span class="text-danger">Error: Invalid custom URL format.</span>`;
return;
}
const authType = authTypeSelect.value;
const bearerToken = bearerTokenInput.value.trim();
const username = usernameInput.value.trim();
const password = passwordInput.value;
if (authType === 'bearer' && !bearerToken) {
console.error("[fetchMetadata] Bearer token required");
alert('Please enter a Bearer Token.');
bearerTokenInput.classList.add('is-invalid');
return;
}
if (authType === 'basic' && (!username || !password)) {
console.error("[fetchMetadata] Username and password required");
alert('Please enter both Username and Password for Basic Authentication.');
if (!username) usernameInput.classList.add('is-invalid');
if (!password) passwordInput.classList.add('is-invalid');
return;
}
}
fhirServerUrlInput.classList.remove('is-invalid');
// Construct metadata URL (always add /metadata)
const url = `${baseUrl}/metadata`;
const headers = { 'Accept': 'application/fhir+json' };
if (!isUsingLocalHapi) {
const authType = authTypeSelect.value;
const bearerToken = bearerTokenInput.value.trim();
const username = usernameInput.value.trim();
const password = passwordInput.value;
if (authType === 'bearer') {
headers['Authorization'] = `Bearer ${bearerToken}`;
console.log("[fetchMetadata] Adding header Authorization: Bearer <truncated>");
} else if (authType === 'basic') {
headers['Authorization'] = `Basic ${btoa(`${username}:${password}`)}`;
console.log("[fetchMetadata] Adding header Authorization: Basic <redacted>");
}
}
console.log(`[fetchMetadata] Fetching from: ${url}`);
fetchMetadataButton.disabled = true;
fetchMetadataButton.textContent = 'Fetching...';
try {
const resp = await fetch(url, { method: 'GET', headers });
if (!resp.ok) {
const errText = await resp.text();
throw new Error(`HTTP ${resp.status} ${resp.statusText}: ${errText.substring(0, 500)}`);
}
const data = await resp.json();
console.log('[fetchMetadata] Metadata received:', data);
fetchedMetadataCache = data;
displayMetadataAndResourceButtons(data);
} catch (e) {
console.error('[fetchMetadata] Error:', e);
if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `<span class="text-danger">Error fetching metadata: ${e.message}</span>`;
if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block';
if (swaggerUiContainer) swaggerUiContainer.style.display = 'none';
alert(`Error fetching metadata: ${e.message}`);
fetchedMetadataCache = null;
availableSystemOperations = [];
} finally {
fetchMetadataButton.disabled = false;
fetchMetadataButton.textContent = 'Fetch Metadata';
}
});
} else {

View File

@ -3,7 +3,6 @@
{% block content %}
<div class="px-4 py-5 my-5 text-center">
<!-- <img class="d-block mx-auto mb-4" src="{{ url_for('static', filename='FHIRFLARE.png') }}" alt="FHIRFLARE IG Toolkit" width="192" height="192"> -->
<h1 class="display-5 fw-bold text-body-emphasis">Retrieve & Split Data</h1>
<div class="col-lg-8 mx-auto">
<p class="lead mb-4">
@ -36,31 +35,55 @@
<button type="button" class="btn btn-outline-primary" id="toggleServer">
<span id="toggleLabel">Use Local HAPI</span>
</button>
{# Ensure the input field uses the form object's data #}
{{ form.fhir_server_url(class="form-control", id="fhirServerUrl", style="display: none;", placeholder="e.g., https://fhir.hl7.org.au/aucore/fhir/DEFAULT", **{'aria-describedby': 'fhirServerHelp'}) }}
</div>
<small id="fhirServerHelp" class="form-text text-muted">Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL.</small>
</div>
{# Authentication Section (Shown for Custom URL) #}
<div class="mb-3" id="authSection" style="display: none;">
<label class="form-label fw-bold">Authentication</label>
<div class="row g-3 align-items-end">
<div class="col-md-5">
<select class="form-select" id="authType" name="auth_type">
<option value="none" selected>None</option>
<option value="bearer">Bearer Token</option>
<option value="basic">Basic Authentication</option>
</select>
</div>
<div class="col-md-7" id="authInputsGroup" style="display: none;">
{# Bearer Token Input #}
<div id="bearerTokenInput" style="display: none;">
<label for="bearerToken" class="form-label">Bearer Token</label>
<input type="password" class="form-control" id="bearerToken" name="bearer_token" placeholder="Enter Bearer Token">
</div>
{# Basic Auth Inputs #}
<div id="basicAuthInputs" style="display: none;">
<label for="username" class="form-label">Username</label>
<input type="text" class="form-control mb-2" id="username" name="username" placeholder="Enter Username">
<label for="password" class="form-label">Password</label>
<input type="password" class="form-control" id="password" name="password" placeholder="Enter Password">
</div>
</div>
</div>
<small class="form-text text-muted">Select authentication method for the custom FHIR server.</small>
</div>
{# Checkbox Row #}
<div class="row g-3 mb-3 align-items-center">
<div class="col-md-6">
{# Render Fetch Referenced Resources checkbox using the macro #}
{{ render_field(form.validate_references, id='validate_references_checkbox') }}
</div>
<div class="col-md-6" id="fetchReferenceBundlesGroup" style="display: none;"> {# Initially hidden #}
{# Render NEW Fetch Full Reference Bundles checkbox using the macro #}
<div class="col-md-6" id="fetchReferenceBundlesGroup" style="display: none;">
{{ render_field(form.fetch_reference_bundles, id='fetch_reference_bundles_checkbox') }}
</div>
</div>
{# --- End Checkbox Row --- #}
<button type="button" class="btn btn-primary mb-3" id="fetchMetadata">Fetch Metadata</button>
<div class="banner3 mt-3" id="resourceTypes" style="display: none;">
<h5>Resource Types</h5>
<div class="d-flex flex-wrap gap-2" id="resourceButtons"></div>
</div>
{# Render SubmitField using the form object #}
<button type="submit" class="btn btn-primary btn-lg w-100" id="retrieveButton" name="submit_retrieve">
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true" style="display: none;"></span>
<i class="bi bi-arrow-down-circle me-2"></i>{{ form.submit_retrieve.label.text }}
@ -98,9 +121,7 @@
<label class="form-check-label" for="useRetrievedBundles">Use Retrieved Bundles{% if not session.get('retrieve_params', {}).get('bundle_zip_path', None) %} (No bundles retrieved in this session){% endif %}</label>
</div>
</div>
{{ render_field(form.split_bundle_zip, class="form-control") }}
<button type="submit" class="btn btn-primary btn-lg w-100" id="splitButton" name="submit_split">
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true" style="display: none;"></span>
<i class="bi bi-arrow-right-circle me-2"></i>{{ form.submit_split.label.text }}
@ -144,6 +165,13 @@ document.addEventListener('DOMContentLoaded', () => {
const validateReferencesCheckbox = document.getElementById('validate_references_checkbox');
const fetchReferenceBundlesGroup = document.getElementById('fetchReferenceBundlesGroup');
const fetchReferenceBundlesCheckbox = document.getElementById('fetch_reference_bundles_checkbox');
const authSection = document.getElementById('authSection');
const authTypeSelect = document.getElementById('authType');
const authInputsGroup = document.getElementById('authInputsGroup');
const bearerTokenInput = document.getElementById('bearerToken');
const basicAuthInputs = document.getElementById('basicAuthInputs');
const usernameInput = document.getElementById('username');
const passwordInput = document.getElementById('password');
// --- State & Config Variables ---
const appMode = '{{ app_mode | default("standalone") | lower }}';
@ -169,19 +197,20 @@ document.addEventListener('DOMContentLoaded', () => {
}
function updateServerToggleUI() {
if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton || !authSection) return;
if (appMode === 'lite') {
useLocalHapi = false;
toggleServerButton.disabled = true;
toggleServerButton.classList.add('disabled');
toggleServerButton.style.pointerEvents = 'none';
toggleServerButton.setAttribute('aria-disabled', 'true');
toggleServerButton.title = "Local HAPI server is unavailable in Lite mode";
toggleLabel.textContent = 'Use Custom URL';
fhirServerUrlInput.style.display = 'block';
fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)";
fhirServerUrlInput.required = true;
authSection.style.display = 'block';
} else {
toggleServerButton.disabled = false;
toggleServerButton.classList.remove('disabled');
@ -192,11 +221,24 @@ document.addEventListener('DOMContentLoaded', () => {
fhirServerUrlInput.style.display = useLocalHapi ? 'none' : 'block';
fhirServerUrlInput.placeholder = "e.g., https://hapi.fhir.org/baseR4";
fhirServerUrlInput.required = !useLocalHapi;
authSection.style.display = useLocalHapi ? 'none' : 'block';
}
fhirServerUrlInput.classList.remove('is-invalid');
updateAuthInputsUI();
console.log(`Server toggle UI updated: useLocalHapi=${useLocalHapi}, customUrl=${fhirServerUrlInput.value}`);
}
function updateAuthInputsUI() {
if (!authTypeSelect || !authInputsGroup || !bearerTokenInput || !basicAuthInputs) return;
const authType = authTypeSelect.value;
authInputsGroup.style.display = (authType === 'bearer' || authType === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = authType === 'bearer' ? 'block' : 'none';
basicAuthInputs.style.display = authType === 'basic' ? 'block' : 'none';
if (authType !== 'bearer' && bearerTokenInput) bearerTokenInput.value = '';
if (authType !== 'basic' && usernameInput) usernameInput.value = '';
if (authType !== 'basic' && passwordInput) passwordInput.value = '';
}
function toggleFetchReferenceBundles() {
if (validateReferencesCheckbox && fetchReferenceBundlesGroup) {
fetchReferenceBundlesGroup.style.display = validateReferencesCheckbox.checked ? 'block' : 'none';
@ -223,6 +265,10 @@ document.addEventListener('DOMContentLoaded', () => {
console.log(`Server toggled: useLocalHapi=${useLocalHapi}`);
});
if (authTypeSelect) {
authTypeSelect.addEventListener('change', updateAuthInputsUI);
}
if (validateReferencesCheckbox) {
validateReferencesCheckbox.addEventListener('change', toggleFetchReferenceBundles);
} else {
@ -254,11 +300,20 @@ document.addEventListener('DOMContentLoaded', () => {
fetchMetadataButton.textContent = 'Fetching...';
try {
const fetchUrl = '/fhir/metadata';
const headers = { 'Accept': 'application/fhir+json' };
if (!useLocalHapi && customUrl) {
headers['X-Target-FHIR-Server'] = customUrl;
if (authTypeSelect && authTypeSelect.value !== 'none') {
const authType = authTypeSelect.value;
if (authType === 'bearer' && bearerTokenInput && bearerTokenInput.value) {
headers['Authorization'] = `Bearer ${bearerTokenInput.value}`;
} else if (authType === 'basic' && usernameInput && passwordInput && usernameInput.value && passwordInput.value) {
const credentials = btoa(`${usernameInput.value}:${passwordInput.value}`);
headers['Authorization'] = `Basic ${credentials}`;
}
}
console.log(`Fetching metadata via proxy with X-Target-FHIR-Server: ${customUrl}`);
} else {
console.log("Fetching metadata via proxy for local HAPI server");
}
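// Note: the request always targets the local /fhir proxy; for a custom server the
// X-Target-FHIR-Server header tells the backend which upstream FHIR base URL to forward to.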
@ -322,7 +377,10 @@ document.addEventListener('DOMContentLoaded', () => {
const selectedResources = Array.from(resourceButtonsContainer.querySelectorAll('.btn.active')).map(btn => btn.dataset.resource);
if (!selectedResources.length) {
alert('Please select at least one resource type.');
retrieveButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
const formData = new FormData();
@ -345,17 +403,47 @@ document.addEventListener('DOMContentLoaded', () => {
const currentFhirServerUrl = useLocalHapi ? '/fhir' : fhirServerUrlInput.value.trim();
if (!useLocalHapi && !currentFhirServerUrl) {
alert('Custom FHIR Server URL is required.');
fhirServerUrlInput.classList.add('is-invalid');
retrieveButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
formData.append('fhir_server_url', currentFhirServerUrl);
// Add authentication fields for custom URL
if (!useLocalHapi && authTypeSelect) {
const authType = authTypeSelect.value;
formData.append('auth_type', authType);
if (authType === 'bearer' && bearerTokenInput) {
if (!bearerTokenInput.value) {
alert('Please enter a Bearer Token.');
retrieveButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
formData.append('bearer_token', bearerTokenInput.value);
} else if (authType === 'basic' && usernameInput && passwordInput) {
if (!usernameInput.value || !passwordInput.value) {
alert('Please enter both Username and Password for Basic Authentication.');
retrieveButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
formData.append('username', usernameInput.value);
formData.append('password', passwordInput.value);
}
}
const headers = {
'Accept': 'application/x-ndjson',
'X-CSRFToken': csrfTokenInput ? csrfTokenInput.value : '',
'X-API-Key': apiKey
};
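// 'application/x-ndjson' asks the backend for newline-delimited JSON: one JSON object per line
// (e.g. {"type":"progress","message":"..."}), which the streaming reader below parses line by line.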
console.log(`Submitting retrieve request. Server: ${currentFhirServerUrl}, ValidateRefs: ${formData.get('validate_references')}, FetchRefBundles: ${formData.get('fetch_reference_bundles')}, AuthType: ${formData.get('auth_type')}`);
try {
const response = await fetch('/api/retrieve-bundles', { method: 'POST', headers: headers, body: formData });
@ -381,7 +469,8 @@ document.addEventListener('DOMContentLoaded', () => {
try {
const data = JSON.parse(line);
const timestamp = new Date().toLocaleTimeString();
let messageClass = 'text-info';
let prefix = '[INFO]';
if (data.type === 'error') { prefix = '[ERROR]'; messageClass = 'text-danger'; }
else if (data.type === 'warning') { prefix = '[WARNING]'; messageClass = 'text-warning'; }
else if (data.type === 'success') { prefix = '[SUCCESS]'; messageClass = 'text-success'; }
@ -410,7 +499,7 @@ document.addEventListener('DOMContentLoaded', () => {
} catch (e) { console.error('Stream parse error:', e, 'Line:', line); }
}
}
if (buffer.trim()) { /* Handle final buffer */ }
} catch (e) {
console.error('Retrieval error:', e);
@ -475,7 +564,10 @@ document.addEventListener('DOMContentLoaded', () => {
if (bundleSource === 'upload') {
if (!splitBundleZipInput || splitBundleZipInput.files.length === 0) {
alert('Please select a ZIP file to upload for splitting.');
splitButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
formData.append('split_bundle_zip', splitBundleZipInput.files[0]);
console.log("Splitting uploaded file:", splitBundleZipInput.files[0].name);
@ -483,20 +575,24 @@ document.addEventListener('DOMContentLoaded', () => {
formData.append('split_bundle_zip_path', sessionZipPath);
console.log("Splitting retrieved bundle path:", sessionZipPath);
} else if (bundleSource === 'retrieved' && !sessionZipPath) {
// Check if the retrieveZipPath from the *current* run exists
if (retrieveZipPath) {
formData.append('split_bundle_zip_path', retrieveZipPath);
console.log("Splitting retrieve bundle path from current run:", retrieveZipPath);
} else {
alert('No bundle source available. Please retrieve bundles first or upload a ZIP.');
splitButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
} else {
alert('No bundle source selected.');
splitButton.disabled = false;
if (spinner) spinner.style.display = 'none';
if (icon) icon.style.display = 'inline-block';
return;
}
const headers = {
'Accept': 'application/x-ndjson',
'X-CSRFToken': csrfTokenInput ? csrfTokenInput.value : '',
@ -508,7 +604,8 @@ document.addEventListener('DOMContentLoaded', () => {
if (!response.ok) {
const errorMsg = await response.text();
let detail = errorMsg;
try { const errorJson = JSON.parse(errorMsg); detail = errorJson.message || errorJson.error || JSON.stringify(errorJson); } catch (e) {}
throw new Error(`HTTP ${response.status}: ${detail}`);
}
splitZipPath = response.headers.get('X-Zip-Path');
@ -518,21 +615,30 @@ document.addEventListener('DOMContentLoaded', () => {
const decoder = new TextDecoder();
let buffer = '';
while (true) {
const { done, value } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split('\n');
buffer = lines.pop() || '';
for (const line of lines) {
if (!line.trim()) continue;
try {
const data = JSON.parse(line);
const timestamp = new Date().toLocaleTimeString();
let messageClass = 'text-info';
let prefix = '[INFO]';
if (data.type === 'error') { prefix = '[ERROR]'; messageClass = 'text-danger'; }
else if (data.type === 'success') { prefix = '[SUCCESS]'; messageClass = 'text-success'; }
else if (data.type === 'progress') { prefix = '[PROGRESS]'; messageClass = 'text-light'; }
else if (data.type === 'complete') { prefix = '[COMPLETE]'; messageClass = 'text-primary'; }
const messageDiv = document.createElement('div');
messageDiv.className = messageClass;
messageDiv.innerHTML = `${timestamp} ${prefix} ${sanitizeText(data.message)}`;
splitConsole.appendChild(messageDiv);
splitConsole.scrollTop = splitConsole.scrollHeight;
if (data.type === 'complete' && splitZipPath) {
downloadSplitButton.style.display = 'block';
}
} catch (e) { console.error('Stream parse error:', e, 'Line:', line); }
}
}
@ -555,7 +661,12 @@ document.addEventListener('DOMContentLoaded', () => {
const filename = splitZipPath.split('/').pop() || 'split_resources.zip';
const downloadUrl = `/tmp/${filename}`;
console.log(`Attempting to download split ZIP from Flask endpoint: ${downloadUrl}`);
const link = document.createElement('a');
link.href = downloadUrl;
link.download = 'split_resources.zip';
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
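// Programmatic download: a temporary <a download> element pointing at the Flask-served
// /tmp/<filename> endpoint is clicked and removed, prompting the browser to save the ZIP.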
setTimeout(() => {
const csrfToken = splitForm.querySelector('input[name="csrf_token"]')?.value || '';
fetch('/clear-session', { method: 'POST', headers: { 'X-CSRFToken': csrfToken }})
@ -573,8 +684,7 @@ document.addEventListener('DOMContentLoaded', () => {
// --- Initial Setup Calls ---
updateBundleSourceUI();
updateServerToggleUI();
toggleFetchReferenceBundles();
});
</script>
{% endblock %}

View File

@ -1,5 +1,5 @@
{% extends "base.html" %}
{% from "_form_helpers.html" import render_field %} {# Assuming you have this helper #}
{% from "_form_helpers.html" import render_field %}
{% block content %}
<div class="px-4 py-5 my-5 text-center">
@ -20,7 +20,6 @@
<h4 class="my-0 fw-normal"><i class="bi bi-cloud-upload me-2"></i>Upload Configuration</h4>
</div>
<div class="card-body">
{% if form.errors %}
<div class="alert alert-danger">
<p><strong>Please correct the following errors:</strong></p>
@ -31,57 +30,52 @@
</ul>
</div>
{% endif %}
<form id="uploadTestDataForm" method="POST" enctype="multipart/form-data"> {# Use POST, JS will handle submission #}
{{ form.csrf_token }} {# Render CSRF token #}
<form id="uploadTestDataForm" method="POST" enctype="multipart/form-data">
{{ form.csrf_token }}
{{ render_field(form.fhir_server_url, class="form-control form-control-lg") }}
{# Authentication Row #}
<div class="row g-3 mb-3 align-items-end">
<div class="col-md-5">
{{ render_field(form.auth_type, class="form-select") }}
</div>
<div class="col-md-7" id="authTokenGroup" style="display: none;">
<div class="col-md-7" id="authInputsGroup" style="display: none;">
<div id="bearerTokenInput" style="display: none;">
{{ render_field(form.auth_token, class="form-control") }}
</div>
<div id="basicAuthInputs" style="display: none;">
{{ render_field(form.username, class="form-control mb-2", placeholder="Username") }}
{{ render_field(form.password, class="form-control", placeholder="Password", type="password") }}
</div>
</div>
</div>
{# File Input #}
{{ render_field(form.test_data_file, class="form-control") }}
<small class="form-text text-muted">Select one or more .json, .xml files, or a single .zip file containing them.</small>
{# --- Validation Options --- #}
<div class="row g-3 mt-3 mb-3 align-items-center">
<div class="col-md-6">
{# Render BooleanField using the macro #}
{{ render_field(form.validate_before_upload) }}
<small class="form-text text-muted">It is suggested to not validate against more than 500 files</small>
</div>
<div class="col-md-6" id="validationPackageGroup" style="display: none;">
{# Render SelectField using the macro #}
{{ render_field(form.validation_package_id, class="form-select") }}
</div>
</div>
{# --- END Validation Options --- #}
{# Upload Mode/Error Handling/Conditional Row #}
<div class="row g-3 mb-3">
<div class="col-md-4">
{{ render_field(form.upload_mode, class="form-select") }}
</div>
{# --- Conditional Upload Checkbox --- #}
<div class="col-md-4 d-flex align-items-end"> {# Use flex alignment #}
<div class="col-md-4 d-flex align-items-end">
{{ render_field(form.use_conditional_uploads) }}
</div>
{# --- END --- #}
<div class="col-md-4">
{{ render_field(form.error_handling, class="form-select") }}
</div>
</div>
<button type="submit" class="btn btn-primary btn-lg w-100" id="uploadButton">
<span class="spinner-border spinner-border-sm" role="status" aria-hidden="true" style="display: none;"></span>
<i class="bi bi-arrow-up-circle me-2"></i>Upload and Process
@ -90,31 +84,26 @@
</div>
</div>
{# Results Area #}
<div class="card shadow-sm">
<div class="card-header">
<h4 class="my-0 fw-normal"><i class="bi bi-terminal me-2"></i>Processing Log & Results</h4>
</div>
<div class="card-body">
{# Live Console for Streaming Output #}
<div id="liveConsole" class="border p-3 rounded bg-dark text-light mb-3" style="height: 300px; overflow-y: auto; font-family: monospace; font-size: 0.85rem;">
<span class="text-muted">Processing output will appear here...</span>
</div>
{# Final Summary Report Area #}
<div id="uploadResponse" class="mt-3">
{# Final summary message appears here #}
</div>
</div>
</div>
</div>
</div>
</div>
{% endblock %}
{% block scripts %}
{{ super() }}
<script>
document.addEventListener('DOMContentLoaded', function() {
const form = document.getElementById('uploadTestDataForm');
@ -123,64 +112,74 @@ document.addEventListener('DOMContentLoaded', function() {
const liveConsole = document.getElementById('liveConsole');
const responseDiv = document.getElementById('uploadResponse');
const authTypeSelect = document.getElementById('auth_type');
const authInputsGroup = document.getElementById('authInputsGroup');
const bearerTokenInput = document.getElementById('bearerTokenInput');
const basicAuthInputs = document.getElementById('basicAuthInputs');
const authTokenInput = document.getElementById('auth_token');
const usernameInput = document.getElementById('username');
const passwordInput = document.getElementById('password');
const fileInput = document.getElementById('test_data_file');
const validateCheckbox = document.getElementById('validate_before_upload');
const validationPackageGroup = document.getElementById('validationPackageGroup');
const validationPackageSelect = document.getElementById('validation_package_id');
const uploadModeSelect = document.getElementById('upload_mode');
const conditionalUploadCheckbox = document.getElementById('use_conditional_uploads');
// --- Helper: Sanitize text ---
const sanitizeText = (str) => str ? String(str).replace(/</g, "&lt;").replace(/>/g, "&gt;") : "";
// --- Event Listener: Show/Hide Auth Token ---
if (authTypeSelect && authInputsGroup && bearerTokenInput && basicAuthInputs) {
authTypeSelect.addEventListener('change', function() {
authInputsGroup.style.display = (this.value === 'bearerToken' || this.value === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = this.value === 'bearerToken' ? 'block' : 'none';
basicAuthInputs.style.display = this.value === 'basic' ? 'block' : 'none';
if (this.value !== 'bearerToken' && authTokenInput) authTokenInput.value = '';
if (this.value !== 'basic' && usernameInput) usernameInput.value = '';
if (this.value !== 'basic' && passwordInput) passwordInput.value = '';
});
authInputsGroup.style.display = (authTypeSelect.value === 'bearerToken' || authTypeSelect.value === 'basic') ? 'block' : 'none';
bearerTokenInput.style.display = authTypeSelect.value === 'bearerToken' ? 'block' : 'none';
basicAuthInputs.style.display = authTypeSelect.value === 'basic' ? 'block' : 'none';
} else {
console.error("Auth elements not found.");
}
// --- Event Listener: Show/Hide Validation Package Dropdown ---
if (validateCheckbox && validationPackageGroup) {
const toggleValidationPackage = () => {
validationPackageGroup.style.display = validateCheckbox.checked ? 'block' : 'none';
};
validateCheckbox.addEventListener('change', toggleValidationPackage);
toggleValidationPackage();
} else {
console.error("Validation checkbox or package group not found.");
}
// --- Event Listener: Enable/Disable Conditional Upload Checkbox ---
if (uploadModeSelect && conditionalUploadCheckbox) {
const toggleConditionalCheckbox = () => {
// Enable checkbox only if mode is 'individual'
conditionalUploadCheckbox.disabled = (uploadModeSelect.value !== 'individual');
// Optional: Uncheck if disabled
if (conditionalUploadCheckbox.disabled) {
conditionalUploadCheckbox.checked = false;
}
};
uploadModeSelect.addEventListener('change', toggleConditionalCheckbox);
toggleConditionalCheckbox();
} else {
console.error("Upload mode select or conditional upload checkbox not found.");
}
// --- Event Listener: Form Submission ---
if (form && uploadButton && spinner && liveConsole && responseDiv && fileInput && validateCheckbox && validationPackageSelect && conditionalUploadCheckbox) {
form.addEventListener('submit', async function(event) {
event.preventDefault();
console.log("Form submitted");
// Basic validation
if (!fileInput.files || fileInput.files.length === 0) { alert('Please select at least one file.'); return; }
const fhirServerUrl = document.getElementById('fhir_server_url').value.trim();
if (!fhirServerUrl) { alert('Please enter the Target FHIR Server URL.'); return; }
if (validateCheckbox.checked && !validationPackageSelect.value) { alert('Please select a package for validation.'); return; }
if (authTypeSelect.value === 'basic') {
if (!usernameInput.value.trim()) { alert('Please enter a username for Basic Authentication.'); return; }
if (!passwordInput.value) { alert('Please enter a password for Basic Authentication.'); return; }
}
// UI Updates
uploadButton.disabled = true;
if(spinner) spinner.style.display = 'inline-block';
const uploadIcon = uploadButton.querySelector('i');
@ -188,27 +187,30 @@ document.addEventListener('DOMContentLoaded', function() {
liveConsole.innerHTML = `<div>${new Date().toLocaleTimeString()} [INFO] Starting upload process...</div>`;
responseDiv.innerHTML = '';
// Prepare FormData
const formData = new FormData();
formData.append('fhir_server_url', fhirServerUrl);
formData.append('auth_type', authTypeSelect.value);
if (authTypeSelect.value === 'bearerToken' && authTokenInput) {
formData.append('auth_token', authTokenInput.value);
} else if (authTypeSelect.value === 'basic') {
formData.append('username', usernameInput.value);
formData.append('password', passwordInput.value);
}
formData.append('upload_mode', uploadModeSelect.value);
formData.append('error_handling', document.getElementById('error_handling').value);
formData.append('validate_before_upload', validateCheckbox.checked ? 'true' : 'false');
if (validateCheckbox.checked) { formData.append('validation_package_id', validationPackageSelect.value); }
formData.append('use_conditional_uploads', conditionalUploadCheckbox.checked ? 'true' : 'false');
// Append files
for (let i = 0; i < fileInput.files.length; i++) {
formData.append('test_data_files', fileInput.files[i]);
}
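// Appending every file under the same 'test_data_files' key yields a multipart/form-data request
// with repeated parts, so the backend can read the whole selection as a list of uploaded files.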
// CSRF token and API key
const csrfTokenInput = form.querySelector('input[name="csrf_token"]');
const csrfToken = csrfTokenInput ? csrfTokenInput.value : "";
const internalApiKey = {{ api_key | default("") | tojson }};
try {
// --- API Call ---
const response = await fetch('/api/upload-test-data', {
method: 'POST',
headers: { 'Accept': 'application/x-ndjson', 'X-CSRFToken': csrfToken, 'X-API-Key': internalApiKey },
@ -222,7 +224,6 @@ document.addEventListener('DOMContentLoaded', function() {
}
if (!response.body) { throw new Error("Response body missing."); }
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = '';
@ -238,7 +239,6 @@ document.addEventListener('DOMContentLoaded', function() {
const data = JSON.parse(line); const timestamp = new Date().toLocaleTimeString();
let messageClass = 'text-info'; let prefix = '[INFO]';
if (data.type === 'error') { prefix = '[ERROR]'; messageClass = 'text-danger'; }
else if (data.type === 'success') { prefix = '[SUCCESS]'; messageClass = 'text-success'; }
else if (data.type === 'warning') { prefix = '[WARNING]'; messageClass = 'text-warning'; }
@ -248,14 +248,12 @@ document.addEventListener('DOMContentLoaded', function() {
else if (data.type === 'validation_warning') { prefix = '[VALIDATION]'; messageClass = 'text-warning'; }
else if (data.type === 'validation_error') { prefix = '[VALIDATION]'; messageClass = 'text-danger'; }
const messageDiv = document.createElement('div'); messageDiv.className = messageClass;
let messageText = sanitizeText(data.message) || '...';
if (data.details) { messageText += ` <small>(${sanitizeText(data.details)})</small>`; }
messageDiv.innerHTML = `${timestamp} ${prefix} ${messageText}`;
liveConsole.appendChild(messageDiv); liveConsole.scrollTop = liveConsole.scrollHeight;
if (data.type === 'complete' && data.data) {
const summary = data.data;
let alertClass = 'alert-secondary';
@ -276,16 +274,15 @@ document.addEventListener('DOMContentLoaded', function() {
${summary.errors?.length > 0 ? '<br><strong>Details:</strong><ul>' + summary.errors.map(e => `<li>${sanitizeText(e)}</li>`).join('') + '</ul>' : ''}
</div>`;
}
} catch (parseError) { console.error('Stream parse error:', parseError, 'Line:', line); }
}
}
if (buffer.trim()) {
try {
const data = JSON.parse(buffer.trim());
if (data.type === 'complete' && data.data) { /* ... update summary ... */ }
} catch (parseError) { console.error('Final buffer parse error:', parseError); }
}
} catch (error) {
@ -293,15 +290,13 @@ document.addEventListener('DOMContentLoaded', function() {
responseDiv.innerHTML = `<div class="alert alert-danger mt-3"><strong>Error:</strong> ${sanitizeText(error.message)}</div>`;
const ts = new Date().toLocaleTimeString(); const errDiv = document.createElement('div'); errDiv.className = 'text-danger'; errDiv.textContent = `${ts} [CLIENT_ERROR] ${sanitizeText(error.message)}`; liveConsole.appendChild(errDiv);
} finally {
uploadButton.disabled = false;
if(spinner) spinner.style.display = 'none';
const uploadIcon = uploadButton.querySelector('i');
if (uploadIcon) uploadIcon.style.display = 'inline-block';
}
});
} else { console.error("Could not find all required elements for form submission."); }
});
</script>
{% endblock %}