From fa091db463dd033633aca371d230e9f7d3dbdb11 Mon Sep 17 00:00:00 2001
From: Sudo-JHare
Date: Thu, 22 May 2025 20:07:11 +1000
Subject: [PATCH] QOL changes

Added Basic Auth options for custom server usage across multiple modules.
Added a new footer option. Added OpenAPI docs: integrated Swagger UI for
OpenAPI documentation.

---
 app.py                             | 633 +++++++++++++++++++----
 forms.py                           | 172 +++----
 requirements.txt                   |   3 +-
 services.py                        | 786 ++++++++++++++++-------------
 templates/base.html                |  11 +-
 templates/cp_push_igs.html         | 415 +++++++++------
 templates/fhir_ui.html             | 317 +++++++-----
 templates/fhir_ui_operations.html  | 628 +++++++++++++++--------
 templates/retrieve_split_data.html | 480 +++++++++++-------
 templates/upload_test_data.html    | 171 +++----
 10 files changed, 2297 insertions(+), 1319 deletions(-)

diff --git a/app.py b/app.py
index 998e0bd..c69f1b5 100644
--- a/app.py
+++ b/app.py
@@ -16,6 +16,7 @@ from urllib.parse import urlparse
 from cachetools import TTLCache
 from types import SimpleNamespace
 import tarfile
+import base64
 import json
 import logging
 import requests
@@ -42,6 +43,7 @@ from services import (
 from forms import IgImportForm, ValidationForm, FSHConverterForm, TestDataUploadForm, RetrieveSplitDataForm
 from wtforms import SubmitField
 from package import package_bp
+from flasgger import Swagger, swag_from # Import Flasgger
 from copy import deepcopy
 import tempfile
 from logging.handlers import RotatingFileHandler
@@ -60,11 +62,45 @@ app.config['APP_BASE_URL'] = os.environ.get('APP_BASE_URL', 'http://localhost:50
 app.config['HAPI_FHIR_URL'] = os.environ.get('HAPI_FHIR_URL', 'http://localhost:8080/fhir')
 CONFIG_PATH = '/usr/local/tomcat/conf/application.yaml'
 
+# Basic Swagger configuration
+app.config['SWAGGER'] = {
+    'title': 'FHIRFLARE IG Toolkit API',
+    'uiversion': 3,  # Use Swagger UI 3
+    'version': '1.0.0',
+    'description': 'API documentation for the FHIRFLARE IG Toolkit. This provides access to various FHIR IG management and validation functionalities.',
+    'termsOfService': 'https://example.com/terms',  # Replace with your terms
+    'contact': {
+        'name': 'FHIRFLARE Support',
+        'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/issues',  # Replace with your support URL
+        'email': 'xsannz@gmail.com',  # Replace with your support email
+    },
+    'license': {
+        'name': 'MIT License',  # Or your project's license
+        'url': 'https://github.com/Sudo-JHare/FHIRFLARE-IG-Toolkit/blob/main/LICENSE.md',  # Link to your license
+    },
+    'securityDefinitions': {  # Defines how API key security is handled
+        'ApiKeyAuth': {
+            'type': 'apiKey',
+            'name': 'X-API-Key',  # The header name for the API key
+            'in': 'header',
+            'description': 'API Key for accessing protected endpoints.'
+        }
+    },
+    # 'security': [{'ApiKeyAuth': []}],  # Optional: apply ApiKeyAuth globally to all Flasgger-documented API endpoints by default.
+    # If you set this, individual public endpoints would need 'security': [] in their swag_from spec.
+    # It's often better to define security per-endpoint in @swag_from.
+    'specs_route': '/apidocs/'  # URL for the Swagger UI. This makes url_for('flasgger.apidocs') work.
+}
+swagger = Swagger(app)  # Initialize Flasgger with the app. This registers its routes.
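With the ApiKeyAuth scheme defined above, Flasgger documents key-protected endpoints as
expecting an X-API-Key header, and the interactive Swagger UI is served at the configured
specs_route (/apidocs/). As a quick smoke test of the documented API (a minimal sketch; the
localhost:5000 base URL and the key value are assumptions, not part of this patch):

    import requests

    API_KEY = "your-configured-api-key"  # assumed: must match the app's API_KEY setting

    # Call the import endpoint documented below in this patch, passing the
    # API key in the header named by the securityDefinitions above.
    resp = requests.post(
        "http://localhost:5000/api/import-ig",
        headers={"X-API-Key": API_KEY},
        json={
            "package_name": "hl7.fhir.us.core",
            "version": "6.1.0",
            "dependency_mode": "recursive",
        },
    )
    print(resp.status_code, resp.json())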
+
+
 # Register blueprints immediately after app setup
 app.register_blueprint(services_bp, url_prefix='/api')
 app.register_blueprint(package_bp)
 logging.getLogger(__name__).info("Registered package_bp blueprint")
+
+
 # Set max upload size (e.g., 6 MB, adjust as needed)
 app.config['MAX_CONTENT_LENGTH'] = 6 * 1024 * 1024
@@ -364,6 +400,25 @@ def index():
     return render_template('index.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now())
 
 @app.route('/debug-routes')
+@swag_from({
+    'tags': ['Debugging'],
+    'summary': 'List all application routes.',
+    'description': 'Provides a JSON list of all registered URL rules and their endpoints. Useful for development and debugging.',
+    'responses': {
+        '200': {
+            'description': 'A list of route strings.',
+            'schema': {
+                'type': 'array',
+                'items': {
+                    'type': 'string',
+                    'example': 'Endpoint: my_endpoint, Methods: GET,POST, URL: /my/url'
+                }
+            }
+        }
+    }
+    # No API key needed for this one, so you can add:
+    # 'security': []
+})
 def debug_routes():
     """
     Debug endpoint to list all registered routes and their endpoints.
@@ -375,6 +430,19 @@ def debug_routes():
 
 @app.route('/api/config', methods=['GET'])
 @csrf.exempt
+@swag_from({
+    'tags': ['HAPI Configuration'],
+    'summary': 'Get HAPI FHIR server configuration.',
+    'description': 'Retrieves the current HAPI FHIR server configuration from the application.yaml file.',
+    'security': [{'ApiKeyAuth': []}],  # Requires API Key
+    'responses': {
+        '200': {
+            'description': 'HAPI FHIR configuration.',
+            'schema': {'type': 'object'}  # You can be more specific if you know the YAML structure
+        },
+        '500': {'description': 'Error reading configuration file.'}
+    }
+})
 def get_config():
     try:
         with open(CONFIG_PATH, 'r') as file:
@@ -386,6 +454,30 @@
 
 @app.route('/api/config', methods=['POST'])
 @csrf.exempt
+@swag_from({
+    'tags': ['HAPI Configuration'],
+    'summary': 'Save HAPI FHIR server configuration.',
+    'description': 'Saves the provided HAPI FHIR server configuration to the application.yaml file.',
+    'security': [{'ApiKeyAuth': []}],  # Requires API Key
+    'parameters': [
+        {
+            'name': 'config_payload',  # Changed name to avoid conflict with function arg
+            'in': 'body',
+            'required': True,
+            'description': 'The HAPI FHIR configuration object.',
+            'schema': {
+                'type': 'object',
+                # Add example properties if you know them
+                'example': {'fhir_server': {'base_url': 'http://localhost:8080/fhir'}}
+            }
+        }
+    ],
+    'responses': {
+        '200': {'description': 'Configuration saved successfully.'},
+        '400': {'description': 'Invalid request body.'},
+        '500': {'description': 'Error saving configuration file.'}
+    }
+})
 def save_config():
     try:
         config = request.get_json()
@@ -399,6 +491,16 @@
 
 @app.route('/api/restart-tomcat', methods=['POST'])
 @csrf.exempt
+@swag_from({
+    'tags': ['HAPI Configuration'],
+    'summary': 'Restart the Tomcat server.',
+    'description': 'Attempts to restart the Tomcat server using supervisorctl. Requires appropriate server permissions.',
+    'security': [{'ApiKeyAuth': []}],  # Requires API Key
+    'responses': {
+        '200': {'description': 'Tomcat restart initiated successfully.'},
+        '500': {'description': 'Error restarting Tomcat (e.g., supervisorctl not found or command failed).'}
+    }
+})
 def restart_tomcat():
     try:
         result = subprocess.run(['supervisorctl', 'restart', 'tomcat'], capture_output=True, text=True)
@@ -579,6 +681,16 @@ def perform_cache_refresh_and_log():
 
 @app.route('/api/refresh-cache-task', methods=['POST'])
 @csrf.exempt # Ensure CSRF is handled if needed, or keep exempt
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Refresh FHIR package cache.',
+    'description': 'Triggers an asynchronous background task to clear and refresh the FHIR package cache from configured registries.',
+    'security': [{'ApiKeyAuth': []}],  # Requires API Key
+    'responses': {
+        '202': {'description': 'Cache refresh process started in the background.'},
+        # Consider if other error codes are possible before task starts
+    }
+})
 def refresh_cache_task():
     """API endpoint to trigger the background cache refresh."""
     # Note: Clearing queue here might interfere if multiple users click concurrently.
@@ -598,6 +710,24 @@
 
 # Modify stream_import_logs - Simpler version: relies on thread putting [DONE]
 @app.route('/stream-import-logs')
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Stream package import logs.',
+    'description': 'Provides a Server-Sent Events (SSE) stream of logs generated during package import or cache refresh operations. The client should listen for "data:" events. The stream ends with "data: [DONE]".',
+    'produces': ['text/event-stream'],
+    # No API key usually for SSE streams if they are tied to an existing user session/action
+    # 'security': [],
+    'responses': {
+        '200': {
+            'description': 'An event stream of log messages.',
+            'schema': {
+                'type': 'string',
+                'format': 'text/event-stream',
+                'example': "data: INFO: Starting import...\ndata: INFO: Package downloaded.\ndata: [DONE]\n\n"
+            }
+        }
+    }
+})
 def stream_import_logs():
     logger.debug("SSE connection established to stream-import-logs")
     def generate():
@@ -860,6 +990,23 @@ def view_ig(processed_ig_id):
                            config=current_app.config)
 
 @app.route('/get-example')
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Get a specific example resource from a package.',
+    'description': 'Retrieves the content of an example JSON file from a specified FHIR package and version.',
+    'parameters': [
+        {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Name of the FHIR package.'},
+        {'name': 'version', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Version of the FHIR package.'},
+        {'name': 'filename', 'in': 'query', 'type': 'string', 'required': True, 'description': 'Path to the example file within the package (e.g., "package/Patient-example.json").'},
+        {'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'Whether to include the HTML narrative in the response.'}
+    ],
+    'responses': {
+        '200': {'description': 'The example FHIR resource in JSON format.', 'schema': {'type': 'object'}},
+        '400': {'description': 'Missing required query parameters or invalid file path.'},
+        '404': {'description': 'Package or example file not found.'},
+        '500': {'description': 'Server error during file retrieval or processing.'}
+    }
+})
 def get_example():
     package_name = request.args.get('package_name')
     version = request.args.get('version')
@@ -1010,6 +1157,38 @@ def generate_snapshot(structure_def, core_package_path, local_package_path):
     return structure_def
 
 @app.route('/get-structure')
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Get a StructureDefinition from a package.',
+    'description': 'Retrieves a StructureDefinition, optionally generating or filtering for snapshot/differential views.',
+    'parameters': [
+        {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True},
+        {'name': 'version', 'in': 'query', 'type': 'string', 'required': True},
+        {'name': 'resource_type', 'in': 'query', 'type': 'string', 'required': True, 'description': 'The resource type or profile ID.'},
+        {'name': 'view', 'in': 'query', 'type': 'string', 'required': False, 'default': 'snapshot', 'enum': ['snapshot', 'differential']},
+        {'name': 'include_narrative', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False},
+        {'name': 'raw', 'in': 'query', 'type': 'boolean', 'required': False, 'default': False, 'description': 'If true, returns the raw SD JSON.'},
+        {'name': 'profile_url', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Canonical URL of the profile to retrieve.'}
+    ],
+    'responses': {
+        '200': {
+            'description': 'The StructureDefinition data.',
+            'schema': {
+                'type': 'object',
+                'properties': {
+                    'elements': {'type': 'array', 'items': {'type': 'object'}},
+                    'must_support_paths': {'type': 'array', 'items': {'type': 'string'}},
+                    'search_parameters': {'type': 'array', 'items': {'type': 'object'}},
+                    'fallback_used': {'type': 'boolean'},
+                    'source_package': {'type': 'string'}
+                }
+            }
+        },
+        '400': {'description': 'Missing required parameters.'},
+        '404': {'description': 'StructureDefinition not found.'},
+        '500': {'description': 'Server error.'}
+    }
+})
 def get_structure():
     package_name = request.args.get('package_name')
     version = request.args.get('version')
@@ -1180,6 +1359,33 @@ def get_structure():
 
 @app.route('/get-package-metadata')
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Get metadata for a downloaded package.',
+    'parameters': [
+        {'name': 'package_name', 'in': 'query', 'type': 'string', 'required': True},
+        {'name': 'version', 'in': 'query', 'type': 'string', 'required': True}
+    ],
+    'responses': {
+        '200': {
+            'description': 'Package metadata.',
+            'schema': {
+                'type': 'object',
+                'properties': {
+                    'package_name': {'type': 'string'},
+                    'version': {'type': 'string'},
+                    'dependency_mode': {'type': 'string'},
+                    'imported_dependencies': {'type': 'array', 'items': {'type': 'object'}},
+                    'complies_with_profiles': {'type': 'array', 'items': {'type': 'string'}},
+                    'imposed_profiles': {'type': 'array', 'items': {'type': 'string'}}
+                }
+            }
+        },
+        '400': {'description': 'Missing parameters.'},
+        '404': {'description': 'Metadata not found.'},
+        '500': {'description': 'Server error.'}
+    }
+})
 def get_package_metadata():
     package_name = request.args.get('package_name')
     version = request.args.get('version')
@@ -1203,6 +1409,38 @@
     return jsonify({'error': f'Error retrieving metadata: {str(e)}'}), 500
 
 @app.route('/api/import-ig', methods=['POST'])
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Import a FHIR Implementation Guide via API.',
+    'description': 'Downloads and processes a FHIR IG and its dependencies.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['application/json'],
+    'parameters': [
+        {
+            'name': 'body',
+            'in': 'body',
+            'required': True,
+            'schema': {
+                'type': 'object',
+                'required': ['package_name', 'version'],
+                'properties': {
+                    'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
+                    'version': {'type': 'string', 'example': '6.1.0'},
+                    'dependency_mode': {
+                        'type': 'string', 'enum': ['recursive', 'patch-canonical', 'tree-shaking', 'direct'],
+                        'default': 'recursive'
+                    }
+                }
+            }
+        }
+    ],
+    'responses': {
+        '200': {'description': 'Package imported successfully or with warnings.'},
+        '400': {'description': 'Invalid request (e.g., missing fields, invalid mode).'},
+        '404': {'description': 'Package not found on registry.'},
+        '500': {'description': 'Server error during import.'}
+    }
+})
 def api_import_ig():
     auth_error = check_api_key()
     if auth_error:
@@ -1275,6 +1513,48 @@ def api_import_ig():
     return jsonify({"status": "error", "message": f"Unexpected server error during import: {str(e)}"}), 500
 
 @app.route('/api/push-ig', methods=['POST'])
+@csrf.exempt # Retain CSRF exemption as specified
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Push a FHIR Implementation Guide to a server via API.',
+    'description': 'Uploads resources from a specified FHIR IG (and optionally its dependencies) to a target FHIR server. Returns an NDJSON stream of progress.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['application/json'],
+    'produces': ['application/x-ndjson'],
+    'parameters': [
+        {
+            'name': 'body',
+            'in': 'body',
+            'required': True,
+            'schema': {
+                'type': 'object',
+                'required': ['package_name', 'version', 'fhir_server_url'],
+                'properties': {
+                    'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
+                    'version': {'type': 'string', 'example': '6.1.0'},
+                    'fhir_server_url': {'type': 'string', 'format': 'url', 'example': 'http://localhost:8080/fhir'},
+                    'include_dependencies': {'type': 'boolean', 'default': True},
+                    'auth_type': {'type': 'string', 'enum': ['apiKey', 'bearerToken', 'basic', 'none'], 'default': 'none'},
+                    'auth_token': {'type': 'string', 'description': 'Required if auth_type is bearerToken or basic (for basic, use "Basic <base64 credentials>")'},
+                    'username': {'type': 'string', 'description': 'Required if auth_type is basic'},
+                    'password': {'type': 'string', 'format': 'password', 'description': 'Required if auth_type is basic'},
+                    'resource_types_filter': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of resource types to include.'},
+                    'skip_files': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of specific file paths within packages to skip.'},
+                    'dry_run': {'type': 'boolean', 'default': False},
+                    'verbose': {'type': 'boolean', 'default': False},
+                    'force_upload': {'type': 'boolean', 'default': False, 'description': 'If true, uploads resources even if they appear identical to server versions.'}
+                }
+            }
+        }
+    ],
+    'responses': {
+        '200': {'description': 'NDJSON stream of push progress and results.'},
+        '400': {'description': 'Invalid request parameters.'},
+        '401': {'description': 'Authentication error.'},
+        '404': {'description': 'Package not found locally.'},
+        '500': {'description': 'Server error during push operation setup.'}
+    }
+})
 def api_push_ig():
     auth_error = check_api_key()
     if auth_error: return auth_error
@@ -1287,20 +1567,23 @@ def api_push_ig():
     include_dependencies = data.get('include_dependencies', True)
     auth_type = data.get('auth_type', 'none')
     auth_token = data.get('auth_token')
+    username = data.get('username')  # ADD: Extract username
+    password = data.get('password')  # ADD: Extract password
     resource_types_filter_raw = data.get('resource_types_filter')
     skip_files_raw = data.get('skip_files')
     dry_run = data.get('dry_run', False)
     verbose = data.get('verbose', False)
-    force_upload = data.get('force_upload', False) # <<< ADD: Extract force_upload
+    force_upload = data.get('force_upload', False)
 
-    # --- Input Validation (Assume previous validation is sufficient) ---
+    # --- Input Validation ---
     if not all([package_name, version, fhir_server_url]): return jsonify({"status": "error", "message": "Missing required fields"}), 400
-    # ... (Keep other specific validations as needed) ...
-    valid_auth_types = ['apiKey', 'bearerToken', 'none'];
+    valid_auth_types = ['apiKey', 'bearerToken', 'basic', 'none']  # ADD: 'basic' to valid auth types
     if auth_type not in valid_auth_types: return jsonify({"status": "error", "message": f"Invalid auth_type."}), 400
     if auth_type == 'bearerToken' and not auth_token: return jsonify({"status": "error", "message": "auth_token required for bearerToken."}), 400
+    if auth_type == 'basic' and (not username or not password):  # ADD: Validate Basic Auth inputs
+        return jsonify({"status": "error", "message": "Username and password required for Basic Authentication."}), 400
 
-    # Parse filters (same as before)
+    # Parse filters (unchanged)
     resource_types_filter = None
     if resource_types_filter_raw:
         if isinstance(resource_types_filter_raw, list): resource_types_filter = [s for s in resource_types_filter_raw if isinstance(s, str)]
@@ -1312,25 +1595,27 @@ def api_push_ig():
         elif isinstance(skip_files_raw, str): skip_files = [s.strip().replace('\\', '/') for s in re.split(r'[,\n]', skip_files_raw) if s.strip()]
         else: return jsonify({"status": "error", "message": "Invalid skip_files format."}), 400
 
-    # --- File Path Setup (Same as before) ---
+    # --- File Path Setup (unchanged) ---
     packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
     if not packages_dir: return jsonify({"status": "error", "message": "Server config error: Package dir missing."}), 500
-    # ... (check if package tgz exists - same as before) ...
     tgz_filename = services.construct_tgz_filename(package_name, version)
     tgz_path = os.path.join(packages_dir, tgz_filename)
     if not os.path.exists(tgz_path): return jsonify({"status": "error", "message": f"Package not found locally: {package_name}#{version}"}), 404
+    # ADD: Handle Basic Authentication
+    if auth_type == 'basic':
+        credentials = f"{username}:{password}"
+        auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}"
 
     # --- Streaming Response ---
     def generate_stream_wrapper():
-        yield from services.generate_push_stream(
-            package_name=package_name, version=version, fhir_server_url=fhir_server_url,
-            include_dependencies=include_dependencies, auth_type=auth_type,
-            auth_token=auth_token, resource_types_filter=resource_types_filter,
-            skip_files=skip_files, dry_run=dry_run, verbose=verbose,
-            force_upload=force_upload, # <<< ADD: Pass force_upload
-            packages_dir=packages_dir
-        )
+        yield from services.generate_push_stream(
+            package_name=package_name, version=version, fhir_server_url=fhir_server_url,
+            include_dependencies=include_dependencies, auth_type=auth_type,
+            auth_token=auth_token, resource_types_filter=resource_types_filter,
+            skip_files=skip_files, dry_run=dry_run, verbose=verbose,
+            force_upload=force_upload, packages_dir=packages_dir
+        )
     return Response(generate_stream_wrapper(), mimetype='application/x-ndjson')
 
 # Ensure csrf.exempt(api_push_ig) remains
@@ -1529,6 +1814,34 @@ def proxy_hapi(subpath):
 
 @app.route('/api/load-ig-to-hapi', methods=['POST'])
+@swag_from({
+    'tags': ['HAPI Integration'],
+    'summary': 'Load an IG into the local HAPI FHIR server.',
+    'description': 'Extracts all resources from a specified IG package and PUTs them to the configured HAPI FHIR server.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['application/json'],
+    'parameters': [
+        {
+            'name': 'body',
+            'in': 'body',
+            'required': True,
+            'schema': {
+                'type': 'object',
+                'required': ['package_name', 'version'],
+                'properties': {
+                    'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'},
+                    'version': {'type': 'string', 'example': '6.1.0'}
+                }
+            }
+        }
+    ],
+    'responses': {
+        '200': {'description': 'Package loaded to HAPI successfully.'},
+        '400': {'description': 'Invalid request (e.g., missing package_name/version).'},
+        '404': {'description': 'Package not found locally.'},
+        '500': {'description': 'Error loading IG to HAPI (e.g., HAPI server connection issue, resource upload failure).'}
+    }
+})
 def load_ig_to_hapi():
     data = request.get_json()
     package_name = data.get('package_name')
@@ -1744,41 +2057,58 @@ def upload_test_data():
     return render_template('upload_test_data.html', title="Upload Test Data", form=form, api_key=api_key)
 
+# --- Updated /api/upload-test-data Endpoint ---
 @app.route('/api/upload-test-data', methods=['POST'])
 @csrf.exempt
+@swag_from({
+    'tags': ['Test Data Management'],
+    'summary': 'Upload and process FHIR test data.',
+    'description': 'Handles multipart/form-data uploads of FHIR resources (JSON, XML, or ZIP containing these) for processing and uploading to a target FHIR server. Returns an NDJSON stream of progress.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['multipart/form-data'],
+    'produces': ['application/x-ndjson'],
+    'parameters': [
+        {'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': True, 'format': 'url', 'description': 'Target FHIR server URL.'},
+        {'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearerToken', 'basic'], 'default': 'none'},
+        {'name': 'auth_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearerToken.'},
+        {'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'},
+        {'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'},
+        {'name': 'test_data_files', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'One or more FHIR resource files (JSON, XML) or ZIP archives containing them.'},
+        {'name': 'validate_before_upload', 'in': 'formData', 'type': 'boolean', 'default': False},
+        {'name': 'validation_package_id', 'in': 'formData', 'type': 'string', 'description': 'Package ID (name#version) for validation, if validate_before_upload is true.'},
+        {'name': 'upload_mode', 'in': 'formData', 'type': 'string', 'enum': ['individual', 'transaction'], 'default': 'individual'},
+        {'name': 'use_conditional_uploads', 'in': 'formData', 'type': 'boolean', 'default': True, 'description': 'For individual mode, use conditional logic (GET then PUT/POST).'},
+        {'name': 'error_handling', 'in': 'formData', 'type': 'string', 'enum': ['stop', 'continue'], 'default': 'stop'}
+    ],
+    'responses': {
+        '200': {'description': 'NDJSON stream of upload progress and results.'},
+        '400': {'description': 'Invalid request parameters or file types.'},
+        '401': {'description': 'Authentication error.'},
+        '413': {'description': 'Request entity too large.'},
+        '500': {'description': 'Server error during upload processing.'}
+    }
+})
 def api_upload_test_data():
     """API endpoint to handle test data upload and processing, using custom parser."""
-    auth_error = check_api_key();
+    auth_error = check_api_key()
     if auth_error:
         return auth_error
-    temp_dir = None # Initialize temp_dir to ensure cleanup happens
+    temp_dir = None
     try:
-        # --- Use Custom Form Parser ---
-        # Instantiate the custom parser with the desired limit
         parser = CustomFormDataParser()
-        #parser = CustomFormDataParser(max_form_parts=2000) # Match the class definition or set higher if needed
-
-        # Parse the request using the custom parser
-        # We need the stream, mimetype, content_length, and options from the request
-        # Note: Accessing request.stream consumes it, do this first.
         stream = request.stream
         mimetype = request.mimetype
        content_length = request.content_length
         options = request.mimetype_params
-
-        # The parse method returns (stream, form_dict, files_dict)
-        # stream: A wrapper around the original stream
-        # form_dict: A MultiDict containing non-file form fields
-        # files_dict: A MultiDict containing FileStorage objects for uploaded files
         _, form_data, files_data = parser.parse(stream, mimetype, content_length, options)
         logger.debug(f"Form parsed using CustomFormDataParser. Form fields: {len(form_data)}, Files: {len(files_data)}")
-        # --- END Custom Form Parser Usage ---
-
-        # --- Extract Form Data (using parsed data) ---
+        # --- Extract Form Data ---
         fhir_server_url = form_data.get('fhir_server_url')
         auth_type = form_data.get('auth_type', 'none')
         auth_token = form_data.get('auth_token')
+        username = form_data.get('username')
+        password = form_data.get('password')
         upload_mode = form_data.get('upload_mode', 'individual')
         error_handling = form_data.get('error_handling', 'stop')
         validate_before_upload_str = form_data.get('validate_before_upload', 'false')
@@ -1789,42 +2119,62 @@ def api_upload_test_data():
         logger.debug(f"API Upload Request Params: validate={validate_before_upload}, pkg_id={validation_package_id}, conditional={use_conditional_uploads}")
 
-        # --- Basic Validation (using parsed data) ---
-        if not fhir_server_url or not fhir_server_url.startswith(('http://', 'https://')): return jsonify({"status": "error", "message": "Invalid Target FHIR Server URL."}), 400
-        if auth_type not in ['none', 'bearerToken']: return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
-        if auth_type == 'bearerToken' and not auth_token: return jsonify({"status": "error", "message": "Bearer Token required."}), 400
-        if upload_mode not in ['individual', 'transaction']: return jsonify({"status": "error", "message": "Invalid Upload Mode."}), 400
-        if error_handling not in ['stop', 'continue']: return jsonify({"status": "error", "message": "Invalid Error Handling mode."}), 400
-        if validate_before_upload and not validation_package_id: return jsonify({"status": "error", "message": "Validation Package ID required."}), 400
+        # --- Basic Validation ---
+        if not fhir_server_url or not fhir_server_url.startswith(('http://', 'https://')):
+            return jsonify({"status": "error", "message": "Invalid Target FHIR Server URL."}), 400
+        if auth_type not in ['none', 'bearerToken', 'basic']:
+            return jsonify({"status": "error", "message": "Invalid Authentication Type."}), 400
+        if auth_type == 'bearerToken' and not auth_token:
+            return jsonify({"status": "error", "message": "Bearer Token required."}), 400
+        if auth_type == 'basic' and (not username or not password):
+            return jsonify({"status": "error", "message": "Username and Password required for Basic Authentication."}), 400
+        if upload_mode not in ['individual', 'transaction']:
+            return jsonify({"status": "error", "message": "Invalid Upload Mode."}), 400
+        if error_handling not in ['stop', 'continue']:
+            return jsonify({"status": "error", "message": "Invalid Error Handling mode."}), 400
+        if validate_before_upload and not validation_package_id:
+            return jsonify({"status": "error", "message": "Validation Package ID required."}), 400
 
-        # --- Handle File Uploads (using parsed data) ---
-        # Use files_data obtained from the custom parser
+        # --- Handle File Uploads ---
         uploaded_files = files_data.getlist('test_data_files')
-        if not uploaded_files or all(f.filename == '' for f in uploaded_files): return jsonify({"status": "error", "message": "No files selected."}), 400
+        if not uploaded_files or all(f.filename == '' for f in uploaded_files):
+            return jsonify({"status": "error", "message": "No files selected."}), 400
 
         temp_dir = tempfile.mkdtemp(prefix='fhirflare_upload_')
         saved_file_paths = []
         allowed_extensions = {'.json', '.xml', '.zip'}
         try:
-            for file_storage in uploaded_files: # Iterate through FileStorage objects
+            for file_storage in uploaded_files:
                 if file_storage and file_storage.filename:
                     filename = secure_filename(file_storage.filename)
                     file_ext = os.path.splitext(filename)[1].lower()
-                    if file_ext not in allowed_extensions: raise ValueError(f"Invalid file type: '{filename}'. Only JSON, XML, ZIP allowed.")
+                    if file_ext not in allowed_extensions:
+                        raise ValueError(f"Invalid file type: '{filename}'. Only JSON, XML, ZIP allowed.")
                     save_path = os.path.join(temp_dir, filename)
-                    file_storage.save(save_path) # Use the save method of FileStorage
+                    file_storage.save(save_path)
                    saved_file_paths.append(save_path)
-            if not saved_file_paths: raise ValueError("No valid files saved.")
+            if not saved_file_paths:
+                raise ValueError("No valid files saved.")
             logger.debug(f"Saved {len(saved_file_paths)} files to {temp_dir}")
         except ValueError as ve:
-            if temp_dir and os.path.exists(temp_dir): shutil.rmtree(temp_dir)
-            logger.warning(f"Upload rejected: {ve}"); return jsonify({"status": "error", "message": str(ve)}), 400
+            if temp_dir and os.path.exists(temp_dir):
+                shutil.rmtree(temp_dir)
+            logger.warning(f"Upload rejected: {ve}")
+            return jsonify({"status": "error", "message": str(ve)}), 400
         except Exception as file_err:
-            if temp_dir and os.path.exists(temp_dir): shutil.rmtree(temp_dir)
-            logger.error(f"Error saving uploaded files: {file_err}", exc_info=True); return jsonify({"status": "error", "message": "Error saving uploaded files."}), 500
+            if temp_dir and os.path.exists(temp_dir):
+                shutil.rmtree(temp_dir)
+            logger.error(f"Error saving uploaded files: {file_err}", exc_info=True)
+            return jsonify({"status": "error", "message": "Error saving uploaded files."}), 500
 
         # --- Prepare Server Info and Options ---
-        server_info = {'url': fhir_server_url, 'auth_type': auth_type, 'auth_token': auth_token}
+        server_info = {'url': fhir_server_url, 'auth_type': auth_type}
+        if auth_type == 'bearerToken':
+            server_info['auth_token'] = auth_token
+        elif auth_type == 'basic':
+            credentials = f"{username}:{password}"
+            encoded_credentials = base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
+            server_info['auth_token'] = f"Basic {encoded_credentials}"
         options = {
             'upload_mode': upload_mode,
             'error_handling': error_handling,
@@ -1839,25 +2189,30 @@ def api_upload_test_data():
                 with app.app_context():
                     yield from services.process_and_upload_test_data(server_info, options, temp_dir)
             finally:
-                try: logger.debug(f"Cleaning up temp dir: {temp_dir}"); shutil.rmtree(temp_dir)
-                except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir {temp_dir}: {cleanup_e}")
+                try:
+                    logger.debug(f"Cleaning up temp dir: {temp_dir}")
+                    shutil.rmtree(temp_dir)
+                except Exception as cleanup_e:
+                    logger.error(f"Error cleaning up temp dir {temp_dir}: {cleanup_e}")
 
         return Response(generate_stream_wrapper(), mimetype='application/x-ndjson')
 
     except RequestEntityTooLarge as e:
-        # Catch the specific exception if the custom parser still fails (e.g., limit too low)
         logger.error(f"RequestEntityTooLarge error in /api/upload-test-data despite custom parser: {e}", exc_info=True)
         if temp_dir and os.path.exists(temp_dir):
-            try: shutil.rmtree(temp_dir)
-            except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
+            try:
+                shutil.rmtree(temp_dir)
+            except Exception as cleanup_e:
+                logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
        return jsonify({"status": "error", "message": f"Upload failed: Request entity too large. Try increasing parser limit or reducing files/size. ({str(e)})"}), 413
     except Exception as e:
-        # Catch other potential errors during parsing or setup
         logger.error(f"Error in /api/upload-test-data: {e}", exc_info=True)
         if temp_dir and os.path.exists(temp_dir):
-            try: shutil.rmtree(temp_dir)
-            except Exception as cleanup_e: logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
+            try:
+                shutil.rmtree(temp_dir)
+            except Exception as cleanup_e:
+                logger.error(f"Error cleaning up temp dir during exception: {cleanup_e}")
         return jsonify({"status": "error", "message": f"Unexpected server error: {str(e)}"}), 500
 
 @app.route('/retrieve-split-data', methods=['GET', 'POST'])
@@ -1889,6 +2244,36 @@ def retrieve_split_data():
                            api_key=app.config['API_KEY'])
 
 @app.route('/api/retrieve-bundles', methods=['POST'])
+@csrf.exempt
+@swag_from({
+    'tags': ['Test Data Management'],
+    'summary': 'Retrieve FHIR resource bundles from a server.',
+    'description': 'Fetches bundles for specified resource types from a FHIR server. Optionally fetches referenced resources. Returns an NDJSON stream and prepares a ZIP file for download.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['application/x-www-form-urlencoded'],  # Or multipart/form-data if files are involved
+    'produces': ['application/x-ndjson'],
+    'parameters': [
+        {'name': 'fhir_server_url', 'in': 'formData', 'type': 'string', 'required': False, 'format': 'url', 'description': 'Target FHIR server URL. Defaults to local proxy (/fhir).'},
+        {'name': 'resources', 'in': 'formData', 'type': 'array', 'items': {'type': 'string'}, 'collectionFormat': 'multi', 'required': True, 'description': 'List of resource types to retrieve (e.g., Patient, Observation).'},
+        {'name': 'validate_references', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'Fetch resources referenced by the initial bundles.'},
+        {'name': 'fetch_reference_bundles', 'in': 'formData', 'type': 'boolean', 'default': False, 'description': 'If fetching references, get full bundles for referenced types instead of individual resources.'},
+        {'name': 'auth_type', 'in': 'formData', 'type': 'string', 'enum': ['none', 'bearer', 'basic'], 'default': 'none'},
+        {'name': 'bearer_token', 'in': 'formData', 'type': 'string', 'description': 'Bearer token if auth_type is bearer.'},
+        {'name': 'username', 'in': 'formData', 'type': 'string', 'description': 'Username if auth_type is basic.'},
+        {'name': 'password', 'in': 'formData', 'type': 'string', 'format': 'password', 'description': 'Password if auth_type is basic.'}
+    ],
+    'responses': {
+        '200': {
+            'description': 'NDJSON stream of retrieval progress. X-Zip-Path header indicates path to the created ZIP file.',
+            'headers': {
+                'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file.'}
+            }
+        },
+        '400': {'description': 'Invalid request parameters.'},
+        '401': {'description': 'Authentication error.'},
+        '500': {'description': 'Server error during retrieval.'}
+    }
+})
 def api_retrieve_bundles():
     auth_error = check_api_key()
     if auth_error:
@@ -1896,50 +2281,90 @@ def api_retrieve_bundles():
     # Use request.form for standard form data
     params = request.form.to_dict()
-    resources = request.form.getlist('resources') # Get list of selected resources
-
-    # Get boolean flags, converting string 'true' to boolean True
+    resources = request.form.getlist('resources')
     validate_references = params.get('validate_references', 'false').lower() == 'true'
-    # --- Get NEW flag ---
     fetch_reference_bundles = params.get('fetch_reference_bundles', 'false').lower() == 'true'
-    # --- End NEW flag ---
+    auth_type = params.get('auth_type', 'none')
+    bearer_token = params.get('bearer_token')
+    username = params.get('username')
+    password = params.get('password')
 
-    # Basic validation
+    # Get FHIR server URL, default to '/fhir' (local proxy)
+    fhir_server_url = params.get('fhir_server_url', '/fhir').strip()
+    if not fhir_server_url:
+        fhir_server_url = '/fhir'
+
+    # Validation
     if not resources:
         return jsonify({"status": "error", "message": "No resources selected."}), 400
+    valid_auth_types = ['none', 'bearer', 'basic']
+    if auth_type not in valid_auth_types:
+        return jsonify({"status": "error", "message": f"Invalid auth_type. Must be one of {valid_auth_types}."}), 400
+    if auth_type == 'bearer' and not bearer_token:
+        return jsonify({"status": "error", "message": "Bearer token required for bearer authentication."}), 400
+    if auth_type == 'basic' and (not username or not password):
+        return jsonify({"status": "error", "message": "Username and password required for basic authentication."}), 400
 
-    # Get FHIR server URL, default to '/fhir' (which targets local proxy)
-    fhir_server_url = params.get('fhir_server_url', '/fhir').strip()
-    if not fhir_server_url: # Handle empty string case
-        fhir_server_url = '/fhir'
+    # Handle authentication
+    auth_token = None
+    if auth_type == 'bearer':
+        auth_token = f"Bearer {bearer_token}"
+    elif auth_type == 'basic':
+        credentials = f"{username}:{password}"
+        auth_token = f"Basic {base64.b64encode(credentials.encode('utf-8')).decode('utf-8')}"
 
-    logger.info(f"Retrieve API: Server='{fhir_server_url}', Resources={resources}, ValidateRefs={validate_references}, FetchRefBundles={fetch_reference_bundles}")
+    logger.info(f"Retrieve API: Server='{fhir_server_url}', Resources={resources}, ValidateRefs={validate_references}, FetchRefBundles={fetch_reference_bundles}, AuthType={auth_type}")
 
-    # Ensure the temp directory exists (use Flask's configured upload folder or system temp)
-    # Using system temp is generally safer for transient data
+    # Ensure the temp directory exists
     temp_dir = tempfile.gettempdir()
-    # Generate a unique filename for the zip in the temp dir
     zip_filename = f"retrieved_bundles_{datetime.datetime.now().strftime('%Y%m%d%H%M%S')}.zip"
     output_zip = os.path.join(temp_dir, zip_filename)
 
     def generate():
-        # Pass the NEW flag to the service function
-        yield from retrieve_bundles(
-            fhir_server_url=fhir_server_url,
-            resources=resources,
-            output_zip=output_zip,
-            validate_references=validate_references,
-            fetch_reference_bundles=fetch_reference_bundles # Pass new flag
-        )
+        try:
+            yield from services.retrieve_bundles(
+                fhir_server_url=fhir_server_url,
+                resources=resources,
+                output_zip=output_zip,
+                validate_references=validate_references,
+                fetch_reference_bundles=fetch_reference_bundles,
+                auth_type=auth_type,
+                auth_token=auth_token
+            )
+        except Exception as e:
+            logger.error(f"Error in retrieve_bundles: {e}", exc_info=True)
+            yield json.dumps({"type": "error", "message": f"Unexpected error: {str(e)}"}) + "\n"
 
-    # Create the response *before* starting the generator
     response = Response(generate(), mimetype='application/x-ndjson')
-    # Send back the *relative* path within the temp dir for download
-    response.headers['X-Zip-Path'] = os.path.join('/tmp', zip_filename) # Path for the /tmp/ route
-
+    response.headers['X-Zip-Path'] = os.path.join('/tmp', zip_filename)
     return response
 
 @app.route('/api/split-bundles', methods=['POST'])
+@swag_from({
+    'tags': ['Test Data Management'],
+    'summary': 'Split FHIR bundles from a ZIP into individual resources.',
+    'description': 'Takes a ZIP file containing FHIR bundles, extracts individual resources, and creates a new ZIP file with these resources. Returns an NDJSON stream of progress.',
+    'security': [{'ApiKeyAuth': []}],
+    'consumes': ['multipart/form-data'],  # Assuming split_bundle_zip_path comes from a form that might include a file upload in other contexts, or it's a path string. If it's always a path string from a JSON body, change consumes.
+    'produces': ['application/x-ndjson'],
+    'parameters': [
+        # If split_bundle_zip_path is a path sent in form data:
+        {'name': 'split_bundle_zip_path', 'in': 'formData', 'type': 'string', 'required': True, 'description': 'Path to the input ZIP file containing bundles (server-side path).'},
+        # If it's an uploaded file:
+        # {'name': 'split_bundle_zip_file', 'in': 'formData', 'type': 'file', 'required': True, 'description': 'ZIP file containing bundles to split.'}
+    ],
+    'responses': {
+        '200': {
+            'description': 'NDJSON stream of splitting progress. X-Zip-Path header indicates path to the output ZIP file.',
+            'headers': {
+                'X-Zip-Path': {'type': 'string', 'description': 'Server path to the generated ZIP file with split resources.'}
+            }
+        },
+        '400': {'description': 'Invalid request (e.g., missing input ZIP path/file).'},
+        '401': {'description': 'Authentication error.'},
+        '500': {'description': 'Server error during splitting.'}
+    }
+})
 def api_split_bundles():
     auth_error = check_api_key()
     if auth_error:
@@ -1977,6 +2402,31 @@ def clear_session():
 
 @app.route('/api/package/<name>', methods=['GET'])
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Get details for a specific FHIR package.',
+    'description': 'Retrieves details for a FHIR IG package by its name. Data is sourced from ProcessedIg, CachedPackage, or fetched live from registries.',
+    'parameters': [
+        {'name': 'name', 'in': 'path', 'type': 'string', 'required': True, 'description': 'The canonical name of the package (e.g., hl7.fhir.us.core).'}
+    ],
+    'responses': {
+        '200': {
+            'description': 'Package details.',
+            'schema': {
+                'type': 'object',
+                'properties': {
+                    'name': {'type': 'string'},
+                    'latest': {'type': 'string', 'description': 'Latest known version.'},
+                    'author': {'type': 'string'},
+                    'fhir_version': {'type': 'string'},
+                    'version_count': {'type': 'integer'},
+                    'url': {'type': 'string', 'format': 'url'}
+                }
+            }
+        },
+        '404': {'description': 'Package not found.'}
+    }
+})
 def package_details(name):
     """
     Retrieve details for a specific FHIR Implementation Guide package by name.
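The retrieve and split endpoints above stream NDJSON progress and hand back the result
archive via the X-Zip-Path response header. A minimal consumer sketch (base URL and key
value are assumptions, as before; only the "type"/"message" keys of the stream messages
are taken from this patch):

    import json
    import requests

    resp = requests.post(
        "http://localhost:5000/api/retrieve-bundles",
        headers={"X-API-Key": "your-configured-api-key"},  # assumed key
        # 'resources' is a multi-valued form field; requests repeats it per value.
        data={"resources": ["Patient", "Observation"], "validate_references": "false"},
        stream=True,  # the endpoint streams application/x-ndjson
    )
    zip_path = resp.headers.get("X-Zip-Path")  # server-side path of the result ZIP
    for line in resp.iter_lines():
        if line:
            print(json.loads(line))  # e.g. {"type": "error", "message": "..."}
    print("ZIP written to:", zip_path)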
@@ -2252,6 +2702,19 @@ def search_and_import():
                            is_fetching=is_fetching)
 
 @app.route('/api/search-packages', methods=['GET'], endpoint='api_search_packages')
+@swag_from({
+    'tags': ['Package Management'],
+    'summary': 'Search FHIR packages (HTMX).',
+    'description': 'Searches the in-memory package cache. Returns an HTML fragment for HTMX to display matching packages. Primarily for UI interaction.',
+    'parameters': [
+        {'name': 'search', 'in': 'query', 'type': 'string', 'required': False, 'description': 'Search term for package name or author.'},
+        {'name': 'page', 'in': 'query', 'type': 'integer', 'required': False, 'default': 1}
+    ],
+    'produces': ['text/html'],
+    'responses': {
+        '200': {'description': 'HTML fragment containing the search results table.'}
+    }
+})
 def api_search_packages():
     """
     Handles HTMX search requests. Filters packages from the in-memory cache.
diff --git a/forms.py b/forms.py
index 8f63ab2..976521e 100644
--- a/forms.py
+++ b/forms.py
@@ -1,69 +1,63 @@
 # forms.py
 from flask_wtf import FlaskForm
-from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField, FileField
+from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField, FileField, PasswordField
 from wtforms.validators import DataRequired, Regexp, ValidationError, URL, Optional, InputRequired
-from flask import request # Import request for file validation in FSHConverterForm
+from flask import request
 import json
 import xml.etree.ElementTree as ET
 import re
-import logging # Import logging
+import logging
 import os
 
-logger = logging.getLogger(__name__) # Setup logger if needed elsewhere
+logger = logging.getLogger(__name__)
 
-# Existing form classes (IgImportForm, ValidationForm, FSHConverterForm, TestDataUploadForm) remain unchanged
-# Only providing RetrieveSplitDataForm
 class RetrieveSplitDataForm(FlaskForm):
     """Form for retrieving FHIR bundles and splitting them into individual resources."""
     fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()],
                                   render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'})
-
-    validate_references = BooleanField('Fetch Referenced Resources', default=False, # Changed label slightly
+    auth_type = SelectField('Authentication Type (for Custom URL)', choices=[
+        ('none', 'None'),
+        ('bearerToken', 'Bearer Token'),
+        ('basicAuth', 'Basic Authentication')
+    ], default='none', validators=[Optional()])
+    auth_token = StringField('Bearer Token', validators=[Optional()],
+                             render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'})
+    basic_auth_username = StringField('Username', validators=[Optional()],
+                                      render_kw={'placeholder': 'Enter Basic Auth Username'})
+    basic_auth_password = PasswordField('Password', validators=[Optional()],
+                                        render_kw={'placeholder': 'Enter Basic Auth Password'})
+    validate_references = BooleanField('Fetch Referenced Resources', default=False,
                                        description="If checked, fetches resources referenced by the initial bundles.")
-
-    # --- NEW FIELD ---
     fetch_reference_bundles = BooleanField('Fetch Full Reference Bundles (instead of individual resources)', default=False,
                                            description="Requires 'Fetch Referenced Resources'. Fetches e.g. /Patient instead of Patient/id for each reference.",
-                                           render_kw={'data-dependency': 'validate_references'}) # Add data attribute for JS
-    # --- END NEW FIELD ---
-
+                                           render_kw={'data-dependency': 'validate_references'})
     split_bundle_zip = FileField('Upload Bundles to Split (ZIP)', validators=[Optional()],
                                  render_kw={'accept': '.zip'})
     submit_retrieve = SubmitField('Retrieve Bundles')
     submit_split = SubmitField('Split Bundles')
 
     def validate(self, extra_validators=None):
-        """Custom validation for RetrieveSplitDataForm."""
         if not super().validate(extra_validators):
             return False
-
-        # --- NEW VALIDATION LOGIC ---
-        # Ensure fetch_reference_bundles is only checked if validate_references is also checked
         if self.fetch_reference_bundles.data and not self.validate_references.data:
-            self.fetch_reference_bundles.errors.append('Cannot fetch full reference bundles unless "Fetch Referenced Resources" is also checked.')
-            return False
-        # --- END NEW VALIDATION LOGIC ---
-
-        # Validate based on which submit button was pressed
-        if self.submit_retrieve.data:
-            # No specific validation needed here now, handled by URL validator and JS
-            pass
-        elif self.submit_split.data:
-            # Need to check bundle source radio button selection in backend/JS,
-            # but validate file if 'upload' is selected.
-            # This validation might need refinement based on how source is handled.
-            # Assuming 'split_bundle_zip' is only required if 'upload' source is chosen.
-            pass # Basic validation done by Optional() and file type checks below
-
-        # Validate file uploads (keep existing)
+            self.fetch_reference_bundles.errors.append('Cannot fetch full reference bundles unless "Fetch Referenced Resources" is also checked.')
+            return False
+        if self.auth_type.data == 'bearerToken' and self.submit_retrieve.data and not self.auth_token.data:
+            self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.')
+            return False
+        if self.auth_type.data == 'basicAuth' and self.submit_retrieve.data:
+            if not self.basic_auth_username.data:
+                self.basic_auth_username.errors.append('Username is required for Basic Authentication.')
+                return False
+            if not self.basic_auth_password.data:
+                self.basic_auth_password.errors.append('Password is required for Basic Authentication.')
+                return False
         if self.split_bundle_zip.data:
             if not self.split_bundle_zip.data.filename.lower().endswith('.zip'):
                 self.split_bundle_zip.errors.append('File must be a ZIP file.')
                 return False
-
         return True
 
-# Existing forms (IgImportForm, ValidationForm) remain unchanged
 class IgImportForm(FlaskForm):
     """Form for importing Implementation Guides."""
     package_name = StringField('Package Name', validators=[
@@ -92,7 +86,6 @@ class ValidationForm(FlaskForm):
     ], default='single')
     sample_input = TextAreaField('Sample Input', validators=[
         DataRequired(),
-        # Removed lambda validator for simplicity, can be added back if needed
     ])
     submit = SubmitField('Validate')
 
@@ -117,7 +110,7 @@ class FSHConverterForm(FlaskForm):
         ('info', 'Info'),
         ('debug', 'Debug')
     ], validators=[DataRequired()])
-    fhir_version = SelectField('FHIR Version', choices=[ # Corrected label
+    fhir_version = SelectField('FHIR Version', choices=[
         ('', 'Auto-detect'),
         ('4.0.1', 'R4'),
         ('4.3.0', 'R4B'),
@@ -136,116 +129,125 @@
     submit = SubmitField('Convert to FSH')
 
     def validate(self, extra_validators=None):
-        """Custom validation for FSH Converter Form."""
-        # Run default validators first
         if not super().validate(extra_validators):
             return False
-
-        # Check file/text input based on mode
-        # Need to check request.files for file uploads as self.fhir_file.data might be None during initial POST validation
         has_file_in_request = request and request.files and self.fhir_file.name in request.files and request.files[self.fhir_file.name].filename != ''
         if self.input_mode.data == 'file' and not has_file_in_request:
-            # If it's not in request.files, check if data is already populated (e.g., on re-render after error)
             if not self.fhir_file.data:
                 self.fhir_file.errors.append('File is required when input mode is Upload File.')
                 return False
         if self.input_mode.data == 'text' and not self.fhir_text.data:
             self.fhir_text.errors.append('Text input is required when input mode is Paste Text.')
             return False
-
-        # Validate text input format
         if self.input_mode.data == 'text' and self.fhir_text.data:
             try:
                 content = self.fhir_text.data.strip()
-                if not content: # Empty text is technically valid but maybe not useful
-                    pass # Allow empty text for now
-                elif content.startswith('{'):
-                    json.loads(content)
-                elif content.startswith('<'):
-                    ET.fromstring(content) # Basic XML check
+                if not content: pass
+                elif content.startswith('{'): json.loads(content)
+                elif content.startswith('<'): ET.fromstring(content)
                 else:
-                    # If content exists but isn't JSON or XML, it's an error
                     self.fhir_text.errors.append('Text input must be valid JSON or XML.')
                     return False
             except (json.JSONDecodeError, ET.ParseError):
                 self.fhir_text.errors.append('Invalid JSON or XML format.')
                 return False
-
-        # Validate dependency format
         if self.dependencies.data:
             for dep in self.dependencies.data.splitlines():
                 dep = dep.strip()
-                # Allow versions like 'current', 'dev', etc. but require package@version format
                 if dep and not re.match(r'^[a-zA-Z0-9\-\.]+@[a-zA-Z0-9\.\-]+$', dep):
                     self.dependencies.errors.append(f'Invalid dependency format: "{dep}". Use package@version (e.g., hl7.fhir.us.core@6.1.0).')
                     return False
-
-        # Validate alias file extension (optional, basic check)
-        # Check request.files for alias file as well
         has_alias_file_in_request = request and request.files and self.alias_file.name in request.files and request.files[self.alias_file.name].filename != ''
         alias_file_data = self.alias_file.data or (request.files.get(self.alias_file.name) if request else None)
-
         if alias_file_data and alias_file_data.filename:
             if not alias_file_data.filename.lower().endswith('.fsh'):
                 self.alias_file.errors.append('Alias file should have a .fsh extension.')
-                # return False # Might be too strict, maybe just warn?
-
         return True
-
 class TestDataUploadForm(FlaskForm):
     """Form for uploading FHIR test data."""
     fhir_server_url = StringField('Target FHIR Server URL', validators=[DataRequired(), URL()],
                                   render_kw={'placeholder': 'e.g., http://localhost:8080/fhir'})
-
     auth_type = SelectField('Authentication Type', choices=[
         ('none', 'None'),
-        ('bearerToken', 'Bearer Token')
+        ('bearerToken', 'Bearer Token'),
+        ('basic', 'Basic Authentication')
     ], default='none')
-
     auth_token = StringField('Bearer Token', validators=[Optional()],
                              render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'})
-
+    username = StringField('Username', validators=[Optional()],
+                           render_kw={'placeholder': 'Enter Basic Auth Username'})
+    password = PasswordField('Password', validators=[Optional()],
+                             render_kw={'placeholder': 'Enter Basic Auth Password'})
     test_data_file = FileField('Select Test Data File(s)', validators=[InputRequired("Please select at least one file.")],
                                render_kw={'multiple': True, 'accept': '.json,.xml,.zip'})
-
     validate_before_upload = BooleanField('Validate Resources Before Upload?', default=False,
                                           description="Validate resources against selected package profile before uploading.")
     validation_package_id = SelectField('Validation Profile Package (Optional)',
                                         choices=[('', '-- Select Package for Validation --')],
                                         validators=[Optional()],
                                         description="Select the processed IG package to use for validation.")
-
     upload_mode = SelectField('Upload Mode', choices=[
-        ('individual', 'Individual Resources'), # Simplified label
-        ('transaction', 'Transaction Bundle') # Simplified label
+        ('individual', 'Individual Resources'),
+        ('transaction', 'Transaction Bundle')
     ], default='individual')
-
-    # --- NEW FIELD for Conditional Upload ---
     use_conditional_uploads = BooleanField('Use Conditional Upload (Individual Mode Only)?', default=True,
                                            description="If checked, checks resource existence (GET) and uses If-Match (PUT) or creates (PUT). If unchecked, uses simple PUT for all.")
-    # --- END NEW FIELD ---
-
     error_handling = SelectField('Error Handling', choices=[
         ('stop', 'Stop on First Error'),
         ('continue', 'Continue on Error')
     ], default='stop')
-
     submit = SubmitField('Upload and Process')
 
     def validate(self, extra_validators=None):
-        """Custom validation for Test Data Upload Form."""
-        if not super().validate(extra_validators): return False
+        if not super().validate(extra_validators):
+            return False
         if self.validate_before_upload.data and not self.validation_package_id.data:
             self.validation_package_id.errors.append('Please select a package to validate against when pre-upload validation is enabled.')
             return False
-        # Add check: Conditional uploads only make sense for individual mode
         if self.use_conditional_uploads.data and self.upload_mode.data == 'transaction':
-            self.use_conditional_uploads.errors.append('Conditional Uploads only apply to the "Individual Resources" mode.')
-            # We might allow this combination but warn the user it has no effect,
-            # or enforce it here. Let's enforce for clarity.
-            # return False # Optional: Make this a hard validation failure
-            # Or just let it pass and ignore the flag in the backend for transaction mode.
- pass # Let it pass for now, backend will ignore if mode is transaction - + self.use_conditional_uploads.errors.append('Conditional Uploads only apply to the "Individual Resources" mode.') + return False + if self.auth_type.data == 'bearerToken' and not self.auth_token.data: + self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected.') + return False + if self.auth_type.data == 'basic': + if not self.username.data: + self.username.errors.append('Username is required for Basic Authentication.') + return False + if not self.password.data: + self.password.errors.append('Password is required for Basic Authentication.') + return False return True + +class FhirRequestForm(FlaskForm): + fhir_server_url = StringField('FHIR Server URL', validators=[URL(), Optional()], + render_kw={'placeholder': 'e.g., https://hapi.fhir.org/baseR4'}) + auth_type = SelectField('Authentication Type (for Custom URL)', choices=[ + ('none', 'None'), + ('bearerToken', 'Bearer Token'), + ('basicAuth', 'Basic Authentication') + ], default='none', validators=[Optional()]) + auth_token = StringField('Bearer Token', validators=[Optional()], + render_kw={'placeholder': 'Enter Bearer Token', 'type': 'password'}) + basic_auth_username = StringField('Username', validators=[Optional()], + render_kw={'placeholder': 'Enter Basic Auth Username'}) + basic_auth_password = PasswordField('Password', validators=[Optional()], + render_kw={'placeholder': 'Enter Basic Auth Password'}) + submit = SubmitField('Send Request') + + def validate(self, extra_validators=None): + if not super().validate(extra_validators): + return False + if self.fhir_server_url.data: + if self.auth_type.data == 'bearerToken' and not self.auth_token.data: + self.auth_token.errors.append('Bearer Token is required when Bearer Token authentication is selected for a custom URL.') + return False + if self.auth_type.data == 'basicAuth': + if not self.basic_auth_username.data: + self.basic_auth_username.errors.append('Username is required for Basic Authentication with a custom URL.') + return False + if not self.basic_auth_password.data: + self.basic_auth_password.errors.append('Password is required for Basic Authentication with a custom URL.') + return False + return True \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index d8fbdac..cec64a5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,4 +10,5 @@ fhir.resources==8.0.0 Flask-Migrate==4.1.0 cachetools beautifulsoup4 -feedparser==6.0.11 \ No newline at end of file +feedparser==6.0.11 +flasgger \ No newline at end of file diff --git a/services.py b/services.py index 078dab1..6fe8267 100644 --- a/services.py +++ b/services.py @@ -18,6 +18,7 @@ import subprocess import tempfile import zipfile import xml.etree.ElementTree as ET +from flasgger import swag_from # Import swag_from here # Define Blueprint services_bp = Blueprint('services', __name__) @@ -2375,6 +2376,51 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo # --- Validation Route --- @services_bp.route('/validate-sample', methods=['POST']) +@swag_from({ + 'tags': ['Validation'], + 'summary': 'Validate a FHIR resource or bundle.', + 'description': 'Validates a given FHIR resource or bundle against profiles defined in a specified FHIR package. 
Uses HAPI FHIR for validation if a profile is specified, otherwise falls back to local StructureDefinition checks.', + 'security': [{'ApiKeyAuth': []}], # Assuming API key is desired + 'consumes': ['application/json'], + 'parameters': [ + { + 'name': 'validation_payload', # Changed name + 'in': 'body', + 'required': True, + 'schema': { + 'type': 'object', + 'required': ['package_name', 'version', 'sample_data'], + 'properties': { + 'package_name': {'type': 'string', 'example': 'hl7.fhir.us.core'}, + 'version': {'type': 'string', 'example': '6.1.0'}, + 'sample_data': {'type': 'string', 'description': 'A JSON string of the FHIR resource or Bundle to validate.'}, + # 'include_dependencies': {'type': 'boolean', 'default': True} # This seems to be a server-side decision now + } + } + } + ], + 'responses': { + '200': { + 'description': 'Validation result.', + 'schema': { # Define the schema of the validation_result dictionary + 'type': 'object', + 'properties': { + 'valid': {'type': 'boolean'}, + 'errors': {'type': 'array', 'items': {'type': 'string'}}, + 'warnings': {'type': 'array', 'items': {'type': 'string'}}, + 'details': {'type': 'array', 'items': {'type': 'object'}}, # more specific if known + 'resource_type': {'type': 'string'}, + 'resource_id': {'type': 'string'}, + 'profile': {'type': 'string', 'nullable': True}, + 'summary': {'type': 'object'} + } + } + }, + '400': {'description': 'Invalid request (e.g., missing fields, invalid JSON).'}, + '404': {'description': 'Specified package for validation not found.'}, + '500': {'description': 'Server error during validation.'} + } +}) def validate_sample(): """Validates a FHIR sample against a package profile.""" logger.debug("Received validate-sample request") @@ -2771,10 +2817,10 @@ def find_and_extract_search_params(tgz_path, base_resource_type): # --- END OF NEW FUNCTION --- # --- Full Replacement Function (Corrected Prefix Definitions & Unabbreviated) --- + def generate_push_stream(package_name, version, fhir_server_url, include_dependencies, auth_type, auth_token, resource_types_filter, skip_files, - dry_run, verbose, force_upload, - packages_dir): + dry_run, verbose, force_upload, packages_dir): """ Generates NDJSON stream for the push IG operation. 
Handles canonical resources (search by URL, POST/PUT), @@ -2787,8 +2833,8 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende skipped_count = 0 post_count = 0 put_count = 0 - total_resources_attempted = 0 # Calculated after collecting resources - processed_resources = set() # Tracks resource IDs attempted in this run + total_resources_attempted = 0 + processed_resources = set() failed_uploads_details = [] skipped_resources_details = [] filter_set = set(resource_types_filter) if resource_types_filter else None @@ -2797,12 +2843,12 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende try: # --- Start Messages --- operation_mode = " (DRY RUN)" if dry_run else "" - force_mode = " (FORCE UPLOAD)" if force_upload else "" # For initial message + force_mode = " (FORCE UPLOAD)" if force_upload else "" yield json.dumps({"type": "start", "message": f"Starting push{operation_mode}{force_mode} for {package_name}#{version} to {fhir_server_url}"}) + "\n" if filter_set: - yield json.dumps({"type": "info", "message": f"Filtering for resource types: {', '.join(sorted(list(filter_set)))}"}) + "\n" + yield json.dumps({"type": "info", "message": f"Filtering for resource types: {', '.join(sorted(list(filter_set)))}"}) + "\n" if skip_files_set: - yield json.dumps({"type": "info", "message": f"Skipping {len(skip_files_set)} specific files."}) + "\n" + yield json.dumps({"type": "info", "message": f"Skipping {len(skip_files_set)} specific files."}) + "\n" yield json.dumps({"type": "info", "message": f"Include Dependencies: {'Yes' if include_dependencies else 'No'}"}) + "\n" # --- Define packages_to_push --- @@ -2814,407 +2860,406 @@ def generate_push_stream(package_name, version, fhir_server_url, include_depende yield json.dumps({"type": "error", "message": f"Primary package file not found: {primary_tgz_filename}"}) + "\n" raise FileNotFoundError(f"Primary package file not found: {primary_tgz_path}") - # Always add the primary package packages_to_push.append((package_name, version, primary_tgz_path)) logger.debug(f"Added primary package to push list: {package_name}#{version}") - # Handle dependencies IF include_dependencies is true - # NOTE: This uses the simple dependency inclusion based on import metadata. - # Aligning with UploadFIG's --includeReferencedDependencies requires more complex logic here. 
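For context, the dependency metadata consumed by the block below is assumed to look roughly like this (field names inferred from the dep.get("name") / dep.get("version") accesses; package names and versions are purely illustrative):

    metadata = {
        "package_name": "hl7.fhir.au.core",
        "version": "1.0.0",
        "imported_dependencies": [
            {"name": "hl7.fhir.r4.core", "version": "4.0.1"},
            {"name": "hl7.terminology.r4", "version": "5.0.0"},
        ],
    }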
if include_dependencies: yield json.dumps({"type": "info", "message": "Including dependencies based on import metadata..."}) + "\n" - metadata = get_package_metadata(package_name, version) # Assumes this helper exists - if metadata and metadata.get('imported_dependencies'): - dependencies_to_include = metadata['imported_dependencies'] + metadata = get_package_metadata(package_name, version) + if metadata and metadata.get("imported_dependencies"): + dependencies_to_include = metadata["imported_dependencies"] logger.info(f"Found {len(dependencies_to_include)} dependencies in metadata to potentially include.") for dep in dependencies_to_include: - dep_name = dep.get('name') - dep_version = dep.get('version') + dep_name = dep.get("name") + dep_version = dep.get("version") if dep_name and dep_version: dep_tgz_filename = construct_tgz_filename(dep_name, dep_version) dep_tgz_path = os.path.join(packages_dir, dep_tgz_filename) if os.path.exists(dep_tgz_path): - # Add dependency package to the list if file exists if (dep_name, dep_version, dep_tgz_path) not in packages_to_push: packages_to_push.append((dep_name, dep_version, dep_tgz_path)) logger.debug(f"Added dependency package to push list: {dep_name}#{dep_version}") else: - # Log a warning if a listed dependency file isn't found yield json.dumps({"type": "warning", "message": f"Dependency package file not found, cannot include: {dep_tgz_filename}"}) + "\n" logger.warning(f"Dependency package file listed in metadata but not found locally: {dep_tgz_path}") else: yield json.dumps({"type": "warning", "message": "Include Dependencies checked, but no dependency metadata found. Only pushing primary."}) + "\n" logger.warning(f"No dependency metadata found for {package_name}#{version} despite include_dependencies=True") - # --- End Define packages_to_push --- # --- Resource Extraction & Filtering --- resources_to_upload = [] - seen_resource_files = set() # Track filenames across all packages being pushed + seen_resource_files = set() - # Iterate through the populated list of packages to push for pkg_name, pkg_version, pkg_path in packages_to_push: - yield json.dumps({"type": "progress", "message": f"Extracting resources from: {pkg_name}#{pkg_version}..."}) + "\n" - try: + yield json.dumps({"type": "progress", "message": f"Extracting resources from: {pkg_name}#{pkg_version}..."}) + "\n" + try: with tarfile.open(pkg_path, "r:gz") as tar: for member in tar.getmembers(): - # Basic file checks: must be a file, in 'package/', end with .json - if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): - continue - # Skip common metadata files by basename - basename_lower = os.path.basename(member.name).lower() - if basename_lower in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: - continue + if not (member.isfile() and member.name.startswith("package/") and member.name.lower().endswith(".json")): + continue + basename_lower = os.path.basename(member.name).lower() + if basename_lower in ["package.json", ".index.json", "validation-summary.json", "validation-oo.json"]: + continue - # Check against skip_files list (using normalized paths) - normalized_member_name = member.name.replace('\\', '/') - if normalized_member_name in skip_files_set or member.name in skip_files_set: - if verbose: yield json.dumps({"type": "info", "message": f"Skipping file due to filter: {member.name}"}) + "\n" - continue + normalized_member_name = member.name.replace("\\", "/") + if normalized_member_name in 
skip_files_set or member.name in skip_files_set: + if verbose: + yield json.dumps({"type": "info", "message": f"Skipping file due to filter: {member.name}"}) + "\n" + continue - # Avoid processing the same file path if it appears in multiple packages - if member.name in seen_resource_files: - if verbose: yield json.dumps({"type": "info", "message": f"Skipping already seen file: {member.name}"}) + "\n" - continue - seen_resource_files.add(member.name) + if member.name in seen_resource_files: + if verbose: + yield json.dumps({"type": "info", "message": f"Skipping already seen file: {member.name}"}) + "\n" + continue + seen_resource_files.add(member.name) - # Extract and parse the resource JSON - try: - with tar.extractfile(member) as f: - resource_content = f.read().decode('utf-8-sig') - resource_data = json.loads(resource_content) + try: + with tar.extractfile(member) as f: + resource_content = f.read().decode("utf-8-sig") + resource_data = json.loads(resource_content) - # Basic validation of resource structure - if isinstance(resource_data, dict) and 'resourceType' in resource_data and 'id' in resource_data: - resource_type_val = resource_data.get('resourceType') # Use distinct var name - # Apply resource type filter if provided - if filter_set and resource_type_val not in filter_set: - if verbose: yield json.dumps({"type": "info", "message": f"Skipping resource type {resource_type_val} due to filter: {member.name}"}) + "\n" - continue - # Add valid resource to the upload list - resources_to_upload.append({ - "data": resource_data, - "source_package": f"{pkg_name}#{pkg_version}", - "source_filename": member.name # Store original filename - }) - else: - yield json.dumps({"type": "warning", "message": f"Skipping invalid/incomplete resource structure in file: {member.name}"}) + "\n" - # Handle errors during extraction/parsing of individual files - except json.JSONDecodeError as json_e: yield json.dumps({"type": "warning", "message": f"JSON parse error in file {member.name}: {json_e}"}) + "\n" - except UnicodeDecodeError as uni_e: yield json.dumps({"type": "warning", "message": f"Encoding error in file {member.name}: {uni_e}"}) + "\n" - except KeyError: yield json.dumps({"type": "warning", "message": f"File not found within archive (should not happen here): {member.name}"}) + "\n" - except Exception as extract_e: yield json.dumps({"type": "warning", "message": f"Error processing file {member.name}: {extract_e}"}) + "\n" - # Handle errors opening/reading the tarfile itself - except tarfile.ReadError as tar_read_e: - error_msg = f"Tar ReadError reading package {pkg_name}#{pkg_version}: {tar_read_e}. Skipping package." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 # Count as failure for summary - failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Read Error: {tar_read_e}"}) - continue # Skip to next package in packages_to_push - except tarfile.TarError as tar_e: - error_msg = f"TarError reading package {pkg_name}#{pkg_version}: {tar_e}. Skipping package." - yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Tar Error: {tar_e}"}) - continue - except Exception as pkg_e: # Catch other potential errors reading package - error_msg = f"Unexpected error reading package {pkg_name}#{pkg_version}: {pkg_e}. Skipping package." 
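The extraction rules in this hunk (regular files under package/ ending in .json, minus package metadata files, decoded as UTF-8 with BOM tolerance, kept only when they carry a resourceType and id) can be summarised as a standalone sketch; the per-file error handling shown above is omitted here for brevity:

    import json
    import os
    import tarfile

    METADATA_FILES = {'package.json', '.index.json',
                      'validation-summary.json', 'validation-oo.json'}

    def iter_package_resources(tgz_path):
        # Yields (filename, resource) pairs using the same filters as the loop above.
        with tarfile.open(tgz_path, 'r:gz') as tar:
            for member in tar.getmembers():
                name = member.name
                if not (member.isfile() and name.startswith('package/')
                        and name.lower().endswith('.json')):
                    continue
                if os.path.basename(name).lower() in METADATA_FILES:
                    continue
                with tar.extractfile(member) as f:
                    data = json.loads(f.read().decode('utf-8-sig'))
                if isinstance(data, dict) and 'resourceType' in data and 'id' in data:
                    yield name, data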
- yield json.dumps({"type": "error", "message": error_msg}) + "\n" - failure_count += 1 - failed_uploads_details.append({'resource': f"Package: {pkg_name}#{pkg_version}", 'error': f"Unexpected: {pkg_e}"}) - logger.error(f"Error reading package {pkg_path}: {pkg_e}", exc_info=True) - continue - # --- End Resource Extraction --- + if isinstance(resource_data, dict) and "resourceType" in resource_data and "id" in resource_data: + resource_type_val = resource_data.get("resourceType") + if filter_set and resource_type_val not in filter_set: + if verbose: + yield json.dumps({"type": "info", "message": f"Skipping resource type {resource_type_val} due to filter: {member.name}"}) + "\n" + continue + resources_to_upload.append({ + "data": resource_data, + "source_package": f"{pkg_name}#{pkg_version}", + "source_filename": member.name + }) + else: + yield json.dumps({"type": "warning", "message": f"Skipping invalid/incomplete resource structure in file: {member.name}"}) + "\n" + except json.JSONDecodeError as json_e: + yield json.dumps({"type": "warning", "message": f"JSON parse error in file {member.name}: {json_e}"}) + "\n" + except UnicodeDecodeError as uni_e: + yield json.dumps({"type": "warning", "message": f"Encoding error in file {member.name}: {uni_e}"}) + "\n" + except KeyError: + yield json.dumps({"type": "warning", "message": f"File not found within archive: {member.name}"}) + "\n" + except Exception as extract_e: + yield json.dumps({"type": "warning", "message": f"Error processing file {member.name}: {extract_e}"}) + "\n" + except tarfile.ReadError as tar_read_e: + error_msg = f"Tar ReadError reading package {pkg_name}#{pkg_version}: {tar_read_e}. Skipping package." + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Read Error: {tar_read_e}"}) + continue + except tarfile.TarError as tar_e: + error_msg = f"TarError reading package {pkg_name}#{pkg_version}: {tar_e}. Skipping package." + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Tar Error: {tar_e}"}) + continue + except Exception as pkg_e: + error_msg = f"Unexpected error reading package {pkg_name}#{pkg_version}: {pkg_e}. Skipping package." 
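Because every yield in this generator emits one JSON object terminated by a newline, the response is NDJSON and a client can act on progress messages as they arrive. A minimal consumer sketch (the endpoint path, host, and payload fields are assumptions for illustration, not part of this patch):

    import json
    import requests

    payload = {
        'package_name': 'hl7.fhir.au.core',   # illustrative values
        'version': '1.0.0',
        'fhir_server_url': 'http://localhost:8080/fhir',
    }
    with requests.post('http://localhost:5000/api/push-ig',  # hypothetical route
                       json=payload, stream=True, timeout=300) as resp:
        for line in resp.iter_lines():
            if not line:
                continue
            msg = json.loads(line)
            if msg.get('type') == 'complete':
                print('Summary:', msg.get('data', {}).get('message'))
            else:
                print(f"[{msg.get('type')}] {msg.get('message', '')}")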
+ yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": f"Package: {pkg_name}#{pkg_version}", "error": f"Unexpected: {pkg_e}"}) + logger.error(f"Error reading package {pkg_path}: {pkg_e}", exc_info=True) + continue total_resources_attempted = len(resources_to_upload) yield json.dumps({"type": "info", "message": f"Found {total_resources_attempted} resources matching filters across selected packages."}) + "\n" if total_resources_attempted == 0: - yield json.dumps({"type": "warning", "message": "No resources found to upload after filtering."}) + "\n" - # Go straight to completion summary if nothing to upload + yield json.dumps({"type": "warning", "message": "No resources found to upload after filtering."}) + "\n" else: # --- Resource Upload Loop Setup --- session = requests.Session() - base_url = fhir_server_url.rstrip('/') - headers = {'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'} - # Add Authentication Header - if auth_type == 'bearerToken' and auth_token: - headers['Authorization'] = f'Bearer {auth_token}' - yield json.dumps({"type": "info", "message": "Using Bearer Token authentication."}) + "\n" - elif auth_type == 'apiKey': - # Get internal key from Flask app config if available - internal_api_key = None - try: - internal_api_key = current_app.config.get('API_KEY') - except RuntimeError: - logger.warning("Cannot access current_app config outside of request context for API Key.") - if internal_api_key: - headers['X-API-Key'] = internal_api_key - yield json.dumps({"type": "info", "message": "Using internal API Key authentication."}) + "\n" - else: - yield json.dumps({"type": "warning", "message": "API Key auth selected, but no internal key configured/accessible."}) + "\n" - else: # 'none' - yield json.dumps({"type": "info", "message": "Using no authentication."}) + "\n" + base_url = fhir_server_url.rstrip("/") + headers = {"Content-Type": "application/fhir+json", "Accept": "application/fhir+json"} + # MODIFIED: Enhanced authentication handling + if auth_type in ["bearerToken", "basic"] and auth_token: + # Log the Authorization header (mask sensitive data) + auth_display = "Basic " if auth_type == "basic" else (auth_token[:10] + "..." 
if len(auth_token) > 10 else auth_token) + yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n" + headers["Authorization"] = auth_token # Use auth_token for both Bearer and Basic + elif auth_type == "apiKey": + internal_api_key = None + try: + internal_api_key = current_app.config.get("API_KEY") + except RuntimeError: + logger.warning("Cannot access current_app config outside of request context for API Key.") + if internal_api_key: + headers["X-API-Key"] = internal_api_key + yield json.dumps({"type": "info", "message": "Using internal API Key authentication."}) + "\n" + else: + yield json.dumps({"type": "warning", "message": "API Key auth selected, but no internal key configured/accessible."}) + "\n" + else: + yield json.dumps({"type": "info", "message": "Using no authentication."}) + "\n" # --- Main Upload Loop --- for i, resource_info in enumerate(resources_to_upload, 1): local_resource = resource_info["data"] source_pkg = resource_info["source_package"] - resource_type = local_resource.get('resourceType') - resource_id = local_resource.get('id') + resource_type = local_resource.get("resourceType") + resource_id = local_resource.get("id") resource_log_id = f"{resource_type}/{resource_id}" - canonical_url = local_resource.get('url') - canonical_version = local_resource.get('version') - is_canonical_type = resource_type in CANONICAL_RESOURCE_TYPES # Assumes this set is defined + canonical_url = local_resource.get("url") + canonical_version = local_resource.get("version") + is_canonical_type = resource_type in CANONICAL_RESOURCE_TYPES - # Skip duplicates already attempted *in this run* (by ResourceType/Id) if resource_log_id in processed_resources: - if verbose: yield json.dumps({"type": "info", "message": f"Skipping duplicate ID in processing list: {resource_log_id}"}) + "\n" - # Note: Do not increment skipped_count here as it wasn't an upload attempt failure/skip + if verbose: + yield json.dumps({"type": "info", "message": f"Skipping duplicate ID in processing list: {resource_log_id}"}) + "\n" continue processed_resources.add(resource_log_id) - # --- Dry Run Handling --- if dry_run: - dry_run_action = "check/PUT" # Default action + dry_run_action = "check/PUT" if is_canonical_type and canonical_url: dry_run_action = "search/POST/PUT" yield json.dumps({"type": "progress", "message": f"[DRY RUN] Would {dry_run_action} {resource_log_id} ({i}/{total_resources_attempted}) from {source_pkg}"}) + "\n" - success_count += 1 # Count check/potential action as success in dry run - # Track package info for dry run summary + success_count += 1 pkg_found = False for p in pushed_packages_info: - if p["id"] == source_pkg: - p["resource_count"] += 1 - pkg_found = True - break - if not pkg_found: pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) - continue # Skip actual request processing for dry run + if p["id"] == source_pkg: + p["resource_count"] += 1 + pkg_found = True + break + if not pkg_found: + pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) + continue - # --- Determine Upload Strategy --- existing_resource_id = None existing_resource_data = None - action = "PUT" # Default action is PUT by ID - target_url = f"{base_url}/{resource_type}/{resource_id}" # Default target URL for PUT - skip_resource = False # Flag to skip upload altogether + action = "PUT" + target_url = f"{base_url}/{resource_type}/{resource_id}" + skip_resource = False - # 1. 
Handle Canonical Resources (Search by URL/Version) if is_canonical_type and canonical_url: - action = "SEARCH_POST_PUT" # Indicate canonical handling path - search_params = {'url': canonical_url} + action = "SEARCH_POST_PUT" + search_params = {"url": canonical_url} if canonical_version: - search_params['version'] = canonical_version + search_params["version"] = canonical_version search_url = f"{base_url}/{resource_type}" - if verbose: yield json.dumps({"type": "info", "message": f"Canonical Type: Searching {search_url} with params {search_params}"}) + "\n" + if verbose: + yield json.dumps({"type": "info", "message": f"Canonical Type: Searching {search_url} with params {search_params}"}) + "\n" try: search_response = session.get(search_url, params=search_params, headers=headers, timeout=20) - search_response.raise_for_status() # Check for HTTP errors on search + search_response.raise_for_status() search_bundle = search_response.json() - if search_bundle.get('resourceType') == 'Bundle' and 'entry' in search_bundle: - entries = search_bundle.get('entry', []) + if search_bundle.get("resourceType") == "Bundle" and "entry" in search_bundle: + entries = search_bundle.get("entry", []) if len(entries) == 1: - existing_resource_data = entries[0].get('resource') + existing_resource_data = entries[0].get("resource") if existing_resource_data: - existing_resource_id = existing_resource_data.get('id') + existing_resource_id = existing_resource_data.get("id") if existing_resource_id: - action = "PUT" # Found existing, plan to PUT + action = "PUT" target_url = f"{base_url}/{resource_type}/{existing_resource_id}" - if verbose: yield json.dumps({"type": "info", "message": f"Found existing canonical resource ID: {existing_resource_id}"}) + "\n" + if verbose: + yield json.dumps({"type": "info", "message": f"Found existing canonical resource ID: {existing_resource_id}"}) + "\n" else: yield json.dumps({"type": "warning", "message": f"Found canonical {canonical_url}|{canonical_version} but lacks ID. Skipping update."}) + "\n" - action = "SKIP"; skip_resource = True; skipped_count += 1 - skipped_resources_details.append({'resource': resource_log_id, 'reason': 'Found canonical match without ID'}) + action = "SKIP" + skip_resource = True + skipped_count += 1 + skipped_resources_details.append({"resource": resource_log_id, "reason": "Found canonical match without ID"}) else: - yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} entry lacks resource data. Assuming not found."}) + "\n" - action = "POST"; target_url = f"{base_url}/{resource_type}" + yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} entry lacks resource data. Assuming not found."}) + "\n" + action = "POST" + target_url = f"{base_url}/{resource_type}" elif len(entries) == 0: - action = "POST"; target_url = f"{base_url}/{resource_type}" - if verbose: yield json.dumps({"type": "info", "message": f"Canonical not found by URL/Version. Planning POST."}) + "\n" - else: # Found multiple matches - conflict! - ids_found = [e.get('resource', {}).get('id', 'unknown') for e in entries] + action = "POST" + target_url = f"{base_url}/{resource_type}" + if verbose: + yield json.dumps({"type": "info", "message": f"Canonical not found by URL/Version. 
Planning POST."}) + "\n" + else: + ids_found = [e.get("resource", {}).get("id", "unknown") for e in entries] yield json.dumps({"type": "error", "message": f"Conflict: Found {len(entries)} matches for {canonical_url}|{canonical_version} (IDs: {', '.join(ids_found)}). Skipping."}) + "\n" - action = "SKIP"; skip_resource = True; failure_count += 1 # Count conflict as failure - failed_uploads_details.append({'resource': resource_log_id, 'error': f"Conflict: Multiple matches ({len(entries)}) for canonical URL/Version"}) - else: # Search returned non-Bundle or empty Bundle - yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} returned non-Bundle/empty. Assuming not found."}) + "\n" - action = "POST"; target_url = f"{base_url}/{resource_type}" + action = "SKIP" + skip_resource = True + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": f"Conflict: Multiple matches ({len(entries)}) for canonical URL/Version"}) + else: + yield json.dumps({"type": "warning", "message": f"Search for {canonical_url}|{canonical_version} returned non-Bundle/empty. Assuming not found."}) + "\n" + action = "POST" + target_url = f"{base_url}/{resource_type}" except requests.exceptions.RequestException as search_err: yield json.dumps({"type": "warning", "message": f"Search failed for {resource_log_id}: {search_err}. Defaulting to PUT by ID."}) + "\n" - action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}" # Fallback + action = "PUT" + target_url = f"{base_url}/{resource_type}/{resource_id}" except json.JSONDecodeError as json_err: - yield json.dumps({"type": "warning", "message": f"Failed parse search result for {resource_log_id}: {json_err}. Defaulting PUT by ID."}) + "\n" - action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}" - except Exception as e: # Catch other unexpected errors during search - yield json.dumps({"type": "warning", "message": f"Unexpected canonical search error for {resource_log_id}: {e}. Defaulting PUT by ID."}) + "\n" - action = "PUT"; target_url = f"{base_url}/{resource_type}/{resource_id}" + yield json.dumps({"type": "warning", "message": f"Failed to parse search result for {resource_log_id}: {json_err}. Defaulting to PUT by ID."}) + "\n" + action = "PUT" + target_url = f"{base_url}/{resource_type}/{resource_id}" + except Exception as e: + yield json.dumps({"type": "warning", "message": f"Unexpected canonical search error for {resource_log_id}: {e}. Defaulting to PUT by ID."}) + "\n" + action = "PUT" + target_url = f"{base_url}/{resource_type}/{resource_id}"

- # 2.
Semantic Comparison (Only if PUTting an existing resource and NOT force_upload) if action == "PUT" and not force_upload and not skip_resource: - resource_to_compare = existing_resource_data # Use data from canonical search if available + resource_to_compare = existing_resource_data if not resource_to_compare: - # If not canonical or search failed/skipped, try GET by ID for comparison try: - if verbose: yield json.dumps({"type": "info", "message": f"Checking existing (PUT target): {target_url}"}) + "\n" + if verbose: + yield json.dumps({"type": "info", "message": f"Checking existing (PUT target): {target_url}"}) + "\n" get_response = session.get(target_url, headers=headers, timeout=15) if get_response.status_code == 200: resource_to_compare = get_response.json() - if verbose: yield json.dumps({"type": "info", "message": f"Found resource by ID for comparison."}) + "\n" + if verbose: + yield json.dumps({"type": "info", "message": f"Found resource by ID for comparison."}) + "\n" elif get_response.status_code == 404: - if verbose: yield json.dumps({"type": "info", "message": f"Resource {resource_log_id} not found by ID ({target_url}). Proceeding with PUT create."}) + "\n" - # No comparison needed if target doesn't exist - else: # Handle other GET errors - log warning, comparison skipped, proceed with PUT - yield json.dumps({"type": "warning", "message": f"Comparison check failed (GET {get_response.status_code}). Attempting PUT."}) + "\n" + if verbose: + yield json.dumps({"type": "info", "message": f"Resource {resource_log_id} not found by ID ({target_url}). Proceeding with PUT create."}) + "\n" + else: + yield json.dumps({"type": "warning", "message": f"Comparison check failed (GET {get_response.status_code}). Attempting PUT."}) + "\n" except Exception as get_err: - yield json.dumps({"type": "warning", "message": f"Comparison check failed (Error during GET by ID: {get_err}). Attempting PUT."}) + "\n" + yield json.dumps({"type": "warning", "message": f"Comparison check failed (Error during GET by ID: {get_err}). Attempting PUT."}) + "\n" - # Perform comparison if we have fetched an existing resource if resource_to_compare: try: - # Assumes are_resources_semantically_equal helper function exists and works if are_resources_semantically_equal(local_resource, resource_to_compare): yield json.dumps({"type": "info", "message": f"Skipping {resource_log_id} (Identical content)"}) + "\n" - skip_resource = True; skipped_count += 1 - skipped_resources_details.append({'resource': resource_log_id, 'reason': 'Identical content'}) + skip_resource = True + skipped_count += 1 + skipped_resources_details.append({"resource": resource_log_id, "reason": "Identical content"}) elif verbose: yield json.dumps({"type": "info", "message": f"{resource_log_id} exists but differs. Updating."}) + "\n" except Exception as comp_err: - # Log error during comparison but proceed with PUT - yield json.dumps({"type": "warning", "message": f"Comparison failed for {resource_log_id}: {comp_err}. Proceeding with PUT."}) + "\n" + yield json.dumps({"type": "warning", "message": f"Comparison failed for {resource_log_id}: {comp_err}. 
Proceeding with PUT."}) + "\n" - elif action == "PUT" and force_upload: # Force upload enabled, skip comparison - if verbose: yield json.dumps({"type": "info", "message": f"Force Upload enabled, skipping comparison for {resource_log_id}."}) + "\n" + elif action == "PUT" and force_upload: + if verbose: + yield json.dumps({"type": "info", "message": f"Force Upload enabled, skipping comparison for {resource_log_id}."}) + "\n" - # 3. Execute Upload Action (POST or PUT, if not skipped) if not skip_resource: - http_method = action if action in ["POST", "PUT"] else "PUT" # Ensure valid method + http_method = action if action in ["POST", "PUT"] else "PUT" log_action = f"{http_method}ing" yield json.dumps({"type": "progress", "message": f"{log_action} {resource_log_id} ({i}/{total_resources_attempted}) to {target_url}..."}) + "\n" try: - # Send the request if http_method == "POST": response = session.post(target_url, json=local_resource, headers=headers, timeout=30) post_count += 1 - else: # Default to PUT + else: response = session.put(target_url, json=local_resource, headers=headers, timeout=30) put_count += 1 - response.raise_for_status() # Raises HTTPError for 4xx/5xx responses + response.raise_for_status() - # Log success success_msg = f"{http_method} successful for {resource_log_id} (Status: {response.status_code})" if http_method == "POST" and response.status_code == 201: - # Add Location header info if available on create - location = response.headers.get('Location') - if location: - # Try to extract ID from location header - match = re.search(f"{resource_type}/([^/]+)/_history", location) - new_id = match.group(1) if match else "unknown" - success_msg += f" -> New ID: {new_id}" - else: - success_msg += " (No Location header)" + location = response.headers.get("Location") + if location: + match = re.search(f"{resource_type}/([^/]+)/_history", location) + new_id = match.group(1) if match else "unknown" + success_msg += f" -> New ID: {new_id}" + else: + success_msg += " (No Location header)" yield json.dumps({"type": "success", "message": success_msg}) + "\n" success_count += 1 - # Track package info for successful upload pkg_found_success = False for p in pushed_packages_info: - if p["id"] == source_pkg: - p["resource_count"] += 1 - pkg_found_success = True - break + if p["id"] == source_pkg: + p["resource_count"] += 1 + pkg_found_success = True + break if not pkg_found_success: - pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) + pushed_packages_info.append({"id": source_pkg, "resource_count": 1}) - # --- CORRECTED ERROR HANDLING --- except requests.exceptions.HTTPError as http_err: outcome_text = "" - status_code = http_err.response.status_code if http_err.response is not None else 'N/A' + status_code = http_err.response.status_code if http_err.response is not None else "N/A" try: outcome = http_err.response.json() - if outcome and outcome.get('resourceType') == 'OperationOutcome': - issues = outcome.get('issue', []) - outcome_text = "; ".join([ f"{i.get('severity','info')}: {i.get('diagnostics', i.get('details',{}).get('text','No details'))}" for i in issues]) if issues else "OperationOutcome with no issues." 
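To illustrate the flattening performed here: an OperationOutcome like the following (illustrative values) collapses into one diagnostics string, severity first, falling back from diagnostics to details.text:

    outcome = {
        "resourceType": "OperationOutcome",
        "issue": [
            {"severity": "error", "diagnostics": "Resource does not conform to profile"},
            {"severity": "warning", "details": {"text": "Unable to expand ValueSet"}},
        ],
    }
    issues = outcome.get("issue", [])
    text = "; ".join(
        f"{i.get('severity', 'info')}: "
        f"{i.get('diagnostics', i.get('details', {}).get('text', 'No details'))}"
        for i in issues
    )
    # text == "error: Resource does not conform to profile; warning: Unable to expand ValueSet"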
- else: outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body" - except ValueError: outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body (or not JSON)" - # This block is now correctly indented + if outcome and outcome.get("resourceType") == "OperationOutcome": + issues = outcome.get("issue", []) + outcome_text = "; ".join([f"{i.get('severity', 'info')}: {i.get('diagnostics', i.get('details', {}).get('text', 'No details'))}" for i in issues]) if issues else "OperationOutcome with no issues." + else: + outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body" + except ValueError: + outcome_text = http_err.response.text[:200] if http_err.response is not None else "No response body (or not JSON)" error_msg = f"Failed {http_method} {resource_log_id} (Status: {status_code}): {outcome_text or str(http_err)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': error_msg}) - # --- END CORRECTION --- - + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": error_msg}) except requests.exceptions.Timeout: error_msg = f"Timeout during {http_method} {resource_log_id}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': 'Timeout'}) + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": "Timeout"}) except requests.exceptions.ConnectionError as conn_err: error_msg = f"Connection error during {http_method} {resource_log_id}: {conn_err}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Connection Error: {conn_err}'}) + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": f"Connection Error: {conn_err}"}) except requests.exceptions.RequestException as req_err: error_msg = f"Request error during {http_method} {resource_log_id}: {str(req_err)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Request Error: {req_err}'}) + yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": f"Request Error: {req_err}"}) except Exception as e: error_msg = f"Unexpected error during {http_method} {resource_log_id}: {str(e)}" - yield json.dumps({"type": "error", "message": error_msg}) + "\n"; failure_count += 1; failed_uploads_details.append({'resource': resource_log_id, 'error': f'Unexpected: {e}'}); logger.error(f"[API Push Stream] Upload error for {resource_log_id}: {e}", exc_info=True) - # --- End Execute Action --- - - else: # Resource was skipped - # Track package info even if skipped - pkg_found_skipped = False - for p in pushed_packages_info: - if p["id"] == source_pkg: - pkg_found_skipped = True; break - if not pkg_found_skipped: - pushed_packages_info.append({"id": source_pkg, "resource_count": 0}) # Add pkg with 0 count - # --- End Main Upload Loop --- + 
yield json.dumps({"type": "error", "message": error_msg}) + "\n" + failure_count += 1 + failed_uploads_details.append({"resource": resource_log_id, "error": f"Unexpected: {e}"}) + logger.error(f"[API Push Stream] Upload error for {resource_log_id}: {e}", exc_info=True) + else: + pkg_found_skipped = False + for p in pushed_packages_info: + if p["id"] == source_pkg: + pkg_found_skipped = True + break + if not pkg_found_skipped: + pushed_packages_info.append({"id": source_pkg, "resource_count": 0}) # --- Final Summary --- - final_status = "success" if failure_count == 0 else \ - "partial" if success_count > 0 else "failure" - - # --- Define prefixes before use --- + final_status = "success" if failure_count == 0 else "partial" if success_count > 0 else "failure" dry_run_prefix = "[DRY RUN] " if dry_run else "" force_prefix = "[FORCE UPLOAD] " if force_upload else "" - # --- End Define prefixes --- - - # Adjust summary message construction if total_resources_attempted == 0 and failure_count == 0: - summary_message = f"{dry_run_prefix}Push finished: No matching resources found to process." - final_status = "success" # Still success if no errors occurred + summary_message = f"{dry_run_prefix}Push finished: No matching resources found to process." + final_status = "success" else: - # Use the defined prefixes - summary_message = f"{dry_run_prefix}{force_prefix}Push finished: {post_count} POSTed, {put_count} PUT, {failure_count} failed, {skipped_count} skipped ({total_resources_attempted} resources attempted)." + summary_message = f"{dry_run_prefix}{force_prefix}Push finished: {post_count} POSTed, {put_count} PUT, {failure_count} failed, {skipped_count} skipped ({total_resources_attempted} resources attempted)." - # Create summary dictionary summary = { - "status": final_status, "message": summary_message, - "target_server": fhir_server_url, "package_name": package_name, "version": version, - "included_dependencies": include_dependencies, "resources_attempted": total_resources_attempted, - "success_count": success_count, "post_count": post_count, "put_count": put_count, - "failure_count": failure_count, "skipped_count": skipped_count, - "validation_failure_count": 0, # Placeholder - "failed_details": failed_uploads_details, "skipped_details": skipped_resources_details, - "pushed_packages_summary": pushed_packages_info, "dry_run": dry_run, "force_upload": force_upload, + "status": final_status, + "message": summary_message, + "target_server": fhir_server_url, + "package_name": package_name, + "version": version, + "included_dependencies": include_dependencies, + "resources_attempted": total_resources_attempted, + "success_count": success_count, + "post_count": post_count, + "put_count": put_count, + "failure_count": failure_count, + "skipped_count": skipped_count, + "validation_failure_count": 0, + "failed_details": failed_uploads_details, + "skipped_details": skipped_resources_details, + "pushed_packages_summary": pushed_packages_info, + "dry_run": dry_run, + "force_upload": force_upload, "resource_types_filter": resource_types_filter, "skip_files_filter": sorted(list(skip_files_set)) if skip_files_set else None } yield json.dumps({"type": "complete", "data": summary}) + "\n" logger.info(f"[API Push Stream] Completed {package_name}#{version}. Status: {final_status}. 
{summary_message}") - # --- Final Exception Handling for setup/initialization errors --- except FileNotFoundError as fnf_err: - logger.error(f"[API Push Stream] Setup error: {str(fnf_err)}", exc_info=False) - error_response = {"status": "error", "message": f"Setup error: {str(fnf_err)}"} - try: - yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n" - yield json.dumps({"type": "complete", "data": error_response}) + "\n" - except Exception as yield_e: logger.error(f"Error yielding final setup error: {yield_e}") + logger.error(f"[API Push Stream] Setup error: {str(fnf_err)}", exc_info=False) + error_response = {"status": "error", "message": f"Setup error: {str(fnf_err)}"} + try: + yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n" + yield json.dumps({"type": "complete", "data": error_response}) + "\n" + except Exception as yield_e: + logger.error(f"Error yielding final setup error: {yield_e}") except Exception as e: logger.error(f"[API Push Stream] Critical error during setup or stream generation: {str(e)}", exc_info=True) error_response = {"status": "error", "message": f"Server error during push setup: {str(e)}"} try: yield json.dumps({"type": "error", "message": error_response["message"]}) + "\n" yield json.dumps({"type": "complete", "data": error_response}) + "\n" - except Exception as yield_e: logger.error(f"Error yielding final critical error: {yield_e}") + except Exception as yield_e: + logger.error(f"Error yielding final critical error: {yield_e}") # --- END generate_push_stream FUNCTION --- @@ -3612,9 +3657,15 @@ def process_and_upload_test_data(server_info, options, temp_file_dir): session = requests.Session() base_url = server_info['url'].rstrip('/') upload_headers = {'Content-Type': 'application/fhir+json', 'Accept': 'application/fhir+json'} - if server_info['auth_type'] == 'bearerToken' and server_info['auth_token']: - upload_headers['Authorization'] = f"Bearer {server_info['auth_token']}" - yield json.dumps({"type": "info", "message": "Using Bearer Token auth."}) + "\n" + if server_info['auth_type'] in ['bearerToken', 'basic'] and server_info.get('auth_token'): + # Log the Authorization header (mask sensitive data) + auth_header = server_info['auth_token'] + if auth_header.startswith('Basic '): + auth_display = 'Basic ' + else: + auth_display = auth_header[:10] + '...' if len(auth_header) > 10 else auth_header + yield json.dumps({"type": "info", "message": f"Using {server_info['auth_type']} auth with header: Authorization: {auth_display}"}) + "\n" + upload_headers['Authorization'] = server_info['auth_token'] # FIXED: Use server_info['auth_token'] else: yield json.dumps({"type": "info", "message": "Using no auth."}) + "\n" @@ -3892,10 +3943,11 @@ def process_and_upload_test_data(server_info, options, temp_file_dir): # --- END Service Function --- # --- CORRECTED retrieve_bundles function with NEW logic --- -def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references=False, fetch_reference_bundles=False): +def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references=False, fetch_reference_bundles=False, auth_type='none', auth_token=None): """ Retrieve FHIR bundles and save to a ZIP file. Optionally fetches referenced resources, either individually by ID or as full bundles by type. + Supports authentication for custom FHIR servers. Yields NDJSON progress updates. 
""" temp_dir = None @@ -3903,29 +3955,35 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references total_initial_bundles = 0 fetched_individual_references = 0 fetched_type_bundles = 0 - retrieved_references_or_types = set() # Track fetched items to avoid duplicates + retrieved_references_or_types = set() temp_dir = tempfile.mkdtemp(prefix="fhir_retrieve_") logger.debug(f"Created temporary directory for bundle retrieval: {temp_dir}") yield json.dumps({"type": "progress", "message": f"Starting bundle retrieval for {len(resources)} resource types"}) + "\n" if validate_references: - yield json.dumps({"type": "info", "message": f"Reference fetching ON (Mode: {'Full Type Bundles' if fetch_reference_bundles else 'Individual Resources'})"}) + "\n" + yield json.dumps({"type": "info", "message": f"Reference fetching ON (Mode: {'Full Type Bundles' if fetch_reference_bundles else 'Individual Resources'})"}) + "\n" else: - yield json.dumps({"type": "info", "message": "Reference fetching OFF"}) + "\n" + yield json.dumps({"type": "info", "message": "Reference fetching OFF"}) + "\n" - - # --- Determine Base URL and Headers for Proxy --- + # Determine Base URL and Headers for Proxy base_proxy_url = f"{current_app.config['APP_BASE_URL'].rstrip('/')}/fhir" headers = {'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8'} is_custom_url = fhir_server_url != '/fhir' and fhir_server_url is not None and fhir_server_url.startswith('http') if is_custom_url: headers['X-Target-FHIR-Server'] = fhir_server_url.rstrip('/') + if auth_type in ['bearer', 'basic'] and auth_token: + auth_display = 'Basic ' if auth_type == 'basic' else (auth_token[:10] + '...' if len(auth_token) > 10 else auth_token) + yield json.dumps({"type": "info", "message": f"Using {auth_type} auth with header: Authorization: {auth_display}"}) + "\n" + headers['Authorization'] = auth_token + else: + yield json.dumps({"type": "info", "message": "Using no authentication for custom URL"}) + "\n" logger.debug(f"Will use proxy with X-Target-FHIR-Server: {headers['X-Target-FHIR-Server']}") else: + yield json.dumps({"type": "info", "message": "Using no authentication for local HAPI server"}) + "\n" logger.debug("Will use proxy targeting local HAPI server") - # --- Fetch Initial Bundles --- - initial_bundle_files = [] # Store paths for reference scanning + # Fetch Initial Bundles + initial_bundle_files = [] for resource_type in resources: url = f"{base_proxy_url}/{quote(resource_type)}" yield json.dumps({"type": "progress", "message": f"Fetching bundle for {resource_type} via proxy..."}) + "\n" @@ -3934,18 +3992,18 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references response = requests.get(url, headers=headers, timeout=60) logger.debug(f"Proxy response for {resource_type}: HTTP {response.status_code}") if response.status_code != 200: - # ... (keep existing error handling for initial fetch) ... - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." - except: pass - yield json.dumps({"type": "error", "message": f"Failed to fetch {resource_type}: {error_detail}"}) + "\n" - logger.error(f"Failed to fetch {resource_type} via proxy {url}: {error_detail}") - continue - try: bundle = response.json() + error_detail = f"Proxy returned HTTP {response.status_code}." + try: error_detail += f" Body: {response.text[:200]}..." 
+ except: pass + yield json.dumps({"type": "error", "message": f"Failed to fetch {resource_type}: {error_detail}"}) + "\n" + logger.error(f"Failed to fetch {resource_type} via proxy {url}: {error_detail}") + continue + try: + bundle = response.json() except ValueError as e: - yield json.dumps({"type": "error", "message": f"Invalid JSON response for {resource_type}: {str(e)}"}) + "\n" - logger.error(f"Invalid JSON from proxy for {resource_type} at {url}: {e}, Response: {response.text[:500]}") - continue + yield json.dumps({"type": "error", "message": f"Invalid JSON response for {resource_type}: {str(e)}"}) + "\n" + logger.error(f"Invalid JSON from proxy for {resource_type} at {url}: {e}, Response: {response.text[:500]}") + continue if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': yield json.dumps({"type": "error", "message": f"Expected Bundle for {resource_type}, got {bundle.get('resourceType', 'unknown')}"}) + "\n" logger.error(f"Expected Bundle for {resource_type}, got {bundle.get('resourceType', 'unknown')}") @@ -3956,9 +4014,10 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references # Save the bundle output_file = os.path.join(temp_dir, f"{resource_type}_bundle.json") try: - with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2) + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(bundle, f, indent=2) logger.debug(f"Wrote bundle to {output_file}") - initial_bundle_files.append(output_file) # Add for scanning + initial_bundle_files.append(output_file) total_initial_bundles += 1 yield json.dumps({"type": "success", "message": f"Saved bundle for {resource_type}"}) + "\n" except IOError as e: @@ -3970,20 +4029,21 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references logger.error(f"Error retrieving bundle for {resource_type} via proxy {url}: {e}") continue except Exception as e: - yield json.dumps({"type": "error", "message": f"Unexpected error fetching {resource_type}: {str(e)}"}) + "\n" - logger.error(f"Unexpected error during initial fetch for {resource_type} at {url}: {e}", exc_info=True) - continue + yield json.dumps({"type": "error", "message": f"Unexpected error fetching {resource_type}: {str(e)}"}) + "\n" + logger.error(f"Unexpected error during initial fetch for {resource_type} at {url}: {e}", exc_info=True) + continue - # --- Fetch Referenced Resources (Conditionally) --- + # Fetch Referenced Resources (Conditionally) if validate_references and initial_bundle_files: yield json.dumps({"type": "progress", "message": "Scanning retrieved bundles for references..."}) + "\n" all_references = set() - references_by_type = defaultdict(set) # To store { 'Patient': {'id1', 'id2'}, ... 
} + references_by_type = defaultdict(set) - # --- Scan for References --- + # Scan for References for bundle_file_path in initial_bundle_files: try: - with open(bundle_file_path, 'r', encoding='utf-8') as f: bundle = json.load(f) + with open(bundle_file_path, 'r', encoding='utf-8') as f: + bundle = json.load(f) for entry in bundle.get('entry', []): resource = entry.get('resource') if resource: @@ -3992,49 +4052,52 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references for ref_str in current_refs: if isinstance(ref_str, str) and '/' in ref_str and not ref_str.startswith('#'): all_references.add(ref_str) - # Group by type for bundle fetch mode try: ref_type = ref_str.split('/')[0] - if ref_type: references_by_type[ref_type].add(ref_str) - except Exception: pass # Ignore parsing errors here + if ref_type: + references_by_type[ref_type].add(ref_str) + except Exception: + pass except Exception as e: - yield json.dumps({"type": "warning", "message": f"Could not scan references in {os.path.basename(bundle_file_path)}: {e}"}) + "\n" - logger.warning(f"Error processing references in {bundle_file_path}: {e}") + yield json.dumps({"type": "warning", "message": f"Could not scan references in {os.path.basename(bundle_file_path)}: {e}"}) + "\n" + logger.warning(f"Error processing references in {bundle_file_path}: {e}") - # --- Fetch Logic --- + # Fetch Logic if not all_references: yield json.dumps({"type": "info", "message": "No references found to fetch."}) + "\n" else: if fetch_reference_bundles: - # --- Fetch Full Bundles by Type --- + # Fetch Full Bundles by Type unique_ref_types = sorted(list(references_by_type.keys())) yield json.dumps({"type": "progress", "message": f"Fetching full bundles for {len(unique_ref_types)} referenced types..."}) + "\n" logger.info(f"Fetching full bundles for referenced types: {unique_ref_types}") for ref_type in unique_ref_types: - if ref_type in retrieved_references_or_types: continue # Skip if type bundle already fetched + if ref_type in retrieved_references_or_types: + continue - url = f"{base_proxy_url}/{quote(ref_type)}" # Fetch all of this type + url = f"{base_proxy_url}/{quote(ref_type)}" yield json.dumps({"type": "progress", "message": f"Fetching full bundle for type {ref_type} via proxy..."}) + "\n" logger.debug(f"Sending GET request for full type bundle {ref_type} to proxy {url} with headers: {json.dumps(headers)}") try: - response = requests.get(url, headers=headers, timeout=180) # Longer timeout for full bundles + response = requests.get(url, headers=headers, timeout=180) logger.debug(f"Proxy response for {ref_type} bundle: HTTP {response.status_code}") if response.status_code != 200: - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." - except: pass - yield json.dumps({"type": "warning", "message": f"Failed to fetch full bundle for {ref_type}: {error_detail}"}) + "\n" - logger.warning(f"Failed to fetch full bundle {ref_type} via proxy {url}: {error_detail}") - retrieved_references_or_types.add(ref_type) # Mark type as attempted - continue + error_detail = f"Proxy returned HTTP {response.status_code}." + try: error_detail += f" Body: {response.text[:200]}..." 
+ except: pass + yield json.dumps({"type": "warning", "message": f"Failed to fetch full bundle for {ref_type}: {error_detail}"}) + "\n" + logger.warning(f"Failed to fetch full bundle {ref_type} via proxy {url}: {error_detail}") + retrieved_references_or_types.add(ref_type) + continue - try: bundle = response.json() + try: + bundle = response.json() except ValueError as e: - yield json.dumps({"type": "warning", "message": f"Invalid JSON for full {ref_type} bundle: {str(e)}"}) + "\n" - logger.warning(f"Invalid JSON response from proxy for full {ref_type} bundle at {url}: {e}") - retrieved_references_or_types.add(ref_type) - continue + yield json.dumps({"type": "warning", "message": f"Invalid JSON for full {ref_type} bundle: {str(e)}"}) + "\n" + logger.warning(f"Invalid JSON response from proxy for full {ref_type} bundle at {url}: {e}") + retrieved_references_or_types.add(ref_type) + continue if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': yield json.dumps({"type": "warning", "message": f"Expected Bundle for full {ref_type} fetch, got {bundle.get('resourceType', 'unknown')}"}) + "\n" @@ -4045,44 +4108,41 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references # Save the full type bundle output_file = os.path.join(temp_dir, f"ref_{ref_type}_BUNDLE.json") try: - with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2) - logger.debug(f"Wrote full type bundle to {output_file}") - fetched_type_bundles += 1 - retrieved_references_or_types.add(ref_type) - yield json.dumps({"type": "success", "message": f"Saved full bundle for type {ref_type}"}) + "\n" + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(bundle, f, indent=2) + logger.debug(f"Wrote full type bundle to {output_file}") + fetched_type_bundles += 1 + retrieved_references_or_types.add(ref_type) + yield json.dumps({"type": "success", "message": f"Saved full bundle for type {ref_type}"}) + "\n" except IOError as e: - yield json.dumps({"type": "warning", "message": f"Failed to save full bundle file for {ref_type}: {e}"}) + "\n" - logger.error(f"Failed to write full bundle file {output_file}: {e}") - retrieved_references_or_types.add(ref_type) # Still mark as attempted - + yield json.dumps({"type": "warning", "message": f"Failed to save full bundle file for {ref_type}: {e}"}) + "\n" + logger.error(f"Failed to write full bundle file {output_file}: {e}") + retrieved_references_or_types.add(ref_type) except requests.RequestException as e: yield json.dumps({"type": "warning", "message": f"Error connecting to proxy for full {ref_type} bundle: {str(e)}"}) + "\n" logger.warning(f"Error retrieving full {ref_type} bundle via proxy: {e}") retrieved_references_or_types.add(ref_type) except Exception as e: - yield json.dumps({"type": "warning", "message": f"Unexpected error fetching full {ref_type} bundle: {str(e)}"}) + "\n" - logger.warning(f"Unexpected error during full {ref_type} bundle fetch: {e}", exc_info=True) - retrieved_references_or_types.add(ref_type) - # End loop through ref_types + yield json.dumps({"type": "warning", "message": f"Unexpected error fetching full {ref_type} bundle: {str(e)}"}) + "\n" + logger.warning(f"Unexpected error during full {ref_type} bundle fetch: {e}", exc_info=True) + retrieved_references_or_types.add(ref_type) else: - # --- Fetch Individual Referenced Resources --- + # Fetch Individual Referenced Resources yield json.dumps({"type": "progress", "message": f"Fetching {len(all_references)} unique referenced resources 
individually..."}) + "\n" logger.info(f"Fetching {len(all_references)} unique referenced resources by ID.") - for ref in sorted(list(all_references)): # Sort for consistent order - if ref in retrieved_references_or_types: continue # Skip already fetched + for ref in sorted(list(all_references)): + if ref in retrieved_references_or_types: + continue try: - # Parse reference ref_parts = ref.split('/') if len(ref_parts) != 2 or not ref_parts[0] or not ref_parts[1]: logger.warning(f"Skipping invalid reference format: {ref}") continue ref_type, ref_id = ref_parts - # Fetch individual resource using _id search search_param = quote(f"_id={ref_id}") url = f"{base_proxy_url}/{quote(ref_type)}?{search_param}" - yield json.dumps({"type": "progress", "message": f"Fetching referenced {ref_type}/{ref_id} via proxy..."}) + "\n" logger.debug(f"Sending GET request for referenced {ref} to proxy {url} with headers: {json.dumps(headers)}") @@ -4090,20 +4150,21 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references logger.debug(f"Proxy response for referenced {ref}: HTTP {response.status_code}") if response.status_code != 200: - error_detail = f"Proxy returned HTTP {response.status_code}." - try: error_detail += f" Body: {response.text[:200]}..." - except: pass - yield json.dumps({"type": "warning", "message": f"Failed to fetch referenced {ref}: {error_detail}"}) + "\n" - logger.warning(f"Failed to fetch referenced {ref} via proxy {url}: {error_detail}") - retrieved_references_or_types.add(ref) - continue + error_detail = f"Proxy returned HTTP {response.status_code}." + try: error_detail += f" Body: {response.text[:200]}..." + except: pass + yield json.dumps({"type": "warning", "message": f"Failed to fetch referenced {ref}: {error_detail}"}) + "\n" + logger.warning(f"Failed to fetch referenced {ref} via proxy {url}: {error_detail}") + retrieved_references_or_types.add(ref) + continue - try: bundle = response.json() + try: + bundle = response.json() except ValueError as e: - yield json.dumps({"type": "warning", "message": f"Invalid JSON for referenced {ref}: {str(e)}"}) + "\n" - logger.warning(f"Invalid JSON from proxy for ref {ref} at {url}: {e}") - retrieved_references_or_types.add(ref) - continue + yield json.dumps({"type": "warning", "message": f"Invalid JSON for referenced {ref}: {str(e)}"}) + "\n" + logger.warning(f"Invalid JSON from proxy for ref {ref} at {url}: {e}") + retrieved_references_or_types.add(ref) + continue if not isinstance(bundle, dict) or bundle.get('resourceType') != 'Bundle': yield json.dumps({"type": "warning", "message": f"Expected Bundle for referenced {ref}, got {bundle.get('resourceType', 'unknown')}"}) + "\n" @@ -4111,25 +4172,24 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references continue if not bundle.get('entry'): - yield json.dumps({"type": "info", "message": f"Referenced resource {ref} not found on server." 
}) + "\n" + yield json.dumps({"type": "info", "message": f"Referenced resource {ref} not found on server."}) + "\n" logger.info(f"Referenced resource {ref} not found via search {url}") retrieved_references_or_types.add(ref) continue # Save the bundle containing the single referenced resource - # Use a filename indicating it's an individual reference fetch output_file = os.path.join(temp_dir, f"ref_{ref_type}_{ref_id}.json") try: - with open(output_file, 'w', encoding='utf-8') as f: json.dump(bundle, f, indent=2) - logger.debug(f"Wrote referenced resource bundle to {output_file}") - fetched_individual_references += 1 - retrieved_references_or_types.add(ref) - yield json.dumps({"type": "success", "message": f"Saved referenced resource {ref}"}) + "\n" + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(bundle, f, indent=2) + logger.debug(f"Wrote referenced resource bundle to {output_file}") + fetched_individual_references += 1 + retrieved_references_or_types.add(ref) + yield json.dumps({"type": "success", "message": f"Saved referenced resource {ref}"}) + "\n" except IOError as e: - yield json.dumps({"type": "warning", "message": f"Failed to save file for referenced {ref}: {e}"}) + "\n" - logger.error(f"Failed to write file {output_file}: {e}") - retrieved_references_or_types.add(ref) - + yield json.dumps({"type": "warning", "message": f"Failed to save file for referenced {ref}: {e}"}) + "\n" + logger.error(f"Failed to write file {output_file}: {e}") + retrieved_references_or_types.add(ref) except requests.RequestException as e: yield json.dumps({"type": "warning", "message": f"Network error fetching referenced {ref}: {str(e)}"}) + "\n" logger.warning(f"Network error retrieving referenced {ref} via proxy: {e}") @@ -4138,10 +4198,8 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references yield json.dumps({"type": "warning", "message": f"Unexpected error fetching referenced {ref}: {str(e)}"}) + "\n" logger.warning(f"Unexpected error during reference fetch for {ref}: {e}", exc_info=True) retrieved_references_or_types.add(ref) - # End loop through individual references - # --- End Reference Fetching Logic --- - # --- Create Final ZIP File --- + # Create Final ZIP File yield json.dumps({"type": "progress", "message": f"Creating ZIP file {os.path.basename(output_zip)}..."}) + "\n" files_to_zip = [f for f in os.listdir(temp_dir) if f.endswith('.json')] if not files_to_zip: @@ -4150,21 +4208,23 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references else: logger.debug(f"Found {len(files_to_zip)} JSON files to include in ZIP: {files_to_zip}") try: - with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf: + with zipfile.ZipFile(output_zip, 'w', zipfile.ZIP_DEFLATED) as zipf: for filename in files_to_zip: file_path = os.path.join(temp_dir, filename) - if os.path.exists(file_path): zipf.write(file_path, filename) - else: logger.error(f"File {file_path} disappeared before adding to ZIP.") - yield json.dumps({"type": "success", "message": f"ZIP file created: {os.path.basename(output_zip)} with {len(files_to_zip)} files."}) + "\n" + if os.path.exists(file_path): + zipf.write(file_path, filename) + else: + logger.error(f"File {file_path} disappeared before adding to ZIP.") + yield json.dumps({"type": "success", "message": f"ZIP file created: {os.path.basename(output_zip)} with {len(files_to_zip)} files."}) + "\n" except Exception as e: - yield json.dumps({"type": "error", "message": f"Failed to create ZIP file: {e}"}) + "\n" - 
logger.error(f"Error creating ZIP file {output_zip}: {e}", exc_info=True) + yield json.dumps({"type": "error", "message": f"Failed to create ZIP file: {e}"}) + "\n" + logger.error(f"Error creating ZIP file {output_zip}: {e}", exc_info=True) - # --- Final Completion Message --- + # Final Completion Message completion_message = ( f"Bundle retrieval finished. Initial bundles: {total_initial_bundles}, " f"Referenced items fetched: {fetched_individual_references if not fetch_reference_bundles else fetched_type_bundles} " - f"({ 'individual resources' if not fetch_reference_bundles else 'full type bundles' })." + f"({'individual resources' if not fetch_reference_bundles else 'full type bundles'})" ) yield json.dumps({ "type": "complete", @@ -4174,16 +4234,14 @@ def retrieve_bundles(fhir_server_url, resources, output_zip, validate_references "fetched_individual_references": fetched_individual_references, "fetched_type_bundles": fetched_type_bundles, "reference_mode": "individual" if validate_references and not fetch_reference_bundles else "type_bundle" if validate_references and fetch_reference_bundles else "off" - } + } }) + "\n" except Exception as e: - # Catch errors during setup (like temp dir creation) yield json.dumps({"type": "error", "message": f"Critical error during retrieval setup: {str(e)}"}) + "\n" logger.error(f"Unexpected error in retrieve_bundles setup: {e}", exc_info=True) - yield json.dumps({ "type": "complete", "message": f"Retrieval failed: {str(e)}", "data": {"total_initial_bundles": 0, "fetched_individual_references": 0, "fetched_type_bundles": 0} }) + "\n" + yield json.dumps({"type": "complete", "message": f"Retrieval failed: {str(e)}", "data": {"total_initial_bundles": 0, "fetched_individual_references": 0, "fetched_type_bundles": 0}}) + "\n" finally: - # --- Cleanup Temporary Directory --- if temp_dir and os.path.exists(temp_dir): try: shutil.rmtree(temp_dir) diff --git a/templates/base.html b/templates/base.html index bf27b44..1437ba7 100644 --- a/templates/base.html +++ b/templates/base.html @@ -5,7 +5,7 @@ - + @@ -769,9 +769,6 @@ - @@ -801,7 +798,7 @@ Configure HAPI {% endif %} - + {# End container-fluid #} @@ -198,13 +200,17 @@ document.addEventListener('DOMContentLoaded', function() { const pushIgForm = document.getElementById('pushIgForm'); const pushButton = document.getElementById('pushButton'); const liveConsole = document.getElementById('liveConsole'); - const responseDiv = document.getElementById('pushResponse'); // Area for final report - const reportActions = document.getElementById('reportActions'); // Container for report buttons - const copyReportBtn = document.getElementById('copyReportBtn'); // New copy button - const downloadReportBtn = document.getElementById('downloadReportBtn'); // New download button + const responseDiv = document.getElementById('pushResponse'); + const reportActions = document.getElementById('reportActions'); + const copyReportBtn = document.getElementById('copyReportBtn'); + const downloadReportBtn = document.getElementById('downloadReportBtn'); const authTypeSelect = document.getElementById('authType'); - const authTokenGroup = document.getElementById('authTokenGroup'); + const authInputsGroup = document.getElementById('authInputsGroup'); + const bearerTokenInput = document.getElementById('bearerTokenInput'); + const basicAuthInputs = document.getElementById('basicAuthInputs'); const authTokenInput = document.getElementById('authToken'); + const usernameInput = document.getElementById('username'); + const passwordInput = 
document.getElementById('password'); const resourceTypesFilterInput = document.getElementById('resourceTypesFilter'); const skipFilesFilterInput = document.getElementById('skipFilesFilter'); const dryRunCheckbox = document.getElementById('dryRun'); @@ -226,7 +232,7 @@ document.addEventListener('DOMContentLoaded', function() { if (packageSelect && dependencyModeField) { packageSelect.addEventListener('change', function() { const packageId = this.value; - dependencyModeField.value = ''; // Clear on change + dependencyModeField.value = ''; if (packageId) { const [packageName, version] = packageId.split('#'); fetch(`/get-package-metadata?package_name=${packageName}&version=${version}`) @@ -240,24 +246,32 @@ document.addEventListener('DOMContentLoaded', function() { if (packageSelect.value) { packageSelect.dispatchEvent(new Event('change')); } } - // Show/Hide Bearer Token Input - if (authTypeSelect && authTokenGroup) { + // Show/Hide Auth Inputs + if (authTypeSelect && authInputsGroup && bearerTokenInput && basicAuthInputs) { authTypeSelect.addEventListener('change', function() { - authTokenGroup.style.display = this.value === 'bearerToken' ? 'block' : 'none'; + authInputsGroup.style.display = (this.value === 'bearerToken' || this.value === 'basic') ? 'block' : 'none'; + bearerTokenInput.style.display = this.value === 'bearerToken' ? 'block' : 'none'; + basicAuthInputs.style.display = this.value === 'basic' ? 'block' : 'none'; + // Clear inputs when switching if (this.value !== 'bearerToken' && authTokenInput) authTokenInput.value = ''; + if (this.value !== 'basic' && usernameInput) usernameInput.value = ''; + if (this.value !== 'basic' && passwordInput) passwordInput.value = ''; }); - authTokenGroup.style.display = authTypeSelect.value === 'bearerToken' ? 'block' : 'none'; + authInputsGroup.style.display = (authTypeSelect.value === 'bearerToken' || authTypeSelect.value === 'basic') ? 'block' : 'none'; + bearerTokenInput.style.display = authTypeSelect.value === 'bearerToken' ? 'block' : 'none'; + basicAuthInputs.style.display = authTypeSelect.value === 'basic' ? 'block' : 'none'; + } else { + console.error("Auth elements not found."); } - // --- NEW: Report Action Button Listeners --- + // Report Action Button Listeners if (copyReportBtn && responseDiv) { copyReportBtn.addEventListener('click', () => { - const reportAlert = responseDiv.querySelector('.alert'); // Get the alert div inside - const reportText = reportAlert ? reportAlert.innerText || reportAlert.textContent : ''; // Get text content + const reportAlert = responseDiv.querySelector('.alert'); + const reportText = reportAlert ? 
reportAlert.innerText || reportAlert.textContent : ''; if (reportText && navigator.clipboard) { navigator.clipboard.writeText(reportText) .then(() => { - // Optional: Provide feedback (e.g., change button text/icon) const originalIcon = copyReportBtn.innerHTML; copyReportBtn.innerHTML = ' Copied!'; setTimeout(() => { copyReportBtn.innerHTML = originalIcon; }, 2000); @@ -267,9 +281,9 @@ document.addEventListener('DOMContentLoaded', function() { alert('Failed to copy report text.'); }); } else if (!navigator.clipboard) { - alert('Clipboard API not available in this browser.'); + alert('Clipboard API not available in this browser.'); } else { - alert('No report content found to copy.'); + alert('No report content found to copy.'); } }); } @@ -292,27 +306,27 @@ document.addEventListener('DOMContentLoaded', function() { document.body.appendChild(link); link.click(); document.body.removeChild(link); - URL.revokeObjectURL(url); // Clean up + URL.revokeObjectURL(url); } else { alert('No report content found to download.'); } }); } - // --- END NEW --- - - // --- Form Submission --- + // Form Submission if (pushIgForm) { pushIgForm.addEventListener('submit', async function(event) { event.preventDefault(); - // Get form values (with null checks for elements) + // Get form values const packageId = packageSelect ? packageSelect.value : null; const fhirServerUrl = fhirServerUrlInput ? fhirServerUrlInput.value.trim() : null; if (!packageId || !fhirServerUrl) { alert('Please select package and enter FHIR Server URL.'); return; } const [packageName, version] = packageId.split('#'); const auth_type = authTypeSelect ? authTypeSelect.value : 'none'; const auth_token = (auth_type === 'bearerToken' && authTokenInput) ? authTokenInput.value : null; + const username = (auth_type === 'basic' && usernameInput) ? usernameInput.value.trim() : null; + const password = (auth_type === 'basic' && passwordInput) ? passwordInput.value : null; const resource_types_filter_raw = resourceTypesFilterInput ? resourceTypesFilterInput.value.trim() : ''; const resource_types_filter = resource_types_filter_raw ? resource_types_filter_raw.split(',').map(s => s.trim()).filter(s => s) : null; const skip_files_raw = skipFilesFilterInput ? skipFilesFilterInput.value.trim() : ''; @@ -322,12 +336,23 @@ document.addEventListener('DOMContentLoaded', function() { const include_dependencies = includeDependenciesCheckbox ? includeDependenciesCheckbox.checked : true; const force_upload = forceUploadCheckbox ? forceUploadCheckbox.checked : false; - // UI Updates & API Key - if (pushButton) { pushButton.disabled = true; pushButton.textContent = 'Processing...'; } - if (liveConsole) { liveConsole.innerHTML = `
${new Date().toLocaleTimeString()} [INFO] Starting ${dry_run ? 'DRY RUN ' : ''}${force_upload ? 'FORCE ' : ''}push for ${packageName}#${version}...
`; } - if (responseDiv) { responseDiv.innerHTML = 'Processing...'; } // Clear previous report - if (reportActions) { reportActions.style.display = 'none'; } // Hide report buttons initially - const internalApiKey = {{ api_key | default("") | tojson }}; // Use tojson filter + // Validate Basic Auth inputs + if (auth_type === 'basic') { + if (!username) { alert('Please enter a username for Basic Authentication.'); return; } + if (!password) { alert('Please enter a password for Basic Authentication.'); return; } + } + + // UI Updates + if (pushButton) { + pushButton.disabled = true; + pushButton.innerHTML = ' Processing...'; + } + if (liveConsole) { + liveConsole.innerHTML = `
${new Date().toLocaleTimeString()} [INFO] Starting ${dry_run ? 'DRY RUN ' : ''}${force_upload ? 'FORCE ' : ''}push for ${packageName}#${version}...
`; + } + if (responseDiv) { responseDiv.innerHTML = 'Processing...'; } + if (reportActions) { reportActions.style.display = 'none'; } + const internalApiKey = {{ api_key | default("") | tojson }}; try { // API Fetch @@ -335,10 +360,19 @@ document.addEventListener('DOMContentLoaded', function() { method: 'POST', headers: { 'Content-Type': 'application/json', 'Accept': 'application/x-ndjson', 'X-CSRFToken': csrfToken, 'X-API-Key': internalApiKey }, body: JSON.stringify({ - package_name: packageName, version: version, fhir_server_url: fhirServerUrl, - include_dependencies: include_dependencies, auth_type: auth_type, auth_token: auth_token, - resource_types_filter: resource_types_filter, skip_files: skip_files, - dry_run: dry_run, verbose: isVerboseChecked, force_upload: force_upload + package_name: packageName, + version: version, + fhir_server_url: fhirServerUrl, + include_dependencies: include_dependencies, + auth_type: auth_type, + auth_token: auth_token, + username: username, + password: password, + resource_types_filter: resource_types_filter, + skip_files: skip_files, + dry_run: dry_run, + verbose: isVerboseChecked, + force_upload: force_upload }) }); @@ -361,25 +395,28 @@ document.addEventListener('DOMContentLoaded', function() { const data = JSON.parse(line); const timestamp = new Date().toLocaleTimeString(); let messageClass = 'text-light'; let prefix = '[INFO]'; let shouldDisplay = false; - switch (data.type) { // Determine if message should display based on verbose + switch (data.type) { case 'start': case 'error': case 'complete': shouldDisplay = true; break; case 'success': case 'warning': case 'info': case 'progress': if (isVerboseChecked) { shouldDisplay = true; } break; default: if (isVerboseChecked) { shouldDisplay = true; console.warn("Unknown type:", data.type); prefix = '[UNKNOWN]'; } break; } - if (shouldDisplay && liveConsole) { // Set prefix/class and append to console + if (shouldDisplay && liveConsole) { if(data.type==='error'){prefix='[ERROR]';messageClass='text-danger';} else if(data.type==='complete'){const s=data.data?.status||'info';if(s==='success'){prefix='[SUCCESS]';messageClass='text-success';}else if(s==='partial'){prefix='[PARTIAL]';messageClass='text-warning';}else{prefix='[ERROR]';messageClass='text-danger';}} else if(data.type==='start'){prefix='[START]';messageClass='text-info';} - else if(data.type==='success'){prefix='[SUCCESS]';messageClass='text-success';}else if(data.type==='warning'){prefix='[WARNING]';messageClass='text-warning';}else if(data.type==='info'){prefix='[INFO]';messageClass='text-info';}else{prefix='[PROGRESS]';messageClass='text-light';} + else if(data.type==='success'){prefix='[SUCCESS]';messageClass='text-success';} + else if(data.type==='warning'){prefix='[WARNING]';messageClass='text-warning';} + else if(data.type==='info'){prefix='[INFO]';messageClass='text-info';} + else{prefix='[PROGRESS]';messageClass='text-light';} const messageDiv = document.createElement('div'); messageDiv.className = messageClass; const messageText = (data.type === 'complete' && data.data) ? 
data.data.message : data.message; - messageDiv.textContent = `${timestamp} ${prefix} ${sanitizeText(messageText) || 'Empty message.'}`; + messageDiv.innerHTML = `${timestamp} ${prefix} ${sanitizeText(messageText) || 'Empty message.'}`; liveConsole.appendChild(messageDiv); liveConsole.scrollTop = liveConsole.scrollHeight; } - if (data.type === 'complete' && responseDiv) { // Update final summary box + if (data.type === 'complete' && responseDiv) { const summaryData = data.data || {}; let alertClass = 'alert-info'; let statusText = 'Info'; let pushedPkgs = 'None'; let failHtml = ''; let skipHtml = ''; const isDryRun = summaryData.dry_run || false; const isForceUpload = summaryData.force_upload || false; const typeFilterUsed = summaryData.resource_types_filter ? summaryData.resource_types_filter.join(', ') : 'All'; @@ -391,28 +428,74 @@ document.addEventListener('DOMContentLoaded', function() { if (summaryData.skipped_details?.length > 0) { skipHtml = '
Skipped:
    '; summaryData.skipped_details.forEach(s => {skipHtml += `
  • ${sanitizeText(s.resource)}: ${sanitizeText(s.reason)}
  • `;}); skipHtml += '
';} responseDiv.innerHTML = `
${isDryRun?'[DRY RUN] ':''}${isForceUpload?'[FORCE] ':''}${statusText}: ${sanitizeText(summaryData.message)||'Complete.'}
Target: ${sanitizeText(summaryData.target_server)}
Package: ${sanitizeText(summaryData.package_name)}#${sanitizeText(summaryData.version)}
Config: Deps=${summaryData.included_dependencies?'Yes':'No'}, Types=${sanitizeText(typeFilterUsed)}, SkipFiles=${sanitizeText(fileFilterUsed)}, DryRun=${isDryRun?'Yes':'No'}, Force=${isForceUpload?'Yes':'No'}, Verbose=${isVerboseChecked?'Yes':'No'}
Stats: Attempt=${sanitizeText(summaryData.resources_attempted)}, Success=${sanitizeText(summaryData.success_count)}, Fail=${sanitizeText(summaryData.failure_count)}, Skip=${sanitizeText(summaryData.skipped_count)}
Pushed Pkgs:
${pushedPkgs}
${failHtml}${skipHtml}
`;
-                                if (reportActions) { reportActions.style.display = 'block'; } // Show report buttons
+                                if (reportActions) { reportActions.style.display = 'block'; }
                            }
-                    } catch (parseError) { /* (Handle JSON parse errors) */ console.error('Stream parse error:', parseError); if(liveConsole){/*...add error to console...*/} }
-                } // end for loop
-            } // end while loop
-
-            // Process Final Buffer (if any)
-            if (buffer.trim()) {
-                try { /* (Parsing logic for final buffer, similar to above) */ }
-                catch (parseError) { /* (Handle final buffer parse error) */ }
+                        } catch (parseError) {
+                            console.error('Stream parse error:', parseError);
+                            if (liveConsole) {
+                                const errDiv = document.createElement('div');
+                                errDiv.className = 'text-danger';
+                                errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] Stream parse error: ${sanitizeText(parseError.message)}`;
+                                liveConsole.appendChild(errDiv);
+                                liveConsole.scrollTop = liveConsole.scrollHeight;
+                            }
+                        }
+                    }
                }
-            } catch (error) { // Handle overall fetch/network errors
+            // Process Final Buffer
+            if (buffer.trim()) {
+                try {
+                    const data = JSON.parse(buffer.trim());
+                    if (data.type === 'complete' && responseDiv) {
+                        // Same summary rendering as above
+                        const summaryData = data.data || {};
+                        let alertClass = 'alert-info'; let statusText = 'Info'; let pushedPkgs = 'None'; let failHtml = ''; let skipHtml = '';
+                        const isDryRun = summaryData.dry_run || false; const isForceUpload = summaryData.force_upload || false;
+                        const typeFilterUsed = summaryData.resource_types_filter ? summaryData.resource_types_filter.join(', ') : 'All';
+                        const fileFilterUsed = summaryData.skip_files_filter ? summaryData.skip_files_filter.join(', ') : 'None';
+
+                        if (summaryData.pushed_packages_summary?.length > 0) { pushedPkgs = summaryData.pushed_packages_summary.map(p => `${sanitizeText(p.id)} (${sanitizeText(p.resource_count)} resources)`).join('
'); } + if (summaryData.status === 'success') { alertClass = 'alert-success'; statusText = 'Success';} else if (summaryData.status === 'partial') { alertClass = 'alert-warning'; statusText = 'Partial Success'; } else { alertClass = 'alert-danger'; statusText = 'Error'; } + if (summaryData.failed_details?.length > 0) { failHtml = '
Failures:
    '; summaryData.failed_details.forEach(f => {failHtml += `
  • ${sanitizeText(f.resource)}: ${sanitizeText(f.error)}
  • `;}); failHtml += '
';} + if (summaryData.skipped_details?.length > 0) { skipHtml = '
Skipped:
    '; summaryData.skipped_details.forEach(s => {skipHtml += `
  • ${sanitizeText(s.resource)}: ${sanitizeText(s.reason)}
  • `;}); skipHtml += '
';} + + responseDiv.innerHTML = `
${isDryRun?'[DRY RUN] ':''}${isForceUpload?'[FORCE] ':''}${statusText}: ${sanitizeText(summaryData.message)||'Complete.'}
Target: ${sanitizeText(summaryData.target_server)}
Package: ${sanitizeText(summaryData.package_name)}#${sanitizeText(summaryData.version)}
Config: Deps=${summaryData.included_dependencies?'Yes':'No'}, Types=${sanitizeText(typeFilterUsed)}, SkipFiles=${sanitizeText(fileFilterUsed)}, DryRun=${isDryRun?'Yes':'No'}, Force=${isForceUpload?'Yes':'No'}, Verbose=${isVerboseChecked?'Yes':'No'}
Stats: Attempt=${sanitizeText(summaryData.resources_attempted)}, Success=${sanitizeText(summaryData.success_count)}, Fail=${sanitizeText(summaryData.failure_count)}, Skip=${sanitizeText(summaryData.skipped_count)}
Pushed Pkgs:
${pushedPkgs}
${failHtml}${skipHtml}
`;
+                        if (reportActions) { reportActions.style.display = 'block'; }
+                    }
+                } catch (parseError) {
+                    console.error('Final buffer parse error:', parseError);
+                    if (liveConsole) {
+                        const errDiv = document.createElement('div');
+                        errDiv.className = 'text-danger';
+                        errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] Final buffer parse error: ${sanitizeText(parseError.message)}`;
+                        liveConsole.appendChild(errDiv);
+                        liveConsole.scrollTop = liveConsole.scrollHeight;
+                    }
+                }
+            }
+
+        } catch (error) {
            console.error("Push operation failed:", error);
-            if (liveConsole) { /* ... add error to console ... */ }
-            if (responseDiv) { responseDiv.innerHTML = `
Error: ${sanitizeText(error.message || error)}
`; } - if (reportActions) { reportActions.style.display = 'none'; } // Hide buttons on error - } finally { // Re-enable button - if (pushButton) { pushButton.disabled = false; pushButton.textContent = 'Push to FHIR Server'; } + if (liveConsole) { + const errDiv = document.createElement('div'); + errDiv.className = 'text-danger'; + errDiv.textContent = `${new Date().toLocaleTimeString()} [ERROR] ${sanitizeText(error.message || error)}`; + liveConsole.appendChild(errDiv); + liveConsole.scrollTop = liveConsole.scrollHeight; + } + if (responseDiv) { + responseDiv.innerHTML = `
Error: ${sanitizeText(error.message || error)}
`;
+            }
+            if (reportActions) { reportActions.style.display = 'none'; }
+        } finally {
+            if (pushButton) {
+                pushButton.disabled = false;
+                pushButton.innerHTML = 'Push to FHIR Server';
+            }
        }
-    }); // End form submit listener
+    });
    } else { console.error("Push IG Form element not found."); }
-}); // End DOMContentLoaded listener
+});
 
-{% endblock %}
+{% endblock %}
\ No newline at end of file
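Reviewer note on the stream contract used above: the push endpoint emits newline-delimited JSON, one event per line shaped like {type, message, data}, and only the terminal 'complete' event carries the summary payload the UI renders. A minimal Python consumer is sketched below for reference; the /api/push-ig path and the field handling are assumptions inferred from the form script, not confirmed backend API.

# Sketch only: consuming the NDJSON push stream from Python (endpoint path assumed).
import json
import requests

def consume_push_stream(base_url, payload, api_key):
    url = f"{base_url}/api/push-ig"  # assumption: mirrors the form's fetch target
    headers = {'Accept': 'application/x-ndjson', 'X-API-Key': api_key}
    with requests.post(url, json=payload, headers=headers, stream=True) as resp:
        resp.raise_for_status()
        for raw_line in resp.iter_lines():
            if not raw_line:
                continue  # skip empty keep-alive lines
            event = json.loads(raw_line)
            if event.get('type') == 'complete':
                return event.get('data', {})  # final summary, as rendered in pushResponse
            print(f"[{event.get('type', 'info').upper()}] {event.get('message', '')}")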
diff --git a/templates/fhir_ui.html
index f2a9304..bd33b58 100644
--- a/templates/fhir_ui.html
+++ b/templates/fhir_ui.html
@@ -8,13 +8,6 @@
 
 Interact with FHIR servers using GET, POST, PUT, or DELETE requests. Toggle between local HAPI or a custom server to explore resources or perform searches.
 
Interact with FHIR servers using GET, POST, PUT, or DELETE requests. Toggle between local HAPI or a custom server to explore resources or perform searches.

- @@ -34,6 +27,31 @@ Toggle to use local HAPI (http://localhost:8080/fhir) or enter a custom FHIR server URL. +
@@ -44,13 +62,10 @@
- - -
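The hunks above add the authentication controls (markup elided by extraction) that the script hunk below reads: an auth type selector plus bearer-token and username/password inputs. For reference, here is a minimal Python sketch of the Authorization header values those inputs produce; the helper name and its placement are illustrative, not part of this patch.

# Sketch: the Authorization headers the UI builds (JS btoa == Base64 here).
import base64

def build_auth_header(auth_type, token=None, username=None, password=None):
    if auth_type == 'bearer' and token:
        return {'Authorization': f'Bearer {token}'}
    if auth_type == 'basic' and username and password:
        creds = base64.b64encode(f'{username}:{password}'.encode('utf-8')).decode('ascii')
        return {'Authorization': f'Basic {creds}'}
    return {}  # 'none' or incomplete input: omit the header entirely

JavaScript's btoa of the username:password pair yields the same Base64 credential string, so client and server agree on the Basic scheme format.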
@@ -108,94 +123,90 @@ document.addEventListener('DOMContentLoaded', function() { const copyRequestBodyButton = document.getElementById('copyRequestBody'); const copyResponseHeadersButton = document.getElementById('copyResponseHeaders'); const copyResponseBodyButton = document.getElementById('copyResponseBody'); + const authSection = document.getElementById('authSection'); + const authTypeSelect = document.getElementById('authType'); + const authInputsGroup = document.getElementById('authInputsGroup'); + const bearerTokenInput = document.getElementById('bearerToken'); + const basicAuthInputs = document.getElementById('basicAuthInputs'); + const usernameInput = document.getElementById('username'); + const passwordInput = document.getElementById('password'); // Basic check for critical elements if (!form || !sendButton || !fhirPathInput || !responseCard || !toggleServerButton || !fhirServerUrlInput || !responseStatus || !responseHeaders || !responseBody || !toggleLabel) { console.error("One or more critical UI elements could not be found. Script execution halted."); alert("Error initializing UI components. Please check the console."); - return; // Stop script execution + return; } console.log("All critical elements checked/found."); // --- State Variable --- - // Default assumes standalone, will be forced otherwise by appMode check below let useLocalHapi = true; // --- Get App Mode from Flask Context --- - // Ensure this variable is correctly passed from Flask using the context_processor const appMode = '{{ app_mode | default("standalone") | lower }}'; console.log('App Mode Detected:', appMode); - // --- DEFINE HELPER FUNCTIONS --- - - // Validates request body, returns null on error, otherwise returns body string or empty string + // --- Helper Functions --- function validateRequestBody(method, path) { if (!requestBodyInput || !jsonError) return (method === 'POST' || method === 'PUT') ? 
'' : undefined; const bodyValue = requestBodyInput.value.trim(); - requestBodyInput.classList.remove('is-invalid'); // Reset validation + requestBodyInput.classList.remove('is-invalid'); jsonError.style.display = 'none'; - if (!bodyValue) return ''; // Empty body is valid for POST/PUT + if (!bodyValue) return ''; const isSearch = path && path.endsWith('_search'); const isJson = bodyValue.startsWith('{') || bodyValue.startsWith('['); const isXml = bodyValue.startsWith('<'); const isForm = !isJson && !isXml; - if (method === 'POST' && isSearch && isForm) { // POST Search with form params + if (method === 'POST' && isSearch && isForm) { return bodyValue; - } else if (method === 'POST' || method === 'PUT') { // Other POST/PUT expect JSON/XML + } else if (method === 'POST' || method === 'PUT') { if (isJson) { try { JSON.parse(bodyValue); return bodyValue; } catch (e) { jsonError.textContent = `Invalid JSON: ${e.message}`; } } else if (isXml) { - // Basic XML check is difficult in JS, accept it for now - // Backend or target server will validate fully return bodyValue; - } else { // Neither JSON nor XML, and not a POST search form + } else { jsonError.textContent = 'Request body must be valid JSON or XML for PUT/POST (unless using POST _search with form parameters).'; } requestBodyInput.classList.add('is-invalid'); jsonError.style.display = 'block'; - return null; // Indicate validation error + return null; } - return undefined; // Indicate no body should be sent for GET/DELETE + return undefined; } - // Cleans path for proxying function cleanFhirPath(path) { if (!path) return ''; - // Remove optional leading 'r4/' or 'fhir/', then trim slashes return path.replace(/^(r4\/|fhir\/)*/i, '').replace(/^\/+|\/+$/g, ''); } - // Copies text to clipboard async function copyToClipboard(text, button) { if (text === null || text === undefined || !button || !navigator.clipboard) return; try { await navigator.clipboard.writeText(String(text)); const originalIcon = button.innerHTML; button.innerHTML = ' Copied!'; - setTimeout(() => { if(button.isConnected) button.innerHTML = originalIcon; }, 1500); - } catch (err) { console.error('Copy failed:', err); /* Optionally alert user */ } + setTimeout(() => { if (button.isConnected) button.innerHTML = originalIcon; }, 1500); + } catch (err) { console.error('Copy failed:', err); } } - // Updates the UI elements related to the server toggle button/input function updateServerToggleUI() { if (appMode === 'lite') { - // LITE MODE: Force Custom URL, disable toggle - useLocalHapi = false; // Force state + useLocalHapi = false; toggleServerButton.disabled = true; toggleServerButton.classList.add('disabled'); - toggleServerButton.style.pointerEvents = 'none'; // Make unclickable + toggleServerButton.style.pointerEvents = 'none'; toggleServerButton.setAttribute('aria-disabled', 'true'); toggleServerButton.title = "Local HAPI server is unavailable in Lite mode"; - toggleLabel.textContent = 'Use Custom URL'; // Always show this label - fhirServerUrlInput.style.display = 'block'; // Always show input + toggleLabel.textContent = 'Use Custom URL'; + fhirServerUrlInput.style.display = 'block'; fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)"; - fhirServerUrlInput.required = true; // Make required in lite mode + fhirServerUrlInput.required = true; + authSection.style.display = 'block'; } else { - // STANDALONE MODE: Allow toggle toggleServerButton.disabled = false; toggleServerButton.classList.remove('disabled'); toggleServerButton.style.pointerEvents = 
'auto'; @@ -204,122 +215,188 @@ document.addEventListener('DOMContentLoaded', function() { toggleLabel.textContent = useLocalHapi ? 'Using Local HAPI' : 'Using Custom URL'; fhirServerUrlInput.style.display = useLocalHapi ? 'none' : 'block'; fhirServerUrlInput.placeholder = "e.g., https://hapi.fhir.org/baseR4"; - fhirServerUrlInput.required = !useLocalHapi; // Required only if custom is selected + fhirServerUrlInput.required = !useLocalHapi; + authSection.style.display = useLocalHapi ? 'none' : 'block'; } - fhirServerUrlInput.classList.remove('is-invalid'); // Clear validation state on toggle + fhirServerUrlInput.classList.remove('is-invalid'); + updateAuthInputsUI(); console.log(`UI Updated: useLocalHapi=${useLocalHapi}, Button Disabled=${toggleServerButton.disabled}, Input Visible=${fhirServerUrlInput.style.display !== 'none'}, Input Required=${fhirServerUrlInput.required}`); } - // Toggles the server selection state (only effective in Standalone mode) + function updateAuthInputsUI() { + if (!authTypeSelect || !authInputsGroup || !bearerTokenInput || !basicAuthInputs) return; + const authType = authTypeSelect.value; + authInputsGroup.style.display = (authType === 'bearer' || authType === 'basic') ? 'block' : 'none'; + bearerTokenInput.style.display = authType === 'bearer' ? 'block' : 'none'; + basicAuthInputs.style.display = authType === 'basic' ? 'block' : 'none'; + if (authType !== 'bearer' && bearerTokenInput) bearerTokenInput.value = ''; + if (authType !== 'basic' && usernameInput) usernameInput.value = ''; + if (authType !== 'basic' && passwordInput) passwordInput.value = ''; + } + function toggleServer() { if (appMode === 'lite') { console.log("Toggle ignored: Lite mode active."); - return; // Do nothing in lite mode + return; } useLocalHapi = !useLocalHapi; if (useLocalHapi && fhirServerUrlInput) { - fhirServerUrlInput.value = ''; // Clear custom URL when switching to local + fhirServerUrlInput.value = ''; } - updateServerToggleUI(); // Update UI based on new state + updateServerToggleUI(); console.log(`Server toggled: Now using ${useLocalHapi ? 'Local HAPI' : 'Custom URL'}`); } - // Updates visibility of the Request Body textarea based on selected HTTP method function updateRequestBodyVisibility() { const selectedMethod = document.querySelector('input[name="method"]:checked')?.value; if (!requestBodyGroup || !selectedMethod) return; const showBody = (selectedMethod === 'POST' || selectedMethod === 'PUT'); requestBodyGroup.style.display = showBody ? 
'block' : 'none'; - if (!showBody) { // Clear body and errors if body not needed + if (!showBody) { if (requestBodyInput) requestBodyInput.value = ''; if (jsonError) jsonError.style.display = 'none'; if (requestBodyInput) requestBodyInput.classList.remove('is-invalid'); } } - // --- INITIAL SETUP & MODE CHECK --- - updateServerToggleUI(); // Set initial UI based on detected mode and default state - updateRequestBodyVisibility(); // Set initial visibility based on default method (GET) + // --- Initial Setup --- + updateServerToggleUI(); + updateRequestBodyVisibility(); - // --- ATTACH EVENT LISTENERS --- + // --- Event Listeners --- toggleServerButton.addEventListener('click', toggleServer); if (methodRadios) { methodRadios.forEach(radio => { radio.addEventListener('change', updateRequestBodyVisibility); }); } - if (requestBodyInput && fhirPathInput) { requestBodyInput.addEventListener('input', () => validateRequestBody( document.querySelector('input[name="method"]:checked')?.value, fhirPathInput.value )); } + if (requestBodyInput && fhirPathInput) { requestBodyInput.addEventListener('input', () => validateRequestBody(document.querySelector('input[name="method"]:checked')?.value, fhirPathInput.value)); } if (copyRequestBodyButton && requestBodyInput) { copyRequestBodyButton.addEventListener('click', () => copyToClipboard(requestBodyInput.value, copyRequestBodyButton)); } if (copyResponseHeadersButton && responseHeaders) { copyResponseHeadersButton.addEventListener('click', () => copyToClipboard(responseHeaders.textContent, copyResponseHeadersButton)); } if (copyResponseBodyButton && responseBody) { copyResponseBodyButton.addEventListener('click', () => copyToClipboard(responseBody.textContent, copyResponseBodyButton)); } + if (authTypeSelect) { authTypeSelect.addEventListener('change', updateAuthInputsUI); } // --- Send Request Button Listener --- sendButton.addEventListener('click', async function() { console.log("Send Request button clicked."); - // --- UI Reset --- - sendButton.disabled = true; sendButton.textContent = 'Sending...'; + sendButton.disabled = true; + sendButton.textContent = 'Sending...'; responseCard.style.display = 'none'; - responseStatus.textContent = ''; responseHeaders.textContent = ''; responseBody.textContent = ''; - responseStatus.className = 'badge'; // Reset badge class - fhirServerUrlInput.classList.remove('is-invalid'); // Reset validation - if(requestBodyInput) requestBodyInput.classList.remove('is-invalid'); - if(jsonError) jsonError.style.display = 'none'; + responseStatus.textContent = ''; + responseHeaders.textContent = ''; + responseBody.textContent = ''; + responseStatus.className = 'badge'; + fhirServerUrlInput.classList.remove('is-invalid'); + if (requestBodyInput) requestBodyInput.classList.remove('is-invalid'); + if (jsonError) jsonError.style.display = 'none'; + if (bearerTokenInput) bearerTokenInput.classList.remove('is-invalid'); + if (usernameInput) usernameInput.classList.remove('is-invalid'); + if (passwordInput) passwordInput.classList.remove('is-invalid'); // --- Get Values --- const path = fhirPathInput.value.trim(); const method = document.querySelector('input[name="method"]:checked')?.value; const customUrl = fhirServerUrlInput.value.trim(); let body = undefined; + const authType = authTypeSelect ? authTypeSelect.value : 'none'; + const bearerToken = bearerTokenInput ? bearerTokenInput.value.trim() : ''; + const username = usernameInput ? usernameInput.value.trim() : ''; + const password = passwordInput ? 
passwordInput.value : ''; // --- Basic Input Validation --- - if (!path) { alert('Please enter a FHIR Path.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; } - if (!method) { alert('Please select a Request Type.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; } - if (!useLocalHapi && !customUrl) { // Custom URL mode needs a URL - alert('Please enter a custom FHIR Server URL.'); fhirServerUrlInput.classList.add('is-invalid'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; + if (!path) { + alert('Please enter a FHIR Path.'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; } - if (!useLocalHapi && customUrl) { // Validate custom URL format - try { new URL(customUrl); } - catch (_) { alert('Invalid custom FHIR Server URL format.'); fhirServerUrlInput.classList.add('is-invalid'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; } + if (!method) { + alert('Please select a Request Type.'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } + if (!useLocalHapi && !customUrl) { + alert('Please enter a custom FHIR Server URL.'); + fhirServerUrlInput.classList.add('is-invalid'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } + if (!useLocalHapi && customUrl) { + try { new URL(customUrl); } + catch (_) { + alert('Invalid custom FHIR Server URL format.'); + fhirServerUrlInput.classList.add('is-invalid'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } } - // --- Validate & Get Body (if needed) --- + // --- Validate Authentication --- + if (!useLocalHapi) { + if (authType === 'bearer' && !bearerToken) { + alert('Please enter a Bearer Token.'); + bearerTokenInput.classList.add('is-invalid'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } + if (authType === 'basic' && (!username || !password)) { + alert('Please enter both Username and Password for Basic Authentication.'); + if (!username) usernameInput.classList.add('is-invalid'); + if (!password) passwordInput.classList.add('is-invalid'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } + } + + // --- Validate & Get Body --- if (method === 'POST' || method === 'PUT') { - body = validateRequestBody(method, path); - if (body === null) { // null indicates validation error - alert('Request body contains invalid JSON/Format.'); sendButton.disabled = false; sendButton.textContent = 'Send Request'; return; - } - // If body is empty string, ensure it's treated as such for fetch - if (body === '') body = ''; + body = validateRequestBody(method, path); + if (body === null) { + alert('Request body contains invalid JSON/Format.'); + sendButton.disabled = false; + sendButton.textContent = 'Send Request'; + return; + } + if (body === '') body = ''; } // --- Determine Fetch URL and Headers --- const cleanedPath = cleanFhirPath(path); - const finalFetchUrl = '/fhir/' + cleanedPath; // Always send to the backend proxy endpoint + const finalFetchUrl = '/fhir/' + cleanedPath; const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' }; - // Determine Content-Type if body exists if (body !== undefined) { - if (body.trim().startsWith('{')) { headers['Content-Type'] = 'application/fhir+json'; } - else if (body.trim().startsWith('<')) { headers['Content-Type'] = 'application/fhir+xml'; } - 
else if (method === 'POST' && path.endsWith('_search') && body && !body.trim().startsWith('{') && !body.trim().startsWith('<')) { headers['Content-Type'] = 'application/x-www-form-urlencoded'; } - else if (body) { headers['Content-Type'] = 'application/fhir+json'; } // Default if unknown but present + if (body.trim().startsWith('{')) { headers['Content-Type'] = 'application/fhir+json'; } + else if (body.trim().startsWith('<')) { headers['Content-Type'] = 'application/fhir+xml'; } + else if (method === 'POST' && path.endsWith('_search') && body && !body.trim().startsWith('{') && !body.trim().startsWith('<')) { headers['Content-Type'] = 'application/x-www-form-urlencoded'; } + else if (body) { headers['Content-Type'] = 'application/fhir+json'; } } - // Add Custom Target Header if needed if (!useLocalHapi && customUrl) { - headers['X-Target-FHIR-Server'] = customUrl.replace(/\/+$/, ''); // Send custom URL without trailing slash + headers['X-Target-FHIR-Server'] = customUrl.replace(/\/+$/, ''); console.log("Adding header X-Target-FHIR-Server:", headers['X-Target-FHIR-Server']); + if (authType === 'bearer') { + headers['Authorization'] = `Bearer ${bearerToken}`; + console.log("Adding header Authorization: Bearer "); + } else if (authType === 'basic') { + const credentials = btoa(`${username}:${password}`); + headers['Authorization'] = `Basic ${credentials}`; + console.log("Adding header Authorization: Basic "); + } } - // Add CSRF token ONLY if sending to local proxy (for modifying methods) const csrfTokenInput = form.querySelector('input[name="csrf_token"]'); const csrfToken = csrfTokenInput ? csrfTokenInput.value : null; - // Include DELETE method for CSRF check if (useLocalHapi && ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method) && csrfToken) { - headers['X-CSRFToken'] = csrfToken; - console.log("CSRF Token added for local request."); + headers['X-CSRFToken'] = csrfToken; + console.log("CSRF Token added for local request."); } else if (useLocalHapi && ['POST', 'PUT', 'DELETE', 'PATCH'].includes(method) && !csrfToken) { - console.warn("CSRF token input not found for local modifying request."); + console.warn("CSRF token input not found for local modifying request."); } console.log(`Executing Fetch: Method=${method}, URL=${finalFetchUrl}, LocalHAPI=${useLocalHapi}`); - console.log("Request Headers:", headers); + console.log("Request Headers:", { ...headers, Authorization: headers.Authorization ? '' : undefined }); if (body !== undefined) console.log("Request Body (first 300 chars):", (body || '').substring(0, 300) + ((body?.length ?? 0) > 300 ? "..." : "")); // --- Make the Fetch Request --- @@ -327,10 +404,9 @@ document.addEventListener('DOMContentLoaded', function() { const response = await fetch(finalFetchUrl, { method: method, headers: headers, - body: body // Will be undefined for GET/DELETE + body: body }); - // --- Process Response --- responseCard.style.display = 'block'; responseStatus.textContent = `${response.status} ${response.statusText}`; responseStatus.className = `badge ${response.ok ? 
'bg-success' : 'bg-danger'}`;
@@ -343,40 +419,36 @@ document.addEventListener('DOMContentLoaded', function() {
                let responseBodyText = await response.text();
                let displayBody = responseBodyText;
-                // Attempt to pretty-print JSON
                if (responseContentType.includes('json') && responseBodyText.trim()) {
                    try { displayBody = JSON.stringify(JSON.parse(responseBodyText), null, 2); }
-                    catch (e) { console.warn("Failed to pretty-print JSON response:", e); /* Show raw text */ }
-                }
-                // Attempt to pretty-print XML (basic indentation)
-                else if (responseContentType.includes('xml') && responseBodyText.trim()) {
-                    try {
-                        let formattedXml = '';
-                        let indent = 0;
-                        const xmlLines = responseBodyText.replace(/>\s*</g, '><').split(/(<[^>]+>)/);
-                        for (let i = 0; i < xmlLines.length; i++) {
-                            const node = xmlLines[i];
-                            if (!node || node.trim().length === 0) continue;
-                            if (node.match(/^<\/\w/)) indent--; // Closing tag
-                            formattedXml += ' '.repeat(Math.max(0, indent)) + node.trim() + '\n';
-                            if (node.match(/^<\w/) && !node.match(/\/>$/) && !node.match(/^<\?/)) indent++; // Opening tag
-                        }
-                        displayBody = formattedXml.trim();
-                    } catch(e) { console.warn("Basic XML formatting failed:", e); /* Show raw text */ }
+                    catch (e) { console.warn("Failed to pretty-print JSON response:", e); }
+                } else if (responseContentType.includes('xml') && responseBodyText.trim()) {
+                    try {
+                        let formattedXml = '';
+                        let indent = 0;
+                        const xmlLines = responseBodyText.replace(/>\s*</g, '><').split(/(<[^>]+>)/);
+                        for (let i = 0; i < xmlLines.length; i++) {
+                            const node = xmlLines[i];
+                            if (!node || node.trim().length === 0) continue;
+                            if (node.match(/^<\/\w/)) indent--;
+                            formattedXml += ' '.repeat(Math.max(0, indent)) + node.trim() + '\n';
+                            if (node.match(/^<\w/) && !node.match(/\/>$/) && !node.match(/^<\?/)) indent++;
+                        }
+                        displayBody = formattedXml.trim();
+                    } catch (e) { console.warn("Basic XML formatting failed:", e); }
                }
                responseBody.textContent = displayBody;
-                // Highlight response body if Prism.js is available
-                if (typeof Prism !== 'undefined') {
-                    responseBody.className = 'border p-2 bg-light'; // Reset base classes
-                    if (responseContentType.includes('json')) {
-                        responseBody.classList.add('language-json');
-                    } else if (responseContentType.includes('xml')) {
-                        responseBody.classList.add('language-xml');
-                    } // Add other languages if needed
-                    Prism.highlightElement(responseBody);
-                }
+                if (typeof Prism !== 'undefined') {
+                    responseBody.className = 'border p-2 bg-light';
+                    if (responseContentType.includes('json')) {
+                        responseBody.classList.add('language-json');
+                    } else if (responseContentType.includes('xml')) {
+                        responseBody.classList.add('language-xml');
+                    }
+                    Prism.highlightElement(responseBody);
+                }
            } catch (error) {
                console.error('Fetch error:', error);
@@ -384,19 +456,18 @@ document.addEventListener('DOMContentLoaded', function() {
                responseStatus.textContent = `Network Error`;
                responseStatus.className = 'badge bg-danger';
                responseHeaders.textContent = 'N/A';
-                // Provide a more informative error message
                let errorDetail = `Error: ${error.message}\n\n`;
                if (useLocalHapi) {
-                    errorDetail += `Could not connect to the FHIRFLARE proxy at ${finalFetchUrl}. Ensure the toolkit server and the local HAPI FHIR server (at http://localhost:8080/fhir) are running.`;
+                    errorDetail += `Could not connect to the FHIRFLARE proxy at ${finalFetchUrl}. Ensure the toolkit server and the local HAPI FHIR server (at http://localhost:8080/fhir) are running.`;
                } else {
-                    errorDetail += `Could not connect to the FHIRFLARE proxy at ${finalFetchUrl} or the proxy could not connect to the target server (${customUrl}). Ensure the toolkit server is running and the target server is accessible.`;
+                    errorDetail += `Could not connect to the FHIRFLARE proxy at ${finalFetchUrl} or the proxy could not connect to the target server (${customUrl}). Ensure the toolkit server is running and the target server is accessible.`;
                }
                responseBody.textContent = errorDetail;
            } finally {
-                sendButton.disabled = false; sendButton.textContent = 'Send Request';
+                sendButton.disabled = false;
+                sendButton.textContent = 'Send Request';
            }
-        }); // End sendButton listener
-
-}); // End DOMContentLoaded Listener
+    });
+});
 
{% endblock %}
\ No newline at end of file
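Both templates above send every request to the local /fhir route and signal a custom upstream with the X-Target-FHIR-Server header, passing Authorization through unchanged. A hedged sketch of a proxy view honoring that contract follows; the route shape, constant names, and timeout are assumptions for illustration, not code taken from this patch.

# Sketch: a forwarding proxy that honors X-Target-FHIR-Server (names illustrative).
import requests
from flask import Flask, Response, request

app = Flask(__name__)
DEFAULT_UPSTREAM = 'http://localhost:8080/fhir'  # assumption: the local HAPI default

@app.route('/fhir/<path:fhir_path>', methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH'])
def fhir_proxy(fhir_path):
    target = request.headers.get('X-Target-FHIR-Server', DEFAULT_UPSTREAM).rstrip('/')
    fwd_headers = {'Accept': request.headers.get('Accept', 'application/fhir+json')}
    for name in ('Authorization', 'Content-Type'):  # pass auth and body type through untouched
        if name in request.headers:
            fwd_headers[name] = request.headers[name]
    upstream = requests.request(request.method, f'{target}/{fhir_path}',
                                params=request.args, data=request.get_data(),
                                headers=fwd_headers, timeout=30)
    return Response(upstream.content, status=upstream.status_code,
                    content_type=upstream.headers.get('Content-Type', 'application/fhir+json'))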
diff --git a/templates/fhir_ui_operations.html
index ecc1d69..c8d1425 100644
--- a/templates/fhir_ui_operations.html
+++ b/templates/fhir_ui_operations.html
@@ -134,11 +134,37 @@
+ Use Local HAPI
+
Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL.
+
 
@@ -338,9 +364,15 @@ document.addEventListener('DOMContentLoaded', () => {
    const swaggerUiContainer = document.getElementById('swagger-ui');
    const selectedResourceSpan = document.getElementById('selectedResource');
    const queryListContainer = document.getElementById('queryList');
-    // --- Add checks if desired ---
-    if (!toggleServerButton || !toggleLabel || !fhirServerUrlInput /* || other critical elements */) {
-        console.error("Crucial elements missing, stopping script."); return;
+    const authSection = document.getElementById('authSection');
+    const authTypeSelect = document.getElementById('authType');
+    const authInputsGroup = document.getElementById('authInputsGroup');
+    const bearerTokenInput = document.getElementById('bearerToken');
+    const basicAuthInputs = document.getElementById('basicAuthInputs'); // container for the Basic Auth username/password fields referenced below
+    const usernameInput = document.getElementById('username');
+    const passwordInput = document.getElementById('password');
+    if (!toggleServerButton || !toggleLabel || !fhirServerUrlInput || !authSection || !authTypeSelect) {
+        console.error("Crucial elements missing, stopping script.");
+        return;
    }

    // --- State Variables ---
@@ -355,48 +387,57 @@ document.addEventListener('DOMContentLoaded', () => {
    console.log(`App Mode (Operations): ${appMode}`);
    // <<< END ADD >>>

+    // --- Helper Functions ---
+    function updateAuthInputsUI() {
+        console.log(`[updateAuthInputsUI] Running, authType: ${authTypeSelect.value}`);
+        if (!authTypeSelect || !authInputsGroup || !bearerTokenInput || !basicAuthInputs) {
+            console.error("[updateAuthInputsUI] Missing auth elements");
+            return;
+        }
+        const authType = authTypeSelect.value;
+        authInputsGroup.style.display = (authType === 'bearer' || authType === 'basic') ? 'block' : 'none';
+        bearerTokenInput.style.display = authType === 'bearer' ? 'block' : 'none';
+        basicAuthInputs.style.display = authType === 'basic' ?
'block' : 'none'; + if (authType !== 'bearer' && bearerTokenInput) bearerTokenInput.value = ''; + if (authType !== 'basic' && usernameInput) usernameInput.value = ''; + if (authType !== 'basic' && passwordInput) passwordInput.value = ''; + console.log(`[updateAuthInputsUI] authInputsGroup display: ${authInputsGroup.style.display}, bearer: ${bearerTokenInput.style.display}, basic: ${basicAuthInputs.style.display}`); + } + // --- Helper Function to Update Toggle Button/Input UI (MODIFY THIS FUNCTION) --- function updateServerToggleUI() { - // Keep checks for elements - if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) { - console.error("updateServerToggleUI: Required elements missing!"); - return; + if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton || !authSection) { + console.error("[updateServerToggleUI] Required elements missing!"); + return; } - console.log(`updateServerToggleUI: appMode=${appMode}, current isUsingLocalHapi=${isUsingLocalHapi}`); // Debug - - // <<< MODIFY THIS WHOLE IF/ELSE BLOCK >>> + console.log(`[updateServerToggleUI] appMode=${appMode}, isUsingLocalHapi=${isUsingLocalHapi}`); if (appMode === 'lite') { - console.log("-> Applying Lite mode UI settings."); - isUsingLocalHapi = false; // Force state - toggleServerButton.disabled = true; // Set disabled attribute - toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class - // --- ADD !important to pointerEvents --- + console.log("[updateServerToggleUI] Applying Lite mode UI settings"); + isUsingLocalHapi = false; + toggleServerButton.disabled = true; + toggleServerButton.classList.add('disabled'); toggleServerButton.style.pointerEvents = 'none !important'; - // --- END ADD --- - toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility - toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip - toggleLabel.textContent = 'Use Custom URL'; // Set label text - fhirServerUrlInput.style.display = 'block'; // Show custom URL input + toggleServerButton.setAttribute('aria-disabled', 'true'); + toggleServerButton.title = "Local HAPI is not available in Lite mode"; + toggleLabel.textContent = 'Use Custom URL'; + fhirServerUrlInput.style.display = 'block'; fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)"; + authSection.style.display = 'block'; } else { - // Standalone mode - console.log("-> Applying Standalone mode UI settings."); - toggleServerButton.disabled = false; // Ensure enabled - toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class - // --- Ensure pointerEvents is auto in standalone --- + console.log("[updateServerToggleUI] Applying Standalone mode UI settings"); + toggleServerButton.disabled = false; + toggleServerButton.classList.remove('disabled'); toggleServerButton.style.pointerEvents = 'auto'; - // --- END --- - toggleServerButton.removeAttribute('aria-disabled'); // Accessibility - toggleServerButton.title = ""; // Clear tooltip - - // Set text/display based on current standalone state + toggleServerButton.removeAttribute('aria-disabled'); + toggleServerButton.title = ""; toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL'; fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block'; fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4"; + authSection.style.display = isUsingLocalHapi ? 
'none' : 'block'; } - fhirServerUrlInput.classList.remove('is-invalid'); // Clear validation state - console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events - // <<< END MODIFICATION >>> + fhirServerUrlInput.classList.remove('is-invalid'); + updateAuthInputsUI(); + console.log(`[updateServerToggleUI] Finished. Button disabled: ${toggleServerButton.disabled}, authSection display: ${authSection.style.display}`); } // <<< REFINED fetchOperationDefinition >>> @@ -482,47 +523,47 @@ document.addEventListener('DOMContentLoaded', () => { } // <<< END REFINED fetchOperationDefinition (v3) >>> - function updateServerToggleUI() { - // Keep checks for elements - if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) { - console.error("updateServerToggleUI: Required elements missing!"); - return; - } - console.log(`updateServerToggleUI: appMode=\{appMode\}, current isUsingLocalHapi\={isUsingLocalHapi}`); // Debug + // function updateServerToggleUI() { + // // Keep checks for elements + // if (!toggleLabel || !fhirServerUrlInput || !toggleServerButton) { + // console.error("updateServerToggleUI: Required elements missing!"); + // return; + // } + // console.log(`updateServerToggleUI: appMode=\{appMode\}, current isUsingLocalHapi\={isUsingLocalHapi}`); // Debug - if (appMode === 'lite') { - console.log("-> Applying Lite mode UI settings."); - isUsingLocalHapi = false; // Force state - toggleServerButton.disabled = true; // Set disabled attribute - toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class - // --- ADD !important to pointerEvents --- - toggleServerButton.style.pointerEvents = 'none !important'; - // --- END ADD --- - toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility - toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip - toggleLabel.textContent = 'Use Custom URL'; // Set label text - fhirServerUrlInput.style.display = 'block'; // Show custom URL input - fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)"; - } else { - // Standalone mode - console.log("-> Applying Standalone mode UI settings."); - toggleServerButton.disabled = false; // Ensure enabled - toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class - // --- Ensure pointerEvents is auto in standalone --- - toggleServerButton.style.pointerEvents = 'auto'; - // --- END --- - toggleServerButton.removeAttribute('aria-disabled'); // Accessibility - toggleServerButton.title = ""; // Clear tooltip + // if (appMode === 'lite') { + // console.log("-> Applying Lite mode UI settings."); + // isUsingLocalHapi = false; // Force state + // toggleServerButton.disabled = true; // Set disabled attribute + // toggleServerButton.classList.add('disabled'); // Add Bootstrap disabled class + // // --- ADD !important to pointerEvents --- + // toggleServerButton.style.pointerEvents = 'none !important'; + // // --- END ADD --- + // toggleServerButton.setAttribute('aria-disabled', 'true'); // Accessibility + // toggleServerButton.title = "Local HAPI is not available in Lite mode"; // Tooltip + // toggleLabel.textContent = 'Use Custom URL'; // Set label text + // fhirServerUrlInput.style.display = 'block'; // Show custom URL input + // fhirServerUrlInput.placeholder = "Enter FHIR Base URL (Local HAPI unavailable)"; + // } else { + // // Standalone mode + // console.log("-> Applying Standalone mode UI 
settings."); + // toggleServerButton.disabled = false; // Ensure enabled + // toggleServerButton.classList.remove('disabled'); // Remove Bootstrap disabled class + // // --- Ensure pointerEvents is auto in standalone --- + // toggleServerButton.style.pointerEvents = 'auto'; + // // --- END --- + // toggleServerButton.removeAttribute('aria-disabled'); // Accessibility + // toggleServerButton.title = ""; // Clear tooltip - // Set text/display based on current standalone state - toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL'; - fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block'; - fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4"; - } - // Clear potential validation errors regardless of mode - if(fhirServerUrlInput) fhirServerUrlInput.classList.remove('is-invalid'); // Add check for element existence - console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events - } + // // Set text/display based on current standalone state + // toggleLabel.textContent = isUsingLocalHapi ? 'Use Local HAPI' : 'Use Custom URL'; + // fhirServerUrlInput.style.display = isUsingLocalHapi ? 'none' : 'block'; + // fhirServerUrlInput.placeholder = "Enter FHIR Base URL e.g., https://hapi.fhir.org/baseR4"; + // } + // // Clear potential validation errors regardless of mode + // if(fhirServerUrlInput) fhirServerUrlInput.classList.remove('is-invalid'); // Add check for element existence + // console.log(`-> updateServerToggleUI finished. Button disabled: ${toggleServerButton.disabled}, pointer-events: ${toggleServerButton.style.pointerEvents}`); // Log pointer-events + // } // --- Server Toggle Functionality (REVISED - simplified) --- function toggleServerSelection() { @@ -1269,156 +1310,281 @@ document.addEventListener('DOMContentLoaded', () => { // Execute Button (Includes Enhanced Error Display) // <<< Execute Button Listener with Logging and Array Conversion Fix >>> if (executeButton && executeWrapper && respStatusDiv && reqUrlOutput && curlOutput && respFormatSelect && copyRespButton && downloadRespButton && respOutputCode && respNarrativeDiv && respOutputPre) { - executeButton.addEventListener('click', async () => { + executeButton.addEventListener('click', async () => { console.log("[LOG 1] Execute button clicked. Starting listener..."); - // --- Reset UI and Disable Button --- - executeButton.disabled = true; executeButton.textContent = 'Executing...'; + executeButton.disabled = true; + executeButton.textContent = 'Executing...'; executeWrapper.style.display = 'block'; - // ... (reset UI elements) ... 
-            if(reqUrlOutput) reqUrlOutput.textContent = 'Building request...'; if(curlOutput) curlOutput.textContent = 'Building request...'; if(respOutputCode) respOutputCode.textContent = ''; if(respNarrativeDiv) respNarrativeDiv.innerHTML = ''; respNarrativeDiv.style.display = 'none'; if(respOutputPre) respOutputPre.style.display = 'block'; if(respStatusDiv) respStatusDiv.textContent = 'Executing request...'; respStatusDiv.style.color = '#6c757d'; if(respFormatSelect) respFormatSelect.style.display = 'none'; respFormatSelect.value = 'json'; if(copyRespButton) copyRespButton.style.display = 'none'; if(downloadRespButton) downloadRespButton.style.display = 'none';
+            if (reqUrlOutput) reqUrlOutput.textContent = 'Building request...';
+            if (curlOutput) curlOutput.textContent = 'Building request...';
+            if (respOutputCode) respOutputCode.textContent = '';
+            if (respNarrativeDiv) respNarrativeDiv.innerHTML = '';
+            respNarrativeDiv.style.display = 'none';
+            if (respOutputPre) respOutputPre.style.display = 'block';
+            if (respStatusDiv) respStatusDiv.textContent = 'Executing request...';
+            respStatusDiv.style.color = '#6c757d';
+            if (respFormatSelect) respFormatSelect.style.display = 'none';
+            respFormatSelect.value = 'json';
+            if (copyRespButton) copyRespButton.style.display = 'none';
+            if (downloadRespButton) downloadRespButton.style.display = 'none';

-            // --- Get Query Definition and Base URL ---
-            const queryDef = JSON.parse(block.dataset.queryData); const method = queryDef.method; const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' }; let body; let path = queryDef.path; const baseUrl = isUsingLocalHapi ? '/fhir' : (fhirServerUrlInput.value.trim().replace(/\/+$/, '') || '/fhir'); let url = `${baseUrl}`; let validParams = true; const missingParams = []; const bodyParamsList = [];
+            const queryDef = JSON.parse(block.dataset.queryData);
+            const method = queryDef.method;
+            const headers = { 'Accept': 'application/fhir+json, application/fhir+xml;q=0.9, */*;q=0.8' };
+            let body;
+            let path = queryDef.path;
+            const baseUrl = isUsingLocalHapi ? '/fhir' : (fhirServerUrlInput.value.trim().replace(/\/+$/, '') || '/fhir');
+            let url = `${baseUrl}`;
+            let validParams = true;
+            const missingParams = [];
+            const bodyParamsList = [];

-            console.log("[LOG 2] Starting parameter processing...");
+            if (!isUsingLocalHapi) {
+                const authType = authTypeSelect.value;
+                const bearerToken = bearerTokenInput.value.trim();
+                const username = usernameInput.value.trim();
+                const password = passwordInput.value;
+                if (authType === 'bearer') {
+                    headers['Authorization'] = `Bearer ${bearerToken}`;
+                    console.log("Adding header Authorization: Bearer <redacted>");
+                } else if (authType === 'basic') {
+                    headers['Authorization'] = `Basic ${btoa(`${username}:${password}`)}`;
+                    console.log("Adding header Authorization: Basic <redacted>");
+                }
+            }

-            // --- Process Path Parameters ---
-            // <<< FIX: Convert NodeList to Array before forEach >>>
             Array.from(block.querySelectorAll('.parameters-section tr[data-param-in="path"]')).forEach(row => {
-                const paramName = row.dataset.paramName; const input = row.querySelector('input'); const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === 'path'); const required = paramDef?.required; const value = input?.value.trim(); if (input) input.classList.remove('is-invalid');
-                if (!value && required) { validParams = false; missingParams.push(`${paramName} (path)`); if (input) input.classList.add('is-invalid'); }
-                else if (value) { path = path.replace(`:${paramName}`, encodeURIComponent(value)); }
-                else { path = path.replace(`/:${paramName}`, ''); }
+                const paramName = row.dataset.paramName;
+                const input = row.querySelector('input');
+                const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === 'path');
+                const required = paramDef?.required;
+                const value = input?.value.trim();
+                if (input) input.classList.remove('is-invalid');
+                if (!value && required) {
+                    validParams = false;
+                    missingParams.push(`${paramName} (path)`);
+                    if (input) input.classList.add('is-invalid');
+                } else if (value) {
+                    path = path.replace(`:${paramName}`, encodeURIComponent(value));
+                } else {
+                    path = path.replace(`/:${paramName}`, '');
+                }
             });
-            if (path.includes(':')) { const remainingPlaceholders = path.match(/:(\w+)/g) || []; const requiredRemaining = queryDef.parameters.filter(p => p.in === 'path' && remainingPlaceholders.includes(`:${p.name}`) && p.required); if (requiredRemaining.length > 0) { validParams = false; missingParams.push(...requiredRemaining.map(p => `${p.name} (path)`)); requiredRemaining.forEach(p => { const el = block.querySelector(`.parameters-section tr[data-param-name="${p.name}"][data-param-in="path"] input`); if (el) el.classList.add('is-invalid'); }); } }
+            if (path.includes(':')) {
+                const remainingPlaceholders = path.match(/:(\w+)/g) || [];
+                const requiredRemaining = queryDef.parameters.filter(p => p.in === 'path' && remainingPlaceholders.includes(`:${p.name}`) && p.required);
+                if (requiredRemaining.length > 0) {
+                    validParams = false;
+                    missingParams.push(...requiredRemaining.map(p => `${p.name} (path)`));
+                    requiredRemaining.forEach(p => {
+                        const el = block.querySelector(`.parameters-section tr[data-param-name="${p.name}"][data-param-in="path"] input`);
+                        if (el) el.classList.add('is-invalid');
+                    });
+                }
+            }
             url += path.startsWith('/') ?
path : `/${path}`; - // --- Process Query and Body Parameters --- const searchParams = new URLSearchParams(); - // <<< FIX: Convert NodeList to Array before forEach >>> Array.from(block.querySelectorAll('.parameters-section tr[data-param-in="query"], .parameters-section tr[data-param-in="body (Parameters)"]')).forEach(row => { - const paramName = row.dataset.paramName; const paramIn = row.dataset.paramIn; const inputElement = row.querySelector('input, select'); const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === paramIn); const required = paramDef?.required; let value = ''; if (inputElement) inputElement.classList.remove('is-invalid'); - if (inputElement?.type === 'checkbox') { value = inputElement.checked ? (inputElement.value || 'true') : ''; } else if (inputElement) { value = inputElement.value.trim(); } - if (value) { if (paramIn === 'query') { searchParams.set(paramName, value); } else { let paramPart = { name: paramName }; const paramType = paramDef?.type || 'string'; try { switch (paramType) { case 'boolean': paramPart.valueBoolean = (value === 'true'); break; case 'integer': case 'positiveInt': case 'unsignedInt': paramPart.valueInteger = parseInt(value, 10); break; case 'decimal': paramPart.valueDecimal = parseFloat(value); break; case 'date': paramPart.valueDate = value; break; case 'dateTime': paramPart.valueDateTime = value; break; case 'instant': try { paramPart.valueInstant = new Date(value).toISOString(); } catch (dateError) { console.warn(`Instant parse failed: ${dateError}`); paramPart.valueString = value;} break; default: paramPart[`value${paramType.charAt(0).toUpperCase() + paramType.slice(1)}`] = value; } if (Object.keys(paramPart).length > 1) { bodyParamsList.push(paramPart); } } catch (typeError) { console.error(`Error processing body param ${paramName}: ${typeError}`); bodyParamsList.push({ name: paramName, valueString: value }); } } } - else if (required) { validParams = false; missingParams.push(`${paramName} (${paramIn})`); if (inputElement) inputElement.classList.add('is-invalid'); } + const paramName = row.dataset.paramName; + const paramIn = row.dataset.paramIn; + const inputElement = row.querySelector('input, select'); + const paramDef = queryDef.parameters.find(p => p.name === paramName && p.in === paramIn); + const required = paramDef?.required; + let value = ''; + if (inputElement) inputElement.classList.remove('is-invalid'); + if (inputElement?.type === 'checkbox') { + value = inputElement.checked ? 
(inputElement.value || 'true') : ''; + } else if (inputElement) { + value = inputElement.value.trim(); + } + if (value) { + if (paramIn === 'query') { + searchParams.set(paramName, value); + } else { + let paramPart = { name: paramName }; + const paramType = paramDef?.type || 'string'; + try { + switch (paramType) { + case 'boolean': paramPart.valueBoolean = (value === 'true'); break; + case 'integer': case 'positiveInt': case 'unsignedInt': paramPart.valueInteger = parseInt(value, 10); break; + case 'decimal': paramPart.valueDecimal = parseFloat(value); break; + case 'date': paramPart.valueDate = value; break; + case 'dateTime': paramPart.valueDateTime = value; break; + case 'instant': try { paramPart.valueInstant = new Date(value).toISOString(); } catch (dateError) { console.warn(`Instant parse failed: ${dateError}`); paramPart.valueString = value; } break; + default: paramPart[`value${paramType.charAt(0).toUpperCase() + paramType.slice(1)}`] = value; + } + if (Object.keys(paramPart).length > 1) { bodyParamsList.push(paramPart); } + } catch (typeError) { + console.error(`Error processing body param ${paramName}: ${typeError}`); + bodyParamsList.push({ name: paramName, valueString: value }); + } + } + } else if (required) { + validParams = false; + missingParams.push(`${paramName} (${paramIn})`); + if (inputElement) inputElement.classList.add('is-invalid'); + } }); - console.log("[LOG 3] Parameter processing finished. Valid:", validParams); - - // --- Validation Check --- if (!validParams) { const errorMsg = `Error: Missing required parameter(s): ${[...new Set(missingParams)].join(', ')}`; console.error("[LOG 3a] Validation failed:", errorMsg); - if(respStatusDiv) { respStatusDiv.textContent = errorMsg; respStatusDiv.style.color = 'red'; } - if(reqUrlOutput) reqUrlOutput.textContent = 'Error: Invalid parameters'; - if(curlOutput) curlOutput.textContent = 'Error: Invalid parameters'; - executeButton.disabled = false; executeButton.textContent = 'Execute'; // Re-enable button - return; // Stop execution + if (respStatusDiv) { respStatusDiv.textContent = errorMsg; respStatusDiv.style.color = 'red'; } + if (reqUrlOutput) reqUrlOutput.textContent = 'Error: Invalid parameters'; + if (curlOutput) curlOutput.textContent = 'Error: Invalid parameters'; + executeButton.disabled = false; + executeButton.textContent = 'Execute'; + return; } - // --- Finalize URL --- - const queryString = searchParams.toString(); if (queryString) url += (url.includes('?') ? '&' : '?') + queryString; + const queryString = searchParams.toString(); + if (queryString) url += (url.includes('?') ? '&' : '?') + queryString; - // --- Construct Request Body --- - console.log("[LOG 4] Constructing request body..."); if (queryDef.requestBody) { - const contentType = reqContentTypeSelect ? 
reqContentTypeSelect.value : 'application/fhir+json'; - headers['Content-Type'] = contentType; - // <<< Refined Logic >>> - if (bodyParamsList.length > 0) { // If parameters were collected for the body - console.log("[LOG 4a] Using bodyParamsList to construct body."); - if (contentType.includes('json')) { body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2); } - else if (contentType.includes('xml')) { try { body = jsonToFhirXml({ resourceType: "Parameters", parameter: bodyParamsList }); } catch (xmlErr) { console.error("Params->XML failed:", xmlErr); body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }); headers['Content-Type'] = 'application/fhir+json'; alert("Failed to create XML body. Sending JSON."); } } - else { console.warn(`Unsupported Content-Type ${contentType} for Parameters body. Sending JSON.`); body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2); headers['Content-Type'] = 'application/fhir+json'; } - } else if (reqBodyTextarea && reqBodyTextarea.value.trim() !== '') { // Otherwise, if textarea exists AND has non-whitespace content - console.log("[LOG 4b] Using reqBodyTextarea value for body."); - body = reqBodyTextarea.value; // Use the textarea content directly - // Convert textarea content if needed (e.g., user pasted JSON but selected XML) - if (contentType === 'application/fhir+xml' && body.trim().startsWith('{')) { try { body = jsonToFhirXml(body); } catch (e) { console.warn("Textarea JSON->XML failed", e); } } - else if (contentType === 'application/fhir+json' && body.trim().startsWith('<')) { try { body = xmlToFhirJson(body); } catch (e) { console.warn("Textarea XML->JSON failed", e); } } - } else { // No body params and textarea is missing or empty - console.log("[LOG 4c] No body parameters and textarea is empty/missing. Setting body to empty string."); - body = ''; // Ensure body is an empty string if nothing else applies - } - // Add CSRF token if needed - console.log("[LOG 4d] Current isUsingLocalHapi state:", isUsingLocalHapi); // Log HAPI state + const contentType = reqContentTypeSelect ? reqContentTypeSelect.value : 'application/fhir+json'; + headers['Content-Type'] = contentType; + if (bodyParamsList.length > 0) { + if (contentType.includes('json')) { + body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2); + } else if (contentType.includes('xml')) { + try { body = jsonToFhirXml({ resourceType: "Parameters", parameter: bodyParamsList }); } + catch (xmlErr) { + console.error("Params->XML failed:", xmlErr); + body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }); + headers['Content-Type'] = 'application/fhir+json'; + alert("Failed to create XML body. Sending JSON."); + } + } else { + console.warn(`Unsupported Content-Type ${contentType} for Parameters body. 
Sending JSON.`);
+                        body = JSON.stringify({ resourceType: "Parameters", parameter: bodyParamsList }, null, 2);
+                        headers['Content-Type'] = 'application/fhir+json';
+                    }
+                } else if (reqBodyTextarea && reqBodyTextarea.value.trim() !== '') {
+                    body = reqBodyTextarea.value;
+                    if (contentType === 'application/fhir+xml' && body.trim().startsWith('{')) {
+                        try { body = jsonToFhirXml(body); } catch (e) { console.warn("Textarea JSON->XML failed", e); }
+                    } else if (contentType === 'application/fhir+json' && body.trim().startsWith('<')) {
+                        try { body = xmlToFhirJson(body); } catch (e) { console.warn("Textarea XML->JSON failed", e); }
+                    }
+                } else {
+                    body = '';
+                }
                 if (isUsingLocalHapi && ['POST', 'PUT', 'PATCH', 'DELETE'].includes(method)) {
-                    console.log("[LOG 4e] Attempting to add CSRF token for local HAPI modifying request.");
-                    // Find the CSRF token input within the form
                     const csrfTokenInput = fhirOperationsForm.querySelector('input[name="csrf_token"]');
-                    if (csrfTokenInput && csrfTokenInput.value) {
-                        // Add the token value to the request headers object
+                    if (csrfTokenInput && csrfTokenInput.value) {
                         headers['X-CSRFToken'] = csrfTokenInput.value;
-                        console.log("[LOG 4f] Added X-CSRFToken header:", headers['X-CSRFToken'] ? 'Yes' : 'No'); // Verify it was added
-                    } else {
-                        // Log an error if the token input wasn't found or was empty
+                        console.log("[LOG 4f] Added X-CSRFToken header:", headers['X-CSRFToken'] ? 'Yes' : 'No');
+                    } else {
                         console.error("[LOG 4g] CSRF token input not found or has no value!");
-                    }
+                    }
+                }
             } else {
-                // Log if CSRF is not being added and why
-                console.log("[LOG 4h] Not adding CSRF token (not local HAPI or not modifying method). isUsingLocalHapi:", isUsingLocalHapi, "Method:", method);
-            }
-        } else {
-            // Ensure body is undefined if queryDef.requestBody is false
                 body = undefined;
             }
-            console.log("[LOG 5] Request body constructed. Body length:", body?.length ?? 'undefined'); // Check body length
-            // --- Log Request ---
-            if(reqUrlOutput) reqUrlOutput.textContent = url;
-            // <<< Log body right before generating cURL >>>
-            console.log("[LOG 5a] Body variable before generateCurlCommand:", body ? body.substring(0,100)+'...' : body);
-            if(curlOutput) curlOutput.textContent = generateCurlCommand(method, url, headers, body); // Generate cURL
-            console.log(`Executing: ${method} ${url}`); console.log("Headers:", headers); if (body !== undefined) console.log("Body:", (body || '').substring(0, 300) + ((body?.length ?? 0) > 300 ? "..." : ""));
+            if (reqUrlOutput) reqUrlOutput.textContent = url;
+            if (curlOutput) curlOutput.textContent = generateCurlCommand(method, url, headers, body);
+            console.log(`Executing: ${method} ${url}`);
+            console.log("Headers:", { ...headers, Authorization: headers.Authorization ? '<redacted>' : undefined });
+            if (body !== undefined) console.log("Body:", (body || '').substring(0, 300) + ((body?.length ?? 0) > 300 ? "..." : ""));

-            // --- Perform Fetch & Process Response ---
-
-            // --- Perform Fetch & Process Response ---
             let respData = { json: null, xml: null, narrative: null, text: null, status: 0, statusText: '', contentType: '' };
             try {
-                console.log("[LOG 6] Initiating fetch...");
                 const resp = await fetch(url, { method, headers, body: (body || undefined) });
-                console.log("[LOG 7] Fetch completed.
Status:", resp.status); - - respData.status = resp.status; respData.statusText = resp.statusText; respData.contentType = resp.headers.get('Content-Type') || ''; - console.log("[LOG 8] Reading response text..."); + respData.status = resp.status; + respData.statusText = resp.statusText; + respData.contentType = resp.headers.get('Content-Type') || ''; respData.text = await resp.text(); - console.log("[LOG 9] Response text read. Length:", respData.text?.length); - if(respStatusDiv) { respStatusDiv.textContent = `Status: ${resp.status} ${resp.statusText}`; respStatusDiv.style.color = resp.ok ? 'green' : 'red'; } + if (respStatusDiv) { respStatusDiv.textContent = `Status: ${resp.status} ${resp.statusText}`; respStatusDiv.style.color = resp.ok ? 'green' : 'red'; } - // Process body (includes OperationOutcome check) - console.log("[LOG 10] Processing response body..."); - let isOperationOutcome = false; let operationOutcomeIssuesHtml = ''; - - if (respData.text) { if (respData.contentType.includes('json')) { try { respData.json = JSON.parse(respData.text); try { respData.xml = jsonToFhirXml(respData.json); } catch(xmlConvErr){ respData.xml = `XML conversion failed: ${xmlConvErr.message}`; } if (respData.json.text?.div) respData.narrative = respData.json.text.div; if (respData.json.resourceType === 'OperationOutcome') { isOperationOutcome = true; operationOutcomeIssuesHtml = formatOperationOutcome(respData.json); } } catch (e) { respData.json = { parsingError: e.message, rawText: respData.text }; respData.xml = `${escapeXml(respData.text)}`; } } else if (respData.contentType.includes('xml')) { respData.xml = respData.text; try { respData.json = JSON.parse(xmlToFhirJson(respData.xml)); const p = new DOMParser(), xd = p.parseFromString(respData.xml, "application/xml"), pe = xd.querySelector("parsererror"); if (pe) throw new Error(pe.textContent); if (xd.documentElement && xd.documentElement.tagName === 'OperationOutcome') { isOperationOutcome = true; operationOutcomeIssuesHtml = formatOperationOutcome(respData.json); } const nn = xd.querySelector("div[xmlns='http://www.w3.org/1999/xhtml']"); if (nn) respData.narrative = nn.outerHTML; } catch(e) { respData.json = { parsingError: e.message, rawText: respData.text }; } } else { respData.json = { contentType: respData.contentType, content: respData.text }; respData.xml = `${escapeXml(respData.text)}`; } } else if (resp.ok) { respData.json = { message: "Success (No Content)" }; respData.xml = jsonToFhirXml({}); } else { respData.json = { error: `Request failed: ${resp.status}`, detail: resp.statusText }; respData.xml = `Request failed: ${resp.status} ${escapeXml(resp.statusText)}`; } - console.log("[LOG 11] Response body processed. 
Is OperationOutcome:", isOperationOutcome); - - // --- Update UI --- - block.dataset.responseData = JSON.stringify(respData); - if(respFormatSelect) { respFormatSelect.style.display = 'inline-block'; respFormatSelect.disabled = false; } if(copyRespButton) copyRespButton.style.display = 'inline-block'; if(downloadRespButton) downloadRespButton.style.display = 'inline-block'; - // Enhanced Error Display Logic - if (!resp.ok && isOperationOutcome && respNarrativeDiv) { - console.log("[LOG 12a] Displaying formatted OperationOutcome."); - respNarrativeDiv.innerHTML = operationOutcomeIssuesHtml; respNarrativeDiv.style.display = 'block'; respOutputPre.style.display = 'none'; - if (respFormatSelect) { const narrativeOpt = respFormatSelect.querySelector('option[value="narrative"]'); if (!narrativeOpt) { const opt = document.createElement('option'); opt.value = 'narrative'; opt.textContent = 'Formatted Issues'; respFormatSelect.insertBefore(opt, respFormatSelect.firstChild); } else { narrativeOpt.textContent = 'Formatted Issues'; narrativeOpt.disabled = false; } respFormatSelect.value = 'narrative'; respFormatSelect.dispatchEvent(new Event('change')); } + let isOperationOutcome = false; + let operationOutcomeIssuesHtml = ''; + if (respData.text) { + if (respData.contentType.includes('json')) { + try { + respData.json = JSON.parse(respData.text); + try { respData.xml = jsonToFhirXml(respData.json); } catch (xmlConvErr) { respData.xml = `XML conversion failed: ${xmlConvErr.message}`; } + if (respData.json.text?.div) respData.narrative = respData.json.text.div; + if (respData.json.resourceType === 'OperationOutcome') { + isOperationOutcome = true; + operationOutcomeIssuesHtml = formatOperationOutcome(respData.json); + } + } catch (e) { + respData.json = { parsingError: e.message, rawText: respData.text }; + respData.xml = `${escapeXml(respData.text)}`; + } + } else if (respData.contentType.includes('xml')) { + respData.xml = respData.text; + try { + respData.json = JSON.parse(xmlToFhirJson(respData.xml)); + const p = new DOMParser(), xd = p.parseFromString(respData.xml, "application/xml"), pe = xd.querySelector("parsererror"); + if (pe) throw new Error(pe.textContent); + if (xd.documentElement && xd.documentElement.tagName === 'OperationOutcome') { + isOperationOutcome = true; + operationOutcomeIssuesHtml = formatOperationOutcome(respData.json); + } + const nn = xd.querySelector("div[xmlns='http://www.w3.org/1999/xhtml']"); + if (nn) respData.narrative = nn.outerHTML; + } catch (e) { + respData.json = { parsingError: e.message, rawText: respData.text }; + } + } else { + respData.json = { contentType: respData.contentType, content: respData.text }; + respData.xml = `${escapeXml(respData.text)}`; + } + } else if (resp.ok) { + respData.json = { message: "Success (No Content)" }; + respData.xml = jsonToFhirXml({}); } else { - console.log("[LOG 12b] Updating display format dropdown for success or non-OO error."); - const narrativeOption = respFormatSelect?.querySelector('option[value="narrative"]'); if (narrativeOption) { narrativeOption.textContent = 'Narrative'; narrativeOption.disabled = !respData.narrative; } - respFormatSelect.value = (respData.narrative && respFormatSelect.value === 'narrative') ? 
'narrative' : 'json'; - respFormatSelect?.dispatchEvent(new Event('change')); // This will trigger highlighting via its listener + respData.json = { error: `Request failed: ${resp.status}`, detail: resp.statusText }; + respData.xml = `Request failed: ${resp.status} ${escapeXml(resp.statusText)}`; } - console.log("[LOG 13] UI update triggered."); - } catch (e) { // Catch fetch/network/processing errors + block.dataset.responseData = JSON.stringify(respData); + if (respFormatSelect) { respFormatSelect.style.display = 'inline-block'; respFormatSelect.disabled = false; } + if (copyRespButton) copyRespButton.style.display = 'inline-block'; + if (downloadRespButton) downloadRespButton.style.display = 'inline-block'; + if (!resp.ok && isOperationOutcome && respNarrativeDiv) { + respNarrativeDiv.innerHTML = operationOutcomeIssuesHtml; + respNarrativeDiv.style.display = 'block'; + respOutputPre.style.display = 'none'; + if (respFormatSelect) { + const narrativeOpt = respFormatSelect.querySelector('option[value="narrative"]'); + if (!narrativeOpt) { + const opt = document.createElement('option'); + opt.value = 'narrative'; + opt.textContent = 'Formatted Issues'; + respFormatSelect.insertBefore(opt, respFormatSelect.firstChild); + } else { + narrativeOpt.textContent = 'Formatted Issues'; + narrativeOpt.disabled = false; + } + respFormatSelect.value = 'narrative'; + respFormatSelect.dispatchEvent(new Event('change')); + } + } else { + const narrativeOption = respFormatSelect?.querySelector('option[value="narrative"]'); + if (narrativeOption) { narrativeOption.textContent = 'Narrative'; narrativeOption.disabled = !respData.narrative; } + respFormatSelect.value = (respData.narrative && respFormatSelect.value === 'narrative') ? 'narrative' : 'json'; + respFormatSelect?.dispatchEvent(new Event('change')); + } + } catch (e) { console.error('[LOG 14] Error during fetch or response processing:', e); - if(respStatusDiv) { respStatusDiv.textContent = `Error: ${e.message}`; respStatusDiv.style.color = 'red'; } if(respOutputCode) { respOutputCode.textContent = `Request failed: ${e.message}\nURL: ${url}`; respOutputCode.className = 'language-text'; } if(respFormatSelect) respFormatSelect.style.display = 'none'; if(copyRespButton) copyRespButton.style.display = 'none'; if(downloadRespButton) downloadRespButton.style.display = 'none'; if(respNarrativeDiv) respNarrativeDiv.style.display = 'none'; if(respOutputPre) respOutputPre.style.display = 'block'; + if (respStatusDiv) { respStatusDiv.textContent = `Error: ${e.message}`; respStatusDiv.style.color = 'red'; } + if (respOutputCode) { respOutputCode.textContent = `Request failed: ${e.message}\nURL: ${url}`; respOutputCode.className = 'language-text'; } + if (respFormatSelect) respFormatSelect.style.display = 'none'; + if (copyRespButton) copyRespButton.style.display = 'none'; + if (downloadRespButton) downloadRespButton.style.display = 'none'; + if (respNarrativeDiv) respNarrativeDiv.style.display = 'none'; + if (respOutputPre) respOutputPre.style.display = 'block'; } finally { - console.log("[LOG 15] Executing finally block."); // <<< LOG 15 (Finally Block) + console.log("[LOG 15] Executing finally block."); executeButton.disabled = false; executeButton.textContent = 'Execute'; } - }); // End executeButton listener - } // End if (executeButton && ...) + }); + } // End if (executeButton && ...) 
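    // Aside: the revised updateServerToggleUI() above delegates to updateAuthInputsUI(),
    // which is not visible in the hunks shown here. A minimal sketch of the behaviour
    // the rest of this script depends on. The handles authSection and authTypeSelect
    // are the ones already referenced above; bearerTokenGroup and basicAuthGroup are
    // assumed wrapper names for illustration only, not confirmed by this patch.
    function updateAuthInputsUI() {
        // Credentials apply only to custom servers, never the local /fhir proxy.
        if (authSection) authSection.style.display = isUsingLocalHapi ? 'none' : 'block';
        const authType = authTypeSelect ? authTypeSelect.value : 'none';
        // Reveal only the inputs for the selected auth scheme.
        if (bearerTokenGroup) bearerTokenGroup.style.display = (authType === 'bearer') ? 'block' : 'none';
        if (basicAuthGroup) basicAuthGroup.style.display = (authType === 'basic') ? 'block' : 'none';
    }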
// Response Format Change Listener if (respFormatSelect && respNarrativeDiv && respOutputPre && respOutputCode) { @@ -1545,63 +1711,95 @@ document.addEventListener('DOMContentLoaded', () => { // --- Fetch Server Metadata (FIXED Local URL Handling) --- if (fetchMetadataButton) { fetchMetadataButton.addEventListener('click', async () => { - // Clear previous results immediately - if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = 'Fetching...'; - if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block'; - if (swaggerUiContainer) swaggerUiContainer.style.display = 'none'; // Hide old query list - fetchedMetadataCache = null; // Clear cache before fetch attempt - availableSystemOperations = []; + console.log("[fetchMetadata] Button clicked"); + if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = 'Fetching...'; + if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block'; + if (swaggerUiContainer) swaggerUiContainer.style.display = 'none'; + fetchedMetadataCache = null; + availableSystemOperations = []; - // Determine Base URL - FIXED const customUrl = fhirServerUrlInput.value.trim().replace(/\/+$/, ''); - const baseUrl = isUsingLocalHapi ? '/fhir' : customUrl; // Use '/fhir' proxy path if local + const baseUrl = isUsingLocalHapi ? '/fhir' : customUrl; - // Validate custom URL only if not using local HAPI - if (!isUsingLocalHapi && !baseUrl) { // Should only happen if customUrl is empty + if (!isUsingLocalHapi && !baseUrl) { + console.error("[fetchMetadata] Custom URL required"); fhirServerUrlInput.classList.add('is-invalid'); alert('Please enter a valid FHIR server URL.'); if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `Error: Custom URL required.`; return; } - // Basic format check for custom URL - if (!isUsingLocalHapi) { - try { - new URL(baseUrl); // Check if it's a parseable URL format - } catch (_) { - fhirServerUrlInput.classList.add('is-invalid'); - alert('Invalid custom URL format. Please enter a valid URL (e.g., https://example.com/fhir).'); - if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `Error: Invalid custom URL format.`; - return; - } - } + if (!isUsingLocalHapi) { + try { new URL(baseUrl); } catch (_) { + console.error("[fetchMetadata] Invalid custom URL format"); + fhirServerUrlInput.classList.add('is-invalid'); + alert('Invalid custom URL format. 
Please enter a valid URL (e.g., https://example.com/fhir).');
+                    if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `Error: Invalid custom URL format.`;
+                    return;
+                }
+                const authType = authTypeSelect.value;
+                const bearerToken = bearerTokenInput.value.trim();
+                const username = usernameInput.value.trim();
+                const password = passwordInput.value;
+                if (authType === 'bearer' && !bearerToken) {
+                    console.error("[fetchMetadata] Bearer token required");
+                    alert('Please enter a Bearer Token.');
+                    bearerTokenInput.classList.add('is-invalid');
+                    return;
+                }
+                if (authType === 'basic' && (!username || !password)) {
+                    console.error("[fetchMetadata] Username and password required");
+                    alert('Please enter both Username and Password for Basic Authentication.');
+                    if (!username) usernameInput.classList.add('is-invalid');
+                    if (!password) passwordInput.classList.add('is-invalid');
+                    return;
+                }
+            }
             fhirServerUrlInput.classList.remove('is-invalid');

-            // Construct metadata URL (always add /metadata)
             const url = `${baseUrl}/metadata`;
+            const headers = { 'Accept': 'application/fhir+json' };
+            if (!isUsingLocalHapi) {
+                const authType = authTypeSelect.value;
+                const bearerToken = bearerTokenInput.value.trim();
+                const username = usernameInput.value.trim();
+                const password = passwordInput.value;
+                if (authType === 'bearer') {
+                    headers['Authorization'] = `Bearer ${bearerToken}`;
+                    console.log("[fetchMetadata] Adding header Authorization: Bearer <redacted>");
+                } else if (authType === 'basic') {
+                    headers['Authorization'] = `Basic ${btoa(`${username}:${password}`)}`;
+                    console.log("[fetchMetadata] Adding header Authorization: Basic <redacted>");
+                }
+            }

-            console.log(`Fetching metadata from: ${url}`);
-            fetchMetadataButton.disabled = true; fetchMetadataButton.textContent = 'Fetching...';
+            console.log(`[fetchMetadata] Fetching from: ${url}`);
+            fetchMetadataButton.disabled = true;
+            fetchMetadataButton.textContent = 'Fetching...';

             try {
-                const resp = await fetch(url, { method: 'GET', headers: { 'Accept': 'application/fhir+json' } });
-                if (!resp.ok) { const errText = await resp.text(); throw new Error(`HTTP ${resp.status} ${resp.statusText}: ${errText.substring(0, 500)}`); }
+                const resp = await fetch(url, { method: 'GET', headers });
+                if (!resp.ok) {
+                    const errText = await resp.text();
+                    throw new Error(`HTTP ${resp.status} ${resp.statusText}: ${errText.substring(0, 500)}`);
+                }
                 const data = await resp.json();
-                console.log('Metadata received:', data);
-                fetchedMetadataCache = data; // Cache successful fetch
-                displayMetadataAndResourceButtons(data); // Parse and display
+                console.log('[fetchMetadata] Metadata received:', data);
+                fetchedMetadataCache = data;
+                displayMetadataAndResourceButtons(data);
             } catch (e) {
-                console.error('Metadata fetch error:', e);
+                console.error('[fetchMetadata] Error:', e);
                 if (resourceButtonsContainer) resourceButtonsContainer.innerHTML = `Error fetching metadata: ${e.message}`;
-                if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block'; // Keep container visible to show error
+                if (resourceTypesDisplayDiv) resourceTypesDisplayDiv.style.display = 'block';
                 if (swaggerUiContainer) swaggerUiContainer.style.display = 'none';
                 alert(`Error fetching metadata: ${e.message}`);
-                fetchedMetadataCache = null; // Clear cache on error
+                fetchedMetadataCache = null;
                 availableSystemOperations = [];
             } finally {
-                fetchMetadataButton.disabled = false; fetchMetadataButton.textContent = 'Fetch Metadata';
+                fetchMetadataButton.disabled = false;
+                fetchMetadataButton.textContent
= 'Fetch Metadata';
             }
         });
-    } else {
+    } else {
         console.error("Fetch Metadata button (#fetchMetadata) not found!");
     }

diff --git a/templates/retrieve_split_data.html b/templates/retrieve_split_data.html
index 21fffb8..c930322 100644
--- a/templates/retrieve_split_data.html
+++ b/templates/retrieve_split_data.html
@@ -3,7 +3,6 @@
 {% block content %}
-

Retrieve & Split Data

@@ -36,31 +35,55 @@ - {# Ensure the input field uses the form object's data #} {{ form.fhir_server_url(class="form-control", id="fhirServerUrl", style="display: none;", placeholder="e.g., https://fhir.hl7.org.au/aucore/fhir/DEFAULT", **{'aria-describedby': 'fhirServerHelp'}) }}

Toggle to use local HAPI (/fhir proxy) or enter a custom FHIR server URL.
- {# --- Checkbox Row --- #} + {# Authentication Section (Shown for Custom URL) #} + + + {# Checkbox Row #}
- {# Render Fetch Referenced Resources checkbox using the macro #} {{ render_field(form.validate_references, id='validate_references_checkbox') }}
- - {# --- End Checkbox Row --- #} - {# Render SubmitField using the form object #}
- {# Render FileField using the macro #} {{ render_field(form.split_bundle_zip, class="form-control") }} - {# Render SubmitField using the form object #}
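A follow-up worth considering: the Bearer/Basic header construction is now duplicated between the execute listener and the metadata fetch in fhir_ui_operations.html, and this template adds a third auth section. A minimal sketch of a shared helper, assuming the element handles that script already uses (isUsingLocalHapi, authTypeSelect, bearerTokenInput, usernameInput, passwordInput):

function buildAuthHeaders(baseHeaders = {}) {
    const headers = { ...baseHeaders };
    if (isUsingLocalHapi) return headers; // the local /fhir proxy needs no credentials
    if (authTypeSelect.value === 'bearer') {
        headers['Authorization'] = `Bearer ${bearerTokenInput.value.trim()}`;
    } else if (authTypeSelect.value === 'basic') {
        // Note: btoa() only accepts Latin-1; credentials containing other characters
        // would need a TextEncoder-based base64 step instead.
        headers['Authorization'] = `Basic ${btoa(`${usernameInput.value.trim()}:${passwordInput.value}`)}`;
    }
    return headers;
}

// Usage, mirroring the two existing call sites:
// const headers = buildAuthHeaders({ 'Accept': 'application/fhir+json' });
// const resp = await fetch(`${baseUrl}/metadata`, { method: 'GET', headers });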