diff --git a/app.py b/app.py index 27a0169..58d84b0 100644 --- a/app.py +++ b/app.py @@ -20,7 +20,6 @@ logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) app = Flask(__name__) -# --- Configuration --- app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'your-fallback-secret-key-here') app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', 'sqlite:////app/instance/fhir_ig.db') app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False @@ -32,7 +31,6 @@ app.config['DISPLAY_PROFILE_RELATIONSHIPS'] = True # Ensure directories exist and are writable instance_path = '/app/instance' packages_path = app.config['FHIR_PACKAGES_DIR'] - logger.debug(f"Instance path configuration: {instance_path}") logger.debug(f"Database URI: {app.config['SQLALCHEMY_DATABASE_URI']}") logger.debug(f"Packages path: {packages_path}") @@ -49,7 +47,6 @@ except Exception as e: db = SQLAlchemy(app) csrf = CSRFProtect(app) -# --- Database Model --- class ProcessedIg(db.Model): id = db.Column(db.Integer, primary_key=True) package_name = db.Column(db.String(128), nullable=False) @@ -61,10 +58,8 @@ class ProcessedIg(db.Model): complies_with_profiles = db.Column(db.JSON, nullable=True) imposed_profiles = db.Column(db.JSON, nullable=True) optional_usage_elements = db.Column(db.JSON, nullable=True) - __table_args__ = (db.UniqueConstraint('package_name', 'version', name='uq_package_version'),) -# --- API Key Middleware --- def check_api_key(): api_key = request.headers.get('X-API-Key') if not api_key and request.is_json: @@ -78,47 +73,6 @@ def check_api_key(): logger.debug("API key validated successfully") return None -# --- Routes --- -@app.route('/') -def index(): - return render_template('index.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - -@app.route('/import-ig', methods=['GET', 'POST']) -def import_ig(): - form = IgImportForm() - if form.validate_on_submit(): - name = form.package_name.data - version = form.package_version.data - dependency_mode = form.dependency_mode.data - try: - result = services.import_package_and_dependencies(name, version, dependency_mode=dependency_mode) - if result['errors'] and not result['downloaded']: - error_msg = result['errors'][0] - simplified_msg = error_msg - if "HTTP error" in error_msg and "404" in error_msg: - simplified_msg = "Package not found on registry (404). Check name and version." - elif "HTTP error" in error_msg: - simplified_msg = f"Registry error: {error_msg.split(': ', 1)[-1]}" - elif "Connection error" in error_msg: - simplified_msg = "Could not connect to the FHIR package registry." - flash(f"Failed to import {name}#{version}: {simplified_msg}", "error") - logger.error(f"Import failed critically for {name}#{version}: {error_msg}") - else: - if result['errors']: - flash(f"Partially imported {name}#{version} with errors during dependency processing. Check logs.", "warning") - for err in result['errors']: logger.warning(f"Import warning for {name}#{version}: {err}") - else: - flash(f"Successfully downloaded {name}#{version} and dependencies! 
Mode: {dependency_mode}", "success") - return redirect(url_for('view_igs')) - except Exception as e: - logger.error(f"Unexpected error during IG import: {str(e)}", exc_info=True) - flash(f"An unexpected error occurred downloading the IG: {str(e)}", "error") - else: - for field, errors in form.errors.items(): - for error in errors: - flash(f"Error in {getattr(form, field).label.text}: {error}", "danger") - return render_template('import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) - def list_downloaded_packages(packages_dir): packages = [] errors = [] @@ -159,7 +113,6 @@ def list_downloaded_packages(packages_dir): else: logger.warning(f"Skipping package {filename} due to invalid name ('{name}') or version ('{version}')") errors.append(f"Invalid package {filename}: name='{name}', version='{version}'") - # Build duplicate_groups name_counts = {} for pkg in packages: name = pkg['name'] @@ -172,6 +125,47 @@ def list_downloaded_packages(packages_dir): logger.debug(f"Duplicate groups: {duplicate_groups}") return packages, errors, duplicate_groups +@app.route('/') +def index(): + return render_template('index.html', site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) + +@app.route('/import-ig', methods=['GET', 'POST']) +def import_ig(): + form = IgImportForm() + if form.validate_on_submit(): + name = form.package_name.data + version = form.package_version.data + dependency_mode = form.dependency_mode.data + try: + result = services.import_package_and_dependencies(name, version, dependency_mode=dependency_mode) + if result['errors'] and not result['downloaded']: + error_msg = result['errors'][0] + simplified_msg = error_msg + if "HTTP error" in error_msg and "404" in error_msg: + simplified_msg = "Package not found on registry (404). Check name and version." + elif "HTTP error" in error_msg: + simplified_msg = f"Registry error: {error_msg.split(': ', 1)[-1]}" + elif "Connection error" in error_msg: + simplified_msg = "Could not connect to the FHIR package registry." + flash(f"Failed to import {name}#{version}: {simplified_msg}", "error") + logger.error(f"Import failed critically for {name}#{version}: {error_msg}") + else: + if result['errors']: + flash(f"Partially imported {name}#{version} with errors during dependency processing. Check logs.", "warning") + for err in result['errors']: + logger.warning(f"Import warning for {name}#{version}: {err}") + else: + flash(f"Successfully downloaded {name}#{version} and dependencies! 
Mode: {dependency_mode}", "success") + return redirect(url_for('view_igs')) + except Exception as e: + logger.error(f"Unexpected error during IG import: {str(e)}", exc_info=True) + flash(f"An unexpected error occurred downloading the IG: {str(e)}", "error") + else: + for field, errors in form.errors.items(): + for error in errors: + flash(f"Error in {getattr(form, field).label.text}: {error}", "danger") + return render_template('import_ig.html', form=form, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now()) + @app.route('/view-igs') def view_igs(): form = FlaskForm() @@ -230,6 +224,8 @@ def process_ig(): try: logger.info(f"Starting processing for {name}#{version} from file {filename}") package_info = services.process_package_file(tgz_path) + if package_info.get('errors'): + flash(f"Processing completed with errors for {name}#{version}: {', '.join(package_info['errors'])}", "warning") optional_usage_dict = { info['name']: True for info in package_info.get('resource_types_info', []) @@ -302,7 +298,8 @@ def delete_ig(): errors.append(f"Could not delete metadata for {filename}: {e}") logger.error(f"Error deleting {metadata_path}: {e}") if errors: - for error in errors: flash(error, "error") + for error in errors: + flash(error, "error") elif deleted_files: flash(f"Deleted: {', '.join(deleted_files)}", "success") else: @@ -356,9 +353,11 @@ def view_ig(processed_ig_id): base_list = [t for t in processed_ig.resource_types_info if not t.get('is_profile')] examples_by_type = processed_ig.examples or {} optional_usage_elements = processed_ig.optional_usage_elements or {} - logger.debug(f"Optional usage elements for {processed_ig.package_name}#{processed_ig.version}: {optional_usage_elements}") complies_with_profiles = processed_ig.complies_with_profiles or [] imposed_profiles = processed_ig.imposed_profiles or [] + logger.debug(f"Viewing IG {processed_ig.package_name}#{processed_ig.version}: " + f"{len(profile_list)} profiles, {len(base_list)} base resources, " + f"{len(optional_usage_elements)} optional elements") return render_template('cp_view_processed_ig.html', title=f"View {processed_ig.package_name}#{processed_ig.version}", processed_ig=processed_ig, @@ -462,6 +461,7 @@ def get_example_content(): package_version = request.args.get('package_version') example_member_path = request.args.get('filename') if not all([package_name, package_version, example_member_path]): + logger.warning(f"get_example_content: Missing query parameters: name={package_name}, version={package_version}, path={example_member_path}") return jsonify({"error": "Missing required query parameters: package_name, package_version, filename"}), 400 if not example_member_path.startswith('package/') or '..' 
in example_member_path: logger.warning(f"Invalid example file path requested: {example_member_path}") @@ -649,7 +649,8 @@ def api_push_ig(): for dep in dependencies_to_include: dep_name = dep.get('name') dep_version = dep.get('version') - if not dep_name or not dep_version: continue + if not dep_name or not dep_version: + continue dep_tgz_filename = services.construct_tgz_filename(dep_name, dep_version) dep_tgz_path = os.path.join(packages_dir, dep_tgz_filename) if os.path.exists(dep_tgz_path): @@ -789,83 +790,61 @@ def api_push_ig(): def validate_sample(): form = ValidationForm() validation_report = None - packages_for_template = [] - packages_dir = app.config.get('FHIR_PACKAGES_DIR') - if packages_dir: - try: - all_packages, errors, duplicate_groups = list_downloaded_packages(packages_dir) - if errors: - flash(f"Warning: Errors encountered while listing packages: {', '.join(errors)}", "warning") - filtered_packages = [ - pkg for pkg in all_packages - if isinstance(pkg.get('name'), str) and pkg.get('name') and - isinstance(pkg.get('version'), str) and pkg.get('version') - ] - packages_for_template = sorted([ - {"id": f"{pkg['name']}#{pkg['version']}", "text": f"{pkg['name']}#{pkg['version']}"} - for pkg in filtered_packages - ], key=lambda x: x['text']) - logger.debug(f"Packages for template: {packages_for_template}") - except Exception as e: - logger.error(f"Failed to list or process downloaded packages: {e}", exc_info=True) - flash("Error loading available packages.", "danger") - else: - flash("FHIR Packages directory not configured.", "danger") - logger.error("FHIR_PACKAGES_DIR is not configured in the Flask app.") - + packages = [] + packages_dir = app.config['FHIR_PACKAGES_DIR'] + if os.path.exists(packages_dir): + for filename in os.listdir(packages_dir): + if filename.endswith('.tgz'): + try: + with tarfile.open(os.path.join(packages_dir, filename), 'r:gz') as tar: + package_json = tar.extractfile('package/package.json') + if package_json: + pkg_info = json.load(package_json) + name = pkg_info.get('name') + version = pkg_info.get('version') + if name and version: + packages.append({'name': name, 'version': version}) + except Exception as e: + logger.warning(f"Error reading package {filename}: {e}") + continue if form.validate_on_submit(): package_name = form.package_name.data version = form.version.data include_dependencies = form.include_dependencies.data mode = form.mode.data - sample_input_raw = form.sample_input.data - try: - sample_input = json.loads(sample_input_raw) - logger.info(f"Starting validation (mode: {mode}) for {package_name}#{version}, deps: {include_dependencies}") + sample_input = json.loads(form.sample_input.data) if mode == 'single': validation_report = services.validate_resource_against_profile( package_name, version, sample_input, include_dependencies ) - elif mode == 'bundle': + else: validation_report = services.validate_bundle_against_profile( package_name, version, sample_input, include_dependencies ) - else: - flash("Invalid validation mode selected.", "error") - validation_report = None - if validation_report: - flash("Validation completed.", 'info') - logger.info(f"Validation Result: Valid={validation_report.get('valid')}, Errors={len(validation_report.get('errors',[]))}, Warnings={len(validation_report.get('warnings',[]))}") + flash("Validation completed.", 'success') except json.JSONDecodeError: flash("Invalid JSON format in sample input.", 'error') - logger.warning("Validation failed: Invalid JSON input.") validation_report = {'valid': False, 
'errors': ['Invalid JSON format provided.'], 'warnings': [], 'results': {}} - except FileNotFoundError as e: - flash(f"Validation Error: Required package file not found for {package_name}#{version}. Please ensure it's downloaded.", 'error') - logger.error(f"Validation failed: Package file missing - {e}") - validation_report = {'valid': False, 'errors': [f"Required package file not found: {package_name}#{version}"], 'warnings': [], 'results': {}} except Exception as e: - logger.error(f"Error validating sample: {e}", exc_info=True) - flash(f"An unexpected error occurred during validation: {str(e)}", 'error') - validation_report = {'valid': False, 'errors': [f'Unexpected error: {str(e)}'], 'warnings': [], 'results': {}} + logger.error(f"Error validating sample: {e}") + flash(f"Error validating sample: {str(e)}", 'error') + validation_report = {'valid': False, 'errors': [str(e)], 'warnings': [], 'results': {}} else: for field, errors in form.errors.items(): field_obj = getattr(form, field, None) field_label = field_obj.label.text if field_obj and hasattr(field_obj, 'label') else field for error in errors: flash(f"Error in field '{field_label}': {error}", "danger") - return render_template( 'validate_sample.html', form=form, - packages=packages_for_template, + packages=packages, validation_report=validation_report, site_name='FHIRFLARE IG Toolkit', now=datetime.datetime.now() ) -# --- App Initialization --- def create_db(): logger.debug(f"Attempting to create database tables for URI: {app.config['SQLALCHEMY_DATABASE_URI']}") try: diff --git a/forms.py b/forms.py index 97940e0..a798229 100644 --- a/forms.py +++ b/forms.py @@ -1,18 +1,18 @@ # forms.py from flask_wtf import FlaskForm from wtforms import StringField, SelectField, TextAreaField, BooleanField, SubmitField -from wtforms.validators import DataRequired, Regexp, Optional, ValidationError +from wtforms.validators import DataRequired, Regexp, ValidationError import json class IgImportForm(FlaskForm): package_name = StringField('Package Name', validators=[ DataRequired(), Regexp(r'^[a-zA-Z0-9][a-zA-Z0-9\-\.]*[a-zA-Z0-9]$', message="Invalid package name format.") - ]) + ], render_kw={'placeholder': 'e.g., hl7.fhir.au.core'}) package_version = StringField('Package Version', validators=[ DataRequired(), Regexp(r'^[a-zA-Z0-9\.\-]+$', message="Invalid version format. 
Use alphanumeric characters, dots, or hyphens (e.g., 1.2.3, 1.1.0-preview, current).") - ]) + ], render_kw={'placeholder': 'e.g., 1.1.0-preview'}) dependency_mode = SelectField('Dependency Mode', choices=[ ('recursive', 'Current Recursive'), ('patch-canonical', 'Patch Canonical Versions'), diff --git a/instance/fhir_ig.db b/instance/fhir_ig.db index bc104a6..be0ca84 100644 Binary files a/instance/fhir_ig.db and b/instance/fhir_ig.db differ diff --git a/instance/fhir_ig.db.old b/instance/fhir_ig.db.old new file mode 100644 index 0000000..20d391a Binary files /dev/null and b/instance/fhir_ig.db.old differ diff --git a/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.metadata.json b/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.metadata.json new file mode 100644 index 0000000..54b0001 --- /dev/null +++ b/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.metadata.json @@ -0,0 +1,21 @@ +{ + "package_name": "hl7.fhir.au.base", + "version": "5.1.0-preview", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + }, + { + "name": "hl7.terminology.r4", + "version": "6.2.0" + }, + { + "name": "hl7.fhir.uv.extensions.r4", + "version": "5.2.0" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.tgz b/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.tgz new file mode 100644 index 0000000..12b9c12 Binary files /dev/null and b/instance/fhir_packages/hl7.fhir.au.base-5.1.0-preview.tgz differ diff --git a/instance/fhir_packages/hl7.fhir.au.core-1.1.0-preview.metadata.json b/instance/fhir_packages/hl7.fhir.au.core-1.1.0-preview.metadata.json index 92270ae..0b89e9e 100644 --- a/instance/fhir_packages/hl7.fhir.au.core-1.1.0-preview.metadata.json +++ b/instance/fhir_packages/hl7.fhir.au.core-1.1.0-preview.metadata.json @@ -1,7 +1,7 @@ { "package_name": "hl7.fhir.au.core", "version": "1.1.0-preview", - "dependency_mode": "tree-shaking", + "dependency_mode": "recursive", "imported_dependencies": [ { "name": "hl7.fhir.r4.core", @@ -29,6 +29,5 @@ } ], "complies_with_profiles": [], - "imposed_profiles": [], - "timestamp": "2025-04-13T13:43:59.139683+00:00" + "imposed_profiles": [] } \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.r4.core-4.0.1.metadata.json b/instance/fhir_packages/hl7.fhir.r4.core-4.0.1.metadata.json index 0bfe59a..d1f669e 100644 --- a/instance/fhir_packages/hl7.fhir.r4.core-4.0.1.metadata.json +++ b/instance/fhir_packages/hl7.fhir.r4.core-4.0.1.metadata.json @@ -1,9 +1,8 @@ { "package_name": "hl7.fhir.r4.core", "version": "4.0.1", - "dependency_mode": "tree-shaking", + "dependency_mode": "recursive", "imported_dependencies": [], "complies_with_profiles": [], - "imposed_profiles": [], - "timestamp": "2025-04-13T13:44:17.228391+00:00" + "imposed_profiles": [] } \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.metadata.json b/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.metadata.json new file mode 100644 index 0000000..3544d6d --- /dev/null +++ b/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.metadata.json @@ -0,0 +1,13 @@ +{ + "package_name": "hl7.fhir.uv.extensions.r4", + "version": "5.2.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git 
a/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.tgz b/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.tgz new file mode 100644 index 0000000..e3c6914 Binary files /dev/null and b/instance/fhir_packages/hl7.fhir.uv.extensions.r4-5.2.0.tgz differ diff --git a/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.metadata.json b/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.metadata.json new file mode 100644 index 0000000..cff87e4 --- /dev/null +++ b/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.metadata.json @@ -0,0 +1,21 @@ +{ + "package_name": "hl7.fhir.uv.ipa", + "version": "1.0.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + }, + { + "name": "hl7.terminology.r4", + "version": "5.0.0" + }, + { + "name": "hl7.fhir.uv.smart-app-launch", + "version": "2.0.0" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.tgz b/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.tgz new file mode 100644 index 0000000..79f0584 Binary files /dev/null and b/instance/fhir_packages/hl7.fhir.uv.ipa-1.0.0.tgz differ diff --git a/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.metadata.json b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.metadata.json new file mode 100644 index 0000000..e2ffb4e --- /dev/null +++ b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.metadata.json @@ -0,0 +1,13 @@ +{ + "package_name": "hl7.fhir.uv.smart-app-launch", + "version": "2.0.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.tgz b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.tgz new file mode 100644 index 0000000..767a6df Binary files /dev/null and b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.0.0.tgz differ diff --git a/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.metadata.json b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.metadata.json new file mode 100644 index 0000000..d5b71d3 --- /dev/null +++ b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.metadata.json @@ -0,0 +1,17 @@ +{ + "package_name": "hl7.fhir.uv.smart-app-launch", + "version": "2.1.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + }, + { + "name": "hl7.terminology.r4", + "version": "5.0.0" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git a/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.tgz b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.tgz new file mode 100644 index 0000000..326c413 Binary files /dev/null and b/instance/fhir_packages/hl7.fhir.uv.smart-app-launch-2.1.0.tgz differ diff --git a/instance/fhir_packages/hl7.terminology.r4-5.0.0.metadata.json b/instance/fhir_packages/hl7.terminology.r4-5.0.0.metadata.json new file mode 100644 index 0000000..4d7bc33 --- /dev/null +++ b/instance/fhir_packages/hl7.terminology.r4-5.0.0.metadata.json @@ -0,0 +1,13 @@ +{ + "package_name": "hl7.terminology.r4", + "version": "5.0.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No 
newline at end of file diff --git a/instance/fhir_packages/hl7.terminology.r4-5.0.0.tgz b/instance/fhir_packages/hl7.terminology.r4-5.0.0.tgz new file mode 100644 index 0000000..91a9345 Binary files /dev/null and b/instance/fhir_packages/hl7.terminology.r4-5.0.0.tgz differ diff --git a/instance/fhir_packages/hl7.terminology.r4-6.2.0.metadata.json b/instance/fhir_packages/hl7.terminology.r4-6.2.0.metadata.json new file mode 100644 index 0000000..fd28a4e --- /dev/null +++ b/instance/fhir_packages/hl7.terminology.r4-6.2.0.metadata.json @@ -0,0 +1,13 @@ +{ + "package_name": "hl7.terminology.r4", + "version": "6.2.0", + "dependency_mode": "recursive", + "imported_dependencies": [ + { + "name": "hl7.fhir.r4.core", + "version": "4.0.1" + } + ], + "complies_with_profiles": [], + "imposed_profiles": [] +} \ No newline at end of file diff --git a/instance/fhir_packages/hl7.terminology.r4-6.2.0.tgz b/instance/fhir_packages/hl7.terminology.r4-6.2.0.tgz new file mode 100644 index 0000000..5f0aef5 Binary files /dev/null and b/instance/fhir_packages/hl7.terminology.r4-6.2.0.tgz differ diff --git a/services.py b/services.py index 120e51d..0e5b943 100644 --- a/services.py +++ b/services.py @@ -22,7 +22,7 @@ DOWNLOAD_DIR_NAME = "fhir_packages" CANONICAL_PACKAGE = ("hl7.fhir.r4.core", "4.0.1") CANONICAL_PACKAGE_ID = f"{CANONICAL_PACKAGE[0]}#{CANONICAL_PACKAGE[1]}" -# Define standard FHIR R4 base types (used selectively) +# Define standard FHIR R4 base types FHIR_R4_BASE_TYPES = { "Account", "ActivityDefinition", "AdministrableProductDefinition", "AdverseEvent", "AllergyIntolerance", "Appointment", "AppointmentResponse", "AuditEvent", "Basic", "Binary", "BiologicallyDerivedProduct", @@ -111,13 +111,7 @@ def construct_metadata_filename(name, version): return f"{sanitize_filename_part(name)}-{sanitize_filename_part(version)}.metadata.json" def parse_package_filename(filename): - """ - Parses a standard FHIR package filename (e.g., name-version.tgz) - into package name and version. Handles common variations. - Returns (name, version) tuple, or (None, None) if parsing fails. - """ - name = None - version = None + """Parses a standard FHIR package filename into name and version.""" if not filename or not filename.endswith('.tgz'): logger.debug(f"Filename '{filename}' does not end with .tgz") return None, None @@ -133,20 +127,15 @@ def parse_package_filename(filename): return name, version else: last_hyphen_index = base_name.rfind('-', 0, last_hyphen_index) - logger.warning(f"Could not confidently parse version from '{filename}'. Treating '{base_name}' as name.") + logger.warning(f"Could not parse version from '{filename}'. Treating '{base_name}' as name.") name = base_name.replace('_', '.') version = "" return name, version def find_and_extract_sd(tgz_path, resource_identifier): - """ - Helper to find and extract StructureDefinition json from a given tgz path. - Matches by resource ID, Name, or Type defined within the SD file. - Returns (sd_data, found_path_in_tar) or (None, None). 
- """ + """Helper to find and extract StructureDefinition json from a tgz path.""" sd_data = None found_path = None - logger = logging.getLogger(__name__) if not tgz_path or not os.path.exists(tgz_path): logger.error(f"File not found in find_and_extract_sd: {tgz_path}") return None, None @@ -207,7 +196,7 @@ def find_and_extract_sd(tgz_path, resource_identifier): logger.error(f"Unexpected error in find_and_extract_sd for {tgz_path}: {e}", exc_info=True) raise return sd_data, found_path - + # --- Metadata Saving/Loading --- def save_package_metadata(name, version, dependency_mode, dependencies, complies_with_profiles=None, imposed_profiles=None): """Saves dependency mode, imported dependencies, and profile relationships as metadata.""" @@ -371,7 +360,6 @@ def process_package_file(tgz_path): if must_support is True: if element_id and element_path: - # Use id for sliced elements to ensure uniqueness ms_path = element_id if slice_name else element_path ms_paths_in_this_sd.add(ms_path) has_ms_in_this_sd = True @@ -432,7 +420,6 @@ def process_package_file(tgz_path): resource_type = data.get('resourceType') if not resource_type: continue - # Prioritize meta.profile matches (from oldest version for reliability) profile_meta = data.get('meta', {}).get('profile', []) found_profile_match = False if profile_meta and isinstance(profile_meta, list): @@ -446,11 +433,10 @@ def process_package_file(tgz_path): break elif profile_url in resource_info: associated_key = profile_url - found_profile_match = True + personally_match = True logger.debug(f"Example {member.name} associated with profile {associated_key} via meta.profile") break - # Fallback to base type if not found_profile_match: key_to_use = resource_type if key_to_use not in resource_info: @@ -459,7 +445,6 @@ def process_package_file(tgz_path): logger.debug(f"Example {member.name} associated with resource type {associated_key}") referenced_types.add(resource_type) else: - # Handle non-JSON examples (from oldest version) guessed_type = base_filename_lower.split('-')[0].capitalize() guessed_profile_id = base_filename_lower.split('-')[0] key_to_use = None @@ -574,16 +559,12 @@ def process_package_file(tgz_path): # --- Validation Functions --- def navigate_fhir_path(resource, path, extension_url=None): - """ - Navigates a FHIR resource using a FHIRPath-like expression. - Returns the value(s) at the specified path or None if not found. - """ + """Navigates a FHIR resource using a FHIRPath-like expression.""" if not resource or not path: return None parts = path.split('.') current = resource for part in parts: - # Handle array indexing (e.g., element[0]) match = re.match(r'^(\w+)\[(\d+)\]$', part) if match: key, index = match.groups() @@ -596,21 +577,16 @@ def navigate_fhir_path(resource, path, extension_url=None): else: return None else: - # Handle regular path components if isinstance(current, dict) and part in current: current = current[part] else: return None - # Handle extension filtering if extension_url and isinstance(current, list): current = [item for item in current if item.get('url') == extension_url] return current def validate_resource_against_profile(package_name, version, resource, include_dependencies=True): - """ - Validates a FHIR resource against a StructureDefinition in the specified package. - Returns a dict with 'valid', 'errors', and 'warnings'. 
- """ + """Validates a FHIR resource against a StructureDefinition in the specified package.""" logger.debug(f"Validating resource {resource.get('resourceType')} against {package_name}#{version}") result = {'valid': True, 'errors': [], 'warnings': []} download_dir = _get_download_dir() @@ -629,14 +605,13 @@ def validate_resource_against_profile(package_name, version, resource, include_d # Basic validation: check required elements and Must Support elements = sd_data.get('snapshot', {}).get('element', []) for element in elements: + path = element.get('path') if element.get('min', 0) > 0: - path = element.get('path') value = navigate_fhir_path(resource, path) if value is None or (isinstance(value, list) and not value): result['valid'] = False result['errors'].append(f"Required element {path} missing") if element.get('mustSupport', False): - path = element.get('path') value = navigate_fhir_path(resource, path) if value is None or (isinstance(value, list) and not value): result['warnings'].append(f"Must Support element {path} missing or empty") @@ -644,10 +619,7 @@ def validate_resource_against_profile(package_name, version, resource, include_d return result def validate_bundle_against_profile(package_name, version, bundle, include_dependencies=True): - """ - Validates a FHIR Bundle against profiles in the specified package. - Returns a dict with 'valid', 'errors', 'warnings', and per-resource 'results'. - """ + """Validates a FHIR Bundle against profiles in the specified package.""" logger.debug(f"Validating bundle against {package_name}#{version}") result = { 'valid': True, @@ -675,6 +647,74 @@ def validate_bundle_against_profile(package_name, version, bundle, include_depen return result +# --- Structure Definition Retrieval --- +def get_structure_definition(package_name, version, resource_type): + """Fetches StructureDefinition with slicing support.""" + download_dir = _get_download_dir() + if not download_dir: + logger.error("Could not get download directory.") + return {'error': 'Download directory not accessible'} + + tgz_filename = construct_tgz_filename(package_name, version) + tgz_path = os.path.join(download_dir, tgz_filename) + sd_data, sd_path = find_and_extract_sd(tgz_path, resource_type) + + if not sd_data: + # Fallback to canonical package + canonical_tgz = construct_tgz_filename(*CANONICAL_PACKAGE) + canonical_path = os.path.join(download_dir, canonical_tgz) + sd_data, sd_path = find_and_extract_sd(canonical_path, resource_type) + if sd_data: + logger.info(f"Using canonical SD for {resource_type} from {canonical_path}") + elements = sd_data.get('snapshot', {}).get('element', []) + return { + 'elements': elements, + 'must_support_paths': [el['path'] for el in elements if el.get('mustSupport', False)], + 'slices': [], + 'fallback_used': True, + 'source_package': f"{CANONICAL_PACKAGE[0]}#{CANONICAL_PACKAGE[1]}" + } + logger.error(f"No StructureDefinition found for {resource_type} in {package_name}#{version} or canonical package") + return {'error': f"No StructureDefinition for {resource_type}"} + + elements = sd_data.get('snapshot', {}).get('element', []) + must_support_paths = [] + slices = [] + + # Process elements for must-support and slicing + for element in elements: + path = element.get('path', '') + element_id = element.get('id', '') + slice_name = element.get('sliceName') + if element.get('mustSupport', False): + ms_path = element_id if slice_name else path + must_support_paths.append(ms_path) + if 'slicing' in element: + slice_info = { + 'path': path, + 'sliceName': 
slice_name, + 'discriminator': element.get('slicing', {}).get('discriminator', []), + 'nested_slices': [] + } + # Find nested slices + for sub_element in elements: + if sub_element['path'].startswith(path + '.') and 'slicing' in sub_element: + sub_slice_name = sub_element.get('sliceName') + slice_info['nested_slices'].append({ + 'path': sub_element['path'], + 'sliceName': sub_slice_name, + 'discriminator': sub_element.get('slicing', {}).get('discriminator', []) + }) + slices.append(slice_info) + + logger.debug(f"StructureDefinition for {resource_type}: {len(elements)} elements, {len(must_support_paths)} must-support paths, {len(slices)} slices") + return { + 'elements': elements, + 'must_support_paths': sorted(list(set(must_support_paths))), + 'slices': slices, + 'fallback_used': False + } + # --- Other Service Functions --- def _build_package_index(download_dir): """Builds an index of canonical URLs to package details from .index.json files.""" @@ -721,7 +761,7 @@ def _load_definition(details, download_dir): """Loads a StructureDefinition from package details.""" if not details: return None - tgz_path = os.path.join(download_dir, _construct_tgz_filename(details['package_name'], details['package_version'])) + tgz_path = os.path.join(download_dir, construct_tgz_filename(details['package_name'], details['package_version'])) try: with tarfile.open(tgz_path, "r:gz") as tar: member_path = f"package/{details['filename']}" @@ -735,7 +775,9 @@ def _load_definition(details, download_dir): except Exception as e: logger.error(f"Failed to load definition {details['filename']} from {tgz_path}: {e}") return None + def download_package(name, version): + """Downloads a single FHIR package.""" download_dir = _get_download_dir() if not download_dir: return None, "Download dir error" filename = construct_tgz_filename(name, version) @@ -760,6 +802,7 @@ def download_package(name, version): return None, f"File write error: {e}" def extract_dependencies(tgz_path): + """Extracts dependencies from package.json.""" package_json_path = "package/package.json" dependencies = {} error_message = None @@ -782,18 +825,14 @@ def extract_used_types(tgz_path): used_types = set() if not tgz_path or not os.path.exists(tgz_path): logger.error(f"Cannot extract used types: File not found at {tgz_path}") - return used_types # Return empty set - + return used_types try: with tarfile.open(tgz_path, "r:gz") as tar: for member in tar: - # Process only JSON files within the 'package/' directory if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')): continue - # Skip metadata files if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']: continue - fileobj = None try: fileobj = tar.extractfile(member) @@ -801,134 +840,79 @@ def extract_used_types(tgz_path): content_bytes = fileobj.read() content_string = content_bytes.decode('utf-8-sig') data = json.loads(content_string) - - if not isinstance(data, dict): continue # Skip if not a valid JSON object - + if not isinstance(data, dict): continue resource_type = data.get('resourceType') - if not resource_type: continue # Skip if no resourceType - - # Add the resource type itself + if not resource_type: continue used_types.add(resource_type) - - # --- StructureDefinition Specific Extraction --- if resource_type == 'StructureDefinition': - # Add the type this SD defines/constrains sd_type = data.get('type') if sd_type: used_types.add(sd_type) - # Add the base definition type 
if it's a profile base_def = data.get('baseDefinition') if base_def: base_type = base_def.split('/')[-1] - # Avoid adding primitive types like 'Element', 'Resource' etc. if not needed - if base_type and base_type[0].isupper(): - used_types.add(base_type) - - # Extract types from elements (snapshot or differential) + if base_type and base_type[0].isupper(): used_types.add(base_type) elements = data.get('snapshot', {}).get('element', []) or data.get('differential', {}).get('element', []) for element in elements: if isinstance(element, dict) and 'type' in element: for t in element.get('type', []): - # Add code (element type) code = t.get('code') if code and code[0].isupper(): used_types.add(code) - # Add targetProfile types (Reference targets) for profile_uri in t.get('targetProfile', []): - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - # Add types from contentReference - content_ref = element.get('contentReference') - if content_ref and content_ref.startswith('#'): - # This usually points to another element path within the same SD - # Trying to resolve this fully can be complex. - # We might infer types based on the path referenced if needed. - pass - - # --- General Resource Type Extraction --- + if profile_uri: + profile_type = profile_uri.split('/')[-1] + if profile_type and profile_type[0].isupper(): used_types.add(profile_type) else: - # Look for meta.profile for referenced profiles -> add profile type profiles = data.get('meta', {}).get('profile', []) for profile_uri in profiles: - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - - # ValueSet: Check compose.include.system (often points to CodeSystem) + if profile_uri: + profile_type = profile_uri.split('/')[-1] + if profile_type and profile_type[0].isupper(): used_types.add(profile_type) if resource_type == 'ValueSet': for include in data.get('compose', {}).get('include', []): system = include.get('system') - # Heuristic: If it looks like a FHIR core codesystem URL, extract type if system and system.startswith('http://hl7.org/fhir/'): type_name = system.split('/')[-1] - # Check if it looks like a ResourceType - if type_name and type_name[0].isupper() and not type_name.startswith('sid'): # Avoid things like sid/us-ssn - used_types.add(type_name) - # Could add more heuristics for other terminology servers - - # CapabilityStatement: Check rest.resource.type and rest.resource.profile + if type_name and type_name[0].isupper() and not type_name.startswith('sid'): + used_types.add(type_name) if resource_type == 'CapabilityStatement': - for rest_item in data.get('rest', []): - for resource_item in rest_item.get('resource', []): - res_type = resource_item.get('type') - if res_type and res_type[0].isupper(): used_types.add(res_type) - profile_uri = resource_item.get('profile') - if profile_uri: - profile_type = profile_uri.split('/')[-1] - if profile_type and profile_type[0].isupper(): used_types.add(profile_type) - - - # --- Generic recursive search for 'reference' fields? --- - # This could be expensive. Let's rely on SDs for now. 
- # def find_references(obj): - # if isinstance(obj, dict): - # for k, v in obj.items(): - # if k == 'reference' and isinstance(v, str): - # ref_type = v.split('/')[0] - # if ref_type and ref_type[0].isupper(): used_types.add(ref_type) - # else: - # find_references(v) - # elif isinstance(obj, list): - # for item in obj: - # find_references(item) - # find_references(data) - + for rest_item in data.get('rest', []): + for resource_item in rest_item.get('resource', []): + res_type = resource_item.get('type') + if res_type and res_type[0].isupper(): used_types.add(res_type) + profile_uri = resource_item.get('profile') + if profile_uri: + profile_type = profile_uri.split('/')[-1] + if profile_type and profile_type[0].isupper(): used_types.add(profile_type) except json.JSONDecodeError as e: - logger.warning(f"Could not parse JSON in {member.name} for used types: {e}") + logger.warning(f"Could not parse JSON in {member.name}: {e}") except UnicodeDecodeError as e: - logger.warning(f"Could not decode {member.name} for used types: {e}") + logger.warning(f"Could not decode {member.name}: {e}") except Exception as e: - logger.warning(f"Could not process member {member.name} for used types: {e}") + logger.warning(f"Could not process member {member.name}: {e}") finally: if fileobj: fileobj.close() - except tarfile.ReadError as e: - logger.error(f"Tar ReadError extracting used types from {tgz_path}: {e}") + logger.error(f"Tar ReadError extracting used types from {tgz_path}: {e}") except tarfile.TarError as e: - logger.error(f"TarError extracting used types from {tgz_path}: {e}") + logger.error(f"TarError extracting used types from {tgz_path}: {e}") except FileNotFoundError: - logger.error(f"Package file not found for used type extraction: {tgz_path}") + logger.error(f"Package file not found: {tgz_path}") except Exception as e: logger.error(f"Error extracting used types from {tgz_path}: {e}", exc_info=True) - - # Filter out potential primitives or base types that aren't resources? 
- # E.g., 'string', 'boolean', 'Element', 'BackboneElement', 'Resource' - core_non_resource_types = {'string', 'boolean', 'integer', 'decimal', 'uri', 'url', 'canonical', - 'base64Binary', 'instant', 'date', 'dateTime', 'time', 'code', 'oid', 'id', - 'markdown', 'unsignedInt', 'positiveInt', 'xhtml', - 'Element', 'BackboneElement', 'Resource', 'DomainResource', 'DataType'} + core_non_resource_types = { + 'string', 'boolean', 'integer', 'decimal', 'uri', 'url', 'canonical', 'base64Binary', 'instant', + 'date', 'dateTime', 'time', 'code', 'oid', 'id', 'markdown', 'unsignedInt', 'positiveInt', 'xhtml', + 'Element', 'BackboneElement', 'Resource', 'DomainResource', 'DataType' + } final_used_types = {t for t in used_types if t not in core_non_resource_types and t[0].isupper()} - logger.debug(f"Extracted used types from {os.path.basename(tgz_path)}: {final_used_types}") return final_used_types - def map_types_to_packages(used_types, all_dependencies, download_dir): - """Maps used types to packages by checking .index.json files for exported types.""" + """Maps used types to packages by checking .index.json files.""" type_to_package = {} processed_types = set() - - # Load .index.json from each package to map types for (pkg_name, pkg_version), _ in all_dependencies.items(): tgz_filename = construct_tgz_filename(pkg_name, pkg_version) tgz_path = os.path.join(download_dir, tgz_filename) @@ -950,30 +934,25 @@ def map_types_to_packages(used_types, all_dependencies, download_dir): if sd_name in used_types: type_to_package[sd_name] = (pkg_name, pkg_version) processed_types.add(sd_name) - logger.debug(f"Mapped type '{sd_name}' to package '{pkg_name}#{pkg_version}' via .index.json") + logger.debug(f"Mapped type '{sd_name}' to package '{pkg_name}#{pkg_version}'") except Exception as e: logger.warning(f"Failed to process .index.json for {pkg_name}#{pkg_version}: {e}") - - # Fallback: Use heuristic matching for unmapped types for t in used_types - processed_types: for (pkg_name, pkg_version), _ in all_dependencies.items(): if t.lower() in pkg_name.lower(): type_to_package[t] = (pkg_name, pkg_version) processed_types.add(t) - logger.debug(f"Fallback: Mapped type '{t}' to package '{pkg_name}#{pkg_version}' via name heuristic") + logger.debug(f"Fallback: Mapped type '{t}' to package '{pkg_name}#{pkg_version}'") break - - # Final fallback: Map remaining types to canonical package canonical_name, canonical_version = CANONICAL_PACKAGE for t in used_types - processed_types: type_to_package[t] = CANONICAL_PACKAGE logger.debug(f"Fallback: Mapped type '{t}' to canonical package {canonical_name}#{canonical_version}") - logger.debug(f"Final type-to-package mapping: {type_to_package}") return type_to_package def import_package_and_dependencies(initial_name, initial_version, dependency_mode='recursive'): - """Orchestrates recursive download and dependency extraction based on the dependency mode.""" + """Orchestrates recursive download and dependency extraction.""" logger.info(f"Starting import for {initial_name}#{initial_version} with dependency_mode={dependency_mode}") download_dir = _get_download_dir() if not download_dir: @@ -994,17 +973,14 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo while pending_queue: name, version = pending_queue.pop(0) package_id_tuple = (name, version) - if package_id_tuple in results['processed']: logger.debug(f"Skipping already processed package: {name}#{version}") continue - logger.info(f"Processing package from queue: {name}#{version}") save_path, 
dl_error = download_package(name, version) if dl_error: results['errors'].append(f"Download failed for {name}#{version}: {dl_error}") continue - results['downloaded'][package_id_tuple] = save_path dependencies, dep_error = extract_dependencies(save_path) if dep_error: @@ -1015,10 +991,8 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo results['errors'].append(f"Dependency extraction returned critical error for {name}#{version}.") results['processed'].add(package_id_tuple) continue - results['all_dependencies'][package_id_tuple] = dependencies results['processed'].add(package_id_tuple) - current_package_deps = [] for dep_name, dep_version in dependencies.items(): if isinstance(dep_name, str) and isinstance(dep_version, str) and dep_name and dep_version: @@ -1027,22 +1001,17 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo if dep_tuple not in all_found_dependencies: all_found_dependencies.add(dep_tuple) results['dependencies'].append({"name": dep_name, "version": dep_version}) - if dep_tuple not in queued_or_processed_lookup: should_queue = False if dependency_mode == 'recursive': should_queue = True elif dependency_mode == 'patch-canonical' and dep_tuple == CANONICAL_PACKAGE: should_queue = True - # Tree-shaking dependencies are handled post-loop for the initial package if should_queue: logger.debug(f"Adding dependency to queue ({dependency_mode}): {dep_name}#{dep_version}") pending_queue.append(dep_tuple) queued_or_processed_lookup.add(dep_tuple) - save_package_metadata(name, version, dependency_mode, current_package_deps) - - # Tree-shaking: Process dependencies for the initial package if dependency_mode == 'tree-shaking' and package_id_tuple == (initial_name, initial_version): logger.info(f"Performing tree-shaking for {initial_name}#{initial_version}") used_types = extract_used_types(save_path) @@ -1052,19 +1021,16 @@ def import_package_and_dependencies(initial_name, initial_version, dependency_mo if CANONICAL_PACKAGE not in tree_shaken_deps: tree_shaken_deps.add(CANONICAL_PACKAGE) logger.debug(f"Ensuring canonical package {CANONICAL_PACKAGE} for tree-shaking") - for dep_tuple in tree_shaken_deps: if dep_tuple not in queued_or_processed_lookup: logger.info(f"Queueing tree-shaken dependency: {dep_tuple[0]}#{dep_tuple[1]}") pending_queue.append(dep_tuple) queued_or_processed_lookup.add(dep_tuple) - results['dependencies'] = [{"name": d[0], "version": d[1]} for d in all_found_dependencies] logger.info(f"Import finished for {initial_name}#{initial_version}. Processed: {len(results['processed'])}, Downloaded: {len(results['downloaded'])}, Errors: {len(results['errors'])}") return results - -# --- Standalone Test/Example Usage --- +# --- Standalone Test --- if __name__ == '__main__': logger.info("Running services.py directly for testing.") class MockFlask: diff --git a/templates/cp_downloaded_igs.html b/templates/cp_downloaded_igs.html index 70e6ee0..7ba03c1 100644 --- a/templates/cp_downloaded_igs.html +++ b/templates/cp_downloaded_igs.html @@ -11,7 +11,7 @@
Import IGs Manage FHIR Packages - Upload IG's + Upload IGs
@@ -52,7 +52,7 @@ {% for pkg in packages %} {% set is_processed = (pkg.name, pkg.version) in processed_ids %} {% set is_duplicate = pkg.name in duplicate_groups %} {% set group_color = group_colors[pkg.name] if (is_duplicate and pkg.name in group_colors) else 'bg-warning' if is_duplicate else '' %} @@ -87,7 +86,7 @@ {% if duplicate_groups %} -

Duplicate dependencies detected: +

Duplicate dependencies detected: {% for name, versions in duplicate_groups.items() %} {% set group_color = group_colors[name] if name in group_colors else 'bg-warning' %} {{ name }} ({{ versions|join(', ') }}) @@ -109,7 +108,10 @@

Processed Packages ({{ processed_list|length }})
{% if processed_list %} -

MS = Contains Must Support Elements

+

+ MS = Contains Must Support Elements
+ Optional MS Ext = Optional Extension with Must Support Sub-Elements +

Resource Types in the list will be both Profile and Base Type:

@@ -126,7 +128,8 @@ {% if types_info %}
{% for type_info in types_info %} - {{ type_info.name }} + {{ type_info.name }} {% endfor %}
{% else %} @@ -160,4 +163,4 @@ {% block scripts %} {{ super() }} -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/templates/cp_view_processed_ig.html b/templates/cp_view_processed_ig.html index 64b6643..da8dc57 100644 --- a/templates/cp_view_processed_ig.html +++ b/templates/cp_view_processed_ig.html @@ -74,7 +74,7 @@ MS = Contains Must Support Elements
Optional MS Ext = Optional Extension with Must Support Sub-Elements -
+

{% if profile_list %}

Examples = Examples will be displayed when selecting profile Types if contained in the IG

@@ -84,24 +84,17 @@ - - {# --- Badge Logic --- #} {% if type_info.must_support %} - {# Check if it's marked as optional_usage (Extension with internal MS) #} {% if optional_usage_elements.get(type_info.name) %} - {# Blue/Info badge for Optional Extension with MS #} - {{ type_info.name }} + {{ type_info.name }} {% else %} - {# Yellow/Warning badge for standard Profile with MS #} {{ type_info.name }} {% endif %} {% else %} - {# Default light badge if no Must Support #} {{ type_info.name }} {% endif %} - {# --- End Badge Logic --- #} {% endfor %} @@ -115,11 +108,10 @@ - {# Base types usually don't have MS directly, but check just in case #} {% if type_info.must_support %} - {{ type_info.name }} + {{ type_info.name }} {% else %} {{ type_info.name }} {% endif %} @@ -193,7 +185,8 @@ {% else %} {% endif %} - + + + + + +{% endblock %} \ No newline at end of file diff --git a/templates/validate_sample.html b/templates/validate_sample.html index cb61bdf..f9b31f3 100644 --- a/templates/validate_sample.html +++ b/templates/validate_sample.html @@ -27,11 +27,11 @@ Select a package from the list below or enter a new one (e.g., hl7.fhir.us.core).
- {% if packages %} {% for pkg in packages %} - + {% endfor %} {% else %} @@ -39,14 +39,14 @@
- {{ form.package_name(class="form-control") }} + {{ form.package_name(class="form-control", id=form.package_name.id) }} {% for error in form.package_name.errors %}
{{ error }}
- {% endfor %} + {% endfor %}
- {{ form.version(class="form-control") }} + {{ form.version(class="form-control", id=form.version.id) }} {% for error in form.version.errors %}
{{ error }}
{% endfor %} @@ -125,21 +125,31 @@ {% endblock %} \ No newline at end of file