Joshua Hare 2025-04-26 22:51:52 +10:00
parent 02d58facbb
commit cfe4f3dd4f
19 changed files with 16589 additions and 2470 deletions

app.py

@ -587,219 +587,15 @@ def view_ig(processed_ig_id):
# return Response(json.dumps(response_data, indent=None, separators=(',', ':')), mimetype='application/json')
#-----------------------------------------------------------------------------------------------------------------------------------
# --- Full /get-structure Function ---
@app.route('/get-structure')
def get_structure():
package_name = request.args.get('package_name')
package_version = request.args.get('package_version')
# This is the StructureDefinition ID/Name or base ResourceType
resource_type = request.args.get('resource_type')
view = request.args.get('view', 'snapshot') # Keep for potential future use
# --- Parameter Validation ---
if not all([package_name, package_version, resource_type]):
logger.warning("get_structure: Missing query parameters: package_name=%s, package_version=%s, resource_type=%s", package_name, package_version, resource_type)
return jsonify({"error": "Missing required query parameters: package_name, package_version, resource_type"}), 400
# --- Package Directory Setup ---
packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
if not packages_dir:
logger.error("FHIR_PACKAGES_DIR not configured.")
return jsonify({"error": "Server configuration error: Package directory not set."}), 500
# --- Paths setup ---
tgz_filename = services.construct_tgz_filename(package_name, package_version)
tgz_path = os.path.join(packages_dir, tgz_filename)
# Assuming CANONICAL_PACKAGE is defined in services (e.g., ('hl7.fhir.r4.core', '4.0.1'))
core_package_name, core_package_version = services.CANONICAL_PACKAGE
core_tgz_filename = services.construct_tgz_filename(core_package_name, core_package_version)
core_tgz_path = os.path.join(packages_dir, core_tgz_filename)
sd_data = None
search_params_data = [] # Initialize search params list
fallback_used = False
source_package_id = f"{package_name}#{package_version}"
base_resource_type_for_sp = None # Variable to store the base type for SP search
logger.debug(f"Attempting to find SD for '{resource_type}' in {tgz_filename}")
# --- Fetch SD Data (Primary Package) ---
primary_package_exists = os.path.exists(tgz_path)
core_package_exists = os.path.exists(core_tgz_path)
if primary_package_exists:
try:
# Assuming find_and_extract_sd handles narrative removal
sd_data, _ = services.find_and_extract_sd(tgz_path, resource_type)
if sd_data:
# Determine the base resource type from the fetched SD
base_resource_type_for_sp = sd_data.get('type')
logger.debug(f"Determined base resource type '{base_resource_type_for_sp}' from primary SD '{resource_type}'")
except Exception as e:
logger.error(f"Unexpected error extracting SD '{resource_type}' from primary package {tgz_path}: {e}", exc_info=True)
sd_data = None # Ensure sd_data is None if extraction failed
# --- Fallback SD Check (if primary failed or file didn't exist) ---
if sd_data is None:
logger.info(f"SD for '{resource_type}' not found or failed to load from {source_package_id}. Attempting fallback to {services.CANONICAL_PACKAGE_ID}.")
if not core_package_exists:
logger.error(f"Core package {services.CANONICAL_PACKAGE_ID} not found locally at {core_tgz_path}.")
error_message = f"SD for '{resource_type}' not found in primary package, and core package is missing." if primary_package_exists else f"Primary package {package_name}#{package_version} and core package are missing."
return jsonify({"error": error_message}), 500 if primary_package_exists else 404
try:
sd_data, _ = services.find_and_extract_sd(core_tgz_path, resource_type)
if sd_data is not None:
fallback_used = True
source_package_id = services.CANONICAL_PACKAGE_ID
base_resource_type_for_sp = sd_data.get('type') # Store base type from fallback SD
logger.info(f"Found SD for '{resource_type}' in fallback package {source_package_id}. Base type: '{base_resource_type_for_sp}'")
except Exception as e:
logger.error(f"Unexpected error extracting SD '{resource_type}' from fallback {core_tgz_path}: {e}", exc_info=True)
return jsonify({"error": f"Unexpected error reading fallback StructureDefinition: {str(e)}"}), 500
# --- Check if SD data was ultimately found ---
if not sd_data:
logger.error(f"SD for '{resource_type}' could not be found in primary or fallback packages.")
return jsonify({"error": f"StructureDefinition for '{resource_type}' not found."}), 404
# --- Fetch Search Parameters (Primary Package First) ---
# find_and_extract_search_params returns a list of dicts with basic SP info
if base_resource_type_for_sp and primary_package_exists:
try:
logger.info(f"Fetching SearchParameters for base type '{base_resource_type_for_sp}' from primary package {tgz_path}")
search_params_data = services.find_and_extract_search_params(tgz_path, base_resource_type_for_sp)
except Exception as e:
logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from primary package {tgz_path}: {e}", exc_info=True)
search_params_data = [] # Continue with empty list on error
elif not primary_package_exists:
logger.warning(f"Original package {tgz_path} not found, cannot search it for specific SearchParameters.")
elif not base_resource_type_for_sp:
logger.warning(f"Base resource type could not be determined for '{resource_type}', cannot search for SearchParameters.")
# --- Fetch Search Parameters (Fallback to Core Package if needed) ---
if not search_params_data and base_resource_type_for_sp and core_package_exists:
logger.info(f"No relevant SearchParameters found in primary package for '{base_resource_type_for_sp}'. Searching core package {core_tgz_path}.")
try:
search_params_data = services.find_and_extract_search_params(core_tgz_path, base_resource_type_for_sp)
if search_params_data:
logger.info(f"Found {len(search_params_data)} SearchParameters for '{base_resource_type_for_sp}' in core package.")
except Exception as e:
logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from core package {core_tgz_path}: {e}", exc_info=True)
search_params_data = [] # Continue with empty list on error
elif not search_params_data and not core_package_exists:
logger.warning(f"Core package {core_tgz_path} not found, cannot perform fallback search for SearchParameters.")
# --- Prepare Snapshot/Differential Elements ---
snapshot_elements = sd_data.get('snapshot', {}).get('element', [])
differential_elements = sd_data.get('differential', {}).get('element', [])
# Create set of IDs from differential elements for efficient lookup
differential_ids = {el.get('id') for el in differential_elements if el.get('id')}
logger.debug(f"Found {len(differential_ids)} unique IDs in differential.")
enriched_elements = []
if snapshot_elements:
logger.debug(f"Processing {len(snapshot_elements)} snapshot elements to add isInDifferential flag.")
for element in snapshot_elements:
element_id = element.get('id')
# Add the isInDifferential flag based on presence in differential_ids set
element['isInDifferential'] = bool(element_id and element_id in differential_ids)
enriched_elements.append(element)
# remove_narrative should ideally be handled within find_and_extract_sd,
# but applying it again here ensures it's done if the service function missed it.
enriched_elements = [services.remove_narrative(el) for el in enriched_elements]
else:
# If no snapshot, log warning. Front-end might need adjustment if only differential is sent.
logger.warning(f"No snapshot found for {resource_type} in {source_package_id}. Returning empty element list.")
enriched_elements = [] # Or consider returning differential and handle in JS
# --- Retrieve Must Support Paths from DB ---
must_support_paths = []
# Query DB once for the ProcessedIg record
processed_ig_record = ProcessedIg.query.filter_by(package_name=package_name, version=package_version).first()
if processed_ig_record and processed_ig_record.must_support_elements:
ms_elements_dict = processed_ig_record.must_support_elements
# Try getting MS paths using the profile ID/name first, fallback to base type
must_support_paths = ms_elements_dict.get(resource_type, [])
if not must_support_paths and base_resource_type_for_sp:
must_support_paths = ms_elements_dict.get(base_resource_type_for_sp, [])
if must_support_paths:
logger.debug(f"Retrieved {len(must_support_paths)} MS paths using base type key '{base_resource_type_for_sp}' from DB.")
elif must_support_paths:
logger.debug(f"Retrieved {len(must_support_paths)} MS paths using profile key '{resource_type}' from DB.")
else:
logger.debug(f"No specific MS paths found for keys '{resource_type}' or '{base_resource_type_for_sp}' in DB.")
else:
logger.debug(f"No processed IG record or no must_support_elements found in DB for {package_name}#{package_version}")
# --- Fetch and Merge Conformance Data ---
search_param_conformance_rules = {}
if base_resource_type_for_sp: # Only proceed if we identified the base type
# Reuse the DB record queried for Must Support if available
if processed_ig_record:
# Check if the record has the conformance data attribute and it's not None/empty
# **IMPORTANT**: This assumes 'search_param_conformance' column was added to the model
if hasattr(processed_ig_record, 'search_param_conformance') and processed_ig_record.search_param_conformance:
all_conformance_data = processed_ig_record.search_param_conformance
# Get the specific rules map for the current base resource type
search_param_conformance_rules = all_conformance_data.get(base_resource_type_for_sp, {})
logger.debug(f"Retrieved conformance rules for {base_resource_type_for_sp} from DB: {search_param_conformance_rules}")
else:
logger.warning(f"ProcessedIg record found, but 'search_param_conformance' attribute/data is missing or empty for {package_name}#{package_version}.")
else:
# This case should be rare if MS check already happened, but handles it
logger.warning(f"No ProcessedIg record found for {package_name}#{package_version} to get conformance rules.")
# Merge the retrieved conformance rules into the search_params_data list
if search_params_data:
logger.debug(f"Merging conformance data into {len(search_params_data)} search parameters.")
for param in search_params_data:
param_code = param.get('code')
if param_code:
# Lookup the code in the rules; default to 'Optional' if not found
conformance_level = search_param_conformance_rules.get(param_code, 'Optional')
param['conformance'] = conformance_level # Update the dictionary
else:
# Handle cases where SearchParameter might lack a 'code' (should be rare)
param['conformance'] = 'Unknown'
logger.debug("Finished merging conformance data.")
else:
logger.debug(f"No search parameters found for {base_resource_type_for_sp} to merge conformance data into.")
else:
logger.warning(f"Cannot fetch conformance data because base resource type (e.g., Patient) for '{resource_type}' could not be determined.")
# Ensure existing search params still have a default conformance
for param in search_params_data:
if 'conformance' not in param or param['conformance'] == 'N/A':
param['conformance'] = 'Optional'
# --- Construct the final response ---
response_data = {
'elements': enriched_elements,
'must_support_paths': must_support_paths,
# This list now includes the 'conformance' field with actual values (or 'Optional'/'Unknown')
'search_parameters': search_params_data,
'fallback_used': fallback_used,
'source_package': source_package_id
# Consider explicitly including the raw sd_data['differential'] if needed by JS,
# otherwise keep it excluded to reduce payload size.
# 'differential_elements': differential_elements
}
# Use Response object for consistent JSON formatting and smaller payload
# indent=None, separators=(',', ':') creates the most compact JSON
return Response(json.dumps(response_data, indent=None, separators=(',', ':')), mimetype='application/json')
# --- End of /get-structure Function ---
@app.route('/get-example')
def get_example():
package_name = request.args.get('package_name')
- version = request.args.get('package_version')
+ version = request.args.get('version')
filename = request.args.get('filename')
+ include_narrative = request.args.get('include_narrative', 'false').lower() == 'true'
if not all([package_name, version, filename]):
logger.warning("get_example: Missing query parameters: package_name=%s, version=%s, filename=%s", package_name, version, filename)
- return jsonify({"error": "Missing required query parameters: package_name, package_version, filename"}), 400
+ return jsonify({"error": "Missing required query parameters: package_name, version, filename"}), 400
if not filename.startswith('package/') or '..' in filename:
logger.warning(f"Invalid example file path requested: {filename}")
return jsonify({"error": "Invalid example file path."}), 400
@ -819,12 +615,9 @@ def get_example():
with tar.extractfile(example_member) as example_fileobj:
content_bytes = example_fileobj.read()
content_string = content_bytes.decode('utf-8-sig')
- # Parse JSON to remove narrative
content = json.loads(content_string)
- if 'text' in content:
- logger.debug(f"Removing narrative text from example '{filename}'")
- del content['text']
- # Return filtered JSON content as a compact string
+ if not include_narrative:
+ content = services.remove_narrative(content, include_narrative=False)
filtered_content_string = json.dumps(content, separators=(',', ':'), sort_keys=False)
return Response(filtered_content_string, mimetype='application/json')
except KeyError:
@ -849,6 +642,154 @@ def get_example():
logger.error(f"Unexpected error getting example '{filename}' from {tgz_filename}: {e}", exc_info=True) logger.error(f"Unexpected error getting example '{filename}' from {tgz_filename}: {e}", exc_info=True)
return jsonify({"error": f"Unexpected error: {str(e)}"}), 500 return jsonify({"error": f"Unexpected error: {str(e)}"}), 500
@app.route('/get-structure')
def get_structure():
package_name = request.args.get('package_name')
version = request.args.get('version')
resource_type = request.args.get('resource_type')
view = request.args.get('view', 'snapshot')
include_narrative = request.args.get('include_narrative', 'false').lower() == 'true'
raw = request.args.get('raw', 'false').lower() == 'true'
profile_url = request.args.get('profile_url')
if not all([package_name, version, resource_type]):
logger.warning("get_structure: Missing query parameters: package_name=%s, version=%s, resource_type=%s", package_name, version, resource_type)
return jsonify({"error": "Missing required query parameters: package_name, version, resource_type"}), 400
packages_dir = current_app.config.get('FHIR_PACKAGES_DIR')
if not packages_dir:
logger.error("FHIR_PACKAGES_DIR not configured.")
return jsonify({"error": "Server configuration error: Package directory not set."}), 500
tgz_filename = services.construct_tgz_filename(package_name, version)
tgz_path = os.path.join(packages_dir, tgz_filename)
core_package_name, core_package_version = services.CANONICAL_PACKAGE
core_tgz_filename = services.construct_tgz_filename(core_package_name, core_package_version)
core_tgz_path = os.path.join(packages_dir, core_tgz_filename)
sd_data = None
search_params_data = []
fallback_used = False
source_package_id = f"{package_name}#{version}"
base_resource_type_for_sp = None
logger.debug(f"Attempting to find SD for '{resource_type}' in {tgz_filename}")
primary_package_exists = os.path.exists(tgz_path)
core_package_exists = os.path.exists(core_tgz_path)
if primary_package_exists:
try:
sd_data, _ = services.find_and_extract_sd(tgz_path, resource_type, profile_url=profile_url, include_narrative=include_narrative, raw=raw)
if sd_data:
base_resource_type_for_sp = sd_data.get('type')
logger.debug(f"Determined base resource type '{base_resource_type_for_sp}' from primary SD '{resource_type}'")
except Exception as e:
logger.error(f"Unexpected error extracting SD '{resource_type}' from primary package {tgz_path}: {e}", exc_info=True)
sd_data = None
if sd_data is None:
logger.info(f"SD for '{resource_type}' not found or failed to load from {source_package_id}. Attempting fallback to {services.CANONICAL_PACKAGE_ID}.")
if not core_package_exists:
logger.error(f"Core package {services.CANONICAL_PACKAGE_ID} not found locally at {core_tgz_path}.")
error_message = f"SD for '{resource_type}' not found in primary package, and core package is missing." if primary_package_exists else f"Primary package {package_name}#{version} and core package are missing."
return jsonify({"error": error_message}), 500 if primary_package_exists else 404
try:
sd_data, _ = services.find_and_extract_sd(core_tgz_path, resource_type, profile_url=profile_url, include_narrative=include_narrative, raw=raw)
if sd_data is not None:
fallback_used = True
source_package_id = services.CANONICAL_PACKAGE_ID
base_resource_type_for_sp = sd_data.get('type')
logger.info(f"Found SD for '{resource_type}' in fallback package {source_package_id}. Base type: '{base_resource_type_for_sp}'")
except Exception as e:
logger.error(f"Unexpected error extracting SD '{resource_type}' from fallback {core_tgz_path}: {e}", exc_info=True)
return jsonify({"error": f"Unexpected error reading fallback StructureDefinition: {str(e)}"}), 500
if not sd_data:
logger.error(f"SD for '{resource_type}' could not be found in primary or fallback packages.")
return jsonify({"error": f"StructureDefinition for '{resource_type}' not found."}), 404
if raw:
return Response(json.dumps(sd_data, indent=None, separators=(',', ':')), mimetype='application/json')
snapshot_elements = sd_data.get('snapshot', {}).get('element', [])
differential_elements = sd_data.get('differential', {}).get('element', [])
differential_ids = {el.get('id') for el in differential_elements if el.get('id')}
logger.debug(f"Found {len(differential_ids)} unique IDs in differential.")
enriched_elements = []
if snapshot_elements:
logger.debug(f"Processing {len(snapshot_elements)} snapshot elements to add isInDifferential flag.")
for element in snapshot_elements:
element_id = element.get('id')
element['isInDifferential'] = bool(element_id and element_id in differential_ids)
enriched_elements.append(element)
enriched_elements = [services.remove_narrative(el, include_narrative=include_narrative) for el in enriched_elements]
else:
logger.warning(f"No snapshot found for {resource_type} in {source_package_id}. Returning empty element list.")
enriched_elements = []
must_support_paths = []
processed_ig_record = ProcessedIg.query.filter_by(package_name=package_name, version=version).first()
if processed_ig_record and processed_ig_record.must_support_elements:
ms_elements_dict = processed_ig_record.must_support_elements
must_support_paths = ms_elements_dict.get(resource_type, [])
if not must_support_paths and base_resource_type_for_sp:
must_support_paths = ms_elements_dict.get(base_resource_type_for_sp, [])
if must_support_paths:
logger.debug(f"Retrieved {len(must_support_paths)} MS paths using base type key '{base_resource_type_for_sp}' from DB.")
elif must_support_paths:
logger.debug(f"Retrieved {len(must_support_paths)} MS paths using profile key '{resource_type}' from DB.")
else:
logger.debug(f"No specific MS paths found for keys '{resource_type}' or '{base_resource_type_for_sp}' in DB.")
else:
logger.debug(f"No processed IG record or no must_support_elements found in DB for {package_name}#{version}")
if base_resource_type_for_sp and primary_package_exists:
try:
logger.info(f"Fetching SearchParameters for base type '{base_resource_type_for_sp}' from primary package {tgz_path}")
search_params_data = services.find_and_extract_search_params(tgz_path, base_resource_type_for_sp)
except Exception as e:
logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from primary package {tgz_path}: {e}", exc_info=True)
search_params_data = []
elif not primary_package_exists:
logger.warning(f"Original package {tgz_path} not found, cannot search it for specific SearchParameters.")
elif not base_resource_type_for_sp:
logger.warning(f"Base resource type could not be determined for '{resource_type}', cannot search for SearchParameters.")
if not search_params_data and base_resource_type_for_sp and core_package_exists:
logger.info(f"No relevant SearchParameters found in primary package for '{base_resource_type_for_sp}'. Searching core package {core_tgz_path}.")
try:
search_params_data = services.find_and_extract_search_params(core_tgz_path, base_resource_type_for_sp)
if search_params_data:
logger.info(f"Found {len(search_params_data)} SearchParameters for '{base_resource_type_for_sp}' in core package.")
except Exception as e:
logger.error(f"Error extracting SearchParameters for '{base_resource_type_for_sp}' from core package {core_tgz_path}: {e}", exc_info=True)
search_params_data = []
elif not search_params_data and not core_package_exists:
logger.warning(f"Core package {core_tgz_path} not found, cannot perform fallback search for SearchParameters.")
search_param_conformance_rules = {}
if base_resource_type_for_sp:
if processed_ig_record:
if hasattr(processed_ig_record, 'search_param_conformance') and processed_ig_record.search_param_conformance:
all_conformance_data = processed_ig_record.search_param_conformance
search_param_conformance_rules = all_conformance_data.get(base_resource_type_for_sp, {})
logger.debug(f"Retrieved conformance rules for {base_resource_type_for_sp} from DB: {search_param_conformance_rules}")
else:
logger.warning(f"ProcessedIg record found, but 'search_param_conformance' attribute/data is missing or empty for {package_name}#{version}.")
else:
logger.warning(f"No ProcessedIg record found for {package_name}#{version} to get conformance rules.")
if search_params_data:
logger.debug(f"Merging conformance data into {len(search_params_data)} search parameters.")
for param in search_params_data:
param_code = param.get('code')
if param_code:
conformance_level = search_param_conformance_rules.get(param_code, 'Optional')
param['conformance'] = conformance_level
else:
param['conformance'] = 'Unknown'
logger.debug("Finished merging conformance data.")
else:
logger.debug(f"No search parameters found for {base_resource_type_for_sp} to merge conformance data into.")
else:
logger.warning(f"Cannot fetch conformance data because base resource type (e.g., Patient) for '{resource_type}' could not be determined.")
for param in search_params_data:
if 'conformance' not in param or param['conformance'] == 'N/A':
param['conformance'] = 'Optional'
response_data = {
'elements': enriched_elements,
'must_support_paths': must_support_paths,
'search_parameters': search_params_data,
'fallback_used': fallback_used,
'source_package': source_package_id
}
return Response(json.dumps(response_data, indent=None, separators=(',', ':')), mimetype='application/json')
@app.route('/get-package-metadata')
def get_package_metadata():
package_name = request.args.get('package_name')
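For orientation, here is a minimal client-side sketch (not part of the commit) of how the reworked app.py endpoints above might be called; the base URL, package name, version, and example filename are assumptions for illustration only:

import requests

BASE_URL = "http://localhost:5000"  # assumed local dev server

# /get-structure now takes 'version' (not 'package_version') plus optional flags.
structure = requests.get(f"{BASE_URL}/get-structure", params={
    "package_name": "hl7.fhir.au.core",   # assumed installed package
    "version": "1.0.0",                   # assumed version
    "resource_type": "Patient",
    "include_narrative": "false",         # strip narrative from the returned SD
    "raw": "false",                       # "true" would return the unprocessed SD JSON
}).json()
print(len(structure["elements"]), "snapshot elements returned")

# /get-example keeps narrative only when include_narrative=true.
example = requests.get(f"{BASE_URL}/get-example", params={
    "package_name": "hl7.fhir.au.core",
    "version": "1.0.0",
    "filename": "package/Patient-example.json",  # hypothetical example path
    "include_narrative": "true",
}).json()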

Binary file not shown.

@ -18,5 +18,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:45.070781+00:00"
+ "timestamp": "2025-04-26T06:09:44.307543+00:00"
}

@ -30,5 +30,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:20.523471+00:00"
+ "timestamp": "2025-04-26T06:09:01.788251+00:00"
}

@ -5,5 +5,5 @@
"imported_dependencies": [],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:29.230227+00:00"
+ "timestamp": "2025-04-26T06:09:30.920844+00:00"
}

@ -10,5 +10,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:41.588025+00:00"
+ "timestamp": "2025-04-26T06:09:40.785525+00:00"
}

@ -18,5 +18,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:49.395594+00:00"
+ "timestamp": "2025-04-26T06:09:48.451981+00:00"
}

@ -10,5 +10,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:56.492512+00:00"
+ "timestamp": "2025-04-26T06:09:54.450933+00:00"
}

@ -14,5 +14,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:46.943079+00:00"
+ "timestamp": "2025-04-26T06:09:46.089555+00:00"
}

@ -10,5 +10,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:54.857273+00:00"
+ "timestamp": "2025-04-26T06:09:52.965617+00:00"
}

@ -10,5 +10,5 @@
],
"complies_with_profiles": [],
"imposed_profiles": [],
- "timestamp": "2025-04-17T04:04:37.703082+00:00"
+ "timestamp": "2025-04-26T06:09:36.852059+00:00"
}

@ -1,6 +1,6 @@
#FileLock
- #Fri Apr 25 13:11:59 UTC 2025
+ #Sat Apr 26 12:19:30 UTC 2025
- server=172.19.0.2\:35493
+ server=172.18.0.2\:38385
- hostName=499bb2429005
+ hostName=18113a0f20a7
method=file
- id=1966d138790b0be6a4873c639ee5ac2e23787fd766d
+ id=1967209d290a29cebe723a4809b58c3cf6c80af4585

Binary file not shown.

File diff suppressed because it is too large.

logs/supervisord.pid Normal file

@ -0,0 +1 @@
1

logs/tomcat.log Normal file

File diff suppressed because it is too large.


@ -244,14 +244,18 @@ def parse_package_filename(filename):
version = "" version = ""
return name, version return name, version
def remove_narrative(resource): def remove_narrative(resource, include_narrative=False):
"""Remove narrative text element from a FHIR resource.""" """Remove narrative text element from a FHIR resource if not including narrative."""
if isinstance(resource, dict): if isinstance(resource, dict) and not include_narrative:
if 'text' in resource: if 'text' in resource:
logger.debug(f"Removing narrative text from resource: {resource.get('resourceType', 'unknown')}") logger.debug(f"Removing narrative text from resource: {resource.get('resourceType', 'unknown')}")
del resource['text'] del resource['text']
if resource.get('resourceType') == 'Bundle' and 'entry' in resource: if resource.get('resourceType') == 'Bundle' and 'entry' in resource:
resource['entry'] = [dict(entry, resource=remove_narrative(entry.get('resource'))) if entry.get('resource') else entry for entry in resource['entry']] resource['entry'] = [
dict(entry, resource=remove_narrative(entry.get('resource'), include_narrative))
if entry.get('resource') else entry
for entry in resource['entry']
]
return resource return resource
def get_cached_structure(package_name, package_version, resource_type, view):
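The remove_narrative change above is easiest to see with a small standalone sketch (not part of the commit); it assumes the updated helper is importable from this repo's services module:

from copy import deepcopy
from services import remove_narrative  # this repo's module

bundle = {
    "resourceType": "Bundle",
    "entry": [{"resource": {"resourceType": "Patient",
                            "text": {"status": "generated", "div": "<div>example</div>"}}}],
}

stripped = remove_narrative(deepcopy(bundle))                      # default: narrative stripped, including Bundle entries
kept = remove_narrative(deepcopy(bundle), include_narrative=True)  # new flag: resource returned untouched

assert "text" not in stripped["entry"][0]["resource"]
assert "text" in kept["entry"][0]["resource"]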
@ -300,7 +304,7 @@ def cache_structure(package_name, package_version, resource_type, view, structur
except Exception as e:
logger.error(f"Error caching structure: {e}", exc_info=True)
- def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None):
+ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None, include_narrative=False, raw=False):
"""Helper to find and extract StructureDefinition json from a tgz path, prioritizing profile match."""
sd_data = None
found_path = None
@ -310,25 +314,19 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None):
try:
with tarfile.open(tgz_path, "r:gz") as tar:
logger.debug(f"Searching for SD matching '{resource_identifier}' with profile '{profile_url}' in {os.path.basename(tgz_path)}")
- # Store potential matches to evaluate the best one at the end
- potential_matches = [] # Store tuples of (precision_score, data, member_name)
+ potential_matches = []
for member in tar:
if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')):
continue
- # Skip common metadata files
if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']:
continue
fileobj = None
try:
fileobj = tar.extractfile(member)
if fileobj:
content_bytes = fileobj.read()
- # Handle potential BOM (Byte Order Mark)
content_string = content_bytes.decode('utf-8-sig')
data = json.loads(content_string)
if isinstance(data, dict) and data.get('resourceType') == 'StructureDefinition':
sd_id = data.get('id')
sd_name = data.get('name')
@ -337,57 +335,32 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None):
sd_filename_base = os.path.splitext(os.path.basename(member.name))[0]
sd_filename_lower = sd_filename_base.lower()
resource_identifier_lower = resource_identifier.lower() if resource_identifier else None
- # logger.debug(f"Checking SD: id={sd_id}, name={sd_name}, type={sd_type}, url={sd_url}, file={sd_filename_lower} against identifier='{resource_identifier}'")
- match_score = 0 # Higher score means more precise match
- # Highest precision: Exact match on profile_url
+ match_score = 0
if profile_url and sd_url == profile_url:
match_score = 5
- logger.debug(f"Exact match found based on profile_url: {profile_url}")
- # If we find the exact profile URL, this is the best possible match.
- sd_data = remove_narrative(data)
+ sd_data = remove_narrative(data, include_narrative)
found_path = member.name
- logger.info(f"Found definitive SD matching profile '{profile_url}' at path: {found_path}. Stopping search.")
+ logger.info(f"Found definitive SD matching profile '{profile_url}' at path: {found_path}")
- break # Stop searching immediately
+ break
- # Next highest precision: Exact match on id or name
elif resource_identifier_lower:
if sd_id and resource_identifier_lower == sd_id.lower():
match_score = 4
- logger.debug(f"Match found based on exact sd_id: {sd_id}")
elif sd_name and resource_identifier_lower == sd_name.lower():
match_score = 4
- logger.debug(f"Match found based on exact sd_name: {sd_name}")
- # Next: Match filename pattern "StructureDefinition-{identifier}.json"
elif sd_filename_lower == f"structuredefinition-{resource_identifier_lower}":
match_score = 3
- logger.debug(f"Match found based on exact filename pattern: {member.name}")
- # Next: Match on type ONLY if the identifier looks like a base type (no hyphens/dots)
elif sd_type and resource_identifier_lower == sd_type.lower() and not re.search(r'[-.]', resource_identifier):
match_score = 2
- logger.debug(f"Match found based on sd_type (simple identifier): {sd_type}")
- # Lower precision: Check if identifier is IN the filename
elif resource_identifier_lower in sd_filename_lower:
match_score = 1
- logger.debug(f"Potential match based on identifier in filename: {member.name}")
- # Lowest precision: Check if identifier is IN the URL
elif sd_url and resource_identifier_lower in sd_url.lower():
match_score = 1
- logger.debug(f"Potential match based on identifier in url: {sd_url}")
if match_score > 0:
- potential_matches.append((match_score, remove_narrative(data), member.name))
+ potential_matches.append((match_score, remove_narrative(data, include_narrative), member.name))
- if match_score >= 3: # Exact ID, Name, or Filename pattern
- logger.info(f"Found high-confidence match for '{resource_identifier}' ({member.name}), stopping search.")
- # Set sd_data here and break
- sd_data = remove_narrative(data)
+ if match_score >= 3:
+ sd_data = remove_narrative(data, include_narrative)
found_path = member.name
break
except json.JSONDecodeError as e:
logger.debug(f"Could not parse JSON in {member.name}, skipping: {e}")
except UnicodeDecodeError as e:
@ -395,36 +368,38 @@ def find_and_extract_sd(tgz_path, resource_identifier, profile_url=None):
except tarfile.TarError as e:
logger.warning(f"Tar error reading member {member.name}, skipping: {e}")
except Exception as e:
- logger.warning(f"Could not read/parse potential SD {member.name}, skipping: {e}", exc_info=False)
+ logger.warning(f"Could not read/parse potential SD {member.name}, skipping: {e}")
finally:
if fileobj:
fileobj.close()
- # If the loop finished without finding an exact profile_url or high-confidence match (score >= 3)
if not sd_data and potential_matches:
- # Sort potential matches by score (highest first)
potential_matches.sort(key=lambda x: x[0], reverse=True)
best_match = potential_matches[0]
sd_data = best_match[1]
found_path = best_match[2]
logger.info(f"Selected best match for '{resource_identifier}' from potential matches (Score: {best_match[0]}): {found_path}")
if sd_data is None:
logger.info(f"SD matching identifier '{resource_identifier}' or profile '{profile_url}' not found within archive {os.path.basename(tgz_path)}")
+ elif raw:
+ # Return the full, unprocessed StructureDefinition JSON
+ with tarfile.open(tgz_path, "r:gz") as tar:
+ fileobj = tar.extractfile(found_path)
+ content_bytes = fileobj.read()
+ content_string = content_bytes.decode('utf-8-sig')
+ raw_data = json.loads(content_string)
+ return remove_narrative(raw_data, include_narrative), found_path
except tarfile.ReadError as e:
logger.error(f"Tar ReadError reading {tgz_path}: {e}")
return None, None
except tarfile.TarError as e:
logger.error(f"TarError reading {tgz_path} in find_and_extract_sd: {e}")
- raise # Re-raise critical tar errors
+ raise
except FileNotFoundError:
logger.error(f"FileNotFoundError reading {tgz_path} in find_and_extract_sd.")
- raise # Re-raise critical file errors
+ raise
except Exception as e:
logger.error(f"Unexpected error in find_and_extract_sd for {tgz_path}: {e}", exc_info=True)
- raise # Re-raise unexpected errors
+ raise
return sd_data, found_path
# --- Metadata Saving/Loading ---
@ -2056,25 +2031,19 @@ def process_fhir_input(input_mode, fhir_file, fhir_text, alias_file=None):
# --- ADD THIS NEW FUNCTION TO services.py ---
def find_and_extract_search_params(tgz_path, base_resource_type):
- """
- Finds and extracts SearchParameter resources relevant to a given base resource type
- from a FHIR package tgz file.
- """
+ """Finds and extracts SearchParameter resources relevant to a given base resource type from a FHIR package tgz file."""
search_params = []
if not tgz_path or not os.path.exists(tgz_path):
logger.error(f"Package file not found for SearchParameter extraction: {tgz_path}")
- return search_params # Return empty list on error
+ return search_params
logger.debug(f"Searching for SearchParameters based on '{base_resource_type}' in {os.path.basename(tgz_path)}")
try:
with tarfile.open(tgz_path, "r:gz") as tar:
for member in tar:
- # Basic filtering for JSON files in package directory, excluding common metadata
if not (member.isfile() and member.name.startswith('package/') and member.name.lower().endswith('.json')):
continue
if os.path.basename(member.name).lower() in ['package.json', '.index.json', 'validation-summary.json', 'validation-oo.json']:
continue
fileobj = None
try:
fileobj = tar.extractfile(member)
@ -2082,31 +2051,23 @@ def find_and_extract_search_params(tgz_path, base_resource_type):
content_bytes = fileobj.read()
content_string = content_bytes.decode('utf-8-sig')
data = json.loads(content_string)
- # Check if it's a SearchParameter resource
if isinstance(data, dict) and data.get('resourceType') == 'SearchParameter':
- # Check if the SearchParameter applies to the requested base resource type
- sp_bases = data.get('base', []) # 'base' is a list of applicable resource types
+ sp_bases = data.get('base', [])
if base_resource_type in sp_bases:
- # Extract relevant information
param_info = {
'id': data.get('id'),
'url': data.get('url'),
'name': data.get('name'),
'description': data.get('description'),
- 'code': data.get('code'), # The actual parameter name used in searches
+ 'code': data.get('code'),
- 'type': data.get('type'), # e.g., token, reference, date
+ 'type': data.get('type'),
- 'expression': data.get('expression'), # FHIRPath expression
+ 'expression': data.get('expression'),
'base': sp_bases,
- # NOTE: Conformance (mandatory/optional) usually comes from CapabilityStatement,
- # which is not processed here. Add placeholders or leave out for now.
- 'conformance': 'N/A', # Placeholder
- 'is_mandatory': False # Placeholder
+ 'conformance': 'N/A',
+ 'is_mandatory': False
}
search_params.append(param_info)
logger.debug(f"Found relevant SearchParameter: {param_info.get('name')} (ID: {param_info.get('id')}) for base {base_resource_type}")
- # --- Error handling for individual file processing ---
except json.JSONDecodeError as e:
logger.debug(f"Could not parse JSON for SearchParameter in {member.name}, skipping: {e}")
except UnicodeDecodeError as e:
@ -2118,8 +2079,6 @@ def find_and_extract_search_params(tgz_path, base_resource_type):
finally:
if fileobj:
fileobj.close()
- # --- Error handling for opening/reading the tgz file ---
except tarfile.ReadError as e:
logger.error(f"Tar ReadError extracting SearchParameters from {tgz_path}: {e}")
except tarfile.TarError as e:
@ -2128,7 +2087,6 @@ def find_and_extract_search_params(tgz_path, base_resource_type):
logger.error(f"Package file not found during SearchParameter extraction: {tgz_path}") logger.error(f"Package file not found during SearchParameter extraction: {tgz_path}")
except Exception as e: except Exception as e:
logger.error(f"Unexpected error extracting SearchParameters from {tgz_path}: {e}", exc_info=True) logger.error(f"Unexpected error extracting SearchParameters from {tgz_path}: {e}", exc_info=True)
logger.info(f"Found {len(search_params)} SearchParameters relevant to '{base_resource_type}' in {os.path.basename(tgz_path)}") logger.info(f"Found {len(search_params)} SearchParameters relevant to '{base_resource_type}' in {os.path.basename(tgz_path)}")
return search_params return search_params
# --- END OF NEW FUNCTION --- # --- END OF NEW FUNCTION ---
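As a rough usage sketch of the two helpers changed in this file (not part of the commit; the archive path, package name, and version are assumptions for illustration):

import os
from services import find_and_extract_sd, find_and_extract_search_params  # this repo's module

tgz_path = os.path.join("instance", "fhir_packages", "hl7.fhir.au.core-1.0.0.tgz")  # hypothetical archive

# New keyword arguments: profile_url pins an exact canonical URL, include_narrative keeps
# the narrative text, and raw=True returns the unprocessed StructureDefinition JSON.
sd, found_in = find_and_extract_sd(tgz_path, "Patient", profile_url=None, include_narrative=False, raw=False)

if sd:
    # SearchParameters are still looked up by the base resource type; conformance defaults to
    # 'N/A' here and is overlaid later from the ProcessedIg record in /get-structure.
    params = find_and_extract_search_params(tgz_path, sd.get("type"))
    print(f"{found_in}: {len(params)} search parameters found")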


@ -8,13 +8,6 @@
<p class="lead mb-4"> <p class="lead mb-4">
View details of the processed FHIR Implementation Guide. View details of the processed FHIR Implementation Guide.
</p> </p>
<!-----------------------------------------------------------------remove the buttons-----------------------------------------------------
<div class="d-grid gap-2 d-sm-flex justify-content-sm-center">
<a href="{{ url_for('index') }}" class="btn btn-primary btn-lg px-4 gap-3">Back to Home</a>
<a href="{{ url_for('view_igs') }}" class="btn btn-outline-secondary btn-lg px-4">Manage FHIR Packages</a>
<a href="{{ url_for('push_igs') }}" class="btn btn-outline-secondary btn-lg px-4">Upload IG's</a>
</div>
----------------------------------------------------------------remove the buttons----------------------------------------------------------------------------->
</div> </div>
</div> </div>
@ -269,7 +262,7 @@
<div id="raw-structure-wrapper" class="mt-4" style="display: none;"> <div id="raw-structure-wrapper" class="mt-4" style="display: none;">
<div class="card"> <div class="card">
<div class="card-header d-flex justify-content-between align-items-center"> <div class="card-header d-flex justify-content-between align-itemsitum-center">
<span>Raw Structure Definition for <code id="raw-structure-title"></code></span> <span>Raw Structure Definition for <code id="raw-structure-title"></code></span>
<button type="button" class="btn btn-sm btn-outline-secondary btn-copy" id="copy-raw-def-button" <button type="button" class="btn btn-sm btn-outline-secondary btn-copy" id="copy-raw-def-button"
data-bs-toggle="tooltip" data-bs-placement="top" title="Copy Raw Definition JSON"> data-bs-toggle="tooltip" data-bs-placement="top" title="Copy Raw Definition JSON">
@ -285,7 +278,6 @@
</div>
</div>
</div>
<div id="example-display-wrapper" class="mt-4" style="display: none;">
<div class="card">
<div class="card-header">Examples for <code id="example-resource-type-title"></code></div>
@ -298,6 +290,10 @@
<div class="spinner-border spinner-border-sm text-secondary" role="status"><span class="visually-hidden">Loading...</span></div> <div class="spinner-border spinner-border-sm text-secondary" role="status"><span class="visually-hidden">Loading...</span></div>
</div> </div>
<div id="example-content-wrapper" style="display: none;"> <div id="example-content-wrapper" style="display: none;">
<div class="form-check mb-2">
<input type="checkbox" class="form-check-input" id="includeNarrative" checked>
<label class="form-check-label" for="includeNarrative">Include Narrative</label>
</div>
<h6 id="example-filename" class="mt-2 small text-muted"></h6> <h6 id="example-filename" class="mt-2 small text-muted"></h6>
<div class="row"> <div class="row">
<div class="col-md-6"> <div class="col-md-6">
@ -352,6 +348,7 @@ let copyRawDefTooltipInstance = null;
let copyRawExTooltipInstance = null;
let copyPrettyJsonTooltipInstance = null;
let structureDataCache = {}; // Cache for fetched structure data
+ let includeNarrativeCheckbox; // New global for narrative toggle
// --- Global Helper Functions ---
function parseFhirPath(path) {
@ -379,14 +376,12 @@ function getNodeKey(element) {
const baseKey = (element.id && element.id.includes('.')) ? element.id : element.path;
if (!baseKey) {
console.warn("Cannot generate key: element missing structured id and path", element);
- // Provide a fallback key if absolutely necessary, though this indicates data issues
return `no-key-${Math.random()}`;
}
// Append sliceName if present to ensure uniqueness for slices
return element.sliceName ? `${baseKey}::slice::${element.sliceName}` : baseKey;
}
// Helper function to initialize a node
function createNode(path, name, parentPathKey, element = null) {
// Determine flags based on element data passed from backend
@ -458,9 +453,6 @@ function flagSlicingEntriesWithMSSlices(node) {
// Flag THIS node if it defines slicing AND (a direct child is an MS slice OR a deeper descendant is an MS slice)
if (node.element?.slicing && (directChildIsMSSlice || containedMSSliceInChildren)) {
node.containsMustSupportSlice = true;
- // --- DEBUG LOG ---
- // console.log(`>>> Flagged Slicing Entry ${node.path} as containsMustSupportSlice = true (DirectChild: ${directChildIsMSSlice}, Descendant: ${containedMSSliceInChildren})`);
- // --- END DEBUG LOG ---
}
// Return true if THIS node is an MS slice OR if it was flagged as containing one below it
@ -468,7 +460,6 @@ function flagSlicingEntriesWithMSSlices(node) {
return isMustSupportSlice || node.containsMustSupportSlice === true;
} // End flagSlicingEntriesWithMSSlices V2
// --- Build Tree Data - V13 (Correct Parent Finding for Nested Slices) ---
function buildTreeData(elements, view, mustSupportPaths) {
const treeData = [];
@ -524,13 +515,6 @@ function buildTreeData(elements, view, mustSupportPaths) {
const nodeKey = getNodeKey(element);
const isSlice = !!element.sliceName;
- // Optional Debug Log
- const debugPaths = ['Observation.component', 'Observation.component.code', 'Observation.component.code.coding'];
- const shouldDebug = debugPaths.some(p => path?.startsWith(p));
- // if (shouldDebug) { // Keep this commented out unless actively debugging parent finding
- // console.log(`[Debug] Processing element ${index}: Path='${path}', SliceName='${element.sliceName}', ID='${id}', Key='${nodeKey}'`);
- // }
if (!path || !nodeKey) { console.warn(`Skipping element ${index} with missing path/key`, element); return; }
// Root element handling (Keep V12 logic)
@ -558,12 +542,9 @@ function buildTreeData(elements, view, mustSupportPaths) {
// Derive the expected parent ID/Path from the slice's ID or Path
if (id && id.includes('.')) {
// Try deriving from ID first (more specific)
- // Example: id = Observation.component:DiastolicBP.code.coding:DBPCode
- // We want the parent ID = Observation.component:DiastolicBP.code.coding
const idParts = id.split(':');
if (idParts.length > 1) {
const parentIdGuess = idParts.slice(0, -1).join(':');
- // Check if the parent ID ends with a path segment (contains '.')
if (parentIdGuess.includes('.')) {
expectedParentId = parentIdGuess;
}
@ -572,7 +553,6 @@ function buildTreeData(elements, view, mustSupportPaths) {
if (!expectedParentId && path && path.includes('.')) {
expectedParentPath = path; // The base path is the parent slicing entry path
}
} else if (path && path.includes('.')) {
// If no complex ID, use the path
expectedParentPath = path;
@ -593,7 +573,6 @@ function buildTreeData(elements, view, mustSupportPaths) {
parentElement = slicingEntryElement;
parentNodeKey = getNodeKey(parentElement);
parentFound = true;
- // console.log(`[Debug] Parent Check 1 (Slice V13): ${nodeKey} -> ${parentNodeKey} (ParentID: ${expectedParentId}, ParentPath: ${expectedParentPath})`);
} else {
console.warn(`Slicing entry element NOT FOUND for expected parent (ID: ${expectedParentId}, Path: ${expectedParentPath}) needed by slice '${nodeKey}'. Assigning root.`);
parentElement = rootElementData;
@ -601,7 +580,7 @@ function buildTreeData(elements, view, mustSupportPaths) {
parentFound = true; // Consider parent determined (fallback)
}
}
- // Check 2: Descendant of a Slice (using ID structure) (Keep V12 logic - seems okay from logs)
+ // Check 2: Descendant of a Slice (using ID structure) (Keep V12 logic)
else if (id && id.includes(':') && id.includes('.')) {
const idParts = id.split('.');
let potentialSliceParentId = null;
@ -615,13 +594,11 @@ function buildTreeData(elements, view, mustSupportPaths) {
const potentialParentNode = nodeMap.get(potentialParentKey);
if (potentialParentNode?.element) {
parentNodeKey = potentialParentKey; parentElement = potentialParentNode.element; parentFound = true;
- // console.log(`[Debug] Parent Check 2 (Slice Descendant ID V13): ${nodeKey} -> ${parentNodeKey} (via Map Key ${potentialParentKey})`);
} else { console.warn(`Node found in map for key ${potentialParentKey} but its element was missing. Falling back.`); }
} else {
const sliceParentElement = elements.find(el => el.id === potentialSliceParentId);
if (sliceParentElement) {
parentNodeKey = getNodeKey(sliceParentElement); parentElement = sliceParentElement; parentFound = true;
- // console.log(`[Debug] Parent Check 2 (Slice Descendant ID V13): ${nodeKey} -> ${parentNodeKey} (via Element ID ${potentialSliceParentId})`);
} else { console.warn(`Slice parent element ID '${potentialSliceParentId}' not found for descendant '${id}'. Falling back.`); }
}
}
@ -642,16 +619,13 @@ function buildTreeData(elements, view, mustSupportPaths) {
parentElement = potentialParentNode.element;
parentNodeKey = getNodeKey(parentElement);
parentFound = true;
- // console.log(`[Debug] Parent Check 3/4 (Path V13): ${nodeKey} -> ${parentNodeKey}`);
} else {
parentElement = rootElementData; parentNodeKey = getNodeKey(parentElement);
parentFound = true;
- // console.warn(`[Debug] Parent Check 3/4 (Path V13): Parent node NOT FOUND for path '${parentPathString}' needed by '${nodeKey}'. Assigning root.`);
}
} else {
parentElement = rootElementData; parentNodeKey = getNodeKey(parentElement);
parentFound = true;
- // console.log(`[Debug] Parent Check 3/4 (Path V13): Parent is root for ${nodeKey}`);
}
}
// --- End Parent Finding Logic ---
@ -683,7 +657,6 @@ function buildTreeData(elements, view, mustSupportPaths) {
if (parentNode) { if (parentNode) {
if (!parentNode.children) parentNode.children = []; if (!parentNode.children) parentNode.children = [];
parentNode.children.push(newNode); parentNode.children.push(newNode);
// console.log(`[Debug] Attaching new node ${nodeKey} to parent ${parentNodeKey}`);
} else { } else {
console.error(`Cannot add child ${nodeKey} as parent node object is unexpectedly null.`); console.error(`Cannot add child ${nodeKey} as parent node object is unexpectedly null.`);
} }
@ -727,12 +700,8 @@ function buildTreeData(elements, view, mustSupportPaths) {
return treeData; return treeData;
} // End buildTreeData V13 } // End buildTreeData V13
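The slice-descendant fallback in Check 2 above derives a candidate parent by dropping the last segment of the element id and looking that id up in the node map. A minimal sketch of that lookup, assuming a plain Map keyed by element id rather than the exact V13 key scheme:

// Sketch only: resolve the parent of a sliced element from its id,
// e.g. "Patient.identifier:ihi.value" -> "Patient.identifier:ihi".
function findSliceParentKey(elementId, nodeMap) {
    const idParts = elementId.split('.');
    if (idParts.length < 2) return null;                  // root-level id, nothing to derive
    const candidateId = idParts.slice(0, -1).join('.');
    return nodeMap.has(candidateId) ? candidateId : null; // caller falls back to path matching if null
}

// Hypothetical usage:
const demoMap = new Map([['Patient.identifier:ihi', { element: { id: 'Patient.identifier:ihi' } }]]);
console.log(findSliceParentKey('Patient.identifier:ihi.value', demoMap)); // "Patient.identifier:ihi"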
// --- Render Node as List Item - V19 (Fixes isSlice error, keeps V18 logic) --- // --- Render Node as List Item - V19 (Fixes isSlice error, keeps V18 logic) ---
function renderNodeAsLi(node, mustSupportPathsSet, resourceType, level = 0, view = 'snapshot') { function renderNodeAsLi(node, mustSupportPathsSet, resourceType, level = 0, view = 'snapshot') {
// --- Constants for Primitive Types ---
// REMOVED PRIMITIVE_TYPES check to align with reference rendering
if (!node || !node.path || !node.name) { console.warn(`Skipping render for invalid node:`, node); return ''; } if (!node || !node.path || !node.name) { console.warn(`Skipping render for invalid node:`, node); return ''; }
const el = node.element || {}; const el = node.element || {};
const path = el.path || node.path || `node-${level}-${Math.random()}`; const path = el.path || node.path || `node-${level}-${Math.random()}`;
@ -758,8 +727,6 @@ function renderNodeAsLi(node, mustSupportPathsSet, resourceType, level = 0, view
mustSupportTitle = `Must Support (Slice: ${sliceName})`; mustSupportTitle = `Must Support (Slice: ${sliceName})`;
fhirPathExpression = `${path}[sliceName='${sliceName}']`; fhirPathExpression = `${path}[sliceName='${sliceName}']`;
} }
// Optional Debug Log for highlighting
// if (node.path === 'Patient.extension' || node.path === 'Patient.identifier') { console.log(...) }
const msIconClass = (isMustSupport || node.containsMustSupportSlice) ? 'text-warning' : ''; const msIconClass = (isMustSupport || node.containsMustSupportSlice) ? 'text-warning' : '';
const mustSupportDisplay = (isMustSupport || node.containsMustSupportSlice) ? `<i class="bi bi-check-circle-fill ${msIconClass} ms-1" title="${escapeHtmlAttr(mustSupportTitle)}" data-bs-toggle="tooltip" data-fhirpath="${escapeHtmlAttr(fhirPathExpression)}"></i>` : ''; const mustSupportDisplay = (isMustSupport || node.containsMustSupportSlice) ? `<i class="bi bi-check-circle-fill ${msIconClass} ms-1" title="${escapeHtmlAttr(mustSupportTitle)}" data-bs-toggle="tooltip" data-fhirpath="${escapeHtmlAttr(fhirPathExpression)}"></i>` : '';
@ -798,7 +765,6 @@ function renderNodeAsLi(node, mustSupportPathsSet, resourceType, level = 0, view
} else if (el.contentReference) { typeString = `<span class="text-info" title="Content Reference">Ref: ${escapeHtmlAttr(el.contentReference)}</span>`; } } else if (el.contentReference) { typeString = `<span class="text-info" title="Content Reference">Ref: ${escapeHtmlAttr(el.contentReference)}</span>`; }
else if (el.sliceName && path.endsWith('extension')) { typeString = '<span class="text-muted">Extension</span>'; } else if (el.sliceName && path.endsWith('extension')) { typeString = '<span class="text-muted">Extension</span>'; }
// Children HTML / Choice Type Display (V18 logic - reverted primitive filtering) // Children HTML / Choice Type Display (V18 logic - reverted primitive filtering)
let childrenOrChoiceHtml = ''; let childrenOrChoiceHtml = '';
if (showToggle) { if (showToggle) {
@ -863,7 +829,6 @@ function renderNodeAsLi(node, mustSupportPathsSet, resourceType, level = 0, view
return itemHtml; return itemHtml;
} // End renderNodeAsLi (V19) } // End renderNodeAsLi (V19)
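renderNodeAsLi escapes everything it interpolates into HTML attributes through escapeHtmlAttr, which is defined elsewhere in this template. A minimal stand-in with the usual attribute-safe replacements (an assumption, not the template's actual helper):

// Sketch only: escape a value for use inside a double-quoted HTML attribute.
function escapeHtmlAttr(value) {
    return String(value ?? '')
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');
}
console.log(escapeHtmlAttr('Must Support (Slice: "official")')); // double quotes become &quot;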
// --- Function to Initialize Collapse Icon Listeners (Bootstrap Events) V3 - Explicit Cleanup --- // --- Function to Initialize Collapse Icon Listeners (Bootstrap Events) V3 - Explicit Cleanup ---
function initializeBootstrapCollapseListeners(containerElement) { function initializeBootstrapCollapseListeners(containerElement) {
if (!containerElement) { return; } if (!containerElement) { return; }
@ -876,8 +841,8 @@ function initializeBootstrapCollapseListeners(containerElement) {
if (!triggerEl) { console.warn(`Trigger not found for #${collapseId}`); } if (!triggerEl) { console.warn(`Trigger not found for #${collapseId}`); }
if (collapseEl._handleShow && typeof collapseEl._handleShow === 'function') { collapseEl.removeEventListener('show.bs.collapse', collapseEl._handleShow); } if (collapseEl._handleShow && typeof collapseEl._handleShow === 'function') { collapseEl.removeEventListener('show.bs.collapse', collapseEl._handleShow); }
if (collapseEl._handleHide && typeof collapseEl._handleHide === 'function') { collapseEl.removeEventListener('hide.bs.collapse', collapseEl._handleHide); } if (collapseEl._handleHide && typeof collapseEl._handleHide === 'function') { collapseEl.removeEventListener('hide.bs.collapse', collapseEl._handleHide); }
const handleShow = (event) => { if (event.target === collapseEl && icon) { icon.classList.replace('bi-chevron-right', 'bi-chevron-down'); } /* console.log(...) */ }; const handleShow = (event) => { if (event.target === collapseEl && icon) { icon.classList.replace('bi-chevron-right', 'bi-chevron-down'); } };
const handleHide = (event) => { if (event.target === collapseEl && icon) { icon.classList.replace('bi-chevron-down', 'bi-chevron-right'); } /* console.log(...) */ }; const handleHide = (event) => { if (event.target === collapseEl && icon) { icon.classList.replace('bi-chevron-down', 'bi-chevron-right'); } };
collapseEl._handleShow = handleShow; collapseEl._handleHide = handleHide; collapseEl._handleShow = handleShow; collapseEl._handleHide = handleHide;
collapseEl.addEventListener('show.bs.collapse', handleShow); collapseEl.addEventListener('show.bs.collapse', handleShow);
collapseEl.addEventListener('hide.bs.collapse', handleHide); collapseEl.addEventListener('hide.bs.collapse', handleHide);
@ -885,15 +850,14 @@ function initializeBootstrapCollapseListeners(containerElement) {
}); });
} // End initializeBootstrapCollapseListeners V3 } // End initializeBootstrapCollapseListeners V3
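The V3 listener setup keeps a reference to each handler on the collapse element (_handleShow/_handleHide) so it can be removed before a new one is attached; without that, every re-render would stack another pair of listeners. The pattern on its own, with a generic property name chosen purely for illustration:

// Sketch only: attach a handler idempotently by remembering the previous one on the element.
function attachOnce(el, eventName, propName, handler) {
    if (typeof el[propName] === 'function') {
        el.removeEventListener(eventName, el[propName]); // drop the handler left over from the last render
    }
    el[propName] = handler;
    el.addEventListener(eventName, handler);
}
// e.g. attachOnce(collapseEl, 'show.bs.collapse', '_handleShow', ev => { /* flip the chevron icon */ });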
// --- Render Structure Tree (Calls V13 buildTreeData, V19 renderNode, V3 initBSCollapse) ---
// --- Render Structure Tree (Calls V11 buildTreeData, V19 renderNode, V3 initBSCollapse) ---
async function renderStructureTree(resourceType, view, snapshotElements, mustSupportPaths) { // Accepts snapshot elements async function renderStructureTree(resourceType, view, snapshotElements, mustSupportPaths) { // Accepts snapshot elements
const treeContainer = document.getElementById(`structure-tree-${view}`); const treeContainer = document.getElementById(`structure-tree-${view}`);
if (!treeContainer) { console.error(`Tree container missing: ${view}`); return; } if (!treeContainer) { console.error(`Tree container missing: ${view}`); return; }
treeContainer.innerHTML = ''; treeContainer.innerHTML = '';
try { try {
// Build tree using V11 buildTreeData and snapshot data // Build tree using V13 buildTreeData and snapshot data
const treeData = buildTreeData(snapshotElements, view, mustSupportPaths); const treeData = buildTreeData(snapshotElements, view, mustSupportPaths);
// Render HTML using V19 renderNodeAsLi // Render HTML using V19 renderNodeAsLi
@ -908,7 +872,13 @@ async function renderStructureTree(resourceType, view, snapshotElements, mustSup
// Init listeners/tooltips // Init listeners/tooltips
initializeBootstrapCollapseListeners(treeContainer); // V3 initializeBootstrapCollapseListeners(treeContainer); // V3
const tooltipTriggerList = treeContainer.querySelectorAll('[data-bs-toggle="tooltip"]'); const tooltipTriggerList = treeContainer.querySelectorAll('[data-bs-toggle="tooltip"]');
tooltipTriggerList.forEach(tooltipTriggerEl => { const e=bootstrap.Tooltip.getInstance(tooltipTriggerEl); if(e){e.dispose();} if(tooltipTriggerEl.getAttribute('title')||tooltipTriggerEl.getAttribute('data-bs-original-title')){new bootstrap.Tooltip(tooltipTriggerEl);} }); tooltipTriggerList.forEach(tooltipTriggerEl => {
const e = bootstrap.Tooltip.getInstance(tooltipTriggerEl);
if (e) { e.dispose(); }
if (tooltipTriggerEl.getAttribute('title') || tooltipTriggerEl.getAttribute('data-bs-original-title')) {
new bootstrap.Tooltip(tooltipTriggerEl);
}
});
} catch (error) { } catch (error) {
console.error(`Error rendering ${view} tree:`, error); console.error(`Error rendering ${view} tree:`, error);
@ -916,15 +886,21 @@ async function renderStructureTree(resourceType, view, snapshotElements, mustSup
} }
} // End renderStructureTree } // End renderStructureTree
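Because the tree HTML is rebuilt on every call, renderStructureTree disposes any existing Bootstrap tooltip before creating a new one for each trigger. The same loop extracted as a helper (assumes Bootstrap 5's global bootstrap.Tooltip, as used throughout this template):

// Sketch only: (re)initialise tooltips inside a freshly rendered container.
function refreshTooltips(container) {
    container.querySelectorAll('[data-bs-toggle="tooltip"]').forEach(el => {
        bootstrap.Tooltip.getInstance(el)?.dispose(); // avoid orphaned instances pointing at replaced DOM
        if (el.getAttribute('title') || el.getAttribute('data-bs-original-title')) {
            new bootstrap.Tooltip(el);
        }
    });
}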
// --- Fetch Structure Definition for Specific View --- // --- Fetch Structure Definition for Specific View ---
async function fetchStructure(resourceType, view) { async function fetchStructure(resourceType, view, includeNarrative, raw = false) {
const cacheKey = `${resourceType}-${view}`; const cacheKey = `${resourceType}-${view}-${includeNarrative}-${raw}`;
if (structureDataCache[cacheKey]) { if (structureDataCache[cacheKey]) {
try { return JSON.parse(JSON.stringify(structureDataCache[cacheKey])); } catch (e) { return structureDataCache[cacheKey]; } try { return JSON.parse(JSON.stringify(structureDataCache[cacheKey])); } catch (e) { return structureDataCache[cacheKey]; }
} }
const structureBaseUrl = "{{ url_for('get_structure') }}"; const structureBaseUrl = "{{ url_for('get_structure') }}";
const params = new URLSearchParams({ package_name: "{{ processed_ig.package_name }}", package_version: "{{ processed_ig.version }}", resource_type: resourceType, view: view }); const params = new URLSearchParams({
package_name: "{{ processed_ig.package_name }}",
version: "{{ processed_ig.version }}",
resource_type: resourceType,
view: view,
include_narrative: includeNarrative,
raw: raw
});
const fetchUrl = `${structureBaseUrl}?${params.toString()}`; const fetchUrl = `${structureBaseUrl}?${params.toString()}`;
try { try {
const response = await fetch(fetchUrl); const response = await fetch(fetchUrl);
@ -932,13 +908,11 @@ async function fetchStructure(resourceType, view) {
const data = await response.json(); const data = await response.json();
try { structureDataCache[cacheKey] = JSON.parse(JSON.stringify(data)); } catch(e) { structureDataCache[cacheKey] = data; } try { structureDataCache[cacheKey] = JSON.parse(JSON.stringify(data)); } catch(e) { structureDataCache[cacheKey] = data; }
return JSON.parse(JSON.stringify(data)); return JSON.parse(JSON.stringify(data));
} catch (error) { console.error(`Error fetching ${view} structure:`, error); throw error; } } catch (error) { console.error(`Error fetching ${raw ? 'raw' : view} structure:`, error); throw error; }
} }
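fetchStructure now keys its cache on every argument that changes the server response (resource type, view, include_narrative, raw) and hands out deep copies via JSON round-tripping so callers cannot mutate the cached object. The caching shape reduced to its essentials, with a local cache object standing in for the template's structureDataCache:

// Sketch only: cache JSON responses per composite key and return defensive copies.
const responseCache = {};
async function cachedFetchJson(url, cacheKey) {
    if (responseCache[cacheKey]) {
        return JSON.parse(JSON.stringify(responseCache[cacheKey]));
    }
    const response = await fetch(url);
    if (!response.ok) { throw new Error(`HTTP error ${response.status}`); }
    const data = await response.json();
    responseCache[cacheKey] = JSON.parse(JSON.stringify(data)); // store a copy, not the live object
    return JSON.parse(JSON.stringify(data));
}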
// --- Populate Structure for All Views - V5 (Add Search Param Logging, Include Narrative) ---
// --- Populate Structure for All Views - V4 (Add Search Param Logging) ---
async function populateStructure(resourceType) { async function populateStructure(resourceType) {
// ... (keep existing variable assignments and UI resets at the beginning) ...
if (structureTitle) structureTitle.textContent = `${resourceType} ({{ processed_ig.package_name }}#{{ processed_ig.version }})`; if (structureTitle) structureTitle.textContent = `${resourceType} ({{ processed_ig.package_name }}#{{ processed_ig.version }})`;
if (rawStructureTitle) rawStructureTitle.textContent = `${resourceType} ({{ processed_ig.package_name }}#{{ processed_ig.version }})`; if (rawStructureTitle) rawStructureTitle.textContent = `${resourceType} ({{ processed_ig.package_name }}#{{ processed_ig.version }})`;
if (structureLoading) structureLoading.style.display = 'block'; if (structureLoading) structureLoading.style.display = 'block';
@ -955,70 +929,51 @@ async function populateStructure(resourceType) {
if (bindingsTableBody) bindingsTableBody.innerHTML = '<tr><td colspan="4" class="text-center text-muted fst-italic">Loading...</td></tr>'; if (bindingsTableBody) bindingsTableBody.innerHTML = '<tr><td colspan="4" class="text-center text-muted fst-italic">Loading...</td></tr>';
if (searchParamsTableBody) searchParamsTableBody.innerHTML = '<tr><td colspan="4" class="text-center text-muted fst-italic">Loading...</td></tr>'; if (searchParamsTableBody) searchParamsTableBody.innerHTML = '<tr><td colspan="4" class="text-center text-muted fst-italic">Loading...</td></tr>';
if (rawStructureContent) rawStructureContent.textContent = 'Loading...'; if (rawStructureContent) rawStructureContent.textContent = 'Loading...';
try { try {
console.log(`PopulateStructure V4: Fetching structure and search params for ${resourceType}`); console.log(`PopulateStructure V4: Fetching structure and search params for ${resourceType}`);
const structureBaseUrl = "{{ url_for('get_structure') }}"; const includeNarrative = includeNarrativeCheckbox ? includeNarrativeCheckbox.checked : true;
const params = new URLSearchParams({ package_name: "{{ processed_ig.package_name }}", package_version: "{{ processed_ig.version }}", resource_type: resourceType }); // Fetch processed data for UI tabs
const fetchUrl = `${structureBaseUrl}?${params.toString()}`; const processedData = await fetchStructure(resourceType, 'snapshot', includeNarrative, false);
const response = await fetch(fetchUrl); if (!processedData || !processedData.elements) { throw new Error(`Structure data or elements missing for ${resourceType}.`); }
if (!response.ok) { const snapshotElementsCopy = JSON.parse(JSON.stringify(processedData.elements));
const errorText = await response.text(); const mustSupportPaths = new Set(processedData.must_support_paths || []);
throw new Error(`HTTP error ${response.status}: ${errorText}`); const searchParams = processedData.search_parameters || [];
} const fallbackUsed = processedData.fallback_used || false;
const data = await response.json(); const sourcePackage = processedData.source_package || 'FHIR core';
if (!data || !data.elements) { throw new Error(`Structure data or elements missing for ${resourceType}.`); }
const snapshotElementsCopy = JSON.parse(JSON.stringify(data.elements));
const mustSupportPaths = new Set(data.must_support_paths || []);
const searchParams = data.search_parameters || []; // Get search params
const fallbackUsed = data.fallback_used || false;
const fallbackSource = data.source_package || 'FHIR core';
// *** ADDED LOG: Check received search parameters ***
console.log('[populateStructure] Received search_parameters:', searchParams); console.log('[populateStructure] Received search_parameters:', searchParams);
// ************************************************* // Fetch raw StructureDefinition for raw view
const rawData = await fetchStructure(resourceType, 'snapshot', includeNarrative, true);
// Update Raw JSON View
if (rawStructureContent) { if (rawStructureContent) {
rawStructureContent.textContent = JSON.stringify(data, null, 4); rawStructureContent.textContent = JSON.stringify(rawData, null, 4);
if (rawStructureContent.classList.contains('hljs')) { rawStructureContent.classList.remove('hljs'); } if (rawStructureContent.classList.contains('hljs')) { rawStructureContent.classList.remove('hljs'); }
rawStructureContent.dataset.highlighted = '';
hljs.highlightElement(rawStructureContent); hljs.highlightElement(rawStructureContent);
if (fallbackUsed && structureFallbackMessage) { if (fallbackUsed && structureFallbackMessage) {
structureFallbackMessage.textContent = `Note: Displaying structure from fallback package: ${fallbackSource}`; structureFallbackMessage.textContent = `Note: Displaying structure from fallback package: ${sourcePackage}`;
structureFallbackMessage.style.display = 'block'; structureFallbackMessage.style.display = 'block';
} }
} else { if (rawStructureContent) rawStructureContent.textContent = '(Raw content display element not found)'; } } else { if (rawStructureContent) rawStructureContent.textContent = '(Raw content display element not found)'; }
// Render the Tree Views
for (const view of views) { for (const view of views) {
console.log(`PopulateStructure V4: Rendering view '${view}' for ${resourceType}`); console.log(`PopulateStructure V4: Rendering view '${view}' for ${resourceType}`);
const treeContainer = document.getElementById(`structure-tree-${view}`); const treeContainer = document.getElementById(`structure-tree-${view}`);
if (!treeContainer) { console.error(`Container missing for ${view}`); continue; } if (!treeContainer) { console.error(`Container missing for ${view}`); continue; }
await renderStructureTree(resourceType, view, JSON.parse(JSON.stringify(snapshotElementsCopy)), mustSupportPaths); await renderStructureTree(resourceType, view, JSON.parse(JSON.stringify(snapshotElementsCopy)), mustSupportPaths);
} }
renderConstraintsTable(processedData.elements);
// Render Constraints & Bindings tables renderBindingsTable(processedData.elements);
renderConstraintsTable(data.elements); renderSearchParametersTable(searchParams);
renderBindingsTable(data.elements);
// Render Search Params table (passing potentially empty array)
renderSearchParametersTable(searchParams); // Pass the retrieved data
} catch (error) { } catch (error) {
console.error("Error populating structure:", error); console.error("Error populating structure:", error);
// ... existing error handling ...
views.forEach(v => { const tc = document.getElementById(`structure-tree-${v}`); if(tc) tc.innerHTML = `<div class="alert alert-danger m-2">Error loading structure: ${error.message}</div>`; }); views.forEach(v => { const tc = document.getElementById(`structure-tree-${v}`); if(tc) tc.innerHTML = `<div class="alert alert-danger m-2">Error loading structure: ${error.message}</div>`; });
if (constraintsTableBody) constraintsTableBody.innerHTML = `<tr><td colspan="5" class="text-danger text-center">Error loading constraints: ${error.message}</td></tr>`; if (constraintsTableBody) constraintsTableBody.innerHTML = `<tr><td colspan="5" class="text-danger text-center">Error loading constraints: ${error.message}</td></tr>`;
if (bindingsTableBody) bindingsTableBody.innerHTML = `<tr><td colspan="4" class="text-danger text-center">Error loading bindings: ${error.message}</td></tr>`; if (bindingsTableBody) bindingsTableBody.innerHTML = `<tr><td colspan="4" class="text-danger text-center">Error loading bindings: ${error.message}</td></tr>`;
if (searchParamsTableBody) searchParamsTableBody.innerHTML = `<tr><td colspan="4" class="text-danger text-center">Error loading search params: ${error.message}</td></tr>`; // Show error here too if (searchParamsTableBody) searchParamsTableBody.innerHTML = `<tr><td colspan="4" class="text-danger text-center">Error loading search params: ${error.message}</td></tr>`;
if (rawStructureContent) { rawStructureContent.textContent = `Error: ${error.message}`; } if (rawStructureContent) { rawStructureContent.textContent = `Error: ${error.message}`; }
} finally { } finally {
if (structureLoading) structureLoading.style.display = 'none'; if (structureLoading) structureLoading.style.display = 'none';
if (rawStructureLoading) rawStructureLoading.style.display = 'none'; if (rawStructureLoading) rawStructureLoading.style.display = 'none';
} }
} // End populateStructure V4 } // End populateStructure V5
// --- Populate Example Selector Dropdown --- // --- Populate Example Selector Dropdown ---
function populateExampleSelector(resourceOrProfileIdentifier) { function populateExampleSelector(resourceOrProfileIdentifier) {
@ -1040,24 +995,73 @@ async function fetchAndDisplayExample(selectedFilePath) {
const resetCopyButton = (button, tooltipInstance, originalTitle) => { if (tooltipInstance && button?.isConnected) { tooltipInstance.hide(); button.setAttribute('data-bs-original-title', originalTitle); button.querySelector('i')?.classList.replace('bi-check-lg', 'bi-clipboard'); } }; const resetCopyButton = (button, tooltipInstance, originalTitle) => { if (tooltipInstance && button?.isConnected) { tooltipInstance.hide(); button.setAttribute('data-bs-original-title', originalTitle); button.querySelector('i')?.classList.replace('bi-check-lg', 'bi-clipboard'); } };
resetCopyButton(copyRawExButton, copyRawExTooltipInstance, 'Copy Raw Content'); resetCopyButton(copyPrettyJsonButton, copyPrettyJsonTooltipInstance, 'Copy JSON'); resetCopyButton(copyRawExButton, copyRawExTooltipInstance, 'Copy Raw Content'); resetCopyButton(copyPrettyJsonButton, copyPrettyJsonTooltipInstance, 'Copy JSON');
if (!selectedFilePath) { exampleLoading.style.display = 'none'; return; } if (!selectedFilePath) { exampleLoading.style.display = 'none'; return; }
const exampleBaseUrl = "{{ url_for('get_example') }}"; const exampleParams = new URLSearchParams({ package_name: "{{ processed_ig.package_name }}", package_version: "{{ processed_ig.version }}", filename: selectedFilePath }); const exampleFetchUrl = `${exampleBaseUrl}?${exampleParams.toString()}`; const exampleBaseUrl = "{{ url_for('get_example') }}";
const includeNarrative = includeNarrativeCheckbox ? includeNarrativeCheckbox.checked : true;
const exampleParams = new URLSearchParams({
package_name: "{{ processed_ig.package_name }}",
version: "{{ processed_ig.version }}", // Changed from package_version to version
filename: selectedFilePath,
include_narrative: includeNarrative
});
const exampleFetchUrl = `${exampleBaseUrl}?${exampleParams.toString()}`;
try { try {
const response = await fetch(exampleFetchUrl); if (!response.ok) { throw new Error(`HTTP error ${response.status}`); } const rawData = await response.text(); const response = await fetch(exampleFetchUrl);
if(exampleFilename) exampleFilename.textContent = `Source: ${selectedFilePath.split('/').pop()}`; if(exampleContentRaw) exampleContentRaw.textContent = rawData; let prettyJson = '(Not valid JSON)'; if (!response.ok) { throw new Error(`HTTP error ${response.status}`); }
try { prettyJson = JSON.stringify(JSON.parse(rawData), null, 4); } catch (e) { prettyJson = rawData.startsWith('<') ? '(Content is XML, not JSON)' : `(Error parsing JSON: ${e.message})`; } if(exampleContentJson) exampleContentJson.textContent = prettyJson; const rawData = await response.text();
if (exampleContentRaw) hljs.highlightElement(exampleContentRaw); if (exampleContentJson) hljs.highlightElement(exampleContentJson); if(exampleContentWrapper) exampleContentWrapper.style.display = 'block'; if(exampleFilename) exampleFilename.textContent = `Source: ${selectedFilePath.split('/').pop()}`;
} catch (error) { console.error("Error fetching or displaying example:", error); if (exampleFilename) exampleFilename.textContent = 'Error loading example'; if (exampleContentRaw) exampleContentRaw.textContent = `Error: ${error.message}`; if (exampleContentJson) exampleContentJson.textContent = `Error: ${error.message}`; if (exampleContentWrapper) exampleContentWrapper.style.display = 'block'; } if(exampleContentRaw) exampleContentRaw.textContent = rawData;
finally { if (exampleLoading) exampleLoading.style.display = 'none'; } let prettyJson = '(Not valid JSON)';
try { prettyJson = JSON.stringify(JSON.parse(rawData), null, 4); } catch (e) { prettyJson = rawData.startsWith('<') ? '(Content is XML, not JSON)' : `(Error parsing JSON: ${e.message})`; }
if(exampleContentJson) exampleContentJson.textContent = prettyJson;
if (exampleContentRaw) hljs.highlightElement(exampleContentRaw);
if (exampleContentJson) hljs.highlightElement(exampleContentJson);
if(exampleContentWrapper) exampleContentWrapper.style.display = 'block';
} catch (error) {
console.error("Error fetching or displaying example:", error);
if (exampleFilename) exampleFilename.textContent = 'Error loading example';
if (exampleContentRaw) exampleContentRaw.textContent = `Error: ${error.message}`;
if (exampleContentJson) exampleContentJson.textContent = `Error: ${error.message}`;
if (exampleContentWrapper) exampleContentWrapper.style.display = 'block';
} finally {
if (exampleLoading) exampleLoading.style.display = 'none';
}
} }
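fetchAndDisplayExample keeps the raw payload verbatim and only pretty-prints it when it parses as JSON, falling back to a short notice for XML examples. That decision in isolation:

// Sketch only: pretty-print example content when it is JSON, otherwise say why not.
function prettyPrintExample(rawData) {
    try {
        return JSON.stringify(JSON.parse(rawData), null, 4);
    } catch (e) {
        return rawData.startsWith('<') ? '(Content is XML, not JSON)' : `(Error parsing JSON: ${e.message})`;
    }
}
console.log(prettyPrintExample('{"resourceType":"Patient"}'));             // indented JSON
console.log(prettyPrintExample('<Patient xmlns="http://hl7.org/fhir"/>')); // XML notice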
// --- Generic Copy Logic Function --- // --- Generic Copy Logic Function ---
function setupCopyButton(buttonElement, sourceElement, tooltipInstance, successTitle, originalTitle, errorTitle, nothingTitle) { function setupCopyButton(buttonElement, sourceElement, tooltipInstance, successTitle, originalTitle, errorTitle, nothingTitle) {
if (!buttonElement || !sourceElement) { return null; } if (!buttonElement || !sourceElement) { return null; }
bootstrap.Tooltip.getInstance(buttonElement)?.dispose(); bootstrap.Tooltip.getInstance(buttonElement)?.dispose();
tooltipInstance = new bootstrap.Tooltip(buttonElement, { title: originalTitle }); tooltipInstance = new bootstrap.Tooltip(buttonElement, { title: originalTitle });
const clickHandler = () => { const textToCopy = sourceElement.textContent; const copyIcon = buttonElement.querySelector('i'); const updateTooltip = (newTitle, iconClass) => { buttonElement.setAttribute('data-bs-original-title', newTitle); tooltipInstance?.setContent({ '.tooltip-inner': newTitle }); tooltipInstance?.show(); if (copyIcon) copyIcon.className = `bi ${iconClass}`; setTimeout(() => { if (buttonElement.isConnected) { tooltipInstance?.hide(); buttonElement.setAttribute('data-bs-original-title', originalTitle); if (copyIcon) copyIcon.className = 'bi bi-clipboard'; } }, 1500); }; if (textToCopy && textToCopy.trim() && !textToCopy.startsWith('(Could not') && !textToCopy.startsWith('(Not valid') && !textToCopy.startsWith('Error:')) { navigator.clipboard.writeText(textToCopy).then(() => { updateTooltip(successTitle, 'bi-check-lg'); }).catch(err => { console.error(`Copy failed:`, err); updateTooltip(errorTitle, 'bi-exclamation-triangle'); }); } else { updateTooltip(nothingTitle, 'bi-clipboard'); } }; const clickHandler = () => {
buttonElement.removeEventListener('click', clickHandler); buttonElement.addEventListener('click', clickHandler); return tooltipInstance; const textToCopy = sourceElement.textContent;
const copyIcon = buttonElement.querySelector('i');
const updateTooltip = (newTitle, iconClass) => {
buttonElement.setAttribute('data-bs-original-title', newTitle);
tooltipInstance?.setContent({ '.tooltip-inner': newTitle });
tooltipInstance?.show();
if (copyIcon) copyIcon.className = `bi ${iconClass}`;
setTimeout(() => {
if (buttonElement.isConnected) {
tooltipInstance?.hide();
buttonElement.setAttribute('data-bs-original-title', originalTitle);
if (copyIcon) copyIcon.className = 'bi bi-clipboard';
}
}, 1500);
};
if (textToCopy && textToCopy.trim() && !textToCopy.startsWith('(Could not') && !textToCopy.startsWith('(Not valid') && !textToCopy.startsWith('Error:')) {
navigator.clipboard.writeText(textToCopy).then(() => {
updateTooltip(successTitle, 'bi-check-lg');
}).catch(err => {
console.error(`Copy failed:`, err);
updateTooltip(errorTitle, 'bi-exclamation-triangle');
});
} else {
updateTooltip(nothingTitle, 'bi-clipboard');
}
};
buttonElement.removeEventListener('click', clickHandler);
buttonElement.addEventListener('click', clickHandler);
return tooltipInstance;
} }
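setupCopyButton returns the tooltip instance it creates so the caller can keep a reference for later resets (as resetCopyButton does above). A hypothetical wiring for the pretty-JSON copy button; the source element id is an assumption here, only the button id appears later in this diff:

// Sketch only: wire a copy button to the element whose text it should copy.
let copyPrettyJsonTooltipInstance = null;
const prettyBtn = document.getElementById('copy-pretty-json-button');
const prettySrc = document.getElementById('example-content-json'); // assumed id for the rendered JSON block
if (prettyBtn && prettySrc) {
    copyPrettyJsonTooltipInstance = setupCopyButton(
        prettyBtn, prettySrc, copyPrettyJsonTooltipInstance,
        'Copied!', 'Copy JSON', 'Copy failed', 'Nothing to copy'
    );
}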
// --- Function to Render Constraints Table --- // --- Function to Render Constraints Table ---
@ -1075,9 +1079,7 @@ function renderConstraintsTable(elements) {
elements = []; // Prevent errors later elements = []; // Prevent errors later
} }
elements.forEach(element => { elements.forEach(element => {
// Check if element exists and has a constraint property which is an array
if (element && element.constraint && Array.isArray(element.constraint) && element.constraint.length > 0) { if (element && element.constraint && Array.isArray(element.constraint) && element.constraint.length > 0) {
element.constraint.forEach(constraint => { element.constraint.forEach(constraint => {
if (constraint) { // Ensure constraint object itself exists if (constraint) { // Ensure constraint object itself exists
@ -1106,10 +1108,8 @@ function renderConstraintsTable(elements) {
} }
// Re-run highlight.js for the new code blocks // Re-run highlight.js for the new code blocks
// Use setTimeout to allow the DOM to update before highlighting
setTimeout(() => { setTimeout(() => {
document.querySelectorAll('#constraints-table code.language-fhirpath').forEach((block) => { document.querySelectorAll('#constraints-table code.language-fhirpath').forEach((block) => {
// Remove existing highlight classes if any, before re-highlighting
block.classList.remove('hljs'); block.classList.remove('hljs');
hljs.highlightElement(block); hljs.highlightElement(block);
}); });
@ -1132,7 +1132,6 @@ function renderBindingsTable(elements) {
} }
elements.forEach(element => { elements.forEach(element => {
// Check if element exists and has a binding property which is an object
if (element && element.binding && typeof element.binding === 'object') { if (element && element.binding && typeof element.binding === 'object') {
bindingsFound = true; bindingsFound = true;
const binding = element.binding; const binding = element.binding;
@ -1143,7 +1142,6 @@ function renderBindingsTable(elements) {
if (binding.valueSet) { if (binding.valueSet) {
const valueSetLink = document.createElement('a'); const valueSetLink = document.createElement('a');
valueSetLink.href = binding.valueSet; // Ideally, resolve canonical to actual URL if possible valueSetLink.href = binding.valueSet; // Ideally, resolve canonical to actual URL if possible
// Try to show last part of URL as name, or full URL if no '/'
const urlParts = binding.valueSet.split('/'); const urlParts = binding.valueSet.split('/');
valueSetLink.textContent = urlParts.length > 1 ? urlParts[urlParts.length - 1] : binding.valueSet; valueSetLink.textContent = urlParts.length > 1 ? urlParts[urlParts.length - 1] : binding.valueSet;
valueSetLink.target = '_blank'; // Open in new tab valueSetLink.target = '_blank'; // Open in new tab
@ -1170,110 +1168,71 @@ function renderBindingsTable(elements) {
// --- Function to Render Search Parameters Table (Includes Conformance Badges & Tooltips) --- // --- Function to Render Search Parameters Table (Includes Conformance Badges & Tooltips) ---
function renderSearchParametersTable(searchParamsData) { function renderSearchParametersTable(searchParamsData) {
// Find the table body element using its ID
const tableBody = document.getElementById('searchparams-table')?.querySelector('tbody'); const tableBody = document.getElementById('searchparams-table')?.querySelector('tbody');
// Check if the table body element exists
if (!tableBody) { if (!tableBody) {
console.error('Search Params table body element (#searchparams-table > tbody) not found'); console.error('Search Params table body element (#searchparams-table > tbody) not found');
return; // Exit if the table body isn't found return;
} }
tableBody.innerHTML = ''; // Clear any existing rows (like loading indicators) tableBody.innerHTML = '';
// Log the data received for debugging
console.log('[renderSearchParametersTable] Received data:', searchParamsData); console.log('[renderSearchParametersTable] Received data:', searchParamsData);
// Check if the received data is valid and is an array with items
if (!searchParamsData || !Array.isArray(searchParamsData) || searchParamsData.length === 0) { if (!searchParamsData || !Array.isArray(searchParamsData) || searchParamsData.length === 0) {
console.log('[renderSearchParametersTable] No valid search parameter data found, displaying message.'); console.log('[renderSearchParametersTable] No valid search parameter data found, displaying message.');
// Display a message indicating no parameters were found
const row = tableBody.insertRow(); const row = tableBody.insertRow();
const cell = row.insertCell(); const cell = row.insertCell();
cell.colSpan = 4; // Span across all columns cell.colSpan = 4;
cell.textContent = 'No relevant search parameters found for this resource type.'; cell.textContent = 'No relevant search parameters found for this resource type.';
cell.style.textAlign = 'center'; cell.style.textAlign = 'center';
cell.classList.add('text-muted', 'fst-italic'); // Style the message cell.classList.add('text-muted', 'fst-italic');
return; // Exit the function return;
} }
// Sort parameters alphabetically by their 'code' before rendering
searchParamsData.sort((a, b) => (a.code || '').localeCompare(b.code || '')); searchParamsData.sort((a, b) => (a.code || '').localeCompare(b.code || ''));
// Iterate through each search parameter object in the sorted array
searchParamsData.forEach((param, index) => { searchParamsData.forEach((param, index) => {
console.log(`[renderSearchParametersTable] Processing param ${index + 1}:`, param); // Log each parameter console.log(`[renderSearchParametersTable] Processing param ${index + 1}:`, param);
const row = tableBody.insertRow();
const row = tableBody.insertRow(); // Create a new table row
// --- Column 1: Parameter Code ---
const nameCell = row.insertCell(); const nameCell = row.insertCell();
// Use escapeHtmlAttr (ensure this helper function exists in your scope)
// Display the parameter code within a <code> tag
nameCell.innerHTML = `<code>${escapeHtmlAttr(param.code || 'N/A')}</code>`; nameCell.innerHTML = `<code>${escapeHtmlAttr(param.code || 'N/A')}</code>`;
row.insertCell().textContent = param.type || 'N/A';
// --- Column 2: Parameter Type ---
row.insertCell().textContent = param.type || 'N/A'; // Display the parameter type
// --- Column 3: Conformance Level (with Badges and Tooltip) ---
const conformanceCell = row.insertCell(); const conformanceCell = row.insertCell();
// Default to 'Optional' if conformance isn't specified in the data
const conformanceValue = param.conformance || 'Optional'; const conformanceValue = param.conformance || 'Optional';
let badgeClass = 'bg-secondary'; // Default badge style let badgeClass = 'bg-secondary';
let titleText = 'Conformance level not specified in CapabilityStatement.'; // Default tooltip text let titleText = 'Conformance level not specified in CapabilityStatement.';
// Determine badge style and tooltip text based on the conformance value
if (conformanceValue === 'SHALL') { if (conformanceValue === 'SHALL') {
badgeClass = 'bg-danger'; // Red for SHALL (Mandatory) badgeClass = 'bg-danger';
titleText = 'Server SHALL support this parameter.'; titleText = 'Server SHALL support this parameter.';
} else if (conformanceValue === 'SHOULD') { } else if (conformanceValue === 'SHOULD') {
badgeClass = 'bg-warning text-dark'; // Yellow for SHOULD (Recommended) badgeClass = 'bg-warning text-dark';
titleText = 'Server SHOULD support this parameter.'; titleText = 'Server SHOULD support this parameter.';
} else if (conformanceValue === 'MAY') { } else if (conformanceValue === 'MAY') {
badgeClass = 'bg-info text-dark'; // Blue for MAY (Optional but defined) badgeClass = 'bg-info text-dark';
titleText = 'Server MAY support this parameter.'; titleText = 'Server MAY support this parameter.';
} else if (conformanceValue === 'Optional') { } else if (conformanceValue === 'Optional') {
badgeClass = 'bg-light text-dark border'; // Light grey for Optional (Not explicitly mentioned in Caps) badgeClass = 'bg-light text-dark border';
titleText = 'Server support for this parameter is optional.'; titleText = 'Server support for this parameter is optional.';
} }
// Add handling for other potential values like 'SHALL-NOT', 'SHOULD-NOT' if needed
// Set the tooltip attributes
conformanceCell.setAttribute('title', titleText); conformanceCell.setAttribute('title', titleText);
conformanceCell.setAttribute('data-bs-toggle', 'tooltip'); conformanceCell.setAttribute('data-bs-toggle', 'tooltip');
conformanceCell.setAttribute('data-bs-placement', 'top'); conformanceCell.setAttribute('data-bs-placement', 'top');
// Set the cell content with the styled badge
conformanceCell.innerHTML = `<span class="badge ${badgeClass}">${escapeHtmlAttr(conformanceValue)}</span>`; conformanceCell.innerHTML = `<span class="badge ${badgeClass}">${escapeHtmlAttr(conformanceValue)}</span>`;
// Initialize or re-initialize the Bootstrap tooltip for this specific cell
// This is important because the table rows are added dynamically
const existingTooltip = bootstrap.Tooltip.getInstance(conformanceCell); const existingTooltip = bootstrap.Tooltip.getInstance(conformanceCell);
if (existingTooltip) { if (existingTooltip) { existingTooltip.dispose(); }
existingTooltip.dispose(); // Remove any previous tooltip instance to avoid conflicts new bootstrap.Tooltip(conformanceCell);
}
new bootstrap.Tooltip(conformanceCell); // Create and initialize the new tooltip
// --- Column 4: Description (with FHIRPath Tooltip if available) ---
const descriptionCell = row.insertCell(); const descriptionCell = row.insertCell();
// Use escapeHtmlAttr for the description text
descriptionCell.innerHTML = `<small>${escapeHtmlAttr(param.description || 'No description available.')}</small>`; descriptionCell.innerHTML = `<small>${escapeHtmlAttr(param.description || 'No description available.')}</small>`;
// If a FHIRPath expression exists, add it as a tooltip
if (param.expression) { if (param.expression) {
descriptionCell.setAttribute('title', `FHIRPath: ${param.expression}`); descriptionCell.setAttribute('title', `FHIRPath: ${param.expression}`);
descriptionCell.setAttribute('data-bs-toggle', 'tooltip'); descriptionCell.setAttribute('data-bs-toggle', 'tooltip');
descriptionCell.setAttribute('data-bs-placement', 'top'); descriptionCell.setAttribute('data-bs-placement', 'top');
// Initialize or re-initialize the tooltip
const existingDescTooltip = bootstrap.Tooltip.getInstance(descriptionCell); const existingDescTooltip = bootstrap.Tooltip.getInstance(descriptionCell);
if (existingDescTooltip) { if (existingDescTooltip) { existingDescTooltip.dispose(); }
existingDescTooltip.dispose();
}
new bootstrap.Tooltip(descriptionCell); new bootstrap.Tooltip(descriptionCell);
} }
}); // End forEach loop over searchParamsData });
console.log('[renderSearchParametersTable] Finished rendering parameters.'); console.log('[renderSearchParametersTable] Finished rendering parameters.');
} // End renderSearchParametersTable function } // End renderSearchParametersTable
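The conformance column maps CapabilityStatement expectation codes onto Bootstrap badge classes and tooltip text. The same mapping expressed as a lookup table rather than an if/else chain, should it ever need to grow (values copied from the branches above):

// Sketch only: expectation code -> badge class and tooltip text for the search-parameter table.
const CONFORMANCE_BADGES = {
    'SHALL':    { badge: 'bg-danger',                 title: 'Server SHALL support this parameter.' },
    'SHOULD':   { badge: 'bg-warning text-dark',      title: 'Server SHOULD support this parameter.' },
    'MAY':      { badge: 'bg-info text-dark',         title: 'Server MAY support this parameter.' },
    'Optional': { badge: 'bg-light text-dark border', title: 'Server support for this parameter is optional.' }
};
function conformanceBadge(code) {
    return CONFORMANCE_BADGES[code] ||
        { badge: 'bg-secondary', title: 'Conformance level not specified in CapabilityStatement.' };
}
console.log(conformanceBadge('SHOULD').badge); // "bg-warning text-dark"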
// --- DOMContentLoaded Listener --- // --- DOMContentLoaded Listener ---
document.addEventListener('DOMContentLoaded', function() { document.addEventListener('DOMContentLoaded', function() {
@ -1303,6 +1262,7 @@ document.addEventListener('DOMContentLoaded', function() {
copyRawDefButton = document.getElementById('copy-raw-def-button'); copyRawDefButton = document.getElementById('copy-raw-def-button');
copyRawExButton = document.getElementById('copy-raw-ex-button'); copyRawExButton = document.getElementById('copy-raw-ex-button');
copyPrettyJsonButton = document.getElementById('copy-pretty-json-button'); copyPrettyJsonButton = document.getElementById('copy-pretty-json-button');
includeNarrativeCheckbox = document.getElementById('includeNarrative');
// --- Initialize Highlight.js --- // --- Initialize Highlight.js ---
hljs.configure({ languages: ['json'] }); hljs.configure({ languages: ['json'] });
@ -1325,7 +1285,7 @@ document.addEventListener('DOMContentLoaded', function() {
const resourceType = link.dataset.resourceType; const resourceType = link.dataset.resourceType;
if (!resourceType) { console.error("Missing data-resource-type"); return; } if (!resourceType) { console.error("Missing data-resource-type"); return; }
console.log("Resource type link clicked:", resourceType); console.log("Resource type link clicked:", resourceType);
await populateStructure(resourceType); // Uses V9 buildTreeData, V2 populateStructure await populateStructure(resourceType);
populateExampleSelector(resourceType); populateExampleSelector(resourceType);
if (structureDisplayWrapper) { structureDisplayWrapper.scrollIntoView({ behavior: 'smooth', block: 'start' }); } if (structureDisplayWrapper) { structureDisplayWrapper.scrollIntoView({ behavior: 'smooth', block: 'start' }); }
}); });
@ -1336,7 +1296,21 @@ document.addEventListener('DOMContentLoaded', function() {
exampleSelect.addEventListener('change', function(event) { fetchAndDisplayExample(this.value); }); exampleSelect.addEventListener('change', function(event) { fetchAndDisplayExample(this.value); });
} else { console.warn("Example select dropdown not found."); } } else { console.warn("Example select dropdown not found."); }
// --- No Collapse Listeners Here --- // Narrative Toggle Checkbox Handler
if (includeNarrativeCheckbox) {
includeNarrativeCheckbox.addEventListener('change', async function() {
const selectedFilePath = exampleSelect?.value;
const resourceType = exampleResourceTypeTitle?.textContent;
if (selectedFilePath) {
await fetchAndDisplayExample(selectedFilePath);
}
if (resourceType && resourceType !== '(unknown)') {
await populateStructure(resourceType);
}
});
} else {
console.warn("Narrative checkbox not found.");
}
// --- Initial UI State --- // --- Initial UI State ---
if(structureDisplayWrapper) structureDisplayWrapper.style.display = 'none'; if(structureDisplayWrapper) structureDisplayWrapper.style.display = 'none';

File diff suppressed because it is too large