added tests

Alexander Domene
2025-10-27 08:19:13 +01:00
parent a71284ee64
commit 8650bd09a3
27 changed files with 5706 additions and 110 deletions


@@ -14,27 +14,67 @@ Usage:
import argparse
import json
import logging
from collections import Counter, defaultdict
from typing import Any, Dict, List, Optional, Tuple
from translator import CodeTranslator
from utils import (
    parse_iso_date, format_iso_date, get_ref_id, ensure_text, collect_effective_dates,
    validate_file_path, validate_output_path, validate_directory_path
)
from datetime import datetime
import random
# Setup logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger('fhir_to_pad_converter')
# Optional deps with better error handling
try:
    import jsonschema  # type: ignore
    HAS_JSONSCHEMA = True
    logger.debug("jsonschema module loaded successfully")
except ImportError:
    HAS_JSONSCHEMA = False
    logger.warning("jsonschema not available - FHIR JSON Schema validation will be skipped")
    logger.warning("To enable JSON Schema validation, install with: pip install jsonschema")
except Exception as e:
    HAS_JSONSCHEMA = False
    logger.error(f"Unexpected error loading jsonschema module: {e}")
try:
    from lxml import etree  # type: ignore
    HAS_LXML = True
    logger.debug("lxml module loaded successfully")
except ImportError:
    HAS_LXML = False
    logger.warning("lxml not available - XSD validation will be skipped")
    logger.warning("To enable XSD validation, install with: pip install lxml")
except Exception as e:
    HAS_LXML = False
    logger.error(f"Unexpected error loading lxml module: {e}")
import xml.etree.ElementTree as ET
# Config validation (optional, with graceful degradation)
try:
    from config_schemas import (
        validate_header_config,
        validate_placeholder_config,
        validate_mapping_config
    )
    HAS_CONFIG_VALIDATION = True
    logger.debug("Config validation schemas loaded successfully")
except ImportError:
    HAS_CONFIG_VALIDATION = False
    logger.debug("Config validation not available (config_schemas.py not found)")
except Exception as e:
    HAS_CONFIG_VALIDATION = False
    logger.error(f"Error loading config validation: {e}")
PAD_NS = "http://padinfo.de/ns/pad"
# ----------------------------
@@ -81,21 +121,6 @@ def compute_fhir_stats(bundle: Dict[str, Any]) -> Dict[str, Any]:
"outcomes": Counter(),
}
for e in entries:
res = e.get("resource", {}) if isinstance(e, dict) else {}
rtype = res.get("resourceType") or "Unknown"
@@ -142,21 +167,6 @@ def compute_fhir_stats(bundle: Dict[str, Any]) -> Dict[str, Any]:
# Grouping & mapping
# ----------------------------
def group_entries(bundle: Dict[str, Any]) -> Dict[Tuple[Optional[str], Optional[str]], List[Dict[str, Any]]]:
groups: Dict[Tuple[Optional[str], Optional[str]], List[Dict[str, Any]]] = defaultdict(list)
@@ -164,11 +174,14 @@ def group_entries(bundle: Dict[str, Any]) -> Dict[Tuple[Optional[str], Optional[
has_claims = any(
e.get("resource", {}).get("resourceType") == "Claim"
for e in bundle.get("entry", [])
if e is not None # Filter out None entries
)
if has_claims:
# Group by (patient_id, claim_id)
for e in bundle.get("entry", []):
if e is None: # Skip None entries
continue
res = e.get("resource", {})
if not isinstance(res, dict):
continue
@@ -197,6 +210,8 @@ def group_entries(bundle: Dict[str, Any]) -> Dict[Tuple[Optional[str], Optional[
else:
# Fallback to encounter-based grouping
for e in bundle.get("entry", []):
if e is None: # Skip None entries
continue
res = e.get("resource", {})
if not isinstance(res, dict):
continue
@@ -212,49 +227,86 @@ def group_entries(bundle: Dict[str, Any]) -> Dict[Tuple[Optional[str], Optional[
return groups
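# Resulting group keys (sketch): ("<patient-id>", "<claim-id>") when the bundle
# contains Claim resources, otherwise the encounter-based fallback key
# ("<patient-id>", "<encounter-id>").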
def get_value_from_path(resource: Dict[str, Any], path: str) -> Optional[Any]:
    """Gets a value from a nested dict using a dot-separated path."""
    keys = path.split('.')
    value = resource
    for key in keys:
        if isinstance(value, dict):
            value = value.get(key)
        elif isinstance(value, list):
            try:
                idx = int(key)
                if 0 <= idx < len(value):
                    value = value[idx]
                else:
                    return None
            except (ValueError, IndexError):
                return None
        else:
            return None
    return value
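# Illustrative usage of get_value_from_path (resource contents assumed):
#   res = {"code": {"coding": [{"system": "http://loinc.org", "code": "718-7"}]}}
#   get_value_from_path(res, "code.coding.0.code")   # -> "718-7"
#   get_value_from_path(res, "code.coding.5.code")   # -> None (index out of range)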
def map_resource_to_position(res: Dict[str, Any], mapping_config: Dict[str, Any], translator: Optional[CodeTranslator] = None) -> Optional[Dict[str, Any]]:
    """Maps a FHIR resource to a PAD position using a configurable mapping."""
    rtype = res.get("resourceType")
    if not rtype or rtype not in mapping_config.get("resources", {}):
        return None
    mapping = mapping_config["resources"][rtype]
    position = {
        "id": res.get("id", ""),
        "faktor": "",
        "umsatzsteuer": "",
        "minderungssatz": "",
        "aisbewertung": {"punktwert": "", "punktzahl": "", "einzelbetrag": ""},
    }
    for field, rules in mapping.get("fields", {}).items():
        value = None
        if "source" in rules:
            value = get_value_from_path(res, rules["source"])
        if "translate" in rules and translator:
            translate_rules = rules["translate"]
            source_system_field = translate_rules.get("source_system_field")
            source_code_field = translate_rules.get("source_code_field")
            if source_system_field and source_code_field:
                coding_object = get_value_from_path(res, rules["source"])
                if isinstance(coding_object, dict):
                    system = coding_object.get(source_system_field)
                    code = coding_object.get(source_code_field)
                    if system and code:
                        translated_code = translator.translate(system, code)
                        if translated_code:
                            value = translated_code
        if value is None and "default" in rules:
            value = rules["default"]
        if value is None and rules.get("required"):
            value = rules.get("placeholder", "")
        position[field] = value if value is not None else ""
    # Fallback for text
    if not position.get("text"):
        position["text"] = rtype
    # Handle date separately for now
    if 'datum' in position and position['datum']:
        dt = parse_iso_date(position['datum'])
        position['datum'] = format_iso_date(dt) if dt else ""
    else:
        # Fallback to collect_effective_dates if no specific date is mapped
        dates = collect_effective_dates(res)
        position['datum'] = format_iso_date(sorted(dates)[0]) if dates else ""
    return position
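# Sketch of a mapping_config entry consumed above. The keys "resources", "fields",
# "source", "translate", "default", "required"/"placeholder" are the ones this
# function reads; the concrete field values below are assumptions, not normative:
# {
#   "resources": {
#     "Observation": {
#       "fields": {
#         "ziffer": {"source": "code.coding.0",
#                    "translate": {"source_system_field": "system",
#                                  "source_code_field": "code"}},
#         "text":   {"source": "code.coding.0.display"},
#         "datum":  {"source": "effectiveDateTime"},
#         "anzahl": {"default": "1"},
#         "go":     {"default": "EBM"}
#       }
#     }
#   }
# }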
def claim_item_to_position(item: Dict[str, Any]) -> Dict[str, Any]:
@@ -549,12 +601,16 @@ def build_person_with_placeholders(parent: ET.Element, tag: str, anrede: str, vo
def build_pad_xml(bundle: Dict[str, Any], header_cfg: Optional[Dict[str, Any]] = None,
                  placeholder_cfg: Optional[Dict[str, Any]] = None,
                  mapping_config: Optional[Dict[str, Any]] = None,
                  translator: Optional[CodeTranslator] = None) -> Tuple[ET.Element, List[str], Dict[str, Any], List[str]]:
"""FULL implementation (no stubs) - returns a valid XML root element, a list of validation warnings, the header info, and auto-filled fields."""
if header_cfg is None:
header_cfg = {}
if placeholder_cfg is None:
placeholder_cfg = {}
if mapping_config is None:
mapping_config = {}
all_validation_warnings = []
auto_filled: List[str] = []
@@ -619,7 +675,7 @@ def build_pad_xml(bundle: Dict[str, Any], header_cfg: Optional[Dict[str, Any]] =
rechnung_count += 1
# Build rechnung attributes - skip optional empty ones
ph_rech = placeholder_cfg.get("rechnung", {})
rechnung_attrib = {"id": f"R{rechnung_count:05d}"}
rechnung_attrib = {"id": f"R{rechnung_count:05d}", "aisrechnungsnr": str(random.randint(100000000, 999999999))}
# Optional attributes - only add if they have values
eabgabe_val = current_header.get("eabgabe", "")
@@ -729,8 +785,10 @@ def build_pad_xml(bundle: Dict[str, Any], header_cfg: Optional[Dict[str, Any]] =
else:
    for res in entries:
        rtype = res.get("resourceType")
        if rtype in mapping_config.get("resources", {}):
            position = map_resource_to_position(res, mapping_config, translator)
            if position:
                positions.append(position)
ph_goziffer = placeholder_cfg.get("goziffer", {})
@@ -812,6 +870,49 @@ def build_pad_xml(bundle: Dict[str, Any], header_cfg: Optional[Dict[str, Any]] =
return rechnungen, all_validation_warnings, final_header, auto_filled
def build_auf_xml(header: Dict[str, Any], stats: Dict[str, Any], output_xml_filename: str) -> ET.Element:
    """Builds the AUF XML file."""
    now = datetime.now()
    auftrag = E("auftrag", attrib={
        "erstellungsdatum": now.isoformat(),
        "transfernr": str(random.randint(100000, 999999)),
        "echtdaten": "true",
        "dateianzahl": "1"
    })
    auftrag.set("xmlns", PAD_NS)
    empfaenger = Sub(auftrag, "empfaenger")
    logischer_empfaenger = Sub(empfaenger, "logisch")
    Sub(logischer_empfaenger, "name", header.get("empfaenger_name", "UNKNOWN"))
    physikalisch_empfaenger = Sub(empfaenger, "physikalisch")
    Sub(physikalisch_empfaenger, "name", header.get("empfaenger_name", "UNKNOWN"))
    absender = Sub(auftrag, "absender")
    logischer_absender = Sub(absender, "logisch")
    Sub(logischer_absender, "name", header.get("leistungserbringer_name", "UNKNOWN"))
    Sub(logischer_absender, "kundennr", header.get("rechnungsersteller_kundennr", ""))
    physikalisch_absender = Sub(absender, "physikalisch")
    Sub(physikalisch_absender, "name", header.get("leistungserbringer_name", "UNKNOWN"))
    Sub(physikalisch_absender, "kundennr", header.get("rechnungsersteller_kundennr", ""))
    Sub(auftrag, "nachrichtentyp", "ADL", attrib={"version": header.get("nachrichtentyp_version", "1.0")})
    system = Sub(auftrag, "system")
    Sub(system, "produkt", "fhir_to_pad_converter")
    Sub(system, "version", "1.0")
    Sub(system, "hersteller", "Gemini")
    Sub(auftrag, "verschluesselung", attrib={"verfahren": "0", "idcert": "none"})
    Sub(auftrag, "empfangsquittung", "false")
    datei = Sub(auftrag, "datei", attrib={"id": "1", "erstellungsdatum": now.isoformat()})
    Sub(datei, "dokumententyp", "PADneXt", attrib={"format": "pdf"})
    Sub(datei, "name", output_xml_filename)
    Sub(datei, "dateilaenge", attrib={"laenge": "0", "pruefsumme": "0" * 40})
    return auftrag
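# Shape of the generated AUF envelope (sketch derived from the builder above;
# attribute values are filled in at runtime):
# <auftrag xmlns="http://padinfo.de/ns/pad" erstellungsdatum="..." transfernr="..."
#          echtdaten="true" dateianzahl="1">
#   <empfaenger><logisch>...</logisch><physikalisch>...</physikalisch></empfaenger>
#   <absender><logisch>...</logisch><physikalisch>...</physikalisch></absender>
#   <nachrichtentyp version="...">ADL</nachrichtentyp>
#   <system>...</system>
#   <verschluesselung verfahren="0" idcert="none"/>
#   <empfangsquittung>false</empfangsquittung>
#   <datei id="1" erstellungsdatum="...">
#     <dokumententyp format="pdf">PADneXt</dokumententyp>
#     <name>output.xml</name>
#     <dateilaenge laenge="0" pruefsumme="00...0"/>
#   </datei>
# </auftrag>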
# ----------------------------
# PAD validation & stats
# ----------------------------
@@ -929,7 +1030,7 @@ def verify_padnext_compliance(root: ET.Element) -> Dict[str, Any]:
# Check each invoice
for idx, rechnung in enumerate(rechnung_nodes, 1):
rng = rechnung.get("rng")
rng = rechnung.get("aisrechnungsnr")
if rng:
compliance_checks.append(f" ✓ Rechnung {idx} has RNG: {rng}")
else:
@@ -961,10 +1062,10 @@ def compute_pad_stats(root: ET.Element) -> Dict[str, Any]:
rechnung_nodes = root.findall(".//p:rechnung", ns)
fall_nodes = root.findall(".//p:abrechnungsfall", ns)
pos_nodes = root.findall(".//p:abrechnungsfall/p:humanmedizin/p:positionen", ns)
goz_nodes = root.findall(".//p:abrechnungsfall/p:humanmedizin/p:positionen/p:goziffer", ns)
patient_nodes = root.findall(".//p:abrechnungsfall/p:humanmedizin/p:behandelter", ns)
kostentraeger_nodes = root.findall(".//p:rechnung/p:rechnungsempfaenger", ns)
total_positions_declared = sum(int(p.get("posanzahl") or "0") for p in pos_nodes)
total_goziffer = len(goz_nodes)
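# Element hierarchy assumed by the XPath queries above (sketch):
#   rechnungen
#     rechnung [aisrechnungsnr] > rechnungsempfaenger
#       abrechnungsfall > humanmedizin > behandelter
#                                      > positionen [posanzahl] > goziffer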
@@ -1052,11 +1153,14 @@ class ConversionLogger:
# ----------------------------
from validation import run_validation
import os
def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
        output_auf_xml: Optional[str] = None, fhir_json_schema: Optional[str] = None, pad_xsd: Optional[str] = None,
        header_cfg: Optional[Dict[str, Any]] = None,
        placeholder_cfg: Optional[Dict[str, Any]] = None,
        mapping_config: Optional[Dict[str, Any]] = None,
        concept_maps: Optional[str] = None,
        log_file: Optional[str] = None,
        verbose: bool = False) -> Dict[str, Any]:
@@ -1067,6 +1171,10 @@ def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
logger.log(f"Output: {output_xml}")
logger.log("")
translator = CodeTranslator()
if concept_maps:
translator.load_concept_maps(concept_maps)
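# Illustrative translation lookup (values assumed; requires a loaded ConceptMap
# that covers the source system):
#   translator.translate("http://loinc.org", "718-7")  # -> target billing code, or None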
with open(input_json, "r", encoding="utf-8") as f:
bundle = json.load(f)
@@ -1075,7 +1183,7 @@ def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
fhir_stat = compute_fhir_stats(bundle)
# Build output XML
root, validation_warnings, final_header, auto_filled = build_pad_xml(bundle, header_cfg=header_cfg, placeholder_cfg=placeholder_cfg, mapping_config=mapping_config, translator=translator)
# Output validation & stats
pad_ok, pad_msgs = validate_pad_xml(root, pad_xsd)
@@ -1085,6 +1193,14 @@ def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
# Save XML
ET.ElementTree(root).write(output_xml, encoding="utf-8", xml_declaration=True)
# Build and save AUF XML
if output_auf_xml:
    auf_root = build_auf_xml(final_header, pad_stat, os.path.basename(output_xml))
    ET.ElementTree(auf_root).write(output_auf_xml, encoding="utf-8", xml_declaration=True)
    auf_ok, auf_msgs = validate_pad_xml(auf_root, "specs/padnext/padx_auf_v2.12.xsd")
else:
    auf_ok, auf_msgs = None, []
report = {
"input": {
"file": input_json,
@@ -1093,11 +1209,14 @@ def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
"stats": fhir_stat,
},
"output": {
"file": output_xml,
"schema_validation_ok": pad_ok,
"schema_messages": pad_msgs,
"stats": pad_stat,
"adl_file": output_xml,
"adl_schema_validation_ok": pad_ok,
"adl_schema_messages": pad_msgs,
"adl_stats": pad_stat,
"padnext_compliance": pad_compliance,
"auf_file": output_auf_xml,
"auf_schema_validation_ok": auf_ok,
"auf_schema_messages": auf_msgs,
"auto_filled_fields": auto_filled,
},
"validation_warnings": validation_warnings,
@@ -1277,32 +1396,152 @@ def run(input_json: str, output_xml: str, report_json: Optional[str] = None,
def main():
p = argparse.ArgumentParser(description="FHIR JSON -> PAD XML converter with validation & stats")
p.add_argument("--input-json", required=True, help="Path to FHIR Bundle JSON")
p.add_argument("--output-xml", required=True, help="Path to write PAD XML")
p.add_argument("--report-json", default=None, help="Optional path to write a JSON report")
p.add_argument("--log-file", default=None, help="Optional path to write detailed log (auto-generated from output XML if not specified)")
p.add_argument("--output-dir", default=".", help="Directory to save output files")
p.add_argument("--verbose", action="store_true", help="Show detailed output on console (same as log file)")
p.add_argument("--fhir-json-schema", default=None, help="Optional path to FHIR JSON Schema")
p.add_argument("--pad-xsd", default=None, help="Optional path to PAD XML XSD")
p.add_argument("--header-cfg", default=None, help="Optional path to header config JSON (fills static fields)")
p.add_argument("--placeholder-cfg", default=None, help="Optional path to placeholder config JSON (fills missing required fields)")
p.add_argument("--mapping-config", default="mapping_config.json", help="Optional path to mapping config JSON")
p.add_argument("--concept-maps", default=None, help="Path to a directory or a single file for FHIR ConceptMaps")
args = p.parse_args()
# Enable verbose logging if requested
if args.verbose:
    logger.setLevel(logging.DEBUG)

try:
    # Validate input file path
    logger.info(f"Validating input file: {args.input_json}")
    input_json = validate_file_path(args.input_json, must_exist=True, check_readable=True)
    logger.info(f"Input file validated: {input_json}")
    # Validate schema paths if provided
    fhir_schema = None
    if args.fhir_json_schema:
        logger.info(f"Validating FHIR schema path: {args.fhir_json_schema}")
        fhir_schema = validate_file_path(args.fhir_json_schema, must_exist=True)
        logger.info(f"FHIR schema validated: {fhir_schema}")
    pad_xsd = None
    if args.pad_xsd:
        logger.info(f"Validating PAD XSD path: {args.pad_xsd}")
        pad_xsd = validate_file_path(args.pad_xsd, must_exist=True)
        logger.info(f"PAD XSD validated: {pad_xsd}")
except (ValueError, FileNotFoundError, PermissionError) as e:
    logger.error(f"Input validation failed: {e}")
    print(f"ERROR: {e}")
    return 1
# Create timestamped output directory
timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
output_dir_path = os.path.join(args.output_dir, f"result__{timestamp}")
try:
    logger.info(f"Creating output directory: {output_dir_path}")
    output_dir = validate_directory_path(output_dir_path, must_exist=False, create=True)
    logger.info(f"Output directory created: {output_dir}")
except (ValueError, PermissionError) as e:
    logger.error(f"Failed to create output directory: {e}")
    print(f"ERROR: Cannot create output directory: {e}")
    return 1

output_xml = os.path.join(output_dir, "output.xml")
report_json = os.path.join(output_dir, "report.json")
log_file = os.path.join(output_dir, "output.log")
output_auf_xml = os.path.join(output_dir, "output_auf.xml")
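# Resulting output layout (example timestamp; these files are written below):
#   <output-dir>/result__2025-10-27_08-19-13/
#       output.xml        (PADneXt ADL)
#       output_auf.xml    (AUF envelope)
#       report.json
#       output.log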
# Load and validate header config
header_cfg = None
if args.header_cfg:
    try:
        logger.info(f"Loading header config: {args.header_cfg}")
        header_cfg_path = validate_file_path(args.header_cfg, must_exist=True)
        with open(header_cfg_path, "r", encoding="utf-8") as hf:
            header_cfg = json.load(hf)
        # Validate config if validation is available
        if HAS_CONFIG_VALIDATION:
            logger.info("Validating header configuration")
            warnings = validate_header_config(header_cfg)
            for warning in warnings:
                logger.warning(f"Header config: {warning}")
        logger.info("Header config loaded successfully")
    except FileNotFoundError as e:
        logger.error(f"Header config file not found: {e}")
        print(f"ERROR: Header config file not found: {args.header_cfg}")
        return 1
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in header config: {e}")
        print(f"ERROR: Invalid JSON in header config file: {e}")
        return 1
    except ValueError as e:
        logger.error(f"Header config validation failed: {e}")
        print(f"ERROR: Header config validation failed: {e}")
        return 1
# Load and validate placeholder config
placeholder_cfg = None
if args.placeholder_cfg:
    try:
        logger.info(f"Loading placeholder config: {args.placeholder_cfg}")
        placeholder_cfg_path = validate_file_path(args.placeholder_cfg, must_exist=True)
        with open(placeholder_cfg_path, "r", encoding="utf-8") as pf:
            placeholder_cfg = json.load(pf)
        # Validate config if validation is available
        if HAS_CONFIG_VALIDATION:
            logger.info("Validating placeholder configuration")
            warnings = validate_placeholder_config(placeholder_cfg)
            for warning in warnings:
                logger.warning(f"Placeholder config: {warning}")
        logger.info("Placeholder config loaded successfully")
    except FileNotFoundError as e:
        logger.error(f"Placeholder config file not found: {e}")
        print(f"ERROR: Placeholder config file not found: {args.placeholder_cfg}")
        return 1
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in placeholder config: {e}")
        print(f"ERROR: Invalid JSON in placeholder config file: {e}")
        return 1
    except ValueError as e:
        logger.error(f"Placeholder config validation failed: {e}")
        print(f"ERROR: Placeholder config validation failed: {e}")
        return 1
# Load and validate mapping config
mapping_cfg = None
if args.mapping_config:
    try:
        logger.info(f"Loading mapping config: {args.mapping_config}")
        mapping_cfg_path = validate_file_path(args.mapping_config, must_exist=True)
        with open(mapping_cfg_path, "r", encoding="utf-8") as mf:
            mapping_cfg = json.load(mf)
        # Validate config if validation is available
        if HAS_CONFIG_VALIDATION:
            logger.info("Validating mapping configuration")
            warnings = validate_mapping_config(mapping_cfg)
            for warning in warnings:
                logger.warning(f"Mapping config: {warning}")
        logger.info("Mapping config loaded successfully")
    except FileNotFoundError:
        logger.warning(f"Mapping config file not found at {args.mapping_config}. Using empty mapping.")
        print(f"Warning: Mapping config file not found at {args.mapping_config}. Using empty mapping.")
        mapping_cfg = {}
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in mapping config: {e}")
        print(f"ERROR: Invalid JSON in mapping config file: {e}")
        return 1
    except ValueError as e:
        logger.error(f"Mapping config validation failed: {e}")
        print(f"ERROR: Mapping config validation failed: {e}")
        return 1
# Sensible defaults if no header config is provided
if header_cfg is None:
@@ -1364,23 +1603,48 @@ def main():
"aisendbetrag": None
},
"abrechnungsfall": {
"behandlungsart": "UNKNOWN",
"vertragsart": "UNKNOWN"
"behandlungsart": "0",
"vertragsart": "1"
}
}
# Run conversion with error handling
try:
    logger.info("Starting FHIR to PADneXt conversion")
    run(
        input_json=input_json,
        output_xml=output_xml,
        report_json=report_json,
        output_auf_xml=output_auf_xml,
        fhir_json_schema=fhir_schema,
        pad_xsd=pad_xsd,
        header_cfg=header_cfg,
        placeholder_cfg=placeholder_cfg,
        mapping_config=mapping_cfg,
        concept_maps=args.concept_maps,
        log_file=log_file,
        verbose=args.verbose,
    )
    logger.info("Conversion completed successfully")
    return 0
except FileNotFoundError as e:
    logger.error(f"File not found: {e}")
    print(f"ERROR: File not found: {e}")
    return 1
except json.JSONDecodeError as e:
    logger.error(f"Invalid JSON in input file: {e}")
    print(f"ERROR: Invalid JSON in input file: {e}")
    return 1
except PermissionError as e:
    logger.error(f"Permission denied: {e}")
    print(f"ERROR: Permission denied: {e}")
    return 1
except Exception as e:
    logger.exception(f"Unexpected error during conversion: {e}")
    print(f"ERROR: Unexpected error during conversion: {e}")
    print("See log file for detailed traceback")
    return 1
if __name__ == "__main__":
main()
main()