def __init__(self, spec, resource):
    """Build a code system wrapper, deciding whether an enum should be generated.

    No enum is generated for experimental systems, systems whose content is
    not "complete", or systems with more than 200 concepts.
    """
    assert 'content' in resource
    self.spec = spec
    self.definition = resource
    self.url = resource.get('url')

    # Prefer an explicit name mapping from settings, fall back to a sanitized name.
    namemap = self.spec.settings.enum_namemap
    if self.url in namemap:
        self.name = namemap[self.url]
    else:
        self.name = self.spec.safe_enum_name(resource.get('name'), ucfirst=True)

    self.codes = None
    self.generate_enum = False
    concepts = self.definition.get('concept', [])

    # Experimental code systems never produce an enum.
    if resource.get('experimental'):
        return

    self.generate_enum = 'complete' == resource['content']
    if not self.generate_enum:
        logger.debug(
            "Will not generate enum for CodeSystem \"{}\" whose content is {}"
            .format(self.url, resource['content']))
        return

    assert concepts, "Expecting at least one code for \"complete\" CodeSystem"
    if len(concepts) > 200:
        # Very large systems would produce unwieldy enums; skip them.
        self.generate_enum = False
        logger.info(
            "Will not generate enum for CodeSystem \"{}\" because it has > 200 ({}) concepts"
            .format(self.url, len(concepts)))
        return

    self.codes = self.parsed_codes(concepts)
def render(self):
    """Render every unit-test collection and copy configured unit-test files."""
    if self.spec.unit_tests is None:
        return

    # render all unit test collections
    for collection in self.spec.unit_tests:
        context = {
            'info': self.spec.info,
            'class': collection.klass,
            'tests': collection.tests,
        }
        pattern = collection.klass.name
        if self.settings.resource_modules_lowercase:
            pattern = pattern.lower()
        out_name = self.settings.tpl_unittest_target_ptrn.format(pattern)
        out_path = os.path.join(self.settings.tpl_unittest_target, out_name)
        self.do_render(context, self.settings.tpl_unittest_source, out_path)

    # copy unit test files, if any
    if self.settings.unittest_copyfiles is None:
        return
    for origfile in self.settings.unittest_copyfiles:
        utfile = os.path.join(*origfile.split('/'))
        if not os.path.exists(utfile):
            logger.warning("Unit test file \"{}\" configured in `unittest_copyfiles` does not exist"
                           .format(utfile))
            continue
        target = os.path.join(self.settings.tpl_unittest_target, os.path.basename(utfile))
        logger.info('Copying unittest file {} to {}'.format(os.path.basename(utfile), target))
        shutil.copyfile(utfile, target)
def read_profiles(self):
    """ Find all (JSON) profiles and instantiate into FHIRStructureDefinition. """
    # Collect StructureDefinition resources from the known bundle files.
    resources = []
    for filename in ['profiles-types.json', 'profiles-resources.json']:  # , 'profiles-others.json'
        for resource in self.read_bundle_resources(filename):
            if resource['resourceType'] == 'StructureDefinition':
                resources.append(resource)
            else:
                logger.debug('Not handling resource of type {}'.format(
                    resource['resourceType']))

    # create profile instances
    for resource in resources:
        profile = FHIRStructureDefinition(self, resource, self.settings.sort_resources)
        # Skip profiles whose URL matches a known-unsupported pattern.
        if any(re.search(pattern, profile.url) is not None
               for pattern in skip_because_unsupported):
            logger.info('Skipping "{}"'.format(resource['url']))
            continue
        if self.found_profile(profile):
            profile.process_profile()
def render(self):
    """Render one resource module per writable profile, then copy support files."""
    for profile in self.spec.writable_profiles():
        classes = profile.writable_classes()
        if self.settings.sort_resources:
            classes = sorted(classes, key=lambda c: c.name)

        if not classes:
            # manual profiles have no url and usually write no classes
            if profile.url is not None:
                logger.info('Profile "{}" returns zero writable classes, skipping'.format(profile.url))
            continue

        imports = profile.needed_external_classes()
        self.set_forwards(imports, classes)
        context = {
            'profile': profile,
            'info': self.spec.info,
            'imports': imports,
            'classes': classes,
        }

        if self.settings.resource_modules_lowercase:
            module_name = profile.targetname.lower()
        else:
            module_name = profile.targetname
        target_name = self.settings.tpl_resource_target_ptrn.format(module_name)
        target_path = os.path.join(self.settings.tpl_resource_target, target_name)
        self.do_render(context, self.settings.tpl_resource_source, target_path)

    self.copy_files(self.settings.tpl_resource_target)
def parse_profile(self, profile):
    """ Parse a JSON profile into a structure. """
    assert profile
    assert profile['resourceType'] == 'StructureDefinition'

    # parse structure
    self.url = profile.get('url')
    logger.info('Parsing profile "{}"'.format(profile.get('name')))
    self.structure = FHIRStructureDefinitionStructure(self, profile)
def render(self):
    """Render all value sets and code systems into a single target file.

    Does nothing (beyond an info log) when `tpl_codesystems_source` is unset.
    """
    if not self.settings.tpl_codesystems_source:
        logger.info("Not rendering value sets and code systems since `tpl_codesystems_source` is not set")
        return
    # Idiom fix: iterate the values view directly instead of unpacking items()
    # and discarding the key.
    systems = list(self.spec.codesystems.values())
    data = {
        'info': self.spec.info,
        'systems': sorted(systems, key=lambda x: x.name) if self.settings.sort_resources else systems,
    }
    target_name = self.settings.tpl_codesystems_target_name
    target_path = os.path.join(self.settings.tpl_resource_target, target_name)
    self.do_render(data, self.settings.tpl_codesystems_source, target_path)
def copy_files(self, target_dir):
    """ Copy base resources to the target location, according to settings.

    :param target_dir: Directory the manual profile files are copied into
    """
    for origpath, module, contains in self.settings.manual_profiles:
        if not origpath:
            continue
        # NOTE(review): rel_to_settings_path is invoked on self yet also
        # receives self.settings explicitly — confirm its signature.
        origpath = self.rel_to_settings_path(self.settings, self.clean_it(origpath))
        if os.path.exists(origpath):
            tgt = os.path.join(target_dir, os.path.basename(origpath))
            logger.info("Copying manual profiles in {} to {}".format(os.path.basename(origpath), tgt))
            os.makedirs(os.path.dirname(tgt), exist_ok=True)
            shutil.copyfile(origpath, tgt)
        else:
            # Bug fix: log message typo — "does not exits" -> "does not exist".
            logger.error(f"Manual profile {origpath} does not exist")
def read_valuesets(self):
    """Parse valuesets.json, registering ValueSet and CodeSystem resources."""
    for resource in self.read_bundle_resources('valuesets.json'):
        rtype = resource['resourceType']
        if rtype == 'ValueSet':
            assert 'url' in resource
            self.valuesets[resource['url']] = FHIRValueSet(self, resource)
        elif rtype == 'CodeSystem':
            assert 'url' in resource
            # Only code systems that actually carry concepts are useful.
            if 'content' in resource and 'concept' in resource:
                self.codesystems[resource['url']] = FHIRCodeSystem(self, resource)
            else:
                logger.warning("CodeSystem with no concepts: {}".format(
                    resource['url']))
    logger.info("Found {} ValueSets and {} CodeSystems".format(
        len(self.valuesets), len(self.codesystems)))
def content(self):
    """ Process the unit test file, determining class structure
    from the given classes dict.

    :returns: A tuple with (top-class-name, [test-dictionaries])
    """
    if self._content is None:
        # Lazily parse the JSON unit test file on first access.
        logger.info('Parsing unit test {}'.format(
            os.path.basename(self.filepath)))
        assert os.path.exists(self.filepath)
        with io.open(self.filepath, 'r', encoding='utf-8') as handle:
            parsed = json.load(handle)
        assert parsed
        self._content = parsed
    return self._content
def load(self):
    """ Makes sure all the files needed have been downloaded.

    :returns: The path to the directory with all our files.
    """
    if self.force_download:
        assert not self.force_cache

    # If we're not forcing anything, see whether our cached version matches
    # what is on the server
    version_path = os.path.join(self.cache, 'version.info')
    if not (self.force_download or self.force_cache):
        from fhirparser.fhirspec import FHIRVersionInfo
        cached_version = FHIRVersionInfo(None, self.cache) \
            if not self.force_cache and os.path.exists(version_path) else None
        self.download('version.info')
        server_version = FHIRVersionInfo(None, self.cache)
        # Bug fix: cached_version is None when version.info was not cached,
        # which previously crashed with AttributeError on `.version`. With no
        # cached version there is nothing to compare; missing files are
        # fetched individually below.
        if cached_version is not None and cached_version.version != server_version.version:
            logger.info(f"Server version ({server_version.version}) "
                        f"doesn't match cache ({cached_version.version}) - Reloading cache")
            self.force_download = True

    if os.path.isdir(self.cache) and self.force_download:
        import shutil
        shutil.rmtree(self.cache)
    if not os.path.isdir(self.cache):
        os.mkdir(self.cache)

    # check all files and download if missing
    uses_cache = False
    for local, remote in self.__class__.needs.items():
        path = os.path.join(self.cache, local)
        if not os.path.exists(path):
            if self.force_cache:
                raise Exception('Resource missing from cache: {}'.format(local))
            logger.info('Downloading {}'.format(remote))
            filename = self.download(remote)
            # unzip
            if '.zip' == filename[-4:]:
                logger.info('Extracting {}'.format(filename))
                self.expand(filename)
        else:
            if local == 'version.info':
                uses_cache = True
    if uses_cache:
        logger.info('Using cached resources, supply "-f" to re-download')
    return self.cache
def read_bundle_resources(self, filename):
    """ Return an array of the Bundle's entry's "resource" elements. """
    logger.info("Reading {}".format(filename))
    filepath = os.path.join(self.directory, filename)

    # Try downloading when the file is absent and a download is permitted.
    if not os.path.exists(filepath) and not self.loader.force_cache:
        self.loader.download(filename)
    if not os.path.exists(filepath):
        raise Exception('Resource missing from cache: {}'.format(filename))

    with io.open(filepath, encoding='utf-8') as handle:
        parsed = json.load(handle)

    # Validate that we really got a Bundle with entries.
    if 'resourceType' not in parsed:
        raise Exception(
            "Expecting \"resourceType\" to be present, but is not in {}"
            .format(filepath))
    if parsed['resourceType'] != 'Bundle':
        raise Exception("Can only process \"Bundle\" resources")
    if 'entry' not in parsed:
        raise Exception(
            "There are no entries in the Bundle at {}".format(
                filepath))

    return [entry['resource'] for entry in parsed['entry']]
def parsed_codes(self, codes, prefix=None): found = [] for c in codes: if re.match(r'\d', c['code'][:1]): self.generate_enum = False logger.info( "Will not generate enum for CodeSystem \"{}\" because at least one concept code starts with a number" .format(self.url)) return None cd = c['code'] name = '{}-{}'.format( prefix, cd) if prefix and not cd.startswith(prefix) else cd c['name'] = self.spec.safe_enum_name(cd) c['definition'] = c.get('definition') or c['name'] found.append(c) # nested concepts? if 'concept' in c: fnd = self.parsed_codes(c['concept']) if fnd is None: return None found.extend(fnd) return found
def do_render(self, data, template_name, target_path):
    """ Render the given data using a Jinja2 template,
    writing to the file at the target path.

    :param data: Template context dict
    :param template_name: The Jinja2 template to render, located in settings.tpl_base
    :param target_path: Output path
    """
    try:
        template = self.jinjaenv.get_template(os.path.basename(template_name))
    except TemplateNotFound:  # removed unused `as e` binding
        logger.error("Template \"{}\" not found in «{}», cannot render"
                     .format(template_name, self.settings.tpl_base))
        return

    if not target_path:
        raise Exception("No target filepath provided")
    dirpath = os.path.dirname(target_path)
    # Robustness fix: os.makedirs('') raises when target_path has no
    # directory component, and the previous isdir()+makedirs() pair was
    # racy; `exist_ok=True` handles both concerns.
    if dirpath:
        os.makedirs(dirpath, exist_ok=True)

    with io.open(target_path, 'w', encoding='utf-8') as handle:
        logger.info('Writing {}'.format(target_path))
        rendered = template.render(data)
        handle.write(rendered)
def generator(args: List[str]) -> Optional[int]:
    """Command-line entry point: load settings, fetch the FHIR spec, render output.

    :param args: Command-line argument list (without the program name)
    :returns: 0 on completion
    """
    cwd = os.getcwd()
    opts = genargs().parse_args(args)
    # NOTE(review): argparse.ArgumentError requires (argument, message); if
    # that is the ArgumentError imported here, this one-argument call would
    # itself raise TypeError — confirm which ArgumentError is in scope.
    if opts.force and opts.cached:
        raise ArgumentError('force and cached options cannot both be true')

    # Load the settings
    if os.path.isdir(opts.settings):
        opts.settings = os.path.join(opts.settings, 'settings.py')
    opts.settings_dir = os.path.abspath(os.path.dirname(opts.settings))
    logger.info(f"Loading settings from {opts.settings}")
    with open(opts.settings) as f:
        settings_py = f.read()
    settings = ModuleType('settings')
    # SECURITY NOTE: executing the settings file runs arbitrary code; the
    # settings path is trusted local configuration supplied by the user.
    exec(settings_py, settings.__dict__)
    settings.settings_dir = opts.settings_dir

    # Sort option -- default if not in the settings directory
    if opts.nosort:
        settings.sort_resources = False
    elif getattr(settings, "sort_resources", None) is None:
        settings.sort_resources = True
    if settings.sort_resources:
        logger.info("Sorting resource properties")
    else:
        logger.info("Resource properties are not sorted")

    # Command-line FHIR URL overrides the settings file.
    if opts.fhirurl:
        settings.specification_url = opts.fhirurl
    adjust_source_target_paths(settings, opts)
    logger.info(f"Specification: {settings.specification_url}")
    logger.info(f"Template directory: {os.path.relpath(settings.tpl_base, cwd)}")
    logger.info(f"Output directory: {os.path.relpath(settings.tpl_resource_target, cwd)}")
    if settings.write_unittests:
        logger.info(f"Unit test directory: {os.path.relpath(settings.tpl_unittest_target, cwd)}")
    logger.info(f"Cache directory: {opts.cachedir}")

    # Download (or reuse the cache of) the specification, then parse/write
    # unless the corresponding -only flags stop the pipeline early.
    loader = fhirloader.FHIRLoader(settings, opts.cachedir,
                                   force_download=opts.force, force_cache=opts.cached)
    spec_source = loader.load()
    if not opts.loadonly:
        spec = fhirspec.FHIRSpec(spec_source, settings, loader)
        if not opts.parseonly:
            spec.write()
    return 0