def run_med_to_csv(file: str = None, out_file: str = None, params: dict = None):
    '''
    Only supports outputting bundles, not individual meds.
    :param file: the target input file to be output to CSV, or ALL
    :param out_file: basename to be used for file outputs; if None, a default name is used
    :param params: customization parameters for the CSV output
    :return:
    '''
    smoresLog.debug('Preparing to generate CSV')
    # Maximum number of medications (not rows) to be saved per output file
    incr = int(util.read_config_value('OUTPUT_CONF')['file_size_max'])
    csv_files = {}
    csv_outputs = params.keys() if params is not None else ''
    if 'detail' in csv_outputs:
        for _d in params['detail'].values():
            base_filename, ext = process_filename(_d['type'])
            # Key by the detail type string so the dictionary key stays hashable
            csv_files[_d['type']] = {'file': base_filename, 'ext': ext, 'detail': _d}
    if file is not None:
        kits = MedKit.get_medkit(file)
        if type(kits) is dict:
            for file, kit in kits.items():
                _file, ext = process_filename(out_file=out_file, info_type=kit.file_name.split('.')[0])
                save_csv_bundle(kit.m_dict, _file, incr=incr, ext=ext)
        else:
            _file, ext = process_filename(out_file=out_file, info_type=kits.file_name.split('.')[0])
            save_csv_bundle(kits.m_dict, _file, incr=incr, ext=ext)
    else:
        smores_error('#Kx001.2')
    return
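# Illustrative sketch only: the shape of `params` that the 'detail' branch of
# run_med_to_csv above expects -- a 'detail' mapping whose values are dicts
# carrying a 'type' key passed to process_filename(). The entry names below
# ('rxnorm', 'ndc') are hypothetical examples, not values required by the function.
_example_csv_params = {
    'detail': {
        'rxnorm': {'type': 'rxnorm'},
        'ndc': {'type': 'ndc'},
    }
}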
def get_bundle_json(in_dict: md.MedicationDictionary, json_file: str):
    incr = int(util.read_config_value('OUTPUT_CONF')['file_size_max'])
    _total = in_dict.get_med_count()
    _iters = int(math.ceil(_total / incr)) if incr is not None else 1
    _med_list = list(in_dict.med_list.values())
    fhir_construct = {
        'resourceType': 'Bundle',
        'date': datetime.today().strftime('%Y-%m-%d %H:%M:%S%z'),
        'total': 0,
        'entry': []
    }
    if _iters > 1:
        _count = 0
        print('Medications will be saved across {0} files.'.format(_iters))
        for i in trange(_iters, desc='File Number', position=0):
            bundle = fhir_construct.copy()
            list_end = _count + incr if (_count + incr) < _total else _total
            _b = [med.get_fhir() for med in _med_list[_count:list_end]]
            bundle['entry'] = _b
            bundle['total'] = list_end - _count
            write_file(output_file=json_file, iter=i + 1, data=bundle, ext='json')
            _count += incr
    else:
        bundle = fhir_construct
        for med in _med_list:
            bundle['entry'].append(med.get_fhir())
            bundle['total'] += 1
        write_file(output_file=json_file, data=bundle, ext='json')
    return
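# A small, self-contained sketch (illustrative only, not part of the module API)
# of the batching arithmetic used in get_bundle_json above: `incr`, the per-file
# medication cap from OUTPUT_CONF['file_size_max'], splits `_total` medications
# into per-file index ranges.
import math

def _example_batch_ranges(total, incr):
    """Yield (start, end) slice bounds covering `total` items in chunks of at most `incr`."""
    iters = int(math.ceil(total / incr)) if incr else 1
    count = 0
    for _ in range(iters):
        end = count + incr if (count + incr) < total else total
        yield count, end
        count += incr

# e.g. 2,500 medications with a 1,000-record cap are written to three files:
# list(_example_batch_ranges(2500, 1000)) -> [(0, 1000), (1000, 2000), (2000, 2500)]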
def line_read(medkit, input, last_id=None, skip1=True):
    _n, line = input
    if _n == 0 and skip1:
        return False, False, None, None
    else:
        try:
            config_i_keys = util.read_config_value('INFILE_KEYS')
            i_local_key = config_i_keys['local_id_col_id']
            i_code_key = config_i_keys['code_col_id']
            i_code_type_key = config_i_keys['code_type_col_id']
            i_name_key = config_i_keys['local_name_col_id']
            i_code_name = config_i_keys['code_name_col_id']
        except KeyError as e:
            print(e)
            return None, "#Cx001.6", None, None

        is_dup = False
        has_err = {}
        cui_type = None
        local_id = line[i_local_key]
        if local_id == last_id or m.med_exists(local_id, medkit.file_name):
            temp_med = m.get_med_by_id(local_id, medkit.file_name)
            is_dup = True
        else:
            temp_med = m.LocalMed(input_key=local_id, source=medkit.file_name)

        if len(line[i_code_key]) != 0:
            cui_type = line[i_code_type_key].upper()
            # Prevent an automatic failure in case RXCUI is supplied instead of RXNORM
            cui_type = 'RXNORM' if cui_type == 'RXCUI' else cui_type
            if util.validate_id(line[i_code_key], cui_type):
                # Support for inclusion of a local name alongside local code IDs
                if cui_type == 'LOCAL' and not temp_med.isNameSet():
                    temp_med.set_name(line[i_code_name])
                elif cui_type in util.OPTIONS_CUI_TYPES:
                    if i_code_name in line.keys() and len(line[i_code_name]) > 0:
                        _e = temp_med.add_cui(line[i_code_key], cui_type, line[i_code_name])
                    else:
                        _e = temp_med.add_cui(line[i_code_key], cui_type)
                    if _e > 0:
                        smores_error('#Ax000.1', line[i_local_key])
                        has_err[line[i_code_key]] = "#Ax000.1"
            else:
                smores_error('#Ax000.3', line[i_local_key])
                has_err[line[i_code_key]] = "#Ax000.3"

        if i_name_key in line.keys() and len(line[i_name_key]) > 0:
            clean_rx = r'\\["n]'
            temp_med.set_name(re.sub(clean_rx, '', line[i_name_key]))

        return local_id, is_dup, has_err, cui_type
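# Illustrative only: the kind of (row_number, row_dict) tuple that line_read
# consumes, e.g. from enumerate() over a csv.DictReader. The column names below
# are hypothetical stand-ins for the values configured under INFILE_KEYS
# ('local_id_col_id', 'code_col_id', etc.); the medication values are examples.
_example_line_input = (
    1,  # _n: row index; row 0 is skipped when skip1=True (typically the header row)
    {
        'LOCAL_ID': 'MED-0001',
        'CODE': '197361',
        'CODE_TYPE': 'RXNORM',
        'LOCAL_NAME': 'Amlodipine 5 MG Oral Tablet',
        'CODE_NAME': 'amlodipine 5 MG Oral Tablet',
    },
)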
def run_dict_to_csv(src_dict: str, out_file: str = None, params=None):
    """
    Retrieves the corresponding MedicationDictionary for an input src and saves it to CSV.
    :param src_dict: Name of the src to be saved. Identifies the medication "bundle" to be saved
    :param out_file: output filename specified by the user. A default of None results in a generated default name
    :param params: Additional output parameters to be applied in building the CSV
    :return: Function call to save the bundle of medications
    """
    incr = int(util.read_config_value('OUTPUT_CONF')['file_size_max'])
    dict_to_save = md.get_med_dict_by_src(src_dict)
    _file, ext = process_filename(out_file=out_file, info_type=src_dict)
    return save_csv_bundle(dict_to_save, _file, incr, params, ext=ext)
def __init__(self, in_file, file_delim=','):
    self.path = in_file
    self.file_name = util.get_filename(in_file)
    self.delim = file_delim
    self.file_lines = self.get_file_lines()
    self.c_records = 0
    self.records = {}
    self.headers = util.read_config_value('INFILE_KEYS')
    self.trackers = {}
    self.cui_types = []
    self.m_dict = md.MedicationDictionary(self.file_name, link=self)
    MedKit.med_kits[self.file_name] = self
    m.Medication.med_id_list[self.file_name] = {}
    self.med_list = m.Medication.med_id_list[self.file_name]
def get_headers(self, delim=','):
    try:
        # Read only the header row; use a context manager so the handle is closed
        with open(self.path, "r+") as f:
            data = f.readline()
        keys = list(util.read_config_value('INFILE_KEYS').values())
        header_map = {}
        _d = data.split(delim)
        for key in keys:
            for i in range(len(_d)):
                if _d[i].replace('\n', '') == key:
                    header_map[key] = i
        return header_map
    except FileNotFoundError:
        smores_error('#Cx001.1')
        return None
    except PermissionError:
        smores_error('#Cx001.2')
        return None
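# Illustrative only: given a delimited header row and the configured column
# names, get_headers above builds a {column_name: column_index} map. The names
# below are hypothetical stand-ins for the values stored under INFILE_KEYS.
_example_header_row = 'LOCAL_ID,CODE,CODE_TYPE,LOCAL_NAME,CODE_NAME\n'
_example_expected = ['LOCAL_ID', 'CODE', 'CODE_TYPE', 'LOCAL_NAME', 'CODE_NAME']
_example_map = {
    col.replace('\n', ''): i
    for i, col in enumerate(_example_header_row.split(','))
    if col.replace('\n', '') in _example_expected
}
# _example_map -> {'LOCAL_ID': 0, 'CODE': 1, 'CODE_TYPE': 2, 'LOCAL_NAME': 3, 'CODE_NAME': 4}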
def run_dict_to_csv(dict: str, out_file: str = None, params=None):
    csv_details = {'cui': {'src': dict}}
    incr = int(util.read_config_value('OUTPUT_CONF')['file_size_max'])
    dict_to_save = md.get_med_dict_by_src(dict)
    _file, ext = process_filename(out_file=out_file, info_type=dict)
    return save_csv_bundle(dict_to_save, _file, incr, csv_details, ext=ext)
def line_read(medkit, input, last_id=None, skip1=True):
    _n, line = input
    if _n == 0 and skip1:
        return False, False, None, None
    else:
        try:
            config_i_keys = util.read_config_value('INFILE_KEYS')
            i_local_key = config_i_keys['local_id_col_id']
            i_code_key = config_i_keys['code_col_id']
            i_code_type_key = config_i_keys['code_type_col_id']
            i_name_key = config_i_keys['local_name_col_id']
            i_code_name = config_i_keys['code_name_col_id']
        except KeyError as e:
            print(e)
            return None, "#Cx001.6", None, None

        is_dup = False
        has_err = {}
        cui_type = None
        local_id = line[i_local_key]
        if local_id == last_id or m.med_exists(local_id, medkit.file_name):
            temp_med = m.get_med_by_id(local_id, medkit.file_name)
            is_dup = True
        else:
            temp_med = m.LocalMed(input_key=local_id, source=medkit.file_name)

        if len(line[i_code_key]) != 0:
            if util.validate_id(line[i_code_key], line[i_code_type_key].upper()):
                cui_type = line[i_code_type_key].upper()
                # Support for inclusion of a local name alongside local code IDs
                if cui_type == 'LOCAL' and not temp_med.isNameSet():
                    temp_med.set_name(line[i_code_name])
                elif cui_type in ['RXCUI', 'RXNORM']:
                    rxcui = m.get_rxcui(line[i_code_key])
                    _e = temp_med.add_cui(rxcui, 'RXNORM')
                    if _e > 0:
                        smores_error('#Ax000.1', line[i_local_key])
                        has_err[line[i_code_key]] = "#Ax000.1"
                elif cui_type == 'NDC':
                    if i_code_name in line.keys() and len(line[i_code_name]) > 0:
                        _e = temp_med.add_cui(line[i_code_key], cui_type, line[i_code_name])
                    else:
                        _e = temp_med.add_cui(line[i_code_key], cui_type)
                    if _e > 0:
                        smores_error('#Ax000.1', line[i_local_key])
                        has_err[line[i_code_key]] = "#Ax000.1"
                elif cui_type == 'UMLS':
                    smoresLog.debug('Code Type Check: UMLS')
                    # TODO: To be supported in a later version
                    pass
                elif cui_type == 'CPT':
                    smoresLog.debug('Code Type Check: CPT')
                    # TODO: To be supported in a later version
                    pass
                elif cui_type == 'SNOMED':
                    smoresLog.debug('Code Type Check: SNOMED')
                    # TODO: To be supported in a later version
                    pass
                else:
                    smores_error('#Ax000.3', line[i_local_key])
                    has_err[line[i_code_key]] = "#Ax000.3"

        if i_name_key in line.keys() and len(line[i_name_key]) > 0:
            clean_rx = r'\\["n]'
            temp_med.set_name(re.sub(clean_rx, '', line[i_name_key]))

        return local_id, is_dup, has_err, cui_type