def get_bundle(snapshot=None, page_num=None):
    """Fetch one page (500 entries) of PractitionerRole resources from the FHIR server.

    NOTE(review): `get_bundle` is defined more than once in this file with
    different signatures/endpoints; the last definition wins at import time —
    confirm which one callers actually intend to use.

    :param snapshot: opaque server snapshot token passed through as `_snapshot`
    :param page_num: page index passed through as `_page`
    :return: the response body parsed into a fhirclient Bundle
    """
    query = {'_count': 500, '_snapshot': snapshot, '_page': page_num}
    #r = get(f'{server_path}/Practitioner', headers=headers, params=params)
    response = get(f'{server_path}/PractitionerRole', headers=headers, params=query)
    return B.Bundle(loads(response.text))
def get_bundle(snapshot=None, page_num=None):
    """Fetch one page (500 entries) of VerificationResult resources from the FHIR server.

    NOTE(review): this shadows the earlier `get_bundle` definition above —
    confirm the duplicate naming is intentional.

    :param snapshot: opaque server snapshot token passed through as `_snapshot`
    :param page_num: page index passed through as `_page`
    :return: the response body parsed into a fhirclient Bundle
    """
    query = {'_count': 500, '_snapshot': snapshot, '_page': page_num}
    response = get(f'{server_path}/VerificationResult', headers=headers, params=query)
    return B.Bundle(loads(response.text))
def split_bundle( file): #split into smaller 500 entry bundles and write and post for k in range(int(len(file.entry) / 500) + 1): nb = B.Bundle() nb.id = f"{file.id}-{k}" nb.type = "batch" nb.entry = [] for i in range(500): try: entry = B.BundleEntry(file.entry[k * 500 + i].as_json()) nb.entry.append(entry) except IndexError: break print(k * 500 + i) logging.info(f'posting bundle {file.id}...entries {k*500}-{k*500+i}') post_bundle(nb)
def main(f_type, id_list, source):
    """Build FHIR resources of kind *f_type*, record their ids, and write/post bundles.

    Dispatches on *f_type* to build a list of bundle entries from CSV data,
    GeoJSON data, or previously loaded bundles (module-global `bundles`), then
    either writes and posts a new batch bundle, or (for "addin" types listed in
    the module-global `addins`) serializes an existing bundle that was mutated
    in place.

    :param f_type: spinal-case resource kind selector, e.g. 'managing-org',
        'coverage', 'insuranceplan', 'hie-orgaffiliation', 'addin-org-role',
        'addin-pract-role', 'endpoint'
    :param id_list: dict of lists keyed by snakecase(f_type); this function
        appends (identifier, '{Type}/{id}', display-name) tuples to it in place
    :param source: key into rr.r_map giving the FHIR resource type name
    :return: None (side effects: mutates id_list and bundles, writes files,
        posts to the FHIR server)

    NOTE(review): relies on many module globals — bundles, addins,
    addin_networks, states, zips lists, server_path, out_path, in_path,
    timestamp, resource_keys — confirm they are initialized before calling.
    """
    f_Type = rr.r_map[source]  # FHIR resource type name, e.g. 'Organization'
    entries = []  # bundle-entry dicts accumulated for the batch bundle
    if f_type == 'managing-org':
        for item in get_csv(in_path, 'managing_orgs_data'):  # get sample data from csv
            npi = item['id']
            f_id = get_f_id(f_type, npi)
            logging.info(f'create resource id = {f_id}')
            example = f_templ.managing_org(item, f_id, f_type, npi)  # create dict using fstring template
            example = scrub_dict(example)  # scrub out nulls
            id_list[snakecase(f_type)].append(
                (npi, f'{f_Type}/{f_id}', item['name']))
            entries.append(
                f_templ.entries_templ(server_path, example, f_Type, f_id))  # create dict using fstring template
    elif f_type == 'coverage':
        for state in states:
            f_id = get_f_id(f_type, spinalcase(state.lower()), source)
            attachment = {
                'contentType': 'application/vnd.geo+json',  # Mime type of the content, with charset etc.
                'data': b64.geojson_b64[snakecase(state.lower())].decode('ascii'),  # Data inline, base64ed
                'title': f'GeoJSON for {state}',  # Label to display in place of the data
                'creation': timestamp  # Date attachment was first created
            }
            logging.info(f'create resource id = {f_id}')
            example = f_templ.coverage(name=state, attachment=attachment)  # create dict using fstring template
            id_list[snakecase(f_type)].append(
                (state, f'{f_Type}/{f_id}', state))
            entries.append(
                f_templ.entries_templ(server_path, example, f_Type, f_id))  # create dict using fstring template
    elif f_type == 'insuranceplan':
        for item in get_csv(in_path, 'managing_orgs_data'):  # get sample data from csv
            npi = item['plan_id']
            f_id = get_f_id(f_type, npi)
            if item['is_plan'] == "TRUE":  # only rows flagged as plans become InsurancePlan resources
                logging.info(f'create resource id = {f_id}')
                example = f_templ.insuranceplan(item)  # create dict using fstring template
                example = scrub_dict(example)  # scrub out nulls
                id_list[snakecase(f_type)].append(
                    (item['plan_id'], f'{f_Type}/{f_id}', item['plan_name']))
                entries.append(
                    f_templ.entries_templ(server_path, example, f_Type, f_id))  # create dict using fstring template
    elif f_type == 'hie-orgaffiliation':
        for entry in bundles['orgs'].entry:
            z = get_zip(entry.resource.address[0].postalCode)
            st = entry.resource.address[0].state
            # an org may belong to several HIEs, selected by zip/state
            hie_list = []
            if z in western_ma_zips:
                hie_list.append(('wma', 'Western Massachusetts HIE'))
            if st == 'RI':
                hie_list.append(('ri', 'Rhode Island HIE'))
            if z in hartford_zips:
                hie_list.append(('hct', 'Hartford Connecticut HIE'))
            for hie_item in hie_list:
                prefix = f'hie-{hie_item[0]}'
                parent_org = next(
                    po for po in bundles['managing_orgs'].entry
                    if po.resource.name == hie_item[1])  # get parent org; raises StopIteration if absent
                logging.info(
                    f'{hie_item[1]} orgaffiliation for org resource id = {entry.resource.id}'
                )
                example = f_templ.hie_orgaffiliation(
                    org=entry.resource,
                    parent_org=parent_org.resource,
                    prefix=prefix)  # create dict using fstring template
                oa = OA.OrganizationAffiliation(example)  # instantiate as orgaffiliation
                id_list[snakecase(f_type)].append(
                    (oa.identifier[0].value, f'{f_Type}/{oa.id}',
                     oa.organization.display))
                entries.append(
                    f_templ.entries_templ(server_path, example, f_Type, oa.id))  # create dict using fstring template
    elif f_type == 'addin-org-role':
        for n_entry in bundles['networks'].entry:
            network = n_entry.resource
            if network.name in addin_networks.keys():
                for o_entry in bundles['orgs'].entry:
                    org = o_entry.resource
                    z = get_zip(org.address[0].postalCode)
                    # match by state, or by Hartford-area zip for the special region
                    if (org.address[0].state == addin_networks[network.name][1]
                            or (addin_networks[network.name][1] == "Greater Hartford Area"
                                and z in hartford_zips)):
                        # add org-affil to orgaffil bundle
                        logging.info(
                            f'orgaffiliation for org resource id = {network.name}'
                        )
                        example = f_templ.hie_orgaffiliation(
                            org=org,
                            parent_org=network,
                            prefix=addin_networks[network.name][0])  # create dict using fstring template
                        oa = OA.OrganizationAffiliation(example)  # instantiate as orgaffiliation
                        be = B.BundleEntry(
                            f_templ.entries_templ(server_path, example, f_Type, oa.id))
                        bundles['org_roles'].entry.append(be)  # create list of classes to append to existing bundle
        # record ids for everything now in the (mutated) org_roles bundle
        for oa_entry in bundles['org_roles'].entry:
            logging.info(oa_entry.resource.id)
            logging.info(oa_entry.resource.organization.display)
            id_list[snakecase(f_type)].append(
                (oa_entry.resource.id, f'{f_Type}/{oa_entry.resource.id}',
                 oa_entry.resource.organization.display))
    elif f_type == 'addin-pract-role':
        # practitioners are split across several bundles
        p_bundles = [
            v for k, v in bundles.items()
            if k in ['pract0', 'pract1', 'pract2', 'pract3']
        ]
        for n_entry in bundles['networks'].entry:
            network = n_entry.resource
            if network.name in addin_networks.keys():
                for p_bundle in p_bundles:
                    for p_entry in p_bundle.entry:
                        pract = p_entry.resource
                        z = get_zip(pract.address[0].postalCode)
                        if (pract.address[0].state == addin_networks[network.name][1]
                                or (addin_networks[network.name][1] == "Greater Hartford Area"
                                    and z in hartford_zips)):
                            # add pract-affil to practrole bundle
                            # logging.info(f'practitionerrole for pract = {pract.name[0].text}, id = {pract.identifier[0].value} network = {network.name} identifier {network.identifier[0].value}')
                            example = f_templ.network_member_practrole(
                                member=pract,
                                network=network,
                                prefix=addin_networks[network.name][0])  # create dict using fstring template
                            pr = PR.PractitionerRole(example)  # instantiate as practitionerrole
                            # NOTE(review): `be` is built but the append below is
                            # commented out, so it is currently unused
                            be = B.BundleEntry(
                                f_templ.entries_templ(server_path, example, f_Type, pr.id))
                            # bundles['pract_roles'].entry.append(be) # create list of classes to append to existing bundle
                            id_list[snakecase(f_type)].append(
                                (pr.identifier[0].value, f'{f_Type}/{pr.id}',
                                 pr.practitioner.display))
                            entries.append(
                                f_templ.entries_templ(server_path, example, f_Type, pr.id))  # create dict using fstring template
    elif f_type == 'endpoint':
        for k, v in bundles.items():
            logging.info(f'for {k} create endpoints...')
            for entry in v.entry:
                try:
                    example = f_templ.endpoint(r=entry.resource)  # create dict using fstring template
                    #logging.info(f'example = \n{example}')
                    if example:  # template may return a falsy value for resources without endpoints
                        ep = EP.Endpoint(example)  # instantiate as endpoint
                        id_list[snakecase(f_type)].append(
                            (ep.id, f'{f_Type}/{ep.id}', ep.name))
                        entries.append(
                            f_templ.entries_templ(server_path, example, f_Type, ep.id))  # create dict using fstring template
                except AttributeError:
                    # resources lacking the attributes the template reads are skipped
                    pass
    if f_type not in addins.keys():
        # normal path: build a fresh batch bundle from the collected entries
        batch_bundle = f_templ.batch_bundle_template(f_type, entries)  # create dict using fstring template
        batch_json = dumps(batch_bundle, indent=3, ensure_ascii=False)
        #logging.info(f'{f_type} batch_bundle as json') # as dict
        fhir_file = B.Bundle(batch_bundle)  # validate file
        #logging.info(f'bundle looks like = {json_file}')
        write_bundle(out_path, f_type, batch_json, source, 'json')  # save to file
        write_resource_csv(spinalcase(f_type), resource_keys[snakecase(f_type)])
        # TODO convert to xml
        xml_bundle = get_xml(convert_server, batch_json, f_type)  #use fhir server to convert bundle xml to json
        write_bundle(out_path, f_type, xml_bundle, source, 'xml')  #save to file
        post_bundle(server_path, batch_json)  #upload to fhir server
    else:
        # addin path: the target bundle was mutated in place above; just serialize it
        batch_json = dumps(bundles[addins[f_type][1]].as_json(),
                           indent=3,
                           ensure_ascii=False)  # convert to json byte
        write_bundle(out_path, addins[f_type][0], batch_json,
                     addins[f_type][0], 'json')  # save to file
        # TODO convert to xml
        #xml_bundle = get_xml(convert_server,batch_json,addins[f_type][0])#use fhir server to convert bundle xml to json
        #write_bundle(out_path,addins[f_type][0],xml_bundle,addins[f_type][0],'xml') #save to file
        # post_bundle(server_path,batch_json) #upload to fhir server
        write_resource_csv(spinalcase(addins[f_type][0]),
                           resource_keys[snakecase(f_type)])
    return
def get_bundle(in_path, in_file):  # as py fhir bundle class
    """Read `{in_path}/{in_file}.json` from disk and return it as a fhirclient Bundle.

    NOTE(review): this is the third definition of `get_bundle` in this file and
    shadows the earlier HTTP-fetching versions — confirm that is intended.
    """
    json_path = f'{in_path}/{in_file}.json'
    with open(json_path) as fp:
        logging.info(f'reading file = {in_file}')
        return B.Bundle(load(fp))