def load_demographic_data(redcapurl, redcapkeys):
    """Return a demographics DataFrame indexed by SUBJECT.

    Only the DepMIND2 project is handled; when its API key is absent
    from redcapkeys an empty DataFrame is returned.
    """
    demographics = pd.DataFrame()

    if 'DepMIND2' in redcapkeys:
        print('loading DepMIND2 demographic data')

        # Pull the screening-event records from REDCap as a DataFrame
        project = redcap.Project(redcapurl, redcapkeys['DepMIND2'])
        demographics = project.export_records(
            raw_or_label='label',
            format='df',
            fields=['record_id', 'subject_number', 'age', 'sex_xcount'],
            events=['screening_arm_1'])

        # Dashboard-friendly column names
        demographics = demographics.rename(columns={
            'subject_number': 'SUBJECT',
            'age': 'AGE',
            'sex_xcount': 'SEX',
        })

        # Index by subject id, formatted as a plain integer string
        demographics = demographics.dropna(subset=['SUBJECT'])
        demographics['SUBJECT'] = demographics['SUBJECT'].astype(int).astype(str)
        demographics = demographics.set_index('SUBJECT', verify_integrity=True)

        # All DM2 are depressed
        demographics['DEPRESS'] = '1'

    return demographics
def load_redcap(self):
    """Load session records (raw values) from REDCap into self.rc_list.

    Adds the subject id to each session record when subjects live on a
    different event, replaces the project field's raw value with its
    label, and optionally filters down to self.arc_proj_filter.
    """
    rc = redcap.Project(self.api_url, self.api_key)
    rc_sess_fields = self.rc_fields + [rc.def_field]

    if self.rc_sess_event is None:
        sess_event_list = None
    else:
        sess_event_list = self.rc_sess_event.split(',')

    if self.rc_subj_event is None:
        subj_event_list = None
    else:
        subj_event_list = [self.rc_subj_event]

    # Load sess events
    self.rc_list = rc.export_records(
        fields=rc_sess_fields,
        raw_or_label='raw',
        events=sess_event_list)

    # Only keep records with a vuiis number
    self.rc_list = [r for r in self.rc_list if r[self.rc_vuiis_field]]

    if self.rc_subj_event != self.rc_sess_event:
        # Subject ids live on a separate event -- load and merge them in
        rc_subj_fields = [rc.def_field, self.rc_subj_field]
        rc_subj_list = rc.export_records(
            fields=rc_subj_fields,
            raw_or_label='raw',
            events=subj_event_list)

        id2subj = dict([
            (x[rc.def_field], x[self.rc_subj_field])
            for x in rc_subj_list])

        # Add subj id to sess events (a KeyError here means a session
        # record has no matching subject record, as before)
        for r in self.rc_list:
            subj_id = r[rc.def_field]
            r[self.rc_subj_field] = id2subj[subj_id]

    # Load the labels from REDCap
    rc_list_labels = rc.export_records(
        fields=rc_sess_fields,
        raw_or_label='label',
        events=sess_event_list)

    # Index labels by (record id, session) so the raw->label merge is
    # O(n) instead of the previous O(n^2) nested scan; as before, when
    # several label records match, the last one wins, and records with
    # no matching label are left unchanged.
    label_lookup = {
        (r[rc.def_field], r[self.rc_sess_field]): r[self.rc_proj_field]
        for r in rc_list_labels}

    # Replace project raw with project label
    for r_raw in self.rc_list:
        key = (r_raw[rc.def_field], r_raw[self.rc_sess_field])
        if key in label_lookup:
            r_raw[self.rc_proj_field] = label_lookup[key]

    # Filter out records not for this project
    if self.arc_proj_filter:
        self.rc_list = [
            r for r in self.rc_list
            if r[self.rc_proj_field] == self.arc_proj_filter]
def connect(self, evt):
    """
    Establishes a connection with the REDCap API and stores a reference
    to the project object in the parent window.

    :param evt: Event which triggered the connection
    """
    api_key = self.api_key_text.GetValue()
    redcap_url = self.redcap_url_text.GetValue()
    try:
        self.parent.project = redcap.Project(redcap_url, api_key)
        self.parent.report_window.fill_options()
        self.parent.report_window.Enable()
        self.parent.sizer.Fit(self.parent)
        w, h = self.parent.report_window.GetSize()
        # Renamed local from 'max' to avoid shadowing the builtin
        client_area = wx.Display().GetClientArea()
        self.parent.SetSize(min(w + 100, client_area.Width),
                            min(h + 100, client_area.Height))
        # Enable scrolling after content is filled
        fontsz = wx.SystemSettings.GetFont(wx.SYS_SYSTEM_FONT).GetPixelSize()
        self.parent.report_window.SetScrollRate(fontsz.x, fontsz.y)
        self.parent.report_window.EnableScrolling(True, True)
        # Fire a size event to fix disappearing widgets on scroll
        wx.PostEvent(self.parent.GetEventHandler(),
                     wx.PyCommandEvent(wx.EVT_SIZE.typeId,
                                       self.parent.GetId()))
        self.parent.Show()
        self.Destroy()
    except redcap.RedcapError as e:
        self.error_str.SetLabelText(str(e))
        self.error_str.Show()
        self.sizer.Fit(self)
def send_to_redcap(user, my_project, my_id, issue):
    """Push a single issue record to the REDCap issues project and
    report the outcome via a message box."""
    import redcap
    import time

    print('connecting to redcap....')

    # connect to project with APIkey
    # NOTE(review): credentials are hard-coded here -- consider moving
    # them into configuration.
    redcap_url = 'http://redcapint.tsi.wfubmc.edu/redcap_int/api/'
    my_token = '2B72CCD23A4D1E841548570DF7D44CF5'
    project = redcap.Project(redcap_url, my_token)

    # The timestamp doubles as the unique record id
    myid = time.strftime('%d/%m/%y-%H:%M:%S')

    # note that RedCap wants the index for the projects and users, not
    # the names, so use a dictionary to get index
    record = {
        'record_id': myid,
        'user': USERS[user],
        'projectname': PROJECTS[my_project],
        'subjectid': my_id,
        'issue': issue,
    }
    response = project.import_records([record])

    if response['count'] == 1:
        print('Upload successful')
        messagebox.showinfo("Redcap Upload", "Upload successful!")
    else:
        print('Upload failed - now you need to whine to tech support...')
        messagebox.showerror("Redcap Upload",
                             "Upload failed - notify tech support.")
def Data(self):
    """Replace all records in the REDCap project.

    Deletes every existing record, then imports the contents of the
    local 'data.xlsx' spreadsheet (Sheet1, resolved relative to the
    current working directory).
    """
    api_key = self.redcap_api_token
    api_url = self.redcap_url
    project = redcap.Project(api_url, api_key)

    # Wipe any existing records first
    data = project.export_records()
    if len(data) > 0:
        print('deleting ' + str(len(data)) + ' records')
        dfData = pd.DataFrame.from_dict(data)
        records = dfData['record_id'].to_list()
        project.delete_records(records)
    else:
        print('No existing records to delete')

    # Removed the unused hard-coded 'thisPath' local; the spreadsheet is
    # still read from the current working directory as before.
    print('Getting new data')
    excelData = pd.read_excel('data.xlsx', sheet_name='Sheet1')

    print('Importing data')
    response = project.import_records(excelData)
    print(response)
def load_madrs_data(redcapurl, redcapkeys):
    """Return MADRS total scores for DepMIND2 as a DataFrame with
    SUBJECT, SESSTYPE and ma_tot columns (empty when the project key
    is not available)."""
    madrs = pd.DataFrame()

    if 'DepMIND2' in redcapkeys:
        print('loading DepMIND2 MADRS data')

        # Map each REDCap event to the xnat session type it represents
        event2sess = {
            'week_0baseline_arm_1': 'Baseline',
            'week_6_arm_1': 'Week6',
            'week_12_arm_1': 'Week12',
            'week_3_arm_1': 'Week3',
            'week_9_arm_1': 'Week9',
        }

        # Connect to the redcap project
        project = redcap.Project(redcapurl, redcapkeys['DepMIND2'])

        # Build a record_id -> secondary id (subject number) table
        def_field = project.def_field
        sec_field = project.export_project_info()['secondary_unique_field']
        subjects = project.export_records(fields=[def_field, sec_field],
                                          format='df')
        subjects.dropna(subset=[sec_field], inplace=True)
        subjects[sec_field] = subjects[sec_field].astype(int).astype(str)
        subjects = subjects.reset_index()
        subjects = subjects.drop('redcap_event_name', axis=1)

        # Pull the raw MADRS totals for the mapped events
        madrs = project.export_records(raw_or_label='raw',
                                       format='df',
                                       fields=['record_id', 'ma_tot'],
                                       events=event2sess.keys())

        # Attach the subject number to each MADRS row
        madrs = madrs.reset_index()
        madrs = pd.merge(madrs, subjects, how='left',
                         left_on='record_id', right_on='record_id',
                         sort=True)

        # Rename for consistency with other studies
        madrs.rename(columns={'subject_number': 'SUBJECT'}, inplace=True)

        # Translate events to session types and drop incomplete rows
        madrs['SESSTYPE'] = madrs['redcap_event_name'].map(event2sess)
        madrs = madrs.drop('redcap_event_name', axis=1)
        madrs = madrs.dropna()

        # Force int format
        madrs['ma_tot'] = madrs['ma_tot'].astype(int)
        madrs = madrs.sort_values('SUBJECT')

    return madrs
def sendToRedcap(user, myproj, issue):
    """Upload a single issue record to the REDCap issues project.

    :param user: key into the module-level USERS dictionary
    :param myproj: key into the module-level PROJECTS dictionary
    :param issue: free-text issue description
    """
    import redcap
    import time
    print('connecting to redcap....')
    # connect to project with APIkey
    # NOTE(review): the API token is hard-coded; consider moving it into
    # configuration.
    redCapURL = 'http://redcapint.tsi.wfubmc.edu/redcap_int/api/'
    myToken = '2B72CCD23A4D1E841548570DF7D44CF5'
    project = redcap.Project(redCapURL, myToken)
    # fieldnames are: projectname, date, issue
    myid = time.strftime('%d/%m/%y-%H:%M:%S')
    print('field_names: ', project.field_names)
    print('record_id: ', myid)
    # Fixed: this print statement was garbled (invalid syntax) in the
    # original source.
    print('user: ', user, 'project:', myproj)
    print('issue: ', issue)
    # note that RedCap wants the index for the projects and users, not
    # the names, so use a dictionary
    to_import = [{
        'record_id': myid,
        'user': USERS[user],
        'projectname': PROJECTS[myproj],
        'issue': issue
    }]
    response = project.import_records(to_import)
    print('Response = ', response)
def create_from_request(request):
    """Create a session record from a REDCap data entry trigger request.

    Validates the trigger payload, fetches the completed record from the
    REDCap server, attaches it to the matching session, and kicks off
    scan import/download monitoring.

    :raises RedcapException: on missing keys, missing/duplicate records,
        or failure to add the record to the session.
    """
    try:
        record = request.form['record']
        project = request.form['project_id']
        url = request.form['redcap_url']
        instrument = request.form['instrument']
        version = re.search('redcap_v(.*)/index',
                            request.form['project_url']).group(1)
        completed = int(request.form[instrument + '_complete'])
    except KeyError:
        raise RedcapException('Redcap data entry trigger request missing a '
                              'required key. Found keys: {}'.format(
                                  list(request.form.keys())))

    if completed != 2:
        logger.info("Record {} not completed. Ignoring".format(record))
        return

    rc = REDCAP.Project(url + 'api/', current_app.config['REDCAP_TOKEN'])
    server_record = rc.export_records([record])

    # Fixed: was 'len(server_record) < 0', which can never be true, so a
    # missing record was silently treated as found.
    if len(server_record) == 0:
        raise RedcapException('Record {} not found on redcap server {}'.format(
            record, url))
    elif len(server_record) > 1:
        raise RedcapException('Found {} records matching {} on redcap server '
                              '{}'.format(len(server_record), record, url))

    server_record = server_record[0]
    try:
        date = server_record['date']
        comment = server_record['cmts']
        redcap_user = server_record['ra_id']
        session_name = server_record['par_id']
    except KeyError:
        raise RedcapException('Redcap record {} from server {} missing a '
                              'required field. Found keys: {}'.format(
                                  record, list(server_record.keys())))

    session = set_session(session_name)

    try:
        new_record = session.add_redcap(record, project, url, instrument,
                                        date, version, redcap_user, comment)
    except Exception as e:
        raise RedcapException("Failed adding record {} from project {} on "
                              "server {}. Reason: {}".format(
                                  record, project, url, e))

    monitor_scan_import(session)

    study = session.get_study()
    site_settings = study.sites[session.site.name]
    if site_settings.download_script:
        monitor_scan_download(session)

    return new_record
def getRecordIDList(api_url, api_key):
    """Generate a list of record IDs in a REDCap project, without duplicates."""
    def_field = redcap.Project(api_url, api_key).def_field
    exported = exportRecords(api_url, api_key, fields=[def_field], quiet=True)
    # Keep first occurrence of each id, preserving order
    ids = (record[def_field] for record in exported)
    return list(unique_everseen(ids))
def _connect_redcap(self):
    """Connect to the data entry and import-laptops REDCap projects.

    Stores the two Project handles on the instance; exits the process
    if the configuration is missing a required key.
    """
    import redcap
    import requests
    cfg = self.config.get('redcap')
    try:
        data_entry = redcap.Project(cfg.get('server'),
                                    cfg.get('data_entry_token'),
                                    verify_ssl=cfg.get('verify_ssl'))
        import_laptops = redcap.Project(cfg.get('server'),
                                        cfg.get('import_laptops_token'),
                                        verify_ssl=cfg.get('verify_ssl'))
        self.api_data_entry = data_entry
        self.api_import_laptops = import_laptops
    # Fixed: 'except KeyError, err' is Python 2-only syntax; the 'as'
    # form works on Python 2.6+ and Python 3.
    except KeyError as err:
        self.logging.info('Connect to REDCap: {}'.format(time.asctime()),
                          '{}'.format(err),
                          server=cfg.get('server'))
        sys.exit(err)
def get_project(args):
    """Connect to the NCANDA Summary REDCap project and export MR session
    reports for the baseline and 1-year visits.

    :param args: unused here, kept for interface compatibility
    """
    # First REDCap connection for the Summary project (this is where we
    # put data). 'with' guarantees the token file handle is closed (the
    # original leaked it).
    token_path = os.path.join(os.path.expanduser("~"),
                              '.server_config/redcap-dataentry-token')
    with open(token_path, 'r') as summary_key_file:
        summary_api_key = summary_key_file.read().strip()

    rc_summary = redcap.Project('https://ncanda.sri.com/redcap/api/',
                                summary_api_key,
                                verify_ssl=False)

    # Get all np reports for baseline and 1r
    # NOTE(review): 'visit' is neither returned nor used -- confirm
    # whether a 'return visit' is missing here.
    visit = rc_summary.export_records(
        fields=['study_id', 'exclude', 'visit_ignore___yes'],
        forms=['mr_session_report', 'visit_date'],
        events=['baseline_visit_arm_1', '1y_visit_arm_1'],
        format='df')
def redcap_project_access(API_KEY):
    """
    Access point to REDCap form

    :param API_KEY: string with REDCap database API_KEY
    :return: redcap Project Object
    """
    try:
        project = redcap.Project('https://redcap.vanderbilt.edu/api/', API_KEY)
    # Fixed: narrowed the bare 'except:' (which also swallowed
    # SystemExit/KeyboardInterrupt) to 'except Exception'.
    except Exception:
        LOGGER.error('ERROR: Could not access redcap. Either wrong API_URL/API_KEY or redcap down.')
        sys.exit(1)
    return project
def __init__(self, redcap_url, redcap_key, instance_settings, local_dir,
             general_form='general'):
    """Store the dashboard configuration and open the backing REDCap
    project."""
    # Cache the configuration handed to us
    self._general_form = general_form
    self._local_dir = local_dir
    self._instance_settings = instance_settings

    # Open the REDCap connection last, once settings are in place
    self._redcap = redcap.Project(redcap_url, redcap_key)
def load_redcap_stats(api_url, api_key):
    """Return all records from a stats REDCap project as a DataFrame
    indexed by record_id, adding an lst_stats_wml_volume alias when a
    wml_volume column is present."""
    # A lazy connection skips the metadata round-trips for speed
    project = redcap.Project(api_url, api_key, lazy=True)

    # Because the project was loaded lazily, the index column must be
    # specified explicitly
    stats = project.export_records(format='df',
                                   df_kwargs={'index_col': 'record_id'})

    if 'wml_volume' in stats:
        # rename wml for NIC
        stats['lst_stats_wml_volume'] = stats['wml_volume']

    return stats
def get_project_entry(args=None):
    """
    Pulls the data from REDCap

    :param args: unused, kept for interface compatibility
    :return: connected redcap.Project for the NCANDA data entry project
    """
    # Get API key; 'with' closes the file handle (the original leaked it).
    token_path = os.path.join(os.path.expanduser("~"),
                              '.server_config',
                              'redcap-dataentry-token')
    with open(token_path, 'r') as summary_key_file:
        summary_api_key = summary_key_file.read().strip()

    # Connect to API.
    project_entry = redcap.Project('https://ncanda.sri.com/redcap/api/',
                                   summary_api_key,
                                   verify_ssl=False)
    return project_entry
def load_instance_settings(self, redcap_url, redcap_key, main_form='main'):
    """Fetch this instance's settings record (as labels) from REDCap."""
    self._main_form = main_form

    # Connect to the settings project
    self._redcap = redcap.Project(redcap_url, redcap_key)

    # get this instance name
    instance_name = get_this_instance()
    LOGGER.debug('instance={}'.format(instance_name))

    # Return the record associated with this instance_name
    wanted_fields = self._redcap.field_names + [main_form + '_complete']
    return self._redcap.export_records(records=[instance_name],
                                       fields=wanted_fields,
                                       raw_or_label='label')[0]
def redcap_query(lab_number, project_name, api_key):
    """Look up the study-specific subject id for a given lab number."""
    api_url = 'https://redcap.icts.uiowa.edu/redcap/api/'
    project = redcap.Project(api_url, api_key)

    # Fetch the lab id plus the project-specific subject-id field
    subject_field = utils.project_dict[project_name]['redcap']
    wanted_field_names = ['lab_id', subject_field]
    sub_id_df = project.export_records(fields=wanted_field_names, format='df')

    # Subject id formatted as a plain integer string
    subject_id = str(int(sub_id_df.loc[lab_number, subject_field]))

    # AMBI subject id's need to be renamed
    if project_name == 'AMBI':
        subject_id = utils.ambi_adjust(subject_id)

    return subject_id
def exportUsers(api_url, api_key, format='csv'): """export users from REDCap project Parameters: api_url: str, API URL for REDCap project Returns: users: str, user information for REDCap project in CSV format.""" # Load project. project = redcap.Project(api_url, api_key) # Export users. users = project.export_users(format=format) # Encode 'users' (currently unicode) as UTF-8. users = users.encode('utf-8') return users
def __init__(self, redcap_url, redcap_key, instance_settings, local_dir,
             general_form='general'):
    """Connect to REDCap and load the initial dashboard state."""
    self._general_form = general_form
    self._local_dir = local_dir
    self._instance_settings = instance_settings

    # Empty containers that load() will populate
    self.module_names = []
    self.processor_names = []
    self.rebuild_projects = []
    self.records = {}

    self._redcap = redcap.Project(redcap_url, redcap_key)

    # Initialize by loading from redcap project
    self.load()
def get_subj_redcap_checklist(rc_url, rc_token, subj_id, as_str=False):
    """Export a subject's daily checklist data from REDCap.

    :param rc_url: REDCap API url
    :param rc_token: REDCap API token
    :param subj_id: subject record id to export
    :param as_str: if True, return a CSV string instead of a DataFrame
    :return: DataFrame (or CSV string) with one row per checklist day
    """
    # connect to the redcap api
    rc_project = redcap.Project(rc_url, rc_token)

    # get the subject's checklist data
    checklist = rc_project.export_records(
        records=[subj_id],
        fields=[x for x in rc_project.field_names if "checklist" in x],
        format="df")

    # convert to a single index dataframe
    checklist = checklist.reset_index(level=[rc_project.def_field])

    # drop the def_field column (redundant copy of subject id)
    checklist = checklist.drop(rc_project.def_field, axis=1)

    # get the list of checklists
    checklist_per_day = list(checklist.index)

    # convert the list into a set of the days there are data for
    # NOTE(review): this set is computed but never used below -- confirm
    # whether it is still needed ("may be handled in the front-end").
    checklist_per_day = {
        x.replace('rlema_day_', '').split('_')[0]
        for x in checklist_per_day if 'rlema_day_' in x
    }

    # drop the rows that are not checklist data
    checklist = checklist.drop(
        [x for x in checklist.index if "rlema_day_" not in x])

    # get a dictionary from raw column name to label (subject 123 has all
    # checkboxes set to 1 to populate as reference)
    checklist_labels = rc_project.export_records(
        records=['123'],
        fields=[x for x in rc_project.field_names if "checklist" in x],
        format="df",
        export_checkbox_labels=True,
        raw_or_label='label')
    checklist_labels = [
        x.replace(' ', '_')
        for x in checklist_labels.loc[(123, 'RL-EMA Day 1')]
    ]
    checklist_labels[checklist_labels.index('Yes')] = 'EMA_Checklist_Complete'

    # rename the columns to be informative
    checklist.columns = checklist_labels

    # if set to return as csv string (fixed idiom: was 'as_str == True')
    if as_str:
        return checklist.to_csv(index=False)

    # return the dataframe
    return checklist
def main():
    """Export REDCap records (label and raw) and store both on Synapse."""
    credentials = get_env_var_credentials()
    syn = sc.login(credentials['synapseUsername'],
                   credentials['synapsePassword'])
    proj = redcap.Project(url=credentials['redcapURL'],
                          token=credentials['redcapToken'])

    export_fields = proj.field_names + NON_STANDARD_FIELDS

    # Pull the same data twice: once with labels, once with raw values
    exported_records_label = proj.export_records(fields=export_fields,
                                                 raw_or_label="label",
                                                 format="df",
                                                 export_survey_fields=True)
    exported_records_raw = proj.export_records(fields=export_fields,
                                               raw_or_label="raw",
                                               format="df",
                                               export_survey_fields=True)

    # Strip identifying fields before upload
    exported_records_label = filter_identifiers(exported_records_label)
    exported_records_raw = filter_identifiers(exported_records_raw)

    store_to_synapse(syn, exported_records_label, "exported_records.csv")
    store_to_synapse(syn, exported_records_raw, "exported_records_raw.csv")
def upload_update_date_redcap(project_list, type_update, start_end):
    """
    Upload the timestamp of when bin ran on a project (start and finish).

    :param project_list: List of projects that were updated
    :param type_update: What type of process ran: dax_build (1),
                        dax_update_tasks (2), dax_launch (3)
    :param start_end: starting timestamp (1) and ending timestamp (2)
    :return: None
    """
    logger = logging.getLogger('dax')
    if DAX_SETTINGS.get_api_url() and DAX_SETTINGS.get_api_key_dax() \
            and DAX_SETTINGS.get_dax_manager_config():
        redcap_project = None
        try:
            redcap_project = redcap.Project(DAX_SETTINGS.get_api_url(),
                                            DAX_SETTINGS.get_api_key_dax())
        # Fixed: narrowed the bare 'except:' to 'except Exception' so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        except Exception:
            logger.warn(
                'Could not access redcap. Either wrong DAX_SETTINGS. API_URL/API_KEY or redcap down.'
            )

        if redcap_project:
            # Map the numeric update type to its dax process name
            # (replaces the previous if/elif chain; unknown values still
            # upload the bare project record, as before)
            process_names = {
                1: 'dax_build',
                2: 'dax_update_tasks',
                3: 'dax_launch',
            }
            data = list()
            for project in project_list:
                to_upload = dict()
                to_upload[DAX_SETTINGS.get_dax_manager_config()
                          ['project']] = project
                if type_update in process_names:
                    to_upload = set_variables_dax_manager(
                        to_upload, process_names[type_update], start_end)
                data.append(to_upload)
            XnatUtils.upload_list_records_redcap(redcap_project, data)
def main(args=None):
    """Cross-check an input csv of sessions against REDCap and XNAT.

    :param args: parsed command-line arguments (expects .verbose, .input)
    """
    if args.verbose:
        print("Validating input csv.")
    csv = parse_csv(args.input)

    # currently just working with one visit
    event = event_mapping.get(csv.visit_id.get(0))

    if args.verbose:
        print("Creating connection...")

    # Read the API token; 'with' closes the file handle (the original
    # leaked it).
    token_path = os.path.join(os.path.expanduser("~"),
                              '.server_config',
                              'redcap-dataentry-token')
    with open(token_path, 'r') as dataentry_key_file:
        dataentry_api_key = dataentry_key_file.read().strip()

    project_entry = redcap.Project('https://ncanda.sri.com/redcap/api/',
                                   dataentry_api_key,
                                   verify_ssl=False)

    mri_form = project_entry.export_records(forms=['mr_session_report'],
                                            events=[event],
                                            fields=['subject_id'],
                                            format='df')
    visit_form = project_entry.export_records(
        forms=['visit'],
        events=[event],
        fields=['visit_ignore', 'visit_ignore_why'],
        format='df')

    # These are subject ids from the list that are in REDCap.
    rc_filter = mri_form.mri_xnat_sid.isin(csv.subject_id)
    rc_cases = mri_form[rc_filter]

    # These are the cases that are not in REDCap (i.e., missing sessions)
    non_rc_filter = ~csv.subject_id.isin(rc_cases.mri_xnat_sid)
    non_rc_cases = csv[non_rc_filter]

    # now look up what info is on xnat
    # Create interface using stored configuration
    ifc = pyxnat.Interface(config=os.path.join(os.path.expanduser("~"),
                                               '.server_config/ncanda.cfg'))

    # Fixed: parenthesized so this statement also parses on Python 3
    # (was the Python 2-only 'print visit_form').
    print(visit_form)
def get_project(event_list):
    """Export NCANDA MR session reports from the Summary REDCap project.

    :param event_list: comma-separated event names, or an empty string
        to export all events
    :return: DataFrame of mr_session_report/visit_date/demographics data
    """
    # First REDCap connection for the Summary project (this is where we
    # put data). 'with' guarantees the token file handle is closed (the
    # original leaked it).
    token_path = os.path.join(os.path.expanduser("~"),
                              '.server_config/redcap-dataentry-token')
    with open(token_path, 'r') as summary_key_file:
        summary_api_key = summary_key_file.read().strip()

    rc_summary = redcap.Project('https://ncanda.sri.com/redcap/api/',
                                summary_api_key,
                                verify_ssl=False)

    # Get all the mri session reports; the two branches previously
    # duplicated every argument except the optional event filter.
    export_kwargs = dict(
        fields=['study_id', 'exclude', 'visit_ignore___yes'],
        forms=['mr_session_report', 'visit_date', 'demographics'],
        format='df')
    if len(event_list):
        mri = rc_summary.export_records(events=event_list.split(","),
                                        **export_kwargs)
    else:
        mri = rc_summary.export_records(**export_kwargs)
    return mri
def create_from_request(self, request):
    """Populate this object from a REDCap data entry trigger request.

    :param request: flask request whose form contains the trigger data
    :raises redcap_exception: on missing keys/fields or on missing or
        duplicate REDCap records
    """
    try:
        self.redcap_url = request.form['redcap_url']
        self.record_id = request.form['record']
        self.instrument = request.form['instrument']
        self.project_id = request.form['project_id']
        if int(request.form[self.instrument + '_complete']) == 2:
            self.instrument_completed = True
    except KeyError:
        raise redcap_exception('Required key not found in request object.'
                               '{}'.format(request.form.keys()))

    try:
        rc = REDCAP.Project(self.redcap_url + 'api/', REDCAP_TOKEN)
        redcap_record = rc.export_records([self.record_id])
        # Fixed: was 'len(redcap_record) < 0', which can never be true,
        # so a missing record was silently treated as found.
        if len(redcap_record) == 0:
            raise redcap_exception('Record:{} not found in redcap'.format(
                self.record_id))
        elif len(redcap_record) > 1:
            raise redcap_exception(
                'Record:{} is not unique in redcap'.format(self.record_id))
        else:
            self.redcap_record = redcap_record[0]
    except REDCAP.RedcapError as e:
        raise e

    try:
        self.date = self.redcap_record['date']
        self.comment = self.redcap_record['cmts']
        self.rc_user = self.redcap_record['ra_id']
        self.__set_session(self.redcap_record['par_id'])
    except KeyError:
        raise redcap_exception('Required field not found in recap record.'
                               '{}'.format(self.redcap_record.keys()))
redcap_tokens = pd.DataFrame.from_dict(redcap_tokens, orient='index', columns=['token']) # No need to keep the call one site at a time - we can iterate through all for site in args.site: log.info("%s: Started processing", site) try: # Get device list from main Redcap project try: rc_token = redcap_tokens.loc[site, 'token'] except KeyError: log.error('%s: Redcap token ID is not available!', site) continue rc_api = rc.Project(REDCAP_URL, rc_token) rc_fit_fields = ['fitc_device_dte'] rc_devices = rc_api.export_records( fields=rc_fit_fields + [rc_api.def_field], events=[REDCAP_EVENT], export_data_access_groups=True, df_kwargs={ 'parse_dates': rc_fit_fields, # Only setting record id field as index here, instead of it # *and* redcap_event_name, in order to facilitate easy join # with the Fitabase DataFrame 'index_col': [rc_api.def_field] }, format='df') if not args.all:
def buildProjects(config):
    """Build the list of project dicts used for a multi-project export.

    For each project in config["projects"]: resolves the exportRecords
    arguments (optionally via getIPSSIDs), fetches API credentials,
    exports the data into a DataFrame, retrieves project metadata from
    REDCap, and records the primary-key / form-complete / data-field
    column lists on the project dict. Returns the mutated list.

    NOTE(review): this is Python 2 code (print statements, unicode) --
    confirm the target interpreter before modernizing.
    """
    #### Read user's settings.yml file, which will be used to get API tokens and URLs.
    api_settings = ApiSettings()
    ## Build a list of "projects" - dicts which store data and settings for the project.
    projects = config["projects"]
    ## Verify the settings for each project.
    for project in projects:
        code_name = project["code_name"]
        # Get args to pass to exportRecords.
        if (not "exportRecords_args" in project) or (project["exportRecords_args"] is None):
            project["exportRecords_args"] = {}
        # If use_getIPSSIDs is True, get list of record IDs to export.
        if project["options"]["use_getIPSSIDs"]:
            # If use_getIPSSIDs is True, but no options provided, raise warning.
            if (not "getIPSSIDs_args" in project) or (project["getIPSSIDs_args"] is None):
                print "Warning: in project '" + code_name + "', 'use_getIPSSIDs' is True, but 'getIPSSIDs_args' not provided for project. Exporting all record IDs from project."
                record_id_list = None
            else:
                getIPSSIDs_args = project["getIPSSIDs_args"]
                record_id_list = getIPSSIDs(**getIPSSIDs_args)
            # If exportRecords_args has an entry for record_id_list, but use_getIPSSIDs is True, raise warning.
            if (project["options"]["use_getIPSSIDs"]) and ("record_id_list" in project["exportRecords_args"]):
                print "Warning: in project '" + code_name + "', the specified 'record_id_list' will be ignored, since 'use_getIPSSIDs' is True."
            # Overwrite the record_id_list argument in exportRecords_args
            project["exportRecords_args"]["record_id_list"] = record_id_list
        ## Get args to pass to exportRecords. If key does not exist, or it is not set to a value, set it to an empty dict (i.e.
        exportRecords_args = project["exportRecords_args"]  # has a value (possibly {}).
        # Convert exportRecords_args arguments to strings as needed.
        convert_to_strings = ["fields", "forms", "events", "record_id_list"]
        for arg in convert_to_strings:
            if arg in exportRecords_args.keys():
                if (exportRecords_args[arg] == 'None'):  # these arguments could be lists or None
                    # Convert string 'None' to Python None.
                    exportRecords_args[arg] = None
                else:
                    # Convert list to list of strings. Currently, list might contain integers etc.
                    new_list = [str(val) for val in exportRecords_args[arg]]
                    exportRecords_args[arg] = new_list
        ## Get API credentials for current project.
        api_url, api_key, code_name = api_settings.getApiCredentials(
            code_name=code_name)
        project["api_url"] = api_url
        project["api_key"] = api_key
        ## Export requested data for current project
        data_csv = exportRecords(api_url, api_key, format="csv",
                                 **exportRecords_args)
        data_csv_file = StringIO(data_csv)
        data_df = pandas.read_csv(data_csv_file, dtype=unicode,
                                  encoding='utf-8').fillna('')
        project["chunks"] = [
            data_df
        ]  # this list of dataframes will be broken into pieces, each piece containing data to be placed in a different tab.
        ## Retrieve project settings and add them to the dict for the current project
        pycap_project = redcap.Project(api_url, api_key)
        def_field = pycap_project.def_field
        project_info = exportProjectInfo(api_url, api_key)
        longitudinal = bool(project_info["is_longitudinal"])
        repeating = bool(project_info["has_repeating_instruments_or_events"])
        events = getEvents(api_url, api_key, quiet=True)
        metadata_raw = pycap_project.export_metadata()
        form_event_mapping = exportFormEventMapping(pycap_project, longitudinal)
        repeating_forms_events = exportRepeatingFormsEvents(
            api_url, api_key, repeating)
        forms = exportFormsOrdered(api_url, api_key)
        form_repetition_map = createFormRepetitionMap(longitudinal, repeating,
                                                      form_event_mapping,
                                                      repeating_forms_events,
                                                      forms)
        metadata = parseMetadata(pycap_project.def_field, project_info,
                                 longitudinal, repeating, events, metadata_raw,
                                 form_event_mapping, repeating_forms_events,
                                 forms, form_repetition_map,
                                 write_branching_logic_function=False)
        # Stash everything on the project dict for downstream steps
        project["pycap_project"] = pycap_project
        project["def_field"] = def_field
        project["project_info"] = project_info
        project["longitudinal"] = longitudinal
        project["repeating"] = repeating
        project["events"] = events
        project["form_event_mapping"] = form_event_mapping
        project["repeating_forms_events"] = repeating_forms_events
        project["forms"] = forms
        project["form_repetition_map"] = form_repetition_map
        project["metadata"] = metadata
        # Create dict which maps each form to a list of events containing that form.
        if longitudinal:
            form_to_events_dict = {}
            for form_event_entry in form_event_mapping:
                form = form_event_entry['form']
                event = form_event_entry['unique_event_name']
                if (not form in form_to_events_dict):
                    form_to_events_dict[form] = [event]
                else:
                    form_to_events_dict[form].append(event)
        else:
            form_to_events_dict = None
        project["form_to_events_dict"] = form_to_events_dict
        ## Build lists of variables which appear in the export data.
        # columns which uniquely identify a row
        primary_key = [def_field]
        if project["longitudinal"]:
            primary_key.append("redcap_event_name")
        if project["repeating"]:
            primary_key.append("redcap_repeat_instrument")
            primary_key.append("redcap_repeat_instance")
        project["primary_key"] = primary_key
        # NOTE(review): this aliases (does not copy) primary_key, so the
        # DAG column is appended to both lists -- confirm intended.
        primary_key_and_dag = primary_key
        if ("redcap_data_access_group" in data_df.columns):
            primary_key_and_dag.append("redcap_data_access_group")
        project["primary_key_and_dag"] = primary_key_and_dag
        # form_complete fields
        form_complete_fields = [
            field for field in data_df.columns
            if ((field.endswith("_complete")) and (not field in metadata) and (
                not field in primary_key) and (
                    not field == "redcap_data_access_group"))
        ]
        project["form_complete_fields"] = form_complete_fields
        # data fields
        data_fields = [
            field for field in data_df.columns
            if ((not field in primary_key + form_complete_fields) and (
                not field == "redcap_data_access_group"))
        ]
        project["data_fields"] = data_fields
    return projects
) # if the input folder is named correctly, it is the ID that will replace the pt name name_in_redcap = False if contact_redcap: # check the redcap database to make sure the folder name is (pt_id) is in the redcap database print('\nContacting the REDCap database...') name_in_redcap = True try: api_url = 'https://redcap.vanderbilt.edu/api/' token_loc = '/Users/manusdonahue/Desktop/Projects/redcaptoken_scd_real.txt' token = open(token_loc).read() project = redcap.Project(api_url, token) project_data_raw = project.export_records() project_data = pd.DataFrame(project_data_raw) mri_cols = [ 'mr1_mr_id', 'mr2_mr_id', 'mr3_mr_id', 'mr4_mr_id', 'mr5_mr_id', 'mr6_mr_id' ] which_scan = [pt_id in list(project_data[i]) for i in mri_cols] if not any(which_scan): has_ans = False while not has_ans: print( f'The mr_id ({pt_id}) was not found in the REDCap database'
REDCAP_EVENT = '2_year_follow_up_y_arm_1' # FIXME: Move to args? args = parse_arguments() if args.verbose: log.getLogger().setLevel(log.DEBUG) with open(os.path.join(CURRENT_DIR, '../../../secure/tokens.json')) as data_file: redcap_tokens = json.load(data_file) redcap_tokens = pd.DataFrame.from_dict(redcap_tokens, orient='index', columns=['token']) with open(os.path.join(CURRENT_DIR, 'notifications_token.json')) as token_file: notif_token = json.load(token_file).get('token') notif_api = rc.Project(REDCAP_URL, notif_token) log.info('Started run with invocation: %s', sys.argv) # No need to keep the call one site at a time - we can iterate through all for site in args.site: try: # Get device list from main Redcap project rc_token = redcap_tokens.loc[site, 'token'] rc_api = rc.Project(REDCAP_URL, rc_token) rc_fit_datefields = ['fitc_device_dte', 'fitc_last_sync_date'] rc_fit_fields = [ 'fitc_last_battery_level', 'fitc_fitabase_exists', 'fitc_fitabase_profile_id', 'fitc_withdrawal', 'fitc_extension' ] rc_devices = rc_api.export_records(
#for row in registry_data: # if (row["substud___8"] == "1"): # record_ids.remove(row["ipssid"]) record_ids = getIPSSIDs(inc_registry_only=False, inc_unknown_stroke_type=False) record_ids_post_2014 = getIPSSIDs(inc_registry_only=False, inc_unknown_stroke_type=False, inc_pre_2014=False) record_ids_non_sk = getIPSSIDs(inc_registry_only=False, inc_unknown_stroke_type=False, inc_sk_patients=False) record_ids_psom = getIPSSIDs(db="psom", inc_registry_only=False, inc_unknown_stroke_type=False) # Load REDCap project (a PyCap object). project = redcap.Project(api_url_ipss, api_key_ipss) def_field = project.def_field project_info = exportProjectInfo(api_url_ipss, api_key_ipss) project_longitudinal = bool(project_info["is_longitudinal"]) project_repeating = bool(project_info["has_repeating_instruments_or_events"]) events = getEvents(api_url_ipss, api_key_ipss) metadata_raw = project.export_metadata() form_event_mapping = exportFormEventMapping(project, project_longitudinal) repeating_forms_events = exportRepeatingFormsEvents(api_url_ipss, api_key_ipss, project_repeating) forms = exportFormsOrdered(api_url_ipss, api_key_ipss) form_repetition_map = createFormRepetitionMap(project_longitudinal, project_repeating, form_event_mapping, repeating_forms_events, forms) metadata = parseMetadata(def_field, project_info, project_longitudinal,