def test_bad_creds(project_urls, project_token):
    """Constructing a Project with a bad URL or a bad token must raise."""
    url_that_fails = project_urls["bad_url"]
    token_that_fails = "1"
    # Bad URL with a valid token.
    with pytest.raises(AssertionError):
        Project(url_that_fails, project_token)
    # Valid URL with a junk token.
    with pytest.raises(AssertionError):
        Project(project_urls["simple_project"], token_that_fails)
def create_projects(self):
    """Register the mocked endpoint responses, then build one Project
    handle per endpoint for the tests to use."""
    # Canned HTTP responses must exist before any Project is constructed.
    self.add_long_project_response()
    self.add_normalproject_response()
    self.add_ssl_project()
    self.add_survey_project()

    token = self.reg_token
    self.long_proj = Project(self.long_proj_url, token)
    self.reg_proj = Project(self.normal_proj_url, token)
    # The SSL test project deliberately skips certificate verification.
    self.ssl_proj = Project(self.ssl_proj_url, token, verify_ssl=False)
    self.survey_proj = Project(self.survey_proj_url, token)
def load_non_comp(report_id, rc_uri, rc_token, era_login, era_pass, delay,
                  long_delay, logger):
    """Export a non-compliance report from REDCap and load its PMIDs into
    NCBI MyBibliography.

    :param report_id: REDCap report identifier to export.
    :param rc_uri: REDCap API URL.
    :param rc_token: REDCap API token.
    :param era_login: ERA Commons username.
    :param era_pass: ERA Commons password.
    :param delay: short pause (seconds) between browser actions.
    :param long_delay: longer pause (seconds) for slow page loads.
    :param logger: logger used for warnings/progress.
    :return: success message string.
    :raises RuntimeError: if ERA Commons login fails after three attempts.
    """
    project = Project(rc_uri, rc_token)
    non_comp = project.export_reports(report_id=report_id, format='df')
    non_comp.reset_index(level=0, inplace=True)
    non_comp['pmid'] = non_comp['pmid'].astype(str)

    # Retry ERA Commons login up to three times before giving up.
    driver = None
    for attempt in range(1, 4):
        try:
            driver = ncbi_login(era_login, era_pass)
            break
        except Exception as err:
            logger.warning(
                'Unable to log into ERA Commons, attempt %i; error: %s'
                % (attempt, str(err)))
            time.sleep(2)
    if driver is None:
        # BUG FIX: the original fell through here with ``driver`` unbound
        # and crashed with a NameError on driver.get().
        raise RuntimeError('ERA Commons login failed after 3 attempts')

    time.sleep(delay)
    driver.get(
        'https://www.ncbi.nlm.nih.gov/myncbi/collections/mybibliography/')
    clear_my_bib(driver, delay, logger)
    print('*Cleared MyBib')
    time.sleep(long_delay)
    add_to_my_bib(driver, non_comp['pmid'], delay, long_delay, logger)
    driver.close()
    success_msg = 'Non-Compliant Loaded Into MyBibliography'
    return success_msg
def check_redcap_connection(redcap_uri, redcap_token):
    """Verify that a connection to the REDCap instance can be established.

    :param redcap_uri: REDCap API URL.
    :param redcap_token: REDCap API token.
    """
    try:
        project = Project(redcap_uri, redcap_token)
        logging.info(
            "Successfully established connection with REDCap instance")
    except RedcapError as e:
        # BUG FIX: exceptions have no ``.message`` attribute on Python 3;
        # str(e) is the portable way to obtain the error text.
        logging.info(str(e))
def __init__(self, Token, URL, get_all_field=True):
    """Create a project handle using PyCap.

    :param Token: REDCap API token.
    :param URL: REDCap API URL.
    :param get_all_field: by default, get all fields since for the Mothers
        table cluster no associative information is needed.
    """
    # The PyCap project handle used throughout this object.
    self.project = Project(URL, Token)
    # NOTE(review): key-id restricted loading is disabled here; the key
    # fields were ["babyid", "motherid", "baby_patieui"].
    # self.data = get_fields(self.project, fields_keyid)
    if get_all_field:
        # Pull every record up front.
        self.data = self.project.export_records()
def get_redcap_project(project, api_token=None):
    """Return a REDCap ``Project``, prompting for an API token if needed.

    :param project: NOTE(review): this argument is unconditionally
        overwritten below and never read — confirm whether the intent was
        to return it unchanged when already connected.
    :param api_token: REDCap API token; prompted for via getpass() when
        not supplied.
    :return: a ``Project`` connected to the module-level ``URL``.
    """
    if not api_token:
        print(
            '\nRequested action requires API access. Enter API token to continue.'
        )
        api_token = getpass()
    # ``URL`` is presumably a module-level constant — not visible in this
    # chunk; verify it is defined before this is called.
    project = Project(URL, api_token)
    return project
def test_redcap_connection(self):
    """Tests connecting to the REDCap database."""
    URL = 'https://hcbredcap.com.br/api/'
    TOKEN = 'F2C5AEE8A2594B0A9E442EE91C56CC7A'
    project = Project(URL, TOKEN)
    self.assertIsNotNone(project)
    # BUG FIX: Python 2 ``print`` statement replaced with the print()
    # function; also fixes the "connenction" typo in the message.
    print("test_redcap_connection: PASSED")
def ExportRedcapData(api_url, api_key, ids, fields, indexname):
    """Export REDCap records (every value as type ``str``) into a pandas
    DataFrame indexed by *indexname*.

    :param api_url: REDCap API URL.
    :param api_key: REDCap API token.
    :param ids: record ids to export.
    :param fields: field names to export.
    :param indexname: column to use as the DataFrame index.
    :return: pandas DataFrame of the requested records.
    """
    rc_project = Project(api_url, api_key)
    frame = rc_project.export_records(
        records=ids,
        fields=fields,
        format='df',
        df_kwargs={'dtype': str},
    )
    frame.set_index(indexname, inplace=True)
    return frame
def createProject(Token, URL):
    """Create and return a PyCap ``Project`` for the admission data.

    :param Token: REDCap API token.
    :param URL: REDCap API URL.
    :return: connected ``Project`` instance.
    """
    return Project(URL, Token)
def get_redcap_records(api_url, api_token, default_password,
                       default_start_date, project_name):
    """Fetch every record from a REDCap project and wrap each in UserData.

    :param api_url: REDCap API URL.
    :param api_token: REDCap API token.
    :param default_password: password passed through to each UserData.
    :param default_start_date: start date passed through to each UserData.
    :param project_name: project name passed through to each UserData.
    :return: list of UserData, one per exported record.
    :raises IOError: if the REDCap export fails.
    """
    try:
        project = Project(api_url, api_token)
        user_records = project.export_records()
    except RedcapError as er:
        # BUG FIX: the '{}' placeholder was never filled in — the error was
        # passed as a second IOError argument instead of being formatted.
        raise IOError('Error getting Redcap Project {}'.format(er))
    return [
        UserData(d, default_password, default_start_date, project_name)
        for d in user_records
    ]
def __init__(self, redcap_uri, token, verify_ssl=True):
    """Initialize a REDCap interface for the given endpoint.

    :param redcap_uri: REDCap API URL.
    :param token: REDCap API token.
    :param verify_ssl: whether to verify the server's SSL certificate.
    :raises RequestException, RedcapError: if the connection fails (the
        exception is logged and re-raised).
    """
    self.redcap_uri = redcap_uri
    msg = 'Initializing redcap interface for: ' + redcap_uri
    logger.info(msg)
    self.token = token
    self.verify_ssl = verify_ssl
    try:
        # Third positional argument is the (unused) project name.
        self.project = Project(redcap_uri, token, "", verify_ssl)
        # BUG FIX: "initialzed" typo in the log message.
        logger.info("redcap interface initialized")
    except (RequestException, RedcapError) as e:
        # BUG FIX: exceptions have no ``.message`` attribute on Python 3;
        # logger.exception already records the traceback.
        logger.exception(str(e))
        raise
def get_config():
    """Read the config file and project id named on the command line and
    build the bootstrap and data REDCap project handles.

    :return: tuple ``(pid, bs_proj, data_proj, open_dest)`` where
        ``open_dest(file_name, file_format)`` opens a writable binary file
        in the configured destination directory.
    """
    [config_fn, pid] = argv[1:3]
    # BUG FIX: SafeConfigParser and readfp() were deprecated aliases and
    # are removed in Python 3.12; ConfigParser/read_file are the
    # drop-in successors.
    config = configparser.ConfigParser()
    config_fp = openf(config_fn)
    config.read_file(config_fp, source=config_fn)
    api_url = config.get('api', 'api_url')
    verify_ssl = config.getboolean('api', 'verify_ssl')
    log.debug('API URL: %s', api_url)
    bs_token = config.get(pid, 'bootstrap_token')
    # Log only a prefix/suffix of the token so secrets never hit the log.
    log.debug('bootstrap token: %s...%s', bs_token[:4], bs_token[-4:])
    bs_proj = Project(api_url, bs_token, verify_ssl=verify_ssl)
    data_token = config.get(pid, 'data_token')
    data_proj = Project(api_url, data_token, verify_ssl=verify_ssl)

    def open_dest(file_name, file_format):
        # Destination directory comes from the per-project config section.
        file_dest = config.get(pid, 'file_dest')
        return openf(
            os_path.join(file_dest, file_name + '.' + file_format), 'wb')

    return pid, bs_proj, data_proj, open_dest
def connect_project(self):
    """Connect to each site's REDCap project after checking token expiry.

    :raises exc.RedCAPTokenExpired: when the stored expiry date has passed.
    """
    if datetime.datetime.strptime(
            self.exp_date, "%Y-%m-%d").date() <= datetime.date.today():
        # BUG FIX: the original raised with bare ``exp_date``, which is
        # undefined here (NameError) — the value lives on ``self``.
        raise exc.RedCAPTokenExpired(self.exp_date)
    # There can be multiple projects that have our data, one per site.
    for site, tok in self.token.items():
        # BUG FIX: Python 2 ``iteritems()`` and ``print`` statements
        # modernized; the surrounding ``try/except: raise`` was a no-op
        # and has been removed.
        print('site')
        print(site)
        self.project[site] = Project(self.cap_url, tok, verify_ssl=True)
        log.info('Project connected to: %s data' % site)
def get_redcap_project(study_name, password):
    """Return a REDCap ``Project`` for *study_name*, looking up the current
    user's API token in the Microsoft Access token database.

    :param study_name: study whose ``<study>_api_tokens`` table holds the token.
    :param password: password for the Access database.
    :return: connected ``Project``.
    """
    user = getuser()
    try:
        access_conn_str = (
            r'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};'
            r'DBQ=' + DB_PATH_TEMPLATE.format(user) + ';'
            r'PWD=' + password)
        connection = pyodbc.connect(access_conn_str)
    except pyodbc.Error:
        exit('Error connecting to API token access database')
    token_query = 'SELECT api_token FROM {}_api_tokens WHERE userid = ?'.format(
        study_name)
    cursor = connection.cursor()
    cursor.execute(token_query, (user, ))
    api_token = cursor.fetchone()[0]
    return Project(REDCAP_URL, api_token)
def import_data(object, apiKey, api_url):
    """Decrypt stored Google credentials and import sheet data into REDCap.

    :param object: dict with keys 'events', 'id' (sheet id), 'creds'
        (Fernet-encrypted credentials) and 'key' (the Fernet key).
    :param apiKey: REDCap API token.
    :param api_url: REDCap API URL.
    :return: the success message if every event imported cleanly, the
        first failing response otherwise, or False if nothing ran.
    """
    imported = False
    events = object['events']
    sheetID = object['id']
    encCreds = bytes(object["creds"].encode("utf-8"))
    key = bytes(object["key"].encode("utf-8"))
    # SECURITY FIX: the original printed the Fernet key material to
    # stdout; secrets must never be logged.
    fernet = Fernet(key)
    creds = fernet.decrypt(encCreds).decode()
    service = createService(creds)
    project = Project(api_url, apiKey)
    # "All Events" expands to the sheet's full event list; the original
    # duplicated an identical import loop in both branches — unified here.
    if events == "All Events":
        events = getEvents(service, sheetID)
    for event in events:
        response = import_redcap(event, service, project, sheetID)
        if response == "Import Data to RedCap Successful":
            imported = response
        else:
            # Abort on the first failure and surface its response.
            return response
    return imported
def simple_project(project_urls, project_token, mocked_responses) -> Project:
    """Mocked simple REDCap project"""
    url = project_urls["simple_project"]

    def handle_simple_request(req):
        # Route each POST to the handler matching its request type.
        data, headers, request_type = parse_request(req)
        handler = get_simple_project_request_handler(request_type)
        return handler(data=data, headers=headers)

    mocked_responses.add_callback(
        responses.POST,
        url,
        callback=handle_simple_request,
        content_type="application/json",
    )
    return Project(url, project_token)
def survey_project(project_urls, project_token, mocked_responses) -> Project:
    """Mocked simple REDCap project, with survey fields"""
    url = project_urls["survey_project"]

    def handle_survey_request(req):
        # Route each POST to the handler matching its request type.
        data, headers, request_type = parse_request(req)
        handler = get_survey_project_request_handler(request_type)
        return handler(data=data, headers=headers)

    mocked_responses.add_callback(
        responses.POST,
        url,
        callback=handle_survey_request,
        content_type="application/json",
    )
    # This fixture intentionally disables SSL verification.
    return Project(url, project_token, verify_ssl=False)
def connect_project(settings):
    """Connect to the RedCap project described by *settings*.

    :param settings: dict with 'cap_url', 'cap_key', 'exp_date'
        (YYYY-MM-DD) and 'notify.error' (space-separated recipients).
    :return: connected ``Project``.
    :raises exc.RedCAPTokenExpired: when the token expiry date has passed.
    """
    url = settings['cap_url']
    key = settings['cap_key']
    exp_date = settings['exp_date']
    try:
        if datetime.datetime.strptime(
                exp_date, "%Y-%m-%d").date() <= datetime.date.today():
            raise exc.RedCAPTokenExpired(exp_date)
        project = Project(url, key, verify_ssl=True)
        log.info('Project connected: %s' % project)
    except Exception:
        # BUG FIX: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer intercepted; failures are
        # still mailed out and re-raised.
        log.critical('Exception on RedCap Project connect')
        turbomail.send(
            turbomail.Message(
                to=settings['notify.error'].split(),
                subject='[The Early Test]: RedCap Connection failure',
                plain=traceback.format_exc()))
        raise
    return project
def __init__(self, Token, URL, get_all_field=False):
    """Create a project handle using PyCap and preload the key-id fields.

    :param Token: REDCap API token.
    :param URL: REDCap API URL.
    :param get_all_field: when True, replace the key-id subset with a
        full export of every record.
    """
    # The PyCap project handle used throughout this object.
    self.project = Project(URL, Token)
    # Load only the key identifier fields first to keep the download small.
    key_fields = ["caseid", "cnbpid", "babyid"]
    self.data = self.get_fields(key_fields)
    if get_all_field:
        # A full export supersedes the key-id subset when requested.
        self.data = self.project.export_records()
def ImportRecords(api_url, api_key, data, imported_textfile):
    """Import records (as type string) to REDCap and append the imported
    ids to a text log file.

    :param api_url: REDCap API URL.
    :param api_key: REDCap API token.
    :param data: CSV payload to import.
    :param imported_textfile: path of the file the imported ids are
        appended to.
    """
    project = Project(api_url, api_key)
    print('Importing...')
    try:
        imported = project.import_records(data,
                                          format='csv',
                                          overwrite='normal',
                                          return_format='csv',
                                          return_content='ids')
    except redcap.RedcapError:
        print("oops this hasn't worked")
        # BUG FIX: the original fell through after the except and crashed
        # on the unbound ``imported`` below.
        return
    if imported.split("\n", 1)[0] == 'id':
        print('Imported IDs:')
        id_block = imported.split("\n", 1)[1]
        print(id_block)
        # Context manager guarantees the log file is closed.
        with open(imported_textfile, 'a') as text_file:
            text_file.write(id_block)
            text_file.write('\n')
        # BUG FIX: Python 2 ``print`` statement modernized.
        print('records imported stored in %s' % imported_textfile)
    else:
        print(imported)
# NOTE(review): Python 2-era imports (``cStringIO``) — this module predates
# a Python 3 port.
import pycurl
import cStringIO
import pandas
import itertools
from redcap import Project, RedcapError

# Stanford REDCap endpoint; tokens are intentionally blank in source.
apiurl = 'https://redcap.stanford.edu/api/'
token = ''
rc_project = Project(apiurl, token)
long_proj = Project(apiurl, token)
ssl_proj = Project(apiurl, token, verify_ssl=True)
survey_proj = Project(apiurl, '')


def metadata_to_df(rc_project):
    """Return the project metadata as a pandas DataFrame."""
    df = rc_project.export_metadata(format='df')
    return df


def export_always_include_def_field(rc_project):
    """
    Ensure def_field always comes in the output even if not explicity
    given in a requested form
    """
    # If we just ask for a form, must also get def_field in there
    records = rc_project.export_records(forms=['imaging'])


def is_longitudinal(rc_project):
    "Test the is_longitudinal method"
    # NOTE(review): ``assertFalse``/``assertTrue`` are unittest.TestCase
    # methods, yet ``rc_project`` is used as a Project elsewhere in this
    # module — confirm what object this function is actually called with.
    rc_project.assertFalse(rc_project.reg_proj.is_longitudinal())
    rc_project.assertTrue(rc_project.long_proj.is_longitudinal())
def test_bad_creds(self):
    """Project construction must raise RedcapError for a bad URL/token."""
    # Bad URL with a valid token, then bad URL with a junk token.
    self.assertRaises(RedcapError, Project, self.bad_url, self.reg_token)
    self.assertRaises(RedcapError, Project, self.bad_url, '1')
#print(records_of_interest,outfile_name) fields_of_interest = ['participationid', 'scan_validation','scan_req_ack', 'baby_ga_at_birth_weeks','baby_gender','baby_birth_weight','baby_babyhc','baby_baby_length', 'fscan_ga_at_scan_weeks', 'nscan_ga_at_scan_weeks','nscan_age_at_scan_days','xscan_baby_weight', 'xscan_head_circumference','xscan_baby_length','xscan_baby_skin_fold'] events_of_interest = ['fetal_scan_arm_1', 'neonatal_scan_arm_1','baby_born_arm_1'] #print(records_of_interest,fields_of_interest) fields = { 'token': '', 'content': 'arm', 'format': 'json' } project = Project(api_url,api_key) #get data for this participant data = project.export_records(records=records_of_interest,fields=fields_of_interest,events=events_of_interest,format='json') #output is a list of do dictionaries where each dictionary corresponds to a baby_born, fetal_scan or neonatal_scan event #each field of interest will appera in every dictionary so we've got a lot of nulls. We'd also like to fix some of the naming #so it's common betweem fetal and neonatal scans baby_born={} data_strip=[] #new container for stripped down list of dictionaries for event in data: event_strip ={}
"""## GFDX notebook: WHO Recommendation Analysis"""
# BUG FIX: the module docstring delimiters were mangled (a stray,
# unterminated triple quote), which made this chunk a syntax error.

# Install package to allow import from REDCap API
from redcap import Project
import pandas as pd
import numpy as np
import os
from tqdm.notebook import tqdm  # progress bar

# API key is read from the environment, never hard-coded.
api_key = os.environ.get("APIKEY")

# Connecting to GFDx Redcap API
URL = "https://redcap.emory.edu/api/"
project = Project(URL, api_key)

# Pulls out variables of interest from REDCap
fields_of_interest = [
    "country_code",
    "standard_nutrient",
    "nutrient_level",
    "nutrient_compound",
    "latest_intake_api",
    "food_status_api",
]
subset = project.export_records(fields=fields_of_interest, format="df")

# Reset index so country_code becomes an ordinary column.
df = subset.copy()
df.reset_index(inplace=True)
def test_export_of_simple_project():
    """Smoke-test export_records against the public redcapdemo instance."""
    demo_url = "https://redcapdemo.vanderbilt.edu/api/"
    demo_token = os.getenv("REDCAPDEMO_SIMPLE_TOKEN")
    exported = Project(demo_url, demo_token).export_records()
    # The demo project is seeded with exactly three records.
    assert len(exported) == 3
def export_healthpro_records(url, api_key):
    """Export every record from the HealthPro REDCap project.

    :param url: REDCap API URL.
    :param api_key: REDCap API token.
    :return: pandas DataFrame with one row per record.
    """
    records = Project(url, api_key).export_records()
    return pd.DataFrame(records)
def main():
    """CLI entry point: connect to a REDCap project and print its
    metadata as CSV."""
    parser = argparse.ArgumentParser(
        description='Read project metadata from a REDCap Project')
    parser.add_argument(
        '--token',
        dest='token',
        default='',
        required=True,
        help=
        'Specify the authentication/authorization token that will provide access to the REDCap project'
    )
    parser.add_argument(
        '--url',
        dest='url',
        default='',
        required=True,
        help='Specify the url of the REDCap server to connect with')
    parser.add_argument(
        '--verify_ssl',
        dest='verify_ssl',
        default=True,
        help=
        'Specify whether the SSL cert of the REDCap server should be checked')

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # Map the y/n flag onto the boolean PyCap expects; anything else
    # defaults to verifying SSL.
    if args['verify_ssl'] == 'y':
        args['verify_ssl'] = True
    elif args['verify_ssl'] == 'n':
        args['verify_ssl'] = False
    else:
        args['verify_ssl'] = True

    # Attempt to connect to the REDCap project
    try:
        project = Project(args['url'], args['token'], "", args['verify_ssl'])
    except Exception:
        # BUG FIX: bare ``except:`` narrowed and the Python 2 ``print``
        # statement replaced with the print() function.
        print("Cannot connect to project at " + args['url'] +
              ' with token ' + args['token'])
        quit()

    data = project.export_metadata(format='csv')
    # BUG FIX: ``print unicode(data)`` was Python 2 syntax; on Python 3
    # every str is already unicode.
    print(data)
a = 0 for i in content: # First item in the list is the key. Add to the Keys List. if a == 0: dictionary_keys.append(int(i)) a += 1 # Second item in the list is the value. Add to the Values List. else: dictionary_values.append(i) dictionary = dict(zip(dictionary_keys, dictionary_values)) return dictionary tk.cop_coaching_evals() project = Project(tk.api_url, tk.api_token) # CPS-AIM MetaData df = project.export_records(format='df') meta_df = project.export_metadata(format='df') trainers = meta_dict(meta_df, 'trainer1') percent_attend = meta_dict(meta_df, 'percent_coaching_attended') percent_attend = dict( (keys, int(values)) for keys, values in percent_attend.items()) df['percent_coaching_attended'] = ( df['percent_coaching_attended'].map(percent_attend)) df['trainer1'] = df['trainer1'].map(trainers) rating_col_names = [ 'Lecuture', ' Slides', 'Videos', 'Handouts', 'Polling', 'Case Study',
import redcap
from redcap import Project
from collections import Counter
import statistics
import numpy as np
from statistics import mode
import requests
# NOTE(review): IPython line magic — this is a notebook export, not plain
# importable Python.
%load_ext rpy2

## Setting up the main data pull process
api_url = 'https://redcap.ucsf.edu/api/'
#Remember to Include the API key
api_key = ''
project = Project(api_url,api_key, verify_ssl=True)
# Pull only records whose 3-month EORTC quality-of-life form is complete.
project.export_records(filter_logic='[3_mo_arm_1][eortcquality_of_life_complete]=2')
# NOTE(review): ``pd`` (pandas) is used below but not imported in this
# chunk — presumably imported elsewhere in the notebook; confirm.
pd.options.display.max_columns=999
pd.options.display.max_rows=100
#set up the fields of interest for baseline
fields_bl = ['record_id','tod','age', 'sex','cbduse','currentuse','last_cdb','lenght','freqcbd','reascbd','other_use','cbdmedical',
'oth_sym','desc1','overallsym_scorebe','overallsym_scoreaf','moduse','cbd_oth','know_strain','strain','cbd_thc','cbd_ratio','recommcbd',
'recom_oth','obtain','eq5d_mb_5l_usa_eng','eq5d_sc_5l_usa_eng', 'eq5d_ua_5l_usa_eng','eq5d_pd_5l_usa_eng', 'eq5d_ad_5l_usa_eng','eq5d5l_vas2_usa_eng',
'in_pt','dob_qol','tod','c30_1','c30_2','c30_3','c30_4','c30_5','c30_6','c30_7','c30_8','c30_9','c30_10',
'c30_11','c30_12','c30_13','c30_14','c30_15','c30_16','c30_17','c30_18','c30_19','c30_20','c30_21','c30_22','c30_23','c30_24','c30_25',
'c30_26','c30_27','c30_28','c30_29','c30_30','bn20_31','bn20_32','bn20_33','bn20_34','bn20_35','bn20_36','bn20_37','bn20_38','bn20_39',
'bn20_40','bn20_41','bn20_42','bn20_43','bn20_44','bn20_45','bn20_46','bn20_47','bn20_48','bn20_49','bn20_50','ethnicity','education','employment','economic','ethn','race','maritalsts',
'vitalsts','last_alive','dod','sex','inital_dx','ini_surgery','ini_surgery_type','ini_tum_lateral','ini_tum_location','ini_path_dx','ini_tumor_grade']
#subset = project.export_records(fields=fields_oi)
# -*- coding: utf-8 -*- """ Created on Mon Oct 26 14:29:25 2020 @author: SekoB """ from redcap import Project import sqlite3 from datetime import datetime api_url = api_key_pg = project_pg = Project(api_url, api_key_pg) api_key_lf = project_lf = Project(api_url, api_key_lf) json_data1 = project_pg.export_records() #original project design json_data2 = project_lf.export_records() #updated project design db = sqlite3.connect('') def json_2_db(json_data, database, table): columns = [] column = [] for data in json_data: column = list(data.keys()) for col in column: if col not in columns: columns.append(col)