Example #1
def load_non_comp(report_id, rc_uri, rc_token, era_login, era_pass, delay,
                  long_delay, logger):
    project = Project(rc_uri, rc_token)
    non_comp = project.export_reports(report_id=report_id, format='df')
    non_comp.reset_index(level=0, inplace=True)
    non_comp['pmid'] = non_comp['pmid'].astype(str)

    driver = None
    attempt = 1
    while attempt <= 3:
        try:
            driver = ncbi_login(era_login, era_pass)
            break
        except Exception as err:
            logger.warning(
                'Unable to log into ERA Commons, attempt %i; error: %s' %
                (attempt, str(err)))
            attempt += 1
            time.sleep(2)
    if driver is None:
        raise RuntimeError('Unable to log into ERA Commons after 3 attempts')

    time.sleep(delay)
    driver.get(
        'https://www.ncbi.nlm.nih.gov/myncbi/collections/mybibliography/')

    clear_my_bib(driver, delay, logger)
    print('*Cleared MyBib')
    time.sleep(long_delay)

    add_to_my_bib(driver, non_comp['pmid'], delay, long_delay, logger)

    driver.close()
    success_msg = 'Non-Compliant Loaded Into MyBibliography'
    return success_msg
Example #2
File: query_mother.py Project: CNBP/RCAPI
    def __init__(
        self,
        Token,
        URL,
        get_all_field=True,
    ):
        """
        Create a project using PyCap
        :param Token:
        :param URL:
        :param get_all_field: by default, get all fields, since for the Mothers table cluster we do not need associative information.
        :return:
        """
        # Several key properties we'll use throughout
        self.project = Project(URL, Token)

        # These are the important ID fields (kept here for reference):
        # fields_keyid = ["babyid", "motherid", "baby_patieui"]

        # For now, make sure to only get the data related to these key IDs to reduce load time
        # self.data = get_fields(self.project, fields_keyid)

        # if specified, get all the records.
        if get_all_field:
            self.data = self.project.export_records()
Example #3
def test_bad_creds(project_urls, project_token):
    bad_url = project_urls["bad_url"]
    bad_token = "1"

    with pytest.raises(AssertionError):
        Project(bad_url, project_token)
    with pytest.raises(AssertionError):
        Project(project_urls["simple_project"], bad_token)
Example #4
 def setUp(self):
     self.url = 'https://redcap.vanderbilt.edu/api/'
     self.bad_url = 'https://redcap.vanderbilt.edu/api'
     self.reg_token = '8E66DB6844D58E990075AFB51658A002'
     self.long_proj = Project(self.url, '1387872621BBF1C17CC47FD8AE25FF54')
     self.reg_proj = Project(self.url, self.reg_token)
     self.ssl_proj = Project(self.url, self.reg_token,
         verify_ssl=False)
Example #5
def extraer_redcap(URL, API_KEY, VARIABLES_REDCap):
    
    from redcap import Project, RedcapError
    project = Project(URL, API_KEY)
    fields_of_interest = VARIABLES_REDCap
    subset = project.export_records(fields=fields_of_interest)

    return subset
Example #6
    def create_projects(self):
        self.add_long_project_response()
        self.add_normalproject_response()
        self.add_ssl_project()
        self.add_survey_project()

        self.long_proj = Project(self.long_proj_url, self.reg_token)
        self.reg_proj = Project(self.normal_proj_url, self.reg_token)
        self.ssl_proj = Project(self.ssl_proj_url, self.reg_token, verify_ssl=False)
        self.survey_proj = Project(self.survey_proj_url, self.reg_token)
Example #7
def extraer_redcap(URL, API_KEY, VARIABLES_REDCap):
    
    from redcap import Project, RedcapError
##    URL = 'http://claimproject.com/redcap570/api/'
##    API_KEY_SOCIODEMOGRAFICA = 'FC8BD5EE83AE0DECD2BF247031BD028E'
##    API_KEY_SINTOMAS ='744F5C67890BDA39B9CC2B4A9CE77F12';
    project = Project(URL, API_KEY)
    fields_of_interest = VARIABLES_REDCap
    subset = project.export_records(fields=fields_of_interest)

    return subset
Example #8
def ExportRedcapData(api_url, api_key, ids, fields, indexname):
    '''
    Exports redcap data (as type string) into pandas dataframe
    '''
    project = Project(api_url, api_key)
    data = project.export_records(records=ids,
                                  fields=fields,
                                  format='df',
                                  df_kwargs={'dtype': str})
    data.set_index(indexname, inplace=True)
    return data
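
A quick usage sketch for ExportRedcapData; the URL, token, record IDs, field names and index column below are placeholders, not values from the original project:

# Hypothetical call; every value here is a placeholder.
df = ExportRedcapData(api_url='https://redcap.example.org/api/',
                      api_key='YOUR_API_TOKEN',
                      ids=['1', '2'],
                      fields=['record_id', 'age'],
                      indexname='record_id')
print(df.head())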
Example #9
def get_redcap_records(api_url, api_token, default_password,
                       default_start_date, project_name):
    # Get data from Redcap
    try:
        project = Project(api_url, api_token)
        user_records = project.export_records()
    except RedcapError as er:
        raise IOError('Error getting Redcap Project: {}'.format(er))

    return [
        UserData(d, default_password, default_start_date, project_name)
        for d in user_records
    ]
Example #10
    def __init__(self, redcap_uri, token, verify_ssl=True):
        self.redcap_uri = redcap_uri
        msg = 'Initializing redcap interface for: ' + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise
Example #11
def check_redcap_connection(redcap_uri, redcap_token):
    try:
        project = Project(redcap_uri, redcap_token)
        logging.info(
            "Successfully established connection with REDCap instance")
    except RedcapError as e:
        logging.info(e.message)
Example #12
 def __init__(self, key, url, table_name, connection='', driver='', server='',
              database='', dev=False, project='', records='', forms=[]):
     self.connection = connection
     self.key = key
     self.url = url
     # self.server = server
     # self.driver = driver
     # self.database = database
     self.dev = dev
     self.table_name = table_name
     self.records = records
     self.forms = forms
     if not self.dev:
         self.project = Project(self.url, self.key)
         print 'Production Environment'
     else:
         self.project = Project(self.url, self.key, verify_ssl=False)
         print 'Dev Environment'
Example #13
    def create_projects(self):
        self.add_long_project_response()
        self.add_normalproject_response()
        self.add_ssl_project()
        self.add_survey_project()

        self.long_proj = Project(self.long_proj_url, self.reg_token)
        self.reg_proj = Project(self.normal_proj_url, self.reg_token)
        self.ssl_proj = Project(self.ssl_proj_url, self.reg_token, verify_ssl=False)
        self.survey_proj = Project(self.survey_proj_url, self.reg_token)
Example #14
File: backend.py Project: sburns/red-light
class RCDB(DB):
    """Class representing a redcap database"""
    def __init__(self, url, api, initial_fields):
        self.proj = Project(url, api)
        self.make_df(initial_fields)

    def make_df(self, fields):
        csv = StringIO(self.proj.export_records(fields=fields, format='csv'))
        self.df = pd.read_csv(csv, index_col=self.proj.def_field)
Example #15
def get_redcap_project(project, api_token=None):
    if not api_token:
        print(
            '\nRequested action requires API access. Enter API token to continue.'
        )
        api_token = getpass()

    project = Project(URL, api_token)
    return project
Example #16
File: backend.py Project: sburns/red-light
class RCDB(DB):
    """Class representing a redcap database"""

    def __init__(self, url, api, initial_fields):
        self.proj = Project(url, api)
        self.make_df(initial_fields)

    def make_df(self, fields):
        csv = StringIO(self.proj.export_records(fields=fields, format='csv'))
        self.df = pd.read_csv(csv, index_col=self.proj.def_field)
Example #17
    def test_redcap_connection(self):
        """Tests connecting to the REDCap database."""

        URL = 'https://hcbredcap.com.br/api/'
        TOKEN = 'F2C5AEE8A2594B0A9E442EE91C56CC7A'

        project = Project(URL, TOKEN)
        self.assertIsNotNone(project)

        print "test_redcap_connenction: PASSED"
Example #18
File: query_mother.py Project: CNBP/RCAPI
class mother_project(ProjectMixins):
    """
    One baby can have many admissions CaseIDs.
    One hospital record can have many CaseIDs.
    One baby has only one hospital record number.
    """
    def __init__(
        self,
        Token,
        URL,
        get_all_field=True,
    ):
        """
        Create a project using PyCap
        :param Token:
        :param URL:
        :param get_all_field: by default, get all fields, since for the Mothers table cluster we do not need associative information.
        :return:
        """
        # Several key properties we'll use throughout
        self.project = Project(URL, Token)

        # These are the important ID fields (kept here for reference):
        # fields_keyid = ["babyid", "motherid", "baby_patieui"]

        # For now, make sure to only get the data related to these key IDs to reduce load time
        # self.data = get_fields(self.project, fields_keyid)

        # if specified, get all the records.
        if get_all_field:
            self.data = self.project.export_records()

    def get_records_mother(self, MotherID: str or List[str]):
        """
        Retrieve the records based on their INDEX which is the MotherID in the Mother table.
        :param MotherID:
        :return:
        """
        if type(MotherID) is str:
            MotherID = [MotherID]
        cases_data = self.project.export_records(records=MotherID)
        return cases_data
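
A short usage sketch for the class above; the token, URL and MotherID values are placeholders:

# Hypothetical usage; token, URL and IDs are placeholders.
mothers = mother_project(Token='YOUR_API_TOKEN',
                         URL='https://redcap.example.org/api/')
records = mothers.get_records_mother(['M0001', 'M0002'])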
Example #19
def createProject(Token, URL):
    """
    Create a project using PyCap
    :param Token:
    :param URL:
    :return:
    """

    # The project object we'll use throughout
    project_admission = Project(URL, Token)
    return project_admission
Example #20
    def __init__(
        self,
        Token,
        URL,
        get_all_field=False,
    ):
        """
        Create a project using PyCap
        :param Token:
        :param URL:
        :return:
        """
        # Several key properties we'll use throughout
        self.project = Project(URL, Token)
        fields_keyid = ["caseid", "cnbpid", "babyid"]

        # For now, make sure to only get the data related to these key IDs to reduce load time
        self.data = self.get_fields(fields_keyid)

        # if specified, get all the records.
        if get_all_field:
            self.data = self.project.export_records()
Example #21
File: redcapClient.py Project: indera/redi
    def __init__(self, redcap_uri, token, verify_ssl=True):
        self.redcap_uri = redcap_uri
        msg = 'Initializing redcap interface for: ' + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise
Example #22
    def get_config():
        [config_fn, pid] = argv[1:3]

        config = configparser.SafeConfigParser()
        config_fp = openf(config_fn)
        config.readfp(config_fp, filename=config_fn)

        api_url = config.get('api', 'api_url')
        verify_ssl = config.getboolean('api', 'verify_ssl')
        log.debug('API URL: %s', api_url)

        bs_token = config.get(pid, 'bootstrap_token')
        log.debug('bootstrap token: %s...%s', bs_token[:4], bs_token[-4:])
        bs_proj = Project(api_url, bs_token, verify_ssl=verify_ssl)
        data_token = config.get(pid, 'data_token')
        data_proj = Project(api_url, data_token, verify_ssl=verify_ssl)

        def open_dest(file_name, file_format):
            file_dest = config.get(pid, 'file_dest')
            return openf(
                os_path.join(file_dest, file_name + '.' + file_format), 'wb')

        return pid, bs_proj, data_proj, open_dest
Example #23
def ImportRecords(api_url, api_key, data, imported_textfile):
    '''
    Imports records as type string to redcap using a redcap Project object and pandas dataframe as argument
    '''
    project = Project(api_url, api_key)
    print('Importing...')
    try:
        imported = project.import_records(data,
                                          format='csv',
                                          overwrite='normal',
                                          return_format='csv',
                                          return_content='ids')
    except redcap.RedcapError:
        print("oops this hasn't worked")
        return
    if imported.split("\n", 1)[0] == 'id':
        print('Imported IDs:')
        print(imported.split("\n", 1)[1])
        text_file = open(imported_textfile, 'a')
        text_file.write(imported.split("\n", 1)[1])
        text_file.write('\n')
        text_file.close()
        print('records imported, stored in %s' % imported_textfile)
    else:
        print(imported)
Example #24
File: redcapi.py Project: faanwar/avrc
 def connect_project(self):
     try:
         if datetime.datetime.strptime(
                 self.exp_date, "%Y-%m-%d").date() <= datetime.date.today():
             raise exc.RedCAPTokenExpired(self.exp_date)
         # There can be multiple project that have our data
         for site, tok in self.token.iteritems():
             print 'site'
             print site
             self.project[site] = Project(self.cap_url,
                                          tok,
                                          verify_ssl=True)
             log.info('Project connected to: %s data' % site)
     except:
         raise
Example #25
def get_redcap_project(study_name, password):
    user = getuser()
    try:
        conn_str = (r'DRIVER={Microsoft Access Driver (*.mdb, *.accdb)};'
                    r'DBQ=' + DB_PATH_TEMPLATE.format(user) + ';'
                    r'PWD=' + password)
        conn = pyodbc.connect(conn_str)
    except pyodbc.Error:
        exit('Error connecting to API token access database')

    cursor = conn.cursor()
    sql = 'SELECT api_token FROM {}_api_tokens WHERE userid = ?'.format(
        study_name)
    cursor.execute(sql, (user, ))
    api_token = cursor.fetchone()[0]
    return Project(REDCAP_URL, api_token)
Example #26
def import_data(object, apiKey, api_url):
    """
    Function that imports data to Redcap

    Parameters
    Object : list of redcap data

    Returns
    Imported data
    """

    imported = False
    events = object['events']
    sheetID = object['id']
    encCreds = bytes(object["creds"].encode("utf-8"))
    print(object["key"])
    key = bytes(object["key"].encode("utf-8"))
    print(key)

    fernet = Fernet(key)

    creds = fernet.decrypt(encCreds).decode()
    service = createService(creds)

    # config = getConfig()

    project = Project(api_url, apiKey)

    if events == "All Events":
        events = getEvents(service, sheetID)
        for event in events:
            response = import_redcap(event, service, project, sheetID)
            if response == "Import Data to RedCap Successful":
                imported = response
                continue
            else:
                return response

    else:
        for event in events:
            response = import_redcap(event, service, project, sheetID)
            if response == "Import Data to RedCap Successful":
                imported = response
                continue
            else:
                return response
    return imported
Example #27
def survey_project(project_urls, project_token, mocked_responses) -> Project:
    """Mocked simple REDCap project, with survey fields"""
    def request_callback_survey(req):
        request_data, request_headers, request_type = parse_request(req)
        request_handler = get_survey_project_request_handler(request_type)
        response = request_handler(data=request_data, headers=request_headers)
        return response

    survey_project_url = project_urls["survey_project"]
    mocked_responses.add_callback(
        responses.POST,
        survey_project_url,
        callback=request_callback_survey,
        content_type="application/json",
    )

    return Project(survey_project_url, project_token, verify_ssl=False)
Example #28
def simple_project(project_urls, project_token, mocked_responses) -> Project:
    """Mocked simple REDCap project"""
    def request_callback_simple(req):
        request_data, request_headers, request_type = parse_request(req)
        request_handler = get_simple_project_request_handler(request_type)
        response = request_handler(data=request_data, headers=request_headers)
        return response

    simple_project_url = project_urls["simple_project"]
    mocked_responses.add_callback(
        responses.POST,
        simple_project_url,
        callback=request_callback_simple,
        content_type="application/json",
    )

    return Project(simple_project_url, project_token)
Example #29
File: post_redcap.py Project: faanwar/avrc
def connect_project(settings):
    url = settings['cap_url']
    key = settings['cap_key']
    exp_date = settings['exp_date']

    try:
        if datetime.datetime.strptime(
                exp_date, "%Y-%m-%d").date() <= datetime.date.today():
            raise exc.RedCAPTokenExpired(exp_date)
        project = Project(url, key, verify_ssl=True)
        log.info('Project connected: %s' % project)
    except:
        log.critical('Exception on RedCap Project connect')
        turbomail.send(
            turbomail.Message(
                to=settings['notify.error'].split(),
                subject='[The Early Test]: RedCap Connection failure',
                plain=traceback.format_exc()))
        raise
    return project
Example #30
class redcapClient:
    # Helper class for getting data from a REDCap instance

    project = None
    """
    __init__:
    This constructor establishes a connection with the REDCap instance.
    Parameters:
        redcap_uri : URI of the REDCap API
        token      : API token used to authenticate against the project
        verify_ssl : whether the server's SSL certificate should be verified
    """

    def __init__(self, redcap_uri, token, verify_ssl):

        self.redcap_uri = redcap_uri
        msg = "Initializing redcap interface for: " + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise

    """
    get_data_from_redcap:
    This function is used to get data from the REDCap instance
    Parameters:
        records_to_fetch    : a list object containing records
        events_to_fetch     : a list object containing events
        fields_to_fetch     : a list object containing fields
        forms_to_fetch      : a list object containing forms
        return_format       : specifies the format of the REDCap response. Default value is xml
    """

    def get_data_from_redcap(
        self,
        records_to_fetch=None,
        events_to_fetch=None,
        fields_to_fetch=None,
        forms_to_fetch=None,
        return_format="xml",
    ):
        logger.info("getting data from redcap")
        try:
            response = self.project.export_records(
                records=records_to_fetch,
                events=events_to_fetch,
                fields=fields_to_fetch,
                forms=forms_to_fetch,
                format=return_format,
            )
        except RedcapError as e:
            logger.debug(e.message)
            raise
        return response

    """
    send_data:
    This function is used to send data to the REDCap instance
    Parameters:
        data: This parameter contains the data that should be sent to the REDCap instance.
    """

    def send_data_to_redcap(self, data, overwrite=False):
        # logger.info('Sending data for subject id: ' + data[0]['dm_subjid'])
        # logger.info(data)
        overwrite_value = "normal"

        if overwrite:
            overwrite_value = "overwrite"

        try:
            response = self.project.import_records(data, overwrite=overwrite_value)
            return response
        except RedcapError as e:
            logger.debug(e.message)
            raise
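
The class-level docstrings above describe the fetch and import helpers; a minimal usage sketch (the URI, token, form name and record values are placeholders) could look like this:

# Hypothetical usage; URI, token, form and record values are placeholders.
client = redcapClient('https://redcap.example.org/api/',
                      'YOUR_API_TOKEN', verify_ssl=True)
records_xml = client.get_data_from_redcap(forms_to_fetch=['demographics'])
client.send_data_to_redcap([{'record_id': '1', 'age': '42'}], overwrite=False)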
Example #31
def main():

    parser = argparse.ArgumentParser(description="Read some data from a REDCap Project")
    parser.add_argument(
        "--token",
        dest="token",
        default="",
        required=True,
        help="Specify the authentication/authorization token that will provide access to the REDCap project",
    )
    parser.add_argument(
        "--url", dest="url", default="", required=True, help="Specify the url of the REDCap server to connect with"
    )
    parser.add_argument(
        "--verify_ssl",
        dest="verify_ssl",
        default=True,
        help="Specify whether the SSL cert of the REDCap server should be checked",
    )
    parser.add_argument(
        "-i", "--import_data", dest="import_data", default="", help="Specify the input data file to load into REDCap"
    )
    parser.add_argument(
        "-f",
        "--forms",
        dest="forms",
        default="",
        help="Specify a list of forms, separated by spaces, for which data should be returned.",
    )
    parser.add_argument(
        "-t",
        "--type",
        choices=["json", "csv", "xml"],
        dest="data_type",
        default="csv",
        help="Specify the file type used as input or output. Valid types: json, csv, xml",
    )
    parser.add_argument(
        "--fields",
        dest="fields",
        default="",
        help="Specify a list of fields, separated by space or comma, for which data should be returned.",
    )
    parser.add_argument(
        "-e",
        "--events",
        dest="events",
        default="",
        help="Specify a list of events, separated by space or comma, for which data should be returned.",
    )
    parser.add_argument(
        "-r",
        "--records",
        dest="records",
        default="",
        help="Specify a list of records, separated by spaces or comma, for which data should be returned.",
    )
    # Additional verbosity
    parser.add_argument(
        "-d",
        "--debug",
        dest="loglevel",
        default=logging.WARNING,
        const=logging.DEBUG,
        action="store_const",
        help="Print even more detailed output",
    )
    parser.add_argument(
        "-v", "--verbose", dest="loglevel", const=logging.INFO, action="store_const", help="Print detailed output"
    )

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # configure logger
    logging.basicConfig(level=args["loglevel"])

    # According to http://pycap.readthedocs.org/en/latest/api.html
    # allowed data_types are: csv, json, xml
    data_type = args["data_type"]

    # Turn the 'verify_ssl' parameter into the truth value we need to make a
    # REDCap connection
    if args["verify_ssl"] == "y":
        args["verify_ssl"] = True
    elif args["verify_ssl"] == "n":
        args["verify_ssl"] = False
    else:
        args["verify_ssl"] = True

    # Attempt to connect to the REDCap project
    try:
        project = Project(args["url"], args["token"], "", args["verify_ssl"])
    except:

        # Produce varying levels of output corresponding to loglevel
        logging.debug(traceback.format_list(traceback.extract_tb(sys.exc_traceback)))
        logging.info(traceback.format_exc())
        logging.error(
            "Cannot connect to project at "
            + args["url"]
            + " with token "
            + args["token"]
            + "\nAdd '-d, --debug' flag for more info"
        )

        quit()

    # either we export data...
    if args["import_data"] == "":
        my_forms = args["forms"].split()
        my_fields = args["fields"].split()
        my_events = args["events"].split()
        my_records = args["records"].split()
        data = project.export_records(
            forms=my_forms,
            format=data_type,
            fields=my_fields,
            events=my_events,
            records=my_records,
            event_name="unique",
        )
        if "json" == data_type:
            print json.dumps(data, ensure_ascii=False)
        else:
            print str(data)
    else:
        # ...or we import data
        file = args["import_data"]
        try:
            input = open(file, "r")
        except IOError:
            # Produce varying levels of output corresponding to loglevel
            logging.debug(traceback.format_list(traceback.extract_tb(sys.exc_traceback)))
            logging.info(traceback.format_exc())
            logging.error("Cannot open file" + file)
            quit()
        if "json" == data_type:
            json_data = json.load(input)
            response = project.import_records(json_data)
        else:
            response = project.import_records(input.read(), format=data_type)

        print response
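
The argument parser above defines a small export/import CLI: without -i/--import_data it exports records in the chosen format, and with -i it imports the given file. A hedged example invocation (the script name, token and URL are placeholders, not from the original source):

# Hypothetical invocation of the script above; all values are placeholders.
import subprocess
subprocess.run([
    'python', 'read_redcap_data.py',
    '--token', 'YOUR_API_TOKEN',
    '--url', 'https://redcap.example.org/api/',
    '--type', 'csv',
    '--forms', 'demographics',
])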
Example #32
class ProjectTests(unittest.TestCase):
    """docstring for ProjectTests"""

    long_proj_url = 'https://redcap.longproject.edu/api/'
    normal_proj_url = 'https://redcap.normalproject.edu/api/'
    ssl_proj_url = 'https://redcap.sslproject.edu/api/'
    survey_proj_url = 'https://redcap.surveyproject.edu/api/'
    bad_url = 'https://redcap.badproject.edu/api'
    reg_token = 'supersecrettoken'

    def setUp(self):
        self.create_projects()

    def tearDown(self):
        pass

    def add_long_project_response(self):
        def request_callback_long(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)
            headers = {"Content-Type": "application/json"}

            request_type = data["content"][0]

            if "returnContent" in data:
                resp = {"count": 1}

            elif (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type == "version"):
                resp = b'8.6.0'
                headers = {'content-type': 'text/csv; charset=utf-8'}
                return (201, headers, resp)
            elif (request_type == "event"):
                resp = [{
                    'unique_event_name': "raw"
                }]
            elif (request_type == "arm"):
                resp = [{
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type in ["record", "formEventMapping"]):
                if "csv" in data["format"]:
                    resp = "record_id,test,redcap_event_name\n1,1,raw"
                    headers = {'content-type': 'text/csv; charset=utf-8'}
                    return (201, headers, resp)
                else:
                    resp = [{"field_name":"record_id"}, {"field_name":"test"}]

            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.long_proj_url,
            callback=request_callback_long,
            content_type="application/json",
        )

    def add_normalproject_response(self):
        def request_callback_normal(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)
            headers = {"Content-Type": "application/json"}

            if " filename" in data:
                resp = {}
            else:
                request_type = data.get("content", ['unknown'])[0]

                if "returnContent" in data:
                    if "non_existent_key" in data["data"][0]:
                        resp = {"error": "invalid field"}
                    else:
                        resp = {"count": 1}
                elif (request_type == "metadata"):
                    if "csv" in data["format"]:
                        resp = "field_name,field_label,form_name,arm_num,name\n"\
                            "record_id,Record ID,Test Form,1,test\n"
                        headers = {'content-type': 'text/csv; charset=utf-8'}
                        return (201, headers, resp)

                    else:
                        resp = [{
                            'field_name': 'record_id',
                            'field_label': 'Record ID',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "test",
                            "field_type": "text",
                        }, {
                            'field_name': 'file',
                            'field_label': 'File',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "file",
                            "field_type": "file",
                        }, {
                            'field_name': 'dob',
                            'field_label': 'Date of Birth',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "dob",
                            "field_type": "date",
                        }]
                elif (request_type == "version"):
                    resp = {
                        'error': "no version info"
                    }
                elif (request_type == "event"):
                    resp = {
                        'error': "no events"
                    }
                elif (request_type == "arm"):
                    resp = {
                        'error': "no arm"
                    }
                elif (request_type == "record"):
                    if "csv" in data["format"]:
                        resp = "record_id,test,first_name,study_id\n1,1,Peter,1"
                        headers = {'content-type': 'text/csv; charset=utf-8'}
                        return (201, headers, resp)
                    elif "exportDataAccessGroups" in data:
                        resp = [
                            {"field_name":"record_id", "redcap_data_access_group": "group1"},
                            {"field_name":"test", "redcap_data_access_group": "group1"}
                        ]
                    elif "label" in data.get("rawOrLabel"):
                        resp = [{"matcheck1___1": "Foo"}]
                    else:
                        resp = [
                            {"record_id": "1", "test": "test1"},
                            {"record_id": "2", "test": "test"}
                        ]
                elif (request_type == "file"):
                    resp = {}
                    headers["content-type"] = "text/plain;name=data.txt"
                elif (request_type == "user"):
                    resp = [
                        {
                            'firstname': "test",
                            'lastname': "test",
                            'email': "test",
                            'username': "******",
                            'expiration': "test",
                            'data_access_group': "test",
                            'data_export': "test",
                            'forms': "test"
                        }
                    ]

            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.normal_proj_url,
            callback=request_callback_normal,
            content_type="application/json",
        )

    def add_ssl_project(self):
        def request_callback_ssl(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)

            request_type = data["content"][0]
            if (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            if (request_type == "version"):
                resp = {
                    'error': "no version info"
                }
            if (request_type == "event"):
                resp = {
                    'error': "no events"
                }
            if (request_type == "arm"):
                resp = {
                    'error': "no arm"
                }

            headers = {"Content-Type": "application/json"}
            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.ssl_proj_url,
            callback=request_callback_ssl,
            content_type="application/json",
        )

    def add_survey_project(self):
        def request_callback_survey(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)

            request_type = data["content"][0]
            if (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type == "version"):
                resp = {
                    'error': "no version info"
                }
            elif (request_type == "event"):
                resp = {
                    'error': "no events"
                }
            elif (request_type == "arm"):
                resp = {
                    'error': "no arm"
                }
            elif (request_type == "record"):
                resp = [
                    {"field_name":"record_id", "redcap_survey_identifier": "test", "demographics_timestamp": "a_real_date"},
                    {"field_name":"test", "redcap_survey_identifier": "test", "demographics_timestamp": "a_real_date"}
                ]

            headers = {"Content-Type": "application/json"}
            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.survey_proj_url,
            callback=request_callback_survey,
            content_type="application/json",
        )

    @responses.activate
    def create_projects(self):
        self.add_long_project_response()
        self.add_normalproject_response()
        self.add_ssl_project()
        self.add_survey_project()

        self.long_proj = Project(self.long_proj_url, self.reg_token)
        self.reg_proj = Project(self.normal_proj_url, self.reg_token)
        self.ssl_proj = Project(self.ssl_proj_url, self.reg_token, verify_ssl=False)
        self.survey_proj = Project(self.survey_proj_url, self.reg_token)


    def test_good_init(self):
        """Ensure basic instantiation """

        self.assertIsInstance(self.long_proj, Project)
        self.assertIsInstance(self.reg_proj, Project)
        self.assertIsInstance(self.ssl_proj, Project)

    def test_normal_attrs(self):
        """Ensure projects are created with all normal attrs"""

        for attr in ('metadata', 'field_names', 'field_labels', 'forms',
            'events', 'arm_names', 'arm_nums', 'def_field'):
            self.assertTrue(hasattr(self.reg_proj, attr))

    def test_long_attrs(self):
        "proj.events/arm_names/arm_nums should not be empty in long projects"

        self.assertIsNotNone(self.long_proj.events)
        self.assertIsNotNone(self.long_proj.arm_names)
        self.assertIsNotNone(self.long_proj.arm_nums)

    def test_is_longitudinal(self):
        "Test the is_longitudinal method"
        self.assertFalse(self.reg_proj.is_longitudinal())
        self.assertTrue(self.long_proj.is_longitudinal())

    def test_regular_attrs(self):
        """proj.events/arm_names/arm_nums should be empty tuples"""
        for attr in 'events', 'arm_names', 'arm_nums':
            attr_obj = getattr(self.reg_proj, attr)
            self.assertIsNotNone(attr_obj)
            self.assertEqual(len(attr_obj), 0)

    @responses.activate
    def test_json_export(self):
        """ Make sure we get a list of dicts"""
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    @responses.activate
    def test_long_export(self):
        """After determining a unique event name, make sure we get a
        list of dicts"""
        self.add_long_project_response()
        unique_event = self.long_proj.events[0]['unique_event_name']
        data = self.long_proj.export_records(events=[unique_event])
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    @responses.activate
    def test_import_records(self):
        "Test record import"
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        response = self.reg_proj.import_records(data)
        self.assertIn('count', response)
        self.assertNotIn('error', response)

    @responses.activate
    def test_import_exception(self):
        "Test record import throws RedcapError for bad import"
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        data[0]['non_existent_key'] = 'foo'
        with self.assertRaises(RedcapError) as cm:
            self.reg_proj.import_records(data)
        exc = cm.exception
        self.assertIn('error', exc.args[0])

    def is_good_csv(self, csv_string):
        "Helper to test csv strings"
        return is_str(csv_string)

    @responses.activate
    def test_csv_export(self):
        """Test valid csv export """
        self.add_normalproject_response()
        csv = self.reg_proj.export_records(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    @responses.activate
    def test_metadata_export(self):
        """Test valid metadata csv export"""
        self.add_normalproject_response()
        csv = self.reg_proj.export_metadata(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_bad_creds(self):
        "Test that exceptions are raised with bad URL or tokens"
        with self.assertRaises(RedcapError):
            Project(self.bad_url, self.reg_token)
        with self.assertRaises(RedcapError):
            Project(self.bad_url, '1')

    @responses.activate
    def test_fem_export(self):
        """ Test fem export in json format gives list of dicts"""
        self.add_long_project_response()
        fem = self.long_proj.export_fem(format='json')
        self.assertIsInstance(fem, list)
        for arm in fem:
            self.assertIsInstance(arm, dict)

    @responses.activate
    def test_file_export(self):
        """Test file export and proper content-type parsing"""
        self.add_normalproject_response()
        record, field = '1', 'file'
        #Upload first to make sure file is there
        self.import_file()
        # Now export it
        content, headers = self.reg_proj.export_file(record, field)
        self.assertTrue(is_bytestring(content))
        # We should at least get the filename in the headers
        for key in ['name']:
            self.assertIn(key, headers)
        # needs to raise ValueError for exporting non-file fields
        with self.assertRaises(ValueError):
            self.reg_proj.export_file(record=record, field='dob')

    def import_file(self):
        upload_fname = self.upload_fname()
        with open(upload_fname, 'r') as fobj:
            response = self.reg_proj.import_file('1', 'file', upload_fname, fobj)
        return response

    def upload_fname(self):
        import os
        this_dir, this_fname = os.path.split(__file__)
        return os.path.join(this_dir, 'data.txt')

    @responses.activate
    def test_file_import(self):
        "Test file import"
        self.add_normalproject_response()
        # Make sure a well-formed request doesn't throw RedcapError
        try:
            response = self.import_file()
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful imports")
        self.assertTrue('error' not in response)
        # Test importing a file to a non-file field raises a ValueError
        fname = self.upload_fname()
        with open(fname, 'r') as fobj:
            with self.assertRaises(ValueError):
                response = self.reg_proj.import_file('1', 'first_name',
                    fname, fobj)

    @responses.activate
    def test_file_delete(self):
        "Test file deletion"
        self.add_normalproject_response()
        # make sure deleting doesn't raise
        try:
            self.reg_proj.delete_file('1', 'file')
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful deletes")

    @responses.activate
    def test_user_export(self):
        "Test user export"
        self.add_normalproject_response()
        users = self.reg_proj.export_users()
        # A project must have at least one user
        self.assertTrue(len(users) > 0)
        req_keys = ['firstname', 'lastname', 'email', 'username',
                    'expiration', 'data_access_group', 'data_export',
                    'forms']
        for user in users:
            for key in req_keys:
                self.assertIn(key, user)

    def test_verify_ssl(self):
        """Test argument making for SSL verification"""
        # Test we won't verify SSL cert for non-verified project
        post_kwargs = self.ssl_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertFalse(post_kwargs['verify'])
        # Test we do verify SSL cert in normal project
        post_kwargs = self.reg_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertTrue(post_kwargs['verify'])

    @responses.activate
    def test_export_data_access_groups(self):
        """Test we get 'redcap_data_access_group' in exported data"""
        self.add_normalproject_response()
        records = self.reg_proj.export_records(export_data_access_groups=True)
        for record in records:
            self.assertIn('redcap_data_access_group', record)
        # When not passed, that key shouldn't be there
        records = self.reg_proj.export_records()
        for record in records:
            self.assertNotIn('redcap_data_access_group', record)

    @responses.activate
    def test_export_survey_fields(self):
        """Test that we get the appropriate survey keys in the exported
        data.

        Note that the 'demographics' form has been setup as the survey
        in the `survey_proj` project. The _timestamp field will vary for
        users as their survey form will be named differently"""
        self.add_survey_project()
        self.add_normalproject_response()
        records = self.survey_proj.export_records(export_survey_fields=True)
        for record in records:
            self.assertIn('redcap_survey_identifier', record)
            self.assertIn('demographics_timestamp', record)
        # The regular project doesn't have a survey setup. Users should
        # be able to pass this argument as True, but it winds up being a no-op.
        records = self.reg_proj.export_records(export_survey_fields=True)
        for record in records:
            self.assertNotIn('redcap_survey_identifier', record)
            self.assertNotIn('demographics_timestamp', record)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_metadata_to_df(self):
        """Test metadata export --> DataFrame"""
        self.add_normalproject_response()
        df = self.reg_proj.export_metadata(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_export_to_df(self):
        """Test export --> DataFrame"""
        self.add_normalproject_response()
        self.add_long_project_response()
        df = self.reg_proj.export_records(format='df')
        self.assertIsInstance(df, pd.DataFrame)
        # Test it's a normal index
        self.assertTrue(hasattr(df.index, 'name'))
        # Test for a MultiIndex on longitudinal df
        long_df = self.long_proj.export_records(format='df', event_name='raw')
        self.assertTrue(hasattr(long_df.index, 'names'))

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_export_df_kwargs(self):
        """Test passing kwargs to export DataFrame construction"""
        self.add_normalproject_response()
        df = self.reg_proj.export_records(format='df',
            df_kwargs={'index_col': 'first_name'})
        self.assertEqual(df.index.name, 'first_name')
        self.assertTrue('study_id' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_metadata_df_kwargs(self):
        """Test passing kwargs to metadata DataFrame construction"""
        self.add_normalproject_response()
        df = self.reg_proj.export_metadata(format='df',
            df_kwargs={'index_col': 'field_label'})
        self.assertEqual(df.index.name, 'field_label')
        self.assertTrue('field_name' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_import_dataframe(self):
        """Test importing a pandas.DataFrame"""
        self.add_normalproject_response()
        self.add_long_project_response()
        df = self.reg_proj.export_records(format='df')
        response = self.reg_proj.import_records(df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)
        long_df = self.long_proj.export_records(event_name='raw', format='df')
        response = self.long_proj.import_records(long_df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)

    @responses.activate
    def test_date_formatting(self):
        """Test date_format parameter"""
        self.add_normalproject_response()

        def import_factory(date_string):
            return [{'study_id': '1',
                     'dob': date_string}]

        # Default YMD with dashes
        import_ymd = import_factory('2000-01-01')
        response = self.reg_proj.import_records(import_ymd)
        self.assertEqual(response['count'], 1)

        # DMY with /
        import_dmy = import_factory('31/01/2000')
        response = self.reg_proj.import_records(import_dmy, date_format='DMY')
        self.assertEqual(response['count'], 1)

        import_mdy = import_factory('12/31/2000')
        response = self.reg_proj.import_records(import_mdy, date_format='MDY')
        self.assertEqual(response['count'], 1)

    def test_get_version(self):
        """Testing retrieval of REDCap version associated with Project"""
        self.assertTrue(isinstance(semantic_version.Version('1.0.0'), type(self.long_proj.redcap_version)))

    @responses.activate
    def test_export_checkbox_labels(self):
        """Testing the export of checkbox labels as field values"""
        self.add_normalproject_response()
        self.assertEqual(
            self.reg_proj.export_records(
                raw_or_label='label',
                export_checkbox_labels=True)[0]['matcheck1___1'],
                'Foo'
        )

    @responses.activate
    def test_export_always_include_def_field(self):
        """ Ensure def_field always comes in the output even if not explicity
        given in a requested form """
        self.add_normalproject_response()
        # If we just ask for a form, must also get def_field in there
        records = self.reg_proj.export_records(forms=['imaging'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
        # Still need def_field even if it's not asked for in the form and fields
        records = self.reg_proj.export_records(forms=['imaging'], fields=['foo_score'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
        # If we just ask for some fields, still need def_field
        records = self.reg_proj.export_records(fields=['foo_score'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
Example #33
#print(records_of_interest,outfile_name)

fields_of_interest = ['participationid', 'scan_validation','scan_req_ack',
                      'baby_ga_at_birth_weeks','baby_gender','baby_birth_weight','baby_babyhc','baby_baby_length',
                      'fscan_ga_at_scan_weeks',
                      'nscan_ga_at_scan_weeks','nscan_age_at_scan_days','xscan_baby_weight',
                      'xscan_head_circumference','xscan_baby_length','xscan_baby_skin_fold']
events_of_interest = ['fetal_scan_arm_1', 'neonatal_scan_arm_1','baby_born_arm_1']
#print(records_of_interest,fields_of_interest)
fields = {
    'token': '',
    'content': 'arm',
    'format': 'json'
}

project = Project(api_url,api_key)


#get data for this participant

data = project.export_records(records=records_of_interest,fields=fields_of_interest,events=events_of_interest,format='json')

# Output is a list of dictionaries where each dictionary corresponds to a baby_born, fetal_scan or neonatal_scan event.
# Each field of interest will appear in every dictionary, so we've got a lot of nulls. We'd also like to fix some of the naming
# so it's common between fetal and neonatal scans.

baby_born={}
data_strip=[] #new container for stripped down list of dictionaries

for event in data:
    event_strip ={}
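
The snippet above is cut off mid-loop. A minimal sketch of the stripping/renaming step the comments describe, assuming the goal is simply to drop empty values and unify the fetal/neonatal field prefixes (the prefix list and renaming rule are illustrative, not from the original script):

# Hypothetical continuation; the prefix handling is an assumption.
for event in data:
    event_strip = {}
    for key, value in event.items():
        if value in ('', None):
            continue  # drop the many nulls noted above
        for prefix in ('fscan_', 'nscan_', 'xscan_'):
            if key.startswith(prefix):
                key = 'scan_' + key[len(prefix):]  # common naming across scan types
                break
        event_strip[key] = value
    data_strip.append(event_strip)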
Example #34
def main():

    parser = argparse.ArgumentParser(
        description='Read project metadata from a REDCap Project')
    parser.add_argument(
        '--token',
        dest='token',
        default='',
        required=True,
        help='Specify the authentication/authorization token that will provide access to the REDCap project')
    parser.add_argument(
        '--url',
        dest='url',
        default='',
        required=True,
        help='Specify the url of the REDCap server to connect with')
    parser.add_argument(
        '--verify_ssl',
        dest='verify_ssl',
        default=True,
        help='Specify whether the SSL cert of the REDCap server should be checked')
    # parser.add_argument(
    #     '-f',
    #     '--forms',
    #     dest='forms',
    #     default='',
    #     help='Specify a list of forms, separated by spaces, for which metadata should be returned.')
    # parser.add_argument(
    #     '--fields',
    #     dest='fields',
    #     default='',
    #     help='Specify a list of fields, separated by spaces, for which metadata should be returned.')
    # Additional verbosity
    parser.add_argument(
        '-d',
        '--debug',
        dest="loglevel",
        default=logging.WARNING,
        const=logging.DEBUG,
        action="store_const",
        help="Print even more detailed output"
        )
    parser.add_argument(
        '-v',
        '--verbose',
        dest="loglevel",
        const=logging.INFO,
        action="store_const",
        help="Print detailed output"
        )

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # configure logger
    logging.basicConfig(level=args['loglevel'])

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # Turn the 'verify_ssl' parameter into the truth value we need to make a
    # REDCap connection
    if args['verify_ssl'] == 'y':
        args['verify_ssl'] = True
    elif args['verify_ssl'] == 'n':
        args['verify_ssl'] = False
    else:
        args['verify_ssl'] = True

    # Attempt to connect to the REDCap project
    try:
        project = Project(args['url'], args['token'], "", args['verify_ssl'])
    except:
        
        # Produce varying levels of output corresponding to loglevel
        logging.debug(traceback.format_list(traceback.extract_tb(sys.exc_traceback)))
        logging.info(traceback.format_exc())
        logging.error("Cannot connect to project at " + args['url'] + ' with token ' + args['token'] + "\nAdd '-d, --debug' flag for more info")
        
        quit()

    # my_forms = args['forms'].split()
    # my_fields = args['fields'].split()
    data = project.export_metadata(
        # forms=my_forms,
        # fields=my_fields,
        format='csv')
    print unicode(data)
Example #35
class ProjectTests(unittest.TestCase):
    """docstring for ProjectTests"""

    def setUp(self):
        self.url = 'https://redcap.vanderbilt.edu/api/'
        self.long_proj = Project(self.url, '1387872621BBF1C17CC47FD8AE25FF54')
        self.reg_proj = Project(self.url, '8E66DB6844D58E990075AFB51658A002')
        self.ssl_proj = Project(self.url, '8E66DB6844D58E990075AFB51658A002',
            verify_ssl=False)

    def tearDown(self):
        pass

    def test_good_init(self):
        """Ensure basic instantiation """
        self.assertIsInstance(self.long_proj, Project)
        self.assertIsInstance(self.reg_proj, Project)
        self.assertIsInstance(self.ssl_proj, Project)

    def test_normal_attrs(self):
        """Ensure projects are created with all normal attrs"""
        for attr in ('metadata', 'field_names', 'field_labels', 'forms',
            'events', 'arm_names', 'arm_nums', 'def_field'):
            self.assertTrue(hasattr(self.reg_proj, attr))

    def test_long_attrs(self):
        "proj.events/arm_names/arm_nums should not be empty in long projects"
        self.assertIsNotNone(self.long_proj.events)
        self.assertIsNotNone(self.long_proj.arm_names)
        self.assertIsNotNone(self.long_proj.arm_nums)

    def test_is_longitudinal(self):
        "Test the is_longitudinal method"
        self.assertFalse(self.reg_proj.is_longitudinal())
        self.assertTrue(self.long_proj.is_longitudinal())

    def test_regular_attrs(self):
        """proj.events/arm_names/arm_nums should be empty tuples"""
        for attr in 'events', 'arm_names', 'arm_nums':
            attr_obj = getattr(self.reg_proj, attr)
            self.assertIsNotNone(attr_obj)
            self.assertEqual(len(attr_obj), 0)

    def test_obj_export(self):
        """ Make sure we get a list of dicts"""
        data = self.reg_proj.export_records()
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def test_long_export(self):
        """After determining a unique event name, make sure we get a
        list of dicts"""
        unique_event = self.long_proj.events[0]['unique_event_name']
        data = self.long_proj.export_records(events=[unique_event])
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def is_good_csv(self, csv_string):
        "Helper to test csv strings"
        return isinstance(csv_string, basestring)

    def test_csv_export(self):
        """Test valid csv export """
        csv = self.reg_proj.export_records(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_metadata_export(self):
        """Test valid metadata csv export"""
        csv = self.reg_proj.export_metadata(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_fem_export(self):
        """ Test fem export in obj format gives list of dicts"""
        fem = self.long_proj.export_fem(format='obj')
        self.assertIsInstance(fem, list)
        for arm in fem:
            self.assertIsInstance(arm, dict)

    def test_file_export(self):
        """Test file export and proper content-type parsing"""
        record, field = '1', 'file'
        #Upload first to make sure file is there
        self.import_file()
        # Now export it
        content, headers = self.reg_proj.export_file(record, field)
        self.assertIsInstance(content, basestring)
        # We should at least get the filename in the headers
        for key in ['name']:
            self.assertIn(key, headers)
        # needs to raise ValueError for exporting non-file fields
        with self.assertRaises(ValueError):
            self.reg_proj.export_file(record=record, field='dob')
        # Delete and make sure we get an RedcapError with next export
        self.reg_proj.delete_file(record, field)
        with self.assertRaises(RedcapError):
            self.reg_proj.export_file(record, field)

    def import_file(self):
        upload_fname = self.upload_fname()
        with open(upload_fname, 'r') as fobj:
            response = self.reg_proj.import_file('1', 'file', upload_fname, fobj)
        return response

    def upload_fname(self):
        import os
        this_dir, this_fname = os.path.split(__file__)
        return os.path.join(this_dir, 'data.txt')

    def test_file_import(self):
        "Test file import"
        # Make sure a well-formed request doesn't throw RedcapError
        try:
            response = self.import_file()
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful imports")
        self.assertTrue('error' not in response)
        # Test importing a file to a non-file field raises a ValueError
        fname = self.upload_fname()
        with open(fname, 'r') as fobj:
            with self.assertRaises(ValueError):
                response = self.reg_proj.import_file('1', 'first_name',
                    fname, fobj)

    def test_file_delete(self):
        "Test file deletion"
        # upload a file
        fname = self.upload_fname()
        with open(fname, 'r') as fobj:
            self.reg_proj.import_file('1', 'file', fname, fobj)
        # make sure deleting doesn't raise
        try:
            self.reg_proj.delete_file('1', 'file')
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful deletes")

    def test_user_export(self):
        "Test user export"
        users = self.reg_proj.export_users()
        # A project must have at least one user
        self.assertTrue(len(users) > 0)
        req_keys = ['firstname', 'lastname', 'email', 'username',
                    'expiration', 'data_access_group', 'data_export',
                    'forms']
        for user in users:
            for key in req_keys:
                self.assertIn(key, user)

    def test_verify_ssl(self):
        """Test argument making for SSL verification"""
        # Test we won't verify SSL cert for non-verified project
        post_kwargs = self.ssl_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertFalse(post_kwargs['verify'])
        # Test we do verify SSL cert in normal project
        post_kwargs = self.reg_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertTrue(post_kwargs['verify'])


    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_metadata_to_df(self):
        """Test metadata export --> DataFrame"""
        df = self.reg_proj.export_metadata(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_to_df(self):
        """Test export --> DataFrame"""
        df = self.reg_proj.export_records(format='df')
        self.assertIsInstance(df, pd.DataFrame)
        # Test it's a normal index
        self.assertTrue(hasattr(df.index, 'name'))
        # Test for a MultiIndex on longitudinal df
        long_df = self.long_proj.export_records(format='df', event_name='raw')
        self.assertTrue(hasattr(long_df.index, 'names'))

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_df_kwargs(self):
        """Test passing kwargs to export DataFrame construction"""
        df = self.reg_proj.export_records(format='df',
            df_kwargs={'index_col': 'first_name'})
        self.assertEqual(df.index.name, 'first_name')
        self.assertTrue('study_id' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_metadata_df_kwargs(self):
        """Test passing kwargs to metadata DataFrame construction"""
        df = self.reg_proj.export_metadata(format='df',
            df_kwargs={'index_col': 'field_label'})
        self.assertEqual(df.index.name, 'field_label')
        self.assertTrue('field_name' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_import_dataframe(self):
        """Test importing a pandas.DataFrame"""
        df = self.reg_proj.export_records(format='df')
        response = self.reg_proj.import_records(df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)
        long_df = self.long_proj.export_records(event_name='raw', format='df')
        response = self.long_proj.import_records(long_df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)
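The pandas-related tests above double as a reference for PyCap's DataFrame hooks. Outside a test harness, the same export-with-df_kwargs call looks like the sketch below; the URL and token are placeholders, not working credentials.

from redcap import Project
import pandas as pd

# Placeholder URL/token -- substitute your own project's values.
project = Project('https://redcap.example.edu/api/', 'YOUR_API_TOKEN')

# Export records straight into a DataFrame; df_kwargs is passed through to
# pandas.read_csv, so here the index becomes first_name instead of the
# default record id field.
df = project.export_records(format='df',
                            df_kwargs={'index_col': 'first_name'})
assert isinstance(df, pd.DataFrame)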
Example #36
0
__author__ = 'od0236'

from redcap import Project, RedcapError
URL = 'http://redcap.dfdev.biz/redcap/api/'
API_KEY = 'B882733EE6C4FC181C7591DB30D07CF5'
project = Project(URL, API_KEY)

print (project.field_names)


import json
import fileinput

json_in = ""
for line in fileinput.input():
    json_in = json_in + line

json_in_dict = json.loads(json_in)

response = project.import_records(json_in_dict)

print (json_in_dict)
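For reference, a minimal sketch of the kind of JSON the script above expects on stdin: a list of record dicts keyed by REDCap field names. The field names and values here are hypothetical.

import json

# Hypothetical records; real field names come from project.field_names.
records = [
    {"record_id": "1", "first_name": "Ada"},
    {"record_id": "2", "first_name": "Grace"},
]
print(json.dumps(records))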
Example #37
0
class RedcapClient(object):
    """ Client for a REDCap server.

    :param redcap_uri: URI for the REDCap server's API
    :param token: API Token for a REDCap project.
    :param verify_ssl: verify the SSL certificate? (default: True)
    :raises RedcapError: if we failed to get the project's metadata
    :raises RequestException: if some other network-related failure occurs
    """
    def __init__(self, redcap_uri, token, verify_ssl=True):
        self.redcap_uri = redcap_uri
        msg = 'Initializing redcap interface for: ' + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise

    def get_data_from_redcap(self,
                             records_to_fetch=None,
                             events_to_fetch=None,
                             fields_to_fetch=None,
                             forms_to_fetch=None,
                             return_format='xml'):
        """ Exports REDCap records.

        :param records_to_fetch: if specified, only includes records in this
            list. Otherwise, includes all records.
        :type records_to_fetch: list or None

        :param events_to_fetch: if specified, only includes events in this list.
            Otherwise, includes all events.
        :type events_to_fetch: list or None

        :param fields_to_fetch: if specified, only includes fields in this list.
            Otherwise, includes all fields
        :type fields_to_fetch: list or None

        :param forms_to_fetch: if specified, only includes forms in this list.
            Otherwise, includes all forms.
        :type forms_to_fetch: list or None

        :param return_format: specifies the format of the REDCap response
            (default: xml)

        :return: response
        """
        logger.info('getting data from redcap')
        response = None  # returned unchanged if the export below fails
        try:
            response = self.project.export_records(records=records_to_fetch,
                                                   events=events_to_fetch,
                                                   fields=fields_to_fetch,
                                                   forms=forms_to_fetch,
                                                   format=return_format)
        except RedcapError as e:
            logger.debug(e.message)
        return response

    def send_data_to_redcap(self,
                            data,
                            max_retry_count,
                            overwrite=False,
                            retry_count=0):
        """ Sends records to REDCap.

        :param list data: records to send (a list of dicts).
        :param int max_retry_count: maximum number of resend attempts after a
            network error.
        :param bool overwrite: treat blank values as intentional?
            (default: False) When sending a record, if a field is blank, by
            default REDCap will not overwrite any existing value with a blank.
        :param int retry_count: the current attempt number; used internally
            when the call retries itself.
        :return: response
        :raises RedcapError: if sending the records fails for any reason.

        If MaxRetryError, NewConnectionError, or ConnectionError is caught,
        the function resends the same data up to max_retry_count times before
        exiting. Before each resend it waits (retry_count * 6) seconds.
        """
        overwrite_value = 'overwrite' if overwrite else 'normal'

        try:
            # The following line simulates github issue #108:
            # raise MaxRetryError('', 'localhost:8998', None)
            # The following line simulates
            #raise NewConnectionError('localhost:8998', 443)
            response = self.project.import_records(data,
                                                   overwrite=overwrite_value)
            return response
        except (MaxRetryError, NewConnectionError, ConnectionError) as e:
            logger.error("Exception encountered: ", exc_info=True)
            logger.debug(str(e.message) + ", Attempt no.: " + str(retry_count))
            if (retry_count == max_retry_count):
                message = "Exiting since network connection timed out after"\
                " reaching the maximum retry limit for resending data."
                logger.debug(message)
                sys.exit(message)
            # wait for some time before resending data
            time.sleep(retry_count * 6)
            # retry with the caller's original overwrite flag and propagate
            # the eventual response
            return self.send_data_to_redcap(data, max_retry_count, overwrite,
                                            retry_count + 1)
        except RedcapError as e:
            logger.debug(e.message)
            raise
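An illustrative use of the RedcapClient above might look like the following sketch. The URL, token, form name, and record values are placeholders, and a configured module-level logger is assumed, as in the surrounding code.

client = RedcapClient('https://redcap.example.edu/api/', 'YOUR_API_TOKEN',
                      verify_ssl=True)

# Export one form as CSV (all records, all events).
csv_data = client.get_data_from_redcap(forms_to_fetch=['demographics'],
                                       return_format='csv')

# Send one record back, retrying up to 5 times on network errors.
result = client.send_data_to_redcap([{'record_id': '1', 'first_name': 'Ada'}],
                                    max_retry_count=5)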
Example #38
0
File: rcv.py Project: sburns/ebrl_emails

import re
import time

from redcap import Project
from ebrl.mail import mail
from ebrl.config import rc as redcap_keys

URL = 'https://redcap.vanderbilt.edu/api/'
TO = ('*****@*****.**','*****@*****.**', '*****@*****.**')
#TO = ('*****@*****.**',)

if __name__ == '__main__':

    project = Project(URL, redcap_keys['rc'])    
    
    #  Fields we want from redcap
    fields = ['scan_num', 'im_date', 'eprime_number']

    data = project.export_records(fields=fields)
    
    #  Sort on behav id
    data.sort(key=lambda x: int(x['participant_id'].split('_')[0]))

    body = """<html><body><h2>RCV Redcap database as of {time}</h2>
    
<table border="1">
<tr><th>{0}</th><th>{1}</th><th>{2}</th></tr>
{d}
</table>
Example #39
0
import datetime

"""
in `project_info.txt` store
your token (first line)
REDCap-specific API URL (second line)
"""
fh = open('project_info.txt','r')
token = fh.next().strip()
apiurl = fh.next().strip()
fh.close()

"""
The heavy lifting is done in 2 lines of code
using Scott Burns' awesome PyCap module
"""
bpcoll = Project( apiurl, token )
bpcoll_csvrecords = bpcoll.export_records( format='csv' )

"""
Save a snapshot of the blood pressure data to a csv
"""
filenowname = 'bpcoll_{0}.csv'.format( datetime.datetime.today().strftime('%Y%m%d_%H%M') )
filecurrentname = 'bpcoll_current.csv'
fhout = open( filenowname, 'w' )
fhout.write( bpcoll_csvrecords )
fhout.close()
fhout = open( filecurrentname, 'w' )
fhout.write( bpcoll_csvrecords )
fhout.close()
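The snippet above relies on the Python 2 file iterator (fh.next()). A minimal Python 3 rendering of the same read-and-snapshot flow, using context managers and still assuming the two-line project_info.txt layout, might look like this:

import datetime
from redcap import Project

with open('project_info.txt') as fh:
    token = next(fh).strip()    # first line: API token
    apiurl = next(fh).strip()   # second line: REDCap API URL

bpcoll = Project(apiurl, token)
bpcoll_csvrecords = bpcoll.export_records(format='csv')

# Write the timestamped snapshot and the rolling "current" copy.
snapshot_name = 'bpcoll_{0}.csv'.format(
    datetime.datetime.today().strftime('%Y%m%d_%H%M'))
for out_name in (snapshot_name, 'bpcoll_current.csv'):
    with open(out_name, 'w') as fhout:
        fhout.write(bpcoll_csvrecords)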
Example #40
0
File: redcapClient.py Project: indera/redi
class RedcapClient(object):
    """ Client for a REDCap server.

    :param redcap_uri: URI for the REDCap server's API
    :param token: API Token for a REDCap project.
    :param verify_ssl: verify the SSL certificate? (default: True)
    :raises RedcapError: if we failed to get the project's metadata
    :raises RequestException: if some other network-related failure occurs
    """
    def __init__(self, redcap_uri, token, verify_ssl=True):
        self.redcap_uri = redcap_uri
        msg = 'Initializing redcap interface for: ' + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise

    def get_data_from_redcap(
            self,
            records_to_fetch=None,
            events_to_fetch=None,
            fields_to_fetch=None,
            forms_to_fetch=None,
            return_format='xml'):
        """ Exports REDCap records.

        :param records_to_fetch: if specified, only includes records in this
            list. Otherwise, includes all records.
        :type records_to_fetch: list or None

        :param events_to_fetch: if specified, only includes events in this list.
            Otherwise, includes all events.
        :type events_to_fetch: list or None

        :param fields_to_fetch: if specified, only includes fields in this list.
            Otherwise, includes all fields
        :type fields_to_fetch: list or None

        :param forms_to_fetch: if specified, only includes forms in this list.
            Otherwise, includes all forms.
        :type forms_to_fetch: list or None

        :param return_format: specifies the format of the REDCap response
            (default: xml)

        :return: response
        """
        logger.info('getting data from redcap')
        response = None  # returned unchanged if the export below fails
        try:
            response = self.project.export_records(
                records=records_to_fetch,
                events=events_to_fetch,
                fields=fields_to_fetch,
                forms=forms_to_fetch,
                format=return_format)
        except RedcapError as e:
            logger.debug(e.message)
        return response

    def send_data_to_redcap(self, data, overwrite=False):
        """ Sends records to REDCap.

        :param list of dict objects data: records to send.
        :param bool overwrite: treat blank values as intentional?
            (default: False) When sending a record, if a field is blank, by
            default REDCap will not overwrite any existing value with a blank.
        :return: response
        :raises RedcapError: if failed to send records for any reason.
        """
        overwrite_value = 'overwrite' if overwrite else 'normal'

        try:
            response = self.project.import_records(data,
                                                   overwrite=overwrite_value)
            return response
        except RedcapError as e:
            logger.debug(e.message)
            raise
Example #41
0
class redcap_connection(object):
    '''Contains the properties and values necessary to transform REDCap projects into SQL database tables.

    Required parameters:
        url         --> the URL of your REDCap database, usually of the form https://redcap.host/api/
        key         --> the API key generated within REDCap itself. If you do not have one, contact your system administrator about gaining access.
        table_name  --> the SQL table name you will be inserting into your ODBC SQL database.

    Optional parameters:
        dev = True  --> does not verify SSL; assumes you are working in a dev environment (or one without an up-to-date SSL certificate).
                        **WARNING: with this flag set to True you are susceptible to a man-in-the-middle attack; use at your own risk.**
        dev = False --> requires a valid SSL certificate from the hosted REDCap database.
        records     --> optional list of record IDs to restrict exports to.
        forms       --> optional list of form names whose fields should be included.
        '''
    def __init__(self,  key, url, table_name, connection='', driver='',server='',database='',  dev=False, project='',  records='', forms = []):
        self.connection                      = connection
        self.key                                  = key
        self.url                                  = url
        #self.server                                  = server
        #self.driver                          = driver
        #self.database                           = database
        self.dev                                 = dev
        self.table_name                         = table_name
        self.records                            = records
        self.forms                              = forms
        if self.dev == False:
            self.project                        = Project(self.url, self.key)
            print 'Production Environment'
        if self.dev == True:
            self.project                        = Project(self.url,self.key, verify_ssl=False)
            print 'Dev Environment'

    def schema(self, participant_id= True):
        '''Processes the REDCap metadata to generate sql schema and creates a table reflecting that schema in your ODBC destination
        PARAMS:
            participant_id:
                    If flagged true (default) it will make the first column in your schema = participant_id [varchar(50)]
        '''
        #Exports metadata for the forms specifed. If no forms are specified all forms are included. 
        self.metadata = self.project.export_metadata(forms=self.forms)
        
        #These if statements check if table name is given and handles the participant_id flag for inclusion of that field in the database
        if self.table_name:
            if participant_id == True:
                #participant ids are handled differently than the other columns because it is typically used as a primary key and should be included on tables created for all forms. 
                participant_id_string = '''[participant_id][varchar](500) PRIMARY KEY, \n'''
                #If the table name already exists in the database it is dropped so the new table may be created
                sqlstring = '''IF EXISTS (
                SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].['''+self.table_name+''']') AND type in (N'U')) 
                DROP TABLE [dbo].[''' + self.table_name + ''']
                CREATE TABLE [dbo].[''' + self.table_name + '''](\n''' + participant_id_string
        #In the case that participant_id is not set to True the same process as above occurs but the participant_id is not added as the first column in the table
        if participant_id != True:
            sqlstring = '''IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[dbo].['''+self.table_name+''']') AND type in (N'U')) 
                        DROP TABLE [dbo].[''' + self.table_name + '''] CREATE TABLE [dbo].[''' + self.table_name + '''](\n'''
            print 'participant_id != True'
            return
        
        #Escapes function if table_name not provided
        elif not self.table_name:
            print "Please provide a table name"
            return
    
 
   
        #This for loop reads the metadata and adds the appropriate column name and Microsoft SQL Server 2008 R2 datatype, 
        #these datatypes may require modification if destination server is not SQL Server 2008 or compatible, but that modification should be relatively painless. 
        
        
        #Loop iterations
        #1.) Iterate through all forms specified
        #2.)For each form , iterate through its metadata and process each column based off of field_type
        for form in self.forms:
        #Redcap metadata should be passed to this function as a list of dictionaries
            for dictionary in self.metadata:
                #This if statement verifies that only wanted forms are processed in the schema
                if dictionary['form_name'] == form:
                    #REDCap checkbox fields require that their output be stored as multiple columns, these next few lines create appropriate columns for each checkbox column
                    if dictionary['field_type'] == 'checkbox':
                        ls = []
                        #checkbox choices are parsed by splitting the select_choices string on '|', giving a list of items of the form ['1, Native-American', ' 2, Black']
                        ls = dictionary['select_choices_or_calculations'].split('|')
                        for i in ls:
                            #for each choice value the field_name + a choice number are given as strings, comments could be injected into the sql code here if desired as easy reminders of what the checkboxes correlate to
                            #for example sqlstring = sqlstring + '/*' + dictionary['select_choices_or_calculations'] + '*/'
                            sqlstring = sqlstring + '['+ dictionary['field_name'] + '___'+ str(int(i.split(",")[0])) + '] [Float] NULL, \n'
                            
                    #Descriptive fields are ignored as they contain no record information
                    elif dictionary['field_type'] == 'descriptive':
                        continue
                        
                    #This block of elif statements handles date / datetime fields in redcap by calling them as datetime sql 
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'date_ymd':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'date_mdy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'date_dmy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_ymd':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_mdy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_dmy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_seconds_ymd':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_seconds_mdy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                    elif dictionary['field_type'] == 'text' and dictionary['text_validation_type_or_show_slider_number'] == 'datetime_seconds_dmy':
                        sqlstring = sqlstring + '[' +dictionary['field_name'] + '][datetime] NULL, \n'
                        
                        
                        
                    #This logic codes REDCap text fields as SQL varchar(500) *NOTE* participant_id is handled at instantiation of the function, so it is removed here  
                    elif dictionary['field_type'] == 'text' and dictionary['field_name'] != 'participant_id' and dictionary['text_validation_type_or_show_slider_number'] != 'number' :
                        sqlstring = sqlstring +'[' +dictionary['field_name'] + '][varchar](500) NULL, \n'
                     
                    elif dictionary['field_type'] == 'text' and dictionary['field_name'] != 'participant_id' and dictionary['text_validation_type_or_show_slider_number'] == 'number' :
                        sqlstring = sqlstring +'[' +dictionary['field_name'] + '][float] NULL, \n'    
                    
                   #     
                    elif dictionary['field_type'] == 'dropdown' or dictionary['field_type'] == 'radio' or dictionary['field_type'] == 'calc' :
                        sqlstring = sqlstring +'[' +dictionary['field_name'] + '][float] NULL, \n'
                        
                    elif dictionary['field_type'] == 'notes':
                        sqlstring = sqlstring +'[' +dictionary['field_name'] + '][NVARCHAR](MAX) NULL, \n'
                    
                    elif dictionary['field_name'] == 'participant_id':
                        continue 
                    else:
                        print dictionary['field_name'] + " does not appear to have a coded SQL data type. Please bring this to the attention of the database administrator."
    
        
            
            sqlstring = sqlstring + '[' + form + '_complete' + '] [Float]  NULL, \n'
        #sqlstring[:-3] removes the trailing ', \n' (comma, space, newline) left by the last column definition
        sqlstring = sqlstring[:-3]    
        sqlstring = sqlstring + ''' ) on [PRIMARY]
        '''  
        #Encoding the sqlstring to ASCII helps reduce errors when writing strings to be passed to other programs from python 2.7
        sqlstring = sqlstring.encode('ASCII')                          
        c = cxn(self.connection)
        c = c.cursor()
        c.execute(sqlstring)
        c.commit()
        return #sqlstring
    

 
 
    def insert_records(self, second_entry=False):
        '''Commits a sqlstring to the database and table_name and connection string
        provided''' 
        print "Updating " + self.table_name +" this process may take several minutes. "   
        if not self.table_name:
            print 'A table_name must be specified whose schema matches your recordset'
            return
        
        if not self.connection:
            print 'A connection string must be provided in the format "DRIVER=[SQL Server]; SERVER=[SERVER NAME];DATABASE=[DATABASE NAME]'
            return
        
        #Process: In order to reduce strain on the REDCap server the record export is broken into batches of 500 or fewer (the last batch may be smaller)
        #ls is a list placeholder for the master list
        ls = []
        #This for loop downloads the entire list of participant_ids in a REDCap project
        
        
        for id in self.project.export_records(fields=['participant_id']):
            #This try --> except block should ensure that no double entry records are input
            try:
                int(id['participant_id'])
                ls.append(str(id['participant_id']))
            except ValueError:
                continue
        #This next line divides ls into a tuple of batches, each containing up to 500 record IDs (the last batch may be smaller)
        
        
        divided_list = tuple(ls[i:i+500] for i in xrange(0,len(ls),500))

        for i in divided_list:
            self.records = self.project.export_records(forms= self.forms, records = i)
            for pid in self.records:
                sqlstring = 'INSERT INTO dbo.' + '[' + self.table_name + ']('
                #print  ' PARTICIPANT ID ---------->  ' + pid['participant_id']
                for field in pid:
                    sqlstring = sqlstring + '[' + field + ']' + ','
                sqlstring = sqlstring[:-1] + ') VALUES('
                for value in pid:
                    if pid[value] != '':
                        sqlstring = sqlstring + "'" + pid[value].replace("'","''")+"'" +','
                    else:
                        sqlstring = sqlstring + 'NULL,'
                sqlstring = sqlstring[:-1] + ')'
                ls.append(sqlstring)
                
                c = self.connection
                c = cxn(c).cursor()
                c.execute(sqlstring)
                c.commit()
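An illustrative instantiation of the redcap_connection class above; the API key, URL, table name, connection string, and form names are placeholders, and the module's cxn ODBC helper is assumed to be importable and configured.

rc = redcap_connection(
    key='YOUR_API_TOKEN',                                   # placeholder
    url='https://redcap.example.edu/api/',                  # placeholder
    table_name='demographics_tbl',                          # placeholder
    connection='DRIVER={SQL Server};SERVER=myserver;DATABASE=mydb',
    forms=['demographics'],
    dev=False)

rc.schema()           # drop/recreate the SQL table from the REDCap metadata
rc.insert_records()   # export records in batches and insert them row by row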
Example #42
0
class RedcapClient(object):
    """ Client for a REDCap server.

    :param redcap_uri: URI for the REDCap server's API
    :param token: API Token for a REDCap project.
    :param verify_ssl: verify the SSL certificate? (default: True)
    :raises RedcapError: if we failed to get the project's metadata
    :raises RequestException: if some other network-related failure occurs
    """
    def __init__(self, redcap_uri, token, verify_ssl=True):
        self.redcap_uri = redcap_uri
        msg = 'Initializing redcap interface for: ' + redcap_uri
        logger.info(msg)
        self.token = token
        self.verify_ssl = verify_ssl

        try:
            self.project = Project(redcap_uri, token, "", verify_ssl)
            logger.info("redcap interface initialzed")
        except (RequestException, RedcapError) as e:
            logger.exception(e.message)
            raise

    def get_data_from_redcap(
            self,
            records_to_fetch=None,
            events_to_fetch=None,
            fields_to_fetch=None,
            forms_to_fetch=None,
            return_format='xml'):
        """ Exports REDCap records.

        :param records_to_fetch: if specified, only includes records in this
            list. Otherwise, includes all records.
        :type records_to_fetch: list or None

        :param events_to_fetch: if specified, only includes events in this list.
            Otherwise, includes all events.
        :type events_to_fetch: list or None

        :param fields_to_fetch: if specified, only includes fields in this list.
            Otherwise, includes all fields
        :type fields_to_fetch: list or None

        :param forms_to_fetch: if specified, only includes forms in this list.
            Otherwise, includes all forms.
        :type forms_to_fetch: list or None

        :param return_format: specifies the format of the REDCap response
            (default: xml)

        :return: response
        """
        logger.info('getting data from redcap')
        response = None  # returned unchanged if the export below fails
        try:
            response = self.project.export_records(
                records=records_to_fetch,
                events=events_to_fetch,
                fields=fields_to_fetch,
                forms=forms_to_fetch,
                format=return_format)
        except RedcapError as e:
            logger.debug(e.message)
        return response

    def send_data_to_redcap(self, data, max_retry_count, overwrite=False,
        retry_count=0):
        """ Sends records to REDCap.

        :param list data: records to send (a list of dicts).
        :param int max_retry_count: maximum number of resend attempts after a
            network error.
        :param bool overwrite: treat blank values as intentional?
            (default: False) When sending a record, if a field is blank, by
            default REDCap will not overwrite any existing value with a blank.
        :param int retry_count: the current attempt number; used internally
            when the call retries itself.
        :return: response
        :raises RedcapError: if sending the records fails for any reason.

        If MaxRetryError, NewConnectionError, or ConnectionError is caught,
        the function resends the same data up to max_retry_count times before
        exiting. Before each resend it waits (retry_count * 6) seconds.
        """
        overwrite_value = 'overwrite' if overwrite else 'normal'

        try:
            # The following line simulates github issue #108:
            # raise MaxRetryError('', 'localhost:8998', None)
            # The following line simulates 
            #raise NewConnectionError('localhost:8998', 443)
            response = self.project.import_records(data,
                overwrite=overwrite_value)
            return response
        except (MaxRetryError, NewConnectionError, ConnectionError) as e:
            logger.error("Exception encountered: ", exc_info = True)
            logger.debug(str(e.message) + ", Attempt no.: " + str(retry_count))
            if (retry_count == max_retry_count):
                message = "Exiting since network connection timed out after"\
                " reaching the maximum retry limit for resending data."
                logger.debug(message)
                sys.exit(message)
            # wait for some time before resending data
            time.sleep(retry_count * 6)
            # retry with the caller's original overwrite flag and propagate
            # the eventual response
            return self.send_data_to_redcap(data, max_retry_count, overwrite,
                                            retry_count + 1)
        except RedcapError as e:
            logger.debug(e.message)
            raise
Example #43
0
def main():

    parser = argparse.ArgumentParser(
        description='Read some data from a REDCap Project')
    parser.add_argument(
        '--token',
        dest='token',
        default='',
        required=True,
        help='Specify the authentication/authorization token that will provide access to the REDCap project')
    parser.add_argument(
        '--url',
        dest='url',
        default='',
        required=True,
        help='Specify the url of the REDCap server to connect with')
    parser.add_argument(
        '--verify_ssl',
        dest='verify_ssl',
        default=True,
        help='Specify whether the SSL cert of the REDCap server should be checked')
    parser.add_argument('-i', '--import_data', dest='import_data', default='',
                        help='Specify the input data file to load into REDCap')
    parser.add_argument(
        '-f',
        '--forms',
        dest='forms',
        default='',
        help='Specify a list of forms, separated by spaces, for which data should be returned.')

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # Turn the 'verify_ssl' parameter into the truth value we need to make a
    # REDCap connection
    if args['verify_ssl'] == 'y':
        args['verify_ssl'] = True
    elif args['verify_ssl'] == 'n':
        args['verify_ssl'] = False

    # Attempt to connect to the REDCap project
    try:
        project = Project(args['url'], args['token'], "", args['verify_ssl'])
    except:
        print "Cannot connect to project at " + args['url'] + ' with token ' + args['token']
        quit()

    # either we export data...
    if args['import_data'] == '':
        my_forms = args['forms'].split()
        data = project.export_records(
            forms=my_forms,
            format='csv',
            event_name='unique')
        print str(data)
    else:  # ...or we import data
        file = args['import_data']
        try:
            input = open(file, 'r')
        except:
            print "Cannot open file " + file
            quit()
        response = project.import_records(input.read(), format='csv')
        print response
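Stripped of the argument handling, the export branch of main() above reduces to a couple of PyCap calls; the URL, token, and form name below are placeholders.

from redcap import Project

project = Project('https://redcap.example.edu/api/', 'YOUR_API_TOKEN')
csv_data = project.export_records(forms=['demographics'], format='csv',
                                  event_name='unique')
print(csv_data)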
Example #44
0
File: backend.py Project: sburns/red-light
 def __init__(self, url, api, initial_fields):
     self.proj = Project(url, api)
     self.make_df(initial_fields)
Example #45
0
def main():

    parser = argparse.ArgumentParser(
        description='Read project metadata from a REDCap Project')
    parser.add_argument(
        '--token',
        dest='token',
        default='',
        required=True,
        help=
        'Specify the authentication/authorization token that will provide access to the REDCap project'
    )
    parser.add_argument(
        '--url',
        dest='url',
        default='',
        required=True,
        help='Specify the url of the REDCap server to connect with')
    parser.add_argument(
        '--verify_ssl',
        dest='verify_ssl',
        default=True,
        help=
        'Specify whether the SSL cert of the REDCap server should be checked')
    # parser.add_argument(
    #     '-f',
    #     '--forms',
    #     dest='forms',
    #     default='',
    #     help='Specify a list of forms, separated by spaces, for which metadata should be returned.')
    # parser.add_argument(
    #     '--fields',
    #     dest='fields',
    #     default='',
    #     help='Specify a list of fields, separated by spaces, for which metadata should be returned.')

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # Turn the 'verify_ssl' parameter into the truth value we need to make a
    # REDCap connection
    if args['verify_ssl'] == 'y':
        args['verify_ssl'] = True
    elif args['verify_ssl'] == 'n':
        args['verify_ssl'] = False
    else:
        args['verify_ssl'] = True

    # Attempt to connect to the REDCap project
    try:
        project = Project(args['url'], args['token'], "", args['verify_ssl'])
    except:
        print "Cannot connect to project at " + args[
            'url'] + ' with token ' + args['token']
        quit()

    # my_forms = args['forms'].split()
    # my_fields = args['fields'].split()
    data = project.export_metadata(
        # forms=my_forms,
        # fields=my_fields,
        format='csv')
    print unicode(data)
Example #46
0
from redcap import Project
import os
import csv
import pandas

api_url = 'https://redcap.ucdenver.edu/api/'
api_key = os.environ['R_API_TOKEN']

project = Project(api_url, api_key)

csv_data = project.export_records(format='csv')

data_frame = project.export_records(format='df')

pandas.DataFrame.to_csv(data_frame,
"/work/ics/data/projects/wagerlab/labdata/data/Pain_Gen/Behavioral/raw/surveys/redcap/paingen_redcap.csv")
Example #47
0
def main():

    parser = argparse.ArgumentParser(
        description='Read some data from a REDCap Project')
    parser.add_argument(
        '--token',
        dest='token',
        default='',
        required=True,
        help='Specify the authentication/authorization token that will provide access to the REDCap project')
    parser.add_argument(
        '--url',
        dest='url',
        default='',
        required=True,
        help='Specify the url of the REDCap server to connect with')
    parser.add_argument(
        '--verify_ssl',
        dest='verify_ssl',
        default=True,
        help='Specify whether the SSL cert of the REDCap server should be checked')
    parser.add_argument('-i', '--import_data', dest='import_data', default='',
                        help='Specify the input data file to load into REDCap')
    parser.add_argument(
        '-f',
        '--forms',
        dest='forms',
        default='',
        help='Specify a list of forms, separated by spaces, for which data should be returned.')
    parser.add_argument(
        '-t',
        '--type',
        choices=['json', 'csv', 'xml'],
        dest='data_type',
        default='csv',
        help='Specify the file type used as input or output. Valid types: json, csv, xml')
    parser.add_argument(
        '--fields',
        dest='fields',
        default='',
        help='Specify a list of fields, separated by spaces, for which data should be returned.')
    parser.add_argument(
        '-e',
        '--events',
        dest='events',
        default='',
        help='Specify a list of events, separated by spaces, for which data should be returned.')
    parser.add_argument(
        '-r',
        '--records',
        dest='records',
        default='',
        help='Specify a list of records, separated by spaces, for which data should be returned.')

    # prepare the arguments we were given
    args = vars(parser.parse_args())

    # According to http://pycap.readthedocs.org/en/latest/api.html
    # allowed data_types are: csv, json, xml
    data_type = args['data_type']

    # Turn the 'verify_ssl' parameter into the truth value we need to make a
    # REDCap connection
    if args['verify_ssl'] == 'y':
        args['verify_ssl'] = True
    else:
        args['verify_ssl'] = False

    # Attempt to connect to the REDCap project
    try:
        project = Project(args['url'], args['token'], "", args['verify_ssl'])
    except:
        print "Cannot connect to project at " + args['url'] + ' with token ' + args['token']
        quit()

    # either we export data...
    if args['import_data'] == '':
        my_forms = args['forms'].split()
        my_fields = args['fields'].split()
        my_events = args['events'].split()
        my_records = args['records'].split()
        data = project.export_records(
            forms=my_forms,
            format = data_type,
            fields=my_fields,
            events=my_events,
            records=my_records,
            event_name='unique')
        if 'json' == data_type:
            print json.dumps(data, ensure_ascii=False)
        else:
            print str(data)
    else:
        # ...or we import data
        file = args['import_data']
        try:
            input = open(file, 'r')
        except IOError:
            print "Cannot open file " + file
            quit()
        if 'json' == data_type:
            json_data = json.load(input)
            response = project.import_records(json_data)
        else:
            response = project.import_records(input.read(), format = data_type)

        print response
Example #48
0
import pycurl
import cStringIO
import pandas
import itertools
from redcap import Project, RedcapError


apiurl = 'https://redcap.stanford.edu/api/'
token = ''
rc_project = Project(apiurl, token)
long_proj = Project(apiurl, token)
ssl_proj = Project(apiurl, token, verify_ssl=True)
survey_proj = Project(apiurl, '')


def metadata_to_df(rc_project):
    df = rc_project.export_metadata(format='df')
    return df


def export_always_include_def_field(rc_project):
    """ Ensure def_field always comes in the output even if not explicity
    given in a requested form """
    # If we just ask for a form, must also get def_field in there
    records = rc_project.export_records(forms=['imaging'])


def is_longitudinal(rc_project):
    "Test the is_longitudinal method"
    rc_project.assertFalse(rc_project.reg_proj.is_longitudinal())
    rc_project.assertTrue(rc_project.long_proj.is_longitudinal())
Example #49
0
    for field_name in field_list:
        if (isinstance(in_row[i], str)):
            val_str = in_row[i]
        else:
            val_str = in_row[i].strftime("%Y-%m-%d %H:%M:%S")
        out_row[field_name] = val_str
        i += 1
    out_row.update( fixed_field_list )
    out_rows.append(out_row)
    in_row = cur.fetchone()

print ( "Read ", len(out_rows), " records from sql database: ", config["sql-database"] )

conn.close()

from redcap import Project, RedcapError
URL = (config["redcapURL"])
API_KEY = (config["redcapApiKey"])
project = Project(URL, API_KEY)


batch_start = 0
while ( batch_start < len(out_rows)):
    response = project.import_records(out_rows[batch_start : batch_start + batch_count])
    if ( batch_start + batch_count <= len(out_rows)):
        print ("Added a batch of ", batch_count, " records")
    else:
        print ("Added a batch of ", len(out_rows) - batch_start, " records")
    batch_start += batch_count

Example #50
0
class ProjectTests(unittest.TestCase):
    """docstring for ProjectTests"""

    def setUp(self):
        self.url = 'https://redcap.vanderbilt.edu/api/'
        self.long_proj = Project(self.url, '1387872621BBF1C17CC47FD8AE25FF54')
        self.reg_proj = Project(self.url, '8E66DB6844D58E990075AFB51658A002')

    def tearDown(self):
        pass

    def test_good_init(self):
        """Ensure basic instantiation """
        self.assertIsInstance(self.long_proj, Project)
        self.assertIsInstance(self.reg_proj, Project)

    def test_normal_attrs(self):
        """Ensure projects are created with all normal attrs"""
        for attr in ('metadata', 'field_names', 'field_labels', 'forms',
            'events', 'arm_names', 'arm_nums', 'def_field'):
            self.assertTrue(hasattr(self.reg_proj, attr))

    def test_long_attrs(self):
        "proj.events/arm_names/arm_nums should not be empty in long projects"
        self.assertIsNotNone(self.long_proj.events)
        self.assertIsNotNone(self.long_proj.arm_names)
        self.assertIsNotNone(self.long_proj.arm_nums)

    def test_regular_attrs(self):
        """proj.events/arm_names/arm_nums should be empty tuples"""
        for attr in 'events', 'arm_names', 'arm_nums':
            attr_obj = getattr(self.reg_proj, attr)
            self.assertIsNotNone(attr_obj)
            self.assertEqual(len(attr_obj), 0)

    def test_obj_export(self):
        """ Make sure we get a list of dicts"""
        data = self.reg_proj.export_records()
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def test_long_export(self):
        """After determining a unique event name, make sure we get a
        list of dicts"""
        unique_event = self.long_proj.events[0]['unique_event_name']
        data = self.long_proj.export_records(events=[unique_event])
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def is_good_csv(self, csv_string):
        "Helper to test csv strings"
        return isinstance(csv_string, basestring)

    def test_csv_export(self):
        """Test valid csv export """
        csv = self.reg_proj.export_records(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_metadata_export(self):
        """Test valid metadata csv export"""
        csv = self.reg_proj.export_metadata(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_file_export(self):
        """Test file export and proper content-type parsing"""
        content, headers = self.reg_proj.export_file(record='1', field='file')
        self.assertIsInstance(content, basestring)
        # We should at least get the filename in the headers
        for key in ['name']:
            self.assertIn(key, headers)
        # needs to raise for bad file export requests
        from requests.exceptions import HTTPError
        with self.assertRaises(HTTPError):
            self.reg_proj.export_file(record='1', field='dob')

    def test_file_import(self):
        "Test file import"
        import os
        this_dir, this_fname = os.path.split(__file__)
        upload_fname = os.path.join(this_dir, 'data.txt')
        # Test a well-formed request
        with open(upload_fname, 'r') as fobj:
            response = self.reg_proj.import_file('1', 'file', upload_fname, fobj)
        self.assertTrue('error' not in response)
        # Test importing a file to a non-file field raises a ValueError
        with open(upload_fname, 'r') as fobj:
            with self.assertRaises(ValueError):
                response = self.reg_proj.import_file('1', 'first_name',
                    upload_fname, fobj)

    @unittest.skipIf(skip_pd, "Couldnl't import pandas")
    def test_metadata_to_df(self):
        """Test metadata export --> DataFrame"""
        df = self.reg_proj.export_metadata(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_to_df(self):
        """Test export --> DataFrame"""
        df = self.reg_proj.export_records(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_df_kwargs(self):
        """Test passing kwargs to export DataFrame construction"""
        df = self.reg_proj.export_records(format='df',
            df_kwargs={'index_col': 'first_name'})
        self.assertEqual(df.index.name, 'first_name')
        self.assertTrue('study_id' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_metadata_df_kwargs(self):
        """Test passing kwargs to metadata DataFrame construction"""
        df = self.reg_proj.export_metadata(format='df',
            df_kwargs={'index_col': 'field_label'})
        self.assertEqual(df.index.name, 'field_label')
        self.assertTrue('field_name' in df)
Example #51
0
## GFDX notebook: WHO Recommendation Analysis
"""

# Import the package used to pull data from the REDCap API
from redcap import Project
import pandas as pd
import numpy as np
import os
from tqdm.notebook import tqdm  # progress bar

api_key = os.environ.get("APIKEY")

# Connecting to GFDx Redcap API
URL = "https://redcap.emory.edu/api/"
project = Project(URL, api_key)

# Pulls out variables of interest from REDCap
fields_of_interest = [
    "country_code",
    "standard_nutrient",
    "nutrient_level",
    "nutrient_compound",
    "latest_intake_api",
    "food_status_api",
]
subset = project.export_records(fields=fields_of_interest, format="df")

# Reset index
df = subset.copy()
df.reset_index(inplace=True)
Example #52
0
 def setUp(self):
     self.url = 'https://redcap.vanderbilt.edu/api/'
     self.long_proj = Project(self.url, '1387872621BBF1C17CC47FD8AE25FF54')
     self.reg_proj = Project(self.url, '8E66DB6844D58E990075AFB51658A002')
Example #53
0
class ProjectTests(unittest.TestCase):
    """docstring for ProjectTests"""

    long_proj_url = 'https://redcap.longproject.edu/api/'
    normal_proj_url = 'https://redcap.normalproject.edu/api/'
    ssl_proj_url = 'https://redcap.sslproject.edu/api/'
    survey_proj_url = 'https://redcap.surveyproject.edu/api/'
    bad_url = 'https://redcap.badproject.edu/api'
    reg_token = 'supersecrettoken'

    def setUp(self):
        self.create_projects()

    def tearDown(self):
        pass

    def add_long_project_response(self):
        def request_callback_long(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)
            headers = {"Content-Type": "application/json"}

            request_type = data["content"][0]

            if "returnContent" in data:
                resp = {"count": 1}

            elif (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type == "version"):
                resp = b'8.6.0'
                headers = {'content-type': 'text/csv; charset=utf-8'}
                return (201, headers, resp)
            elif (request_type == "event"):
                resp = [{
                    'unique_event_name': "raw"
                }]
            elif (request_type == "arm"):
                resp = [{
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type in ["record", "formEventMapping"]):
                if "csv" in data["format"]:
                    resp = "record_id,test,redcap_event_name\n1,1,raw"
                    headers = {'content-type': 'text/csv; charset=utf-8'}
                    return (201, headers, resp)
                else:
                    resp = [{"field_name":"record_id"}, {"field_name":"test"}]

            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.long_proj_url,
            callback=request_callback_long,
            content_type="application/json",
        )

    def add_normalproject_response(self):
        def request_callback_normal(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)
            headers = {"Content-Type": "application/json"}

            if " filename" in data:
                resp = {}
            else:
                request_type = data.get("content", ['unknown'])[0]

                if "returnContent" in data:
                    if "non_existent_key" in data["data"][0]:
                        resp = {"error": "invalid field"}
                    else:
                        resp = {"count": 1}
                elif (request_type == "metadata"):
                    if "csv" in data["format"]:
                        resp = "field_name,field_label,form_name,arm_num,name\n"\
                            "record_id,Record ID,Test Form,1,test\n"
                        headers = {'content-type': 'text/csv; charset=utf-8'}
                        return (201, headers, resp)

                    else:
                        resp = [{
                            'field_name': 'record_id',
                            'field_label': 'Record ID',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "test",
                            "field_type": "text",
                        }, {
                            'field_name': 'file',
                            'field_label': 'File',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "file",
                            "field_type": "file",
                        }, {
                            'field_name': 'dob',
                            'field_label': 'Date of Birth',
                            'form_name': 'Test Form',
                            "arm_num": 1,
                            "name": "dob",
                            "field_type": "date",
                        }]
                elif (request_type == "version"):
                    resp = {
                        'error': "no version info"
                    }
                elif (request_type == "event"):
                    resp = {
                        'error': "no events"
                    }
                elif (request_type == "arm"):
                    resp = {
                        'error': "no arm"
                    }
                elif (request_type == "record"):
                    if "csv" in data["format"]:
                        resp = "record_id,test,first_name,study_id\n1,1,Peter,1"
                        headers = {'content-type': 'text/csv; charset=utf-8'}
                        return (201, headers, resp)
                    elif "exportDataAccessGroups" in data:
                        resp = [
                            {"field_name":"record_id", "redcap_data_access_group": "group1"},
                            {"field_name":"test", "redcap_data_access_group": "group1"}
                        ]
                    elif "label" in data.get("rawOrLabel"):
                        resp = [{"matcheck1___1": "Foo"}]
                    else:
                        resp = [
                            {"record_id": "1", "test": "test1"},
                            {"record_id": "2", "test": "test"}
                        ]
                elif (request_type == "file"):
                    resp = {}
                    headers["content-type"] = "text/plain;name=data.txt"
                elif (request_type == "user"):
                    resp = [
                        {
                            'firstname': "test",
                            'lastname': "test",
                            'email': "test",
                            'username': "******",
                            'expiration': "test",
                            'data_access_group': "test",
                            'data_export': "test",
                            'forms': "test"
                        }
                    ]

            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.normal_proj_url,
            callback=request_callback_normal,
            content_type="application/json",
        )

    def add_ssl_project(self):
        def request_callback_ssl(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)

            request_type = data["content"][0]
            if (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            if (request_type == "version"):
                resp = {
                    'error': "no version info"
                }
            if (request_type == "event"):
                resp = {
                    'error': "no events"
                }
            if (request_type == "arm"):
                resp = {
                    'error': "no arm"
                }

            headers = {"Content-Type": "application/json"}
            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.ssl_proj_url,
            callback=request_callback_ssl,
            content_type="application/json",
        )

    def add_survey_project(self):
        def request_callback_survey(request):
            parsed = urlparse.urlparse("?{}".format(request.body))
            data = urlparse.parse_qs(parsed.query)

            request_type = data["content"][0]
            if (request_type == "metadata"):
                resp = [{
                    'field_name': 'record_id',
                    'field_label': 'Record ID',
                    'form_name': 'Test Form',
                    "arm_num": 1,
                    "name": "test"
                }]
            elif (request_type == "version"):
                resp = {
                    'error': "no version info"
                }
            elif (request_type == "event"):
                resp = {
                    'error': "no events"
                }
            elif (request_type == "arm"):
                resp = {
                    'error': "no arm"
                }
            elif (request_type == "record"):
                resp = [
                    {"field_name":"record_id", "redcap_survey_identifier": "test", "demographics_timestamp": "a_real_date"},
                    {"field_name":"test", "redcap_survey_identifier": "test", "demographics_timestamp": "a_real_date"}
                ]

            headers = {"Content-Type": "application/json"}
            return (201, headers, json.dumps(resp))

        responses.add_callback(
            responses.POST,
            self.survey_proj_url,
            callback=request_callback_survey,
            content_type="application/json",
        )

    @responses.activate
    def create_projects(self):
        self.add_long_project_response()
        self.add_normalproject_response()
        self.add_ssl_project()
        self.add_survey_project()

        self.long_proj = Project(self.long_proj_url, self.reg_token)
        self.reg_proj = Project(self.normal_proj_url, self.reg_token)
        self.ssl_proj = Project(self.ssl_proj_url, self.reg_token, verify_ssl=False)
        self.survey_proj = Project(self.survey_proj_url, self.reg_token)


    def test_good_init(self):
        """Ensure basic instantiation """

        self.assertIsInstance(self.long_proj, Project)
        self.assertIsInstance(self.reg_proj, Project)
        self.assertIsInstance(self.ssl_proj, Project)

    def test_normal_attrs(self):
        """Ensure projects are created with all normal attrs"""

        for attr in ('metadata', 'field_names', 'field_labels', 'forms',
            'events', 'arm_names', 'arm_nums', 'def_field'):
            self.assertTrue(hasattr(self.reg_proj, attr))

    def test_long_attrs(self):
        "proj.events/arm_names/arm_nums should not be empty in long projects"

        self.assertIsNotNone(self.long_proj.events)
        self.assertIsNotNone(self.long_proj.arm_names)
        self.assertIsNotNone(self.long_proj.arm_nums)

    def test_is_longitudinal(self):
        "Test the is_longitudinal method"
        self.assertFalse(self.reg_proj.is_longitudinal())
        self.assertTrue(self.long_proj.is_longitudinal())

    def test_regular_attrs(self):
        """proj.events/arm_names/arm_nums should be empty tuples"""
        for attr in 'events', 'arm_names', 'arm_nums':
            attr_obj = getattr(self.reg_proj, attr)
            self.assertIsNotNone(attr_obj)
            self.assertEqual(len(attr_obj), 0)

    @responses.activate
    def test_json_export(self):
        """ Make sure we get a list of dicts"""
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    @responses.activate
    def test_long_export(self):
        """After determining a unique event name, make sure we get a
        list of dicts"""
        self.add_long_project_response()
        unique_event = self.long_proj.events[0]['unique_event_name']
        data = self.long_proj.export_records(events=[unique_event])
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    @responses.activate
    def test_import_records(self):
        "Test record import"
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        response = self.reg_proj.import_records(data)
        self.assertIn('count', response)
        self.assertNotIn('error', response)

    @responses.activate
    def test_import_exception(self):
        "Test record import throws RedcapError for bad import"
        self.add_normalproject_response()
        data = self.reg_proj.export_records()
        data[0]['non_existent_key'] = 'foo'
        with self.assertRaises(RedcapError) as cm:
            self.reg_proj.import_records(data)
        exc = cm.exception
        self.assertIn('error', exc.args[0])

    def is_good_csv(self, csv_string):
        "Helper to test csv strings"
        return isinstance(csv_string, basestring)

    @responses.activate
    def test_csv_export(self):
        """Test valid csv export """
        self.add_normalproject_response()
        csv = self.reg_proj.export_records(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    @responses.activate
    def test_metadata_export(self):
        """Test valid metadata csv export"""
        self.add_normalproject_response()
        csv = self.reg_proj.export_metadata(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_bad_creds(self):
        "Test that exceptions are raised with bad URL or tokens"
        with self.assertRaises(RedcapError):
            Project(self.bad_url, self.reg_token)
        with self.assertRaises(RedcapError):
            Project(self.bad_url, '1')

    @responses.activate
    def test_fem_export(self):
        """ Test fem export in json format gives list of dicts"""
        self.add_long_project_response()
        fem = self.long_proj.export_fem(format='json')
        self.assertIsInstance(fem, list)
        for arm in fem:
            self.assertIsInstance(arm, dict)

    @responses.activate
    def test_file_export(self):
        """Test file export and proper content-type parsing"""
        self.add_normalproject_response()
        record, field = '1', 'file'
        #Upload first to make sure file is there
        self.import_file()
        # Now export it
        content, headers = self.reg_proj.export_file(record, field)
        self.assertIsInstance(content, basestring)
        # We should at least get the filename in the headers
        for key in ['name']:
            self.assertIn(key, headers)
        # needs to raise ValueError for exporting non-file fields
        with self.assertRaises(ValueError):
            self.reg_proj.export_file(record=record, field='dob')

    def import_file(self):
        upload_fname = self.upload_fname()
        with open(upload_fname, 'r') as fobj:
            response = self.reg_proj.import_file('1', 'file', upload_fname, fobj)
        return response

    def upload_fname(self):
        import os
        this_dir, this_fname = os.path.split(__file__)
        return os.path.join(this_dir, 'data.txt')

    @responses.activate
    def test_file_import(self):
        "Test file import"
        self.add_normalproject_response()
        # Make sure a well-formed request doesn't throw RedcapError
        try:
            response = self.import_file()
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful imports")
        self.assertTrue('error' not in response)
        # Test importing a file to a non-file field raises a ValueError
        fname = self.upload_fname()
        with open(fname, 'r') as fobj:
            with self.assertRaises(ValueError):
                response = self.reg_proj.import_file('1', 'first_name',
                    fname, fobj)

    @responses.activate
    def test_file_delete(self):
        "Test file deletion"
        self.add_normalproject_response()
        # make sure deleting doesn't raise
        try:
            self.reg_proj.delete_file('1', 'file')
        except RedcapError:
            self.fail("Shouldn't throw RedcapError for successful deletes")

    @responses.activate
    def test_user_export(self):
        "Test user export"
        self.add_normalproject_response()
        users = self.reg_proj.export_users()
        # A project must have at least one user
        self.assertTrue(len(users) > 0)
        req_keys = ['firstname', 'lastname', 'email', 'username',
                    'expiration', 'data_access_group', 'data_export',
                    'forms']
        for user in users:
            for key in req_keys:
                self.assertIn(key, user)

    def test_verify_ssl(self):
        """Test argument making for SSL verification"""
        # Test we won't verify SSL cert for non-verified project
        post_kwargs = self.ssl_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertFalse(post_kwargs['verify'])
        # Test we do verify SSL cert in normal project
        post_kwargs = self.reg_proj._kwargs()
        self.assertIn('verify', post_kwargs)
        self.assertTrue(post_kwargs['verify'])

    @responses.activate
    def test_export_data_access_groups(self):
        """Test we get 'redcap_data_access_group' in exported data"""
        self.add_normalproject_response()
        records = self.reg_proj.export_records(export_data_access_groups=True)
        for record in records:
            self.assertIn('redcap_data_access_group', record)
        # When not passed, that key shouldn't be there
        records = self.reg_proj.export_records()
        for record in records:
            self.assertNotIn('redcap_data_access_group', record)

    @responses.activate
    def test_export_survey_fields(self):
        """Test that we get the appropriate survey keys in the exported
        data.

        Note that the 'demographics' form has been set up as the survey
        in the `survey_proj` project. The _timestamp field will vary for
        users as their survey form will be named differently"""
        self.add_survey_project()
        self.add_normalproject_response()
        records = self.survey_proj.export_records(export_survey_fields=True)
        for record in records:
            self.assertIn('redcap_survey_identifier', record)
            self.assertIn('demographics_timestamp', record)
        # The regular project doesn't have a survey setup. Users should
        # be able to pass this argument as True, but it winds up a no-op.
        records = self.reg_proj.export_records(export_survey_fields=True)
        for record in records:
            self.assertNotIn('redcap_survey_identifier', record)
            self.assertNotIn('demographics_timestamp', record)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_metadata_to_df(self):
        """Test metadata export --> DataFrame"""
        self.add_normalproject_response()
        df = self.reg_proj.export_metadata(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_export_to_df(self):
        """Test export --> DataFrame"""
        self.add_normalproject_response()
        self.add_long_project_response()
        df = self.reg_proj.export_records(format='df')
        self.assertIsInstance(df, pd.DataFrame)
        # Test it's a normal index
        self.assertTrue(hasattr(df.index, 'name'))
        # Test for a MultiIndex on longitudinal df
        long_df = self.long_proj.export_records(format='df', event_name='raw')
        self.assertTrue(hasattr(long_df.index, 'names'))

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_export_df_kwargs(self):
        """Test passing kwargs to export DataFrame construction"""
        self.add_normalproject_response()
        df = self.reg_proj.export_records(format='df',
            df_kwargs={'index_col': 'first_name'})
        self.assertEqual(df.index.name, 'first_name')
        self.assertTrue('study_id' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_metadata_df_kwargs(self):
        """Test passing kwargs to metadata DataFrame construction"""
        self.add_normalproject_response()
        df = self.reg_proj.export_metadata(format='df',
            df_kwargs={'index_col': 'field_label'})
        self.assertEqual(df.index.name, 'field_label')
        self.assertTrue('field_name' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    @responses.activate
    def test_import_dataframe(self):
        """Test importing a pandas.DataFrame"""
        self.add_normalproject_response()
        self.add_long_project_response()
        df = self.reg_proj.export_records(format='df')
        response = self.reg_proj.import_records(df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)
        long_df = self.long_proj.export_records(event_name='raw', format='df')
        response = self.long_proj.import_records(long_df)
        self.assertIn('count', response)
        self.assertNotIn('error', response)

    @responses.activate
    def test_date_formatting(self):
        """Test date_format parameter"""
        self.add_normalproject_response()

        def import_factory(date_string):
            return [{'study_id': '1',
                     'dob': date_string}]

        # Default YMD with dashes
        import_ymd = import_factory('2000-01-01')
        response = self.reg_proj.import_records(import_ymd)
        self.assertEqual(response['count'], 1)

        # DMY with /
        import_dmy = import_factory('31/01/2000')
        response = self.reg_proj.import_records(import_dmy, date_format='DMY')
        self.assertEqual(response['count'], 1)

        import_mdy = import_factory('12/31/2000')
        response = self.reg_proj.import_records(import_mdy, date_format='MDY')
        self.assertEqual(response['count'], 1)

    def test_get_version(self):
        """Testing retrieval of REDCap version associated with Project"""
        self.assertTrue(isinstance(semantic_version.Version('1.0.0'), type(self.long_proj.redcap_version)))

    @responses.activate
    def test_export_checkbox_labels(self):
        """Testing the export of checkbox labels as field values"""
        self.add_normalproject_response()
        self.assertEqual(
            self.reg_proj.export_records(
                raw_or_label='label',
                export_checkbox_labels=True)[0]['matcheck1___1'],
                'Foo'
        )

    @responses.activate
    def test_export_always_include_def_field(self):
        """ Ensure def_field always comes in the output even if not explicity
        given in a requested form """
        self.add_normalproject_response()
        # If we just ask for a form, must also get def_field in there
        records = self.reg_proj.export_records(forms=['imaging'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
        # Even if def_field is not asked for in forms or fields, we still need it
        records = self.reg_proj.export_records(forms=['imaging'], fields=['foo_score'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
        # If we just ask for some fields, still need def_field
        records = self.reg_proj.export_records(fields=['foo_score'])
        for record in records:
            self.assertIn(self.reg_proj.def_field, record)
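For reference, here is a condensed, standalone sketch of the `responses` mocking pattern the tests above rely on. The URL, token, and payloads are made up, only the content types touched by Project construction and export_records() are faked, and it is written for Python 3 (unlike the Python 2-era tests above).

import json
from urllib.parse import parse_qs

import responses
from redcap import Project

FAKE_URL = 'https://example.org/redcap/api/'        # hypothetical endpoint
FAKE_TOKEN = '0123456789ABCDEF0123456789ABCDEF'     # hypothetical token

def fake_redcap(request):
    # Every PyCap call is a POST whose form-encoded body carries a `content`
    # field naming the API action; branch on it and return minimal JSON.
    content = parse_qs(request.body)['content'][0]
    if content == 'metadata':
        resp = [{'field_name': 'record_id', 'form_name': 'form_1',
                 'field_label': 'Record ID'}]
    elif content in ('version', 'event', 'arm'):
        resp = {'error': 'not available in this mock'}
    else:  # record export
        resp = [{'record_id': '1'}]
    return (201, {'Content-Type': 'application/json'}, json.dumps(resp))

@responses.activate
def demo():
    responses.add_callback(responses.POST, FAKE_URL, callback=fake_redcap,
                           content_type='application/json')
    project = Project(FAKE_URL, FAKE_TOKEN)
    return project.export_records()

print(demo())  # -> [{'record_id': '1'}]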
Example #54
0
import os
from redcap import Project

def csv_from_redcap(KEY):
    API_URL = os.environ['API_URL']
    API_KEY = os.environ[KEY]
    rc_fields = ['record_id', 'experiment_xnat', 'scan_xnat', 'scan_sd_xnat',
                 'process_name_xnat', 'quality_control_complete']
    p = Project(API_URL, API_KEY)
    return p.export_records(fields=rc_fields, format='csv')
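A hedged usage sketch, not part of the original snippet: the token variable name XNAT_QC_TOKEN and the pandas post-processing are illustrative assumptions; the function itself only needs API_URL plus whichever key name is passed in.

import io
import pandas as pd

# Assumed environment: API_URL and XNAT_QC_TOKEN (an illustrative name) must
# already be exported and must point at a real REDCap project.
csv_text = csv_from_redcap('XNAT_QC_TOKEN')
qa = pd.read_csv(io.StringIO(csv_text), dtype=str)  # keep REDCap codes as strings
print(qa.shape[0], 'QC rows exported')
print(qa.head())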
Example #56
0
import copy
import pprint

from redcap import Project, RedcapError

# `out_rows` (dFpath -> list of record dicts) and `config` are built earlier
# in the original script and are not shown here.
pp = pprint.PrettyPrinter(indent=4, width=200)
pp.pprint(out_rows)

URL = config["redcapURL"]
batch_count = int(config["batchCount"])
df_path_map = config["dfPathMap"]

for path in out_rows:
    record_set = out_rows[path]
    if path in df_path_map:
        api_key = df_path_map[path]
        project = Project(URL, api_key)
        print("Adding", len(record_set), "records for dFpath", path)
        batch_start = 0
        while batch_start < len(record_set):
            response = project.import_records(record_set[batch_start:batch_start + batch_count])
            if batch_start + batch_count <= len(record_set):
                print("Added a batch of", batch_count, "records")
            else:
                print("Added a batch of", len(record_set) - batch_start, "records")
            batch_start += batch_count
    else:
        print("Skipping", len(record_set), "records for dFpath", path)



Example #57
0
# Filename: try_pycap_ver1.py

# Import PyCap and csv module

from redcap import Project, RedcapError
import csv

# Define the URL of our REDCap installation and the token of the project we are querying.

URL = 'https://poa-redcap.med.yale.edu/api/'
TOKEN = '0BD89F3E896E72920DF3CFAC8DD739D7'
TOKEN2 = 'CAF3713EF2307C80BAD789DBAFBAA8C7'

# Call PyCap Project function

family = Project(URL, TOKEN)
srs = Project(URL, TOKEN2)


# Get the metadata of the project

#metadata = family.export_metadata()
#metadata2 = srs.export_metadata()


#for field in metadata:
#	print "%s (%s) ---> %s" % (field['field_name'], field['field_type'], field['field_label'])



#for field in metadata2:
Example #58
0
    # Static dict mapping yes/no variables to their associated file variables
    REDCAP_VAR_MAP = dict()
    REDCAP_VAR_MAP['capratings_edat_exist'] = 'capratings_edat_file'

    # Attempt to retrieve the API key from env
    try:
        API_KEY = os.environ['API_KEY_ZALD_SCAN_LOG']
    except KeyError as KE:
        sys.stderr.write('ERROR: Failed to fetch environment variable '
                         'API_KEY_ZALD_SCAN_LOG from env.\nExiting with'
                         ' error %s\n' % str(KE))
        sys.exit(1)

    # Attempt to connect to redcap
    try:
        REDCAP_PROJECT = Project(url=API_URL, token=API_KEY)
    except requests.exceptions.RequestException as RE:
        sys.stderr.write('ERROR: Failed to connect to REDCap. Please '
                         'check that API_KEY_ZALD_SCAN_LOG is correct in your '
                         '~/.bashrc.\n')
        sys.exit(1)

    # Check to make sure csv file exists
    if not os.path.isfile(sys.argv[1]):
        raise OSError('File %s not found. Exiting' % sys.argv[1])

    # Get the Y_SU_IDS from the csv file
    Y_SU_IDS = list()
    Y_SU_ID_TO_INTERVIEW_DATE = dict()
    Y_SU_ID_TO_AGE = dict()
    Y_SU_ID_TO_GENDER = dict()
Example #59
0
File: data.py Project: VUIIS/vxhr
import os
from redcap import Project

def csv_from_redcap():
    RCURL, RCAPI = os.environ['VXHR_RCURL'], os.environ['VXHR_RCTOKEN']
    p = Project(RCURL, RCAPI)
    return p.export_records(format='csv')
Example #60
0
class ProjectTests(unittest.TestCase):
    """docstring for ProjectTests"""

    def setUp(self):
        self.url = 'https://redcap.vanderbilt.edu/api/'
        self.long_proj = Project(self.url, '1387872621BBF1C17CC47FD8AE25FF54')
        self.reg_proj = Project(self.url, '8E66DB6844D58E990075AFB51658A002')

    def tearDown(self):
        pass

    def test_good_init(self):
        """Ensure basic instantiation """
        self.assertIsInstance(self.long_proj, Project)
        self.assertIsInstance(self.reg_proj, Project)

    def test_normal_attrs(self):
        """Ensure projects are created with all normal attrs"""
        for attr in ('metadata', 'field_names', 'field_labels', 'forms',
            'events', 'arm_names', 'arm_nums', 'def_field'):
            self.assertTrue(hasattr(self.reg_proj, attr))

    def test_long_attrs(self):
        "proj.events/arm_names/arm_nums should not be empty in long projects"
        self.assertIsNotNone(self.long_proj.events)
        self.assertIsNotNone(self.long_proj.arm_names)
        self.assertIsNotNone(self.long_proj.arm_nums)

    def test_regular_attrs(self):
        """proj.events/arm_names/arm_nums should be empty tuples"""
        for attr in 'events', 'arm_names', 'arm_nums':
            attr_obj = getattr(self.reg_proj, attr)
            self.assertIsNotNone(attr_obj)
            self.assertEqual(len(attr_obj), 0)

    def test_obj_export(self):
        """ Make sure we get a list of dicts"""
        data = self.reg_proj.export_records()
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def test_long_export(self):
        """After determining a unique event name, make sure we get a
        list of dicts"""
        unique_event = self.long_proj.events[0]['unique_event_name']
        data = self.long_proj.export_records(events=[unique_event])
        self.assertIsInstance(data, list)
        for record in data:
            self.assertIsInstance(record, dict)

    def is_good_csv(self, csv_string):
        "Helper to test csv strings"
        return isinstance(csv_string, basestring)

    def test_csv_export(self):
        """Test valid csv export """
        csv = self.reg_proj.export_records(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    def test_metadata_export(self):
        """Test valid metadata csv export"""
        csv = self.reg_proj.export_metadata(format='csv')
        self.assertTrue(self.is_good_csv(csv))

    @unittest.skipIf(skip_pd, "Couldnl't import pandas")
    def test_metadata_to_df(self):
        """Test metadata export --> DataFrame"""
        df = self.reg_proj.export_metadata(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_to_df(self):
        """Test export --> DataFrame"""
        df = self.reg_proj.export_records(format='df')
        self.assertIsInstance(df, pd.DataFrame)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_export_df_kwargs(self):
        """Test passing kwargs to export DataFrame construction"""
        df = self.reg_proj.export_records(format='df',
            df_kwargs={'index_col': 'first_name'})
        self.assertEqual(df.index.name, 'first_name')
        self.assertTrue('study_id' in df)

    @unittest.skipIf(skip_pd, "Couldn't import pandas")
    def test_metadata_df_kwargs(self):
        """Test passing kwargs to metadata DataFrame construction"""
        df = self.reg_proj.export_metadata(format='df',
            df_kwargs={'index_col': 'field_label'})
        self.assertEqual(df.index.name, 'field_label')
        self.assertTrue('field_name' in df)