Example #1
import glob
import logging
import os

import pandas as pd


def scaleScan(config):
    log = logging.getLogger(__name__)
    log.info('Data checking started.')
    # Download Standard Scale Sheet
    sss_url = config["SERVER"] + 'study'
    log.info('Successfully got the server address.')
    sss = safeRequest(sss_url, config)
    log.info('Successfully read study schedule.')
    d = Checker(sss)
    # Download updated TaskLog
    log_url = config["SERVER"] + 'export/TaskLog'
    logs = safeRequest(log_url, config)
    log.info("Successfully get TaskLog information.")
    # Create checking table
    result = pd.DataFrame(index=d.completed_list(),
                          columns=[
                              'data_found', 'entries_in_log',
                              'entries_in_dataset', 'missing_rate'
                          ])
    log.info("Report format ready.")
    #newest = max(glob.iglob(config["PATH"]+'active_data/TaskLog'+'*.csv'), key=os.path.getctime)
    taskLog = pd.json_normalize(logs.json())
    print(taskLog)
    log.info("Ready to check.")
    for scaleName in d.completed_list():
        log.info("Ready to search file %s", scaleName)
        filename = config['PATH'] + 'active_data/' + scaleName + '*.csv'
        exist = False
        try:
            # Check whether data exists for this scale: read the newest matching CSV.
            scale_data = pd.read_csv(
                max(glob.iglob(filename), key=os.path.getctime))
            exist = True
            log.info("%s data found.", str(scaleName))
        except (ValueError, IOError):
            # max() raises ValueError when glob matches nothing;
            # read_csv raises IOError if the file cannot be opened.
            print("Data not found.")
        if exist:
            log.info("Data retrieved successfully.")
            result.at[scaleName, 'data_found'] = True
            a = len(taskLog[taskLog['taskName'] == scaleName])
            result.at[scaleName, 'entries_in_log'] = a
            log.info("Report generated.")
            if scaleName == 'JsPsychTrial':
                # For JsPsychTrial, only sessions that reached the final
                # score screen count as completed trials.
                b = len(
                    scale_data[scale_data.stimulus == 'final score screen'])
            else:
                b = len(scale_data)
            result.at[scaleName, 'entries_in_dataset'] = b
            missing = 1 - float(b) / float(a) if a else float('nan')
            result.at[scaleName, 'missing_rate'] = "{:.9f}".format(missing)
            log.info("Counting completed.")
        else:
            result.at[scaleName, 'data_found'] = False
            log.info("Data not found for %s", str(scaleName))
    #print tabulate(result, headers='keys',tablefmt='psql')
    print(result)

    return result
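
A minimal usage sketch (not part of the original source): scaleScan only needs a config dict carrying the SERVER and PATH keys it reads, plus safeRequest and Checker from the same module; the URL and path values below are placeholders.

import logging

logging.basicConfig(level=logging.INFO)

# Hypothetical config; both values are placeholders, only the keys are real.
config = {
    "SERVER": "https://example.org/api/",   # base URL, must end with '/'
    "PATH": "/tmp/mindtrails/",             # local folder holding active_data/
}

report = scaleScan(config)
# Scales that completed in the schedule but have no exported CSV yet:
print(report[report['data_found'] == False])
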
Example #2
def export(scaleName, config):
    log = logging.getLogger('martin')
    log.info(
        """Hi PACT Lab, this is faithful android Martin from Laura\'s server. Everything is alright here, and seems to be
     a good time for a hunt. I am going out for a regular check and will come back soon. Don't miss me PACT Lab, it wouldn't
     take too long.""")
    pathCheck(config)  #Check storage path
    log.info(" (Martin is out for hunting data......) ")

    oneShot = safeRequest(config['SERVER'] + "export/", config)

    if oneShot is not None:
        log.info(
            """Alright I am back! Pretty fruitful. Seems like it is going to be comfortable for a little while. Alright,
     I am heading to the server for a little rest, will talk to you guys in PACT Lab in a little while. -- Martin"""
        )
        data = oneShot.json()
        if data is not None:
            safeExport(data, scaleName, config)
    else:
        log.warning(
            """This is weird... It seems that there is nothing out there or I am blocked from MindTrails. You might already have received an email from me
     reporting some network issues. Be alerted, stay tuned. Server: %s.""",
            config['SERVER'])
    log.info(
        "I am tired and I am going back to Laura's server for a rest. See you later!"
    )
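
A hedged sketch of how export might be driven on a schedule; the logger name 'martin' comes from the function itself, but the scale names, interval, and config values below are invented placeholders.

import logging
import time

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(name)s %(levelname)s %(message)s")

config = {"SERVER": "https://example.org/api/", "PATH": "/tmp/mindtrails/"}

while True:
    for scaleName in ["TaskLog", "Participant"]:   # placeholder scale names
        export(scaleName, config)
    time.sleep(24 * 60 * 60)                       # roughly once a day
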
Example #3
def safeCollect(scale, config):
    log = logging.getLogger(__name__)
    s = 0
    response = safeRequest(config["SERVER"] + 'export/' + scale['name'],
                           config)
    if response is not None:
        quest = response.json()
        if quest is not None:
            if scale['size'] != 0:
                ks = list(quest[0].keys())
                ks.sort()
                log.info("Questionnaire %s updated - %s new entries received.",
                         str(scale['name']), str(scale['size']))
                safeSave(
                    response, ks, str(scale['name']), scale['deleteable'],
                    config
                )  # Safely write the whole questionnaire into the data file
                s += 1
            else:
                log.info("No new entries found in %s", str(scale['name']))
        else:
            log.warning(
                """This is weird... It seems that there is nothing out there or I am blocked from MindTrails. You might already have received an email from me
            reporting some network issues. Be alerted, stay tuned. Server: %s""",
                config['SERVER'])
    return s
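
Sketch of a caller for safeCollect (an assumption, not from the source): the scale dicts are shaped after the keys the function reads ('name', 'size', 'deleteable'), and the return value is 1 when a questionnaire was saved, so summing it counts updated scales.

# Hypothetical scale descriptors; names and sizes are placeholders.
scales = [
    {'name': 'TaskLog', 'size': 12, 'deleteable': False},
    {'name': 'Participant', 'size': 0, 'deleteable': True},
]
config = {"SERVER": "https://example.org/api/"}   # placeholder server base URL

updated = sum(safeCollect(scale, config) for scale in scales)
print("%d questionnaires updated" % updated)
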
Example #4
def export(scaleName, config):
    log = logging.getLogger('martin')
    log.info("""Hi PACT Lab, this is faithful android Martin from Laura\'s server. Everything is alright here, and seems to be
     a good time for a hunt. I am going out for a regular check and will come back soon. Don't miss me PACT Lab, it wouldn't
     take too long.""")
    pathCheck(config) #Check storage path
    log.info(" (Martin is out for hunting data......) ")
    oneShot = safeRequest(config["SERVER"], config)
    if oneShot is not None:
        log.info("""Alright I am back! Pretty fruitful. Seems like it is going to be comfortable for a little while. Alright,
     I am heading to the server for a little rest, will talk to you guys in PACT Lab in a little while. -- Martin""")
        data = oneShot.json()
        if data is not None:
            safeExport(data, scaleName, config)
    else:
        log.warning("""This is weird... It seems that there is nothing out there or I am blocked from MindTrails. You might already have received an email from me
     reporting some network issues. Be alerted, stay tuned. Server: %s.""", config['SERVER'])
    log.info("I am tired and I am going back to Laura's server for a rest. See you later!")
Example #5
def safeCollect(scale, config):
    log = logging.getLogger(__name__)
    s = 0
    response = safeRequest(config["SERVER"] + '/' + scale['name'], config)
    if response is not None:
        quest = response.json()
        if quest is not None:
            if scale['size'] != 0:
                ks = list(quest[0].keys())
                ks.sort()
                log.info("Questionnaire %s updated - %s new entries received.", str(scale['name']), str(scale['size']))
                safeSave(response, ks, str(scale['name']), scale['deleteable'], config) # Safely write the whole questionnaire into the data file
                s += 1
            else: log.info("No new entries found in %s", str(scale['name']))
        else:
            log.warning("""This is weired... It seems that there is nothing out there or I am blocked from MindTrails. You might already get an email from me
            reporting some network issues. Be alerted, stay tuned. Server: %s""", config['SERVER'])
    return s
Example #6
import logging
import os
import time

import pandas as pd
from tabulate import tabulate


def clientScan(config):
    log = logging.getLogger(__name__)
    log.info('Data checking started.')
    # Download Standard Scale Sheet
    sss_url = config["SERVER"] + 'study'
    log.info('Successfully got the server address.')
    sss = safeRequest(sss_url, config)
    log.info('Successfully read study schedule.')
    d = Checker(sss)
    log_url = config["SERVER"] + 'export/TaskLog'
    task_data = safeRequest(log_url, config)
    pat_url = config["SERVER"] + 'export/Participant'
    pat_data = safeRequest(pat_url, config)
    log.info("Ready for checking.")

    #counting tasks finished in TaskLog
    table = pd.DataFrame(task_data.json())
    date = pd.to_datetime(table.dateCompleted)

    table['datetime_CR'] = date
    task_count = table.groupby(['participantId'],
                               sort=True)['datetime_CR'].count()
    task_count = pd.DataFrame(task_count).reset_index()
    task_count.rename(columns={'participantId': 'participant_id'},
                      inplace=True)
    task_count.rename(columns={'datetime_CR': 'task_no'}, inplace=True)

    log.info("Task count table created.")
    # get status of current task
    pi = []
    cs = []
    ct = []
    adm = []
    tg = []
    for k in pat_data.json():
        pi.append(k['id'])
        adm.append(k['admin'])
        cs.append(k['study']['currentSession']['name'])
        ct.append(k['study']['currentSession']['currentTask']['name'])
        tg.append(k['study']['currentSession']['currentTask']['tag'])
    log.info("Content filled.")
    current_status = pd.DataFrame({
        'participant_id': pi,
        'current_session': cs,
        'current_task': ct,
        'admin': adm,
        'tag': tg
    })
    log.info("Current Task Status Table created.")
    current_status = current_status.merge(task_count,
                                          on='participant_id',
                                          how='outer')
    result = current_status

    result = result.fillna(0)

    log.info("Tables Are Merged.")
    # Get checking information
    result['target_task_no'] = result.apply(
        lambda entry: d.correct_number(entry), axis=1)
    log.info("Checking completed.")
    #result['logged_task_no'] = result.apply(lambda entry:len(table[table.participantdao_id == entry['participant_id']]), axis = 1)
    result['Missing_no'] = result.apply(
        lambda entry: entry['target_task_no'] - entry['task_no'], axis=1)

    log.info('Number of participants who finished at least one task: %s.\n',
             str(len(result)))
    #print tabulate(result, headers='keys',tablefmt='psql')
    #result.to_csv('/Users/any/Desktop/testing data/reports/client_report_3.18.csv')
    if not os.path.exists(config["PATH"] + 'report/'):
        os.makedirs(config["PATH"] + 'report/')
    result.to_csv(config["PATH"] + 'report/' + str(config['NAME']) +
                  '_client_report' + '_' +
                  time.strftime(config["DATE_FORMAT"] + '_' +
                                time.strftime(config["TIME_FORMAT"]) + '.csv'))
    #df = pd.read_csv('/Users/any/Desktop/testing data/reports/client_report_3.18.csv');
    #df.fillna(0);
    #print df
    print(tabulate(result,
                   headers=[
                       'admin', 'current_session', 'current_task',
                       "participant_id", 'tag', 'task_no', 'target_task_no',
                       'Missing_no'
                   ],
                   tablefmt='psql'))
    print('data saved')
    return result
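
A hedged example of the config clientScan expects: the keys (SERVER, PATH, NAME, DATE_FORMAT, TIME_FORMAT) are the ones the function reads, while every value below is a placeholder.

config = {
    "SERVER": "https://example.org/api/",  # placeholder export endpoint base
    "PATH": "/tmp/mindtrails/",            # placeholder root for the report/ folder
    "NAME": "demo_study",                  # placeholder study name used in the file name
    "DATE_FORMAT": "%Y.%m.%d",             # strftime pattern for the report date
    "TIME_FORMAT": "%H.%M",                # strftime pattern for the report time
}

report = clientScan(config)
# Participants whose completed-task count lags behind the schedule:
print(report[report['Missing_no'] > 0])
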
Example #7
            log.info("Successfully created active_data folder.")
    except:
        log.critical("Server: %s. Failed to create data or log files, fatal, emailed admin.", config['SERVER'], exc_info=1)

# ------------------------------------------#
# This is the main module
def export(scaleName, config):
    log = logging.getLogger('martin')
    log.info("""Hi PACT Lab, this is faithful android Martin from Laura\'s server. Everything is alright here, and seems to be
     a good time for a hunt. I am going out for a regular check and will come back soon. Don't miss me PACT Lab, it wouldn't
     take too long.""")
    pathCheck(config) #Check storage path
    log.info(" (Martin is out for hunting data......) ")
    oneShot = safeRequest(config["SERVER"] + 'export/', config)
    if oneShot is not None:
        log.info("""Alright I am back! Pretty fruitful. Seems like it is going to be comfortable for a little while. Alright,
     I am heading to the server for a little rest, will talk to you guys in PACT Lab in a little while. -- Martin""")
        data = oneShot.json()
        if data is not None:
            safeExport(data, scaleName, config)
    else:
        log.warning("""This is weird... It seems that there is nothing out there or I am blocked from MindTrails. You might already have received an email from me
     reporting some network issues. Be alerted, stay tuned. Server: %s.""", config['SERVER'])
    log.info("I am tired and I am going back to Laura's server for a rest. See you later!")

# This is the overall program
def martin(task_list, serverName, scaleName):
    log = logging.getLogger('martin')