# Example #1 (scraped site marker -- original text: "Пример #1" / rating "0")
report = {}
tasks = ['PegTx','Cutting','Suturing','ClipApply']
#IPython magics: auto-reload edited modules while iterating in the notebook
%load_ext autoreload
%autoreload

#Build a per-user report of metric failures, grouped by surgical task.
#NOTE(review): `ffs`, `myS3`, `bucket`, `conn` and `vm` come from an earlier
#notebook cell not visible here. `ffs` is presumably a list of S3 keys shaped
#like '...<uid>.<task>.<ext>' -- confirm against the producer cell.
for f in ffs:
    print f
    #Dot-separated key layout: third-from-last field is the user id,
    #second-from-last is the task index into `tasks`
    uid = f.split('.')[-3]
    task = int(f.split('.')[-2])
    #First time we see this user: seed summary counts plus one list per task
    if not report.get(uid, False): report[uid] = {'Summary':[0,0,0,0], tasks[0]:[], tasks[1]:[], tasks[2]:[], tasks[3]:[]}
    
    data, meta = myS3.getData(bucket, f, labeled=True)
    #Compute Summary Metrics
    jsonSimscore = vm.summary_metrics(meta, data, conn)
    jsonSimscore = vm.data_metrics_append(jsonSimscore, data, f)
    jsonSimscore = vm.machine_health_append(jsonSimscore, meta, data)
    failtypes = jsonSimscore['FailTypes']
    
    #Tally this file under the user's task count and record its failure types
    report[uid]['Summary'][task] += 1
    report[uid][tasks[task]].append([f, failtypes])
        

# <codecell>

import json
from openpyxl.workbook import Workbook
from openpyxl.writer.excel import ExcelWriter

from openpyxl.cell import get_column_letter
# Example #2 (scraped site marker -- original text: "Пример #2" / rating "0")
# <codecell>

import time

filestoredo = []

#Compare the legacy out-of-range / dead-sensor detectors against the new
#metric pipeline and collect the files whose results differ (candidates
#to re-process).
#NOTE(review): `biglist`, `validate`, `myS3`, `bucket`, `conn`, `vm` and
#isClipTask() are defined in earlier cells not visible in this chunk.
for filename in biglist:
    try:
        data, meta = myS3.getData(bucket, filename, labeled=True)
        minmax = validate.findMinMax(data)
        
        #Legacy detectors, driven only by per-channel min/max values
        old_oors = validate.oldFindOutOfRange(minmax)
        old_deads = validate.oldFindDeadSensor(minmax, isClipTask(filename))
        #New pipeline results for the same file
        js = vm.summary_metrics(meta, data, conn)
        js = vm.data_metrics_append(js, data, filename)
        new_oors = js['OutOfRange']
        new_deads = js['DeadSensors']
        ignore = js['IgnoreErrors']
        
        #Keep only findings the old detectors missed AND that are not
        #explicitly whitelisted in IgnoreErrors
        _oors = []
        _deads = []
        for oor in new_oors:
            if oor not in old_oors and 'OutOfRange' not in ignore.get(oor, []):
                _oors.append(oor)
                if filename not in filestoredo: filestoredo.append(filename)
        for dead in new_deads:
            if dead not in old_deads and 'DeadSensors' not in ignore.get(dead, []):
                _deads.append(dead)
                if filename not in filestoredo: filestoredo.append(filename)
                    
        #NOTE(review): this `try` has no visible except/finally clause -- it
        #appears to have been truncated when this snippet was scraped.
# Example #3 (scraped site marker -- original text: "Пример #3" / rating "0")
def main():  
    
    #Get a file off the SQS stack using 20sec long poll
    rs = q.read(wait_time_seconds=20)
    
    #if there's a file in the queue,
    if rs:
        '''Compute all metrics and send'''
        try:
            #Pull filename and from queued message
            filename = mySQS.get_sqs_filename(rs) #'edge6/2012/11/05.18.46.23.340.0.txt'
            bucketname = mySQS.get_sqs_bucket(rs)
            logit(log,'{0}\n{1}\nProcessing {2}\nfrom bucket {3}\n'.format('-'*20,datetime.now(),filename, bucketname) )
            
            #Ensure this isn't a Reference block trace
            if 'Trace' in filename: 
                logit(log,'Is a reference block trace.\n'); 
                d = q.delete_message(rs)
                logit(log, 'Deleted from queue\n'); print 'Deleted %s from queue' %filename
                return rs
                
            #If everything looks good, load the dataaa!
            data, meta = myS3.getData(whichBucket(bucketname), filename, labeled=True)
            if not data: raise ValueError, "Data file is empty!"
            
            '''Where the magic happens'''
            #Compute Summary Metrics
            jsonSimscore = vm.summary_metrics(meta, data, conn)
            jsonSimscore = vm.data_metrics_append(jsonSimscore, data, filename)
            jsonSimscore = vm.machine_health_append(jsonSimscore, meta, data)
            jsonSimscore['Bucket'] = bucketname
            #Score data
            jsonSimscore.update({'Score': 'None'}) #scoring.score_test(data, meta)} )
            
            #cleanup long floats, NaN values
            jsonSimscore = vm.round_dict(jsonSimscore,3)
            jsonSimscore = vm.nan_replace(jsonSimscore)
            logit(log,'Successfully processed.\n'); print 'Successfully processed.'
            
            '''Processing is completed --Add json to new SQS stack for POST'''
            receipt = mySQS.append_to_queue(jsonSimscore, shipq, raw=False)
            assert receipt, "Could not write to queue"    
            #If json is DEFINITELY received by new SQS, delete from original Files2Process queue
            d = q.delete_message(rs)
            
            assert d, "Could not delete from queue"
            logit(log, 'Deleted from queue\n'); print 'Deleted %s from queue' %filename
                        
            s = add_file_sdb(sdb_domain, jsonSimscore)
            assert s, "SimpleDB not updated"
            logit(log, "Updated SimpleDB\n")
            
        except Exception as err:

            #make more invisible, print/log exception, email me, then continue
            rs.change_visibility(5*60)
            if filename == None: filename = 'Unknown'
            logit(log,'ERROR: %s - %s\n'%(filename, str(err)) )
            #Connect to ses
            ses_conn = boto.connect_ses(aws_ak, aws_sk)
            send_fail('Error computing {0}. computeSimscore.py error: {1}.'.format(filename, err), ses_conn)
            
            #TODO handle incoming-simscore-org-test bucket requests in better way
            if err.message in ["File not found on S3","Data file is empty!"]:
                d = q.delete_message(rs)
                logit(log, 'Deleted from queue\n'); print 'Deleted %s from queue' %filename
    return rs