Code example #1
def parse_data(data):
    ds = DataSet()
    for build_id in data:
        # Start and end time of the build's last step.
        times = data[build_id]['steps'][-1]['times']
        start_time = times[0]
        end_time = times[1]
        properties = data[build_id]['properties']
        for p in properties:
            # Each property is a (name, value, source) tuple; only keep
            # those produced by a 'SetProperty Step'.
            if p[-1] == 'SetProperty Step':
                # The property name encodes what-config-domain-problem.
                att = p[0]
                satt = att.split('-', 3)
                what = satt[0]
                config = satt[1]
                domain = satt[2]
                problem = satt[3]

                value = p[1]

                d = {'start_time': start_time,
                     'end_time': end_time,
                     'what': what,
                     'config': config,
                     'domain': domain,
                     'problem': problem,
                     'value': value}
                di = DataItem(d)
                ds.append(di)
    return ds
Code example #2
def parse_data(data):
    ds = DataSet()
    for build_id in data:
        times = data[build_id]['steps'][-1]['times']
        start_time = times[0]
        end_time = times[1]
        properties = data[build_id]['properties']
        for p in properties:
            if p[-1] == 'SetProperty Step':
                att = p[0]
                satt = att.split('-', 3)
                what = satt[0]
                config = satt[1]
                domain = satt[2]
                problem = satt[3]

                value = p[1]

                d = {
                    'start_time': start_time,
                    'end_time': end_time,
                    'what': what,
                    'config': config,
                    'domain': domain,
                    'problem': problem,
                    'value': value
                }
                di = DataItem(d)
                ds.append(di)
    return ds
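
Both versions above assume that data maps a build id to a dict with a 'steps' list, whose last step carries a (start, end) 'times' pair, and a 'properties' list of (name, value, source) tuples. DataSet and DataItem are defined elsewhere in the project and are not shown on this page, so the following is only a minimal sketch with hypothetical stand-ins and made-up sample values, showing how parse_data could be exercised in isolation:

class DataItem(dict):
    """Stand-in only: treat a DataItem as a plain dict of attributes."""
    pass

class DataSet(list):
    """Stand-in only: a list that also accepts keyword-argument appends."""
    def append(self, item=None, **kwargs):
        list.append(self, item if item is not None else DataItem(kwargs))

# Made-up input in the shape parse_data expects: build id -> last step
# times plus (name, value, source) property tuples.
data = {
    42: {
        'steps': [{'times': (1300000000.0, 1300000100.0)}],
        'properties': [
            ('coverage-lmcut-gripper-prob01', 1, 'SetProperty Step'),
            ('buildnumber', 42, 'Build'),  # ignored: wrong source
        ],
    },
}

ds = parse_data(data)
for item in ds:
    print(item)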
Code example #3
def _get_data(self):
    """
    The data is reloaded for every attribute
    """
    dump_path = os.path.join(self.eval_dir, 'data_dump')

    dump_exists = os.path.exists(dump_path)
    # Reload when the user requested it or when no dump exists
    if self.reload or not dump_exists:
        data = DataSet()
        logging.info('Started collecting data')
        for base, dirs, files in os.walk(self.eval_dir):
            for file in files:
                if file == 'properties':
                    file = os.path.join(base, file)
                    props = tools.Properties(file)
                    data.append(**props)
        # Pickle data for faster future use
        cPickle.dump(data, open(dump_path, 'w'))
        logging.info('Wrote data dump')
        logging.info('Finished collecting data')
    else:
        data = cPickle.load(open(dump_path))
    return data
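
The method above collects run properties once and caches the resulting DataSet in a pickle dump so later calls can skip walking the directory tree. Stripped of the project-specific pieces, the same load-or-rebuild pattern looks roughly like this (load_or_rebuild and collect are placeholder names, not part of the project):

import os
import cPickle  # plain pickle on Python 3

def load_or_rebuild(cache_path, collect, force_reload=False):
    """Return cached data if available, otherwise rebuild and cache it."""
    if not force_reload and os.path.exists(cache_path):
        with open(cache_path, 'rb') as cache_file:
            return cPickle.load(cache_file)
    data = collect()
    with open(cache_path, 'wb') as cache_file:
        cPickle.dump(data, cache_file)
    return data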
Code example #4
def group_dict(self):
    data = DataSet(self.data)

    if not self.order:
        self.order = ['id']
    data.sort(*self.order)
    #print 'SORTED'
    #data.dump()

    if self.filter_funcs or self.filter_pairs:
        data = data.filtered(*self.filter_funcs, **self.filter_pairs)
        #print 'FILTERED'
        #data.dump()

    group_dict = data.group_dict(*self.grouping)
    #print 'GROUPED'
    #print group_dict

    return group_dict
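
The sort, filtered and group_dict methods of DataSet are not shown on this page, so their exact call semantics are assumptions. As a rough, purely hypothetical illustration, the same sort / filter / group pipeline over plain dicts could look like this (the attribute names are made up):

from collections import defaultdict

records = [
    {'id': 2, 'config': 'lmcut', 'domain': 'gripper', 'value': 1},
    {'id': 1, 'config': 'blind', 'domain': 'gripper', 'value': 0},
    {'id': 3, 'config': 'lmcut', 'domain': 'blocks', 'value': 1},
]

# Sort by the requested attributes ('id' is the fallback used above).
records.sort(key=lambda r: r['id'])

# Keep only records that pass every predicate and match every pair.
filter_funcs = [lambda r: r['value'] > 0]
filter_pairs = {'config': 'lmcut'}
records = [r for r in records
           if all(f(r) for f in filter_funcs)
           and all(r.get(k) == v for k, v in filter_pairs.items())]

# Group the remaining records by one attribute ('domain' here).
groups = defaultdict(list)
for r in records:
    groups[r['domain']].append(r)
print(dict(groups))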
Code example #5
File: tools.py  Project: Doddzy/DAS-Fast-Downward
def get_dataset(self):
    data = DataSet()
    for run_id, run in sorted(self.items()):
        data.append(**run)
    return data
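
Note that data.append(**run) passes each run's properties as keyword arguments (as in example #3), whereas examples #1 and #2 append a ready-made DataItem, so the DataSet used here apparently accepts the keyword form as well (or is a different version of the class). self.items() is assumed to yield (run_id, run) pairs where each run is a dict of properties. With the hypothetical stand-in sketched after example #2, an equivalent standalone call would be:

runs = {'run-1': {'config': 'lmcut', 'domain': 'gripper', 'coverage': 1}}
data = DataSet()
for run_id, run in sorted(runs.items()):
    data.append(**run)
print(data)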