def test_run_dir(run_dir):
    print 'testing metrics', run_dir
    recent_dir = join(run_dir, 'brain', 'Recent')
    # Grab only the immediate subdirectories of Recent (one per timestep).
    for root, dirs, files in os.walk(recent_dir):
        break
    for dir in dirs:
        time_dir = join(recent_dir, dir)
        # The base metric file defines the expected row count for this timestep.
        base_metric_file = join(time_dir, BASE_METRIC_FILE)
        base_digest = datalib.parse_digest(base_metric_file)
        base_tables = base_digest['tables']
        base_nrows = base_tables[base_tables.keys()[0]]['nrows']
        files_to_test = []
        for expression in METRIC_FILES_TO_TEST:
            files_to_test += glob.glob(join(time_dir, expression))
        # Every other metric file must have the same number of rows as the base.
        for metric_file in files_to_test:
            if metric_file != base_metric_file:
                if os.access(metric_file, os.F_OK):
                    metric_digest = datalib.parse_digest(metric_file)
                    metric_tables = metric_digest['tables']
                    metric_nrows = metric_tables[metric_tables.keys()[0]]['nrows']
                    if metric_nrows != base_nrows:
                        print 'bad: %s (%d != %d)' % (metric_file, metric_nrows, base_nrows)
def get_random_classifications(path_run, recent_type=None):
    # With no recent_type given, gather classifications across all Recent datasets.
    if recent_type == None:
        classifications = []
        for recent_type in common_functions.RECENT_TYPES:
            classifications += get_random_classifications(path_run, recent_type)
        return classifications

    path = path_avr(path_run, recent_type)
    if os.path.exists(path):
        digest = datalib.parse_digest(path)
        classifications = set()
        # Table names containing RANDOM_PIECE carry the classification as a suffix.
        for name in digest['tables'].keys():
            index = name.find(RANDOM_PIECE)
            if index > -1:
                classifications.add(name[index + len(RANDOM_PIECE):])
        return list(classifications)
    else:
        return []
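# Usage sketch (an assumption, not part of the original module): collect the
# random classifications for a hypothetical run directory 'run_0', e.g.
#
#     classifications = get_random_classifications('run_0')
#     for c in classifications:
#         print 'random classification:', c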
def getAgentCount(run):
    # The agent count is the number of rows in the run's LifeSpans table.
    return datalib.parse_digest(os.path.join(run, "lifespans.txt"))["tables"]["LifeSpans"]["nrows"]
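# Minimal driver sketch (an assumption; the original entry point, if any, is
# not shown in this section). It treats each command-line argument as a run
# directory, reports its agent count and random classifications, then checks
# that all metric files in brain/Recent have consistent row counts.
if __name__ == '__main__':
    import sys
    for run in sys.argv[1:]:
        print 'run:', run, 'agents:', getAgentCount(run)
        print 'random classifications:', get_random_classifications(run)
        test_run_dir(run)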