def collect_bn_models(
        paths: 'Union[Iterable, Path]',
        bn_deserializer=find_bn_type,
        ffilter=lambda x: x.is_file() and re.search(MODEL_NAME_PATTERN, x.name)):
    """Recursively collect Boolean-network model files from *paths*.

    :param paths: a single path or an iterable of paths; directories are
        explored recursively.
    :param bn_deserializer: callable turning the parsed JSON representation
        of a model into a BN object (defaults to ``find_bn_type``).
    :param ffilter: predicate selecting which files are model files;
        defaults to matching ``MODEL_NAME_PATTERN`` against the file name.
    :returns: a ``(files, bns)`` pair of mappings keyed by the model file
        name without its suffix — ``files`` maps to the source path,
        ``bns`` to the deserialized model.
    """
    files = {}
    # NOTE(review): never used as a default-factory here, but kept as
    # defaultdict(list) in case callers rely on that behaviour of the
    # returned mapping — confirm before simplifying to a plain dict.
    bns = defaultdict(list)
    for path in futils.cpaths(paths, recursive=3):
        if path.is_dir():
            # Recurse into the directory and merge its findings into ours.
            f, bn = collect_bn_models(
                path.iterdir(),
                bn_deserializer=bn_deserializer,
                ffilter=ffilter)
            # Plain .update(mapping): the **-splat form required str keys
            # and gained nothing.
            bns.update(bn)
            files.update(f)
        elif ffilter(path):
            # Key both maps by the file name stripped of its extension.
            name = path.with_suffix('').name
            bns[name] = bn_deserializer(read_json(path))
            files[name] = path
    return files, bns
default=3,
        type=int,
        choices=[1, 2, 3],
        help='Level of merge between dataset of the same model.')
    # NOTE(review): store_false + default=True means passing -r *disables*
    # recursive exploration, consistent with the "DO NOT" help text.
    parser.add_argument(
        '-r',
        '--recursively',
        default=True,
        help='DO NOT Recursively explore directories for valid datasets.',
        action='store_false')
    args = parse_args(parser=parser, config_converter=Config.from_file)
    # Collate the test datasets whose file names match the expected
    # "rtest_data_[bn_subopt_]<fname>.json" pattern.
    data = OrderedDict(
        **pu.collect_data(
            cpaths(args.config.test_data_path),
            fpattern=r'rtest_data_(?:bn_subopt_)?' + f'{fu.FNAME_PATTERN}.json',
            recursively=args.recursively,
            ds_merge_level=args.merge_level,
            data_getter=get_data))
    bninfos = OrderedDict()
    # BN model files are JSON files carrying "bn" in their name.
    for path in cpaths(args.config.bn_model_path, recursive=3):
        if path.is_file() and 'json' in path.suffix and 'bn' in path.name:
            jsonrepr = read_json(path)
            try:
cutoff = trajectory.index(binstate(actual_state)) # pprint(trajectory[cutoff:]) ts.update({tuple(trajectory[cutoff:]): None}) return [*ts.keys()] ############################################################################### if __name__ == "__main__": load_global_config() ps = cpaths(GLOBALS.bn_model_path, recursive=3) + cpaths( list(map(Path, GLOBALS.slct_behaviours_map.values()))) for path in ps: bn = OpenBooleanNetwork.from_json(read_json(path)) luts = lookup_attractors(bn) print('BN attractors:') pprint(bn.atm.mapped_attractors(binstate)) print(len(bn.atm.attractors), end='\n\n') print('Simulated Attractors:') pprint(luts) print(len(luts), end='\n\n') print( sum(
########################################################################### ########################################################################### if __name__ == "__main__": load_global_config() NP = cpu_count() pool = Pool(processes=NP) mapper = lambda f, p: pool.imap_unordered(f, p, chunksize=2 * NP) for path in cpaths(GLOBALS.bn_model_path, recursive=3): print(path) bnjson = read_json(path) bn = SelectiveBooleanNetwork.from_json(bnjson) # print(2**len(bn)) # print(bnjson['gen_params']) i = max(map(len, bn.atm.attractors)) * len(bn) * 20 t = time.perf_counter() if isinstance(GLOBALS.slct_target_transition_tau, list): tTau_map = {
right=0.99, bottom=0.1, top=0.99, wspace=0.0, hspace=0.0)
    plotter.legend(frameon=False, loc='upper right')
    plotter.show()


################################################################

if __name__ == "__main__":
    load_global_config()
    # Render one plot per BN model file found under the configured path.
    for path in cpaths(GLOBALS.bn_model_path):
        bn = find_bn_type(read_json(path))
        # Input/output node lists only exist for open Boolean networks;
        # other BN kinds get empty lists.
        i = []
        o = []
        if isinstance(bn, OpenBooleanNetwork):
            i = bn.input_nodes
            o = bn.output_nodes
        plot_booleannetwork(
            get_simple_fname(
                path.with_suffix('').name, FNAME_PATTERN,
                ['%s', '%s', '%s']),
            bn, i, o)
    with open(path, 'r') as fp:
        # Only lines starting with "it" (iteration records) carry the
        # value extracted by `pattern`'s first capture group.
        for line in filter(lambda x: x.startswith('it'), fp.readlines()):
            m = re.search(pattern, str(line))
            if m is not None:
                data.append(float(m.group(1)))
    return data


if __name__ == "__main__":
    load_global_config()
    files = cpaths(GLOBALS.app_output_path, recursive=3)
    # Training outputs are files named "generate*" or "enhance*".
    is_train_output = lambda x: (str(x.name).startswith('generate') or str(
        x.name).startswith('enhance')) and is_file(x)
    plots = list()
    ds = list()
    # Parse every training-output file into a numeric series for plotting.
    for f in filter(is_train_output, files):
        fname = get_simple_fname(f.name, FNAME_PATTERN, uniqueness=1)
        data = parse_output(f)
        ds.append(data)