def RenderAndSave(upd, filename):
    """Render each cell's death state at update `upd` and save a png.

    Dead cells are black; live cells are colored by their death code.
    Output filename metadata is kn-packed from `filename`'s own metadata.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))

    death = np.array(file['Death']['upd_' + str(upd)])
    live = np.array(file['Live']['upd_' + str(upd)])

    # death-code palette for live cells
    palette = {
        0: (1.0, 1.0, 1.0),  # alive
        1: (0.0, 1.0, 0.0),  # apoptosis
        2: (1.0, 0.0, 0.0),  # bankrupt
        3: (0.0, 0.0, 1.0),  # replaced
    }
    image = np.array([
        [
            palette[val_death] if val_live else (0.0, 0.0, 0.0)  # dead: black
            for val_death, val_live in zip(row_death, row_live)
        ]
        for row_death, row_live in zip(death, live)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # outline every cell with a black grid
    grid_lines = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(grid_lines)

    plt.savefig(
        kn.pack({
            'title': 'death_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Render live cells with red borders where neighbors differ in
    program or trigger set at update `upd`, and save a png.

    NOTE(review): the output title 'death_viz' matches another script's
    title even though this draws program/trigger boundaries — confirm the
    title is intentional.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))

    live = np.array(file['Live']['upd_' + str(upd)])
    pop = np.array(file['Population']['upd_' + str(upd)])
    triggers = np.array(file['Triggers']['upd_' + str(upd)])

    # live cells white, dead cells black
    image = np.array([
        [
            (1.0, 1.0, 1.0) if cell_live else (0.0, 0.0, 0.0)
            for cell_live in row_live
        ]
        for row_live in live
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # red edge wherever adjacent cells disagree on program or triggers
    boundary_lines = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if (pop[y][x] != pop[dest[1] - 1][dest[0] - 1]
                or triggers[y][x] != triggers[dest[1] - 1][dest[0] - 1])
        ],
        linestyle='solid',
        colors='red',
    )
    plt.gca().add_collection(boundary_lines)

    plt.savefig(
        kn.pack({
            'title': 'death_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
# adapted from https://stackoverflow.com/a/48135340
# NOTE(review): this appears to be the body of a per-`filename` loop whose
# header is outside this view; `filename`, `dfs`, and `filenames` are
# defined elsewhere in the script.
try:
    df = pd.read_csv(filename)
    # tag each frame with the seed/step encoded in its parent directory name
    df["seed"] = int(kn.unpack(Path(filename).parts[-2])["seed"])
    df["step"] = int(kn.unpack(Path(filename).parts[-2])["step"])
    dfs.append(df)
except pd.io.common.EmptyDataError:
    print(filename, " is empty and has been skipped.")

# combine all per-file frames and keep only early steps
df = pd.concat(dfs)
df = df[df["step"] < 1050]

outfile = kn.pack({
    'title': 'mastergenerations',
    '_data_hathash_hash': fsh.FilesHash().hash_files(filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join",
    ).hash_files([sys.argv[0]]),
    '_source_hash': kn.unpack(filenames[0])['_source_hash'],
    'ext': '.csv',
})
df.to_csv(outfile, index=False)
print('Output saved to', outfile)

# mean within (step, seed, Level), then sum over steps per (seed, Level)
x = df.groupby(["step", "seed", "Level"]).mean()
y = x.reset_index().groupby(["seed", "Level"]).sum().reset_index()
# collect every per-run csv, tagging rows with the seed/step packed into
# the parent directory name (`dfs` and `filenames` are defined elsewhere)
for filename in tqdm(filenames):
    df = pd.read_csv(filename)
    df["seed"] = int(kn.unpack(Path(filename).parts[-2])["seed"])
    df["step"] = int(kn.unpack(Path(filename).parts[-2])["step"])
    dfs.append(df)

# combine and keep only early steps
df = pd.concat(dfs)
df = df[df["step"] < 1050]

outfile = kn.pack({
    'title': 'mastersystematics',
    '_data_hathash_hash': fsh.FilesHash().hash_files(filenames),
    '_script_fullcat_hash': fsh.FilesHash(
        file_parcel="full_parcel",
        files_join="cat_join",
    ).hash_files([sys.argv[0]]),
    '_source_hash': kn.unpack(filenames[0])['_source_hash'],
    'ext': '.csv',
})
df.to_csv(outfile, index=False)
print('Output saved to', outfile)

# mean within (step, seed, update), then max over updates per (step, seed)
x = df.groupby(["step", "seed", "update"]).mean()
y = x.reset_index().groupby(["step", "seed"]).max()
def RenderAndSave(upd, filename):
    """Color each live cell by its summed regulator state, projected through
    a precomputed per-group PCA (`pcamapper`, defined elsewhere), and save a
    kn-packed png with channel-group borders overlaid.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))

    channel = np.array(
        file['Channel']['lev_' + str(nlev - 1)]['upd_' + str(upd)]
    ).flatten()
    regulators = [
        np.array(
            file['Regulators']['dir_' + str(direction)]['upd_' + str(upd)]
        ).flatten()
        for direction in range(4)
    ]
    live = np.array(file['Live']['upd_' + str(upd)])
    index = np.array(file['Index']['own'])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    # unique group IDs
    ids = set(channel.flatten())

    flat_live = live.flatten()
    flat_index = index.flatten()

    # map cell index -> RGB, one group at a time
    cmapper = {}
    for group_id in ids:
        tags_to_regs = []
        idxs = []
        for flat_idx, idx in enumerate(flat_index):
            if channel[flat_idx] != group_id:
                continue
            idxs.append(idx)
            if flat_live[flat_idx]:
                archives = [
                    json.loads(regulator[flat_idx].decode("utf-8"))['value0']
                    for regulator in regulators
                ]
                tags = {
                    d['key']: d['value']['value0']['value0']
                    for d in archives[0]['tags']
                }
                # sum each regulator's value across the four directions
                regulatorsum = defaultdict(lambda: 0.0)
                for archive in archives:
                    for d in archive['regulators']:
                        regulatorsum[d['key']] += d['value']
                tags_to_regs.append({
                    tags[uid]: regulatorsum[uid]
                    for uid in archives[0]['uids']
                })

        df = pd.DataFrame.from_records(tags_to_regs).fillna(1)
        if pcamapper[group_id] is not None:
            # project through the stored PCA and normalize with stored bounds
            pca, cols, minv, ptpv = pcamapper[group_id]
            pc = pca.transform(df[cols].to_numpy())
            pc = (pc - minv) / ptpv
            for idx, row in zip(idxs, pc):
                cmapper[idx] = (
                    row[0] if row.size >= 1 and not np.isnan(row[0]) else 0.5,
                    row[1] if row.size >= 2 and not np.isnan(row[1]) else 0.5,
                    row[2] if row.size >= 3 and not np.isnan(row[2]) else 0.5,
                )
        else:
            # no PCA available for this group: neutral gray
            for idx in idxs:
                cmapper[idx] = (0.5, 0.5, 0.5)

    image = np.array([
        [
            cmapper[val_index] if val_live else (0.0, 0.0, 0.0)
            for val_index, val_live in zip(row_index, row_live)
        ]
        for row_index, row_live in zip(index, live)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # white borders between level-0 channel groups
    lines_0 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='white',
    )
    plt.gca().add_collection(lines_0)

    # black borders between level-1 channel groups
    lines_1 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'consistent_regulator_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Color each cell by its channel IDs (hue from the upper level,
    saturation from level 0) and outline channel-group borders; save as a
    kn-packed png.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    def channel_color(val_0, val_1):
        # cells with a zero channel ID at either level render black
        if not (val_0 and val_1):
            return (0, 0, 0)
        # map the 64-bit channel IDs into hue and saturation
        return tuple(colorsys.hsv_to_rgb(
            (val_1 / 2**63) % 1.0,
            (val_0 / 2**63) % 0.6 + 0.4,
            1.0,
        ))

    image = np.array([
        [channel_color(val_0, val_1) for val_0, val_1 in zip(row_0, row_1)]
        for row_0, row_1 in zip(data_0, data_1)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # white borders between level-0 channel groups
    lines_0 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='white',
    )
    plt.gca().add_collection(lines_0)

    # black borders between level-1 channel groups
    lines_1 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'channel_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Render parent/child propagule relationships between neighbor cells.

    Each cell is drawn as four triangles (one per neighbor direction); the
    triangle value marks the neighbor as this cell's propagule child,
    parent, or unrelated (grayscale derived from the top-level channel ID).
    Channel-group borders are overlaid and the figure is saved as a
    kn-packed png.

    Cleanups vs. the previous revision: the dead ``type = NONE`` default
    (every branch reassigned it) is gone, the local no longer shadows the
    ``type`` builtin, unused reads of CellAge/ParentPos were dropped, and a
    duplicate re-read of the Live dataset was removed.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    own_flat = np.array(file['Index']['own']).flatten()
    # neighbor index lookup per direction; note dir_3 maps to 'left' and
    # dir_2 to 'right' in the underlying layout
    dirs = {
        'top': np.array(file['Index']['dir_0']).flatten(),
        'bottom': np.array(file['Index']['dir_1']).flatten(),
        'left': np.array(file['Index']['dir_3']).flatten(),
        'right': np.array(file['Index']['dir_2']).flatten(),
    }
    chans = [
        np.array(file['Channel']['lev_' + str(lev)]['upd_' + str(upd)]).flatten()
        for lev in range(nlev)
    ]
    pvch = np.array(file['PrevChan']['upd_' + str(upd)]).flatten()
    live = np.array(file['Live']['upd_' + str(upd)])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    # classify, for every cell, its relationship with each neighbor
    res = defaultdict(dict)
    for idx in range(own_flat.size):
        for direction, neighbors in dirs.items():
            if pvch[idx] == chans[-1][neighbors[idx]]:
                relation = P_CHILD
            elif pvch[neighbors[idx]] == chans[-1][idx]:
                relation = P_PARENT
            else:
                # unrelated: grayscale channel ID
                relation = (chans[-1][idx] / 2**64) * 0.8
            res[own_flat[idx]][direction] = relation

    # 2D cell-index grid for assembling the image
    own = np.array(file['Index']['own'])
    image = np.flip(np.rot90(np.transpose(np.block([
        [
            np.transpose(RenderTriangles(
                res[val_own]['top'],
                res[val_own]['bottom'],
                res[val_own]['right'],
                res[val_own]['left'],
                val_live,
            ))
            for val_own, val_live in zip(row_own, row_live)
        ]
        for row_own, row_live in zip(own, live)
    ])), k=1), axis=0)

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # borders are drawn in pixel coordinates; 42 appears to be the rendered
    # tile size per cell — TODO confirm against RenderTriangles
    rescale = lambda coord: [v * 42 for v in coord]
    lines_0 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(data_0.shape[0])
            for y in range(data_0.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle=(0, (1, 3)),
        colors='0.5',
    )
    plt.gca().add_collection(lines_0)
    lines_1 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(data_1.shape[0])
            for y in range(data_1.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'directional_propagule_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Render per-direction inbox message traffic as four triangles per
    cell, overlay channel-group borders, and save a kn-packed png.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    # note dir_3 maps to left and dir_2 to right here
    top = np.array(file['InboxTraffic']['dir_0']['upd_' + str(upd)])
    bottom = np.array(file['InboxTraffic']['dir_1']['upd_' + str(upd)])
    left = np.array(file['InboxTraffic']['dir_3']['upd_' + str(upd)])
    right = np.array(file['InboxTraffic']['dir_2']['upd_' + str(upd)])
    live = np.array(file['Live']['upd_' + str(upd)])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    # assemble the per-cell triangle tiles, then rotate/flip into display
    # orientation
    image = np.flip(np.rot90(np.transpose(np.block([
        [
            np.transpose(RenderTriangles(
                val_top, val_bottom, val_right, val_left, val_live
            ))
            for val_top, val_bottom, val_left, val_right, val_live
            in zip(row_top, row_bottom, row_left, row_right, row_live)
        ]
        for row_top, row_bottom, row_left, row_right, row_live
        in zip(top, bottom, left, right, live)
    ])), k=1), axis=0)

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # borders in pixel coordinates; 42 appears to be the per-cell tile
    # size — TODO confirm against RenderTriangles
    rescale = lambda coord: [v * 42 for v in coord]
    lines_0 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(data_0.shape[0])
            for y in range(data_0.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle=(0, (1, 3)),
        colors='0.5',
    )
    plt.gca().add_collection(lines_0)
    lines_1 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(data_1.shape[0])
            for y in range(data_1.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'directional_messaging_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Render each cell's resource stockpile plus inbound contributions,
    overlay channel-group borders, and save a kn-packed png.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))

    # display current stockpile AND inbound resource
    stock = np.array(file['Stockpile']['upd_' + str(upd)])
    share = np.array(file['TotalContribute']['upd_' + str(upd)])
    live = np.array(file['Live']['upd_' + str(upd)])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    def cell_color(val_stock, val_share, val_live):
        # dead
        if not val_live:
            return (0.0, 0.0, 0.0)
        total = val_stock + val_share
        if total > 1.0:
            # enough resource to reproduce (green to yellow)
            return (min(1.0, total - 1.0), 1.0, 0.0)
        if total > 0.0:
            # not yet enough resource to reproduce (blue)
            return (1.0 - total, 1.0 - total, 1.0)
        # negative resource (red)
        return (
            1.0,
            max(0.0, 1.0 + total / 1.25),
            max(0.0, 1.0 + total / 1.25),
        )

    image = np.array([
        [
            cell_color(val_stock, val_share, val_live)
            for val_stock, val_share, val_live
            in zip(row_stock, row_share, row_live)
        ]
        for row_stock, row_share, row_live in zip(stock, share, live)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # dotted gray borders between level-0 channel groups
    lines_0 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle=(0, (1, 3)),
        colors='0.5',
    )
    plt.gca().add_collection(lines_0)

    # solid black borders between level-1 channel groups
    lines_1 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'stockpile_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Render per-direction regulator states, PCA-colored within each
    top-level channel group, as four triangles per cell; save a kn-packed
    png with channel-group borders overlaid.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    channel = np.array(
        file['Channel']['lev_' + str(nlev - 1)]['upd_' + str(upd)]
    ).flatten()
    regulator = [
        np.array(
            file['Regulators']['dir_' + str(direction)]['upd_' + str(upd)]
        ).flatten()
        for direction in range(4)
    ]
    decoder = np.array(
        file['Regulators']['decoder']['upd_' + str(upd)]
    ).flatten()
    live = np.array(file['Live']['upd_' + str(upd)])
    index = np.array(file['Index']['own'])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    # unique group IDs
    ids = {gid for gid in channel.flatten()}

    # one color map per direction: cell index -> RGB
    cmapper = [{} for _ in range(4)]
    for gid in ids:
        tags_to_regs = []
        idxs = []
        dirs = []
        for flat_idx, idx in enumerate(index.flatten()):
            if channel[flat_idx] == gid:
                for direction in range(4):
                    idxs.append(idx)
                    dirs.append(direction)
                    if live.flatten()[flat_idx]:
                        archive = json.loads(
                            decoder[regulator[direction][flat_idx]].decode("utf-8")
                        )['value0']
                        tags = {
                            d['key']: d['value']['value0']['value0']
                            for d in archive['tags']
                        }
                        regulators = {
                            d['key']: d['value']
                            for d in archive['regulators']
                        }
                        tags_to_regs.append({
                            tags[uid]: regulators[uid]["state"]
                            for uid in archive['uids']
                        })

        df = pd.DataFrame.from_records(tags_to_regs).fillna(1)
        n = min(3, len(df.columns), len(df))
        if n:
            pca = PCA(n_components=n)
            pc = None
            with warnings.catch_warnings():
                # ignore sklearn and divide by zero warnings
                # (the nan checks below handle them)
                warnings.simplefilter("ignore")
                pc = pca.fit_transform(df.to_numpy())
                pc = (pc - pc.min(0)) / pc.ptp(0)
            for idx, direction, row in zip(idxs, dirs, pc):
                cmapper[direction][idx] = (
                    row[0] if row.size >= 1 and not np.isnan(row[0]) else 0.5,
                    row[1] if row.size >= 2 and not np.isnan(row[1]) else 0.5,
                    row[2] if row.size >= 3 and not np.isnan(row[2]) else 0.5,
                )
        else:
            # nothing to project: neutral gray
            for idx, direction in zip(idxs, dirs):
                cmapper[direction][idx] = (0.5, 0.5, 0.5)

    image = np.flip(np.rot90(np.transpose(np.block([
        [
            np.transpose(RenderTriangles(
                cmapper[0][val_index],
                cmapper[1][val_index],
                cmapper[2][val_index],
                cmapper[3][val_index],
                val_live,
            ))
            for val_live, val_index in zip(row_live, row_index)
        ]
        for row_live, row_index in zip(live, index)
    ])), k=1), axis=0)

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # borders in pixel coordinates; 42 appears to be the per-cell tile
    # size — TODO confirm against RenderTriangles
    rescale = lambda coord: [v * 42 for v in coord]
    lines_0 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(index.shape[0])
            for y in range(index.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='white',
        linewidths=(2,),
    )
    plt.gca().add_collection(lines_0)
    lines_1 = LineCollection(
        [
            [rescale(coord) for coord in ((x, y), dest)]
            for x in range(index.shape[0])
            for y in range(index.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
        linewidths=(2,),
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'directional_regulator_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd_key, filename):
    """Render spiker-channel (dir_4) inbox traffic counts per cell and
    save a kn-packed png.

    `upd_key` is the full HDF5 key (e.g. 'upd_42'); digits are extracted
    from it for the output metadata.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    live = np.array(file['Live'][upd_key])
    traffic = np.array(file['InboxTraffic']['dir_4'][upd_key])

    # message-count palette for live cells
    palette = {
        0: (1.0, 1.0, 1.0),  # white
        1: (0.0, 1.0, 0.0),  # green
        2: (0.0, 0.0, 1.0),  # blue
        3: (1.0, 0.0, 1.0),  # purple
        4: (1.0, 0.0, 0.0),  # red
    }

    def traffic_color(count, alive):
        if not alive:
            return (0.0, 0.0, 0.0)  # dead
        if count > 4:
            return (1.0, 1.0, 0.0)  # saturated: yellow
        return palette[count]

    image = np.array([
        [
            traffic_color(val_traffic, val_live)
            for val_traffic, val_live in zip(row_traffic, row_live)
        ]
        for row_traffic, row_live in zip(traffic, live)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # outline every cell with a black grid
    grid_lines = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(grid_lines)

    plt.savefig(
        kn.pack({
            'title': 'messaging_spiker_viz',
            'update': ''.join(c for c in upd_key if c.isdigit()),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Color each cell by its active-function profile at update `upd`.

    Cells within the same top-level channel group are PCA-projected
    together so similar function profiles get similar RGB colors; the
    figure is saved as a kn-packed png with channel-group borders overlaid.

    Fix: a ``PCA(n_components=min(3, len(df.columns)))`` instance was
    previously constructed unconditionally and then immediately shadowed by
    the guarded construction below; that dead assignment is removed.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    channel = np.array(
        file['Channel']['lev_' + str(nlev - 1)]['upd_' + str(upd)]).flatten()
    function = [
        np.array(
            file['Functions']['dir_' + str(dir)]['upd_' + str(upd)]).flatten()
        for dir in range(4)
    ]
    live = np.array(file['Live']['upd_' + str(upd)])
    index = np.array(file['Index']['own'])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (np.array(file['Channel']['lev_0']['upd_' + str(upd)])
              if nlev == 1
              else np.array(file['Channel']['lev_1']['upd_' + str(upd)]))

    # get unique group IDs
    ids = {id for id in channel.flatten()}

    # for each group, map cell index -> RGB derived from function counts
    cmapper = {}
    for id in ids:
        fps_to_counts = []
        idxs = []
        for flat_idx, idx in enumerate(index.flatten()):
            if channel[flat_idx] == id:
                idxs.append(idx)
                if live.flatten()[flat_idx]:
                    # count occurrences of each function fingerprint across
                    # the four directions
                    fpcounts = defaultdict(lambda: 0)
                    for dir in range(4):
                        fps = map(
                            lambda x: x['value0']['value0'],
                            json.loads(
                                function[dir][flat_idx].decode("utf-8")
                            )['value0'])
                        for fp in fps:
                            fpcounts[fp] += 1
                    fps_to_counts.append(fpcounts)

        df = pd.DataFrame.from_records(fps_to_counts).fillna(0)
        n = min(3, len(df.columns), len(df))
        if n:
            pca = PCA(n_components=n)
            pc = None
            with warnings.catch_warnings():
                # ignore sklearn and divide by zero warnings
                # (we handle them below)
                warnings.simplefilter("ignore")
                pc = pca.fit_transform(df.to_numpy())
                pc = (pc - pc.min(0)) / pc.ptp(0)
            for idx, row in zip(idxs, pc):
                cmapper[idx] = (
                    row[0] if row.size >= 1 and not np.isnan(row[0]) else 0.5,
                    row[1] if row.size >= 2 and not np.isnan(row[1]) else 0.5,
                    row[2] if row.size >= 3 and not np.isnan(row[2]) else 0.5,
                )
        else:
            # nothing to project: neutral gray
            for idx in idxs:
                cmapper[idx] = (0.5, 0.5, 0.5)

    image = np.array([[
        cmapper[val_index] if val_live else (0.0, 0.0, 0.0)
        for val_index, val_live in zip(row_index, row_live)
    ] for row_index, row_live in zip(index, live)])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # white borders between level-0 channel groups
    lines_0 = LineCollection(
        [((x, y), dest) for x in range(image.shape[0])
         for y in range(image.shape[1])
         for dest in ((x + 1, y), (x, y + 1))
         if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]],
        linestyle='solid', colors='white')
    plt.gca().add_collection(lines_0)

    # black borders between level-1 channel groups
    lines_1 = LineCollection(
        [((x, y), dest) for x in range(image.shape[0])
         for y in range(image.shape[1])
         for dest in ((x + 1, y), (x, y + 1))
         if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]],
        linestyle='solid', colors='black')
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'function_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def ExtractSeed(upd_filename_arg=None, *args, **kwargs):
    """placeholder"""
def RenderAndSave(upd, filename):
    """Render per-cell age bands at update `upd`, overlay channel-group
    borders, and save a kn-packed png.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    cellage = np.array(file['CellAge']['upd_' + str(upd)])
    live = np.array(file['Live']['upd_' + str(upd)])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    def age_color(age, alive):
        # dead cells render black
        if not alive:
            return (0.0, 0.0, 0.0)
        if age < 0:
            # NOTE(review): negative age shares green with the 8-16 band —
            # confirm this is intentional
            return (0.0, 1.0, 0.0)
        if age < 8:
            return (1.0, 0.0, 0.0)
        if age < 16:
            return (0.0, 1.0, 0.0)
        if age < 24:
            return (0.0, 0.0, 1.0)
        return (1.0, 1.0, 1.0)

    image = np.array([
        [
            age_color(val_cellage, val_live)
            for val_cellage, val_live in zip(row_cellage, row_live)
        ]
        for row_cellage, row_live in zip(cellage, live)
    ])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # dotted gray borders between level-0 channel groups
    lines_0 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle=(0, (1, 3)),
        colors='0.5',
    )
    plt.gca().add_collection(lines_0)

    # solid black borders between level-1 channel groups
    lines_1 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'cellage_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())
def RenderAndSave(upd, filename):
    """Color each live cell by its summed regulator state, PCA-projected
    within each top-level channel group, and save a kn-packed png with
    channel-group borders overlaid.
    """
    file = h5py.File(filename, 'r')
    nlev = int(file.attrs.get('NLEV'))
    channel = np.array(
        file['Channel']['lev_' + str(nlev - 1)]['upd_' + str(upd)]
    ).flatten()
    regulators = [
        np.array(
            file['Regulators']['dir_' + str(direction)]['upd_' + str(upd)]
        ).flatten()
        for direction in range(4)
    ]
    live = np.array(file['Live']['upd_' + str(upd)])
    index = np.array(file['Index']['own'])
    data_0 = np.array(file['Channel']['lev_0']['upd_' + str(upd)])
    data_1 = (
        np.array(file['Channel']['lev_0']['upd_' + str(upd)])
        if nlev == 1
        else np.array(file['Channel']['lev_1']['upd_' + str(upd)])
    )

    # unique group IDs
    ids = {gid for gid in channel.flatten()}

    # map cell index -> RGB, one group at a time (live cells only)
    cmapper = {}
    for gid in ids:
        tags_to_regs = []
        idxs = []
        for idx in range(index.flatten().size):
            if channel[idx] == gid and live.flatten()[idx]:
                idxs.append(idx)
                archives = [
                    json.loads(regulator[idx].decode("utf-8"))['value0']
                    for regulator in regulators
                ]
                tags = {
                    d['key']: d['value']['value0']['value0']
                    for d in archives[0]['tags']
                }
                # sum each regulator's value across the four directions
                regulatorsum = defaultdict(lambda: 0.0)
                for archive in archives:
                    for d in archive['regulators']:
                        regulatorsum[d['key']] += d['value']
                tags_to_regs.append({
                    tags[uid]: regulatorsum[uid]
                    for uid in archives[0]['uids']
                })

        # if less than half the cells have the regulator, drop it
        # otherwise (e.g., probably a few cells lost it), assume it's default
        # NOTE(review): dropna(thresh=...) without axis drops ROWS, while the
        # comment above speaks of dropping regulators (columns) — confirm.
        df = pd.DataFrame.from_records(tags_to_regs)
        df = df.dropna(thresh=len(df) / 2).fillna(1)
        n = min(3, len(df.columns), len(df))
        if n:
            pca = PCA(n_components=n)
            pc = None
            with warnings.catch_warnings():
                # ignore sklearn and divide by zero warnings
                # (the nan checks below handle them)
                warnings.simplefilter("ignore")
                pc = pca.fit_transform(df.to_numpy())
                pc = (pc - pc.min(0)) / pc.ptp(0)
            for idx, row in zip(idxs, pc):
                cmapper[idx] = (
                    row[0] if row.size >= 1 and not np.isnan(row[0]) else 0.5,
                    row[1] if row.size >= 2 and not np.isnan(row[1]) else 0.5,
                    row[2] if row.size >= 3 and not np.isnan(row[2]) else 0.5,
                )
        else:
            # nothing to project: neutral gray
            for idx in idxs:
                cmapper[idx] = (0.5, 0.5, 0.5)

    image = np.array([[
        cmapper[val_index] if val_live else (0.0, 0.0, 0.0)
        for val_index, val_live in zip(row_index, row_live)
    ] for row_index, row_live in zip(index, live)])

    plt.figure(figsize=(18, 18))
    plt.imshow(image, extent=(0, image.shape[1], image.shape[0], 0))
    plt.axis('off')
    plt.grid(b=None)

    # white borders between level-0 channel groups
    lines_0 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_0[y][x] != data_0[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='white',
    )
    plt.gca().add_collection(lines_0)

    # black borders between level-1 channel groups
    lines_1 = LineCollection(
        [
            ((x, y), dest)
            for x in range(image.shape[0])
            for y in range(image.shape[1])
            for dest in ((x + 1, y), (x, y + 1))
            if data_1[y][x] != data_1[dest[1] - 1][dest[0] - 1]
        ],
        linestyle='solid',
        colors='black',
    )
    plt.gca().add_collection(lines_1)

    plt.savefig(
        kn.pack({
            'title': 'regulator_viz',
            'update': str(upd),
            'seed': kn.unpack(filename)['seed'],
            'treat': kn.unpack(filename)['treat'],
            '_data_hathash_hash': fsh.FilesHash().hash_files([filename]),
            '_script_fullcat_hash': fsh.FilesHash(
                file_parcel="full_parcel",
                files_join="cat_join",
            ).hash_files([sys.argv[0]]),
            '_source_hash': kn.unpack(filename)['_source_hash'],
            'ext': '.png',
        }),
        transparent=True,
        bbox_inches='tight',
        pad_inches=0,
    )

    plt.clf()
    plt.close(plt.gcf())