def load_and_unpack(self):
    """
    Load and parse a plain-text (.ka) Kappa snapshot file.

    Reads the snapshot header (event number and time 'T0') from
    self.kappa_file, then iterates over the %init entries, building a
    kt.KappaComplex for each and appending it to self.complexes.

    Sets: self.data, self.event, self.time, self.current_line,
    self.complexes, self.number_of_distinct_complexes.

    Raises:
        Exception: if self.kappa_file does not exist.
    """
    if not os.path.isfile(self.kappa_file):
        raise Exception("Cannot find snapshot file %s" % self.kappa_file)
    else:
        with open(self.kappa_file, "r") as data:
            # NOTE(review): this stores the *file handle* in self.data; the
            # handle is closed once the 'with' block exits, whereas in the
            # JSON path (unpack) self.data is a dict. Confirm nothing reads
            # this handle after loading.
            self.data = data
            # first header line contains 'Event:' followed by the event count;
            # [:-2] trims trailing characters of the header line before strip()
            self.event = float(data.readline().split('Event:')[1][:-2].strip())
            data.readline()
            # third header line carries the snapshot time after a 'T0' marker;
            # quotes are blanked out with re.sub before conversion to float
            t = data.readline().split('T0')[1][:-2]
            self.time = float(re.sub(r'"', ' ', t).strip())
            data.readline()
            self.current_line = data.readline()[:-1]  # this should be the first line of the first complex
            # consume one complex entry at a time until the file is exhausted
            while True:
                entry = self.next_complex_from_file()
                if not entry:
                    break
                if self.use_kappy:
                    komplex = None
                    # until we can repair this
                    sys.exit('Avoid kappy for now')
                    # komplex = kappy.KappaComplex.from_string(entry)
                else:
                    # DIY...
                    # parse the entry; expected shape:
                    #   %init: <count> /*<n> agents*/ <kappa expression>
                    match = re.findall(r'%init: (.*?) \/\*(.*?) agents\*\/ (.*?)$', entry)[0]
                    # build the internal representation
                    komplex = kt.KappaComplex(match[2].strip(), count=int(match[0]))
                self.complexes.append(komplex)
        self.number_of_distinct_complexes = len(self.complexes)
def unpack(self):
    """
    Unpack a JSON-format snapshot previously loaded into self.data.

    Layout reminder:
        data['snapshot_agents'][i][1][0][j]
                                |        |
                                |        jth agent
                                ith complex

    Sets: self.snap_name, self.time, self.event,
    self.number_of_distinct_complexes; appends one kt.KappaComplex per
    complex to self.complexes.

    NOTE: this function encodes part of the JSON spec that's being parsed.
    There should instead be just one spot where JSON (or KA) is parsed
    and converted into an internal representation; ideally this should be
    done by kappy and this package should be agnostic to the
    file format spec. -YK
    """
    self.snap_name = self.data['snapshot_file']
    self.time = float(self.data['snapshot_time'])
    self.event = int(self.data['snapshot_event'])
    self.number_of_distinct_complexes = len(self.data['snapshot_agents'])
    for c in self.data['snapshot_agents']:
        abundance, comp_info = c[0], c[1]
        # deal with old JSON format which didn't have
        # here a list of lists (ugh..) -YK
        # (isinstance instead of the type(...) != list anti-pattern)
        if not isinstance(comp_info[0], list):
            comp_info = [comp_info]
        if self.use_kappy:
            komplex = None
            # until we can repair this
            sys.exit('Avoid kappy for now')
            # komplex = kappy.KappaComplex.from_string(comp_info[0])
        else:
            # DIY: build the internal representation directly
            # (use the unpacked 'abundance' rather than re-indexing c[0])
            komplex = kt.KappaComplex(comp_info[0], count=abundance)
        self.complexes.append(komplex)
        # --- tail of a method whose 'def' lies above this chunk ---
        # Assigns a Graphviz node shape per node: by agent 'type' via the
        # shapes/shapelette lookup, unless a uniform shape was requested.
        # NOTE(review): indentation is reconstructed; 'node', 'uniform',
        # 'shape', 'shapes', 'shapelette' and 'filename' are bound earlier
        # in the enclosing method — confirm against the full file.
        if not uniform:
            self.nxGraph.nodes[node]['shape'] = shapelette[shapes[self.nxGraph.nodes[node]['type']]]
        else:
            self.nxGraph.nodes[node]['shape'] = shape
        # emit the annotated graph as a Graphviz dot file
        nx.nx_agraph.write_dot(self.nxGraph, filename)


if __name__ == '__main__':
    import kappathings as kt
    import kappaviz as viz
    import kappasnap as ks

    # usage scenarios
    # kapparing2 = 'A(r[7] l[1]),A(r[1] l[2]),A(r[2] l[3]),A(r[3] l[4]),A(r[4] l[5]),A(r[5] l[6]),A(r[6] l[7])'
    # kappanoring = 'A(r[.] l[1]),A(r[1] l[2]),A(r[2] l[3]),A(r[3] l[4]),A(r[4] l[5]),A(r[5] l[6]),A(r[6] l[.])'
    kapparing = 'A(r[.] l[1]),A(r[1] l[2] m[7]),A(r[2] l[3]),A(r[3] l[4]),A(r[4] l[5] m[7]),A(r[5] l[6]),A(r[6] l[.])'
    # create a KappaComplex with whatever assignment of node identifiers arises
    # (that's the normalized=False flag).
    c = kt.KappaComplex(kapparing)
    c.show()
    # build the graph wrapper and extract a cycle plus a cycle basis
    g = KappaGraph(c)
    cycle = g.get_cycle()
    basis, n = g.get_cycle_basis()
    print(cycle)
    print(basis)
    # render the complex and highlight the cycle edges
    r = viz.Renderer(c)
    # r.html_render()
    r.render()
    r.color_edgelists(edge_list=cycle)
    r.show()
# create a KappaComplex with whatever assignment of node identifiers arises # (that's the normalized=False flag). line = "A(l[19] r[.] p[2]), A(l[53] r[19] p[42]), A(l[37] r[53] p[45]), A(l[.] r[37] p[29]), P(a1[3] a2[51] a3[" \ "29] d[.]), A(l[20] r[.] p[3]), A(l[.] r[20] p[27]), P(a1[27] a2[.] a3[.] d[44]), P(a1[.] a2[.] a3[.] d[" \ "44]), A(l[.] r[.] p[51]), P(a1[14] a2[.] a3[45] d[22]), A(l[24] r[.] p[14]), A(l[.] r[24] p[50]), " \ "P(a1[50] a2[.] a3[30] d[.]), A(l[13] r[16] p[30]), A(l[.] r[13] p[21]), P(a1[21] a2[35] a3[1] d[9]), " \ "A(l[.] r[.] p[35]), A(l[.] r[26] p[1]), A(l[26] r[.] p[32]), P(a1[32] a2[.] a3[.] d[9]), A(l[16] r[54] p[" \ ".]), A(l[54] r[38] p[40]), A(l[38] r[.] p[7]), P(a1[.] a2[.] a3[7] d[52]), P(a1[18] a2[.] a3[4] d[52]), " \ "A(l[.] r[.] p[18]), A(l[23] r[.] p[4]), A(l[49] r[23] p[47]), A(l[28] r[49] p[11]), A(l[6] r[28] p[31]), " \ "A(l[.] r[6] p[12]), P(a1[.] a2[12] a3[.] d[.]), P(a1[.] a2[31] a3[.] d[15]), P(a1[.] a2[5] a3[.] d[15]), " \ "A(l[.] r[.] p[5]), P(a1[8] a2[.] a3[11] d[36]), A(l[.] r[39] p[8]), A(l[39] r[.] p[25]), P(a1[41] a2[.] " \ "a3[25] d[33]), A(l[.] r[.] p[41]), P(a1[.] a2[43] a3[.] d[33]), A(l[.] r[46] p[43]), A(l[46] r[.] p[10]), " \ "P(a1[10] a2[.] a3[.] d[22]), P(a1[34] a2[.] a3[.] d[36]), A(l[.] r[.] p[34]), P(a1[.] a2[47] a3[.] d[.]), " \ "P(a1[.] a2[40] a3[42] d[17]), P(a1[48] a2[2] a3[.] d[17]), A(l[.] r[.] p[48]) " line2 = "A(l[.] r[4] p[1]), A(l[4] r[.] p[3]), P(a1[3] a2[1] a3[.] d[2]), P(a1[.] a2[.] a3[.] d[2])" c1 = kt.KappaComplex(line) # r = Renderer(c1) # r.render(labels='no', node_size=20, font_size=9, line_width=1, edge_color='gray') # show() c2 = kt.KappaComplex(line2) plt.ion() canvas = Canvas(2, 2) r = Renderer(c1, canvas) r2 = Renderer(c2, canvas) r.render((1, 2), labels='no', node_size=20, font_size=9,
            # --- tail of a SiteGraphMatcher comparison method (its 'def'
            # lies above this chunk; indentation reconstructed) ---
            # compare the ghost/stub partner's agent type and site name
            # against the host side; any mismatch rejects the match
            h_type = h_partner.split('.')[0]
            if ghost_type != h_type or ghost_site != h_site:
                return False
        elif '.' in h_bond:
            # h_bond is also a stub
            if p_bond != h_bond:
                return False
        # all checks passed: the sites are compatible
        return True


if __name__ == '__main__':
    import kappasnap as ks
    import time
    import re

    # demo: automorphisms of a small complex
    SGM = SiteGraphMatcher()
    G1 = kt.KappaComplex('B(b[1]{a}), B(b[1])')
    maps = SGM.automorphisms(G1)
    print_map(maps)

    # demo: embeddings in both directions between two complexes
    SGM = SiteGraphMatcher()
    G1 = kt.KappaComplex('B(b[1]), B(b[1]{a})')
    G2 = kt.KappaComplex('B(b[2]{b}), B(b[2]{a})')
    maps = SGM.all_embeddings(G1, G2)
    print_map(maps)
    maps = SGM.all_embeddings(G2, G1)
    print_map(maps)

    G1 = kt.KappaComplex(
        'A(r[5] p[.]), A(l[1] p[2]), A(r[1] p[3] l[5]), P(a3[2] d[4]), P(a1[3] d[4])'
    )
    # NOTE(review): the chunk ends mid-statement; the argument continues past this view
    G2a = kt.KappaComplex(