#dbindexes=sys.argv[3:];
#with open(docsfile,'r') as docstream:
#    for line in docstream:
try:
    for line in iter(sys.stdin.readline, ''):
        maxconedoc = json.loads(line.rstrip("\n"))
        nform = mat2py(maxconedoc['NORMALFORM'])
        #Strip the origin (all-zero row) from the normal form to recover the facet vertices
        facetverts = [x for x in nform if not all([y == 0 for y in x])]
        facet = LatticePolytope(facetverts)
        facetpts = [list(x) for x in facet.boundary_points()]
        try:
            #Skip this facet if counting its triangulations takes longer than an hour
            with time_limit(3600):
                facetntriang_fine, facetntriang_fine_reg = n_FSRT_facet_from_resolved_verts(facetpts)
        except TimeoutException:
            pass
        else:
            facetntriang = n_FSRT_facet_from_resolved_verts(facetpts)
            print("+MAXCONE." + json.dumps({'NORMALFORM': py2mat(nform)}, separators=(',', ':')) + ">" + json.dumps({'FACETNTRIANG': facetntriang_fine, 'FACETNREGTRIANG': facetntriang_fine_reg}, separators=(',', ':')))
            print("@")
            #print("@"+basecoll+"."+json.dumps(dict([(x,maxconedoc[x]) for x in dbindexes]),separators=(',',':')));
            sys.stdout.flush()
except Exception as e:
    PrintException()
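#NOTE (sketch, assumption): time_limit and TimeoutException are not defined in this
#excerpt. Their usage above is consistent with the common SIGALRM-based context manager
#sketched below; the actual definitions used by the script may differ.
import signal
from contextlib import contextmanager

class TimeoutException(Exception):
    pass

@contextmanager
def time_limit(seconds):
    #Raise TimeoutException if the wrapped block runs longer than `seconds` (Unix only)
    def signal_handler(signum, frame):
        raise TimeoutException("Timed out!")
    signal.signal(signal.SIGALRM, signal_handler)
    signal.alarm(seconds)
    try:
        yield
    finally:
        signal.alarm(0)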
C = PolynomialRing(QQ, names=['t'] + ['D' + str(i + 1) for i in range(len(dresverts))] + ['J' + str(i + 1) for i in range(ndivsJ)])
DD = list(C.gens()[1:-ndivsJ])
JJ = list(C.gens()[-ndivsJ:])
#Define the matrix that converts toric divisors to basis divisors
DtoJmat = [[1 if i == j else 0 for i in range(len(DD))] for j in basisinds]
#Determine the number of triangulations corresponding to the polytope
nalltriangs = len(triangs)
#Add new properties to the base tier of the JSON
print "+POLY1." + json.dumps({'POLYID': polyid}, separators=(',', ':')) + ">" + json.dumps({'DVERTS': py2mat(dverts), 'DRESVERTS': py2mat(dresverts), 'CWS': py2mat(cws), 'RESCWS': py2mat(rescws), 'FAV': fav, 'DTOJ': py2mat(DtoJmat), 'FUNDGP': fgp, 'NALLTRIANGS': nalltriangs}, separators=(',', ':'))
for i in range(nalltriangs):
    print "+TRIANG1." + json.dumps({'POLYID': polyid, 'GEOMN': i + 1, 'TRIANGN': 1
    itensXD_L += [x['ITENSXD']]
    c2Xnums_L += [x['CHERN2XNUMS']]
    eX_L += [x['EULERX']]
    mori_rows_L += [x['MORIMATP']]
#Determine which triangulations to glue together
to_glue_L = glue_groups(itensXD_L, c2Xnums_L, eX_L, mori_rows_L)
#print to_glue_L;
#sys.stdout.flush();
#Compress properties that should remain the same across the whole polytope into single variables
JtoDmat = postchow[0]['JTOD']
invbasis = postchow[0]['INVBASIS']
#Add new properties to base tier of JSON
print "+POLY." + json.dumps({'POLYID': polyid}, separators=(',', ':')) + ">" + json.dumps({'BASIS': py2mat(basis), 'EULER': int(eX_L[0]), 'NGEOMS': len(to_glue_L), 'JTOD': py2mat(JtoDmat), 'INVBASIS': py2mat(invbasis)}, separators=(',', ':'))
sys.stdout.flush()
#Glue triangulations into their composite geometries
g_mori_rows_L = []
g_kahler_rows_L = []
for i in range(len(to_glue_L)):
    #Compute glued Mori and Kahler cone matrices
    mori_rows_group = [mori_rows_L[j] for j in to_glue_L[i]]
    g_mori_rows, g_kahler_rows = glue_mori(DtoJmat, mori_rows_group)
    g_mori_rows_L += [g_mori_rows]
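#NOTE (sketch, assumption): glue_groups is defined elsewhere in this codebase. Its use
#above is consistent with grouping triangulation indices whose hypersurface invariants
#(intersection tensor, second Chern class numbers, Euler number) coincide, so that each
#group can be glued into a single geometry. A minimal stand-in with that behavior follows;
#the Mori cone data is presumably also used by the real implementation (e.g. to check cone
#adjacency) and is ignored here.
def glue_groups_sketch(itensXD_L, c2Xnums_L, eX_L, mori_rows_L):
    groups = {}
    for idx in range(len(itensXD_L)):
        #Triangulations sharing all three invariants are candidates for gluing
        key = (repr(itensXD_L[idx]), repr(c2Xnums_L[idx]), repr(eX_L[idx]))
        groups.setdefault(key, []).append(idx)
    #Return a list of index groups, one per distinct geometry
    return list(groups.values())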
        toricswisscheese_chunk=toricswisscheese_NL_chunk[0];
        gath+=[toricswisscheese_chunk];
posttoricswisscheese_group=comm.gather(gath,root=0);
#Signal ranks to exit current process (if there are no other processes, then exit other ranks)
scatt=[-1 for j in range(size)];
pretoricswisscheese=comm.scatter(scatt,root=0);
#Reorganize gathered information into a serial form
posttoricswisscheese=[x for y in posttoricswisscheese_group for x in y];
#posttoricswisscheese=mongojoin.transpose_list(posttoricswisscheese_redist);
#print posttoricswisscheese;
if len(posttoricswisscheese)==0:
    print "None";
else:
    for toricswisscheese_NL in posttoricswisscheese:
        if len(toricswisscheese_NL[1])>0 and toricswisscheese_NL[1]!="unfav":
            print "+SWISSCHEESE."+json.dumps({'POLYID':polyid,'GEOMN':geomn,'NLARGE':toricswisscheese_NL[0]},separators=(',',':'))+">"+json.dumps({'POLYID':polyid,'GEOMN':geomn,'NLARGE':toricswisscheese_NL[0],'H11':h11,'RMAT2CYCLE':py2mat(toricswisscheese_NL[1][0]),'RMAT4CYCLE':py2mat(toricswisscheese_NL[1][1]),'INTBASIS2CYCLE':bool(toricswisscheese_NL[2]),'INTBASIS4CYCLE':bool(toricswisscheese_NL[3]),'HOM':bool(toricswisscheese_NL[4])},separators=(',',':'));
        else:
            print "None";
sys.stdout.flush();
#######################################################################################################################
##Recombine gathered chunks into a single list of rotation matrices for each geometry
##Loop over numbers of large cycles for current geometry
#NL=1;
#for NLx in posttoricswisscheese:
#    #Loop over pairs of swiss cheese solutions taken from each chunk for the current number of large cycles
#    tsc_L=[];
#    scflag=False;
#    for y in NLx:
#        #If both rotation matrices rotate into integer bases, use them and skip to the next chunk. Else, add them to a list
#        if (y[1] and y[2]):
#            #tscNL_L+=[y];
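#NOTE (sketch, assumption): `comm` and `size` above are the usual mpi4py handles, set up
#earlier in the script roughly as below. The scatter of -1 above acts as a sentinel that
#tells the worker ranks no further chunks are coming, so they can exit their work loop.
#from mpi4py import MPI
#comm = MPI.COMM_WORLD
#size = comm.Get_size()
#rank = comm.Get_rank()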
try:
    #IO Definitions
    polydoc = json.loads(sys.argv[1])
    #Read in pertinent fields from JSON
    polyid = polydoc['POLYID']
    nverts = mat2py(polydoc['NVERTS'])
    lp = LatticePolytope(nverts)
    dlp = lp.polar()
    dverts = [list(x) for x in dlp.normal_form().column_matrix().columns()]
    lp_interpts = [y for x in lp.faces_lp(codim=1) for y in x.interior_points()]
    lp_noninterpts = [list(x) for x in lp.points() if x not in lp_interpts]
    dlp_interpts = [y for x in dlp.faces_lp(codim=1) for y in x.interior_points()]
    dlp_noninterpts = [list(x) for x in dlp.points() if x not in dlp_interpts]
    #Add new properties to the base tier of the JSON
    print "+POLY." + json.dumps({'POLYID': polyid}, separators=(',', ':')) + ">" + json.dumps({'DVERTS': py2mat(dverts), 'NNINTPTS': py2mat(lp_noninterpts), 'DNINTPTS': py2mat(dlp_noninterpts)}, separators=(',', ':'))
    sys.stdout.flush()
except Exception as e:
    PrintException()
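#NOTE (sketch, assumption): mat2py and py2mat are conversion helpers defined elsewhere in
#this codebase. Their use here is consistent with translating between Mathematica-style
#brace-delimited strings (e.g. "{{1,0},{0,1}}") stored in the database and nested Python
#lists; minimal stand-ins with that behavior:
def py2mat_sketch(obj):
    #Recursively render a nested list as a brace-delimited string
    if isinstance(obj, (list, tuple)):
        return "{" + ",".join(py2mat_sketch(x) for x in obj) + "}"
    return str(obj)

def mat2py_sketch(s):
    #Parse a brace-delimited string back into nested Python lists
    #(eval is crude but sufficient for trusted, machine-generated strings)
    return eval(s.replace("{", "[").replace("}", "]"))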
SR = SR_ideal(dresverts, triang)
SRideal = [prod([DD[j] for j in x]) for x in SR]
Ichow = C.ideal(linideal + basechangeideal + SRideal)
#Obtain information about the Chow ring
ipolyAD, itensAD, ipolyXD, itensXD, invbasis, JtoDmat, cnAD, cnAJ = chowAmb(C, DD, JJ, dresverts, triang, Ichow)
ipolyAJ, itensAJ, ipolyXJ, itensXJ, c2XD, c3XD, c2XJ, c3XJ, c2Xnums, eX = chowHysurf(C, DD, JJ, DtoJmat, itensAD, itensXD, cnAD, cnAJ)
#Add new properties to base tier of JSON
print "+TRIANGtemp." + json.dumps({'POLYID': polyid, 'ALLTRIANGN': alltriangn}, separators=(',', ':')) + ">" + json.dumps({'BASIS': py2mat(basis), 'EULER': int(eX), 'JTOD': py2mat(JtoDmat), 'INVBASIS': py2mat(invbasis), 'CHERN2XJ': py2mat(c2XJ), 'CHERN2XNUMS': py2mat(c2Xnums), 'IPOLYXJ': py2mat(ipolyXJ), 'ITENSXJ': py2mat(itensXJ), 'SRIDEAL': py2mat(SRideal), 'CHERN2XD': py2mat(c2XD), 'IPOLYAD': py2mat(ipolyAD), 'ITENSAD': py2mat(itensAD), 'IPOLYXD': py2mat(ipolyXD), 'ITENSXD': py2mat(itensXD), 'IPOLYAJ': py2mat(ipolyAJ), 'ITENSAJ': py2mat(itensAJ),
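#NOTE (sketch, assumption): SR_ideal is defined elsewhere. Its use above is consistent
#with returning the minimal non-faces of the triangulation, i.e. the index sets whose
#divisor products generate the Stanley-Reisner ideal. A brute-force stand-in, exponential
#in len(dresverts) and intended only to illustrate the definition:
import itertools

def SR_ideal_sketch(dresverts, triang):
    #`triang` is taken to be a list of simplices, each a list of indices into dresverts
    simplices = [set(s) for s in triang]
    nonfaces = []
    for r in range(1, len(dresverts) + 1):
        for subset in itertools.combinations(range(len(dresverts)), r):
            sset = set(subset)
            #A non-face is an index set not contained in any simplex of the triangulation
            if not any(sset <= s for s in simplices):
                #Keep only minimal non-faces (no smaller non-face contained in it)
                if not any(set(nf) < sset for nf in nonfaces):
                    nonfaces.append(list(subset))
    return nonfaces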
try:
    facet = LatticePolytope([x for x in nform if x != [0, 0, 0, 0]])
    #faceinfolist_dup=[];
    #for x in dlp.faces_lp(dim=3):
    nform = [list(w) for w in LatticePolytope(facet.vertices().column_matrix().columns() + [vector((0, 0, 0, 0))]).normal_form().column_matrix().columns()]
    #Collect [count, total, min, max] of interior point counts over the 1- and 2-faces of the facet
    dim0 = [facet.nvertices()]
    dim1 = [[len(z), sum(z), min(z), max(z)] for z in [[len(y.interior_points()) for y in facet.faces_lp(dim=1)]]][0]
    dim2 = [[len(z), sum(z), min(z), max(z)] for z in [[len(y.interior_points()) for y in facet.faces_lp(dim=2)]]][0]
    faceinfo = dim0 + dim1 + dim2
    #faceinfolist_dup+=[[nform]+[dim0+dim1+dim2]];
    #faceinfolist=[faceinfolist_dup[i] for i in range(len(faceinfolist_dup)) if faceinfolist_dup[i] not in faceinfolist_dup[:i]];
    #Add new properties to the base tier of the JSON
    #print "+POLY."+json.dumps({'POLYID':polyid},separators=(',',':'))+">"+json.dumps({'DVERTS':py2mat(dverts),'NNINTPTS':py2mat(lp_noninterpts),'DNINTPTS':py2mat(dlp_noninterpts),'FACETNINTPTS':py2mat(dlp_facetpts),'FACETNTRIANGS':py2mat(facetntriangs),'MAXCONENORMALS':[py2mat(x) for x in dlp_maxcone_normalform_dup]},separators=(',',':'));
    #for nform,faceinfo in faceinfolist:
    #print "&MAXCONES."+json.dumps({'NORMALFORM':py2mat(maxcone)},separators=(',',':'))+">"+json.dumps({'POS':{'POLYID':polyid,'NINST':ninstances,'SAMENTRIANG':samentriang},'FACETNTRIANGLIST':facetntriang},separators=(',',':'));
    print "+MAXCONE." + json.dumps({'NORMALFORM': py2mat(nform)}, separators=(',', ':')) + ">" + json.dumps({'FACEINFO': py2mat(faceinfo)}, separators=(',', ':'))
    sys.stdout.flush()
except Exception as e:
    PrintException()
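#NOTE (sketch, assumption): PrintException is a helper defined elsewhere; its use above is
#consistent with the standard traceback-reporting recipe below, which prints the file,
#line number, source text, and exception message of the most recent error.
import linecache
import sys

def PrintException():
    exc_type, exc_obj, tb = sys.exc_info()
    f = tb.tb_frame
    lineno = tb.tb_lineno
    filename = f.f_code.co_filename
    linecache.checkcache(filename)
    line = linecache.getline(filename, lineno, f.f_globals)
    print 'EXCEPTION IN ({}, LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj)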
dlp_facetpts = [dlp_facetpts[i] for i in labels]
dlp_maxcone_normalform_dup = [dlp_maxcone_normalform_dup[i] for i in labels]
dlp_maxcone_normalform_inds = [i for i in range(len(dlp_maxcone_normalform_dup)) if dlp_maxcone_normalform_dup[i] not in dlp_maxcone_normalform_dup[:i]]
#poten_triangs0=list(itertools.product(*facettriangs));
#poten_triangs=[[sorted([sorted(x) for x in y]) for y in z] for z in poten_triangs0];
#Add new properties to the base tier of the JSON
print "+POLY." + json.dumps({'POLYID': polyid}, separators=(',', ':')) + ">" + json.dumps({'DVERTS': py2mat(dverts), 'NNINTPTS': py2mat(lp_noninterpts), 'DNINTPTS': py2mat(dlp_noninterpts), 'FACETNINTPTS': py2mat(dlp_facetpts), 'FACETNTRIANGS': py2mat(facetntriangs), 'MAXCONENORMALS': [py2mat(x) for x in dlp_maxcone_normalform_dup]}, separators=(',', ':'))
for i in dlp_maxcone_normalform_inds:
    maxcone = dlp_maxcone_normalform_dup[i]
    facetntriang = facetntriangs[i]
    ninstances = dlp_maxcone_normalform_dup.count(maxcone)
    samentriang = all([facetntriangs[j] == facetntriang for j in range(len(dlp_maxcone_normalform_dup))
    j = to_glue_L[i][0]
    print "+GEOM." + json.dumps({'POLYID': polyid, 'GEOMN': i + 1}, separators=(',', ':')) + ">" + json.dumps({'POLYID': polyid, 'GEOMN': i + 1, 'H11': h11, 'NTRIANGS': len(to_glue_L[i]), 'CHERN2XJ': triangdocs[j]['CHERN2XJ'], 'CHERN2XNUMS': triangdocs[j]['CHERN2XNUMS'], 'IPOLYXJ': triangdocs[j]['IPOLYXJ'], 'ITENSXJ': triangdocs[j]['ITENSXJ'], 'MORIMAT': py2mat(g_mori_rows), 'KAHLERMAT': py2mat(g_kahler_rows)}, separators=(',', ':'))
    sys.stdout.flush()
    #Add new properties to triangdocs tier of JSON
    m = 0
    for k in to_glue_L[i]:
        print "+TRIANG." + json.dumps({'POLYID': polyid, 'GEOMN': i + 1, 'TRIANGN': m + 1}, separators=(',', ':')) + ">" + json.dumps({