def main():
    """Command-line entry point for the LL parser (Python 2).

    Expects: python LLParser.py <grammar file> <program file> [flags]
    Flags are only looked for from argv[3] onward:
      -v   enable Generator's verbose mode
      -V   set this module's global VERBOSE flag
      -db  set this module's global DEBUG flag
    With fewer arguments, prints usage help instead.
    """
    if len(sys.argv) >= 3:
        if "-v" in sys.argv[3:]:
            #global VERBOSE
            Generator.setVerbose(True)
            #VERBOSE = True
        if "-V" in sys.argv[3:]:
            global VERBOSE
            #Generator.setVerbose(True)
            VERBOSE = True
        if "-db" in sys.argv[3:]:
            global DEBUG
            DEBUG = True
        # Build the grammar and parse table, then drive the LL parse of the
        # program file.
        grammar = ProcessGrammar(sys.argv[1])
        table = GetTable(grammar)
        InitilizeScanFile(sys.argv[2])
        LLDriver(grammar, table)
    else:
        # Usage help (Python 2 print statements).
        print "!help! this program is uses command line input"
        print
        print "!help! try /> python LLParser.py <Grammarfilename> <ProgramFilename>"
        print "!help! sometimes windows uses /> LLParser.py <Grammarfilename> <ProgramFilename"
        pass
def upload_file():
    """Flask view: list uploaded songs and accept new uploads.

    GET  -> render index.html with the user's song list; (re)sets the
            'foldername' cookie so later requests reuse the same folder.
    POST -> save each allowed uploaded file into the per-user folder, run
            Generator.generate on it, then redirect back to this view.
    """
    folder_name = request.cookies.get('foldername')
    if not folder_name:
        print("no foldername")
        folder_name = get_foldername()
    upload_folder = UPLOAD_FOLDER_PREFIX.format(folder_name)
    # Bug fix: the original did `folder_path = os.makedirs(...)`, which always
    # bound None (makedirs returns nothing) and raised OSError on every
    # request after the first because the directory already existed.
    if not os.path.isdir(upload_folder):
        os.makedirs(upload_folder)
    print(upload_folder)
    if request.method == 'POST':
        upload_files = request.files.getlist("file[]")
        ticks = int(request.form['ticks'])
        nsize = int(request.form['nsize'])
        policy = request.form['policy']
        for f in upload_files:
            if f and allowed_file(f.filename):
                # secure_filename strips path components from the client name.
                filename = secure_filename(f.filename)
                destination = os.path.join(upload_folder, filename)
                f.save(destination)
                Generator.generate(filename, ticks, folder=upload_folder,
                                   nsize=nsize, policy=policy)
        # Post/Redirect/Get: avoid re-submitting the form on refresh.
        return redirect(url_for('upload_file'))
    songs = os.listdir(upload_folder)
    resp = make_response(render_template("index.html", title='Sebastian Music',
                                         songs=songs,
                                         upload_folder=upload_folder))
    resp.set_cookie('foldername', folder_name)
    return resp
def move_data(input, output_filename):
    """Feed `input` to Generator via in.txt, then copy out.tex to `output_filename`.

    Generator.main_f reads its input from "in.txt" and writes its result to
    "out.tex"; this helper wires arbitrary text through that fixed-file
    interface.
    """
    with open("in.txt", "w") as staging:
        staging.write(input)
    Generator.main_f()
    with open("out.tex", "r") as produced:
        generated_text = produced.read()
    with open(output_filename, "w") as target:
        target.write(generated_text)
def extract_feature(file,func):
    """Scan a parsed-corpus file and feed per-word (context, category) pairs to `func`.

    Each line is expected as: "i j k l node1\tnode2\t..." where i..j and k..l
    are word-index spans and the nodes describe a dependency tree
    (slash-separated fields; a synthetic root node is prepended).
    Python 2 code (print statement).
    """
    for line in file:
        # format: i j k l nodes splitted by '\t'
        # parse line
        contents = line.strip().split('\t')
        i,j,k,l = map(int,contents[0:4])
        if k<=j or i>j or k > l: continue # invalid sentence: spans must satisfy i<=j < k<=l
        nodes = ['0/null/null/null/0']  # synthetic root node at index 0
        nodes.extend(contents[4:])
        tree = Generator.build_tree(nodes)
        # Label every word: 'C' if inside [i,j], 'E' if inside [k,l]
        # (E wins on overlap), else 'N'; the root stays 'N'.
        cat = []
        for w in range(len(nodes)):
            c = 'N'
            if w>0:
                if w >= i and w <=j: c = 'C'
                if w >= k and w <= l: c = 'E'
            cat.append(c)
        for w in range(1,len(nodes)): # for every w (word index), we extract context/features for it
            context = get_context(w,tree,cat) # extract contexts from the dependency subtree where w belongs to
            print 'context of `',str(tree.sentence[w]),'`',context
            # feature incremental for feat_dict
            func(context,cat[w])
def predictword(tree, i, hist):
    """Return the model's scores for position `i` of `tree`.

    `hist` must contain exactly the i predictions already made (one per
    preceding position); the maxent model `m` evaluates all outcomes for the
    extracted context.
    """
    assert tree
    assert len(hist) == i
    return m.eval_all(Generator.get_context(i, tree, hist))
def generate(self):
    """Run the generator with the UI's seed/count and enable the draw controls.

    Reads the generation count from the menu widget and the seed from the
    input field; on a non-empty result, marks the instance as generated,
    enables the draw button and fullscreen checkbox, and shows the output.
    """
    iteration_count = int(self.menu_gen.get())
    seed_value = self.inp_seed.get()
    self.output = Generator.convert(seed_value, iteration_count)
    if len(self.output) > 0:
        self.generated = True
        for control in (self.butt_draw, self.chek_fullscrn):
            control.config(state='normal')
        self.clearOutput(self.output)
def generateDataTypeTest(table, testIndent):
    """Emit one testInsertInvalid<Column> unittest method per column of `table`.

    Each generated method assigns an invalid value for the column's type and
    asserts that the insert raises DataError.  Returns the generated source
    as a single string indented by `testIndent` spaces.
    """
    indent = testIndent + 4
    tests = ""
    for column in table.columns:
        # Bug fix: the original used `tests = ...` here, discarding all but
        # the last column's test method.
        tests += "%sdef\ttestInsertInvalid%s(self):\n" % (" " * testIndent, generator.asClassName(column.name))
        # Bug fix: the original interpolated the integer `indent` itself
        # (producing e.g. "8_name = ...") instead of that many spaces, and
        # omitted the newline, gluing this line to the assertion below.
        tests += "%s_%s = %s\n" % (" " * indent, generator.asElementName(column.name), generator.invalidValueByType(column))
        tests += "%sself.assertRaisesRegexp(DataError, self.insert%s ,\n" % (" " * indent, generator.asClassName(table.name))
    return tests
def generateAssertEquals(table, notSelfElement, indent, prefix = ""):
    """Build assertEqual source lines comparing expected values against rows[0].

    For each column in order: foreign-key columns compare against the
    referenced element's attribute; otherwise the name is derived from
    `prefix`/`notSelfElement`.  Output lines are indented by 8 spaces.
    NOTE(review): the `indent` parameter is accepted but unused (fixed 8).
    """
    generated = []
    for position, column in enumerate(table.columns):
        reference = column.getReferenceIfExists(table)
        if reference is not None:
            expected = "self.%s.%s" % (generator.asElementName(reference.referToTable),
                                       generator.asElementName(reference.referToColumn))
        elif prefix == "":
            if column.name != notSelfElement:
                expected = "self.%s" % generator.asElementName(column.name)
            else:
                expected = "_%s" % generator.asElementName(column.name)
        elif column.name != notSelfElement:
            expected = "%s.%s" % (prefix, generator.asElementName(column.name))
        else:
            # Prefixed call comparing its own element: nothing to compare to.
            expected = ""
        generated.append("%sself.assertEqual(%s, str(rows[0][%s]))\n" % (" " * 8, expected, position))
    return "".join(generated)
def generateDropTest(tables, table, testIndent):
    """Generate unittest source verifying DELETE behaviour for `table`.

    Emits one testDrop<Table> method (insert a row, delete it by its first
    column, assert the select comes back empty), plus one
    testDrop<RefTable>By<Table> method per table referencing `table`,
    checking that cascading deletes remove the referencing rows too.
    Returns the generated source as one string.
    """
    indent = testIndent + 4
    # Assumes the first column is the row identifier used for delete/select
    # keyword arguments -- TODO confirm against the schema model.
    identifier = table.columns[0].name
    idElement = generator.asElementName(identifier)
    drop = "%sdef\ttestDrop%s(self):\n"%(" " *testIndent, generator.asClassName(table.name))
    drop += generator.insertToDB(table, indent)
    drop += """%sdb.deleteFromTable(self.conn, "%s", %s=self.%s)\n"""%(" "*indent, table.name, identifier, idElement)
    drop += """%srows = db.selectFrom(self.conn, {"%s"}, "*", %s=self.%s)\n"""%(" "*indent, table.name, identifier, idElement)
    drop += "%sself.assertEqual(rows, [])\n\n"%(" "*indent)
    referencingTables = table.getReferencingTables(tables)
    for refTable in referencingTables:
        drop += "%sdef\ttestDrop%sBy%s(self):\n"%(" " *testIndent, generator.asClassName(refTable.name), generator.asClassName(table.name))
        drop += generator.insertToDB(table, indent)
        drop += generator.insertToDB(refTable, indent, generator.asElementName(refTable.name))
        # Delete the referenced row, then assert rows referencing it are gone.
        drop += """%sdb.deleteFromTable(self.conn, "%s", %s=self.%s)\n"""%(" "*indent, table.name, identifier, idElement)
        drop += """%srows = db.selectFrom(self.conn, {"%s"}, "*", %s=self.%s.%s)\n"""%(" "*indent, refTable.name, refTable.columns[0].name, generator.asElementName(refTable.name), generator.asElementName(refTable.columns[0].name))
        drop += "%sself.assertEqual(rows, [])\n\n"%(" "*indent)
    return drop
def predict_file(filein, fileout):
    """Predict labels for every sentence in `filein`, writing "word/label" lines.

    Input lines follow the corpus format used elsewhere in this file (four
    span indices, then tab-separated tree nodes).  Python 2 code
    (`print >> fileout` emits the trailing newline per sentence).
    """
    for line in filein:
        contents = line.strip().split('\t')
        nodes = ['0/null/null/null/0']  # synthetic root node at index 0
        nodes.extend(contents[4:])
        tree = Generator.build_tree(nodes)
        if len(tree.sentence) == 0: continue  # skip empty parses
        results = predict_sentence(tree, 3)
        assert (len(tree.sentence) == len(results))
        # Skip index 0 (the synthetic root); one "word/label" token per word.
        for i in range(1,len(tree.sentence)):
            w = tree.sentence[i].word
            fileout.write('%s/%s ' % (w, results[i]))
        print >> fileout
def post(self):
    """Handle the guestbook form POST: build a sudoku and store a Greeting.

    The posted 'content' selects the difficulty level; a full sudoku grid is
    generated and then reduced to that level, flattened into the module-level
    `sent_cells` list, and (for signed-in users) attached to the Greeting's
    Author before the redirect.  Python 2 / Google App Engine code.
    """
    # We set the same parent key on the 'Greeting' to ensure each
    # Greeting is in the same entity group. Queries across the
    # single entity group will be consistent. However, the write
    # rate to a single entity group should be limited to
    # ~1/second.
    guestbook_name = self.request.get('guestbook_name', DEFAULT_GUESTBOOK_NAME)
    greeting = Greeting(parent=guestbook_key(guestbook_name))
    greeting.content = self.request.get('content')
    level = 0
    #greeting.sudoku_id = random.randint(0,99)
    greeting.sudoku_id = 0
    # Map the submitted difficulty text to a numeric level (0 if unknown).
    if greeting.content == "Easy": level = 1
    if greeting.content == "Medium": level = 2
    if greeting.content == "Difficult": level = 3
    # 9x9 working grid for the generator.
    s = [[0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0],
         [0,0,0,0,0,0,0,0,0]]
    # NOTE(review): s1 is only used by the commented-out file-based path below.
    s1 = [0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0,
          0,0,0,0,0,0,0,0,0]
    # Fill a complete grid, then blank cells according to the difficulty.
    Generator.fill_sudoku(s,0,0)
    Generator.reduce_sudoku(s,level)
    # Flatten row-major into the shared sent_cells buffer (strings).
    for i in range(0,9):
        for j in range(0,9):
            sent_cells[9*i+j]=str(s[i][j])
    """fname = 'SudokuPuzzles.txt'
    with open(fname) as f:
        content = f.readlines()
    content = [x.strip('\n') for x in content]
    s1 = content[greeting.sudoku_id]
    for i in range(0,81):
        sent_cells[i] = str(s1[i])"""
    if users.get_current_user():
        greeting.author = Author(
            identity= users.get_current_user().user_id(),
            email= users.get_current_user().email(),
            cells= sent_cells)
    greeting.put()
    query_params = {'guestbook_name': guestbook_name}
    #taskqueue.add(url='/genSudoku', params={'key': greeting.content})
    self.redirect('/?' + urllib.urlencode(query_params))
#Networkx Graph Gen as Seed, Alpha Beta after Testing #G = nx.watts_strogatz_graph(5, 2, 0.1) #nx.draw(G) #plt.show() # if you want to visualize your seed graph first #init = InitMatrix(nodes) #init = init.makeStochasticABFromNetworkxGraph(G, 0.75, 0.5) #Networkx Graph Gen as Seed Testing, not Stochastic after G = nx.watts_strogatz_graph(5, 2, 0.1) #nx.draw(G) #plt.show() # if you want to visualize your seed graph first init = InitMatrix(nodes) init = init.makeFromNetworkxGraph(G) init.addSelfEdges() # if you want to ensure self edges for Kronecker k = 3 print "Seed Matrix Nodes:" print nodes print "Kronecker Iterations:" print k nxgraph = Generator.generateStochasticKron(init, k, False) #for line in nx.generate_edgelist(nxgraph, data=False): # print(line) print "Done Creating Network!" nx.draw(nxgraph) #pos=nx.random_layout(nxgraph) plt.show() #print "Creating Histogram..." #histogramInput = create_graph_stats(nxgraph) #test.histogram(histogramInput, 41)
def game():
    """Run one round of the guessing game over the interval 1..99.

    Draws the round's numbers from Generator.rand_gener and hands them,
    together with the interval bounds, to Generator.game_inputs which runs
    the interaction.
    """
    lower_bound, upper_bound = 1, 99
    drawn_numbers = list(Generator.rand_gener(lower_bound, upper_bound))
    Generator.game_inputs(drawn_numbers, lower_bound, upper_bound)
__author__ = 'Paperwork' #!/usr/bin/python from twython import Twython import Generator io_stream = open("password.txt", "r+") #Password in a seperate txt file not commited to github # TODO: Encrypt the API tokens instead of storing it as plain txt temp = io_stream.read() io_stream.close() temp = temp.split(",") app_key = temp[0] app_secret = temp[1] oauth_token = temp[2] oauth_token_secret = temp[3] twitter = Twython(app_key, app_secret, oauth_token, oauth_token_secret) #The above should just be a single line, without the break twitter.update_status(status=Generator.main()) #calls Generator.py
def generate(templatePath, outputPath):
    """Render the template at `templatePath` into the file at `outputPath`.

    Both arguments are path-component tuples: `templatePath` is resolved via
    Resource.getPath, `outputPath` via os.path.join.  The module-level
    `namespace` supplies the template variables.
    """
    Log.debug("Generating %s" % (outputPath[-1]))
    # Bug fix: the original passed open(...) inline and never closed the
    # handle; the `with` block guarantees the output file is flushed and
    # closed even if Generator.generate raises.
    with open(os.path.join(*outputPath), "w") as outputFile:
        Generator.generate(templates = [Resource.getPath(*templatePath)],
                           namespace = namespace,
                           outputFile = outputFile)
# - plot (array) - import Generator as G import Transform as T import Converter as C # Create a cartesian grid a = G.cart((0, 0, 0), (1, 1, 1), (10, 10, 10)) # plot mesh C.plot(a) a = T.translate(a, (0.5 * 1, 0, 0)) C.plot(a) a = T.translate(a, (0.5 * 1, 0, 0)) C.plot(a)
# - enforceX, enforceY, ... monotonic (array) - import Converter as C import Generator as G import KCore.test as test a = G.cart((0, 0, 0), (1, 1, 1), (20, 20, 10)) b = G.enforceX(a, 5., 0.2, 10, 5) b = G.enforceX(b, 15., 0.2, 10, 5) b = G.enforceY(b, 10., 0.1, 5, 5) b = G.enforcePlusY(b, 0.01, 20, 5) test.testA([a, b])
# - distance2Walls (array) - import Dist2Walls import Generator as G import Converter as C import Geom as D # Bloc dont on cherche la distance a la paroi a = G.cart((0., 0., 0.), (0.1, 0.1, 0.1), (10, 10, 10)) # Paroi sphere = D.sphere((1.2, 0., 0.), 0.2, 30) cellN = C.initVars(sphere, 'cellN', 1.) # Calcul de la distance a la paroi dist = Dist2Walls.distance2Walls(a, [sphere], cellnbodies=[cellN], loc='centers', type='ortho') ac = C.node2Center(a) ac = C.addVars([ac, dist]) C.convertArrays2File([ac], 'out.plt')
#!/usr/bin/env python # coding: utf-8 r"""axisym (array)""" import Generator as G import Converter as C import Geom as D import Transform as T # Axisym a curve a0 = D.line((0.5, 0, 0), (0.6, 0, 1)) a = D.axisym(a0, (0., 0., 0.), (0., 0., 1.), 360., 360) C.convertArrays2File([a], "out.plt") # Axisym a curve with varying r a0 = D.line((1.0, 0, 0), (0., 0, 1)) a1 = D.circle((0, 0, 0), 2.) import Modeler.Models as Models a1 = Models.circle2(1, 0.8) a = D.axisym(a0, (0., 0., 0.), (0., 0., 1.), rmod=a1) C.convertArrays2File([a, a0, a1], "out.plt") # Axisym a 2D cart grid a0 = G.cart((0., 0., 0.), (0.1, 0.1, 0.2), (10, 10, 1)) a = D.axisym(a0, (1., 0., 0.), (0., 1., 0.), 30., 4) C.convertArrays2File([a], "out.plt")
def polyQuadMesher(polyQuad, h, hf, density, next):
    """Generate a multiple mesh for a polyquad.
    Usage: polyQuadMesher( polyQuad, h, hf, density, next)

    Parameters:
      polyQuad -- a 'QUAD' array [_, coords, connectivity, 'QUAD']
      h        -- wall-normal height of the generated layer
      hf       -- first-cell height near the wall
      density  -- target point density along the edges
      next     -- number of extension points (NOTE(review): shadows the
                  built-in next(); kept for interface compatibility)

    Returns [mesh, walls, h, density]: one TFI block per quad element plus
    the per-block wall index ranges.  Python 2 code (print statements,
    xrange).  Comments translated from French.
    """
    import Converter as C
    import Transform as T
    polyQuad = G.close(polyQuad)
    addFactor = 0.2
    if (len(polyQuad) != 4):
        raise TypeError("polyQuadMesher: requires a QUAD array.")
    else:
        if (polyQuad[3] != 'QUAD'):
            raise TypeError("polyQuadMesher: requires a QUAD array.")
    f = polyQuad[1]   # point coordinates
    c = polyQuad[2]   # quad connectivity (1-based indices)
    ne = c.shape[1]   # number of quad elements
    deuxPiSur3 = 2. * math.pi / 3.  # 2*pi/3 angle threshold between normals
    # Compute the minimum and maximum edge lengths over all quads
    lmin = 1.e6
    lmax = 0.
    for i in xrange(ne):
        # Edge 1-2
        ind1 = c[0, i] - 1
        ind2 = c[1, i] - 1
        x1 = f[0, ind1]; y1 = f[1, ind1]; z1 = f[2, ind1]
        x2 = f[0, ind2]; y2 = f[1, ind2]; z2 = f[2, ind2]
        l = math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2))
        lmin = min(lmin, l)
        lmax = max(lmax, l)
        # Edge 2-3
        ind1 = c[1, i] - 1
        ind2 = c[2, i] - 1
        x1 = f[0, ind1]; y1 = f[1, ind1]; z1 = f[2, ind1]
        x2 = f[0, ind2]; y2 = f[1, ind2]; z2 = f[2, ind2]
        l = math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2))
        lmin = min(lmin, l)
        lmax = max(lmax, l)
        # Edge 3-4
        ind1 = c[2, i] - 1
        ind2 = c[3, i] - 1
        x1 = f[0, ind1]; y1 = f[1, ind1]; z1 = f[2, ind1]
        x2 = f[0, ind2]; y2 = f[1, ind2]; z2 = f[2, ind2]
        l = math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2))
        lmin = min(lmin, l)
        lmax = max(lmax, l)
        # Edge 4-1
        ind1 = c[3, i] - 1
        ind2 = c[0, i] - 1
        x1 = f[0, ind1]; y1 = f[1, ind1]; z1 = f[2, ind1]
        x2 = f[0, ind2]; y2 = f[1, ind2]; z2 = f[2, ind2]
        l = math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2))
        lmin = min(lmin, l)
        lmax = max(lmax, l)
    # Cap the height to the maximum admissible value
    if (h > 0.9 * lmin):
        h = 0.9 * lmin
        print "Warning: height changed to", h, "..."
        print "...because length of a line segment is", lmin
    # Enforce the minimum density in the wall-normal direction
    nk = int(h * density) + 1
    if (nk < 4):
        density = 4. / h
        print "Warning: density changed to", density, "to have 4 points in height."
    # Enforce the minimum density along the edges
    n = int(lmax * density) + 1
    if (n < 4):
        density = 4. / lmax
        # NOTE(review): `i` here is the last index of the length loop above,
        # not a meaningful segment number -- confirm intent.
        print "Warning: density changed to", density, "to have 4 points per segment", i
    # Automatic computation of the extension
    # extension = int(h*density)
    # Preliminary computations: wall-normal distribution with near-wall
    # clustering (enforcePlusX), and per-layer heights in delta.
    nk = int(h * density) + 1
    distribk = G.cart((0, 0, 0), (1. / nk, 1, 1), (nk + 1, 1, 1))
    add = max(nk * h / (20 * hf), 1)
    add = min(int(add), 3 * nk)
    add = int(addFactor * add)
    distribk = G.enforcePlusX(distribk, hf / h, nk - 1, add)
    nk = distribk[2] - 1
    delta = C.array('d', nk, 1, 1)
    for i in xrange(nk):
        delta[1][0, i] = h * (distribk[1][0, i + 1] - distribk[1][0, i])
    mesh = []
    walls = []
    # Generate the meshes, one block per quad element
    for i in xrange(ne):
        ind1 = c[0, i] - 1
        ind2 = c[1, i] - 1
        ind3 = c[2, i] - 1
        ind4 = c[3, i] - 1
        x1 = f[0, ind1]; y1 = f[1, ind1]; z1 = f[2, ind1]
        x2 = f[0, ind2]; y2 = f[1, ind2]; z2 = f[2, ind2]
        x3 = f[0, ind3]; y3 = f[1, ind3]; z3 = f[2, ind3]
        x4 = f[0, ind4]; y4 = f[1, ind4]; z4 = f[2, ind4]
        # Per-edge treatment flag:
        # ext = 1 ; Chimera extension
        # ext = 0 ; wall TFI
        # ext = -1; TFI MD + TTM
        iQ1 = findNeighbourIndex(polyQuad, i, ind1, ind2)
        iQ2 = findNeighbourIndex(polyQuad, i, ind2, ind3)
        iQ3 = findNeighbourIndex(polyQuad, i, ind3, ind4)
        iQ4 = findNeighbourIndex(polyQuad, i, ind4, ind1)
        [nx, ny, nz] = normalVector(polyQuad, i)
        [n1x, n1y, n1z] = normalVector(polyQuad, iQ1)
        [n2x, n2y, n2z] = normalVector(polyQuad, iQ2)
        [n3x, n3y, n3z] = normalVector(polyQuad, iQ3)
        [n4x, n4y, n4z] = normalVector(polyQuad, iQ4)
        [t1x, t1y, t1z] = tangentVector(polyQuad, i, ind1, ind2)
        [t2x, t2y, t2z] = tangentVector(polyQuad, i, ind2, ind3)
        [t3x, t3y, t3z] = tangentVector(polyQuad, i, ind3, ind4)
        [t4x, t4y, t4z] = tangentVector(polyQuad, i, ind4, ind1)
        # Clamped cosines of the angle between this quad's normal and each
        # neighbour's normal.
        a1 = -(nx * n1x + ny * n1y + nz * n1z)
        a1 = min(a1, 1.)
        a1 = max(a1, -1.)
        a2 = -(nx * n2x + ny * n2y + nz * n2z)
        a2 = min(a2, 1.)
        a2 = max(a2, -1.)
        a3 = -(nx * n3x + ny * n3y + nz * n3z)
        a3 = min(a3, 1.)
        a3 = max(a3, -1.)
        a4 = -(nx * n4x + ny * n4y + nz * n4z)
        a4 = min(a4, 1.)
        a4 = max(a4, -1.)
        # Pick the treatment of each of the four edges.
        if (t1x * n1x + t1y * n1y + t1z * n1z < -1.e-10):  # extension
            ext1 = 1
        elif (math.acos(a1) < deuxPiSur3):  # TFI with 2 walls
            ext1 = 0
        else:  # TFI MD + TTM
            ext1 = -1
        if (t2x * n2x + t2y * n2y + t2z * n2z < -1.e-10):  # extension
            ext2 = 1
        elif (math.acos(a2) < deuxPiSur3):  # TFI with 2 walls
            ext2 = 0
        else:  # TFI MD + TTM
            ext2 = -1
        if (t3x * n3x + t3y * n3y + t3z * n3z < -1.e-10):  # extension
            ext3 = 1
        elif (math.acos(a3) < deuxPiSur3):  # TFI with 2 walls
            ext3 = 0
        else:  # TFI MD + TTM
            ext3 = -1
        if (t4x * n4x + t4y * n4y + t4z * n4z < -1.e-10):  # extension
            ext4 = 1
        elif (math.acos(a4) < deuxPiSur3):  # TFI with 2 walls
            ext4 = 0
        else:  # TFI MD + TTM
            ext4 = -1
        # lextN is 1 when edge N is extended, 0 otherwise.
        lext1 = max(0, ext1)
        lext2 = max(0, ext2)
        lext3 = max(0, ext3)
        lext4 = max(0, ext4)
        n = int(lmax * density) + 1
        ni = n + (lext2 + lext4) * next
        nj = n + (lext1 + lext3) * next
        nextsn1 = next / (n - 1.)  # relative extension length per edge
        # Corner p1: intersection of the (possibly extended) edge lines.
        q0x = x4 + lext4 * (x4 - x3) * nextsn1
        q0y = y4 + lext4 * (y4 - y3) * nextsn1
        q0z = z4 + lext4 * (z4 - z3) * nextsn1
        q1x = x1 + lext4 * (x1 - x2) * nextsn1
        q1y = y1 + lext4 * (y1 - y2) * nextsn1
        q1z = z1 + lext4 * (z1 - z2) * nextsn1
        r0x = x2 + lext1 * (x2 - x3) * nextsn1
        r0y = y2 + lext1 * (y2 - y3) * nextsn1
        r0z = z2 + lext1 * (z2 - z3) * nextsn1
        r1x = x1 + lext1 * (x1 - x4) * nextsn1
        r1y = y1 + lext1 * (y1 - y4) * nextsn1
        r1z = z1 + lext1 * (z1 - z4) * nextsn1
        ux = q1x - q0x
        uy = q1y - q0y
        uz = q1z - q0z
        vx = (r1y - r0y) * nz - (r1z - r0z) * ny
        vy = (r1z - r0z) * nx - (r1x - r0x) * nz
        vz = (r1x - r0x) * ny - (r1y - r0y) * nx
        wx = q0x - r0x
        wy = q0y - r0y
        wz = q0z - r0z
        s = -(vx * wx + vy * wy + vz * wz) / (vx * ux + vy * uy + vz * uz)
        p1x = q0x + s * (q1x - q0x)
        p1y = q0y + s * (q1y - q0y)
        p1z = q0z + s * (q1z - q0z)
        # Corner p2
        q0x = x1 + lext1 * (x1 - x4) * nextsn1
        q0y = y1 + lext1 * (y1 - y4) * nextsn1
        q0z = z1 + lext1 * (z1 - z4) * nextsn1
        q1x = x2 + lext1 * (x2 - x3) * nextsn1
        q1y = y2 + lext1 * (y2 - y3) * nextsn1
        q1z = z2 + lext1 * (z2 - z3) * nextsn1
        r0x = x3 + lext2 * (x3 - x4) * nextsn1
        r0y = y3 + lext2 * (y3 - y4) * nextsn1
        r0z = z3 + lext2 * (z3 - z4) * nextsn1
        r1x = x2 + lext2 * (x2 - x1) * nextsn1
        r1y = y2 + lext2 * (y2 - y1) * nextsn1
        r1z = z2 + lext2 * (z2 - z1) * nextsn1
        ux = q1x - q0x
        uy = q1y - q0y
        uz = q1z - q0z
        vx = (r1y - r0y) * nz - (r1z - r0z) * ny
        vy = (r1z - r0z) * nx - (r1x - r0x) * nz
        vz = (r1x - r0x) * ny - (r1y - r0y) * nx
        wx = q0x - r0x
        wy = q0y - r0y
        wz = q0z - r0z
        s = -(vx * wx + vy * wy + vz * wz) / (vx * ux + vy * uy + vz * uz)
        p2x = q0x + s * (q1x - q0x)
        p2y = q0y + s * (q1y - q0y)
        p2z = q0z + s * (q1z - q0z)
        # Corner p3
        q0x = x2 + lext2 * (x2 - x1) * nextsn1
        q0y = y2 + lext2 * (y2 - y1) * nextsn1
        q0z = z2 + lext2 * (z2 - z1) * nextsn1
        q1x = x3 + lext2 * (x3 - x4) * nextsn1
        q1y = y3 + lext2 * (y3 - y4) * nextsn1
        q1z = z3 + lext2 * (z3 - z4) * nextsn1
        r0x = x4 + lext3 * (x4 - x1) * nextsn1
        r0y = y4 + lext3 * (y4 - y1) * nextsn1
        r0z = z4 + lext3 * (z4 - z1) * nextsn1
        r1x = x3 + lext3 * (x3 - x2) * nextsn1
        r1y = y3 + lext3 * (y3 - y2) * nextsn1
        r1z = z3 + lext3 * (z3 - z2) * nextsn1
        ux = q1x - q0x
        uy = q1y - q0y
        uz = q1z - q0z
        vx = (r1y - r0y) * nz - (r1z - r0z) * ny
        vy = (r1z - r0z) * nx - (r1x - r0x) * nz
        vz = (r1x - r0x) * ny - (r1y - r0y) * nx
        wx = q0x - r0x
        wy = q0y - r0y
        wz = q0z - r0z
        s = -(vx * wx + vy * wy + vz * wz) / (vx * ux + vy * uy + vz * uz)
        p3x = q0x + s * (q1x - q0x)
        p3y = q0y + s * (q1y - q0y)
        p3z = q0z + s * (q1z - q0z)
        # Corner p4
        q0x = x3 + lext3 * (x3 - x2) * nextsn1
        q0y = y3 + lext3 * (y3 - y2) * nextsn1
        q0z = z3 + lext3 * (z3 - z2) * nextsn1
        q1x = x4 + lext3 * (x4 - x1) * nextsn1
        q1y = y4 + lext3 * (y4 - y1) * nextsn1
        q1z = z4 + lext3 * (z4 - z1) * nextsn1
        r0x = x1 + lext4 * (x1 - x2) * nextsn1
        r0y = y1 + lext4 * (y1 - y2) * nextsn1
        r0z = z1 + lext4 * (z1 - z2) * nextsn1
        r1x = x4 + lext4 * (x4 - x3) * nextsn1
        r1y = y4 + lext4 * (y4 - y3) * nextsn1
        r1z = z4 + lext4 * (z4 - z3) * nextsn1
        ux = q1x - q0x
        uy = q1y - q0y
        uz = q1z - q0z
        vx = (r1y - r0y) * nz - (r1z - r0z) * ny
        vy = (r1z - r0z) * nx - (r1x - r0x) * nz
        vz = (r1x - r0x) * ny - (r1y - r0y) * nx
        wx = q0x - r0x
        wy = q0y - r0y
        wz = q0z - r0z
        s = -(vx * wx + vy * wy + vz * wz) / (vx * ux + vy * uy + vz * uz)
        p4x = q0x + s * (q1x - q0x)
        p4y = q0y + s * (q1y - q0y)
        p4z = q0z + s * (q1z - q0z)
        # Recompute side-plane normals per edge depending on its treatment.
        if (ext1 == 1):
            n1x = ny * (p2z - p1z) - nz * (p2y - p1y)
            n1y = nz * (p2x - p1x) - nx * (p2z - p1z)
            n1z = nx * (p2y - p1y) - ny * (p2x - p1x)
        elif (ext1 == -1):
            rx = (n1y + ny) * (z2 - z1) - (n1z + nz) * (y2 - y1)
            ry = (n1z + nz) * (x2 - x1) - (n1x + nx) * (z2 - z1)
            rz = (n1x + nx) * (y2 - y1) - (n1y + ny) * (x2 - x1)
            norme = math.sqrt(rx * rx + ry * ry + rz * rz)
            n1x = rx / norme
            n1y = ry / norme
            n1z = rz / norme
        if (ext2 == 1):
            n2x = ny * (p3z - p2z) - nz * (p3y - p2y)
            n2y = nz * (p3x - p2x) - nx * (p3z - p2z)
            n2z = nx * (p3y - p2y) - ny * (p3x - p2x)
        elif (ext2 == -1):
            rx = (n2y + ny) * (z3 - z2) - (n2z + nz) * (y3 - y2)
            ry = (n2z + nz) * (x3 - x2) - (n2x + nx) * (z3 - z2)
            rz = (n2x + nx) * (y3 - y2) - (n2y + ny) * (x3 - x2)
            norme = math.sqrt(rx * rx + ry * ry + rz * rz)
            n2x = rx / norme
            n2y = ry / norme
            n2z = rz / norme
        if (ext3 == 1):
            n3x = ny * (p4z - p3z) - nz * (p4y - p3y)
            n3y = nz * (p4x - p3x) - nx * (p4z - p3z)
            n3z = nx * (p4y - p3y) - ny * (p4x - p3x)
        elif (ext3 == -1):
            rx = (n3y + ny) * (z4 - z3) - (n3z + nz) * (y4 - y3)
            ry = (n3z + nz) * (x4 - x3) - (n3x + nx) * (z4 - z3)
            rz = (n3x + nx) * (y4 - y3) - (n3y + ny) * (x4 - x3)
            norme = math.sqrt(rx * rx + ry * ry + rz * rz)
            n3x = rx / norme
            n3y = ry / norme
            n3z = rz / norme
        if (ext4 == 1):
            n4x = ny * (p1z - p4z) - nz * (p1y - p4y)
            n4y = nz * (p1x - p4x) - nx * (p1z - p4z)
            n4z = nx * (p1y - p4y) - ny * (p1x - p4x)
        elif (ext4 == -1):
            rx = (n4y + ny) * (z1 - z4) - (n4z + nz) * (y1 - y4)
            ry = (n4z + nz) * (x1 - x4) - (n4x + nx) * (z1 - z4)
            rz = (n4x + nx) * (y1 - y4) - (n4y + ny) * (x1 - x4)
            norme = math.sqrt(rx * rx + ry * ry + rz * rz)
            n4x = rx / norme
            n4y = ry / norme
            n4z = rz / norme
        # Top corners p5..p8: intersection of the top plane (offset h along
        # the quad normal) with two adjacent side planes, via 3-plane solve.
        dh = nx * (x1 + h * nx) + ny * (y1 + h * ny) + nz * (z1 + h * nz)
        d1 = n1x * p1x + n1y * p1y + n1z * p1z
        d2 = n2x * p2x + n2y * p2y + n2z * p2z
        d3 = n3x * p3x + n3y * p3y + n3z * p3z
        d4 = n4x * p4x + n4y * p4y + n4z * p4z
        n41 = nx * n4y * n1z - nx * n4z * n1y + ny * n4z * n1x - ny * n4x * n1z + nz * n4x * n1y - nz * n4y * n1x
        p5x = (dh * (n4y * n1z - n4z * n1y) + d4 * (n1y * nz - n1z * ny) + d1 * (ny * n4z - nz * n4y)) / n41
        p5y = (dh * (n4z * n1x - n4x * n1z) + d4 * (n1z * nx - n1x * nz) + d1 * (nz * n4x - nx * n4z)) / n41
        p5z = (dh * (n4x * n1y - n4y * n1x) + d4 * (n1x * ny - n1y * nx) + d1 * (nx * n4y - ny * n4x)) / n41
        n12 = nx * n1y * n2z - nx * n1z * n2y + ny * n1z * n2x - ny * n1x * n2z + nz * n1x * n2y - nz * n1y * n2x
        p6x = (dh * (n1y * n2z - n1z * n2y) + d1 * (n2y * nz - n2z * ny) + d2 * (ny * n1z - nz * n1y)) / n12
        p6y = (dh * (n1z * n2x - n1x * n2z) + d1 * (n2z * nx - n2x * nz) + d2 * (nz * n1x - nx * n1z)) / n12
        p6z = (dh * (n1x * n2y - n1y * n2x) + d1 * (n2x * ny - n2y * nx) + d2 * (nx * n1y - ny * n1x)) / n12
        n23 = nx * n2y * n3z - nx * n2z * n3y + ny * n2z * n3x - ny * n2x * n3z + nz * n2x * n3y - nz * n2y * n3x
        p7x = (dh * (n2y * n3z - n2z * n3y) + d2 * (n3y * nz - n3z * ny) + d3 * (ny * n2z - nz * n2y)) / n23
        p7y = (dh * (n2z * n3x - n2x * n3z) + d2 * (n3z * nx - n3x * nz) + d3 * (nz * n2x - nx * n2z)) / n23
        p7z = (dh * (n2x * n3y - n2y * n3x) + d2 * (n3x * ny - n3y * nx) + d3 * (nx * n2y - ny * n2x)) / n23
        n34 = nx * n3y * n4z - nx * n3z * n4y + ny * n3z * n4x - ny * n3x * n4z + nz * n3x * n4y - nz * n3y * n4x
        p8x = (dh * (n3y * n4z - n3z * n4y) + d3 * (n4y * nz - n4z * ny) + d4 * (ny * n3z - nz * n3y)) / n34
        p8y = (dh * (n3z * n4x - n3x * n4z) + d3 * (n4z * nx - n4x * nz) + d4 * (nz * n3x - nx * n3z)) / n34
        p8z = (dh * (n3x * n4y - n3y * n4x) + d3 * (n4x * ny - n4y * nx) + d4 * (nx * n3y - ny * n3x)) / n34
        # Side lengths of the (possibly extended) base quad p1..p4.
        l1 = math.sqrt((p1x - p2x) * (p1x - p2x) + (p1y - p2y) * (p1y - p2y) + (p1z - p2z) * (p1z - p2z))
        l2 = math.sqrt((p2x - p3x) * (p2x - p3x) + (p2y - p3y) * (p2y - p3y) + (p2z - p3z) * (p2z - p3z))
        l3 = math.sqrt((p3x - p4x) * (p3x - p4x) + (p3y - p4y) * (p3y - p4y) + (p3z - p4z) * (p3z - p4z))
        l4 = math.sqrt((p4x - p1x) * (p4x - p1x) + (p4y - p1y) * (p4y - p1y) + (p4z - p1z) * (p4z - p1z))
        # In-plane distributions, clustered toward wall edges (ext == 0).
        distribi1 = G.cart((0, 0, 0), (1. / (ni - 1), 1, 1), (ni, 1, 1))
        distribi2 = G.cart((0, 0, 0), (1. / (ni - 1), 1, 1), (ni, 1, 1))
        if (ext4 == 0):
            distribi1 = G.enforcePlusX(distribi1, hf / max(l1, l3), nk - 1, add)
            distribi2 = G.enforceMoinsX(distribi2, hf / max(l1, l3), nk - 1, add)
        if (ext2 == 0):
            distribi1 = G.enforceMoinsX(distribi1, hf / max(l1, l3), nk - 1, add)
            distribi2 = G.enforcePlusX(distribi2, hf / max(l1, l3), nk - 1, add)
        distribj = G.cart((0, 0, 0), (1. / (nj - 1), 1, 1), (nj, 1, 1))
        if (ext1 == 0):
            distribj = G.enforcePlusX(distribj, hf / max(l2, l4), nk - 1, add)
        if (ext3 == 0):
            distribj = G.enforceMoinsX(distribj, hf / max(l2, l4), nk - 1, add)
        # Build the six faces and fill the volume by transfinite interpolation.
        Q0 = meshQuad((p1x, p1y, p1z), (p2x, p2y, p2z), (p3x, p3y, p3z), (p4x, p4y, p4z), distribi1, distribj)
        Q1 = meshQuad((p2x, p2y, p2z), (p1x, p1y, p1z), (p5x, p5y, p5z), (p6x, p6y, p6z), distribi2, distribk)
        Q2 = meshQuad((p2x, p2y, p2z), (p3x, p3y, p3z), (p7x, p7y, p7z), (p6x, p6y, p6z), distribj, distribk)
        Q3 = meshQuad((p3x, p3y, p3z), (p4x, p4y, p4z), (p8x, p8y, p8z), (p7x, p7y, p7z), distribi2, distribk)
        Q4 = meshQuad((p1x, p1y, p1z), (p4x, p4y, p4z), (p8x, p8y, p8z), (p5x, p5y, p5z), distribj, distribk)
        Qh = meshQuad((p5x, p5y, p5z), (p6x, p6y, p6z), (p7x, p7y, p7z), (p8x, p8y, p8z), distribi1, distribj)
        m = G.TFI([Q4, Q2, Q1, Q3, Q0, Qh])
        mesh.append(m)
        # Walls: index ranges of the wall faces, trimmed where an edge is an
        # extension.  NOTE(review): `range` shadows the builtin; kept as-is.
        rangesw = []
        if (ext4 != 1): i1 = 1
        else: i1 = next + 1
        if (ext2 != 1): i2 = m[2]
        else: i2 = m[2] - next
        if (ext1 != 1): j1 = 1
        else: j1 = next + 1
        if (ext3 != 1): j2 = m[3]
        else: j2 = m[3] - next
        range = [i1, i2, j1, j2, 1, 1]
        rangesw.append(range)
        if (ext1 == 0):
            if (ext4 == 1):
                if (ext2 == 1): range = [next + 1, m[2] - next, 1, 1, 1, m[4]]
                else: range = [next + 1, m[2], 1, 1, 1, m[4]]
            else:
                if (ext2 == 1): range = [1, m[2] - next, 1, 1, 1, m[4]]
                else: range = [1, m[2], 1, 1, 1, m[4]]
            rangesw.append(range)
        if (ext2 == 0):
            if (ext1 == 1):
                if (ext3 == 1): range = [m[2], m[2], next + 1, m[3] - next, 1, m[4]]
                else: range = [m[2], m[2], next + 1, m[3], 1, m[4]]
            else:
                if (ext3 == 1): range = [m[2], m[2], 1, m[3] - next, 1, m[4]]
                else: range = [m[2], m[2], 1, m[3], 1, m[4]]
            rangesw.append(range)
        if (ext3 == 0):
            if (ext4 == 1):
                if (ext2 == 1): range = [next + 1, m[2] - next, m[3], m[3], 1, m[4]]
                else: range = [next + 1, m[2], m[3], m[3], 1, m[4]]
            else:
                if (ext2 == 1): range = [1, m[2] - next, m[3], m[3], 1, m[4]]
                else: range = [1, m[2], m[3], m[3], 1, m[4]]
            rangesw.append(range)
        if (ext4 == 0):
            if (ext1 == 1):
                if (ext3 == 1): range = [1, 1, next + 1, m[3] - next, 1, m[4]]
                else: range = [1, 1, next + 1, m[3], 1, m[4]]
            else:
                if (ext3 == 1): range = [1, 1, 1, m[3] - next, 1, m[4]]
                else: range = [1, 1, 1, m[3], 1, m[4]]
            rangesw.append(range)
        walls.append(rangesw)
    return [mesh, walls, h, density]
# - conformizeNGon (array) - import Generator as G import Converter as C import Transform as T a = G.cartNGon((0, 0, 0), (0.1, 0.1, 1), (11, 11, 1)) b = G.cartNGon((1., 0, 0), (0.1, 0.2, 1), (11, 6, 1)) a = G.cartNGon((0, 0, 0), (1, 1, 1), (3, 3, 1)) b = G.cartNGon((2., 0, 0), (2, 2, 1), (2, 2, 1)) res = T.join(a, b) res2 = C.conformizeNGon(res) C.convertArrays2File(res2, 'out.plt')
# - projectAllDirs (array) - import Geom as D import Converter as C import Generator as G import Transform as T import KCore.test as test # Structure a = D.sphere((0, 0, 0), 1., 20) a = C.initVars(a, 'F', 1) b = G.cart((1.1, -0.1, -0.1), (0.1, 0.1, 0.1), (1, 5, 5)) n = G.getNormalMap(b) n = C.center2Node(n) b = C.addVars([b, n]) b = C.initVars(b, 'F', 1) c = T.projectAllDirs([b], [a], ['sx', 'sy', 'sz']) test.testA(c, 1) # Non structure a = D.sphere((0, 0, 0), 1., 20) a = C.initVars(a, 'F', 1) b = G.cartTetra((1.1, -0.1, -0.1), (0.1, 0.1, 0.1), (1, 5, 5)) n = G.getNormalMap(b) n = C.center2Node(n) b = C.addVars([b, n]) b = C.initVars(b, 'F', 1) c = T.projectAllDirs([b], [a], ['sx', 'sy', 'sz']) test.testA(c, 2) a = C.convertArray2Tetra(a) b = G.cartTetra((1.1, -0.1, -0.1), (0.1, 0.1, 0.1), (1, 5, 5))
def generateMock(tables):
    """Write generated/MockModule.py: a DBMock class with one mock per table.

    The emitted module contains random-value helper functions, a DBMock
    container whose __init__ instantiates every table mock in dependency
    order, and one nested class per table whose __init__ fills each column
    with either the referenced mock's value or a random value of the
    column's type.  Python 2 code (print statement).

    NOTE(review): the interior layout of the emitted template strings was
    reconstructed from a whitespace-mangled original; the emitted tokens
    (helpers, class skeletons, per-line formats) are unchanged.
    """
    content = tables.content
    # Static preamble of the generated module: value helpers + class header.
    mockModule ="""
import string
import random
import uuid as uid
import hashlib
import datetime


def randomText(size=64 , chars=string.ascii_letters + string.digits + string.punctuation):
    return ''.join(random.choice(chars) for _ in range(random.randint(5, size)))


def randomEmail(size=64):
    return randomText(size - 10) + "@sahabe.de"


def randomFixedLengthText(size=64, chars=string.ascii_letters + string.digits + string.punctuation):
    return ''.join(random.choice(chars) for _ in range(size))


def uuid():
    return str(uid.uuid4())


def timeStamp():
    timeStamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    return str(timeStamp)


def SHA2():
    return hashlib.sha256(randomText(16)).hexdigest()


def MD5():
    return hashlib.md5(randomText(16)).hexdigest()


class DBMock():
"""
    # DBMock.__init__: instantiate one mock attribute per table, passing the
    # already-created mocks it depends on as constructor arguments.
    init ="""    def __init__(self):\n"""
    for table in content:
        elementName = generator.asElementName(table.name)
        className = generator.asClassName(table.name)
        dependencies = generator.getDependencies(tables, table.name)
        classArgs = ""
        for dependency in dependencies:
            classArgs += "self.%s, "%generator.asElementName(dependency)
        classArgs = classArgs.rstrip(", ")
        init +="%sself.%s = DBMock.%s(%s)\n"%(" "*8, elementName, className, classArgs)
    # Nested per-table mock classes.
    classes =""
    for table in content:
        elementName = generator.asElementName(table.name)
        className = generator.asClassName(table.name)
        dependencies = generator.getDependencies(tables, table.name)
        classArgs = ""
        for dependency in dependencies:
            classArgs += ", %s"%generator.asElementName(dependency)
        tableClass = """
    class %s(object):
        def __init__(self%s):\n"""%(className, classArgs)
        elements = ""
        for column in table.columns:
            value = ""
            references = column.getReferenceIfExists(table)
            if references is not None:
                # Foreign key: copy the referenced mock's attribute.
                value = "%s.%s"%(generator.asElementName(references.referToTable), generator.asElementName(references.referToColumn))
            else:
                value = generator.randomValueByType(column)
            elements += "%sself.%s = %s\n"%(" "*12, generator.asElementName(column.name), value)
        tableClass += elements
        classes += tableClass
        print "generate Mock class for %s"%className
    module = open("generated/MockModule.py", "w+")
    module.write(mockModule+ init+ classes)
    module.close()
# - addNormalLayers (array) - import Generator as G import Converter as C import Geom as D d = C.array('d', 3, 1, 1) d[1][0, 0] = 0.1 d[1][0, 1] = 0.2 d[1][0, 2] = 0.3 a = D.sphere((0, 0, 0), 1, 50) a = G.addNormalLayers(a, d) C.convertArrays2File([a], 'out.plt')
def main():
    """Adversarial (IRGAN-style) training loop for QA ranking: alternately
    trains the discriminator on generator-sampled negatives and the generator
    by policy gradient using the discriminator's reward."""
    with tf.Graph().as_default():
        with tf.device("/gpu:1"):
            session_conf = tf.ConfigProto(
                allow_soft_placement=FLAGS.allow_soft_placement,
                log_device_placement=FLAGS.log_device_placement)
            sess = tf.Session(config=session_conf)
            with sess.as_default(), open(log_precision, "w") as log, open(
                    loss_precision, "w") as loss_log:
                # Warm-start both models from a pre-trained checkpoint.
                DIS_MODEL_FILE = "model/pre-trained.model"  # overfitted DNS
                param = pickle.load(open(DIS_MODEL_FILE, "rb"))
                # param= None
                loss_type = "pair"
                discriminator = Discriminator.Discriminator(
                    sequence_length=FLAGS.max_sequence_length,
                    batch_size=FLAGS.batch_size,
                    vocab_size=len(vocab),
                    embedding_size=FLAGS.embedding_dim,
                    filter_sizes=list(map(int, FLAGS.filter_sizes.split(","))),
                    num_filters=FLAGS.num_filters,
                    learning_rate=FLAGS.learning_rate,
                    l2_reg_lambda=FLAGS.l2_reg_lambda,
                    # embeddings=embeddings,
                    embeddings=None,
                    paras=param,
                    loss=loss_type)
                # Generator trains with a 10x smaller learning rate.
                generator = Generator.Generator(
                    sequence_length=FLAGS.max_sequence_length,
                    batch_size=FLAGS.batch_size,
                    vocab_size=len(vocab),
                    embedding_size=FLAGS.embedding_dim,
                    filter_sizes=list(map(int, FLAGS.filter_sizes.split(","))),
                    num_filters=FLAGS.num_filters,
                    learning_rate=FLAGS.learning_rate * 0.1,
                    l2_reg_lambda=FLAGS.l2_reg_lambda,
                    # embeddings=embeddings,
                    embeddings=None,
                    paras=param,
                    loss=loss_type)
                sess.run(tf.global_variables_initializer())
                # evaluation(sess,discriminator,log,0)
                for i in range(FLAGS.num_epochs):
                    if i > 0:
                        # --- Discriminator phase: train on (q, pos, neg)
                        # triples whose negatives come from the generator.
                        samples = generate_gan(sess, generator)
                        # for j in range(FLAGS.d_epochs_num):
                        for _index, batch in enumerate(
                                insurance_qa_data_helpers.batch_iter(
                                    samples,
                                    num_epochs=FLAGS.d_epochs_num,
                                    batch_size=FLAGS.batch_size,
                                    shuffle=True)):  # try:
                            feed_dict = {
                                discriminator.input_x_1: batch[:, 0],
                                discriminator.input_x_2: batch[:, 1],
                                discriminator.input_x_3: batch[:, 2]
                            }
                            _, step, current_loss, accuracy = sess.run([
                                discriminator.train_op,
                                discriminator.global_step,
                                discriminator.loss, discriminator.accuracy
                            ], feed_dict)
                            line = ("%s: DIS step %d, loss %f with acc %f " %
                                    (datetime.datetime.now().isoformat(),
                                     step, current_loss, accuracy))
                            if _index % 10 == 0:
                                print(line)
                            loss_log.write(line + "\n")
                            loss_log.flush()
                        evaluation(sess, discriminator, log, i)
                    # --- Generator phase: policy-gradient updates using the
                    # discriminator's reward on sampled negatives.
                    for g_epoch in range(FLAGS.g_epochs_num):
                        for _index, pair in enumerate(raw):
                            q = pair[2]
                            a = pair[3]
                            # Candidate pool: pools_size-1 random answers plus
                            # the answer aligned with this question index.
                            neg_alist_index = [
                                item for item in range(len(alist))
                            ]
                            sampled_index = np.random.choice(
                                neg_alist_index,
                                size=[FLAGS.pools_size - 1],
                                replace=False)
                            sampled_index = list(sampled_index)
                            sampled_index.append(_index)
                            pools = np.array(alist)[sampled_index]
                            samples = insurance_qa_data_helpers.loadCandidateSamples(
                                q, a, pools, vocab)
                            # Score every candidate with the generator.
                            predicteds = []
                            for batch in insurance_qa_data_helpers.batch_iter(
                                    samples, batch_size=FLAGS.batch_size):
                                feed_dict = {
                                    generator.input_x_1: batch[:, 0],
                                    generator.input_x_2: batch[:, 1],
                                    generator.input_x_3: batch[:, 2]
                                }
                                predicted = sess.run(generator.gan_score,
                                                     feed_dict)
                                predicteds.extend(predicted)
                            # Temperature-scaled softmax over candidate scores
                            # gives the negative-sampling distribution.
                            exp_rating = np.exp(
                                np.array(predicteds) *
                                FLAGS.sampled_temperature)
                            prob = exp_rating / np.sum(exp_rating)
                            neg_index = np.random.choice(
                                np.arange(len(pools)),
                                size=FLAGS.gan_k,
                                p=prob,
                                replace=False)  # sample FLAGS.gan_k negatives
                            subsamples = np.array(
                                insurance_qa_data_helpers.loadCandidateSamples(
                                    q, a, pools[neg_index], vocab))
                            feed_dict = {
                                discriminator.input_x_1: subsamples[:, 0],
                                discriminator.input_x_2: subsamples[:, 1],
                                discriminator.input_x_3: subsamples[:, 2]
                            }
                            reward = sess.run(
                                discriminator.reward, feed_dict
                            )  # reward= 2 * (tf.sigmoid( score_13 ) - 0.5)
                            samples = np.array(samples)
                            feed_dict = {
                                generator.input_x_1: samples[:, 0],
                                generator.input_x_2: samples[:, 1],
                                generator.neg_index: neg_index,
                                generator.input_x_3: samples[:, 2],
                                generator.reward: reward
                            }
                            _, step, current_loss, positive, negative = sess.run(
                                # Ideally a softmax over the full candidate
                                # set, but that would be too expensive here.
                                [
                                    generator.gan_updates,
                                    generator.global_step,
                                    generator.gan_loss, generator.positive,
                                    generator.negative
                                ],  # self.prob= tf.nn.softmax( self.cos_13)
                                feed_dict
                            )  # self.gan_loss = -tf.reduce_mean(tf.log(self.prob) * self.reward)
                            line = (
                                "%s: GEN step %d, loss %f positive %f negative %f"
                                % (datetime.datetime.now().isoformat(), step,
                                   current_loss, positive, negative))
                            if _index % 100 == 0:
                                print(line)
                            loss_log.write(line + "\n")
                            loss_log.flush()
                        evaluation(sess, generator, log,
                                   i * FLAGS.g_epochs_num + g_epoch)
                        log.flush()
# - extCenter2Node (array) -
import Converter as C
import Generator as G
import KCore.test as test

dims = (30, 40, 1)
mesh = G.cart((0, 0, 0), (1, 1, 1), dims)
mesh = C.initVars(mesh, 'F={x}*{x}+{y}*{y}')
# Round-trip the field through extended centers and back to nodes.
centers = C.node2ExtCenter(mesh)
mesh = C.extCenter2Node(centers)
test.testA([mesh], 1)
# - getActivePoint (array) -
import Generator as G
import CPlot
import time

grid = G.cart((0, 0, 0), (1, 1, 1), (5, 5, 5))
CPlot.display([grid])

# Poll until the user clicks a point in the plot window.
point = []
while not point:
    point = CPlot.getActivePoint()
    time.sleep(0.1)
print('ActivePoint: ', point)
#>> ActivePoint: [3.9996489035268743, 2.127948294736359, 2.41771355073051]
# - octree (array) - import Generator as G import Geom as D import KCore.test as test # cas 2D : contours->QUAD s = D.circle((0, 0, 0), 1., N=100) snear = 0.1 res = G.octree([s], [snear], dfar=5.) test.testA([res], 1) s = D.circle((0, 0, 0), 1., N=100) snear = 0.1 res = G.octree([s], [snear], dfar=5., balancing=1) test.testA([res], 2) # cas 3D : surface TRI->HEXA s = D.sphere((0, 0, 0), 1., 100) snear = 0.1 res = G.octree([s], [snear], dfar=5., balancing=0) test.testA([res], 3) s = D.sphere((0, 0, 0), 1., 100) snear = 0.1 res = G.octree([s], [snear], dfar=5., balancing=1) test.testA([res], 4) # cas liste de dfar s = D.sphere((0, 0, 0), 1., 100) snear = 0.1 s2 = D.sphere((8, 8, 8), 1, 100) res = G.octree([s, s2], [snear, snear],
def test_default_values(self):
    """The generator should have emitted at least one address after a short run."""
    vagen = gen.VirtualAddressGenerator(self.simulation_values,
                                        self.address_stream, self.event)
    # Give the generator time to produce output before checking.
    time.sleep(5)
    self.assertTrue(len(vagen.address_stream) > 0)
# NOTE(review): this fragment begins mid-function -- the `def` header and the
# deg == 1 branch are missing from this chunk.  The visible branches return a
# polynomial of the requested degree in (x, y, z).
elif deg == 2 : return x*x + 2.*y*y + 3*z
elif deg == 3 : return x*x*y + 2.*y*y*y + 3*z
elif deg == 4 : return x*x*x*x + 2.*y*y*y*y +z*z
elif deg == 5 : return 2*x*x*x*x*x + 2.*y*y*z + z*z
else :
    print 'Error : unknown degree of polynomials'
    import sys
    sys.exit()

# Node mesh
ni = 101; nj = 101; nk = 11
m = G.cylinder((0,0,0), 1., 10.,45., 145., 1., (ni,nj,nk))
m = T.reorder(m, (-1,2,3))
#
# NOTE(review): `as` is a reserved word from Python 2.6 onwards -- this
# variable name breaks on modern interpreters and should be renamed.
as=[]
as.append(m)
# init by function
m = C.addVars(m, 'F')
m = C.initVars(m, 'F', F, ['x','y','z'])
# Build an extraction mesh
ni2 = 30; nj2 = 30
a = G.cart( (-1.,2.,0.4), (1./(ni2-1),1./(nj2-1),0.1), (ni2,nj2,2))
as.append(a)
C.convertArrays2File(as,"out.plt","bin_tp")
#
# Extract the solution on the extraction mesh
def generate(templatePath, outputPath):
    """Render the template at *templatePath* into the file at *outputPath*.

    Both arguments are sequences of path components.  The output file is
    opened via a context manager so the handle is closed deterministically
    (the original passed an open() result that was never closed -- a
    file-handle leak, and buffered data could be lost on some interpreters).
    """
    with open(os.path.join(*outputPath), "w") as outputFile:
        Generator.generate(templates=[Resource.getPath(*templatePath)],
                           namespace=namespace,
                           outputFile=outputFile)
# - projectDir (array) -
import Geom as D
import Converter as C
import Generator as G
import Transform as T

surface = D.sphere((0, 0, 0), 1., 20)
sheet = G.cart((1.1, -0.1, -0.1), (0.03, 0.03, 0.03), (1, 50, 50))
# Directional projection of the sheet onto the sphere, plain and smoothed.
projected = T.projectDir(sheet, [surface], (1., 0, 0))
smoothed = T.projectDir([sheet], [surface], (1., 0, 0), smooth=1)
C.convertArrays2File([surface, sheet, projected] + smoothed, 'out.plt')
# - interiorFaces (array) -
import Converter as C
import Post as P
import Generator as G
import KCore.test as test

# Interior faces in the broad sense: faces having two neighbouring cells.
mesh = G.cartTetra((0, 0, 0), (1, 1., 1), (20, 2, 1))
faces = P.interiorFaces(mesh)
test.testA([faces], 1)

# Interior faces in the strict sense: faces whose nodes are all interior.
mesh = G.cartTetra((0, 0, 0), (1, 1., 1), (20, 3, 1))
faces = P.interiorFaces(mesh, 1)
test.testA([faces], 2)

# Strict sense again, on a mesh where no such face exists.
mesh = G.cartTetra((0, 0, 0), (1, 1., 1), (20, 2, 1))
faces = P.interiorFaces(mesh, 1)
if faces[1].shape[1] != 0:
    print('FAILED...')
# - snapFront (array) -
import Generator as G
import Converter as C
import Geom as D
import Connector as X
import Transform as T

contour = D.circle((0, 0, 0), 1., N=100)
contour = T.addkplane(contour)

# Regular Cartesian background grid built around the contour's bounding box.
BB = G.bbox([contour])
ni = 100
nj = 100
nk = 3
xmin, ymin, zmin = BB[0], BB[1], BB[2] - 0.5
xmax, ymax, zmax = BB[3], BB[4], BB[5] + 0.5
# Isotropic spacing: the finer of the two per-direction spacings.
hi = (xmax - xmin) / (ni - 1)
hj = (ymax - ymin) / (nj - 1)
h = min(hi, hj)
# Re-derive point counts for that spacing, with a margin of cells.
ni = int((xmax - xmin) / h) + 7
nj = int((ymax - ymin) / h) + 7
b = G.cart((xmin - 3 * h, ymin - 3 * h, zmin), (h, h, 1.), (ni, nj, nk))
celln = C.array('cellN', ni, nj, nk)
def TFITri(a1, a2, a3):
    """Fill the triangle bounded by the three curves a1, a2, a3 (i-arrays)
    with three four-sided TFI patches meeting at the triangle's barycenter.
    Relies on Python 2 integer division for the parity/index arithmetic."""
    import Transform as T
    import Generator as G
    import Geom as D
    N1 = a1[2]
    N2 = a2[2]
    N3 = a3[2]
    # Check N: N3-N2+N1 must be odd so the split index is an integer.
    Nt = N3 - N2 + N1 + 1
    if (Nt / 2 - Nt * 0.5 != 0):  # parity test via integer vs float division
        raise ValueError("TFITri: N3-N2+N1 must be odd.")
    N = Nt / 2
    if (N < 2):
        raise ValueError(
            "TFITri: invalid number of points for this operation.")
    if (N > N1 - 1):
        raise ValueError(
            "TFITri: invalid number of points for this operation.")
    if (N > N2 - 1):
        raise ValueError(
            "TFITri: invalid number of points for this operation.")
    # Ensure continuity: a1's last point must coincide with a2's first point;
    # otherwise swap a2/a3 and/or reverse curves so they chain head-to-tail.
    P0 = (a1[1][0, N1 - 1], a1[1][1, N1 - 1], a1[1][2, N1 - 1])
    P00 = (a2[1][0, 0], a2[1][1, 0], a2[1][2, 0])
    P01 = (a2[1][0, N2 - 1], a2[1][1, N2 - 1], a2[1][2, N2 - 1])
    if (abs(P0[0] - P00[0]) + abs(P0[1] - P00[1]) + abs(P0[2] - P00[2]) > 1.e-6
            and abs(P0[0] - P01[0]) + abs(P0[1] - P01[1]) +
            abs(P0[2] - P01[2]) > 1.e-6):
        t = a2
        a2 = a3
        a3 = t
        N2 = a2[2]
        N3 = a3[2]
    P00 = (a2[1][0, 0], a2[1][1, 0], a2[1][2, 0])
    if abs(P0[0] - P00[0]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    elif abs(P0[1] - P00[1]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    elif abs(P0[2] - P00[2]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    # Same continuity fix between a2's end and a3's start.
    P0 = (a2[1][0, N2 - 1], a2[1][1, N2 - 1], a2[1][2, N2 - 1])
    P00 = (a3[1][0, 0], a3[1][1, 0], a3[1][2, 0])
    if abs(P0[0] - P00[0]) > 1.e-6:
        a3 = T.reorder(a3, (-1, 2, 3))
    elif abs(P0[1] - P00[1]) > 1.e-6:
        a3 = T.reorder(a3, (-1, 2, 3))
    elif abs(P0[2] - P00[2]) > 1.e-6:
        a3 = T.reorder(a3, (-1, 2, 3))
    #C.convertArrays2File([a1,a2,a3], 'order.plt')
    # Center: barycenter of the three curves with uniform weights.
    w1 = C.array('weight', N1, 1, 1)
    w1 = C.initVars(w1, 'weight', 1)
    w2 = C.array('weight', N2, 1, 1)
    w2 = C.initVars(w2, 'weight', 1)
    w3 = C.array('weight', N3, 1, 1)
    w3 = C.initVars(w3, 'weight', 1)
    CC = G.barycenter([a1, a2, a3], [w1, w2, w3])
    # Subzones: split each boundary curve in two at the junction indices.
    s1 = T.subzone(a1, (1, 1, 1), (N1 - N + 1, 1, 1))
    s2 = T.subzone(a1, (N1 - N + 1, 1, 1), (N1, 1, 1))
    s3 = T.subzone(a2, (1, 1, 1), (N2 - N1 + N, 1, 1))
    s4 = T.subzone(a2, (N2 - N1 + N, 1, 1), (N2, 1, 1))
    s5 = T.subzone(a3, (1, 1, 1), (N, 1, 1))
    s6 = T.subzone(a3, (N, 1, 1), (N3, 1, 1))
    # Lines from the center to the three split points.
    index = N1 - N
    P01 = (a1[1][0, index], a1[1][1, index], a1[1][2, index])
    index = N2 - N1 + N - 1
    P12 = (a2[1][0, index], a2[1][1, index], a2[1][2, index])
    index = N - 1
    P23 = (a3[1][0, index], a3[1][1, index], a3[1][2, index])
    l1 = D.line(CC, P01, N=N2 - N1 + N)
    l2 = D.line(CC, P12, N=N)
    l3 = D.line(CC, P23, N=N1 - N + 1)
    # TFIs: the three four-sided patches filling the triangle.
    m1 = G.TFI([s1, l1, l3, s6])
    m2 = G.TFI([l1, s2, s3, l2])
    m3 = G.TFI([l2, s4, s5, l3])
    #return [l1,l2,l3,s1,s2,s3,s4,s5,s6]
    #return [s1,l1,l3,s6,m1]
    return [m1, m2, m3]
# - enforcePlusZ -
import Generator as G
import KCore.test as test

# Distribution
npts_i, npts_j, npts_k = 50, 1, 50
grid = G.cart((0, 0, 0),
              (1. / (npts_i - 1), 1., 0.5 / (npts_k - 1)),
              (npts_i, npts_j, npts_k))
# Enforce a 1.e-3 first spacing on the +Z side of the distribution.
refined = G.enforcePlusZ(grid, 1.e-3, 10, 20)
test.testA([refined], 1)
def new(self, size, name):
    """Create a new map of the given size and name, delegating to the
    Generator module with this instance's block size."""
    #Map-Size
    Generator.create(size, name, self.blocksize)
def PatternTheory(inputFile, semanticBondPath, topK, outFile):
    """Run pattern-theory inference: build one 'Feature' generator per input
    feature file, one label generator per candidate label (wired together by
    support and semantic bonds), run MCMC inference, and write the topK
    configurations to *outFile*."""
    BondID = count(0)    # global bond-id sequence
    genID = count(0)     # global generator-id sequence
    localSwapSpace = {}  # feature name -> list of candidate label generators
    topKLabel = 0        # >0 keeps only the best topKLabel labels per feature
    priorScale = 100     # scale applied to semantic-bond weights
    #Load Feature labels and create feature generators
    equivalence = {}
    feature_generators = []
    with open(inputFile) as f:
        for featFile in f:
            feat = str.split(featFile.replace('\n', ''))
            # NOTE(review): the line above is immediately overwritten -- only
            # the path-rewritten split below is actually used.
            feat = str.split(featFile.replace('/home/saakur/Desktop/', './'))
            # Label -> score table read from the per-feature file.
            equivalence = {}
            with open(feat[1]) as fl:
                for line in fl:
                    l = str.split(line.replace('\n', ''))
                    equivalence[l[0]] = float(l[1])
            if topKLabel > 0:
                # Keep only the topKLabel highest-scoring labels.
                sorted_equiv = sorted(equivalence.items(),
                                      key=operator.itemgetter(1),
                                      reverse=True)
                equivalence = {}
                for k,v in sorted_equiv[:topKLabel]:
                    equivalence[k] = v*supBondWeight
            G = gen.Generator(next(genID), feat[0], feat[0], "Feature", feat[0])
            supBG = sup_b.SupportBond(next(BondID), "SupportBond", "OUT", G.generatorID)
            supBG.compatible = deepcopy(equivalence)
            G.addOutBond(supBG)
            feature_generators.append(G)
            localSwapSpace[G.feature] = []
    filelist = [ f for f in os.listdir(semanticBondPath) if f.endswith(".txt")]
    semBondDict = {}
    #Load semantic bonds
    for semanticBondFile in filelist:
        # Bond name = last '_'-separated token of the filename, sans extension.
        semBondName = str.split(str.split(semanticBondFile.replace('\n', ''), '_').pop(), '.')[0]
        if semBondName not in semBondDict.keys():
            semBondDict[semBondName] = {}
        equivalence = {}
        with open(semanticBondPath+semanticBondFile) as fl:
            for line in fl:
                semBondDict_Concept = {}
                l = str.split(line.replace('\n', ''), ',')
                label = l.pop(0)
                label = str.split(label,'-')[0]
                for l1 in l:
                    l2 = str.split(l1.replace('\n', ''), ':')
                    # Both branches compute the same product; kept as written.
                    if(semBondName != "Similarity"):
                        semBondDict_Concept[l2[0]] = float(l2[1]) * priorScale * semBondWeight
                    else:
                        semBondDict_Concept[l2[0]] = float(l2[1]) * semBondWeight * priorScale
                equivalence[label] = dict(semBondDict_Concept.copy())
        semBondDict[semBondName] = dict((equivalence.copy()))
    #For each of the feature generators, create generators for each of the
    # label possibilties and bonds for each of the generators
    for g in feature_generators:
        #Determine the label category to create generators for them
        genType = g.generatorName
        #For each of the outbound bonds of the feature generators, create the
        # generators for each of the label candidates
        for bondID in g.outBonds.keys():
            b = g.outBonds[bondID]
            #For each of the each of the label candidates, create the generators and bonds
            for label in b.compatible:
                #Create the generator
                G = gen.Generator(next(genID), label, label, genType, g.generatorName)
                #Create and add complementary support bond
                supBG_IN = sup_b.SupportBond(next(BondID), "SupportBond", "IN", G.generatorID)
                G.addInBond(supBG_IN)
                for semBondName in semBondDict.keys():
                    if label in semBondDict[semBondName].keys():
                        #Create a semantic bond
                        semBG_OUT = sem_b.SemanticBond(next(BondID), semBondName, "OUT", G.generatorID)
                        #Add the equivalence table for the semantic bond
                        semBG_OUT.compatible = deepcopy(semBondDict[semBondName][label])
                        #Create complementary semantic bond
                        semBG_IN = sem_b.SemanticBond(next(BondID), semBondName, "IN", G.generatorID)
                        #Add created bonds to the generator
                        G.addOutBond(semBG_OUT)
                        G.addInBond(semBG_IN)
                #Add created generator to list with generators of equivalent modality
                localSwapSpace[G.feature].append(G)
    globalSwapSpace = {}
    for fg in feature_generators:
        globalSwapSpace[fg.generatorID] = fg
    debugFIle = "./debugFile.txt"
    # globalProposalChance helps avoid local minima. Needs to be less than 1
    # when using MCMC candidate proposal.
    globalProposalChance = 1.0
    PS = inf.Inference(localSwapSpace, globalSwapSpace, False, debugFIle, topK, globalProposalChance)
    topKConfig = PS.run_inference()
    topVal = 1
    ftFile = open(outFile, 'w')
    ftFile.write("Top %s results:\n" %topK)
    ftFile.close()
    # Append each of the topK configurations to the output file.
    for key in sorted(topKConfig):
        topKConfig[key].printConfig(outFile, localSwapSpace.keys())
        topVal += 1
    # Clean up for Anneal
    filelist = [ f for f in os.listdir(".") if f.endswith(".state") ]
    for f in filelist:
        os.remove(f)
def TFIHalfO__(a1, a2, weight, offset=0):
    """Fill the region between two curves a1, a2 (i-arrays) with four TFI
    patches: a central patch plus three patches connecting it to the longer
    (round) curve.  Relies on Python 2 integer-division semantics."""
    import Transform as T
    import Geom as D
    Nt1 = a1[2]
    Nt2 = a2[2]
    # Swap so that a2 is always the longer curve.
    if Nt1 > Nt2:
        ap = a2
        a2 = a1
        a1 = ap
        Np = Nt2
        Nt2 = Nt1
        Nt1 = Np
    # the longest is always Nt2
    # Check: reverse a2 if it does not start at a1's first point.
    P0 = (a1[1][0, 0], a1[1][1, 0], a1[1][2, 0])
    P00 = (a2[1][0, 0], a2[1][1, 0], a2[1][2, 0])
    if abs(P0[0] - P00[0]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    elif abs(P0[1] - P00[1]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    elif abs(P0[2] - P00[2]) > 1.e-6:
        a2 = T.reorder(a2, (-1, 2, 3))
    # Round part: anchor points P3/P4 as weighted barycenters emphasizing
    # each half of a2 (Nt2/2 is Python 2 integer division).
    w1 = C.array('weight', Nt1, 1, 1)
    w1 = C.initVars(w1, 'weight', 1.)
    w = C.array('weight', Nt2, 1, 1)
    w = C.initVars(w, 'weight', 1.)
    w[1][0, 0:Nt2 / 2 + 1] = weight
    P3 = G.barycenter([a2, a1], [w, w1])
    w = C.initVars(w, 'weight', 1.)
    w[1][0, Nt2 / 2:Nt2] = weight
    P4 = G.barycenter([a2, a1], [w, w1])
    # Projection: project P3/P4 orthogonally onto a2 (converted to HEXA).
    b = C.convertArray2Hexa(a2)
    b = G.close(b)
    PP = C.array('x,y,z', 2, 1, 1)
    C.setValue(PP, 0, (P3[0], P3[1], P3[2]))
    C.setValue(PP, 1, (P4[0], P4[1], P4[2]))
    PPP = T.projectOrtho(PP, [b])
    PPP = PPP[1]
    PP3 = (PPP[0, 0], PPP[1, 0], PPP[2, 0])
    PP4 = (PPP[0, 1], PPP[1, 1], PPP[2, 1])
    # Snap PP3 to a grid point of a2 (with optional offset); the two parity
    # tests keep the derived point counts consistent.
    indexPP3 = D.getNearestPointIndex(a2, PP3)[0] + offset
    if ((Nt1 - Nt2) / 2 - (Nt1 - Nt2) * 0.5 == 0
            and (indexPP3 + 1) / 2 - (indexPP3 + 1) * 0.5 != 0):
        indexPP3 += 1
    if ((Nt1 - Nt2) / 2 - (Nt1 - Nt2) * 0.5 != 0
            and (indexPP3 + 1) / 2 - (indexPP3 + 1) * 0.5 == 0):
        indexPP3 += 1
    #if (indexPP3 == 0): indexPP3 = 1
    #elif (indexPP3 == (Nt2-1)/2): indexPP3 += -1
    PP3 = (a2[1][0, indexPP3], a2[1][1, indexPP3], a2[1][2, indexPP3])
    N1 = indexPP3 + 1
    indexPP4 = Nt2 - N1  # symmetric counterpart of PP3 on a2
    PP4 = (a2[1][0, indexPP4], a2[1][1, indexPP4], a2[1][2, indexPP4])
    N2 = Nt2 - 2 * N1 + 2
    # Straight part: P1/P2 on a1, symmetric about its middle.
    N3 = (Nt1 - N2 + 2) / 2
    ind = N3 - 1
    P1 = (a1[1][0, ind], a1[1][1, ind], a1[1][2, ind])
    P2 = (a1[1][0, Nt1 - ind - 1], a1[1][1, Nt1 - ind - 1],
          a1[1][2, Nt1 - ind - 1])
    # Lines bounding the central patch and connectors to a2.
    l1 = D.line(P1, P3, N=N1)
    l2 = D.line(P3, P4, N=N2)
    l3 = D.line(P4, P2, N=N1)
    p1 = D.line(P3, PP3, N=N3)
    p2 = D.line(P4, PP4, N=N3)
    # subzones of a1 and a2 matching those lines
    s1 = T.subzone(a1, (1, 1, 1), (ind + 1, 1, 1))
    s2 = T.subzone(a1, (ind + 1, 1, 1), (Nt1 - ind, 1, 1))
    s3 = T.subzone(a1, (Nt1 - ind, 1, 1), (Nt1, 1, 1))
    s4 = T.subzone(a2, (1, 1, 1), (indexPP3 + 1, 1, 1))
    s5 = T.subzone(a2, (indexPP3 + 1, 1, 1), (indexPP4 + 1, 1, 1))
    s6 = T.subzone(a2, (indexPP4 + 1, 1, 1), (Nt2, 1, 1))
    #C.convertArrays2File([l1,l2,l3,p1,p2,s1,s2,s3,s4,s5,s6], 'lines.plt')
    # TFIs: central patch + three boundary patches.
    m = G.TFI([l1, l2, l3, s2])
    m1 = G.TFI([s1, s4, p1, l1])
    m2 = G.TFI([s5, p1, p2, l2])
    m3 = G.TFI([s6, p2, s3, l3])
    return [m, m1, m2, m3]
# - stack (array) -
import Generator as G
import Converter as C
import Transform as T
import KCore.test as test

base = G.cylinder((0, 0, 0), 1, 1.3, 360, 0, 1., (50, 10, 1))
# Second section: same surface rotated 5 deg about x and shifted in z.
top = T.rotate(base, (0, 0, 0), (1, 0, 0), 5.)
top = T.translate(top, (0, 0, 0.5))
# Stack the two sections into a single grid.
stacked = G.stack(base, top)
test.testA([stacked], 1)
def OnGenerate(self, event):
    """Save the current settings, generate the RTEMS header file, and
    notify the user with a modal dialog."""
    self.OnSave(event)
    Generator.create_header(parameters)
    dial = wx.MessageDialog(None, 'Header file Generated', 'RTEMS', wx.OK)
    dial.ShowModal()
    # wx dialogs are not destroyed automatically after ShowModal();
    # destroy explicitly to release the native window resources.
    dial.Destroy()
# Mesh sizes for the demo grids below.
ni = 30
nj = 40


def F(x):
    # Initialization function used for the 'rov' field.
    return M.cos(x)


n = (0., 0.2, 0.)
pt = (0.55, 0.5, 0.)
#pt = (0.1,5,0) # does not work
pt = (5, 5, 0)  # does not work
vect = ['rou', 'rov', 'row']
# Node meshes
# 3D
m1 = G.cart((0, 0, 0), (10. / (ni - 1), 10. / (nj - 1), 1), (ni, nj, 2))
m2 = G.cart((5.5, 0, 0), (9. / (ni - 1), 9. / (nj - 1), 1), (ni, nj, 2))
m = [m1, m2]
m = C.initVars(m, 'rou', 1.)
m = C.initVars(m, 'rov', F, ['x'])
m = C.initVars(m, 'row', 0.)
# 2D
s1 = G.cart((0, 0, 0), (9. / (ni - 1), 9. / (nj - 1), 1), (ni, nj, 1))
s2 = G.cart((5.5, 0, 0), (9. / (ni - 1), 9. / (nj - 1), 1), (ni, nj, 1))
s = [s1, s2]
s = C.initVars(s, 'rou', 1.)
s = C.initVars(s, 'rov', F, ['x'])
s = C.initVars(s, 'row', 0.)
# 3D struct
def sync(options, target, args):
    """Synchronize the makefile.

    The Makefile is opened via a context manager so the handle is closed
    even if Generator.generateMakefile raises (the original leaked the
    handle on error).
    """
    with open("Makefile", "w") as f:
        Generator.generateMakefile(f)
    Log.notice("Makefile synchronized")
def generateTest(tables):
    """Write one generated/<Table>.py test module per table, covering
    insertion, update and drop behavior (see NOTE below about the
    data-type tests)."""
    content = tables.content
    for table in content:
        tableName = generator.asClassName(table.name)
        moduleName = tableName + ".py"
        # Static import header of the generated module.
        imports = """
import Tables as tables
import MockModule as mock
import db.main.DBApiModule as db
from _mysql_exceptions import DataError, OperationalError, IntegrityError
"""
        classDef="""
class %s(tables.Tables):
    '''
    - Test: insertion and fetching data from/to table
    - Test: update entry and its references
    - Test: drop entry and its references
    - Test: references.
    - Test: inserting invalid modifiedAt
    - Test: NOT NULLS constrains
    - Test: UNIQUE constrains
    - Test: FOREIGN KEY CONSTRAINS
    '''
"""%(tableName)
        # __initDependencies: mock attributes plus inserts for referenced rows.
        dependencies="""
    def __initDependencies(self):
        self.initDBMockContents()\n%s%s"""%(generator.attributes(table, 8),
                                            generator.insertReferences(tables, table, 8))
        setUp ="""
    def setUp(self):
        self.connect()
        self.__initDependencies()
"""
        tearDown ="""
    def tearDown(self):
        self.conn.close()
"""
        # Shared SELECT used by the insertion test to fetch the row back.
        select ="""
        rows = db.selectFrom(self.conn, {"%s"}, "*", id=self.%s)
"""%(table.name, table.columns[0].name)
        insertion ="""
    ''' INSERTION TESTS '''

    def testInsertion(self):\n%s%s\n%s"""%(generator.insertToDB(table, 8),
                                           select,
                                           generateAssertEquals(table, "", 8))
        updates ="""
    ''' UPDATE TESTS '''\n\n%s"""%(generateUpdateTest(table, 4))
        drop ="""
    ''' DROP TESTS '''\n\n%s"""%(generateDropTest(tables, table, 4))
        dataTypes ="""
    ''' DATA TYPE TESTS '''\n\n%s"""%(generateDataTypeTest(table, 4))
        # NOTE(review): dataTypes is built but never concatenated into
        # testModule below, so the generated data-type tests are silently
        # dropped -- confirm whether this is intentional.
        testModule = (imports + classDef + dependencies + setUp + tearDown +
                      insertion + updates + drop)
        module = open("generated/"+moduleName, "w+")
        module.write(testModule)
        module.close()
        print "ws_test for %s were generated"%tableName
def TFIO__(a, weight, offset=0):
    """Fill the closed curve a (i-array) with an O-grid: a central TFI patch
    with corners P1..P4 plus four patches joining it to the curve.
    Relies on Python 2 integer-division semantics for index arithmetic."""
    import Transform as T
    import Geom as D
    Nt = a[2]
    # Corner points P1..P4: weighted barycenters emphasizing each quarter
    # of the curve (Nt/4 etc. are Python 2 integer divisions).
    w = C.array('weight', Nt, 1, 1)
    w = C.initVars(w, 'weight', 1.)
    w[1][0, 0:Nt / 4 + 1] = weight
    P1 = G.barycenter(a, w)
    w = C.initVars(w, 'weight', 1.)
    w[1][0, Nt / 4:Nt / 2 + 1] = weight
    P2 = G.barycenter(a, w)
    w = C.initVars(w, 'weight', 1.)
    w[1][0, Nt / 2:3 * Nt / 4 + 1] = weight
    P3 = G.barycenter(a, w)
    w = C.initVars(w, 'weight', 1.)
    w[1][0, 3 * Nt / 4:Nt] = weight
    P4 = G.barycenter(a, w)
    # Compute P'1: orthogonal projection of P1 onto the curve (as HEXA).
    b = C.convertArray2Hexa(a)
    b = G.close(b)
    PP = C.array('x,y,z', 4, 1, 1)
    C.setValue(PP, 0, (P1[0], P1[1], P1[2]))
    C.setValue(PP, 1, (P2[0], P2[1], P2[2]))
    C.setValue(PP, 2, (P3[0], P3[1], P3[2]))
    C.setValue(PP, 3, (P4[0], P4[1], P4[2]))
    PPP = T.projectOrtho(PP, [b])
    PPP = PPP[1]
    PP1 = (PPP[0, 0], PPP[1, 0], PPP[2, 0])
    PP2 = (PPP[0, 1], PPP[1, 1], PPP[2, 1])
    # Snap PP1 to the nearest curve point (with optional offset), wrapping
    # around the closed curve.
    indexPP1 = D.getNearestPointIndex(a, PP1)[0] + offset
    if (indexPP1 < 0): indexPP1 = Nt + indexPP1 - 1
    if (indexPP1 > Nt - 1): indexPP1 = indexPP1 - Nt - 1
    # Renumber a so that it starts at PP1.
    b = T.subzone(a, (1, 1, 1), (indexPP1 + 1, 1, 1))
    c = T.subzone(a, (indexPP1 + 1, 1, 1), (Nt, 1, 1))
    a = T.join(c, b)
    indexPP1 = 0
    PP1 = (a[1][0, indexPP1], a[1][1, indexPP1], a[1][2, indexPP1])
    indexPP2 = D.getNearestPointIndex(a, PP2)[0] - offset
    PP2 = (a[1][0, indexPP2], a[1][1, indexPP2], a[1][2, indexPP2])
    indexPP3 = indexPP1 + Nt / 2  # opposite PP1 on the curve
    PP3 = (a[1][0, indexPP3], a[1][1, indexPP3], a[1][2, indexPP3])
    N1 = indexPP2 - indexPP1 + 1
    N2 = Nt / 2 - N1 + 2
    indexPP4 = indexPP3 + N1 - 1
    PP4 = (a[1][0, indexPP4], a[1][1, indexPP4], a[1][2, indexPP4])
    # Edges of the central patch.
    l1 = D.line(P1, P2, N=N1)
    l2 = D.line(P2, P3, N=N2)
    l3 = D.line(P3, P4, N=N1)
    l4 = D.line(P4, P1, N=N2)
    # Connectors Pi -> PPi: point count Np chosen from the length ratio so
    # the connector spacing roughly matches l1's.
    dist1 = D.getLength(l1)
    p1 = D.line(P1, PP1, N=10)
    dist2 = D.getLength(p1)
    Np = int(dist2 / dist1 * N1) + 1
    p1 = D.line(P1, PP1, N=Np)
    p2 = D.line(P2, PP2, N=Np)
    p3 = D.line(P3, PP3, N=Np)
    p4 = D.line(P4, PP4, N=Np)
    # subzones of the curve between consecutive PPi
    s1 = T.subzone(a, (indexPP1 + 1, 1, 1), (indexPP2 + 1, 1, 1))
    s2 = T.subzone(a, (indexPP2 + 1, 1, 1), (indexPP3 + 1, 1, 1))
    s3 = T.subzone(a, (indexPP3 + 1, 1, 1), (indexPP4 + 1, 1, 1))
    s4 = T.subzone(a, (indexPP4 + 1, 1, 1), (Nt, 1, 1))
    # TFIs: central patch + four outer patches.
    m = G.TFI([l1, l2, l3, l4])
    m1 = G.TFI([s1, p1, p2, l1])
    m2 = G.TFI([s2, p2, p3, l2])
    m3 = G.TFI([s3, p3, p4, l3])
    m4 = G.TFI([s4, p4, p1, l4])
    return [m, m1, m2, m3, m4]
# - addNormalLayers (array) -
import Generator as G
import Converter as C
import Geom as D
import KCore.test as test

# Tests with smoothing
dist = C.array('d', 3, 1, 1)
for idx, height in enumerate((0.1, 0.2, 0.3)):
    dist[1][0, idx] = height

# Structured (i,j-array)
surf = D.sphere((0, 0, 0), 1, 50)
surf = G.addNormalLayers(surf, dist, niter=4)
test.testA([surf], 1)
# Unstructured (TRI)
surf = D.sphere((0, 0, 0), 1, 50)
surf = C.convertArray2Tetra(surf)
surf = G.addNormalLayers(surf, dist, niter=4)
surf = C.convertArray2Tetra(surf)
test.testA([surf], 2)
# Unstructured (QUAD)
surf = D.sphere((0, 0, 0), 1, 50)
surf = C.convertArray2Hexa(surf)
surf = G.addNormalLayers(surf, dist, niter=4)
surf = C.convertArray2Tetra(surf)
test.testA([surf], 3)
def generateUpdateTest(table, testIndent):
    """Return the source text of the update-test methods for *table*:
    for foreign-key columns an update-to-existing / update-to-non-existing
    pair, for plain columns a single value-update test.  *testIndent* is
    the indentation of the generated method headers."""
    indent = testIndent + 4  # indentation of the generated method bodies
    updates = ""
    for column in table.columns:
        fk = column.getReferenceIfExists(table)
        if fk is not None:
            """ UPDATE TO EXISTING """
            updates += "%sdef\ttestUpdateToExisting%s(self):\n"%(" " *testIndent,generator.asClassName(column.name))
            elementName = generator.asElementName(column.name)
            updates += generator.insertToDB(table, indent) +"\n"
            tableElement = generator.asElementName(table.name)
            # Remember the first inserted row, then create a second set of
            # dependencies whose FK value the row is updated to.
            updates += "%s%s = self.%s\n"%(" "*indent, tableElement, tableElement)
            updates += "%sself.__initDependencies()\n"%(" "*indent)
            identifier = table.columns[0].name  # first column = row identifier
            updates += """%sdb.updateInTable(self.conn, {"%s":self.%s}, "%s", %s=%s.%s)\n"""%(" "*indent, column.name, elementName, table.name, identifier, tableElement, generator.asElementName(identifier))
            updates += """%srows = db.selectFrom(self.conn, {"%s"}, "*", %s=%s.%s)\n\n"""%(" "*indent, table.name, identifier, tableElement, generator.asElementName(identifier))
            updates += generateAssertEquals(table, column.name, indent, tableElement)+"\n"
            """ UPDATE TO NONE EXISTING """
            # Updating the FK to a random (non-existing) value must raise a
            # foreign-key constraint violation.
            updates += "%sdef\ttestUpdateToNonExisting%s(self):\n"%(" " *testIndent,generator.asClassName(column.name))
            elementName = generator.asElementName(column.name)
            updates += generator.insertToDB(table, indent) +"\n"
            tableElement = generator.asElementName(table.name)
            updates += "%s_%s = mock.%s\n"%(" "*indent, elementName, generator.randomValueByType(column))
            identifier = table.columns[0].name
            updates += """%sself.assertRaisesRegexp(IntegrityError, "foreign key constraint fails",db.updateInTable, self.conn, {"%s":_%s}, "%s", %s=self.%s)\n"""%(" "*indent, column.name, elementName, table.name, identifier, generator.asElementName(identifier))
        else:
            # Plain column: update to a fresh mock value and verify the row.
            updates += "%sdef\ttestUpdate%s(self):\n"%(" " *testIndent,generator.asClassName(column.name))
            elementName = generator.asElementName(column.name)
            updates += generator.insertToDB(table, indent) +"\n"
            updates += "%s_%s = mock.%s\n"%(" "*indent, elementName, generator.randomValueByType(column))
            identifier = table.columns[0].name
            updates += """%sdb.updateInTable(self.conn, {"%s":_%s}, "%s", %s=self.%s)\n"""%(" "*indent, column.name, elementName, table.name, identifier, generator.asElementName(identifier))
            # If the identifier column itself was updated, select by its new
            # value; otherwise select by the stored identifier.
            whereValue = ""
            if column.name == identifier:
                whereValue = "_" + elementName
            else:
                whereValue = "self." + generator.asElementName(identifier)
            updates += """%srows = db.selectFrom(self.conn, {"%s"}, "*", %s=%s)\n\n"""%(" "*indent, table.name, identifier, whereValue)
            updates += generateAssertEquals(table, column.name, indent)+"\n"
    return updates
# - bboxOfCells (array) -
import Generator as G

grid = G.cart((0., 0., 0.), (0.1, 0.1, 1.), (20, 20, 20))
# Per-cell bounding boxes of the Cartesian grid.
cell_boxes = G.bboxOfCells(grid)
print(cell_boxes)
# - rmGhostCellsNGon (array) -
import Converter as C
import Generator as G

mesh = G.cartNGon((0, 0, 0), (1, 1, 1), (21, 21, 1))
mesh = G.close(mesh)
# Round-trip: add two layers of ghost cells, then remove them again.
ghosted = C.addGhostCellsNGon(mesh, depth=2)
stripped = C.rmGhostCellsNGon(ghosted, depth=2)
C.convertArrays2File([stripped], "out.plt")
# - bboxIntersection (array) -
import Generator as G
import Transform as T
import KCore.test as test

dims = (11, 3, 11)
# Two overlapping Cartesian grids with identical spacing.
boxA = G.cart((0., 0., 0.), (0.1, 0.1, 0.2), dims)
boxB = G.cart((0.5, 0.05, 0.01), (0.1, 0.1, 0.2), dims)
# Test whether the two bounding boxes intersect.
overlap = G.bboxIntersection(boxA, boxB)
test.testO(overlap)
# - TFI TETRA (array) -
import Generator as G
import Converter as C
import Geom as D

n = 15
P1 = (0, 0, 0)
P2 = (1, 0, 0)
P3 = (0, 1, 0)
P4 = (0, 0, 1)


def triFace(Pa, Pb, Pc):
    """TFI patch on the triangle (Pa, Pb, Pc) with n points per edge."""
    eAB = D.line(Pa, Pb, n)
    eAC = D.line(Pa, Pc, n)
    eBC = D.line(Pb, Pc, n)
    return G.TFI([eAC, eAB, eBC])


tri1 = triFace(P1, P2, P3)  # face P1P2P3
tri2 = triFace(P2, P3, P4)  # face P2P3P4
tri3 = triFace(P3, P4, P1)  # face P3P4P1
# face P4P1P2 (the snippet is cut off after these two edges)
l1 = D.line(P4, P1, n)  #P4P1
l2 = D.line(P4, P2, n)  #P4P2
# - getMouseState (array) -
import Generator as G
import CPlot
import time

mesh = G.cartTetra((0, 0, 0), (1, 1, 1), (5, 5, 1))
CPlot.display([mesh], dim=2)

# Poll the mouse state twice a second, 1000 times.
for _ in range(1000):
    state = CPlot.getMouseState()
    time.sleep(0.5)
    print(state)