def deleteCADIt(CADfile):
    """Delete a CAD iteration completely.

    Removes the exported geometry files (.CatPart and .igs) from disk,
    deletes the iteration's row in sim_Cad_iterations, and drops the
    per-iteration SQL table named after the CAD file.

    Parameters
    ----------
    CADfile : str
        CAD iteration name; doubles as the file stem and the table name.
    """
    cnn, crr = cnt_X('NCC')
    # Both exported formats sit in the same folder; the original code
    # repeated this stanza twice -- folded into one loop (same behavior).
    for ext in (".CatPart", ".igs"):
        filePath = "D:\\IDPcode\\CatiaFiles\\SourceFile\\" + CADfile + ext
        # As file at filePath is deleted now, so we should check if file
        # exists or not before deleting them
        if os.path.exists(filePath):
            os.remove(filePath)
        else:
            print("Can not delete the file as it doesn't exists")
    # NOTE(review): queries are built by concatenation; CADfile is assumed
    # to be an internally generated name, never user input -- confirm.
    query = """Delete FROM sim_Cad_iterations where RefInput = '""" + CADfile + """';"""
    print(query)
    crr.execute(query)
    cnn.commit()
    # Drop the per-iteration geometry table itself.
    query = "drop table " + CADfile
    print(query)
    crr.execute(query)
    cnn.commit()
    dc_X('NCC', cnn, crr)
def Linda(generation, specie, GENtable, varVal, varVar): lPath = os.path.dirname(os.path.abspath(__file__)) #Linda2 is for GUI cnnT, crrT = cnt_X('NCC') #print(GENtable) query = """SELECT * FROM """ + GENtable + """ where fitness is null and specie = '""" + str( specie) + """' and generation = '""" + str(generation) + """';""" #print(query) crrT.execute(query) rows = crrT.fetchall() #close SQL handles dc_X('NCC', cnnT, crrT) for row in rows: recordID = row[0] #Sets fitness to 0, so that the same failed simulation isnt attempted twice cnnT, crrT = cnt_X('NCC') #mandrel speed might be adjusted during simulation - hence the re-import query = """UPDATE """ + GENtable + """ SET fitness = """ + str( 0) + """ Where (id = """ + str(recordID) + """);""" crrT.execute(query) cnnT.commit() #adjust variables dictionary by iterated variables i = 0 while i < len(varVar): varVal[varVar[i]] = row[i + 3] i = i + 1 #close SQL handles dc_X('NCC', cnnT, crrT) #evaluate the simulations -- use fitness function to establish value of each individual with open(lPath + "\\temporary\\underground.txt", "a") as text_file: text_file.write("Initiating complete analysis.\n") fitness, arunID = SingleLoop(varVal) print("Hurrah! New individual was born into the populaiton!") #stores the result, reference simulaiton number, and mandrel_speed in case change was required due to minimal angle limit cnnT, crrT = cnt_X('NCC') #mandrel speed might be adjusted during simulation - hence the re-import query = """UPDATE """ + GENtable + """ SET fitness = """ + str( fitness) + """, arunID = """ + str( arunID) + """ Where (id = """ + str(recordID) + """);""" crrT.execute(query) cnnT.commit() #close SQL handles dc_X('NCC', cnnT, crrT)
def loopCAD():
    """Generate a sweep of spar CAD variants via ex1.

    Sweeps two parameters (x: outer-section dihedral in steps of 5,
    y: twist-related values in steps of 2) over a fixed four-section spar
    layout, creating one CAD version per combination.  The next free
    version number is found by scanning mesh_list for earlier meshes of
    the same project/part/date.
    """
    st1 = time.time()
    chord_min = 0.15
    chord_max = 0.3
    part = "SparIteration"
    project = "IDP"
    tdm = str(time.strftime("%m"))
    tdy = str(time.strftime("%y"))
    liikkee = project + "_" + part + "_" + tdy + tdm + "_"
    #Find previous versions of CAD for the same project, part and date
    #the input to sim, through SQL
    cnnB, crrB = cnt_X('NCC')
    #looks for other meshes with the same source geomertry
    here = """'%""" + liikkee + """%'"""
    query = "SELECT version FROM mesh_list where CADfile like " + here
    #print(query)
    crrB.execute(query)
    #get results
    sd = []
    rows = crrB.fetchall()
    dc_X('NCC', cnnB, crrB)
    #creates a list of version numbers
    for row in rows:
        try:
            sd.append(int(row[0]))
            #break
        except TypeError:
            # NULL versions are skipped (int(None) raises TypeError)
            print("x")
    #highest version number is stored
    if is_empty(sd) == True:
        maxNo = 0
    else:
        maxNo = max(sd)
    #next version number - used for this analysis
    vni = maxNo + 1
    x = 1
    while x < 20:
        y = 0
        while y < 8:
            #each section defined by: span position, aerofoil, size multiplier (taper...), sweep, twist, dihedral
            sectioned = np.matrix([[
                0, "AerofoilCollection\\clarkYdata.dat", 150, 0, 0, 0
            ], [
                (150), "AerofoilCollection\\clarkYdata.dat", 150, 2, 0, 0
            ], [400, "AerofoilCollection\\clarkYdata.dat", 100, 0, (y / 2), 0],
                                  [500, "AerofoilCollection\\clarkYdata.dat", 80, 2, y, x]])
            ex1(project, part, sectioned, vni, chord_min, chord_max)
            vni = vni + 1
            y = y + 2
        x = x + 5
    print("--- %s seconds ---" % (time.time() - st1))
def DROPtable():
    """Drop the four hard-coded IDP_oscar_A001_b00<n> tables (n = 1..4).

    Manual-maintenance helper only; each drop is printed and committed
    individually.
    """
    cnn, crr = cnt_X('NCC')
    for table_index in range(1, 5):
        statement = "drop table IDP_oscar_A001_b00" + str(table_index)
        print(statement)
        crr.execute(statement)
        cnn.commit()
    dc_X('NCC', cnn, crr)
def CreateTable():
    """Create the Mesh_list table (manual use; columns edited by hand)."""
    cnnB, crrB = cnt_X('NCC')
    # Column list kept on its own so manual edits stay readable.
    columns = ("(id int IDENTITY(1,1) PRIMARY KEY,CADfile varchar(255),"
               "MeshFile varchar(255),xs_seed int,span_ele_size numeric(8,3),"
               "version int,verified_mesh varchar(255))")
    statement = "CREATE TABLE " + "Mesh_list" + columns
    print(statement)
    crrB.execute(statement)
    cnnB.commit()
    dc_X('NCC', cnnB, crrB)
def obtainVariable(CADfile):
    """Return the largest half_span stored in the given CAD table.

    The value comes back as a numpy float, taken from a small numpy
    buffer to match the module's numeric handling elsewhere.
    """
    cnn, crr = cnt_X('NCC')
    crr.execute("SELECT MAX(half_span) FROM " + CADfile)
    result = np.zeros(2)
    # MAX() yields a single row; the loop just copies it into the buffer.
    for record in crr.fetchall():
        result[0] = int(record[0])
    dc_X('NCC', cnn, crr)
    return result[0]
def storeCADinstance(varVal, name, chord_min, chord_max, dihedral):
    """Persist one CAD instance to SQL.

    Creates a per-instance table called *name* holding one row per
    spanwise section (half_span, airfoil, chord, twist, sweep, dihedral),
    then registers the instance in SIM_CAD_iterations.

    Parameters
    ----------
    varVal : dict -- must contain 'span' plus 'airfoil_<i>', 'chord_<i>',
        'twist_<i>', 'sweep_<i>' for i in 0..3.
    name : str -- instance/table name; assumed shaped like
        "<Product>_<part>_<Vnnn...>" (the split() calls below rely on it).
    chord_min, chord_max, dihedral : numeric design parameters.
    """
    #add error handling? (use the error imported, and try function)
    #establish connection with the database
    cnn, crr = cnt_X('NCC')
    query = "CREATE TABLE "
    query += name
    query += "(id int IDENTITY(1,1) PRIMARY KEY,half_span numeric(9,0),airfoil varchar(255),chord_length numeric(8,0),twist numeric(3,1),sweep numeric(3,1),dihedral numeric(3,1))"
    #print(query)
    crr.execute(query)
    #cnn.commit()
    #if section count is changed additional spanwise variables have to be added
    sectCount = 4
    io = 0
    while io < sectCount:
        # span positions are evenly spaced: io/(sectCount-1) of full span
        query = "INSERT INTO " + name + "(half_span,airfoil,chord_length,twist,sweep,dihedral) VALUES("
        query += str((varVal['span'] / (sectCount - 1) * io)) + """,'""" + str(
            varVal['airfoil_' + str(io)]) + """',""" + str(
                varVal['chord_' + str(io)]) + "," + str(
                    varVal['twist_' + str(io)]) + "," + str(
                        varVal['sweep_' + str(io)]) + "," + str(dihedral) + ")"
        #print(query)
        crr.execute(query)
        io = io + 1
    query = "INSERT INTO SIM_CAD_iterations(Product, Version, Iteration, RefInput,chord_min,chord_max,Created_On) VALUES("
    # Decompose the instance name into catalogue fields.
    # NOTE(review): version takes the first character of the third "_"
    # token and iteration its characters 1..3 -- assumes a fixed naming
    # scheme like "..._Vnnn"; confirm against the name generator.
    product = name.split("_")[0]
    version = (name.split("_")[2]).split()[0][0]
    iteration = str(name.split("_")[2])
    iteration = iteration[1:4]
    tdm = str(time.strftime("%m"))
    tdy = str(time.strftime("%y"))
    strDate = str(tdy + tdm)  # stored as YYMM
    query += """'""" + product + """','""" + version + """','""" + str(
        iteration) + """','""" + name + """',""" + str(
            chord_min) + """,""" + str(
                chord_max) + """,'""" + strDate + """')"""
    #print(query)
    crr.execute(query)
    cnn.commit()
    dc_X('NCC', cnn, crr)
def maxVersion(trimfile):
    """Return the next braid iteration number for *trimfile*.

    Scans BraidMain for GENfile entries containing the name and returns
    (highest stored version) + 1, or 1 when no entry exists.
    """
    cnn, crr = cnt_X('NCC')
    pattern = """'%""" + trimfile + """%'"""
    crr.execute("SELECT version FROM BraidMain where GENfile like " + pattern)
    versions = []
    for record in crr.fetchall():
        versions.append(int(record[0]))
    dc_X('NCC', cnn, crr)
    # is_empty() is the project's emptiness helper; keep the explicit
    # == True comparison used throughout this module.
    if is_empty(versions) == True:
        highest = 0
    else:
        highest = max(versions)
    return highest + 1
def Toby(generation, specie, GENtable):
    """Return all individuals of *specie* from GENtable as a float matrix.

    Row columns 0-2 (id/specie/generation) and the trailing column are
    excluded; the remaining columns are the optimisation variables.
    *generation* is accepted for interface consistency but not used.
    """
    cnn, crr = cnt_X('NCC')
    crr.execute("""SELECT * FROM """ + GENtable + """ where specie = '""" + specie + """';""")
    rows = crr.fetchall()
    # number of variable columns = total columns minus 4 bookkeeping ones
    n_vars = np.size(rows, 1) - 4
    dc_X('NCC', cnn, crr)
    # Pre-sized float64 matrix; assignment coerces each DB value to float,
    # exactly as the original concatenate-per-row version did.
    pop = np.zeros([len(rows), n_vars])
    for r, row in enumerate(rows):
        for c in range(n_vars):
            pop[r, c] = row[c + 3]
    return pop
def deleteBraidIt(BraidFile, cnn, crr):
    """Delete a braiding record completely.

    Removes the braid's .CatPart file, deletes its row in braidmain, and
    drops its per-braid SQL table.

    Parameters
    ----------
    BraidFile : str -- braid name; file stem and table name.
    cnn, crr : kept for signature compatibility, but immediately replaced
        by a fresh connection (matching the sibling delete helpers).
    """
    cnn, crr = cnt_X('NCC')
    filePath = "D:\\IDPcode\\CatiaFiles\\BraidFiles\\" + BraidFile + ".CatPart"
    # As file at filePath is deleted now, so we should check if file
    # exists or not before deleting them
    if os.path.exists(filePath):
        os.remove(filePath)
    else:
        print("Can not delete the file as it doesn't exists")
    query = """Delete FROM braidmain where GENfile = '""" + BraidFile + """';"""
    print(query)
    crr.execute(query)
    cnn.commit()
    query = "drop table " + BraidFile
    print(query)
    crr.execute(query)
    cnn.commit()
    # BUG FIX: was dc_X('NCC', cnn, rr) -- 'rr' is undefined, so this line
    # raised NameError and the connection was never released.
    dc_X('NCC', cnn, crr)
def SingleCAD(project, part, varVal): st1 = time.time() #chord_min = 0.175 #chord_max = 0.4 #tdm = str(time.strftime("%m")) #tdy = str(time.strftime("%y")) liikkee = project + "_" + part + "_" #Find previous versions of CAD for the same project, part and date #the input to sim, through SQL cnnB, crrB = cnt_X('NCC') #looks for other meshes with the same source geomertry here = """'%""" + liikkee + """%'""" query = "SELECT iteration FROM SIM_CAD_iterations where RefInput like " + here #print(query) crrB.execute(query) #get results sd = [] rows = crrB.fetchall() dc_X('NCC', cnnB, crrB) #creates a list of version numbers for row in rows: try: sd.append(int(row[0])) #break except TypeError: print("x") #highest version number is stored if is_empty(sd) == True: maxNo = 0 else: maxNo = max(sd) #next version number - used for this analysis vni = maxNo + 1 #2 stands for default radius of corners, should be made into iteratable variable #each "sectioned" defined by: span position, aerofoil, size multiplier (taper...), sweep, twist, dihedral CADfile = ex1(project, part, varVal, vni) print("--- %s seconds ---" % (time.time() - st1)) return (CADfile)
def deleteMeshIt(MeshFile, cnn, crr):
    """Delete a meshing record completely.

    Removes the mesh's exported .CatPart and .igs files from disk and
    deletes its row in mesh_list.

    Parameters
    ----------
    MeshFile : str -- mesh name; file stem and mesh_list key.
    cnn, crr : kept for signature compatibility, but immediately replaced
        by a fresh connection (matching the sibling delete helpers).
    """
    cnn, crr = cnt_X('NCC')
    # The original repeated this stanza per extension -- folded into one
    # loop with identical behavior.
    for ext in (".CatPart", ".igs"):
        filePath = "D:\\IDPcode\\CatiaFiles\\MeshFiles\\" + MeshFile + ext
        # As file at filePath is deleted now, so we should check if file
        # exists or not before deleting them
        if os.path.exists(filePath):
            os.remove(filePath)
        else:
            print("Can not delete the file as it doesn't exists")
    query = """Delete FROM mesh_list where MeshFile = '""" + MeshFile + """';"""
    print(query)
    crr.execute(query)
    cnn.commit()
    dc_X('NCC', cnn, crr)
def Steph(population, specie, generation, BCs, optiTable, varVar, varVal):
    """Create a fully random first generation and insert it into optiTable.

    Each of the five variables (cs1, cs2, cs3, no_layers, mandrel_speed)
    is drawn uniformly from its [min, max] row of BCs; no_layers (column
    3) is truncated to a whole number.  Returns BCs unchanged.  varVar and
    varVal are accepted for interface consistency but unused here.
    """
    cnnT, crrT = cnt_X('NCC')
    specimen = np.zeros([population, 5])
    for i in range(population):
        #create new population -- number of individuals, fully random each variable in range
        for j in range(5):
            specimen[i, j] = (random.uniform(0, 1)) * (BCs[j, 1] - BCs[j, 0]) + (BCs[j, 0])
        # number of layers must be an integer
        specimen[i, 3] = int(specimen[i, 3])
        # export the new generation into SQL table
        values = "','".join(str(specimen[i, j]) for j in range(5))
        query = ("INSERT INTO " + optiTable
                 + "(specie,generation,cs1,cs2,cs3, no_layers, mandrel_speed) VALUES("
                 + "'" + specie + "','" + str(generation) + "','" + values + "')")
        crrT.execute(query)
        cnnT.commit()
    #close SQL handles
    dc_X('NCC', cnnT, crrT)
    return (BCs)
def dropDownInfo():
    """Build drop-down data for the GUI.

    For every base table in DIGIProps whose name contains '_iters_',
    returns a string "tablename,var1,var2,..." listing its variable
    columns in ordinal order.  Bookkeeping columns (id, Specie,
    Generation, fitness, arunID) are filtered out.
    """
    cnn, crr = cnt_X('NCC')
    query = """SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' AND TABLE_CATALOG='DIGIProps' and TABLE_NAME like '%_iters_%'"""
    crr.execute(query)
    rows = crr.fetchall()
    seznam = []
    for row in rows:
        # str(row) renders the tuple ("name",); strip the punctuation so
        # only the quoted name ('name') remains for the next query.
        r = str(row)
        #r= r.replace("""'""","")
        r = r.replace("(", "")
        r = r.replace(")", "")
        r = r.replace(",", "")
        r = r.replace(" ", "")
        # print(r)
        # BUG FIX: a space was missing before ORDER BY, producing
        # "...= 'name'ORDER BY..." which only parsed by a tokenizer quirk.
        query = "SELECT Column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = " + str(
            r) + " ORDER BY ORDINAL_POSITION"
        crr.execute(query)
        lines = crr.fetchall()
        sz = str(r)
        for line in lines:
            # same tuple-to-text stripping for each column name
            l = str(line)
            l = l.replace(",", "")
            l = l.replace("(", "")
            l = l.replace(")", "")
            l = l.replace("""'""", "")
            l = l.replace(" ", "")
            # print(l)
            # keep only the optimisation-variable columns
            if l != str("id") and l != str("Specie") and l != str(
                    "Generation") and l != str("fitness") and l != str(
                        "arunID"):
                sz = sz + "," + str(l)
        seznam.append(sz)
    #print(seznam)
    dc_X('NCC', cnn, crr)
    return (seznam)
def AgentPytlik(iters, varVal, varMin, varMax):
    """Ant-colony-style optimisation driver for one generation table.

    Parameters
    ----------
    iters : str -- comma-separated "'table',var1,var2,..." descriptor
        (first token = GENtable name, rest = variable names).
    varVal : dict -- variable dictionary passed through to Linda.
    varMin, varMax : dict -- per-variable bounds; a False min marks a
        non-numeric variable (airfoil file index or 0-10 placeholder).

    Loops (effectively indefinitely: the 432.11 bound is an iteration
    cap): evaluates pending individuals via Linda, pulls results via
    Toby, takes the top 20% via Fifi, pheromone-pulls a random sample
    toward those individuals, and inserts the new specimen row.
    """
    GENtable = iters.split(",")[0]
    GENtable = GENtable.replace("""'""", "")
    x = iters.count(',')
    i = 1
    varVar = []
    # tokens 1..x of the descriptor are the variable names
    while i <= x:
        varVar.append(iters.split(",")[i])
        i = i + 1
    lPath = os.path.dirname(os.path.abspath(__file__))
    #Ant colony optimisatoin, or gradient based adjustment optimisation
    #initial number of randoms
    #LHS or other initial sampling method should be run prior to this
    #population = 2 #start with 2 for testing, but likely 10+ is good initial number
    specie = "test"
    generation = 420  #untill combination of opt_algos is required
    # rows : variables : cs1,cs2,cs3,MS,NL
    # columns: min, max
    BCs = np.matrix([[0.000, 0.000]])
    temp = np.matrix([[0.000, 0.000]])
    i = 0
    #AFLe = 0
    varN = len(varVar)
    print("varN", varN)
    # Build the bounds matrix row by row, one row per variable.
    while i < varN:
        if varMin[varVar[i]] != False:
            # numeric variable: take bounds straight from the dicts
            temp[0, 0] = varMin[varVar[i]]
            temp[0, 1] = varMax[varVar[i]]
            BCs = np.concatenate((BCs, temp), axis=0)
        elif "airfoil" in varVar[i]:
            # airfoil variables index into the aerofoil collection folder;
            # upper bound = number of files present
            img_folder_path = lPath + '\\aerofoilcollection\\'
            dirListing = os.listdir(img_folder_path)
            AFLS = len(dirListing)
            temp[0, 0] = 0
            temp[0, 1] = AFLS
            BCs = np.concatenate((BCs, temp), axis=0)
            #AFLe = 1
        else:
            #0-10 now arbitrarily selected for string values (eg. material)
            #this needs to be replaced by a lookup function that checks the
            #number of airfoils
            temp[0, 0] = 0
            temp[0, 1] = 10
            BCs = np.concatenate((BCs, temp), axis=0)
        i = i + 1
    BCs = np.delete(BCs, 0, axis=0)  # drop the initial all-zero seed row
    #check if specie exists
    cnnW, crrW = cnt_X('NCC')
    query = """SELECT * FROM """ + str(GENtable) + """ where specie like '""" + specie + """';"""
    print(query)
    crrW.execute(query)
    rows = crrW.fetchall()
    dc_X('NCC', cnnW, crrW)
    if is_empty(rows) == True:
        print("Error: Sampling method should be created to generate initial population. No sample was detected")
        #IDP_assistants.Steph(population,specie,generation,BCs,optiTable,varVar,varVal)
    iiiii = 0
    while iiiii < 432.11:
        IDP_assistants.Linda(generation, specie, GENtable, varVal, varVar)
        #obtain all results
        pop = IDP_assistants.Toby(generation, specie, GENtable)
        #count the number of results
        popSize = pop.shape[0]
        #find top 20% entries
        TOP = int(popSize * 0.2)
        print("High end population size is:", TOP)
        #which column is pheromone level?
        pheroNo = np.size(pop, 1)
        TOPmat = IDP_assistants.Fifi(TOP, pop, pheroNo)
        print(TOPmat)
        #random new values
        specimen = np.zeros([1, varN])
        i = 0
        while i < varN:
            specimen[0, i] = (random.uniform(0, 1)) * (BCs[i, 1] - BCs[i, 0]) + (BCs[i, 0])
            #negative iteration to go through most impactufull (highest pheromone) individual last
            ii = TOPmat.shape[0] - 1
            print("random entry", specimen[0, i])
            while ii > -0.1:
                # pull the random value toward each elite individual,
                # weighted by its squared pheromone level
                specimen[0, i] = specimen[0, i] + (TOPmat[ii, i] - specimen[0, i]) * (TOPmat[ii, pheroNo - 1]**2) * 0.2
                print(TOPmat[ii, i])
                #this means that subsequent pushes have less effect than the initial one (best result influences more, the 0.1 is greater in magnitude)
                #but it also means that proper motion to minima is not going to be achieved --- 0.1 for the last push in negligible 0.9 is still the difference
                print("adjusted specimen", specimen[0, i])
                ii = ii - 1
            i = i + 1
        #specimen[0,3] = int(specimen[0,3])
        #BELOW ORIGINAL : ERRASE AFTER NEW VERSION BELOW TESTED
        #cnnW,crrW = cnt_X('NCC')
        #query = "INSERT INTO "+GENtable+"(specie,generation,cs1,cs2,cs3, no_layers, mandrel_speed) VALUES("
        #query += """'"""+specie+"""','"""+str(generation)+"""','"""+str(specimen[0,0])+"""','"""+str(specimen[0,1])+"""','"""+str(specimen[0,2])+"""','"""+str(specimen[0,3])+"""','"""+str(specimen[0,4])+"""')"""
        #crrW.execute(query)
        #cnnW.commit()
        #close SQL handles
        #dc_X('NCC',cnnW,crrW)
        cnnW, crrW = cnt_X('NCC')
        #filling SQL table with any number of columns used
        i = 0
        while i < 1:
            #just one new specimen created at a time?
            #sampleMAT[i,3] = int(sampleMAT[i,3])
            query = "INSERT INTO " + GENtable + "(specie,generation,"
            ii = 0
            while ii < varN:
                query += varVar[ii] + ","
                ii = ii + 1
            query = query[:-1]  # trim trailing comma
            query += ") VALUES("
            query += """'""" + specie + """',""" + str(generation) + ""","""
            ii = 0
            while ii < varN:
                if "airfoil" in varVar[ii]:
                    # Will raise StopIteration if you don't have x files
                    # (the sampled index is mapped back to a file name)
                    fileNO = int(specimen[i, ii])
                    file1000 = next(itertools.islice(os.scandir(lPath + '\\aerofoilcollection\\'), fileNO, None)).path
                    file1000 = file1000.split(lPath + "\\aerofoilcollection\\")[1]
                    query += """'""" + str(file1000) + """',"""
                else:
                    query += str(specimen[i, ii]) + ","
                ii = ii + 1
            query = query[:-1]  # trim trailing comma
            query += ");"
            print("i", i)
            print(query)
            crrW.execute(query)
            cnnW.commit()
            i = i + 1
        dc_X('NCC', cnnW, crrW)
        iiiii = iiiii + 1
def MeshOne(CADfile, spn, rnc):
    """Assemble all meshing steps for one CAD part.

    Builds a spanwise element-size table (MD) from the part's section
    data in SQL -- element length tapers with the local chord ratio --
    then drives the catia_mesh helpers to create planes, intersections,
    points, connections and the swept mesh.  MD is also saved to
    Temporary\\for_spheres for downstream use.

    Parameters
    ----------
    CADfile : str -- CAD table name (and part name opened in CATIA).
    spn : numeric -- root element size in the spanwise direction.
    rnc : passed through to every catia_mesh call -- presumably the
        corner-radius setting; confirm against catia_mesh.

    Returns
    -------
    (partDocument1, cse) -- the open CATIA document and the count of
    spanwise elements.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #this function assembles all meshing steps
    time1 = time.time()
    #next section only when run from here md0 = size cell lengthwise, md1 number of cells round the spar,
    md = np.zeros(2)
    md[0] = spn
    #spar definition, passed from Mysql, for the particular automatically generated CAD
    conn, cursor = cnt_X('NCC')
    #replace max(half_span) by half_span and create a matrix
    query = "SELECT half_span,chord_length FROM " + CADfile
    print(query)
    cursor.execute(query)
    #get results
    sd = np.zeros([1, 2])
    sdTemp = np.zeros([1, 2])
    rows = cursor.fetchall()
    for row in rows:
        sdTemp[0, 0] = float(row[0])
        sdTemp[0, 1] = float(row[1])
        sd = np.concatenate((sd, sdTemp), axis=0)
    sd = np.delete(sd, (0), axis=0)  # drop the all-zero seed row
    dc_X('NCC', conn, cursor)
    # meshMAT rows: [element far-edge position, element length]
    meshMAT = np.matrix([float(0), spn])
    meshMATtemp = np.matrix([float(0), float(0)])
    i = 1
    ii = 0
    while i < sd.shape[0]:
        # interpolate element size between consecutive sections, scaled
        # by the chord ratio (smaller chord -> smaller elements)
        z1 = sd[i - 1, 0]
        z2 = sd[i, 0]
        ratELE = sd[i, 1] / sd[i - 1, 1]
        maxELE = meshMAT[ii, 1]
        minELE = ratELE * maxELE
        z = (meshMAT[ii, 0])
        i = i + 1
        while z < z2:
            ii = ii + 1
            meshMATtemp[0, 1] = ((z - z1) / (z2 - z1)) * (minELE - maxELE) + maxELE
            meshMATtemp[0, 0] = meshMAT[ii - 1, 0] + meshMATtemp[0, 1]
            meshMAT = np.concatenate((meshMAT, meshMATtemp), axis=0)
            z = meshMATtemp[0, 0]
    #the distance of the far edge of last element from the part edge
    diff1 = abs(meshMAT[meshMAT.shape[0] - 1, 0] - sd[sd.shape[0] - 1, 0])
    #distance of the second last element edge from the part edge
    diff2 = abs(meshMAT[meshMAT.shape[0] - 2, 0] - sd[sd.shape[0] - 1, 0])
    if diff1 < diff2:
        #shorten the last element and ammend the size
        meshMAT[(meshMAT.shape[0] - 1), 0] = sd[sd.shape[0] - 1, 0]
        meshMAT[(meshMAT.shape[0] - 1), 1] = meshMAT[(
            meshMAT.shape[0] - 1
        ), 1]  #-diff1 purposefully letting the element size be larger helps assign properties
    else:
        #the element length of the two sections needs to be added - excess
        meshMAT[(meshMAT.shape[0] - 2), 1] = meshMAT[(meshMAT.shape[0] - 1), 1] + meshMAT[(meshMAT.shape[0] - 2), 1] - diff1
        meshMAT = np.delete(meshMAT, (meshMAT.shape[0] - 1), axis=0)
        meshMAT[(meshMAT.shape[0] - 1), 0] = sd[sd.shape[0] - 1, 0]
    MD = meshMAT
    #count spanwise elements : cse
    cse = np.size(MD, 0)
    #for each of the sections
    #calculate the end section size of element
    #find average element size in the section
    #iterate through elements, new element size is the local ratio from max to min
    #the meshing functions:~~~~~~~~~~~~ replace this
    #MD = catia_mesh.paraRedef(md,sd)
    part1, HSF1, partDocument1 = catia_mesh.openPart(CADfile)
    catia_mesh.planes(part1, HSF1, MD, rnc)
    catia_mesh.insects(part1, HSF1, MD, rnc)
    catia_mesh.pts(part1, HSF1, MD, rnc)
    catia_mesh.cnxl(part1, HSF1, MD, rnc)
    catia_mesh.sweep(part1, HSF1, MD, rnc)
    np.save(lPath + '\\Temporary\\for_spheres', MD)
    print("---Meshing this part took: %s seconds ---" % (time.time() - time1))
    return (partDocument1, cse)
def braidAV(secPTS, secVECy, secVECz, BraidFile, secs, varVal):
    """Average braiding data per spanwise section and cross-section zone.

    Splits the spar into *secs* spanwise sections, and each cross-section
    into 12 zones (4 corners + 8 straight segments).  For every zone it
    averages the braid angle and pitch, separately for warp (WW=0) and
    weft (WW=1).  Braid points are pulled from the BraidFile SQL table in
    global coordinates and transformed into each section's local frame
    via GlobalToLocal before zoning.

    Parameters
    ----------
    secPTS, secVECy, secVECz : per-section centreline anchor points and
        local y/z axis vectors (from centPTS_*).
    BraidFile : str -- per-braid SQL table name.
    secs : int -- number of spanwise sections.
    varVal : dict -- needs 'span', 'RAD' and 'chord_0'..'chord_3'.

    Returns
    -------
    (output, segBC, secPTS, secVECy, secVECz) where output rows are
    [zone id, warp angle, weft angle, warp pitch, weft pitch] and segBC
    holds each zone's bounding box in local coordinates.
    """
    secLen = varVal['span'] / secs
    #the output is warp-angle, weft-angle, warp-pitch, weft-pitch for
    output = np.zeros([(secs * 12), 5])
    #WW corresponds to warp-weft binary options
    WW = 0
    #boundary conditions matrix to be populated
    segBC = np.zeros([1, 7])
    while WW < 2:
        i = 0
        #i loops through sections, warp, then weft
        while i < secs:
            #SQL connection create
            cnnF, crrF = cnt_X('NCC')
            # select min-max y and x
            #z direction boundary conditions, defined in original coordinate system
            low = i * secLen
            high = (i + 1) * secLen
            mid = (low + high) / 2
            #obtain relevant braiding data for the 12 sections around the x-section
            query = "SELECT * FROM " + BraidFile + " where (z < " + str(high) + ") and (z > " + str(low) + ") and (warpORweft =" + str(WW) + " )"
            #print(query)
            crrF.execute(query)
            #get results
            rows = crrF.fetchall()
            #reference points on centreline
            xAnchor = float(secPTS[i, 0])
            yAnchor = float(secPTS[i, 1])
            zAnchor = float(secPTS[i, 2])
            dc_X('NCC', cnnF, crrF)
            secMATlocal = np.zeros([1, 6])
            #get all the braiding data and translate them to local coordinate systems
            for row in rows:
                r = np.zeros([1, 6])
                #create the translated coordinates
                #new y and new z coordinate systems are imported to this script, new x is calculated
                secVECx = np.cross(secVECy[i, :], secVECz[i, :])
                #vectors of new coordinate system
                cSYS2 = np.array(([secVECx[0], secVECx[1], secVECx[2]], [secVECy[i, 0], secVECy[i, 1], secVECy[i, 2]], [secVECz[i, 0], secVECz[i, 1], secVECz[i, 2]]))
                point1 = np.array([0, 0, 0])
                #3d coordinates of new coordinate system
                point2 = np.array([xAnchor, yAnchor, zAnchor])
                #corresponds to default coordinate system
                cSYS1 = np.array(([1, 0, 0], [0, 1, 0], [0, 0, 1]))
                #GCP is the point with braiding information imported, referenced in global coordinate system
                GCP = np.array([float(row[2]), float(row[3]), float(row[4])])
                #use the function to translate coordinates
                LCP = GlobalToLocal(point1, point2, cSYS1, cSYS2, GCP)
                #populate matrix for further processing
                # columns: id, local x, local y, local z, angle, pitch
                r[0, 0], r[0, 1], r[0, 2], r[0, 3], r[0, 4], r[0, 5] = row[1], LCP[0], LCP[1], LCP[2], row[5], row[9]
                #append all the processed data into one matrix
                secMATlocal = np.concatenate((secMATlocal, r), axis=0)
            #delete the first row, initial zeroes
            secMATlocal = np.delete(secMATlocal, (0), axis=0)
            #min-max should be calculated in secondary coordinate system
            xMAX = max(secMATlocal[:, 1])
            yMAX = max(secMATlocal[:, 2])
            xMIN = min(secMATlocal[:, 1])
            yMIN = min(secMATlocal[:, 2])
            #THIS SECTION IS NEW - ADJUST - AND TEST
            #section treshold ~~ one day include as a parameter
            #Calculate treshold requirement based on RAD, large radius means greater portion is part of the section
            s = varVal['span']
            c_L = 150  #default chord_0 size
            # interpolate the local chord at this section's mid-span
            yu = 0
            while yu < 3:
                c1 = varVal['chord_' + str(yu)]
                c2 = varVal['chord_' + str(yu + 1)]
                s1 = yu * s / 3
                s2 = (yu + 1) * s / 3
                if s1 < mid < s2:
                    c_L = ((mid - s1) / (s2 - s1)) * (c2 - c1) + c1
                yu = yu + 1
            c_R = c_L / varVal['RAD']
            #the 0.7 can be different for x and y...this should be done based on the width of the spar eventually
            #50 is the base ratio c/R --subject to change if default values change
            if c_R < 50:
                #From last checs it appears that the gradients, eg. 0.2/350, can be a bit increased
                sty = 0.55 + (0.2 / 200) * (c_R - 50)
                stx = 0.65 + (0.2 / 200) * (c_R - 50)
            else:
                sty = 0.55 + (0.2 / 40) * (c_R - 50)
                stx = 0.65 + (0.2 / 40) * (c_R - 50)
            #sty = 0.7
            #stx = 0.6
            #segBC columns: section ID, xMax,xMIn,yMax,yMin,zMax,zMin
            #run only for warp, no point duplicating data
            #This section is part specific.
            if WW < 1:
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 1), 9999999, stx * xMAX, 9999999, sty * yMAX, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 2), 9999999, stx * xMAX, sty * yMAX, 0, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 3), 9999999, stx * xMAX, 0, sty * yMIN, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 4), 9999999, stx * xMAX, sty * yMIN, -999999, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 5), stx * xMAX, 0, sty * yMIN, -999999, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 6), 0, stx * xMIN, sty * yMIN, -999999, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 7), stx * xMIN, -999999, sty * yMIN, -999999, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 8), stx * xMIN, -999999, 0, sty * yMIN, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 9), stx * xMIN, -999999, sty * yMAX, 0, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 10), stx * xMIN, -999999, 9999999, sty * yMAX, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 11), 0, stx * xMIN, 9999999, sty * yMAX, high, low]))), 0)
                segBC = np.concatenate((segBC, (np.matrix([(i * 12 + 12), stx * xMAX, 0, 9999999, sty * yMAX, high, low]))), 0)
            #~~~~ there might be more efficient way of assigning these, future work~~~~
            #ac1,ac2,ac3,ac4 = [],[],[],[]
            #pc1,pc2,pc3,pc4 = [],[],[],[]
            af1, af2, af3, af4, af5, af6, af7, af8, af9, af10, af11, af12 = [], [], [], [], [], [], [], [], [], [], [], []
            pf1, pf2, pf3, pf4, pf5, pf6, pf7, pf8, pf9, pf10, pf11, pf12 = [], [], [], [], [], [], [], [], [], [], [], []
            sz = np.size(secMATlocal, 0)
            #print(sz)
            #go through each set of values within the spanwise zone
            #assign each set of values to one of the 12 sectiosn (4 corners, and 8 straight sectiosn)
            iii = 0
            while iii < sz:
                if secMATlocal[iii, 1] > (stx * xMAX) and secMATlocal[iii, 2] > (sty * yMAX):
                    #top right corner
                    af1.append(float(secMATlocal[iii, 4]))
                    pf1.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] > (stx * xMAX) and secMATlocal[iii, 2] < (sty * yMIN):
                    #bottom right corner
                    af4.append(float(secMATlocal[iii, 4]))
                    pf4.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMIN) and secMATlocal[iii, 2] < (sty * yMIN):
                    #bottom left corner
                    af7.append(float(secMATlocal[iii, 4]))
                    pf7.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMIN) and secMATlocal[iii, 2] > (sty * yMAX):
                    #top left corner
                    af10.append(float(secMATlocal[iii, 4]))
                    pf10.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] > (stx * xMAX) and secMATlocal[iii, 2] < (sty * yMAX) and secMATlocal[iii, 2] > 0:
                    #right side, upper straight section
                    af2.append(float(secMATlocal[iii, 4]))
                    pf2.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] > (stx * xMAX) and secMATlocal[iii, 2] > (sty * yMIN) and secMATlocal[iii, 2] < 0:
                    #right side, lower straight section
                    af3.append(float(secMATlocal[iii, 4]))
                    pf3.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMAX) and secMATlocal[iii, 2] < (sty * yMIN) and secMATlocal[iii, 1] > 0:
                    #bottom side, right straight section
                    af5.append(float(secMATlocal[iii, 4]))
                    pf5.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] > (stx * xMIN) and secMATlocal[iii, 2] < (sty * yMIN) and secMATlocal[iii, 1] < 0:
                    #bottom side, left straight section
                    af6.append(float(secMATlocal[iii, 4]))
                    pf6.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMIN) and secMATlocal[iii, 2] > (sty * yMIN) and secMATlocal[iii, 2] < 0:
                    #left side, bottom straight section
                    af8.append(float(secMATlocal[iii, 4]))
                    pf8.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMIN) and secMATlocal[iii, 2] < (sty * yMAX) and secMATlocal[iii, 2] > 0:
                    #left side, top straight section
                    af9.append(float(secMATlocal[iii, 4]))
                    pf9.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] > (stx * xMIN) and secMATlocal[iii, 2] > (sty * yMAX) and secMATlocal[iii, 1] < 0:
                    #top side, left straight section
                    af11.append(float(secMATlocal[iii, 4]))
                    pf11.append(float(secMATlocal[iii, 5]))
                elif secMATlocal[iii, 1] < (stx * xMAX) and secMATlocal[iii, 2] > (sty * yMAX) and secMATlocal[iii, 1] > 0:
                    #top side, right straight section
                    af12.append(float(secMATlocal[iii, 4]))
                    pf12.append(float(secMATlocal[iii, 5]))
                iii = iii + 1
            #the output is warp-angle, weft-angle, warp-pitch, weft-pitch for
            #WW serves as a toggle between warp and weft
            # NOTE(review): exec() is used to index the numbered af*/pf*
            # lists dynamically -- mean() must be in scope at call time.
            ii = 0
            while ii < 12:
                #the reference point
                BuildCommand = "output[(i*12)+" + str(ii) + ",0] = (i*12)+" + str(ii + 1) + ""
                exec(BuildCommand)
                #angle
                BuildCommand = "output[(i*12)+" + str(ii) + ",(1+WW)] = mean(af" + str(ii + 1) + ")"
                exec(BuildCommand)
                #pitch
                BuildCommand = "output[(i*12)+" + str(ii) + ",(3+WW)] = mean(pf" + str(ii + 1) + ")"
                exec(BuildCommand)
                ii = ii + 1
            i = i + 1
        WW = WW + 1
    #if data is missing for a section, average the values based on neighbouring sections
    #loop through all braid data rows
    iv = 0
    count = 0
    while iv < output.shape[0]:
        #loop through all potential 0 values
        iiv = 1
        while iiv < 5:
            #in an event of zero value being present
            if output[iv, iiv] == 0:
                c = 0
                v = 0
                #for the last of the xs-wise sections (12th)
                if (iv + 1) / 12 == int((iv + 1) / 12):
                    #add to division count for averaging
                    c = c + 2
                    #add the value of before and after sections
                    v = v + output[iv - 1, iiv] + output[iv - 11, iiv]
                #for the first of the xs-wise sections (1st)
                elif (iv) / 12 == int((iv) / 12):
                    c = c + 2
                    v = v + output[iv + 1, iiv] + output[iv + 11, iiv]
                #for all but the first and last of the xs-sections
                else:
                    c = c + 2
                    v = v + output[iv + 1, iiv] + output[iv - 1, iiv]
                # for all but tip section of the spar
                if (iv + 12) < output.shape[0]:
                    #add to division count for averaging
                    c = c + 1
                    #add the next spanwise section for averaging
                    v = v + output[iv + 12, iiv]
                #for all but the root section of the spar
                if (iv - 12) > 0:
                    c = c + 1
                    v = v + output[iv - 12, iiv]
                output[iv, iiv] = v / c
                print("0 value encountered, average of neighbouring sections is used:", output[iv, iiv])
                count = count + 1
                if count > 100:
                    print("100 materials were replaced, TROUBLESHOOT!")
                    print("___simulation being crashed __")
                    np.set_printoptions(threshold=sys.maxsize)
                    print(output)
                    # deliberate crash: 'breakhere' is undefined, so this
                    # raises NameError to abort the simulation on purpose
                    breakhere
            iiv = iiv + 1
        iv = iv + 1
    segBC = np.delete(segBC, (0), axis=0)  # drop the all-zero seed row
    #Troubleshooting section, enable in case of suspected error:
    #np.set_printoptions(threshold=sys.maxsize)
    variable7 = output
    with open("Temporary\\Tshoot_segmentation_output.txt", "w") as text_file:
        text_file.write(str(variable7))
    #segBC in transformed coordinates
    #print("",secPTS,"")
    #print("",secVECy,"")
    #print("",secVECz,"")
    return (output, segBC, secPTS, secVECy, secVECz)
def centPTS_P(BraidFile, span, secs):
    """Compute per-section centreline reference frames from saved data.

    Pure-numpy sibling of centPTS_C: instead of querying CATIA geometry
    it interpolates the cached centreline array (temporary\\cdArr.npy).
    For each of *secs* spanwise sections it returns the mid-section
    centreline point, the local z vector (centreline direction) and the
    local y vector (tilted by the interpolated twist).

    Returns
    -------
    (secPTS, secVECy, secVECz) -- each a (secs, 3) array.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #this script finds points/positions on centreline for reference of locations relative to centreline
    #obtain the pitch data based on BraidFile
    cnnE, crrE = cnt_X('NCC')
    #(change when the CATIA terminology is changed)
    #tdm = str(time.strftime("%m"))
    #tdy = str(time.strftime("%y"))
    # Derive the CAD table name from the braid file name; assumes the
    # "_JK" suffix convention -- confirm against the CAD generator.
    Cfile = BraidFile.split("_")[0] + "_" + BraidFile.split("_")[1] + "_" + BraidFile.split("_")[2] + "_JK"
    query = "Select half_span, twist from " + Cfile
    crrE.execute(query)
    #get results
    sd = np.zeros([1, 2])
    rows = crrE.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sdx = np.zeros([1, 2])
            sdx[0, 0], sdx[0, 1] = float(row[0]), float(row[1])
            sd = np.concatenate((sd, sdx), axis=0)
            #break
        except TypeError:
            # NULL values skipped (float(None) raises TypeError)
            print("x")
    sd = np.delete(sd, (0), axis=0)  # drop the all-zero seed row
    #close SQL handles
    dc_X('NCC', cnnE, crrE)
    #creates empty matrix based on number of sections ==> number of reference points required
    secPTS = np.zeros([secs, 3])
    secVECz = np.zeros([secs, 3])
    secVECy = np.zeros([secs, 3])
    #section lenght calculate
    secLen = span / secs
    cdArr = np.load(lPath + "\\temporary\\cdArr.npy")
    i = 0
    #loop through sections
    while i < secs:
        #the coordinate systems are based in the middle of section (hence 0.5)
        rang1 = (i + 0.5) * secLen
        #offset and intersection to create 0 point for new coo system
        # OBTAIN X,Y,Z POINT BASED ON cdArr
        # find the first centreline sample past the mid-section position
        ii = 0
        z = 0
        while z < rang1:
            z = cdArr[ii, 3]
            ii = ii + 1
        #secPTS -- linear interpolation between samples ii-1 and ii
        secPTS[i, 0] = (cdArr[ii, 1] - cdArr[ii - 1, 1]) * ((rang1 - cdArr[ii - 1, 3]) / (cdArr[ii, 3] - cdArr[ii - 1, 3])) + cdArr[ii - 1, 1]
        secPTS[i, 1] = (cdArr[ii, 2] - cdArr[ii - 1, 2]) * ((rang1 - cdArr[ii - 1, 3]) / (cdArr[ii, 3] - cdArr[ii - 1, 3])) + cdArr[ii - 1, 2]
        secPTS[i, 2] = (cdArr[ii, 3] - cdArr[ii - 1, 3]) * ((rang1 - cdArr[ii - 1, 3]) / (cdArr[ii, 3] - cdArr[ii - 1, 3])) + cdArr[ii - 1, 3]
        # OBtain a vector on centreline
        #secVECz
        #t-shoot 18/03/2020 unverified : not avergage but subtract
        secVECz[i, 0] = (cdArr[ii, 1] - cdArr[ii - 1, 1])
        secVECz[i, 1] = (cdArr[ii, 2] - cdArr[ii - 1, 2])
        secVECz[i, 2] = (cdArr[ii, 3] - cdArr[ii - 1, 3])
        # locate the spanwise interval of sd containing this section
        ii = 0
        s2 = 0
        while rang1 > s2:
            s1 = sd[ii, 0]
            s2 = sd[ii + 1, 0]
            # this minus is used to match angle of attack effect with the vector transform.... verify
            t1 = -sd[ii, 1]
            t2 = -sd[ii + 1, 1]
            ii = ii + 1
        #interpolation to find local twist
        prog = (rang1 - s1) / (s2 - s1)
        twist = t1 + (t2 - t1) * prog
        #print(twist)
        #(also add dihedral at some point)
        #position of third point for surface creation (~~~~ eventually provide drawing of this ?~~~~)
        #export second vector
        secVECy[i, 0] = 0
        secVECy[i, 1] = 100
        secVECy[i, 2] = 100 * math.tan(twist * math.pi / 180)
        i = i + 1
    #These hashed lines can be used to visualize the operations in CATIA:
    #silo = "D:\\IDPcode\\CatiaFiles\\TEST.CATPART"
    #partDocument1.SaveAs(silo)
    #breakthis
    #print(secVECy)
    #print(secPTS,secVECy,secVECz)
    return (secPTS, secVECy, secVECz)
def centPTS_C(BraidFile, span, secs):
    """Find reference points and local axes on the mandrel centreline (CATIA variant).

    Same contract as ``centPTS_P`` but the geometry is constructed live in a CATIA
    part: planes are offset along the centreline, intersected with the spline,
    and the resulting points/lines are exported via WRL and parsed with ``wrmmm``.

    Parameters:
        BraidFile: braid file name; also names the .CatPart opened from
            CatiaFiles\\BraidFiles and the SQL table with (half_span, twist) rows.
        span: total span covered by the sections.
        secs: number of sections.

    Returns:
        (secPTS, secVECy, secVECz) -- three (secs, 3) arrays of points,
        y-vectors and z-vectors.

    Side effects: appends construction geometry to the CATIA part, writes
    Temporary\\xxx.wrl repeatedly, then closes the document without saving.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #this script finds points/positions on centreline for reference of locations relative to centreline
    #obtain the pitch data based on BraidFile
    cnnE,crrE = cnt_X('NCC')
    #(change when the CATIA terminology is changed)
    #tdm = str(time.strftime("%m"))
    #tdy = str(time.strftime("%y"))
    Cfile = BraidFile.split("_")[0] +"_"+ BraidFile.split("_")[1] +"_"+BraidFile.split("_")[2]+"_JK"
    query = "Select half_span, twist from "+Cfile
    crrE.execute(query)
    #get results
    sd = np.zeros([1,2])
    rows = crrE.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sdx = np.zeros([1,2])
            sdx[0,0],sdx[0,1] = float(row[0]),float(row[1])
            sd = np.concatenate((sd,sdx),axis=0)
            #break
        except TypeError:
            print("x")
    #drop the all-zero seed row
    sd = np.delete(sd, (0), axis=0)
    #close SQL handles
    dc_X('NCC',cnnE,crrE)
    #creates empty matrix based on number of sections ==> number of reference points required
    secPTS = np.zeros([secs,3])
    secVECz = np.zeros([secs,3])
    secVECy = np.zeros([secs,3])
    #section lenght calculate
    secLen = span/secs
    CATIA = win32com.client.dynamic.Dispatch("CATIA.Application")
    #display refresh disabled for speed; restored at the end
    CATIA.RefreshDisplay = False
    #location of CATIA braid-file
    str15 = lPath+"\\CatiaFiles\\BraidFiles\\"+BraidFile+".CatPart"
    partDocument1 = CATIA.Documents.Open(str15)
    part1 = partDocument1.Part
    HSF1 = part1.HybridShapeFactory
    hbs1 = part1.HybridBodies
    #Catia reference set - geometries fixed names - created by braid scripts
    hb1 = hbs1.Item("CentrelineGeo")
    hb2 = hbs1.Item("CentreSpline")
    hs1 = hb2.HybridShapes
    hss1 = hs1.Item("Centreline")
    ref2 = part1.CreateReferenceFromObject(hss1)
    hb3 = hbs1.Add()
    hb3.Name="CentrelinePoints"
    #create a reference from one of the origin planes
    originElements1 = part1.OriginElements
    plane1 = originElements1.PlaneXY
    ref1 = part1.CreateReferenceFromObject(plane1)
    i = 0
    #loop through sections
    while i < secs:
        #the coordinate systems are based in the middle of section (hence 0.5)
        rang1 = (i+0.5)*secLen
        #offset and intersection to create 0 point for new coo system
        off1 = HSF1.AddNewPlaneOffset(ref1, rang1, False)
        hb1.AppendHybridShape(off1)
        ref3 = part1.CreateReferenceFromObject(off1)
        ref2 = part1.CreateReferenceFromObject(hss1)
        hsi1 = HSF1.AddNewIntersection(ref2, ref3)
        hsi1.PointType = 0
        hb3.AppendHybridShape(hsi1)
        #create a second point to generate vector line
        #this is stored in hidden geometry
        #the 5 is 5mm of step on centreline to create the base vector
        off2 = HSF1.AddNewPlaneOffset(ref1,(rang1+5),False)
        hb1.AppendHybridShape(off2)
        ref4 = part1.CreateReferenceFromObject(off2)
        hsi2 = HSF1.AddNewIntersection(ref2,ref4)
        hsi2.PointType = 0
        hb1.AppendHybridShape(hsi2)
        #creating straight line between two lines on centreline
        ref5 = part1.CreateReferenceFromObject(hsi1)
        ref6 = part1.CreateReferenceFromObject(hsi2)
        hslx = HSF1.AddNewLinePtPt(ref5,ref6)
        hb3.AppendHybridShape(hslx)
        part1.Update()
        #obtain the vector using wrmmm script (parses the exported WRL)
        partDocument1.ExportData(lPath+"\\Temporary\\xxx.wrl", "wrl")
        vec, point = wrmmm()
        #hide unused geomtry so the next WRL export only contains the line of interest
        selection1 = partDocument1.Selection
        selection1.Clear
        visPropertySet1 = selection1.VisProperties
        selection1.Add(hslx)
        visPropertySet1 = visPropertySet1.Parent
        visPropertySet1.SetShow(1)
        selection1.Clear
        #output point and z vector
        secPTS[i,0] = point[0,0]
        secPTS[i,1] = point[0,1]
        secPTS[i,2] = point[0,2]
        secVECz[i,0] = vec[0,0]
        secVECz[i,1] = vec[0,1]
        secVECz[i,2] = vec[0,2]
        #scan the (half_span, twist) table for the bracket containing rang1
        ii = 0
        s2 = 0
        while rang1 > s2:
            s1 = sd[ii,0]
            s2 = sd[ii+1,0]
            # this minus is used to match angle of attack effect with the vector transform.... verify
            t1 = -sd[ii,1]
            t2 = -sd[ii+1,1]
            ii = ii + 1
        #interpolation to find local twist
        prog = (rang1-s1)/(s2-s1)
        twist = t1 + (t2-t1)*prog
        #print(twist)
        #position of third point for surface creation (~~~~ eventually provide drawing of this ?~~~~)
        buildPointZ = point[0,2]
        buildPointY = point[0,1]+100
        buildPointX = point[0,0] + 100*math.tan(twist*math.pi/180)
        #direction of angle is to be checked later ~~~~~~~~~~~~~~~~~
        hspc = HSF1.AddNewPointCoord(buildPointX, buildPointY, buildPointZ)
        hb1.AppendHybridShape(hspc)
        ref7 = part1.CreateReferenceFromObject(hspc)
        #plane made of three points
        PP1 = HSF1.AddNewPlane3Points(ref5, ref6, ref7)
        hb1.AppendHybridShape(PP1)
        #create a line on the plane at 90 degrees to z direction vector
        ref8 =hslx
        ref9 = PP1
        sla1 = HSF1.AddNewLineAngle(ref8, ref9, ref5, False, 0.000000, 20.000000, 90.000000, False)
        hb3.AppendHybridShape(sla1)
        #update, export, obtain vector
        part1.Update()
        partDocument1.ExportData(lPath+"\\Temporary\\xxx.wrl", "wrl")
        vec, point = wrmmm()
        #hide useless geometry
        selection1 = partDocument1.Selection
        selection1.Clear
        visPropertySet1 = selection1.VisProperties
        selection1.Add(sla1)
        selection1.Add(hsi1)
        visPropertySet1 = visPropertySet1.Parent
        visPropertySet1.SetShow(1)
        selection1.Clear
        #export second vector
        secVECy[i,0] = vec[0,0]
        secVECy[i,1] = vec[0,1]
        secVECy[i,2] = vec[0,2]
        i = i + 1
    #These hashed lines can be used to visualize the operations in CATIA:
    #silo = "D:\\IDPcode\\CatiaFiles\\TEST.CATPART"
    #partDocument1.SaveAs(silo)
    #breakthis
    partDocument1.Close()
    CATIA.RefreshDisplay = True
    #print(secVECy)
    #print(secPTS,secVECy,secVECz)
    return(secPTS, secVECy, secVECz)
#from mysql.connector import MySQLConnection, Error #from python_mysql_dbconfig import read_db_config #from IDP_cheats import togglePulse from IDP_databases import cnt_X, dc_X #connection to the database cnnI, crrI = cnt_X('NCC') #main results table query = "CREATE TABLE arun (idArun int IDENTITY(1,1) PRIMARY KEY,project varchar(45),"\ "part varchar(45),Iteration_count int,CADfile varchar(45),"\ "braidFile varchar(45),meshFile varchar(45),FEfile varchar(45),"\ "span_ele_size numeric(8,3),xs_seed int,root_perimeter decimal(8,3),"\ "pher float,simulation_time float,date date"\ ");" crrI.execute(query) #main FE table query = "CREATE TABLE fe_inst (ID int IDENTITY(1,1) PRIMARY KEY,meshFile varchar(255),"\ "braidFile varchar(255),material varchar(255),"\ "feFile varchar(255),version int,"\ "max_deflection decimal(8,3),mass float,"\ "no_layers int,force_N float,"\ "spanwise_sections int);" crrI.execute(query) #fibre material properties table query = "CREATE TABLE fibre_properties (id int IDENTITY(1,1) PRIMARY KEY,"\
def ps(pitch1, pitch2, angle1, angle2, varVal):
    """Compute smeared two-layer braid laminate properties.

    Reads matrix and fibre material data from SQL (tables
    ``matrix_properties`` / ``fibre_properties``), derives layer fibre
    volume fractions and elliptical thickness from the braid pitches,
    runs rule-of-mixtures and classical laminate analysis, and estimates
    in-plane permeability.

    Parameters:
        pitch1, pitch2: braid pitches of the two yarn directions.
        angle1, angle2: braid angles of the two layers (degrees).
        varVal: variables dict; reads 'matrix' and 'reinforcement'
            material names.

    Returns:
        (memProp, dens, t, K1, K2, Vf_av, t) -- laminate membrane
        properties, averaged density, layer thickness, principal
        permeabilities, and average fibre volume fraction.  ``t`` is
        intentionally returned twice to keep the original call signature
        backward-compatible.
    """
    #get matrix and fibre properties from SQL
    #NOTE(review): queries are built by string concatenation; fine for the
    #internal material names used here, but switch to parameterized queries
    #if these values can ever be user-supplied.
    cnnG, crrG = cnt_X('NCC')
    #finds the matrix material
    matrix = varVal['matrix']
    query = """SELECT E,poisson,G,density from matrix_properties where material_name = '""" + matrix + """';"""
    crrG.execute(query)
    #first row holds the matrix properties: E, poisson, G, density
    Em, vm, Gm, Dm = (float(v) for v in crrG.fetchall()[0][:4])
    #find the fibre material
    fibre = varVal['reinforcement']
    query = """SELECT E1,E2,G12,v12,fibre_dia,density,perme_coeff from fibre_properties where material_name = '""" + fibre + """';"""
    crrG.execute(query)
    #first row holds the fibre properties
    Ef1, Ef2, Gf, vf, fibre_dia, Df, CII = (float(v) for v in crrG.fetchall()[0][:7])
    #fibre radius from diameter
    fR = fibre_dia / 2
    #close SQL handles
    dc_X('NCC', cnnG, crrG)
    seq = np.matrix([angle1, angle2])
    sym = "yes"
    #eliptical thickness now outputted
    Vf1, Vf2, t = VolumesF(pitch1, pitch2, fR, angle1, angle2)
    #density: rule of mixtures per layer, then averaged
    density1 = Df * Vf1 + Dm * (1 - Vf1)
    density2 = Df * Vf2 + Dm * (1 - Vf2)
    #assuming both layers are the same thickness ~~~~~~~~
    dens = (density1 + density2) / 2
    Vf = (Vf1 + Vf2) / 2
    #NOTE(review): the first lamina uses the averaged Vf while the second
    #uses Vf2 -- asymmetric (Vf1 might be intended); preserved as-is.
    E1, E2, v12, G = RoM(Vf, Ef1, Ef2, Em, Gf, Gm, vf, vm)
    #thickness is not too relevant for classical laminate analysis,
    #doubling stacking with same symetry is going to result in same output values
    lamina1 = [E1, E2, v12, G, t]
    #second direction
    E1, E2, v12, G = RoM(Vf2, Ef1, Ef2, Em, Gf, Gm, vf, vm)
    lamina2 = [E1, E2, v12, G, t]
    mABD, memProp = ABD(seq, sym, lamina1, lamina2, angle1, angle2)
    #average fibre volume fraction (same quantity as Vf above)
    Vf_av = Vf
    K1, K2 = permeability(Vf_av, angle1, angle2, CII)
    return (memProp, dens, t, K1, K2, Vf_av, t)
def MultiMesh(CADfile, varVal):
    """Mesh a CAD part in CATIA and record the mesh in the SQL mesh_list table.

    Derives the cross-section seed from the root perimeter stored in ``arun``,
    runs ``MeshOne``, assigns the next mesh version number, logs the mesh in
    ``mesh_list`` and saves the meshed part as .CatPart and .igs.

    Parameters:
        CADfile: name of the source CAD record/file.
        varVal: variables dict; reads 'mesh_size' (spanwise element size).

    Returns:
        (MeshFile, span_ele_size, xs_seed) -- new mesh file name, spanwise
        element size used, and the (even) cross-section seed.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #configure SQL connection
    st118 = time.time()
    cnnB, crrB = cnt_X('NCC')
    #the loop is dormant - can be used to mesh multiple files after one-another
    #while x < 28:
    #the if function translates numbers into corresponding file name parts
    #pass in file name and mesh characteristics (size of elements span-wise and number of elements around cross section)
    #the two variables should change based on mesh sensitivity analysis? (maybe add at some point)
    # USE PERIMETER INFO TO SETUP THE MESH SIZE
    query = """SELECT root_perimeter FROM arun where Cadfile = '""" + CADfile + """' and root_perimeter is not null;"""
    crrB.execute(query)
    rows = crrB.fetchall()
    #NOTE(review): if the query returns no rows, fd is never bound and
    #'ned = fd / span_ele_size' below raises NameError -- confirm callers
    #guarantee a perimeter record exists.
    for row in rows:
        try:
            fd = float(row[0])
            #break
        except TypeError:
            print("We are DOOMED")
    span_ele_size = varVal[
        'mesh_size']  #7 base for all optimisations before 13/09/2019
    # originally span_ele_size
    # ROUND THIS TO CLOSEST FACTOR OF 2
    ned = fd / span_ele_size
    #round to closest multiplication of 2, has to be even for the number is halfed within the meshing simulation
    xs_seed = int(ned / 2) * 2
    partDocument1, count_span_el = MeshOne(CADfile, span_ele_size, xs_seed)
    #Find previous mesh verstions of the same CAD
    #the input to sim, through SQL
    #looks for other meshes with the same source geomertry
    here = """'%""" + CADfile + """%'"""
    query = "SELECT version FROM mesh_list where CADfile like " + here
    #print(query)
    crrB.execute(query)
    #get results
    sd = []
    rows = crrB.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sd.append(int(row[0]))
            #break
        except TypeError:
            print("x")
    #highest version number is stored
    if is_empty(sd) == True:
        maxNo = 0
    else:
        maxNo = max(sd)
    #next version number - used for this analysis
    version = maxNo + 1
    #core part of CADfile name
    trimfile = CADfile.split("_")[0] + "_" + CADfile.split(
        "_")[1] + "_" + CADfile.split("_")[2] + "_"
    #file definition based on version number (zero-padded to three digits)
    if version < 10:
        vn = "00" + str(int(version))
    elif version < 100:
        vn = "0" + str(int(version))
    else:
        vn = str(int(version))
    #create a MeshFile name from CadFile and mesh version
    MeshFile = trimfile + "M" + vn
    #correct for calc error in number of spanwise elements
    count_span_el = count_span_el - 1
    #store Mesh information in SQL
    mTime = time.time() - st118
    #NOTE(review): the value written into the 'span_ele_size' column here is
    #count_span_el (element count), not span_ele_size -- the column name and
    #value look mismatched; verify against consumers of mesh_list.
    query = "INSERT INTO mesh_list(CADfile,MeshFile,xs_seed,span_ele_size,version,meshing_time) VALUES("
    query += """'""" + CADfile + """','""" + MeshFile + """',""" + str(
        xs_seed) + """,""" + str(count_span_el) + """,""" + str(
            version) + """,""" + str(mTime) + """)"""
    crrB.execute(query)
    cnnB.commit()
    #close SQL handles
    dc_X('NCC', cnnB, crrB)
    #save the CADfile of the mesh (might not be necessary)
    silo = lPath + "\\CatiaFiles\\MeshFiles\\" + MeshFile + "_JK.CatPart"
    partDocument1.SaveAs(silo)
    #save the IGES file
    silo2 = lPath + "\\CatiaFiles\\MeshFiles\\" + MeshFile + "_JK.igs"
    partDocument1.ExportData(silo2, "igs")
    #x = x + 1
    return (MeshFile, span_ele_size, xs_seed)
def AgentDarwin(GENtable,varVar,varVal,varMin,varMax):
    """Genetic-algorithm driver: seed/continue a population and breed the next generation.

    Builds per-variable bounds, creates an initial random population in
    ``GENtable`` if none exists, triggers fitness evaluation via
    ``IDP_assistants.Linda``, then performs selection, crossover and
    mutation, and inserts the new generation back into SQL.

    Parameters:
        GENtable: SQL table holding individuals (one row per specimen).
        varVar: ordered list of iterated variable names (table columns).
        varVal: dict of all simulation variables (used for column counts).
        varMin / varMax: per-variable bounds; a False min marks a
            non-numeric (string/file) variable.

    Returns: None; all results are persisted to SQL.

    NOTE(review): several defects are visible in this function as written;
    they are flagged inline below rather than fixed, since the surrounding
    behavior could not be safely reconstructed.
    """
    #specie is the name of the optimisation -- therefore it can be continued if generation already exists
    population = 8
    lPath = os.path.dirname(os.path.abspath(__file__))
    #here obtain highest generation... instead of prescribing
    generation = 1
    specie = "test"
    #limits of variables - not physical, estimated at this point ~~~~~~
    # rows : variables : cs1,cs2,cs3,MS,NL
    # columns: min, max
    BCs = np.matrix([[0.000,0.000]])
    temp = np.matrix([[0.000,0.000]])
    i = 0
    #AFLe = 0
    varN = len(varVar)
    #NOTE(review): this loop never increments i as written -- infinite loop
    #unless an 'i = i + 1' was lost; confirm against the original source.
    #Also the seed [0,0] row of BCs is never deleted, so BCs row k maps to
    #variable k-1.
    while i < varN:
        if varMin[varVar[i]]!=False:
            #numeric variable: take bounds straight from varMin/varMax
            temp[0,0] = varMin[varVar[i]]
            temp[0,1] = varMax[varVar[i]]
            BCs = np.concatenate((BCs,temp),axis = 0)
        elif "airfoil" in varVar[i]:
            #airfoil variable: bound by the number of files in the collection
            img_folder_path = lPath+'\\aerofoilcollection\\'
            dirListing = os.listdir(img_folder_path)
            AFLS = len(dirListing)
            temp[0,0] = 0
            temp[0,1] = AFLS
            BCs = np.concatenate((BCs,temp),axis = 0)
            #AFLe = 1
        else:
            #0-10 now arbitrarily selected for string values (eg. material)
            #this needs to be replaced by a lookup function that checks the
            #number of airfoils
            temp[0,0] = 0
            temp[0,1] = 10
            BCs = np.concatenate((BCs,temp),axis = 0)
    #check if specie exists (let Steph do this?)
    cnnW,crrW = cnt_X('NCC')
    query = """SELECT * FROM ga1 where specie like '"""+specie+"""';"""
    crrW.execute(query)
    sd = []
    rows = crrW.fetchall()
    maxG = 0
    #creates a list of version numbers
    for row in rows:
        try:
            #highest version number is stored (row[2] assumed = generation -- TODO confirm schema)
            if row[2]> maxG:
                maxG = row[2]
            sd.append(row[2])
            #break
        except TypeError:
            print("No previous population, or other unaccounted error")
    # if no population exists:
    if is_empty(sd) ==True:
        specimen = np.zeros([population,np.size(BCs,0)])
        i = 0
        while i < population:
            #create new population -- number of individuals, fully random each variable in range
            ii = 0
            while ii < np.size(BCs,0):
                #NOTE(review): BCs(ii,1) CALLS the matrix -- np.matrix is not
                #callable, so this raises TypeError at runtime; BCs[ii,1] etc.
                #is almost certainly intended.
                specimen[i,ii] = (random.uniform(0,1))*(BCs(ii,1)-BCs(ii,0))+(BCs(ii,0))
                ii = ii + 1
            cnnW,crrW = cnt_X('NCC')
            # export the new generation into SQL table
            #filling SQL table with any number of columns used
            I = 0
            while I < 1:
                #just one new specimen created at a time?
                #sampleMAT[i,3] = int(sampleMAT[i,3])
                #NOTE(review): str(generation) is inserted into the COLUMN list
                #here -- the literal column name 'generation' looks intended.
                query = "INSERT INTO "+GENtable+"(specie,"+str(generation)+","
                II = 0
                while II < varN:
                    query += varVar[II]+","
                    II = II + 1
                query = query[:-1]
                query += ") VALUES("
                query += """'"""+specie+"""',"""+str(generation)+""","""
                II = 0
                while II < varN:
                    if "airfoil" in varVar[II]:
                        # Will raise StopIteration if you don't have x files
                        fileNO = int(specimen[I,II])
                        file1000 = next(itertools.islice(os.scandir(lPath+'\\aerofoilcollection\\'), fileNO, None)).path
                        file1000 = file1000.split(lPath+"\\aerofoilcollection\\")[1]
                        query += """'"""+str(file1000)+"""',"""
                    else:
                        query += str(specimen[I,II])+","
                    II = II + 1
                query = query[:-1]
                query += ");"
                crrW.execute(query)
                cnnW.commit()
                I = I + 1
            dc_X('NCC',cnnW,crrW)
            i= i +1
        maxG = 1
    #check which generation we are on, if this is 0 error has occured as at least 1 should exist now
    if maxG > 0:
        print("Current generation is ",maxG)
    else:
        print("something is clearly wrong")
        exit()
    # check for uncalculated individuals (value null), and calculate those
    query = """SELECT * FROM ga1 where fitness is null and generation = '"""+str(maxG)+"""';"""
    #print(query)
    crrW.execute(query)
    sd = []
    rows = crrW.fetchall()
    #close SQL handles
    dc_X('NCC',cnnW,crrW)
    #for row in rows: #error handling switched off for now
    #try:
    #STOPPED HERE IN TRANSLATION OF INFINITE VARIABLES
    #evaluate the simulations -- use fitness function to establish value of each individual
    IDP_assistants.Linda(generation,specie,GENtable,varVal,varVar)
    # if error during calculation assign value 0
    #except TypeError:
    #print("Error has occured, this individual is assumed 0 fitness")
    #recordID = row[0]
    #query = """UPDATE irut.ga1 SET fitness = """+str(0)+""", arunID = """+str(0)+""" Where (idnew_table = """+str(recordID)+""");"""
    #crrW.execute(query)
    #crrW.execute(query)
    #cnnW.commit()
    # obtain data from the last generation
    cnnW,crrW = cnt_X('NCC')
    query = """SELECT * FROM """+GENtable+""" where generation = '"""+str(maxG)+"""' and specie = '"""+specie+"""';"""
    crrW.execute(query)
    pop = np.zeros([1,len(varVal)])
    popi = np.zeros([1,len(varVal)])
    rows = crrW.fetchall()
    for row in rows:
        i = 0
        #NOTE(review): this inner loop also lacks a visible 'i = i + 1' --
        #infinite loop as written; confirm against the original source.
        while i < len(varVal):
            popi[0,i] = row[3+i]
        pop = np.concatenate((pop, popi), axis=0)
    pop = np.delete(pop, (0), axis=0)
    #pick suitable individuals
    #while the population is too large
    #NOTE(review): this rebinds pop to an INT (np.size(popi,0) == 1), so
    #pop.shape[0] below raises AttributeError; the assignment looks like a
    #leftover that should be removed.
    pop = np.size(popi,0)
    while pop.shape[0] > (population/2):
        i = 0
        current_min = 999
        #loop through current population available
        while i < pop.shape[0]:
            #indicate the lowest of the current population
            #NOTE(review): compares column 5 but records column 3+len(varVal),
            #and pop only has len(varVal) columns -- indices look inconsistent.
            if pop[i,5] < current_min:
                current_min = pop[i,3+len(varVal)]
                ii = i
            i = i + 1
        #delete the lowest of the current population
        pop = np.delete(pop, (ii), axis=0)
    #shuffles randomly the rows in pop
    np.random.shuffle(pop)
    #crossover
    # currently 4 offspring of 2 pairs,faster convergence is expected, other alternatives can be tested
    newpop = np.zeros([1,len(varVal)])
    newpopS = np.zeros([1,len(varVal)])
    i = int(0)
    while i < pop.shape[0]/2:
        #second match from the other end of matrix
        y = pop.shape[0]-1 - i
        iii = 0
        while iii < 4:
            ii = 0
            #fist child of parents i
            while ii < len(varVal):
                #randomise which parent fits the bill
                Parent = (random.randint(0, 1))
                if Parent == 0:
                    s = int(y)
                if Parent == 1:
                    s = int(i)
                newpopS[0,ii] = pop[s,ii]
                ii = ii + 1
            newpop = np.concatenate((newpop, newpopS), axis=0)
            iii = iii + 1
        i = i + 1
    newpop = np.delete(newpop, (0), axis=0)
    #randomely select a gene in population to mutate
    #this will need rework for more variables -- matrix of limits~~~~~~~~~~~~~~~~~~~~~
    #the mutation rate decreases with generations
    mutR = 5 - maxG
    i = 0
    while i < mutR:
        member = random.randint(0, (population-1))
        #NOTE(review): randint is inclusive, so trait can equal len(varVal) --
        #potential out-of-range index into newpop's columns.
        trait = random.randint(0,len(varVal))
        MutVar = (random.uniform(0,1))*(BCs[trait,1]-BCs[trait,0])+(BCs[trait,0])
        newpop[member,trait] = MutVar
        i = i + 1
    print(newpop)
    i = 0
    maxG = maxG + 1
    cnnW,crrW = cnt_X('NCC')
    while i < newpop.shape[0]:
        # export the new generation into SQL table
        #filling SQL table with any number of columns used
        I = 0
        while I < 1:
            #just one new specimen created at a time?
            #sampleMAT[i,3] = int(sampleMAT[i,3])
            #NOTE(review): as above, str(maxG) lands in the COLUMN list.
            query = "INSERT INTO "+GENtable+"(specie,"+str(maxG)+","
            II = 0
            while II < varN:
                query += varVar[II]+","
                II = II + 1
            query = query[:-1]
            query += ") VALUES("
            query += """'"""+specie+"""',"""+str(generation)+""","""
            II = 0
            while II < varN:
                if "airfoil" in varVar[II]:
                    # Will raise StopIteration if you don't have x files
                    #NOTE(review): this export loop reads specimen[I,II] (the
                    #initial random population, and only defined when seeding)
                    #instead of newpop[i,II] -- the bred generation appears
                    #never to be written out; confirm intent.
                    fileNO = int(specimen[I,II])
                    file1000 = next(itertools.islice(os.scandir(lPath+'\\aerofoilcollection\\'), fileNO, None)).path
                    file1000 = file1000.split(lPath+"\\aerofoilcollection\\")[1]
                    query += """'"""+str(file1000)+"""',"""
                else:
                    query += str(specimen[I,II])+","
                II = II + 1
            query = query[:-1]
            query += ");"
            crrW.execute(query)
            cnnW.commit()
            I = I + 1
        i = i + 1
    dc_X('NCC',cnnW,crrW)
def baseBraid(varVal, CADfile, MeshFile):
    """Run the braiding simulation for a mesh and persist point-by-point results.

    Creates a dedicated SQL table for the braid point cloud, logs the run
    parameters in ``BraidMain``, then simulates every yarn (warp and weft)
    on the mandrel surface via ``poc`` and stores each point row.

    Parameters:
        varVal: variables dict; reads 'mandrel_speed', 'spools',
            'guide_rad', 'IMD'.
        CADfile: source CAD name; its first three "_" segments form the
            braid file name stem.
        MeshFile: mesh whose exported node array (<MeshFile>_nodes.npy)
            defines the mandrel surface.

    Returns:
        BraidFile -- the new braid file / SQL table name.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    st1 = time.time()
    #mandrel surface nodes exported by the meshing step
    MD = np.load(lPath + "\\catiafiles\\meshfiles\\" + MeshFile + "_nodes.npy")
    MS = varVal["mandrel_speed"]
    spoolsPhy = varVal['spools']
    rotax = 0.15
    #travel per second (rad/s)
    #0.11 for braid comparison oscar
    #ammending the number of iterations required based on expected braid angles
    ratio2 = MS / rotax
    spoolsWa = max(4, int(0.8 * ratio2))
    print(spoolsWa)
    trimfile = CADfile.split("_")[0]+"_"+CADfile.split("_")[1]+"_"+CADfile.split("_")[2]+"_"
    #Checks SQL braiding simulations table for latest version name.
    bIT = maxVersion(trimfile)
    #If function addpats number into correct filename segment (zero-padded to 3 digits).
    if bIT < 10:
        vn = "00" + str(int(bIT))
    elif bIT < 100:
        vn = "0" + str(int(bIT))
    else:
        vn = str(int(bIT))
    vn = "B" + vn
    BraidFile = trimfile + vn
    print(BraidFile)
    #Create a table to store braidign point by point data
    GENfile = BraidFile
    #CADfile.replace("_A0", "_X0")
    cnnB, crrB = cnt_X('NCC')
    query = "CREATE TABLE "
    query += GENfile
    query += "(id int IDENTITY(1,1) PRIMARY KEY,YARN integer,x numeric(8,3),y numeric(8,3),z numeric(8,3),bAngle numeric(4,2),xN numeric(8,3),yN numeric(8,3),zN numeric(8,3),pitch numeric(8,3),iteration_time numeric(8,4),warpORweft integer)"
    crrB.execute(query)
    cnnB.commit()
    #upload the high level braiding information to main braiding table
    query = "INSERT INTO BraidMain(GENfile,version, spoolsWa, rota, travel, GuideRadius,InitialMandrelDistance,simulation_time) VALUES("
    query += """'""" + GENfile + """',""" + str(bIT) + """,""" + str(
        spoolsWa) + """,""" + str(rotax) + """,""" + str(
            varVal["mandrel_speed"]) + """,""" + str(
                varVal["guide_rad"]) + """,""" + str(
                    varVal["IMD"]) + """,""" + str(0) + """)"""
    crrB.execute(query)
    cnnB.commit()
    dc_X('NCC', cnnB, crrB)
    #find points on mandrel centreline
    datum, cdArr = IDP_geometry.centreline(MD)
    print(datum)
    #WW = 0 -> warp pass, WW = 1 -> weft pass
    WW = 0
    while WW < 2:
        YARN = 0
        #for each yarn
        while YARN < spoolsWa:
            #following function simulates positioning of a specific yarn on mandrel surface
            pocList = poc(MD, varVal, YARN, WW, spoolsWa, spoolsPhy, datum,
                          cdArr, CADfile, rotax)
            cnnB, crrB = cnt_X('NCC')
            i = 0
            while i < np.size(pocList, 0):
                #pocList decomposed for clarity of the script
                x = pocList[i, 1]
                y = pocList[i, 2]
                z = pocList[i, 3]
                xN = pocList[i, 4]
                yN = pocList[i, 5]
                zN = pocList[i, 6]
                bAngle = pocList[i, 7]
                pitch = pocList[i, 9]
                tt = pocList[i, 8]
                query = "INSERT INTO " + GENfile + "(YARN,x,y,z,bAngle,xN,yN,zN,pitch,iteration_time,warpORweft) VALUES("
                query += str(YARN) + "," + str(x) + "," + str(y) + "," + str(
                    z) + "," + str(bAngle) + "," + str(xN) + "," + str(
                        yN) + "," + str(zN) + "," + str(pitch) + "," + str(
                            tt) + "," + str(WW) + ")"
                #print(query)
                crrB.execute(query)
                i = i + 1
            #one commit per yarn keeps the insert loop fast
            cnnB.commit()
            dc_X('NCC', cnnB, crrB)
            YARN = YARN + 1
        WW = WW + 1
    bstt = time.time() - st1
    cnnB, crrB = cnt_X('NCC')
    print("Total braiding simulation time:--- %s seconds ---" % (bstt))
    #NOTE(review): this INSERT creates a NEW BraidMain row holding only the
    #simulation time, instead of UPDATEing the row created above for this
    #GENfile -- looks unintended; confirm.
    query = "INSERT INTO BraidMain(simulation_time) VALUES(" + str(bstt) + ")"
    crrB.execute(query)
    cnnB.commit()
    dc_X('NCC', cnnB, crrB)
    return (BraidFile)
def mRTM(MeshFile,BraidFile,resin,varVal):
    """Prepare inputs for and run the Visual-RTM resin-infusion simulation.

    Writes the mesh-info and flow-rate matrix files, assigns a version
    number from the ``rtm_main`` table, writes the Visual-RTM control file,
    runs the surface-extraction / model-generation / solver session scripts
    via VEBatch, polls for results, and stores runtime, infusion time and
    fill percentage back into ``rtm_main``.

    Parameters:
        MeshFile: mesh record name (keys mesh_list and rtm_main).
        BraidFile: braid record name; its 4th "_" segment tags the RTM file.
        resin: resin material name.
        varVal: variables dict; reads flow/temperature/pressure settings,
            'no_layers' and 'span'.

    Returns:
        RTMFile -- the name of the completed RTM iteration.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #This function prepares input information for RTM simulation and then
    #runs the simulation.
    #In many places phrase "ACTIVATE FOR SQL USAGE:" is used. Those sections
    #are required for interface with MySQL database.
    REP = 1
    st393 = time.time()
    #ACTIVATE FOR SQL USAGE:
    conn,cursor = cnt_X('NCC')
    query = """SELECT span_ele_size, xs_seed FROM mesh_list where MeshFile = '"""+MeshFile+"""' ;"""
    print(query)
    cursor.execute(query)
    #get mesh related info
    sd = np.zeros(2)
    #ACTIVATE FOR SQL USAGE:
    rows = cursor.fetchall()
    #REPLACE THE sd[] VALUES
    for row in rows:
        #for no. 147
        sd[0] = row[0]
        #130
        sd[1] = row[1]
    #for nu. 158:
    #sd[0] = 126
    #sd[1] = 38
    #ACTIVATE FOR SQL USAGE:
    dc_X('NCC',conn,cursor)
    print(sd)
    #One of the input files for visual-rtm scripts
    np.save(lPath+"\\temporary\\mesh_info.npy", sd)
    #Flow matrix baseline
    flowM = np.zeros([1000,12])
    flowM = flowM + (varVal['flow_rate'])*varVal['no_layers']
    np.save(lPath+"\\temporary\\flowMAT.npy", flowM)
    #find the highest iteration in the MySQL table
    #the input to sim, through SQL
    conn,cursor = cnt_X('NCC')
    #looks for other analysis of the same source geomertry
    here = """'%"""+MeshFile+"""%'"""
    here1 = """'%"""+BraidFile+"""%'"""
    query = "SELECT version FROM rtm_main where MeshFile like "+here+" and BraidFile like "+here1
    cursor.execute(query)
    #get results
    sd = []
    rows = cursor.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sd.append(int(row[0]))
            #break
        except TypeError:
            print("x")
    #highest version number is stored
    if is_empty(sd) ==True:
        maxNo = 0
    else:
        maxNo = max(sd)
    #Next version number - used for this analysis. Can be taken from MySQL, if
    #active.
    #NOTE(review): sibling functions (e.g. MultiMesh) use maxNo + 1 for the
    #next version; here version = maxNo would reuse/overwrite the latest
    #version -- confirm intended (the '#32' suggests a manual override spot).
    version = maxNo#32
    print(version)
    #File definition based on version number (zero-padded to 3 digits).
    if version < 10:
        vn = "00"+str(int(version))
    elif version <100:
        vn = "0"+str(int(version))
    else:
        vn = str(int(version))
    #Define the name of new RTM iteration.
    BraidSection = BraidFile.split("_")[3]
    RTMF = MeshFile+"_"+BraidSection+"_R"+vn
    RTMFile = RTMF+"_"+str(REP)
    #Upload input data
    query = "INSERT INTO rtm_main(MeshFile,BraidFile,RTMFile,resin,version,Injection_T,Tool_T,Injection_P,Vent_P,Flow_rate) VALUES("
    query += """'"""+MeshFile+"""','"""+BraidFile+"""','"""+RTMFile+"""','"""+resin+"""',"""+str(version)+""","""+str(varVal['inlet_temp'])+""","""+str(varVal['tool_temp'])+""","""+str(varVal['inlet_pressure'])+""","""+str(varVal['vent_pressure'])+""","""+str(varVal['flow_rate'])+""")"""
    print(query)
    cursor.execute(query)
    conn.commit()
    #close SQL handles
    dc_X('NCC',conn,cursor)
    #Create main input file for visual-RTM ("---"-delimited control line).
    STRx = MeshFile+"---"+RTMFile+"---"+resin+"---"+str(varVal['inlet_temp'])+"---"+str(varVal['tool_temp'])+"---"+str(varVal['inlet_pressure'])+"---"+str(varVal['vent_pressure'])+"---"+str(varVal['flow_rate'])+"---"+str(st393)+"---"+str(vn)+"---"+str("0")+"---"+str(RTMF)+"---"+str(varVal['no_layers'])+"---"+str(varVal['span'])
    with open("Temporary\\RTM_in.txt", "w") as text_file:
        text_file.write(STRx)
    #The list of surfaces is created with their 3D coordinates.
    #Can be hashed if the same part is being re-analysed.
    cmd("VEBatch -activeconfig Trade:CompositesandPlastics -activeapp VisualRTM -sessionrun "+lPath+"\\RTM_surfaces.py")
    #Only for troubleshooting.
    surf_mat = np.load(lPath+"\\temporary\\RTM_surfaces.npy")
    c = np.size(surf_mat,0)
    print(c,"how many elements?")
    #This is the main section, runs the visual-rtm model generation script.
    cmd("VEBatch -activeconfig Trade:CompositesandPlastics -activeapp VisualRTM -sessionrun "+lPath+"\\RTM_toolbox.py")
    cmd2("VEBatch -activeconfig Trade:CompositesandPlastics -activeapp VisualRTM -sessionrun "+lPath+"\\RTM_run.py",RTMFile)
    #For shorter runs RTM_lil_toolbox can be used.
    #(#REP must be adjusted manually)
    #REP=6
    #RTMFile = RTMF+"_"+str(REP)
    #cmd2("VEBatch -activeconfig Trade:CompositesandPlastics -activeapp VisualRTM -sessionrun D:\\IDPcode\\SpecialRTMTestIDP\\IDP_zip_2.0\\IDPcode\\RTM_lil_toolbox.py",RTMFile)
    #Do post processing: poll every 30 s until the solver output exists
    results = False
    while results == False:
        try:
            maxFill,I_time = RTM_postProc.outputS1(RTMFile)
            results = True
        #NOTE(review): bare except also swallows KeyboardInterrupt and real
        #errors in outputS1, and there is no timeout -- this can spin
        #forever if the solver died; consider narrowing the exception.
        except:
            print("Result not available yet x, waiting")
            time.sleep(30)
            pass
    #When flow rate adjustments do not improve flow front, "Unmoved" is increased
    #which makes the simulaiton progress with imperfect flow front.
    #UNmoved = 0
    #the following section is used for flow front improvement, isn't really needed
    '''
    # Add the looping in case of unfilled.
    # Current method attempts to make the flow front more uniform.
    if maxFill < 99:
        FF_check = False
    else:
        FF_check = True
    while FF_check == False:
        RTMFile = RTMF+"_"+str(REP)
        print(type(RTMFile),"RTMFile",RTMFile)
        print(type(I_time),"I_time",I_time)
        FF_check, UNmoved = RTM_postProc.ff_check(RTMFile,I_time,UNmoved,RTMF)
        print("Unmoved:",UNmoved)
        if FF_check == False:
            #if ff_check revealed issue the simulation is to be re-run with new flow_front velocities
            #delete old flow result file here:
            fif = RTMFile+".log"
            path = 'D:\\IDPcode\\pamrtm\\mainSimFiles\\'
            for i in os.listdir(path):
                if os.path.isfile(os.path.join(path,i)) and str(fif) in i:
                    shutil.move(os.path.join("D:\\IDPcode\\pamrtm\\mainSimFiles\\",i), os.path.join("D:\\IDPcode\\pamrtm\\mainSimFiles\\trash\\", i))
            REP = REP + 1
            RTMFile = RTMF+"_"+str(REP)
            #Create main input file for visual-RTM.
            STRx = MeshFile+"---"+RTMFile+"---"+resin+"---"+str(I_T)+"---"+str(T_T)+"---"+str(I_P)+"---"+str(V_P)+"---"+str(FR)+"---"+str(st393)+"---"+str(vn)+"---"+str("0")+"---"+str(RTMF)
            with open("Temporary\\RTM_in.txt", "w") as text_file:
                text_file.write(STRx)
            for i in os.listdir(path):
                if os.path.isfile(os.path.join(path,i)) and 'FILLING_FACTOR' in i:
                    shutil.move(os.path.join("D:\\IDPcode\\pamrtm\\mainSimFiles\\",i), os.path.join("D:\\IDPcode\\pamrtm\\mainSimFiles\\trash\\", i))
            #Run the same simulation with altered flow-rate matrix.
            cmd2("VEBatch -activeconfig Trade:CompositesandPlastics -activeapp VisualRTM -sessionrun D:\\IDPcode\\RTM_lil_toolbox.py",RTMFile)
            results = False
            while results == False:
                try:
                    maxFill,I_time = RTM_postProc.outputS1(RTMFile)
                    results = True
                except:
                    print("Result not available yet, waiting")
                    time.sleep(30)
                    pass
    '''
    simTime = time.time() - st393
    cnnW,crrW = cnt_X('NCC')
    #stores relevant results of the FE analysis
    #NOTE(review): vn is a zero-padded string ('001'); interpolating it into
    #"version = 001" relies on the SQL engine's numeric coercion -- fragile.
    query = "UPDATE rtm_main SET sim_runtime = "+str(simTime)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+vn+";"
    crrW.execute(query)
    cnnW.commit()
    query = "UPDATE rtm_main SET infusionTime = "+str(I_time)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+vn+";"
    crrW.execute(query)
    cnnW.commit()
    query = "UPDATE rtm_main SET infusionPercentage = "+str(maxFill)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+vn+";"
    crrW.execute(query)
    cnnW.commit()
    #close SQL handles
    dc_X('NCC',cnnW,crrW)
    print("Total RTM sim time:--- %s seconds ---" % (time.time() - st393))
    #mf,time = outputS1()
    #print("Fill:",mf)
    #print("Infusion time:",time)
    #
    #unused?
    #RTM_postProc.cmdReach(RTMFile)
    #with open(lPath+"\\pamrtm\\mainSimFiles\\FILLING_FACTOR124.txt", "r") as fin:
    #    data = fin.read().splitlines(True)
    #with open(lPath+'\\pamrtm\\mainSimFiles\\FILLING_FACTOR125.txt', 'w') as fout:
    #    fout.writelines(data[14:])
    return(RTMFile)
def SingleLoop(varVal):
    """Run one complete design-analysis iteration for the variable set *varVal*.

    Pipeline stages, in order: CAD generation (CATIA) -> meshing (CATIA or
    numimesh) -> braiding simulation -> structural FE (abaMain/Abaqus) ->
    optional resin-infusion simulation (mRTM, only when fitness > 0.9).

    Parameters:
        varVal: dict of design variables (e.g. "mesh_size", "mandrel_speed",
                "matrix", "reinforcement") consumed by the sub-modules.

    Returns:
        (pher, arunID): fitness value computed by AgentTyrael, and the SQL
        id (arun.idARUN) of the analysis-run record created by this call.

    Side effects: inserts/updates rows in the `arun` table, starts and kills
    CNEXT.exe (CATIA), and appends progress lines to temporary\\underground.txt.

    NOTE(review): all SQL here is built by string concatenation.  The inputs
    are internally generated file names, but if any of them ever become
    user-controlled this is an injection risk - consider parameterized
    queries.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    #input: variable variation (5 atm)
    #this is where the loop really starts.
    #The project and part should be the same for all parts subject to same
    #optimisation loop.
    part = "UAV4"
    project = "IDP"
    #If "new" is set, the corresponding simulation is run.
    #If an existent name of file is set, the simulation just records
    #the name of the file and proceeds to next simulation
    CADfile = "new"
    BraidFile = "new"
    #Optional specification lines:
    #BraidFile = "IDP_spar_A147_b001"
    #CADfile = "IDP_spar_A147_JK"
    #CADfile = "IDP_oscar_A044_NA"
    #cannot have old braiding without old CADfile
    #NOTE(review): with both names hard-coded to "new" above, this guard is
    #currently unreachable; it only fires if the optional lines are enabled.
    if BraidFile != "new" and CADfile == "new":
        print("cannot have old braiding simulation with new CAD")
        quit()
    MeshFile = "new"
    #MeshFile = "IDP_Spar_A147_M001"
    st999 = time.time()  # wall-clock start of the whole iteration
    print(datetime.datetime.now())
    cnnC, crrC = cnt_X('NCC')
    #SETUP NEW ENTRY IN SQL
    #looks for other analysis of the same source geomertry
    query = """SELECT Iteration_count FROM arun where project like '"""\
        +project+"""' and part like '"""+part+"""'"""
    crrC.execute(query)
    #get results
    sd = []
    rows = crrC.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sd.append(int(row[0]))
            #break
        except TypeError:
            #NULL iteration counts are skipped, not fatal
            print("x")
    #highest version number is stored
    if is_empty(sd) == True:
        maxNo = 0
    else:
        maxNo = max(sd)
    #next version number - used for this analysis
    Iteration_count = maxNo + 1
    #input information into SQL
    query = "INSERT INTO arun(Project,part,Iteration_count) VALUES("
    query += """'""" + project + """','""" + part + """',""" + str(
        Iteration_count) + """)"""
    crrC.execute(query)
    cnnC.commit()
    #close SQL handles
    dc_X('NCC', cnnC, crrC)
    with open(lPath + "\\temporary\\underground.txt", "a") as text_file:
        text_file.write("Generating CAD shape.\n")
    CATbin = True
    #key12345
    #MAIN BODY OF THE SCRIPT
    #NOTE(review): CATbin is hard-coded True, so this branch always runs.
    if CATbin == True:
        if CADfile == "new":
            #start CATIA and run the script
            os.startfile(
                r"C:\Program Files\Dassault Systemes\B27\win_b64\code\bin\CNEXT.exe"
            )
            CADfile = SingleCAD(project, part, varVal)
            os.system("TASKKILL /F /IM Cnext.exe")
        #update SQL after CAD creation
        cnnC, crrC = cnt_X('NCC')
        query = """UPDATE arun SET CADfile = '"""+str(CADfile)+"""'"""\
            """WHERE (part = '"""+part+"""') and (project = '"""+project+"""')"""\
            """and (iteration_count = """+str(Iteration_count)+""");"""
        crrC.execute(query)
        cnnC.commit()
        #close SQL handles
        dc_X('NCC', cnnC, crrC)
        with open(lPath + "\\temporary\\underground.txt", "a") as text_file:
            text_file.write("Running braiding simulation.\n")
        #Braiding and meshing could be run in parallel - if separate catia
        #machine is available.
        print("CAD module finished " + CADfile + ", commencing Meshing module")
        #meshType should be selected in the GUI eventually...
        #also the sizes available with meshTypes differ
        meshType = "numimesh"
        if MeshFile == "new":
            if meshType == "CATIA":
                os.startfile(
                    r"C:\Program Files\Dassault Systemes\B27\win_b64\code\bin\CNEXT.exe"
                )
                MeshFile, span_ele_size, xs_seed = MultiMesh(CADfile, varVal)
                os.system("TASKKILL /F /IM Cnext.exe")
            elif meshType == "numimesh":
                MeshFile, span_ele_size, xs_seed = numimesh.XSP(varVal, CADfile)
            #Open and close MySQL connection.
            cnnC, crrC = cnt_X('NCC')
            query = """UPDATE arun SET span_ele_size = '"""+str(span_ele_size)+\
                """' WHERE (part = '"""+part+"""') and (project = '"""+project+"""')\
                and (iteration_count = """+str(Iteration_count)+""");"""
            crrC.execute(query)
            cnnC.commit()
            query = """UPDATE arun SET xs_seed = '"""+str(xs_seed)+"""'\
                WHERE (part = '"""+part+"""') and (project = '"""\
                +project+"""') and (iteration_count = """\
                +str(Iteration_count)+""");"""
            crrC.execute(query)
            cnnC.commit()
        else:
            #Mesh reused: copy the stored seed/element size from the earlier
            #run of the same MeshFile into the new arun row.
            #Open and close MySQL connection.
            cnnC, crrC = cnt_X('NCC')
            query = """SELECT xs_seed FROM arun where MeshFile = '"""\
                +MeshFile+"""' and xs_seed is not null;"""
            crrC.execute(query)
            rows = crrC.fetchall()
            xs_seed = 0
            for row in rows:
                xs_seed = float(row[0])
            query = """UPDATE arun SET xs_seed = '"""+str(xs_seed)+"""'\
                WHERE (part = '"""+part+"""') and (project = '"""+project+\
                """') and (iteration_count = """+str(Iteration_count)+\
                """);"""
            crrC.execute(query)
            cnnC.commit()
            query = """SELECT span_ele_size FROM arun where MeshFile = '"""\
                +MeshFile+"""' and span_ele_size is not null;"""
            crrC.execute(query)
            rows = crrC.fetchall()
            span_ele_size = 0
            for row in rows:
                span_ele_size = float(row[0])
            query = """UPDATE arun SET span_ele_size = '"""+str(span_ele_size)\
                +"""' WHERE (part = '"""+part+"""') and (project = '"""\
                +project+"""') and (iteration_count = """\
                +str(Iteration_count)+""");"""
            crrC.execute(query)
            cnnC.commit()
        #meshing data into SQL
        query = """UPDATE arun SET MeshFile = '""" + str(MeshFile) + """'\
            WHERE (part = '""" + part + """') and (project = '""" + project + """')\
            and (iteration_count = """ + str(Iteration_count) + """);"""
        crrC.execute(query)
        cnnC.commit()
        #close SQL handles
        dc_X('NCC', cnnC, crrC)
        print("Meshing module finished " + MeshFile + ", commencing Braiding module")
        if BraidFile == "new":
            #Success check, if braiding below allowed braid angle the
            #braid sim has to be re-run.
            #use this as selection later?
            braid = "P"
            if braid == "P":
                BraidFile = Braid_CMD_P.baseBraid(varVal, CADfile, MeshFile)
            elif braid == "C":
                MS = varVal[
                    'mandrel_speed']  # relic from original braiding simulation
                success = 0
                while success == 0:
                    os.startfile(
                        r"C:\Program Files\Dassault Systemes\B27\win_b64\code\bin\CNEXT.exe"
                    )
                    BraidFile, success, MS = Braid_CMD_C.baseBraid(
                        CADfile, MS, varVal)
                    os.system("TASKKILL /F /IM Cnext.exe")
        else:
            #Reuse root_perimeter if braiding simulation is not run.
            #Open and close MySQL connection.
            cnnC, crrC = cnt_X('NCC')
            query = """SELECT root_perimeter FROM arun where Cadfile = '"""\
                +CADfile+"""' and root_perimeter is not null;"""
            crrC.execute(query)
            rows = crrC.fetchall()
            root_perimeter = 0
            for row in rows:
                root_perimeter = float(row[0])
            query = """UPDATE arun SET root_perimeter = '"""+str(root_perimeter)\
                +"""' WHERE (part = '"""+part+"""') and (project = '"""\
                +project+"""') and (iteration_count = """\
                +str(Iteration_count)+""");"""
            crrC.execute(query)
            cnnC.commit()
            #Populate the SQL row with suitable braid data.
            #close SQL handles
            dc_X('NCC', cnnC, crrC)
        #give it time to close pulse
        time.sleep(5)
        #Open and close MySQL connection.
        cnnC, crrC = cnt_X('NCC')
        query = """UPDATE arun SET BraidFile = '"""+str(BraidFile)+"""'"""\
            """WHERE (part = '"""+part+"""') and (project = '"""+project+"""')"""\
            """and (iteration_count = """+str(Iteration_count)+""");"""
        crrC.execute(query)
        cnnC.commit()
        #close SQL handles
        dc_X('NCC', cnnC, crrC)
    with open(lPath + "\\temporary\\underground.txt", "a") as text_file:
        text_file.write("Meshing.\n")
    print("Braiding module finished " + BraidFile + ", commencing FE module")
    maxDeflection, mass, FeFile = abaMain(BraidFile, MeshFile, CADfile, varVal, meshType, xs_seed)
    print("Maximum deflection with current setup is " + str(maxDeflection))
    #fitness from deflection + mass
    pher = AgentTyrael(maxDeflection, mass)
    #FE SQL entry
    cnnC, crrC = cnt_X('NCC')
    query = """UPDATE arun SET FeFile = '""" + str(FeFile) + """'\
        WHERE (part = '""" + part + """') and (project = '""" + project + """')\
        and (iteration_count = '""" + str(Iteration_count) + """');"""
    crrC.execute(query)
    cnnC.commit()
    query = """UPDATE arun SET date = GetDate() WHERE (part = '"""+part+"""')\
        and (project = '"""+project+"""') and (iteration_count = """\
        +str(Iteration_count)+""");"""
    crrC.execute(query)
    cnnC.commit()
    query = """UPDATE arun SET pher = '""" + str(pher) + """' \
        WHERE (part = '""" + part + """') and (project = '""" + project + """')\
        and (iteration_count = """ + str(Iteration_count) + """);"""
    crrC.execute(query)
    cnnC.commit()
    query = """SELECT idARUN FROM arun where Cadfile = '""" + CADfile + """'\
        and (iteration_count = """ + str(Iteration_count) + """);"""
    crrC.execute(query)
    rows = crrC.fetchall()
    root_perimeter = 0
    for row in rows:
        arunID = int(row[0])
    #close SQL handles
    dc_X('NCC', cnnC, crrC)
    #Abaqus analysis finished, commencing infusion simulation.
    #______insert RTM_main here .... (number of layers, name of part)
    #toggle if I want to run RTM simulation (maybe set a fitness treshold?)
    if pher > 0.9:
        RTMsim = True
    else:
        RTMsim = False
    if RTMsim == True:
        os.startfile(
            r"C:\Program Files\Dassault Systemes\B27\win_b64\code\bin\CNEXT.exe"
        )
        #RTM needs a coarser mesh: temporarily clamp mesh_size to >= 5,
        #remesh, then restore the caller's value
        hold = varVal["mesh_size"]
        if varVal["mesh_size"] < 5:
            varVal["mesh_size"] = 5
        MeshFile, span_ele_size, xs_seed = MultiMesh(CADfile, varVal)
        os.system("TASKKILL /F /IM Cnext.exe")
        varVal["mesh_size"] = hold
        with open(lPath + "\\temporary\\underground.txt", "a") as text_file:
            text_file.write("Running flow simulation.\n")
        print("FE module finished " + FeFile + ", commencing infusion module")
        #you need a resin for both structural and rtm analysis... no data yet
        RTMfile = mRTM(MeshFile, BraidFile, "Hexion_9600", varVal)
        cnnC, crrC = cnt_X('NCC')
        query = """UPDATE arun SET RTMfile = '""" + str(RTMfile) + """'\
            WHERE (part = '""" + part + """') and (project = '""" + project + """')\
            and (iteration_count = """ + str(Iteration_count) + """);"""
        crrC.execute(query)
        cnnC.commit()
        #close SQL handles
        dc_X('NCC', cnnC, crrC)
    simulation_time = time.time() - st999
    cnnC, crrC = cnt_X('NCC')
    query = """UPDATE arun SET simulation_time = """+str(simulation_time)\
        +""" WHERE (part = '"""+part+"""') and (project = '"""\
        +project+"""') and (iteration_count = """\
        +str(Iteration_count)+""");"""
    crrC.execute(query)
    cnnC.commit()
    dc_X('NCC', cnnC, crrC)
    simulation_time = time.time() - st999
    #time also into sql
    print("Combined simulation time:--- %s seconds ---" % (simulation_time))
    #add overall fitness result to the SQL
    with open(lPath + "\\temporary\\underground.txt", "a") as text_file:
        text_file.write("atm:" + str(datetime.datetime.now()) + "\n")
    #If pre-GUI scripts are used MS should be passed back as well
    #Switch on the below to avoid running in certain hours
    #datetime.datetime.today()
    #datetime.datetime(2012, 3, 23, 23, 24, 55, 173504)
    #dtt = datetime.datetime.today().weekday()
    #now = datetime.datetime.now()
    #
    #while dtt == 4 and (6 < now.hour < 17):
    #    print("day:"+str(dtt))
    #    print("hour:"+str(now.hour))
    #    time.sleep(300)
    return (pher, arunID)
def abaMain(BraidFile,MeshFile,CADfile,varVal,meshType,XSS):
    """Run the structural FE (Abaqus) stage for one iteration.

    Creates a new versioned `fe_inst` SQL record, hands the run parameters
    to the Abaqus pre-/post-processing scripts through Temporary\\fe_in.txt,
    executes them via the command line, reads the results back from the
    temporary output files and stores them in SQL.

    Parameters:
        BraidFile, MeshFile, CADfile: file identifiers from earlier stages.
        varVal: design-variable dict; reads "no_layers", "matrix",
                "reinforcement" and "mesh_size".
        meshType: mesh generator used ("CATIA" or "numimesh").
        XSS: cross-section seed, forwarded to the Abaqus input file.

    Returns:
        (flval, mass, FeFile): maximum deflection rounded to 3 d.p.,
        part mass, and the FE file identifier created for this run.

    Fix vs. original: the two result files were opened with bare open()
    and never closed (leaked handles); they are now read inside `with`
    blocks so the handles are released before the files are rewritten.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    st986 = time.time()
    #runs from python, it is used to run other abaqus related scripts through command line
    #SQL is used here to store iteration data, while the inputs and outputs to command line scripts are done through temporary text files
    NL = varVal['no_layers']
    #the setup - variables etc.
    matrix = varVal["matrix"]
    fibre =varVal["reinforcement"]
    Material = matrix +"||"+fibre
    spanwise_sections = cnn_main.aba_inputProp(BraidFile,CADfile,varVal)
    #other variables
    fN = -1 #total Newton force applied at the tip
    #the input to sim, through SQL
    cnnA,crrA = cnt_X('NCC')
    #looks for other analysis of the same source geomertry
    here = """'%"""+MeshFile+"""%'"""
    here1 = """'%"""+BraidFile+"""%'"""
    query = "SELECT version FROM fe_inst where MeshFile like "+here+" and BraidFile like "+here1
    #print(query)
    crrA.execute(query)
    #get results
    sd = []
    rows = crrA.fetchall()
    #creates a list of version numbers
    for row in rows:
        try:
            sd.append(int(row[0]))
            #break
        except TypeError:
            #NULL versions are skipped, not fatal
            print("x")
    #highest version number is stored
    if is_empty(sd) ==True:
        maxNo = 0
    else:
        maxNo = max(sd)
    #next version number - used for this analysis
    version = maxNo + 1
    #file definition based on version number: zero-pad to three digits
    if version < 10:
        vn = "00"+str(int(version))
    elif version <100:
        vn = "0"+str(int(version))
    else:
        vn = str(int(version))
    BraidSection = BraidFile.split("_")[3]
    FeFile = MeshFile+"_"+BraidSection+"_F"+ vn
    #input information into SQL
    query = "INSERT INTO fe_inst(MeshFile,BraidFile,material,FeFile,version,no_layers,force_N) VALUES("
    query += """'"""+MeshFile+"""','"""+BraidFile+"""','"""+Material+"""','"""+FeFile+"""',"""+str(version)+""","""+str(NL)+""","""+str(fN)+""")"""
    crrA.execute(query)
    cnnA.commit()
    #close SQL handles
    dc_X('NCC',cnnA,crrA)
    #all input information for pre-processing passed through this text file
    STRx = MeshFile+"---"+FeFile+"---"+str(NL)+"---"+str(fN)+"---"+meshType+"---"+str(varVal["mesh_size"])+"---"+str(XSS)
    with open("Temporary\\fe_in.txt", "w") as text_file:
        text_file.write(STRx)
    #run pre-processing abaqus script
    cmd("abaqus cae noGUI=abaqus_inst.py")
    #move generated files into temporary folder, from the script folder
    #path = 'D:\\IDPcode\\'
    for i in os.listdir(lPath):
        if os.path.isfile(os.path.join(lPath,i)) and 'Task-1.odb' in i:
            shutil.move(os.path.join(lPath+"\\",i), os.path.join(lPath+"\\Temporary\\", i))
    #run post-processing abaqus script
    cmd("abaqus cae noGUI=abaqus_postProc.py")
    #obtains relevant information from abaqus results
    #(read inside `with` so the handles are closed before the files are
    # truncated/rewritten below - the original leaked both handles)
    with open(lPath+"\\Temporary\\fe_out.txt", "rt") as fl:
        flstr = fl.read()
    flval = float(flstr)
    flval = round(flval, 3)
    with open(lPath+"\\Temporary\\mass_out.txt","rt") as fl:
        flstr = fl.read()
    flval2 = float(flstr)
    mass = flval2
    cnnA,crrA = cnt_X('NCC')
    #stores relevant results of the FE analysis
    query = "UPDATE fe_inst SET max_deflection = "+str(flval)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+str(version)+";"
    crrA.execute(query)
    cnnA.commit()
    query = "UPDATE fe_inst SET mass = "+str(flval2)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+str(version)+";"
    crrA.execute(query)
    cnnA.commit()
    query = "UPDATE fe_inst SET spanwise_sections = "+str(spanwise_sections)+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+str(version)+";"
    crrA.execute(query)
    cnnA.commit()
    query = "UPDATE fe_inst SET simulation_time = "+str((time.time() - st986))+""" WHERE (MeshFile = '"""+MeshFile+"""') and version = """+str(version)+";"
    crrA.execute(query)
    cnnA.commit()
    #delete the output, to prevent it from showing on the next analysis if error occurs
    with open("Temporary\\fe_out.txt", "w") as text_file:
        text_file.write("")
    #close SQL handles
    dc_X('NCC',cnnA,crrA)
    #move task-, .rpy, .sat and .rec files from script folder - empty trash folder manually (dont want to accidnetally move a script)
    for i in os.listdir(lPath):
        if os.path.isfile(os.path.join(lPath,i)) and '.rec' in i:
            shutil.move(os.path.join(lPath+"\\",i), os.path.join(lPath+"\\trash\\", i))
    for i in os.listdir(lPath):
        if os.path.isfile(os.path.join(lPath,i)) and '.rpy' in i:
            shutil.move(os.path.join(lPath+"\\",i), os.path.join(lPath+"\\trash\\", i))
    for i in os.listdir(lPath):
        if os.path.isfile(os.path.join(lPath,i)) and '.sat' in i:
            shutil.move(os.path.join(lPath+"\\",i), os.path.join(lPath+"\\trash\\", i))
    for i in os.listdir(lPath):
        if os.path.isfile(os.path.join(lPath,i)) and 'Task-' in i:
            shutil.move(os.path.join(lPath+"\\",i), os.path.join(lPath+"\\trash\\", i))
    path2 = lPath+'\\Temporary\\'
    for i in os.listdir(path2):
        if os.path.isfile(os.path.join(path2,i)) and 'Task-' in i:
            shutil.move(os.path.join(lPath+"\\Temporary\\",i), os.path.join(lPath+"\\trash\\", i))
    print("Total FEA time:--- %s seconds ---" % (time.time() - st986))
    return(flval,mass,FeFile)
def poc(MD,varVal,YARN,WW,spoolsWa,spoolsPhy,datum,cdArr,CADfile,rota):
    """Trace one braiding yarn over the mandrel mesh and collect its
    fell points (points-of-contact), local braid angles and pitches.

    Parameters:
        MD: mandrel mesh data, indexed as MD[point, 1..3, segment] for the
            x/y/z coordinates of each cross-section point per spanwise
            segment -- assumed 3-D numpy array; TODO confirm axis 1 layout.
        varVal: design-variable dict; reads "guide_rad", "mandrel_speed"
                and "IMD".
        YARN: yarn index, also sets the start position on the perimeter.
        WW: yarn family flag (0 = one direction, 1 = the other).
        spoolsWa: number of spools in one family (start-position divisor).
        spoolsPhy: spool count forwarded to IDP_geometry.pitch.
        datum: 1x3 matrix-like origin of the mandrel axis.
        cdArr: centreline direction array, rows [dx,dy,dz,z-limit].
        CADfile: CAD identifier used for the root_perimeter SQL update.
        rota: rotation parameter forwarded to noSerpent.

    Returns:
        pocList: np.matrix, one row per fell point:
        [YARN, x, y, z, nx, ny, nz, braid_angle, T, pitch].

    Side effects: for YARN 0 writes root_perimeter into the `arun` table;
    saves pocList/sppList snapshots into temporary\\ for review.
    """
    lPath = os.path.dirname(os.path.abspath(__file__))
    st2 = time.time()
    #change for iteration
    #POC1 find, fell point
    csL = 0
    csl_mat = [0]          # cumulative arc length along the root perimeter
    i = 0
    #perimeter calculations
    while i < np.size(MD,0):
        if i == (np.size(MD,0)-1):
            #last point closes the loop back to point 0
            localLen = np.sqrt((MD[i,1,0]-MD[0,1,0])**2 + (MD[i,2,0]-MD[0,2,0])**2)
        else:
            localLen = np.sqrt((MD[i,1,0]-MD[i+1,1,0])**2 + (MD[i,2,0]-MD[i+1,2,0])**2)
        csL = csL + localLen
        csl_mat.append(csL)
        i = i + 1
    #store perimeter as it is used in other scripts as well
    if YARN == 0:
        cnnB,crrB = cnt_X('NCC')
        query = """UPDATE arun SET root_perimeter = '"""+str(csL)+\
            """' WHERE CADfile = '"""+CADfile+"""';"""
        crrB.execute(query)
        cnnB.commit()
        #close SQL handles
        dc_X('NCC',cnnB,crrB)
    #initial position depends on yarn type Weft/warp, one is iterated clockwise one anti clockwise
    if WW == 0:
        pos = ((YARN/spoolsWa))*csL
    else:
        pos = (1-(YARN/spoolsWa))*csL
    #find between which two points on the perimeter is the current position
    i = 0
    while i < np.size(csl_mat):
        if csl_mat[i] <= pos <= csl_mat[i+1]:
            cc = i
            i = i + 1000000   # oversized jump acts as a loop break
        i = i + 1
    #how far from one point to the other is the current position
    ratio = (pos-csl_mat[cc])/(csl_mat[cc+1]-csl_mat[cc])
    #Exception for lenght calculation for the last point on circumference,
    #as this point follows to the first point of circumference.
    if cc != (np.size(MD,0)-1):
        x = (MD[cc+1,1,0] - MD[cc,1,0])*ratio + MD[cc,1,0]
        y = (MD[cc+1,2,0] - MD[cc,2,0])*ratio + MD[cc,2,0]
        z = 0
    else:
        x = (MD[0,1,0] - MD[cc,1,0])*ratio + MD[cc,1,0]
        y = (MD[0,2,0] - MD[cc,2,0])*ratio + MD[cc,2,0]
        z = 0
    poc1 = np.array([x,y,z])
    #initiates matrix for collection of all fell-points
    pocList= np.matrix([[YARN,poc1[0],poc1[1],poc1[2],0,0,0,0,0,0]])
    #useful variables
    maxR = 21
    gd = varVal["guide_rad"]
    MS = varVal["mandrel_speed"]
    imd = varVal["IMD"]
    datum = np.array([datum[0,0],datum[0,1],datum[0,2]])
    #for iterations:
    snip = 0.05 #looking for point on on elements sides with these increments
    T= 0 #timewise propagation
    Tstep = 1
    z = 0
    seg = 0
    #SPP: current spool position on the braiding machine
    SPP = noSerpent(T,YARN,maxR,gd,WW,rota,spoolsWa,MS,imd,datum)
    sppList= np.matrix([[SPP[0],SPP[1],SPP[2]]])
    #Finding initial element by 4 points.
    CP0 = findClosest(MD,2,0,poc1)
    CP1 = findClosest(MD,2,1,poc1)
    #Find nearest points at local z:
    #pt1 is further away from SPP
    #pt2 is closer to SPP
    p1d = np.sqrt((SPP[0]-CP0[0,0])**2+(SPP[1]-CP0[0,1])**2+(SPP[2]-CP0[0,2])**2)
    p2d = np.sqrt((SPP[0]-CP0[1,0])**2+(SPP[1]-CP0[1,1])**2+(SPP[2]-CP0[1,2])**2)
    if p1d > p2d:
        p1 = CP0[0,:]
        p2 = CP0[1,:]
    else:
        p1 = CP0[1,:]
        p2 = CP0[0,:]
    #find the index of each point
    e = 0
    while e < np.size(MD,0):
        if p1[0] == MD[e,1,0] and p1[1] == MD[e,2,0] and p1[2] == MD[e,3,0]:
            Ip1 = e
        if p2[0] == MD[e,1,0] and p2[1] == MD[e,2,0] and p2[2] == MD[e,3,0]:
            Ip2 = e
        e = e + 1
    #find nearest points at z + mesh
    #pt3 is further away from spp
    #pt4 is closer to spp
    p3d = np.sqrt((SPP[0]-CP1[0,0])**2+(SPP[1]-CP1[0,1])**2+(SPP[2]-CP1[0,2])**2)
    p4d = np.sqrt((SPP[0]-CP1[1,0])**2+(SPP[1]-CP1[1,1])**2+(SPP[2]-CP1[1,2])**2)
    if p3d > p4d:
        p3 = CP1[0,:]
        p4 = CP1[1,:]
    else:
        p3 = CP1[1,:]
        p4 = CP1[0,:]
    #Main iteration
    #Distancing matrix used for troubleshooting
    dCheck = np.matrix([[0,0,0]])
    ts = np.matrix([[SPP[0],SPP[1],SPP[2],poc1[0],poc1[1],poc1[2]]])
    while seg < np.size(MD,2)-2:
        #find current position of spool
        SPP = noSerpent(T,YARN,maxR,gd,WW,rota,spoolsWa,MS,imd,datum)
        #find normal to points 2,3,4
        l1 = p4 - p3
        l2 = p4 - p2
        normal = np.cross(l1,l2)
        #turn into unit vector
        n_mag = np.sqrt((normal[0])**2+(normal[1])**2+(normal[2])**2)
        if n_mag == 0:
            #degenerate element (duplicate points) - dump state for debug;
            #the division below will then fail loudly
            print("ots right now",p1,p2,p3,p4)
            print("Ip1",Ip1,"Ip2",Ip2)
            print(WW)
            print(normal)
            print(l1,l2)
        """
        if n_mag == 0:
            print("normal broken, two of the same point used")
            print("p1",p1)
            print("p2",p2)
            print("p3",p3)
            print("p4",p4)
            print("bn",bn)
            #try to increase the search for cross section point by 1
            #currently unverified method
            if p1[0]==p2[0] and p1[1]==p2[1]:
                #note findClosest now searches for 4 closest points
                CPX = findClosest(MD,4,seg,p1)
                dt = 9999999999
                i = 0
                while i < np.size(CPX,0):
                    pt = np.array([CPX[i,0],CPX[i,1],CPX[i,2]])
                    dist = np.sqrt((SPP[0]-pt[0])**2+(SPP[1]-pt[1])**2+(SPP[2]-pt[2])**2)
                    if dist < dt:
                        p2 = np.copy(pt)
                        dt = dist
                    i = i + 1
                #prints out the fix, so that user can check if it worked
                print("pt2",p2, "fixed?")
            if p3[0]==p4[0] and p3[1]==p4[1]:
                CPX = findClosest(MD,4,seg,p3)
                dt = 9999999999
                i = 0
                while i < np.size(CPX,0):
                    pt = np.array([CPX[i,0],CPX[i,1],CPX[i,2]])
                    dist = np.sqrt((SPP[0]-pt[0])**2+(SPP[1]-pt[1])**2+(SPP[2]-pt[2])**2)
                    if dist < dt:
                        p4 = np.copy(pt)
                        dt = dist
                    i = i + 1
                #prints out the fix, so that user can check if it worked
                print("pt4",p4, "fixed?")
            l1 = p4 - p3
            l2 = p4 - p2
            normal = np.cross(l1,l2)
            #turn into unit vector
            n_mag = np.sqrt((normal[0])**2+(normal[1])**2+(normal[2])**2)
        """
        #Turn normal into unit vector
        normal = normal/n_mag
        #check that vector is pointing away from the origin (assumed at datum)
        check_point = p4 + normal
        point_d = np.sqrt((p4[0]-datum[0])**2+(p4[1]-datum[1])**2+(p4[2]-datum[2])**2)
        check_point_d = np.sqrt((check_point[0]-datum[0])**2+(check_point[1]-datum[1])**2+(check_point[2]-datum[2])**2)
        if point_d > check_point_d:
            normal = normal * -1
        #making sure normal points away from datum
        pocList[np.size(pocList,0)-1,4:7] = normal
        #find if spp is above or below plane
        tst = np.dot(normal,(SPP-p4))
        #for now normals dont have z element.... shall see what happesn
        normalX = np.copy(normal)
        #normalX[2] = 0
        ui = 0
        #if the spool point is above the element plane, move spool until
        #it gets below the plane. Then itera through elements again.
        while tst > 0:
            normal[2] = 0
            T = T + Tstep
            SPP = noSerpent(T,YARN,maxR,gd,WW,rota,spoolsWa,MS,imd,datum)
            #move spool (by rotation and mandrel speed)
            #find if spp is above or below plane
            tsTemp = np.matrix([[SPP[0],SPP[1],SPP[2],poc1[0],poc1[1],poc1[2]]])
            ts = np.concatenate((ts,tsTemp),0)
            tst = np.dot(normalX,(SPP-p4))
            #tst dot(normal,(spp-p0)) -- where p0 is p4
            ui = ui + 1
            if ui > 100:
                #When the spool didn't get over horizon in 100 spools moves.
                #Prints for troubleshooting. Does not force break of sim.
                #normal[2] = 0
                print("SPP",SPP)
                print("Houston, we have a problem")
                print("p1",p1,"p2",p2,"p3",p3,"p4",p4)
                print(bn)
                #NOTE(review): `breakhere` is an undefined name - this is a
                #deliberate crash (NameError) to halt a stuck simulation.
                breakhere
        #now that tst < 0
        l1_mag = np.sqrt((l1[0])**2+(l1[1])**2+(l1[2])**2)
        l1 = l1/l1_mag
        l2_mag = np.sqrt((l2[0])**2+(l2[1])**2+(l2[2])**2)
        l2 = l2/l2_mag
        #find the shortest yarn path between poc1 and spp
        mD = 999999999
        prop = np.array([0,0,0])
        i = 0
        bn = 0   # 0: best on line p3->p4, 1: best on line p2->p4, 2: p4 itself
        #l1 = pt4-pt3
        #itereate through first element line
        while i <=l1_mag:
            ptx = p3+(l1*i)
            #compute distance point to spp and point to poc
            X1 = np.sqrt((SPP[0]-ptx[0])**2+(SPP[1]-ptx[1])**2+(SPP[2]-ptx[2])**2)
            X2 = np.sqrt((poc1[0]-ptx[0])**2+(poc1[1]-ptx[1])**2+(poc1[2]-ptx[2])**2)
            X = X1 + X2
            #if this is shortest distance so far, store location
            if X < mD:
                mD = X
                prop = ptx
            i = i + snip
        i = 0
        #l2 = pt4-pt2
        #iterate through second element line
        while i <=l2_mag:
            ptx = p2+(l2*i)
            #compute distance point to spp and point to poc
            X1 = np.sqrt((SPP[0]-ptx[0])**2+(SPP[1]-ptx[1])**2+(SPP[2]-ptx[2])**2)
            X2 = np.sqrt((poc1[0]-ptx[0])**2+(poc1[1]-ptx[1])**2+(poc1[2]-ptx[2])**2)
            X = X1 + X2
            #if this is shortest distance so far, store location
            if X < mD:
                mD = X
                prop = ptx
                bn = 1
            i = i + snip
        #check if p4 is actually the closest point
        X1 = np.sqrt((SPP[0]-p4[0])**2+(SPP[1]-p4[1])**2+(SPP[2]-p4[2])**2)
        X2 = np.sqrt((poc1[0]-p4[0])**2+(poc1[1]-p4[1])**2+(poc1[2]-p4[2])**2)
        X = X1 + X2
        if X <= mD:
            bn = 2
        poc1 = prop
        tsTemp = np.matrix([[SPP[0],SPP[1],SPP[2],poc1[0],poc1[1],poc1[2]]])
        ts = np.concatenate((ts,tsTemp),0)
        #propagate points (p1,p2,p3,p4)
        #refer to sketch for point propagation. TBD
        if bn == 0:
            #propagation spanwise
            seg = seg+1
            p1 = np.copy(p3)
            p2 = np.copy(p4)
            p3 = np.array([MD[Ip1,1,seg+1],MD[Ip1,2,seg+1],MD[Ip1,3,seg+1]])
            p4 = np.array([MD[Ip2,1,seg+1],MD[Ip2,2,seg+1],MD[Ip2,3,seg+1]])
        else:
            #for propagation spanwise or diagonal
            #step the perimeter indices around the cross-section; direction
            #depends on yarn family, with wrap-around at the ends
            if WW == 0:
                if Ip1 == np.size(MD,0)-1:
                    Ip1 = 0
                else:
                    Ip1 = Ip1 + 1
                if Ip2 == np.size(MD,0)-1:
                    Ip2 = 0
                else:
                    Ip2 = Ip2 + 1
            elif WW == 1:
                if Ip1 == 0:
                    Ip1 = np.size(MD,0)-1
                else:
                    Ip1 = Ip1 - 1
                if Ip2 == 0:
                    Ip2 = np.size(MD,0)-1
                else:
                    Ip2 = Ip2 - 1
            if bn == 1:
                #propagation along cross-section
                p1 = np.copy(p2)
                p3 = np.copy(p4)
                p2 = np.array([MD[Ip2,1,seg],MD[Ip2,2,seg],MD[Ip2,3,seg]])
                p4 = np.array([MD[Ip2,1,seg+1],MD[Ip2,2,seg+1],MD[Ip2,3,seg+1]])
            elif bn == 2:
                #both spanwsie and xs-wise propagation
                p1 = np.copy(p4)
                seg = seg + 1
                p2 = np.array([MD[Ip2,1,seg],MD[Ip2,2,seg],MD[Ip2,3,seg]])
                p3 = np.array([MD[Ip1,1,seg+1],MD[Ip1,2,seg+1],MD[Ip1,3,seg+1]])
                p4 = np.array([MD[Ip2,1,seg+1],MD[Ip2,2,seg+1],MD[Ip2,3,seg+1]])
        #find normal to points 2,3,4
        l1 = p4 - p3
        l2 = p4 - p2
        normal1 = np.cross(l1,l2)
        #turn into unit vector
        n_mag = np.sqrt((normal1[0])**2+(normal1[1])**2+(normal1[2])**2)
        normal1 = normal1/n_mag
        #get f, yarn vector
        f = SPP -poc1
        #get t, vector along centreline
        t = []
        i = 0
        while i < np.size(cdArr,0)-1:
            if cdArr[i,3] <= z <= cdArr[i+1,3]:
                t = [cdArr[i,0],cdArr[i,1],cdArr[i,2]]
                i = i + 999999999   # oversized jump acts as a loop break
            i = i + 1
        #find the local braid angle
        iv, a = braidAngle(normal,t,f)
        #find the local pitch
        pitch = IDP_geometry.pitch(a,MD,z,spoolsPhy)
        pocList[np.size(pocList,0)-1,7] = a
        pocList[np.size(pocList,0)-1,9] = pitch
        pocList = np.concatenate((pocList,np.matrix([[YARN,poc1[0],poc1[1],poc1[2],0,0,0,0,T,0]])),axis=0)
        sppList = np.concatenate((sppList,np.matrix([[SPP[0],SPP[1],SPP[2]]])))
        #append to list of pocs
        dCheckT = np.matrix([[poc1[2],SPP[2],(SPP[2]-poc1[2])]])
        dCheck = np.concatenate((dCheck,dCheckT),0)
        z = poc1[2]
        #seg = seg + 1
    #save the list of pocs as numpy for testing
    #Final braid angle computation
    #get f
    f = SPP -poc1
    #get t
    t = []
    i = 0
    while i < np.size(cdArr,0)-1:
        if cdArr[i,3] <= z <= cdArr[i+1,3]:
            t = [cdArr[i,0],cdArr[i,1],cdArr[i,2]]
            i = i + 999999999
        i = i + 1
    #find the last braid angle
    iv, a = braidAngle(normal,t,f)
    #find last pitch
    pitch = IDP_geometry.pitch(a,MD,z,spoolsPhy)
    pocList[np.size(pocList,0)-1,7] = a
    pocList[np.size(pocList,0)-1,9] = pitch
    #store pocList and sppList for troubleshooting and review
    np.save(lPath+"\\temporary\\pocList.npy", pocList[:,1:4])
    np.save(lPath+"\\temporary\\sppList.npy", sppList)
    #for t-shoot:
    #np.save(lPath+"\\temporary\\dCheck.npy",dCheck)
    #np.save(lPath+"\\temporary\\"+str(YARN)+"yarn"+str(WW)+".npy",ts)
    #add to matrix?
    print("YARN "+str(YARN)+" simulation time :--- %s seconds ---" % (time.time() - st2))
    if sppList[0,1] == sppList[np.size(sppList,0)-1,1]:
        print(sppList)
    return(pocList)
def collector(tlist):
    """Collect optimisation results from several `_iters_N` tables into one
    numpy matrix for post-processing.

    For every table number in *tlist* the union of iterated-variable column
    names is built first; each row with fitness > 0 is then assembled from
    the per-iteration table (iterated variables) plus the
    UserDefIterations table (fixed variables), and the deconstructed
    fitness (mass, max deflection) is looked up via arun -> fe_inst.

    Parameters:
        tlist: iterable of integer table suffixes (`_iters_<n>`).

    Returns:
        (len(colis), dt, colis): number of variable columns, the assembled
        data matrix (one row per qualifying record; columns are the
        variables followed by [fitness, mass, max_deflection]), and the
        ordered list of variable column names.
    """
    #tlist lists the tables that are to be collected
    colis = []
    for number in tlist:
        #loop thorugh all tables, search for column names
        cnnT,crrT = cnt_X('NCC')
        query = """SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '_iters_"""+str(number)+"""' ORDER BY ORDINAL_POSITION"""
        crrT.execute(query)
        rows = crrT.fetchall()
        #for each column that was not listed (and does not correspond to default column), add to list
        for row in rows:
            if row[0] not in colis:
                colis.append(row[0])
        dc_X('NCC',cnnT,crrT)
    #drop the bookkeeping columns - only iterated variables remain
    colis.remove('fitness')
    colis.remove('Specie')
    colis.remove('Generation')
    colis.remove('arunID')
    colis.remove('id')
    #print(colis)
    #make zeros line for the required number of vars + fitness + def + weight
    sz = len(colis) + 3
    dt = np.zeros([1,sz])
    #for each table
    for number in tlist:
        colisy = []
        colisx = colis.copy()
        #print(colisx)
        cnnT,crrT = cnt_X('NCC')
        query = """SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '_iters_"""+str(number)+"""' ORDER BY ORDINAL_POSITION"""
        crrT.execute(query)
        rows = crrT.fetchall()
        #for each column that was not listed (and does not correspond to default column), add to list
        query = "SELECT "
        for row in rows:
            if row[0] in colisx:
                #colisx will be the remaining entries that will be added from fixed variables table
                #colisy is the list of variables obtained here from the main table
                query += row[0]+","
                colisy.append(row[0])
                colisx.remove(row[0])
        query += """fitness,arunID FROM [DIGIProps].[dbo].[_iters_"""+str(number)+"""] where fitness > 0;"""
        crrT.execute(query)
        mt = crrT.fetchall()
        #mt is later used for construction of the overall table
        #obtain variables from fixed variables table
        if len(colisx) > 0:
            query = "SELECT "
            for i in colisx:
                query += str(i)+","
            query = query[:-1]
            query += """ From [DIGIProps].[dbo].[UserDefIterations] where ref_no = """+str(number)+""";"""
            crrT.execute(query)
            addx = crrT.fetchall()
            #keep the last (expected: only) fixed-variables row
            for a in addx:
                adx = a
        #populate each row of data from corresponding sources
        #NOTE(review): dtt is created once per table and reused for every
        #row m - each relevant cell is overwritten below before dt is
        #extended, but untouched cells would carry over between rows.
        dtt = np.zeros([1,sz])
        for m in mt:
            i = 0
            while i < len(colis):
                ii = 0
                #from fixed variables table
                while ii < len(colisx):
                    if colis[i] == colisx[ii]:
                        dtt[0,i] = adx[ii]
                    ii = ii + 1
                ii = 0
                #from main table
                while ii < len(colisy):
                    if colis[i] == colisy[ii]:
                        dtt[0,i] = m[ii]
                    ii = ii + 1
                i = i + 1
            #add fitness
            fn = np.size(dtt,1)-3
            mn = len(m)-2
            dtt[0,fn] = m[mn]
            #find idArun
            mn = len(m)-1
            idArun = m[mn]
            #use the idArun to obtain the deconstructed fitness (mass, deflection)
            query = """SELECT FEfile FROM arun where idArun = """+str(idArun)+""";"""
            crrT.execute(query)
            FEfile = crrT.fetchall()
            for mu in FEfile:
                mu1 = str(mu[0])
            query = """SELECT max_deflection, mass FROM fe_inst where FEfile = '"""+mu1+"""';"""
            #print(query)
            crrT.execute(query)
            fitComp = crrT.fetchall()
            #print(fitComp)
            for nu in fitComp:
                #max_deflectoin
                dtt[0,np.size(dtt,1)-1] = nu[0]
                #mass
                dtt[0,np.size(dtt,1)-2] = nu[1]
            dt = np.concatenate((dt,dtt),axis=0)
        dc_X('NCC',cnnT,crrT)
    #drop the initial all-zeros seed row
    dt = np.delete(dt,0,axis=0)
    return(len(colis), dt, colis)
def AgentSutler(varVar,varVal,fixedVars,varMin,varMax,specie): #this function is used by the GUI
    """Create a new SQL iteration table and seed it via Latin Hypercube sampling.

    Based on user input (from the GUI) this: allocates the next `_iters_<n>`
    table; records the fixed variables in `UserDefIterations`; draws 200
    samples with lhsmdu, scales them to per-variable bounds; post-processes
    integer/ordering constraints; inserts every sample as an individual; and
    finally hands the table to `IDP_assistants.Linda` for evaluation.

    Parameters:
        varVar:    list of names of the iterated variables.
        varVal:    dict of variable name -> value (used for SQL typing of
                   iterated vars and for the fixed-variable values).
        fixedVars: list of names of non-iterated variables.
        varMin:    dict of name -> lower bound, or False when the bound must
                   be derived (airfoil/reinforcement/matrix lookups).
        varMax:    dict of name -> upper bound.
        specie:    species label stored with each generated individual.

    NOTE(review): all SQL is built by string concatenation from user-supplied
    names/values — assumes trusted GUI input; parameterization would be safer.
    """
    # Based on user input a new iteration table in SQL is created.
    lPath = os.path.dirname(os.path.abspath(__file__))
    # Create an underscore-joined string of iteratable variable names for
    # the SQL record (e.g. "a_b_c_").
    i = 0
    varVarS = ""
    while i < len(varVar):
        varVarS = varVarS+varVar[i]+"_"
        i = i + 1
    cnnW,crrW = cnt_X('NCC')
    # Find the highest existing `_iters_<n>` suffix so the new table gets
    # the next free number.
    query = """SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE' AND TABLE_CATALOG='DIGIProps' and TABLE_NAME like '%_iters_%'"""
    crrW.execute(query)
    rows = crrW.fetchall()
    cc = 0
    for row in rows:
        x = int(row[0].split('_iters_')[1])
        if x > cc:
            cc = x
    cc = cc + 1
    GENtable = '_iters_'+str(cc)
    # Build the user-defined iteratable-variable table: one column per
    # iterated variable (varchar for string values, float otherwise) plus
    # the standard bookkeeping columns.
    query ="CREATE TABLE "
    query += GENtable
    query +="(id int IDENTITY(1,1) PRIMARY KEY,Specie varchar(100),Generation int,"
    i = 0
    while i < len(varVar):
        query+=str(varVar[i])+" "
        if type(varVal[varVar[i]]) is str:
            query +="varchar(100),"
        else:
            query +="float,"
        i = i + 1
    query += "fitness float,arunID int)"
    crrW.execute(query)
    cnnW.commit()
    # Insert the fixed variables into the fixed-variables iteration table,
    # keyed by the new table number (ref_no = cc).
    query = "INSERT INTO UserDefIterations(IterateVar,ref_no,"
    i = 0
    while i < len(fixedVars):
        query +=fixedVars[i]+","
        i = i + 1
    query = query[:-1]  # drop trailing comma
    query += ") VALUES("+"""'"""+varVarS+"""',"""+str(cc)+""","""
    i = 0
    while i < len(fixedVars):
        if type(varVal[fixedVars[i]]) is str:
            query +="""'"""+str(varVal[fixedVars[i]])+"""',"""
        else:
            query +=str(varVal[fixedVars[i]])+","
        i = i + 1
    query = query[:-1]  # drop trailing comma
    query +=")"
    crrW.execute(query)
    cnnW.commit()
    generation = 420 #untill combination of opt_algos is required
    # Obtain sample data based on the Latin Hypercube sampling method:
    # s samples over varN dimensions, each value initially in [0, 1].
    s = 200
    varN = len(varVar)
    sampleM = lhsmdu.sample(varN, s)
    sampleMAT = np.ndarray.transpose(sampleM)
    print(varMin,varMax)
    # BCs collects one [min, max] row per variable; seeded with a zero row
    # that is deleted afterwards. `temp` is reused as a scratch row
    # (np.concatenate copies it, so reuse is safe).
    BCs = np.matrix([[0.000,0.000]])
    temp = np.matrix([[0.000,0.000]])
    i = 0
    #AFLe = 0
    while i < varN:
        if varMin[varVar[i]]!=False:
            # Explicit numeric bounds supplied by the caller.
            temp[0,0] = varMin[varVar[i]]
            temp[0,1] = varMax[varVar[i]]
            BCs = np.concatenate((BCs,temp),axis = 0)
        elif "airfoil" in varVar[i]:
            # Bound by the number of files in the aerofoil collection;
            # the sampled value later indexes into that directory.
            img_folder_path = lPath+'\\aerofoilcollection\\'
            dirListing = os.listdir(img_folder_path)
            AFLS = len(dirListing)
            temp[0,0] = 0
            temp[0,1] = AFLS
            BCs = np.concatenate((BCs,temp),axis = 0)
            #AFLe = 1
        elif "reinforcement" in varVar[i]:
            # Bound by the number of fibre materials in the database.
            query = "SELECT COUNT(*) FROM dbo.fibre_properties;"
            crrW.execute(query)
            rows = crrW.fetchall()
            for row in rows:
                r = int(row[0])
            temp[0,0] = 0
            temp[0,1] = r
            BCs = np.concatenate((BCs,temp),axis = 0)
        elif "matrix" in varVar[i]:
            # Bound by the number of matrix materials in the database.
            query = "SELECT COUNT(*) FROM dbo.matrix_properties;"
            crrW.execute(query)
            rows = crrW.fetchall()
            for row in rows:
                r = int(row[0])
            temp[0,0] = 0
            temp[0,1] = r
            BCs = np.concatenate((BCs,temp),axis = 0)
        else:
            # 0-10 now arbitrarily selected for string values (eg. material).
            # This needs to be replaced by a lookup function that checks the
            # number of airfoils.
            temp[0,0] = 0
            temp[0,1] = 10
            BCs = np.concatenate((BCs,temp),axis = 0)
        i = i + 1
    BCs =np.delete(BCs,0,axis=0)  # drop the all-zero seed row
    print(BCs)
    # Integer values turned into integers (LHS likely provided no integer
    # values). 979 acts as an "absent" sentinel index: max() with 979 keeps
    # the guard from ever firing when a variable is not iterated.
    # NOTE(review): this scheme silently breaks if varN ever reaches 979.
    IntPos = 979
    IntPos2 = 979
    IntPos3 = 979
    IntPos4 = 979
    IntPos5 = 979
    IntPos6 = 979
    IntPos7 = 979
    IntPos8 = 979
    if 'no_layers' in varVar :
        IntPos = varVar.index('no_layers')
    if 'spools' in varVar:
        IntPos2 = varVar.index('spools')
    if 'c_max' in varVar:
        IntPos3 = varVar.index('c_max')
    if 'c_min' in varVar:
        IntPos4 = varVar.index('c_min')
    if 'chord_1' in varVar:
        IntPos5 = varVar.index('chord_1')
    if 'chord_2' in varVar:
        IntPos6 = varVar.index('chord_2')
    if 'chord_3' in varVar:
        IntPos7 = varVar.index('chord_3')
    if 'chord_0' in varVar:
        IntPos8 = varVar.index('chord_0')
    # Transform the [0,1] ratio values to actual variable values and apply
    # the integer/ordering constraints in place.
    i = 0
    while i < s:
        ii = 0
        while ii < varN:
            # Linear scale from [0,1] to [min, max].
            sampleMAT[i,ii]= (sampleMAT[i,ii])*(BCs[ii,1]-BCs[ii,0])+BCs[ii,0]
            if IntPos == ii or IntPos2 == ii:
                # no_layers / spools must be whole numbers.
                sampleMAT[i,ii] = int(sampleMAT[i,ii])
            # If c_max and c_min are iterated (guard fires only once both
            # have been scaled, i.e. at the later of the two indices)...
            if ii == max(IntPos3,IntPos4):
                # ...and if c_min is larger than c_max...
                if sampleMAT[i,IntPos3] < sampleMAT[i,IntPos4]:
                    # ...switch the values of c_max and c_min.
                    c_max = np.copy(sampleMAT[i,IntPos4])
                    c_min = np.copy(sampleMAT[i,IntPos3])
                    sampleMAT[i,IntPos4] = c_min
                    sampleMAT[i,IntPos3] = c_max
            # Prevents negative taper: if a subsequent chord size is larger
            # it is decreased to the value of the previous chord
            # (chord_0 >= chord_1 >= chord_2 >= chord_3).
            if ii == max(IntPos5,IntPos6,IntPos7,IntPos8):
                if sampleMAT[i,IntPos8] < sampleMAT[i,IntPos5]:
                    sampleMAT[i,IntPos5] = sampleMAT[i,IntPos8]
                if sampleMAT[i,IntPos5] < sampleMAT[i,IntPos6]:
                    sampleMAT[i,IntPos6] = sampleMAT[i,IntPos5]
                if sampleMAT[i,IntPos6] < sampleMAT[i,IntPos7]:
                    sampleMAT[i,IntPos7] = sampleMAT[i,IntPos6]
            ii = ii + 1
        i = i + 1
    # Insert every sampled individual into the new generation table; index
    # variables (airfoil/matrix/reinforcement) are resolved to their names
    # before insertion.
    i = 0
    while i < s:
        #sampleMAT[i,3] = int(sampleMAT[i,3])
        query = "INSERT INTO "+GENtable+"(specie,generation,"
        ii = 0
        while ii < varN:
            query += varVar[ii]+","
            ii = ii + 1
        query = query[:-1]  # drop trailing comma
        query += ") VALUES("
        query += """'"""+specie+"""',"""+str(generation)+""","""
        ii = 0
        while ii < varN:
            if "airfoil" in varVar[ii]:
                # Resolve the sampled index to the fileNO-th entry of the
                # aerofoil collection, stored as its bare filename.
                # Will raise StopIteration if you don't have enough files.
                fileNO = int(sampleMAT[i,ii])
                file1000 = next(itertools.islice(os.scandir(lPath+'\\aerofoilcollection\\'), fileNO, None)).path
                file1000 = file1000.split(lPath+"\\aerofoilcollection\\")[1]
                query += """'"""+str(file1000)+"""',"""
            elif "matrix" in varVar[ii]:
                # Resolve the sampled index to the fileNO-th matrix material
                # name from the database.
                qr = "SELECT Material_name FROM dbo.matrix_properties;"
                crrW.execute(qr)
                rows = crrW.fetchall()
                fileNO = int(sampleMAT[i,ii])
                iii = 0
                for row in rows:
                    if iii == fileNO:
                        mt = (row[0])
                    iii = iii + 1
                query += """'"""+str(mt)+"""',"""
            elif "reinforcement" in varVar[ii]:
                # Resolve the sampled index to the fileNO-th fibre material
                # name from the database.
                qr = "SELECT Material_name FROM dbo.fibre_properties;"
                crrW.execute(qr)
                rows = crrW.fetchall()
                fileNO = int(sampleMAT[i,ii])
                iii = 0
                for row in rows:
                    if iii == fileNO:
                        mt = (row[0])
                    iii = iii + 1
                query += """'"""+str(mt)+"""',"""
            else:
                # Plain numeric value.
                query += str(sampleMAT[i,ii])+","
            ii = ii + 1
        query = query[:-1]  # drop trailing comma
        query += ");"
        crrW.execute(query)
        cnnW.commit()
        i = i + 1
    dc_X('NCC',cnnW,crrW)
    # Log progress for the GUI, then evaluate the freshly sampled population.
    with open(lPath+"\\temporary\\underground.txt", "a") as text_file:
        text_file.write("LHS run, begin evaluation.\n")
    IDP_assistants.Linda(generation,specie,GENtable,varVal,varVar)
    print("Finished sampling the Hypercube")