def writeOutputParamVals2caselist(cases, csvTemplateName, paramTable,
                                  caselist, outputParamsFileAddress):
    """Append the value of every output parameter to each case's csv row.

    For each case, parameters with a non-negative location (param[1] >= 0)
    are read from the per-case Metrics Extraction csv file; parameters with
    location -1 are read from auxiliary result files described in
    outputParamsFileAddress.

    Args:
        cases: iterable of cases; only the index is used here.
        csvTemplateName: metrics-csv path template containing "@@i@@",
            replaced by the case index (falsy to skip this source).
        paramTable: list of (flag, location) pairs; location >= 0 selects
            the metrics csv, -1 selects an auxiliary result file.
        caselist: list of csv-row strings, one per case (mutated in place).
        outputParamsFileAddress: path of the file describing auxiliary
            outputs (falsy to skip this source).

    Returns:
        The (mutated) caselist.
    """
    # Fix: read the output-parameter definition file once and close it.
    # Its content is loop-invariant, and the original code reopened it for
    # every case without ever closing the handle (file-handle leak).
    allDesiredOutputs = None
    if outputParamsFileAddress:
        foutParams = data_IO.open_file(outputParamsFileAddress, 'r')
        allDesiredOutputs = foutParams.read().splitlines()
        foutParams.close()

    # Read the desired metric from each output file
    for icase, case in enumerate(cases):
        # Read values from the Metrics Extraction file first
        if csvTemplateName:
            extractedFile = csvTemplateName.replace("@@i@@", str(icase))
            fcaseMetrics = data_IO.open_file(extractedFile, 'r')
            caseOutStr = ""
            for param in paramTable:
                if param[1] >= 0:
                    param_icase = data_IO.read_float_from_file_pointer(
                        fcaseMetrics, param[0], ',', param[1])
                    caseOutStr += "," + str(param_icase)
            caselist[icase] += caseOutStr
            fcaseMetrics.close()

        if allDesiredOutputs is not None:
            # Read parameters from other files if provided.
            # Each line of the definition file has the format:
            #   outputName;outputFileNameTemplate;outputFlag;delimitor;locationInFile
            # For example:
            #   pressure_drop;results/case_@@i@@_pressure_drop.txt;;" ";1
            for param in paramTable:
                if param[1] == -1:
                    outFile = data_IO.read_str_from_strList(
                        allDesiredOutputs, param[0], ";", 0, 0)
                    outFile = outFile.replace("@@i@@", str(icase))
                    outFileParamFlag = data_IO.read_str_from_strList(
                        allDesiredOutputs, param[0], ";", 1, 0)
                    # The delimiter field is written quoted (e.g. " ");
                    # index [1] extracts the character between the quotes.
                    outFileDelimiter = data_IO.read_str_from_strList(
                        allDesiredOutputs, param[0], ";", 2, 0)[1]
                    locnInOutFile = int(data_IO.read_str_from_strList(
                        allDesiredOutputs, param[0], ";", 3, 0))
                    foutFile = data_IO.open_file(outFile, 'r')
                    param_icase = data_IO.read_float_from_file_pointer(
                        foutFile, outFileParamFlag, outFileDelimiter,
                        locnInOutFile)
                    # Fix: the original never closed this per-case handle.
                    foutFile.close()
                    caselist[icase] += "," + str(param_icase)
    return caselist
def readKPIJsonFile(kpiFile):
    """Load a KPI definition json file.

    Returns a (kpihash, orderPreservedKeys) pair where kpihash is the
    byteified KPI dictionary and orderPreservedKeys lists its keys in the
    order they appear in the file.
    """
    json_fp = data_IO.open_file(kpiFile)
    # object_pairs_hook=OrderedDict keeps the file's key order.
    raw_hash = json.load(json_fp, object_pairs_hook=OrderedDict)
    json_fp.close()
    ordered_keys = data_IO.byteify(list(raw_hash.keys()))
    return data_IO.byteify(raw_hash), ordered_keys
def read_num_layers_from_pass_coor_file(self):
    """Read the layer count from the pass-coordinates file into self.num_layers."""
    coor_fp = data_IO.open_file(self.pass_coor_path)
    # First get the number of layers:
    layer_count = data_IO.read_int_from_file_line_offset(
        coor_fp, 'Number-of-Layers')
    coor_fp.close()
    self.num_layers = layer_count
def read_passes_from_pass_coor_file(self):
    """Sum the per-layer pass counts into self.num_passes.

    Requires self.num_layers to be set (see
    read_num_layers_from_pass_coor_file).
    """
    coor_fp = data_IO.open_file(self.pass_coor_path)
    # Then, read the passes in each layer; data[1] is the pass count on the
    # 'Layer,Number-of-Passes' line at the given offset.
    total_passes = 0
    for ilayer in range(self.num_layers):
        layer_data = data_IO.read_ints_from_file_line_offset(
            coor_fp, 'Layer,Number-of-Passes', delimiter=',',
            offset=ilayer, end_line=1)
        total_passes += layer_data[1]
    coor_fp.close()
    self.num_passes = total_passes
def writeOutParamVals2caselist(cases, csvTemplateName, paramTable, caselist, kpihash):
    """Append each case's output-parameter values to its csv row in caselist.

    Parameters with a non-negative location (param[1] >= 0) are read from the
    per-case Metrics Extraction csv file; the remaining parameters are read
    from the result file named in the corresponding kpihash entry.

    cases: iterable of cases; only the index is used here.
    csvTemplateName: metrics-csv path template formatted with the case index.
    paramTable: list of (name, location) pairs; location -1 means "read from
        the file given in kpihash[name]".
    caselist: list of csv-row strings, one per case (mutated in place).
    kpihash: dict of KPI definitions ('resultFile', 'DEXoutputFlag',
        'delimiter', 'locationInFile') -- assumed present for every
        non-csv parameter; TODO confirm against the KPI json schema.

    Returns the (mutated) caselist.
    """
    # Read the desired metric from each output file
    for icase, case in enumerate(cases):
        # Read values from the Metrics Extraction file first.
        # Only open the metrics csv if at least one parameter comes from it.
        readMEXCSVFile = False
        if any(param[1] >= 0 for param in paramTable):
            readMEXCSVFile = True
        if readMEXCSVFile:
            PVcsvAddress = csvTemplateName.format(icase)
            fPVcsv = data_IO.open_file(PVcsvAddress, 'r')
        for param in paramTable:
            if param[1] >= 0:
                # Value comes from the shared metrics-extraction csv.
                param_icase = data_IO.read_float_from_file_pointer(
                    fPVcsv, param[0], ',', param[1])
            else:
                # Read parameters from other files if provided
                metrichash = kpihash[param[0]]
                dataFile = metrichash['resultFile'].format(icase)
                dataFileParamFlag = metrichash['DEXoutputFlag']
                dataFileDelimiter = metrichash['delimiter']
                # An empty delimiter string means "split on any whitespace".
                if not dataFileDelimiter:
                    dataFileDelimiter = None
                locnInOutFile = int(
                    metrichash['locationInFile']) - 1  # Start from 0
                fdataFile = data_IO.open_file(dataFile, 'r')
                param_icase = data_IO.read_float_from_file_pointer(
                    fdataFile, dataFileParamFlag, dataFileDelimiter,
                    locnInOutFile)
                fdataFile.close()
            caselist[icase] += "," + str(param_icase)
        # Close the per-case metrics csv only if it was opened above.
        if readMEXCSVFile:
            fPVcsv.close()
    return caselist
def writeOutputParamVals2caselist(cases, resultsDirRootName, extractedFileName,
                                  paramTable, caselist):
    """Append each case's extracted metric values to its csv row.

    Reads every (flag, location) pair in paramTable from the per-case file
    "<resultsDirRootName>case_<i>.csv" and appends the values, comma
    prefixed, to caselist[i]. Returns the mutated caselist.
    """
    # Read the desired metric from each output file
    for icase, _case in enumerate(cases):
        metrics_path = resultsDirRootName + "case_" + str(icase) + '.csv'
        metrics_fp = data_IO.open_file(metrics_path, 'r')
        pieces = []
        for param in paramTable:
            value = data_IO.read_float_from_file_pointer(
                metrics_fp, param[0], ',', param[1])
            pieces.append("," + str(value))
        metrics_fp.close()
        caselist[icase] += "".join(pieces)
    return caselist
def getOutImgsFromKPI(kpiFile):
    """Return the names of KPIs that declare an output image.

    A KPI counts when its 'image' field exists and is neither "None" nor
    empty; key order from the json file is preserved.
    """
    json_fp = data_IO.open_file(kpiFile)
    kpi_dict = json.load(json_fp, object_pairs_hook=OrderedDict)
    json_fp.close()
    ordered_keys = data_IO.byteify(list(kpi_dict.keys()))
    kpi_dict = data_IO.byteify(kpi_dict)

    png_kpis = []
    for kpi_name in ordered_keys:
        # Missing 'image' is treated the same as the literal "None".
        kpi_image = kpi_dict[kpi_name].get('image', "None")
        if kpi_image != "None" and kpi_image != "":
            png_kpis.append(kpi_name)
    return png_kpis
def read_uncoupled_step_time_from_inp(inp_file_path):
    """Read time period of UNCOUPLED TEMPERATURE-DISPLACEMENT steps from
    ccx input file.

    Returns the list of time-period values (second comma-separated float on
    the line following each matching keyword line).
    """
    inp_fp = data_IO.open_file(inp_file_path)
    all_lines = inp_fp.readlines()
    inp_fp.close()

    step_times = []
    search_pos = 0
    while True:
        search_pos = data_IO.get_index_in_str_list(
            all_lines, 'UNCOUPLED TEMPERATURE-DISPLACEMENT',
            start_from=search_pos)
        if search_pos is None:
            break
        # The time period is the second value on the line after the keyword.
        step_times.append(
            data_IO.read_floats_from_string(all_lines[search_pos + 1], ',')[1])
        search_pos += 1
    return step_times
def getOutputParamsStatList(outputParamsFileAddress, outputParamNames,
                            stats2include=('ave', 'min', 'max')):
    """Build the list of output-parameter/statistic labels.

    If outputParamsFileAddress is given, the labels are read from that file:
    line 1 is a comma-separated list of "name(stat)" entries (filtered to
    names present in outputParamNames), and each later line names one
    parameter read from another file ("name;fileTemplate;flag;delim;loc").
    Otherwise, "name(stat)" labels are generated for every name in
    outputParamNames and every stat in stats2include.

    Args:
        outputParamsFileAddress: path of the definition file, or falsy.
        outputParamNames: known output-parameter names.
        stats2include: statistics used only in the generated (no-file) case.

    Returns:
        List of label strings.
    """
    # Fix: the default was a mutable list (['ave', 'min', 'max']); a tuple
    # avoids the shared-mutable-default pitfall without changing callers.
    if outputParamsFileAddress:
        # Fix: close the file handle (it was leaked in the original).
        foutParams = data_IO.open_file(outputParamsFileAddress, 'r')
        allDesiredOutputs = foutParams.read().splitlines()
        foutParams.close()

        # First get the name of parameters to read from metric extraction
        # csv files.
        outParamsFromCSV = allDesiredOutputs[0].split(',')

        # Make sure all the variables in outputParamsList exist in
        # outputParamNames:
        outParamsList = []
        for param in outParamsFromCSV:
            # Entries look like "name(stat)"; compare on the bare name.
            paramName = param[:param.find("(")]
            if paramName in outputParamNames:
                outParamsList.append(param)

        # Read parameters from other files if provided.
        # The format is:
        #   outputName;outputFileNameTemplate;outputFlag;delimitor;locationInFile
        # For example:
        #   pressure_drop;results/case_@@i@@_pressure_drop.txt;;" ";1
        for line in allDesiredOutputs[1:]:
            if line:
                outParamsList.append(line.split(";")[0])
    else:
        outParamsList = []
        for paramName in outputParamNames:
            for stat in stats2include:
                outParamsList.append(paramName + "(" + stat + ")")
    return outParamsList
def getOutputParamsFromKPI(kpiFile):
    """Return the KPI names whose statistics should be extracted.

    StreamLines KPIs are forced to extractStats="False"; a KPI with no
    'extractStats' field defaults to being included.
    """
    json_fp = data_IO.open_file(kpiFile)
    kpi_dict = json.load(json_fp, object_pairs_hook=OrderedDict)
    json_fp.close()
    ordered_keys = data_IO.byteify(list(kpi_dict.keys()))
    kpi_dict = data_IO.byteify(kpi_dict)

    stat_params = []
    for kpi_name in ordered_keys:
        entry = kpi_dict[kpi_name]
        # Stream-line KPIs never produce extractable statistics.
        if entry['type'] == "StreamLines":
            entry['extractStats'] = "False"
        if 'extractStats' in entry:
            wants_stats = data_IO.str2bool(entry['extractStats'])
        else:
            wants_stats = True
        if wants_stats:
            stat_params.append(kpi_name)
    return stat_params
import sys
import data_IO

# Input arguments:
if len(sys.argv) < 4:
    print("Number of provided arguments: ", len(sys.argv) - 1)
    print("Usage: python writeCGXfbdFile <cgxFile.fbd> <inputMeshFile> <outputMeshFile>")
    sys.exit()

cgxFile = sys.argv[1]
inputMeshFile = sys.argv[2]
outputMeshFile = sys.argv[3]

# Write a cgx batch (.fbd) file that reads the input mesh, zaps STRI35
# elements, exports everything in Abaqus format, and renames the result.
# Sample fbd file reading mesh box_mesh1.inp and writing into test.msh:
#   read box_mesh1.inp
#   zap STRI35
#   send all abq
#   sys mv all.msh test.msh
fCgx = data_IO.open_file(cgxFile, "w")
fCgx.write(
    "read " + inputMeshFile + "\n"
    "zap STRI35\n"
    "send all abq\n"
    "sys mv all.msh " + outputMeshFile + "\n")
fCgx.close()
# Input arguments: # if len(sys.argv) < 3: # print("Number of provided arguments: ", len(sys.argv) -1 ) # print( "Usage: python boxMesh <inputFile.in> <geomFile.step>") # print( " [<meshFileName=box_mesh.unv>") # sys.exit() inputFileName = "/home/marmar/Dropbox/parallelWorks/weldingProject/boxGeom/inputs/geomMeshParams.in" geomFileAddress = "inputs/box.step" meshFileName = "outputs/box_mesh.unv" # Read parameters from input file in_fp = data_IO.open_file(inputFileName) Length = data_IO.read_float_from_file_pointer(in_fp, "Length") Height = data_IO.read_float_from_file_pointer(in_fp, "Height") Width = data_IO.read_float_from_file_pointer(in_fp, "Width") highResWidth = data_IO.read_float_from_file_pointer(in_fp, "highResWidth") meshScale = data_IO.read_float_from_file_pointer(in_fp, "meshScale") in_fp.close() salome.salome_init() theStudy = salome.myStudy ### ### GEOM component ### import GEOM
import sys
import data_IO

# Input arguments:
if len(sys.argv) < 4:
    print("Number of provided arguments: ", len(sys.argv) - 1)
    print("Usage: python writeSimParamFiles <cases.list> <simFilesDir> <simFileRootName>")
    sys.exit()

caseListFileName = sys.argv[1]
simFilesDir = sys.argv[2]
simFileRootName = sys.argv[3]

# Turn every line of the case list into one simulation parameter file:
# commas become newlines (one parameter per line) and "=" becomes a space,
# so each output line reads "<name> <value>".
cl_fp = data_IO.open_file(caseListFileName)
for i, line in enumerate(cl_fp):
    param_text = line.replace(",", "\n").replace("=", " ")
    simFileAddress = simFilesDir + "/" + simFileRootName + str(i) + ".in"
    simf = data_IO.open_file(simFileAddress, "w")
    simf.write(param_text + "\n")
    simf.close()
cl_fp.close()
case, paramTypes, xmlFile, helpStr='Whitespace delimited or range/step (e.g. min:max:step)', paramUnits=[]): """Write the input section of the xml file for generating input forms on the Parallel Works platform""" paramVals = convertListOfDicts2Dict(case) # sort the keys by parameter types: paramsBytype = {} paramsSortedBytype = sorted(paramTypes.items()) paramsTypeVal = mergeParamTypesParamValsDict(paramTypes, paramVals) print(list(paramVals.keys())) unitStr = "" f = data_IO.open_file(xmlFile, "w") # Write the xml file header: f.write("<tool id=\'test_params_forms\' name=\'test_params_forms\'> \n" "\t<command interpreter=\'swift\'>main.swift</command> \n" "\t<inputs> \n") paramTypes = set(paramTypes.values()) # Write the parameters of each type under a section expanded = 'true' for sectionName in paramTypes: # Write the section header # e.g. <section name='design_space' type='section' title='Cyclone Geometry Parameter Space' expanded='true'> f.write("\t\t<section name=\'" + sectionName + "\' type=\'section\' title='" + sectionName.capitalize() + " Parameters\' expanded=\'" + expanded + "\'> \n")
import sys
import data_IO

inputFile = sys.argv[1]
fi = data_IO.open_file(inputFile)

# Read one whitespace-delimited integer.
delimiter = None
param2read = "line NT int"
numParameters = 1
data = data_IO.read_int_from_file_pointer(fi, param2read, delimiter, 1)
print("{} value(s):{}".format(param2read, data))

# Read one comma-delimited integer.
delimiter = ','
param2read = "line NT comma int"
numParameters = 1
data = data_IO.read_int_from_file_pointer(fi, param2read, delimiter)
print("{} value(s):{}".format(param2read, data))

# Read two comma-delimited floats.
delimiter = ','
param2read = "line NT comma float"
numParameters = 1
data = data_IO.read_floats_from_file_pointer(fi, param2read, 2, delimiter, 1)
print("{} value(s):{}".format(param2read, data))
print("Number of provided arguments: ", len(sys.argv) - 1) print( "Usage: python writeBoundaryFile.py <boundaryFilePath> <boundaryRefFile>" ) sys.exit() boundaryFilePath = sys.argv[1] boundaryRefFile = sys.argv[2] print(boundaryRefFile) print(boundaryFilePath) import time time.sleep(1) # Read correct boundary types from the boundary reference file: bfRef = data_IO.open_file(boundaryRefFile) nBnds, bndDefStartLine = foamutils.getNumBoundariesFromFile(bfRef) bndInfo = foamutils.getBoundaryTypesFromFile(bfRef, bndDefStartLine) # Read mesh data from the auto generated boundary file bfMeshData = data_IO.open_file(boundaryFilePath + 'boundary', 'r+') for boundary in bndInfo: nFaces, startFace = foamutils.getBoundaryMeshInfo(boundary, bfMeshData) bndInfo[boundary]['nFaces'] = nFaces bndInfo[boundary]['startFace'] = startFace # Overwrite the auto generated boundary file bfMeshData.seek(0) bfRef.seek(0)
import sys
import data_IO
import json
import pvutils

# Input arguments:
if len(sys.argv) < 4:
    print("Number of provided arguments: ", len(sys.argv) - 1)
    print("Usage: pvpython convertcsv2json.py <desiredMetrics.csv> <desiredMetrics.json> <outputDir/>")
    sys.exit()

csvkpiAddress = sys.argv[1]
jsonFileName = sys.argv[2]
outputDir = sys.argv[3]

# Read the desired outputs/metrics from the csv file:
fp_csvin = data_IO.open_file(csvkpiAddress)
kpihash = pvutils.read_csv(fp_csvin)
fp_csvin.close()

# Dump the same structure as pretty-printed json.
json_path = outputDir + "/" + jsonFileName
fkjson = data_IO.open_file(json_path, "w")
fkjson.write(json.dumps(kpihash, indent=4))
fkjson.close()
def read_node_sets_from_inp(self, inp_file):
    """Parse the NSET definitions of an .inp file into self.node_sets."""
    inp_fp = data_IO.open_file(inp_file)
    self.node_sets = extract_sets_from_inp(inp_fp, 'NSET')
    inp_fp.close()
def read_element_sets_from_inp(self, inp_file):
    """Parse the ELSET definitions of an .inp file into self.element_sets."""
    inp_fp = data_IO.open_file(inp_file)
    self.element_sets = extract_sets_from_inp(inp_fp, 'ELSET')
    inp_fp.close()
print("Number of provided arguments: ", len(sys.argv) - 1) print( "Usage: python writeBlockMeshDictFile.py <stlFileAddress> <blockMeshDictPath>" ) sys.exit() stlFileAddress = sys.argv[1] blockMeshFilePath = sys.argv[2] tightBndBox = foamutils.getBoundingBoxFromStl(stlFileAddress) bndBox = foamutils.calcLooseBoundingBox(tightBndBox) vertices = foamutils.getBoxVertices(bndBox) blockMeshFile = data_IO.open_file(blockMeshFilePath + '/blockMeshDict', "w") blockMeshFile.write("FoamFile \n" "{ \n" " version 2.0; \n" " format ascii; \n" " class dictionary; \n" " location system; \n" " object blockMeshDict; \n" "} \n" " \n" " convertToMeters 1; \n" " vertices \n" " ( \n") for vertex in vertices: blockMeshFile.write(" ({} {} {}) \n".format(float(vertex[0]),
data2Read = pvutils.getfieldsfromkpihash(kpihash) dataReader = pvutils.readDataFile(dataFileAddress, data2Read) # Initialize renderView and display renderView1, readerDisplay = pvutils.initRenderView(dataReader, viewSize, backgroundColor) print("Generating KPIs") # Set the default values for missing fields in the kpihash for kpi in kpihash: kpihash[kpi] = metricsJsonUtils.setKPIFieldDefaults(kpihash[kpi], kpi) if not (kpihash[kpi]['field'] == 'None'): kpihash[kpi] = pvutils.correctfieldcomponent(dataReader, kpihash[kpi]) fp_csv_metrics = data_IO.open_file(metricFile, "w") fp_csv_metrics.write(",".join(['metric', 'ave', 'min', 'max', 'sd']) + "\n") renderView1.InteractionMode = '2D' renderView1.OrientationAxesVisibility = 0 for kpi in kpihash: if not data_IO.str2bool(kpihash[kpi]['IsParaviewMetric']): continue metrichash = kpihash[kpi] kpitype = metrichash['type'] kpifield = metrichash['field'] kpiComp = metrichash['fieldComponent'] kpiimage = metrichash['image'] extractStats = data_IO.str2bool(metrichash['extractStats']) makeAnim = data_IO.str2bool(metrichash['animation'])
import sys import data_IO # Input arguments: if len(sys.argv) < 3: print("Number of provided arguments: ", len(sys.argv) - 1) print("Usage: python writeCCXinpFile <simParams.in> <ccxInputFile.inp>") sys.exit() simParamsAddress = sys.argv[1] ccxInputFile = sys.argv[2] # Read parameters from input file fsimParams = data_IO.open_file(simParamsAddress, "r") dt = data_IO.read_float_from_file_pointer(fsimParams, "sim_dt") TotalTime = data_IO.read_float_from_file_pointer(fsimParams, "sim_totalTime") Temp0 = data_IO.read_float_from_file_pointer(fsimParams, "Temp0") fsimParams.close() fCcxInput = data_IO.open_file(ccxInputFile, "w") fCcxInput.write('*include, input=allinone.inp \n' '** material definition \n' '*include, input=materialLib.mat \n') fCcxInput.write('*solid section, elset=EbeadSolid, material=x6 \n') fCcxInput.write('*solid section, elset=EplateSolid, material=steel2 \n') fCcxInput.write('*initial conditions, type=temperature \n') fCcxInput.write('Nall,' + str(Temp0) + '\n') fCcxInput.write(' \n'
# Read the desired output metrics with open(outputParamsFileAddress) as foutParams: outParamsList = foutParams.read().splitlines()[0] outParamsList = outParamsList.split(',') paramTable = genOutputLookupTable(outParamsList) # Add outputs to the header for param in paramTable: header += ",out:" + param[0] # Read the desired metric from each output file for icase, case in enumerate(cases): extractedFile = resultsDirRootName + str(icase) + '/' + extractedFileName fcaseMetrics = data_IO.open_file(extractedFile, 'r') caseOutStr = "" for param in paramTable: param_icase = data_IO.read_float_from_file_pointer(fcaseMetrics,param[0], ',', param[1]) caseOutStr += "," + str(param_icase) caselist[icase] += caseOutStr fcaseMetrics.close() # Write the Desing Explorer csv file: f = open(outcsvFileAddress, "w") f.write(header+'\n') casel = "\n".join(caselist) f.write(casel+'\n')
# Input arguments: if len(sys.argv) < 4: print("Number of provided arguments: ", len(sys.argv) - 1) print( "Usage: python writeCCXinpFile <simParams.in> <inputMeshFile> <caseInputFile.inp>" ) sys.exit() simParamsAddress = sys.argv[1] inputMeshFile = sys.argv[2] caseInputFile = sys.argv[3] # Read parameters from input file fInput = data_IO.open_file(simParamsAddress, "r") dt = data_IO.read_float_from_file_pointer(fInput, "sim_dt") TotalTime = data_IO.read_float_from_file_pointer(fInput, "sim_totalTime") fInput.close() fInputCase = data_IO.open_file(caseInputFile, "w") fInputCase.write('*include, input=' + inputMeshFile + '\n') fInputCase.write(' \n' '** material definition \n' '*material, name=steel \n' '*elastic \n' '210000,0.333333333,0 \n' '*density \n'
import sys
import data_IO

# Input arguments:
if len(sys.argv) < 3:
    print("Number of provided arguments: ", len(sys.argv) - 1)
    print("Usage: python writeDFluxFile <fortranFile.f> <inputFile.in>")
    sys.exit()

fortranFileAddress = sys.argv[1]
inputFileAddress = sys.argv[2]

# Read the weld heat-source parameters from the input file.
fInput = data_IO.open_file(inputFileAddress, "r")
read_float = data_IO.read_float_from_file_pointer
a = read_float(fInput, "weld_a")
b = read_float(fInput, "weld_b")
c = read_float(fInput, "weld_c")
x0 = read_float(fInput, "weld_x0")
y0 = read_float(fInput, "weld_y0")
z0 = read_float(fInput, "weld_z0")
Q = read_float(fInput, "weld_Q")
vx = read_float(fInput, "weld_vx")
vy = read_float(fInput, "weld_vy")
vz = read_float(fInput, "weld_vz")
fInput.close()
) sys.exit() dataFileAddress = sys.argv[1] kpiFileAddress = sys.argv[2] outputDir = sys.argv[3] metricFile = sys.argv[4] # Image settings: individualImages = True magnification = 2 viewSize = [700, 600] backgroundColor = [1, 1, 1] # set background color to white # Read the desired outputs/metrics from the csv file: fp_jsonIn = data_IO.open_file(kpiFileAddress) kpihash = json.load(fp_jsonIn) kpihash = pvutils.byteify(kpihash) fp_jsonIn.close() print(kpihash) # disable automatic camera reset on 'Show' paraview.simple._DisableFirstRenderCameraReset() # Read data file data2Read = pvutils.getfieldsfromkpihash(kpihash) dataReader = pvutils.readDataFile(dataFileAddress, data2Read) # Initialize renderView and display renderView1, readerDisplay = pvutils.initRenderView(dataReader, viewSize, backgroundColor)
sys.exit() # solveexoFileAddress = \ # '/home/marmar/Dropbox/parallelWorks/weldingProject/paraviewPostProcess/outputs/case0/solve.exo' # kpiFileAddress = 'boxKPI.csv' # metricFileName = "metrics.csv" solveexoFileAddress = sys.argv[1] kpiFileAddress = sys.argv[2] outputDir = sys.argv[3] metricFileName = sys.argv[4] individualImages = True magnification = 2 # Read the desired outputs/metrics from the csv file: fp_csvin = data_IO.open_file(kpiFileAddress) kpihash = pvutils.read_csv(fp_csvin) fp_csvin.close() cellsarrays = pvutils.getfieldsfromkpihash(kpihash) #### disable automatic camera reset on 'Show' paraview.simple._DisableFirstRenderCameraReset() ## Read the results file : create a new 'ExodusIIReader' solveExo = ExodusIIReader(FileName=solveexoFileAddress) # get animation scene animationScene1 = GetAnimationScene() # update animation scene based on data timesteps