def update(self):
	"""Rebuild the simulation-mode combo from the .inp files in the
	simulation directory, then select the entry matching sim.inp's
	#simmode token (of the form "command@module")."""
	self.sim_mode.get_model().clear()
	lines=[]
	self.store_list=[]
	files=inp_lsdir()
	if files!=False:
		for i in range(0,len(files)):
			if files[i].endswith(".inp"):
				inp_load_file(lines,files[i])
				value=inp_search_token_value(lines, "#sim_menu_name")
				if value!=False:
					# Only menu names of the form "command@module" are usable.
					if value.count("@")==1:
						value=value.rstrip()
						command,module=value.split("@")
						self.sim_mode.append_text(command)
						self.store_list.append(store(command,module))
	# Select the combo row whose text matches the currently configured mode.
	token=inp_get_token_value("sim.inp", "#simmode")
	command,module=token.split("@")
	liststore = self.sim_mode.get_model()
	for i in xrange(len(liststore)):
		if liststore[i][0] == command:
			self.sim_mode.set_active(i)
def __init__(self,index):
	"""Build a one-row widget: a label plus an editable combo box
	pre-loaded with the pulse load types, selecting the mode stored in
	pulse<index>.inp's #pulse_sim_mode."""
	self.index=index
	QWidget.__init__(self)
	box=QHBoxLayout()
	caption=QLabel()
	caption.setText(_("Load type:"))
	box.addWidget(caption)
	self.sim_mode = QComboBox(self)
	self.sim_mode.setEditable(True)
	box.addWidget(self.sim_mode)
	self.setLayout(box)
	for mode in ("open_circuit", "load", "ideal_diode_ideal_load"):
		self.sim_mode.addItem(mode)
	lines=[]
	inp_load_file(lines,os.path.join(get_inp_file_path(),"pulse"+str(self.index)+".inp"))
	token=inp_get_token_value("pulse"+str(self.index)+".inp", "#pulse_sim_mode")
	# Select the entry matching the stored token, if any.
	for pos in range(self.sim_mode.count()):
		if self.sim_mode.itemText(pos) == token:
			self.sim_mode.setCurrentIndex(pos)
	# Connect after the initial selection so setup does not fire the callback.
	self.sim_mode.currentIndexChanged.connect(self.call_back_sim_mode_changed)
def init(self,index):
	"""Build the GTK load-type selector for pulse<index>.inp and select
	the stored #pulse_sim_mode value."""
	self.index=index
	self.sim_mode = gtk.combo_box_entry_new_text()
	self.sim_mode.set_size_request(-1, 20)
	lines=[]
	inp_load_file(lines,find_data_file("pulse"+str(self.index)+".inp"))
	self.sim_mode.append_text("open_circuit")
	self.sim_mode.append_text("load")
	self.sim_mode.child.connect('changed', self.call_back_sim_mode_changed)
	token=inp_get_token_value("pulse"+str(self.index)+".inp", "#pulse_sim_mode")
	# Walk the combo's model to select the row matching the stored token.
	liststore = self.sim_mode.get_model()
	for i in xrange(len(liststore)):
		if liststore[i][0] == token:
			self.sim_mode.set_active(i)
	lable=gtk.Label("Load type:")
	#lable.set_width_chars(15)
	lable.show()
	hbox = gtk.HBox(False, 2)
	hbox.pack_start(lable, False, False, 0)
	hbox.pack_start(self.sim_mode, False, False, 0)
	self.add(hbox);
	self.show_all()
def draw(self):
	"""Draw the device stack: one coloured box per epitaxy layer (colour
	from each material's mat.inp), incoming photons scaled by #Psun,
	emitted photons when any layer's PL file enables emission."""
	emission=False
	lines=[]
	# Emission is on if any layer's PL file's second line is true.
	for i in range(0,epitaxy_get_layers()):
		if epitaxy_get_pl_file(i)!="none":
			if inp_load_file(lines,epitaxy_get_pl_file(i)+".inp")==True:
				if str2bool(lines[1])==True:
					emission=True
	# Total stack width, used to scale each layer to the 200px drawing.
	tot=0
	for i in range(0,epitaxy_get_layers()):
		tot=tot+epitaxy_get_width(i)
	pos=0.0
	l=epitaxy_get_layers()-1
	lines=[]
	# Draw layers from the top of the stack (index l) downwards.
	for i in range(0,epitaxy_get_layers()):
		thick=200.0*epitaxy_get_width(l-i)/tot
		pos=pos+thick
		path=os.path.join(get_materials_path(),epitaxy_get_mat_file(l-i),"mat.inp")
		if inp_load_file(lines,path)==True:
			red=float(inp_search_token_value(lines, "#Red"))
			green=float(inp_search_token_value(lines, "#Green"))
			blue=float(inp_search_token_value(lines, "#Blue"))
		else:
			print "Could not load",path
			# Fall back to black when the material file is missing.
			red=0.0
			green=0.0
			blue=0.0
		self.draw_box(200,450.0-pos,thick*0.9,red,green,blue,l-i)
	# Photon spacing: brighter illumination draws more (closer) photons.
	step=50.0
	lines=[]
	if inp_load_file(lines,os.path.join(os.getcwd(),"light.inp"))==True:
		self.sun=float(inp_search_token_value(lines, "#Psun"))
		if self.sun<=0.01:
			step=200
		elif self.sun<=0.1:
			step=100
		elif self.sun<=1.0:
			step=50
		elif self.sun<=10.0:
			step=10
		else:
			# NOTE(review): a float step would raise TypeError in range()
			# if self.sun>10 is ever hit -- confirm.
			step=5.0
		if self.sun!=0:
			for x in range(0,200,step):
				self.draw_photon(270+x,50)
	if emission==True:
		for x in range(0,200,50):
			self.draw_photon_up(240+x,180)
	self.draw_mode(200,250,200)
def populate_combo_box_using_input_file(self,combobox,input_file):
	"""Reset *combobox* and fill it with every '#' token line found in
	*input_file* (resolved relative to self.path)."""
	lines=[]
	inp_load_file(lines,os.path.join(self.path,input_file))
	combobox.clear()
	for raw in lines:
		entry=raw.rstrip()
		# Only token lines (starting with '#') become combo entries.
		if entry.startswith("#"):
			combobox.addItem(entry)
def load(self):
	"""Populate the function table from self.file_name.

	The file is a flat token stream: groups of five (#tag, value) pairs
	giving function, enabled, a, b and c; reading stops at #end or #ver.
	Every tag is also registered with the scan-item system.
	"""
	lines=[]
	self.tab.clear()
	self.tab.setHorizontalHeaderLabels([_("Function"), _("Enabled"), _("a"), _("b"), _("c")])
	inp_load_file(lines,self.file_name)
	print(self.file_name,lines)
	pos=0
	while True:
		if lines[pos]=="#end":
			break
		if lines[pos]=="#ver":
			break
		tag=lines[pos]
		scan_item_add(self.file_name,tag,tag,1)
		pos=pos+1	#skip hash tag
		function=lines[pos]	#read label
		pos=pos+1
		tag=lines[pos]
		scan_item_add(self.file_name,tag,tag,1)
		pos=pos+1	#skip hash tag
		enabled=lines[pos]	#read value
		pos=pos+1
		tag=lines[pos]
		scan_item_add(self.file_name,tag,tag,1)
		pos=pos+1	#skip hash tag
		a=lines[pos]	#read value
		pos=pos+1
		tag=lines[pos]
		scan_item_add(self.file_name,tag,tag,1)
		pos=pos+1	#skip hash tag
		b=lines[pos]	#read value
		pos=pos+1
		tag=lines[pos]
		scan_item_add(self.file_name,tag,tag,1)
		pos=pos+1	#skip hash tag
		c=lines[pos]	#read value
		pos=pos+1
		tab_add(self.tab,[ str(function), str(enabled), str(a), str(b), str(c)])
def scan_populate_from_file(filename):
	"""Register every '#' token of *filename* that the token library
	recognises and that is not already in the scan-item list."""
	lines = []
	inp_load_file(lines, filename)
	my_token_lib = tokens()
	for token in lines:
		if not token.startswith("#"):
			continue
		result = my_token_lib.find(token)
		if result == False:
			continue
		if scan_items_index_item(token) == -1:
			scan_item_add(filename, token, result.info, 1)
def load_data(self):
	"""Read fxmesh<index>.inp (version 1.0) into self.list as
	(length, dfx, mul) segment tuples.  Returns True on success."""
	lines=[]
	self.start_fx=0.0
	self.list=[]
	file_name="fxmesh"+str(self.index)+".inp"
	ret=inp_load_file(lines,file_name)
	if ret==True:
		if inp_search_token_value(lines, "#ver")=="1.0":
			pos=0
			token,value,pos=inp_read_next_item(lines,pos)
			# NOTE(review): sets fx_start while the reset above uses
			# start_fx -- confirm which attribute callers actually read.
			self.fx_start=float(value)
			token,value,pos=inp_read_next_item(lines,pos)
			segments=int(value)
			for i in range(0, segments):
				token,length,pos=inp_read_next_item(lines,pos)
				token,dfx,pos=inp_read_next_item(lines,pos)
				token,mul,pos=inp_read_next_item(lines,pos)
				self.list.append((length,dfx,mul))
			print self.list
			return True
		else:
			print "file "+file_name+"wrong version"
			exit("")
			return False
	else:
		print "file "+file_name+" not found"
		return False
	return False
def recalculate(self):
	"""Reload the graph data, its max/min, the illumination level and the
	per-layer display colours, then trigger a redraw."""
	self.colors=[]
	lines=[]
	if dat_file_read(self.graph_data,self.graph_path)==True:
		#print(self.graph_path)
		self.graph_z_max,self.graph_z_min=dat_file_max_min(self.graph_data)
		#print(self.graph_z_max,self.graph_z_min)
		val=inp_get_token_value("light.inp", "#Psun")
		self.suns=float(val)
		l=epitaxy_get_layers()-1
		# Walk the stack from the top layer down, reading each colour from mat.inp.
		for i in range(0,epitaxy_get_layers()):
			path=os.path.join(get_materials_path(),epitaxy_get_mat_file(l-i),"mat.inp")
			if inp_load_file(lines,path)==True:
				red=float(inp_search_token_value(lines, "#Red"))
				green=float(inp_search_token_value(lines, "#Green"))
				blue=float(inp_search_token_value(lines, "#Blue"))
			else:
				# Missing material file: fall back to black.
				red=0.0
				green=0.0
				blue=0.0
			self.colors.append(color(red,green,blue))
		# Reverse so colors[0] corresponds to layer 0.
		self.colors.reverse()
		self.update()
def __init__(self, file_name):
	"""Assemble the fit window tabs (experimental data, delta plot,
	configuration, fit patch, MATLAB editor) for fit<index>.inp."""
	QTabWidget.__init__(self)
	self.file_name = file_name
	css_apply(self, "tab_default.css")
	lines = []
	# The numeric fit index is encoded in the file name.
	self.index = int(extract_number_from_file_name(file_name))
	lines = inp_load_file(os.path.join(get_sim_path(), "fit" + str(self.index) + ".inp"))
	if lines != False:
		self.tab_name = inp_search_token_value(lines, "#fit_name")
	else:
		self.tab_name = ""
	#self.setTabsClosable(True)
	#self.setMovable(True)
	self.tmesh_real = fit_window_plot_real(self.index)
	self.addTab(self.tmesh_real, _("Experimental data"))
	self.tmesh = fit_window_plot(self.index)
	self.addTab(self.tmesh, _("Delta=Experiment - Simulation"))
	config = tab_class(os.path.join(get_sim_path(), "fit" + str(self.index) + ".inp"))
	self.addTab(config, _("Configure fit"))
	self.fit_patch = fit_patch(self.index)
	self.addTab(self.fit_patch, _("Fit patch"))
	self.matlab_editor = matlab_editor(self.index)
	self.addTab(self.matlab_editor, _("MATLAB code"))
def update(self):
	"""Refresh both fit plots, but only when this fit is enabled."""
	lines = inp_load_file(self.get_file_name())
	if lines == False:
		return
	enabled = str2bool(inp_search_token_value(lines, "#enabled"))
	if enabled != True:
		return
	self.tmesh_real.update()
	self.tmesh.update()
def mesh_load(vector):
	"""Load mesh_<vector>.inp ('x', 'y' or 'z') and rebuild that axis's
	layer list via mesh_add()."""
	file_name="mesh_"+vector+".inp"
	if vector=="x":
		mesh_clear_xlist()
	elif vector=="y":
		mesh_clear_ylist()
	elif vector=="z":
		mesh_clear_zlist()
	global xlist	# NOTE(review): xlist is never used below -- looks like a leftover.
	my_list=[]
	pos=0
	lines=[]
	print("loading",os.path.join(os.getcwd(),file_name))
	if inp_load_file(lines,os.path.join(os.getcwd(),file_name))==True:
		pos=pos+1	#first comment
		mesh_layers=int(lines[pos])
		for i in range(0, mesh_layers):
			#thick
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			thick=lines[pos]	#read value
			#points
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			points=lines[pos]	#read value
			mesh_add(vector,thick,points)
def create_model_math(self):
	"""Fill the maths table from self.file_name.

	Each 'math' record has the form:
		math <file_a> <token_a> <file_b> <token_b> <equation>
	The scan paths for both (file, token) pairs are resolved and shown.
	"""
	self.tab_math.clear()
	self.tab_math.setColumnCount(7)
	self.tab_math.setSelectionBehavior(QAbstractItemView.SelectRows)
	#math fit_target2.inp
	#fxdomain_Ji ./sim/39.811Hz/sim_info.dat
	#fxdomain_Ji abs(20*(a-b))
	self.tab_math.setHorizontalHeaderLabels([_("File (a)"), _("Token (a)"), _("Path (a)"), _("File (b)"), _("Token (b)"), _("Path (b)"), _("Equation")])
	lines = inp_load_file(self.file_name)
	if lines != False:
		pos = 0
		# Bound the scan by the file length: previously a file without an
		# "#end" marker ran past the end of the list (IndexError).
		while pos < len(lines):
			if lines[pos] == "#end":
				break
			line = lines[pos].split()
			# Guard against blank lines (split() gives []) before line[0].
			if len(line) > 0 and line[0] == "math":
				print(line)
				path_a = scan_items_lookup_item(line[1], line[2])
				path_b = scan_items_lookup_item(line[3], line[4])
				self.insert_row_math(self.tab_math.rowCount(), line[1], line[2], path_a, line[3], line[4], path_b, line[5])
			pos = pos + 1
def create_model_mm(self):
	"""Fill the min/max table from self.file_name.

	Each 'mm' record has the form:
		mm <file> <token> <function> <max> <min> <error>
	The scan path for the (file, token) pair is resolved and shown.
	"""
	self.tab_mm.clear()
	self.tab_mm.setColumnCount(7)
	self.tab_mm.setSelectionBehavior(QAbstractItemView.SelectRows)
	self.tab_mm.setHorizontalHeaderLabels([_("File"), _("Token"), _("Path"), _("Function"), _("Max"), _("Min"), _("Error")])
	self.tab_mm.setColumnWidth(2, 300)
	#self.tab_mm.setColumnWidth(5, 200)
	pos = 0
	lines = inp_load_file(self.file_name)
	if lines != False:
		# Bound the scan by the file length: previously a file without an
		# "#end" marker ran past the end of the list (IndexError).
		while pos < len(lines):
			if lines[pos] == "#end":
				break
			line = lines[pos].split()
			print(line)
			# Guard against blank lines (split() gives []) before line[0].
			if len(line) > 0 and line[0] == "mm":
				path = scan_items_lookup_item(line[1], line[2])
				self.tab_mm.insertRow(self.tab_mm.rowCount())
				self.insert_row_mm(self.tab_mm.rowCount() - 1, line[1], line[2], path, line[3], line[4], line[5], line[6])
			pos = pos + 1
def __init__(self,index):
	"""Fit window (older variant): tabs for fit error, experimental data,
	fit patch and configuration, named from fit<index>.inp's #fit_name."""
	QTabWidget.__init__(self)
	lines=[]
	self.index=index
	if inp_load_file(lines,"fit"+str(self.index)+".inp")==True:
		self.tab_name=inp_search_token_value(lines, "#fit_name")
	else:
		self.tab_name=""
	self.setTabsClosable(True)
	self.setMovable(True)
	self.tmesh = fit_window_plot(self.index)
	self.addTab(self.tmesh,_("Fit error"))
	self.tmesh_real = fit_window_plot_real(self.index)
	self.addTab(self.tmesh_real,_("Experimental data"))
	self.fit_patch = fit_patch(self.index)
	self.addTab(self.fit_patch, _("Fit patch"))
	config=tab_class()
	config.init("fit"+str(self.index)+".inp",self.tab_name)
	self.addTab(config,_("Configure fit"))
def load_config(self):
	"""Restore the import-dialog widgets from self.config_file_path.

	Returns True when the config and the data file it references both
	exist, False otherwise.
	"""
	lines = []
	lines = inp_load_file(self.config_file_path)
	if lines != False:
		self.file_name = inp_get_token_value_from_list(lines, "#import_file_path")
		# Without the source data file the rest of the config is useless.
		if os.path.isfile(self.file_name) == False:
			return False
		self.x_combo.setCurrentIndex(int(inp_get_token_value_from_list(lines, "#import_x_combo_pos")))
		self.y_combo.setCurrentIndex(int(inp_get_token_value_from_list(lines, "#import_y_combo_pos")))
		self.title_entry.setText(inp_get_token_value_from_list(lines, "#import_title"))
		self.xlabel_entry.setText(inp_get_token_value_from_list(lines, "#import_xlabel"))
		self.ylabel_entry.setText(inp_get_token_value_from_list(lines, "#import_ylabel"))
		self.area_entry.setText(inp_get_token_value_from_list(lines, "#import_area"))
		self.x_spin.setValue(int(inp_get_token_value_from_list(lines, "#import_x_spin")))
		self.y_spin.setValue(int(inp_get_token_value_from_list(lines, "#import_y_spin")))
		return True
	else:
		return False
def __init__(self,index):
	"""Pulse simulation window: time-mesh, circuit and configure tabs,
	named from pulse<index>.inp's #sim_menu_name."""
	QTabWidget.__init__(self)
	self.index=index
	lines=[]
	if inp_load_file(lines,"pulse"+str(self.index)+".inp")==True:
		self.tab_name=inp_search_token_value(lines, "#sim_menu_name")
	else:
		self.tab_name=""
	self.setTabsClosable(True)
	self.setMovable(True)
	self.tmesh = tab_time_mesh(self.index)
	self.addTab(self.tmesh,_("time mesh"))
	self.circuit=circuit(self.index)
	self.addTab(self.circuit,_("Circuit"))
	tab=tab_class()
	tab.init("pulse"+str(self.index)+".inp","Configure")
	self.addTab(tab,"Configure")
def load(self):
	"""Rebuild the GTK mesh model from mesh.inp: one (thickness, points)
	row per mesh layer, registering every token with the scan system."""
	self.mesh_model.clear()
	lines=[]
	pos=0
	if inp_load_file(lines,os.path.join(os.getcwd(),"mesh.inp"))==True:
		pos=pos+1	#first comment
		mesh_layers=int(lines[pos])
		for i in range(0, mesh_layers):
			pos=pos+1	#token
			token=lines[pos]
			scan_item_add("mesh.inp",token,"Mesh width"+str(i),1)
			pos=pos+1
			thicknes=lines[pos]	#read value
			pos=pos+1	#token
			token=lines[pos]
			scan_item_add("mesh.inp",token,"Mesh points"+str(i),1)
			pos=pos+1
			points=lines[pos]	#read value
			iter = self.mesh_model.append()
			self.mesh_model.set (iter, MESH_THICKNES, str(thicknes), MESH_POINTS, str(points) )
def mesh_load(vector):
	"""Load mesh_<vector>.inp ('x', 'y' or 'z'): clear that axis, read the
	remesh flag into the matching global list, then add one
	(thick, points, mul, left_right) entry per layer.

	Returns False when the file is in the old (pre-#remesh_enable)
	format, True otherwise.
	"""
	file_name="mesh_"+vector+".inp"
	if vector=="x":
		mesh_clear_xlist()
	elif vector=="y":
		mesh_clear_ylist()
	elif vector=="z":
		mesh_clear_zlist()
	my_list=[]
	pos=0
	lines=inp_load_file(file_name)
	if lines!=False:
		if lines[pos]!="#remesh_enable":	#Check we are not trying to open an old version
			return False
		pos=pos+1	#first comment
		remesh=str2bool(lines[pos])
		pos=pos+1	#remesh
		if vector=="x":
			global xlist
			xlist.remesh=remesh
		elif vector=="y":
			global ylist
			ylist.remesh=remesh
		elif vector=="z":
			global zlist
			zlist.remesh=remesh
		pos=pos+1	#first comment
		mesh_layers=int(lines[pos])
		for i in range(0, mesh_layers):
			#thick
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			thick=lines[pos]
			#length
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			points=lines[pos]
			#points
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			mul=lines[pos]
			#mul
			pos=pos+1	#token
			token=lines[pos]
			pos=pos+1
			left_right=lines[pos]
			#left_right
			mesh_add(vector,thick,points,mul,left_right)
	return True
def load_ref(file_name):
	"""Load the .ref citation file associated with *file_name* (possibly
	inside the enclosing directory's .zip archive).

	Returns a populated ref object, or None when the file cannot be
	read.  Missing tokens become empty strings.
	"""
	r = ref()
	#we could have zipped the file
	archive = os.path.basename(os.path.dirname(file_name)) + ".zip"
	file_name = os.path.splitext(file_name)[0] + ".ref"
	lines = inp_load_file(file_name, archive=archive)
	if lines == False:
		return None
	def _token(name):
		# Normalize missing tokens (None) to "" -- this replaces ten
		# copy-pasted "if x == None: x = ''" stanzas.
		value = inp_get_token_value_from_list(lines, name)
		return "" if value is None else value
	r.website = _token("#ref_website")
	r.group = _token("#ref_research_group")
	r.author = _token("#ref_authors")
	r.journal = _token("#ref_jounral")	# token spelling is historical; do not "fix" it
	r.title = _token("#ref_title")
	r.volume = _token("#ref_volume")
	r.pages = _token("#ref_pages")
	r.year = _token("#ref_year")
	r.doi = _token("#ref_doi")
	r.unformatted = _token("#ref_unformatted")
	return r
def import_data(self, file_name, x_col=0, y_col=1, skip_lines=0, known_col_sep=None):
	"""This is an import filter for xy data.

	Reads *file_name*, skips *skip_lines*, then collects one (x, y)
	value pair per data row into a 1x1xN data cube, dropping rows whose
	x value has already been seen.  Returns True on success.
	"""
	lines = []
	lines = inp_load_file(file_name)
	if lines == False:
		return False
	if len(lines) < skip_lines:
		return False
	# Column identifiers may be names; translate them to indexes first.
	x_col = col_name_to_pos(lines, x_col, known_col_sep)
	y_col = col_name_to_pos(lines, y_col, known_col_sep)
	lines = lines[skip_lines:]
	self.x_scale = []
	self.y_scale = []
	self.z_scale = []
	self.data = []
	data_started = False
	# 1x1xN data cube: a single growing column of values.
	self.data = [[[0.0 for k in range(0)] for j in range(1)] for i in range(1)]
	for i in range(0, len(lines)):
		s, label = decode_line(lines[i], known_col_sep=known_col_sep)
		#print(s)
		l = len(s)
		if l > 0:
			# Skip any header: data starts at the first numeric first field.
			if data_started == False:
				if is_number(s[0]) == True:
					data_started = True
			if s[0] == "#end":
				break
			if data_started == True:
				if max(x_col, y_col) < l:
					# NOTE(review): x values are stored in y_scale (and
					# x_len/y_len set accordingly) -- presumably deliberate
					# for this data layout; confirm against dat_file usage.
					duplicate = False
					for c in range(0, len(self.y_scale)):
						if self.y_scale[c] == float(s[x_col]):
							duplicate = True
							break
					if duplicate == False:
						self.y_scale.append(float(s[x_col]))
						self.data[0][0].append(float(s[y_col]))
	self.x_len = 1
	self.y_len = len(self.data[0][0])
	self.z_len = 1
	return True
def update(self):
	"""Refresh both fit plots, but only when this fit is enabled."""
	lines = inp_load_file(os.path.join(get_sim_path(), "fit" + str(self.index) + ".inp"))
	if lines == False:
		return
	enabled = str2bool(inp_search_token_value(lines, "#enabled"))
	if enabled != True:
		return
	self.tmesh_real.update()
	self.tmesh.update()
def load(self):
	"""Populate the function table from self.file_name.

	The file is a flat token stream: groups of five (#tag, value) pairs
	giving function, enabled, a, b and c; reading stops at #end or #ver.
	Every tag is also registered with the scan-item system.
	"""
	lines = []
	self.tab.clear()
	self.tab.setHorizontalHeaderLabels([_("Function"), _("Enabled"), _("a"), _("b"), _("c")])
	lines = inp_load_file(self.file_name)
	#print(self.file_name,lines)
	pos = 0
	while True:
		if lines[pos] == "#end":
			break
		if lines[pos] == "#ver":
			break
		tag = lines[pos]
		scan_item_add(self.file_name, tag, tag, 1)
		pos = pos + 1	#skip hash tag
		function = lines[pos]	#read label
		pos = pos + 1
		tag = lines[pos]
		scan_item_add(self.file_name, tag, tag, 1)
		pos = pos + 1	#skip hash tag
		enabled = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		scan_item_add(self.file_name, tag, tag, 1)
		pos = pos + 1	#skip hash tag
		a = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		scan_item_add(self.file_name, tag, tag, 1)
		pos = pos + 1	#skip hash tag
		b = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		scan_item_add(self.file_name, tag, tag, 1)
		pos = pos + 1	#skip hash tag
		c = lines[pos]	#read value
		pos = pos + 1
		tab_add(self.tab, [str(function), str(enabled), str(a), str(b), str(c)])
def load(self):
	"""Populate the table from self.file_name without emitting change
	signals.

	The file is groups of five (#tag, value) pairs: function, enabled,
	a, b and c; reading stops at #end or #ver.
	"""
	self.tab.blockSignals(True)
	lines = []
	self.tab.clear()
	self.tab.setHorizontalHeaderLabels([_("Function"), _("Enabled"), _("a"), _("b"), _("c")])
	lines = inp_load_file(self.file_name)
	pos = 0
	row = 0
	while True:
		if lines[pos] == "#end":
			break
		if lines[pos] == "#ver":
			break
		tag = lines[pos]
		pos = pos + 1	#skip hash tag
		function = lines[pos]	#read label
		pos = pos + 1
		tag = lines[pos]
		pos = pos + 1	#skip hash tag
		enabled = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		pos = pos + 1	#skip hash tag
		a = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		pos = pos + 1	#skip hash tag
		b = lines[pos]	#read value
		pos = pos + 1
		tag = lines[pos]
		pos = pos + 1	#skip hash tag
		c = lines[pos]	#read value
		pos = pos + 1
		# Grow the table on demand, then fill the row.
		if (row + 1) > self.tab.rowCount():
			self.tab.insert_row()
		self.add_row(row, function, enabled, a, b, c)
		row = row + 1
	self.tab.blockSignals(False)
def load(self):
	"""Show self.token's value from self.file_name in the text editor.

	If the token is missing the editor shows "New file" and the token is
	written back: appended to the existing file when it is a valid v1.0
	file, otherwise into a freshly created one.
	"""
	ret=inp_get_token_array(self.file_name, self.token)
	if ret!=False:
		#We have found the file and got the token
		self.ui.text.setText("\n".join(ret))
	else:
		self.ui.text.setText(_("New file"))
		if inp_check_ver(self.file_name, "1.0")==True:
			#The file exists but there is no token.
			lines=[]
			inp_load_file(lines,self.file_name)
			lines=inp_add_token(lines,self.token,self.ui.text.toPlainText())
			print("written to 1",self.file_name)
			inp_save_lines(self.file_name,lines)
		else:
			#The file does not exist or there is an error
			lines=inp_new_file()
			lines=inp_add_token(lines,self.token,self.ui.text.toPlainText())
			print("written to 2",self.file_name,lines)
			inp_save_lines(self.file_name,lines)
def update(self):
	"""Rebuild the mode combo from every .inp file declaring a
	command@module #sim_menu_name, then select the mode stored in
	sim.inp.  If the stored mode is unknown, fall back to jv mode and
	persist that choice."""
	self.sim_mode.clear()
	lines=[]
	self.store_list=[]
	files=inp_lsdir()
	if files!=False:
		for i in range(0,len(files)):
			if files[i].endswith(".inp"):
				inp_load_file(lines,files[i])
				value=inp_search_token_value(lines, "#sim_menu_name")
				if value!=False:
					if value.count("@")==1:
						value=value.rstrip()
						command,module=value.split("@")
						self.sim_mode.addItem(command)
						a=store(command,module)
						self.store_list.append(a)
	token=inp_get_token_value("sim.inp", "#simmode")
	if token.count("@")!=0:
		command,module=token.split("@")
	else:
		command=token
	found=False
	all_items = [self.sim_mode.itemText(i) for i in range(self.sim_mode.count())]
	for i in range(0,len(all_items)):
		if all_items[i] == command:
			self.sim_mode.setCurrentIndex(i)
			found=True
	#if there is no known mode, just set it to jv mode
	if found==False:
		for i in range(0,len(self.store_list)):
			if self.store_list[i].token=="jv":
				# Bug fix: .activated is a Qt signal and not callable;
				# select the row directly, as the newer version of this
				# method does, instead of self.sim_mode.activated(i).
				self.sim_mode.setCurrentIndex(i)
				inp_update_token_value("sim.inp", "#simmode", "jv@jv",1)
				break
def epitaxy_populate_rgb():
	"""Fill the newest epitaxy layer's display colour (r, g, b, alpha)
	from the #red_green_blue / #mat_alpha tokens of its mat.inp."""
	global epi
	layer = epi[-1]
	mat_path = os.path.join(get_materials_path(), layer.mat_file, "mat.inp")
	mat_lines = inp_load_file(mat_path)
	rgb = inp_search_token_array(mat_lines, "#red_green_blue")
	if rgb != False:
		layer.r = float(rgb[0])
		layer.g = float(rgb[1])
		layer.b = float(rgb[2])
		layer.alpha = float(inp_search_token_value(mat_lines, "#mat_alpha"))
def populate_combo_box_using_input_file(self, combobox, input_file):
	"""Reset *combobox* and fill it with every '#' token line found in
	*input_file* (resolved relative to self.path)."""
	lines = inp_load_file(os.path.join(self.path, input_file))
	combobox.clear()
	for raw in lines:
		entry = raw.rstrip()
		# Only token lines (starting with '#') become combo entries.
		if entry.startswith("#"):
			combobox.addItem(entry)
def sync_to_electrical_mesh(self):
	"""Sum the thickness of every device row in the model and, when the
	electrical mesh has exactly one layer, write that total into
	mesh_y.inp's #mesh_layer_length0."""
	tot=0
	for i in range(0,len(self.model)):
		if yes_no(self.model[i][COLUMN_DEVICE])==True:
			tot=tot+float(self.model[i][COLUMN_THICKNES])
	lines=[]
	if inp_load_file(lines,os.path.join(os.getcwd(),"mesh_y.inp"))==True:
		mesh_layers=int(inp_search_token_value(lines, "#mesh_layers"))
		# Only update when the mesh has a single layer.
		if mesh_layers==1:
			inp_update_token_value(os.path.join(os.getcwd(),"mesh_y.inp"), "#mesh_layer_length0", str(tot),1)
def load(self):
	"""Rebuild the global window list from window_list.inp: a count line
	followed by (name, x, y) triplets."""
	global wlist
	wlist=[]
	lines=[]
	if inp_load_file(lines,"window_list.inp")==True:
		number=int(lines[0])
		for i in range(0,number):
			a=window_item()
			a.name=lines[i*3+1]
			# NOTE(review): x/y are stored as strings, not ints -- confirm
			# consumers convert before use.
			a.x=lines[i*3+2]
			a.y=lines[i*3+3]
			wlist.append(a)
def plot_load_oplot_file(plot_token, file_name):
	"""Load an .oplot plot-description file into *plot_token*.

	Returns True when the file could be read, False otherwise.
	"""
	lines = inp_load_file(file_name)
	if lines == False:
		return False
	plot_token.logy = str2bool(inp_search_token_value(lines, "#logy"))
	plot_token.logx = str2bool(inp_search_token_value(lines, "#logx"))
	plot_token.logz = str2bool(inp_search_token_value(lines, "#logz"))
	plot_token.grid = str2bool(inp_search_token_value(lines, "#grid"))
	plot_token.invert_y = str2bool(inp_search_token_value(lines, "#invert_y"))
	plot_token.normalize = str2bool(inp_search_token_value(lines, "#normalize"))
	plot_token.norm_to_peak_of_all_data = str2bool(inp_search_token_value(lines, "#norm_to_peak_of_all_data"))
	plot_token.subtract_first_point = str2bool(inp_search_token_value(lines, "#subtract_first_point"))
	plot_token.add_min = str2bool(inp_search_token_value(lines, "#add_min"))
	plot_token.file0 = inp_search_token_value(lines, "#file0")
	plot_token.file1 = inp_search_token_value(lines, "#file1")
	plot_token.file2 = inp_search_token_value(lines, "#file2")
	plot_token.tag0 = inp_search_token_value(lines, "#tag0")
	plot_token.tag1 = inp_search_token_value(lines, "#tag1")
	plot_token.tag2 = inp_search_token_value(lines, "#tag2")
	plot_token.legend_pos = inp_search_token_value(lines, "#legend_pos")
	plot_token.key_units = inp_search_token_value(lines, "#key_units")
	plot_token.label_data = str2bool(inp_search_token_value(lines, "#label_data"))
	plot_token.type = inp_search_token_value(lines, "#type")
	plot_token.x_label = inp_search_token_value(lines, "#x_label")
	plot_token.y_label = inp_search_token_value(lines, "#y_label")
	plot_token.z_label = inp_search_token_value(lines, "#z_label")
	plot_token.data_label = inp_search_token_value(lines, "#data_label")
	plot_token.x_units = inp_search_token_value(lines, "#x_units")
	plot_token.y_units = inp_search_token_value(lines, "#y_units")
	# Bug fix: "#z_units" was previously assigned to plot_token.y_units,
	# clobbering the y units and leaving z_units unset.
	plot_token.z_units = inp_search_token_value(lines, "#z_units")
	plot_token.data_units = inp_search_token_value(lines, "#data_units")
	plot_token.x_mul = float(inp_search_token_value(lines, "#x_mul"))
	plot_token.y_mul = float(inp_search_token_value(lines, "#y_mul"))
	plot_token.z_mul = float(inp_search_token_value(lines, "#z_mul"))
	plot_token.data_mul = float(inp_search_token_value(lines, "#data_mul"))
	plot_token.x_start = float(inp_search_token_value(lines, "#x_start"))
	plot_token.x_stop = float(inp_search_token_value(lines, "#x_stop"))
	plot_token.x_points = float(inp_search_token_value(lines, "#x_points"))
	plot_token.y_start = float(inp_search_token_value(lines, "#y_start"))
	plot_token.y_stop = float(inp_search_token_value(lines, "#y_stop"))
	plot_token.y_points = float(inp_search_token_value(lines, "#y_points"))
	plot_token.time = float(inp_search_token_value(lines, "#time"))
	plot_token.Vexternal = float(inp_search_token_value(lines, "#Vexternal"))
	return True
def update(self):
	"""Rebuild the pump-laser combo from every .inp file declaring a
	#laser_name, then select the laser named by #pump_laser in the
	config file."""
	self.sim_mode.clear()
	lines=[]
	files=inp_lsdir()
	if files!=False:
		for i in range(0,len(files)):
			if files[i].endswith(".inp"):
				inp_load_file(lines,files[i])
				value=inp_search_token_value(lines, "#laser_name")
				if value!=False:
					value=value.rstrip()
					self.sim_mode.addItem(value)
	token=inp_get_token_value(self.config_file, "#pump_laser")
	all_items = [self.sim_mode.itemText(i) for i in range(self.sim_mode.count())]
	for i in range(0,len(all_items)):
		if all_items[i] == token:
			# Cleanup: removed dead "found=True" -- the flag was never
			# initialised or read in this method.
			self.sim_mode.setCurrentIndex(i)
def update(self):
	"""Rebuild the mode combo from every top-level .inp in sim.gpvdm that
	declares a command@module #sim_menu_name (sorted alphabetically),
	then select sim.inp's #simmode, falling back to jv mode (and saving
	it) when the stored mode is unknown."""
	self.sim_mode.clear()
	lines = []
	self.store_list = []
	temp = []
	files = inp_lsdir("sim.gpvdm")
	if files != False:
		for i in range(0, len(files)):
			# Only top-level .inp files (no path separator) are scanned.
			if files[i].endswith(".inp") and files[i].count("/") == 0:
				lines = inp_load_file(files[i])
				value = inp_search_token_value(lines, "#sim_menu_name")
				if value != False:
					if value.count("@") == 1:
						temp.append(value)
		temp.sort()
		for i in range(0, len(temp)):
			value = temp[i].rstrip()
			command, module = value.split("@")
			self.sim_mode.addItem(command)
			a = store(command, module)
			self.store_list.append(a)
	print(os.getcwd())
	token = inp_get_token_value("sim.inp", "#simmode")
	print(token)
	if token.count("@") != 0:
		command, module = token.split("@")
	else:
		command = token
	found = False
	all_items = [self.sim_mode.itemText(i) for i in range(self.sim_mode.count())]
	for i in range(0, len(all_items)):
		if all_items[i] == command:
			self.sim_mode.setCurrentIndex(i)
			found = True
	#if there is no known mode, just set it to jv mode
	if found == False:
		for i in range(0, len(self.store_list)):
			if self.store_list[i].token == "jv":
				self.sim_mode.setCurrentIndex(i)
				inp_update_token_value(os.path.join(get_sim_path(), "sim.inp"), "#simmode", "jv@jv")
				break
def init(self):
	"""Build the GTK energy-slice dump controls.

	Reads the slider range from mesh_y.inp and the current slice
	position / enable flag from dump.inp.
	"""
	total = 0
	self.pos = 0
	# Bug fix: inp_load_file was called as inp_load_file(lines, name)
	# before 'lines' existed (NameError).  This block checks the return
	# value ("if lines != False"), i.e. it uses the API where
	# inp_load_file(name) returns the line list.
	lines = inp_load_file("mesh_y.inp")
	if lines != False:
		total = inp_sum_items(lines, "#mesh_layer_points0")
	lines = inp_load_file("dump.inp")
	if lines != False:
		# NOTE(review): str2bool on a slice *position* looks suspect
		# (the sibling version of this code does the same) -- confirm
		# the token's type.
		self.pos = str2bool(inp_search_token_value(lines, "#dump_energy_slice_pos"))
	label = gtk.Label("Energy slice dump")
	label.show()
	self.pack_start(label, True, True, 0)
	check = gtk.CheckButton("Enable")
	self.pack_start(check, True, True, 0)
	print("total=", total)
	adj = gtk.Adjustment(self.pos, 0, total, 1.0, 1.0, 1.0)
	adj.connect("value_changed", self.scroll)
	self.vscale = gtk.HScale(adj)
	self.vscale.set_size_request(150, 30)
	self.pack_start(self.vscale, True, True, 0)
	self.vscale.show()
	self.enable = False
	lines = inp_load_file("dump.inp")
	if lines != False:
		self.enable = str2bool(inp_search_token_value(lines, "#dump_energy_slice_switch"))
	check.set_active(self.enable)
	self.vscale.set_sensitive(self.enable)
	check.unset_flags(gtk.CAN_FOCUS)
	check.connect("clicked", self.check_clicked)
def scan_populate_from_file(filename):
	"""Register every '#' token of *filename* that the token library
	recognises and that is not already in the scan-item list."""
	lines = inp_load_file(filename)
	my_token_lib = tokens()
	for token in lines:
		if not token.startswith("#"):
			continue
		result = my_token_lib.find(token)
		if result == False:
			continue
		if scan_items_index_item(token) == -1:
			scan_item_add(filename, token, result.info, 1)
def load_data(self):
	"""Load time_mesh_config<index>.inp (version 1.1) into the table:
	one row per time segment of (length, dt, start/stop voltage,
	multiplier, suns, laser).  Returns True on success."""
	self.tab.setColumnCount(7)
	self.tab.clear()
	self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
	lines=[]
	self.start_time=0.0
	self.fs_laser_time=0.0
	self.list=[]
	self.tab.setHorizontalHeaderLabels([_("Length"),_("dt"), _("Start Voltage"), _("Stop Voltage"), _("step multiplyer"), _("Suns"),_("Laser")])
	file_name="time_mesh_config"+str(self.index)+".inp"
	print("loading",file_name)
	ret=inp_load_file(lines,file_name)
	if ret==True:
		if inp_search_token_value(lines, "#ver")=="1.1":
			pos=0
			token,value,pos=inp_read_next_item(lines,pos)
			self.start_time=float(value)
			token,value,pos=inp_read_next_item(lines,pos)
			self.fs_laser_time=float(value)
			token,value,pos=inp_read_next_item(lines,pos)
			segments=int(value)
			for i in range(0, segments):
				token,length,pos=inp_read_next_item(lines,pos)
				token,dt,pos=inp_read_next_item(lines,pos)
				token,voltage_start,pos=inp_read_next_item(lines,pos)
				token,voltage_stop,pos=inp_read_next_item(lines,pos)
				token,mul,pos=inp_read_next_item(lines,pos)
				token,sun,pos=inp_read_next_item(lines,pos)
				token,laser,pos=inp_read_next_item(lines,pos)
				tab_add(self.tab,[str(length),str(dt),str(voltage_start),str(voltage_stop),str(mul),str(sun),str(laser)])
			return True
		else:
			print("file "+file_name+"wrong version")
			exit("")
			return False
	else:
		print("file "+file_name+" not found")
		return False
	return False
def init(self):
	"""Build the GTK energy-slice dump controls (Python 2 variant).

	Reads the slider range from mesh.inp and the current slice position
	and enable flag from dump.inp.
	"""
	total=0
	self.pos=0
	lines=[]
	if inp_load_file(lines,"mesh.inp")==True:
		total=inp_sum_items(lines, "#mesh_layer_points0")
	if inp_load_file(lines,"dump.inp")==True:
		# NOTE(review): str2bool on a slice *position* looks suspect
		# (the sibling version of this code does the same) -- confirm
		# the token's type.
		self.pos=str2bool(inp_search_token_value(lines, "#dump_energy_slice_pos"))
	label=gtk.Label("Energy slice dump")
	label.show()
	self.pack_start(label, True, True, 0)
	check = gtk.CheckButton("Enable")
	self.pack_start(check, True, True, 0)
	print "total=",total
	adj=gtk.Adjustment(self.pos, 0, total, 1.0, 1.0, 1.0)
	adj.connect("value_changed", self.scroll)
	self.vscale = gtk.HScale(adj)
	self.vscale.set_size_request(150, 30)
	self.pack_start(self.vscale, True, True, 0)
	self.vscale.show()
	self.enable=False
	if inp_load_file(lines,"dump.inp")==True:
		self.enable=str2bool(inp_search_token_value(lines, "#dump_energy_slice_switch"))
	check.set_active(self.enable)
	self.vscale.set_sensitive(self.enable)
	check.unset_flags(gtk.CAN_FOCUS)
	check.connect("clicked", self.check_clicked)
def load_ref(file_name):
	"""Load the .ref citation file next to *file_name*.

	Returns a populated ref object, or None when the file is missing or
	unreadable.  Missing tokens become empty strings.
	"""
	r = ref()
	file_name = os.path.splitext(file_name)[0] + ".ref"
	if os.path.isfile(file_name) == False:
		return None
	lines = inp_load_file(file_name)
	if lines == False:
		return None
	def _token(name):
		# Normalize missing tokens (None) to "" -- this replaces nine
		# copy-pasted "if x == None: x = ''" stanzas.
		value = inp_get_token_value_from_list(lines, name)
		return "" if value is None else value
	r.group = _token("#ref_research_group")
	r.author = _token("#ref_authors")
	r.journal = _token("#ref_jounral")	# token spelling is historical; do not "fix" it
	r.title = _token("#ref_title")
	r.volume = _token("#ref_volume")
	r.pages = _token("#ref_pages")
	r.year = _token("#ref_year")
	r.doi = _token("#ref_doi")
	r.unformatted = _token("#ref_unformatted")
	return r
def init(self,index):
	"""Build the fxdomain tab: a closable title bar plus a notebook
	holding the frequency-mesh and circuit pages for
	fxdomain<index>.inp."""
	self.tab_label=None
	self.index=index
	lines=[]
	if inp_load_file(lines,"fxdomain"+str(self.index)+".inp")==True:
		self.tab_name=inp_search_token_value(lines, "#sim_menu_name")
	else:
		self.tab_name=""
	self.title_hbox=gtk.HBox()
	self.title_hbox.set_size_request(-1, 25)
	# The tab title shows only the command part of "command@module".
	self.label=gtk.Label(self.tab_name.split("@")[0])
	self.label.set_justify(gtk.JUSTIFY_LEFT)
	self.title_hbox.pack_start(self.label, False, True, 0)
	self.close_button = gtk.Button()
	close_image = gtk.Image()
	close_image.set_from_file(os.path.join(get_image_file_path(),"close.png"))
	close_image.show()
	self.close_button.add(close_image)
	self.close_button.props.relief = gtk.RELIEF_NONE
	self.close_button.set_size_request(25, 25)
	self.close_button.show()
	self.title_hbox.pack_end(self.close_button, False, False, 0)
	self.title_hbox.show_all()
	self.notebook=gtk.Notebook()
	self.notebook.show()
	self.fxmesh = tab_fxmesh()
	self.fxmesh.init(self.index)
	self.notebook.append_page(self.fxmesh, gtk.Label(_("Frequency mesh")))
	self.pack_start(self.notebook, False, False, 0)
	self.circuit=circuit()
	self.circuit.init(self.index)
	self.notebook.append_page(self.circuit, gtk.Label(_("Circuit")))
	self.show()
def __init__(self, file_name):
	"""Single-tab window for *file_name*, named from its #sim_menu_name
	token (empty name when the file cannot be read)."""
	QTabWidget.__init__(self)
	css_apply(self, "tab_default.css")
	self.file_name = os.path.join(get_sim_path(), file_name)
	lines = inp_load_file(self.file_name)
	self.tab_name = inp_search_token_value(lines, "#sim_menu_name") if lines != False else ""
	self.setMovable(True)
	tab = tab_class(self.file_name)
	self.addTab(tab, _("Configure"))
def load_data(self):
	"""Load fxmesh<index>.inp (version 1.1) into the table: one row per
	(start, stop, points, multiply) frequency segment.  Returns True on
	success."""
	self.tab.clear()
	self.tab.setColumnCount(4)
	self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
	self.tab.setHorizontalHeaderLabels([_("Frequency start"), _("Frequency stop"), _("points"), _("Multiply")])
	self.tab.setColumnWidth(0, 200)
	self.tab.setColumnWidth(1, 200)
	lines = []
	self.start_fx = 0.0
	file_name = "fxmesh" + str(self.index) + ".inp"
	lines = inp_load_file(os.path.join(get_sim_path(), file_name))
	if lines != False:
		if inp_search_token_value(lines, "#ver") == "1.1":
			pos = 0
			while (1):
				# The first token of each segment doubles as the
				# end-of-data sentinel (#ver or #end).
				token, start, pos = inp_read_next_item(lines, pos)
				if token == "#ver" or token == "#end":
					break
				token, stop, pos = inp_read_next_item(lines, pos)
				token, points, pos = inp_read_next_item(lines, pos)
				token, mul, pos = inp_read_next_item(lines, pos)
				tab_add(self.tab, [str(start), str(stop), str(points), str(mul)])
			return True
		else:
			print("file " + file_name + "wrong version")
			exit("")
			return False
	else:
		print("file " + file_name + " not found")
		return False
	return False
def epitaxy_load():
    """Load epitaxy.inp into the module-level epitaxy description globals.

    Resets then repopulates: layers (count), electrical_layers (count of
    layers whose electrical file starts with "dos"), and the parallel lists
    width, mat_file, electrical_layer, pl_file, name.
    """
    lines=[]
    global layers
    global electrical_layers
    global width
    global mat_file
    global electrical_layer
    global pl_file
    global name
    layers=0
    electrical_layers=0
    width=[]
    mat_file=[]
    electrical_layer=[]
    pl_file=[]
    name=[]
    if inp_load_file(lines,"epitaxy.inp")==True:
        pos=0
        # lines[1] holds the number of layers; the preceding line is a header.
        pos=pos+1
        for i in range(0, int(lines[pos])):
            # Each field is stored as a token line followed by a value line,
            # so pos advances twice per field: once past the token, once to
            # land on the value.
            pos=pos+1 #token
            pos=pos+1
            name.append(lines[pos])
            pos=pos+1 #token
            pos=pos+1
            width.append(float(lines[pos]))
            pos=pos+1 #token
            pos=pos+1
            mat_file.append(lines[pos])
            pos=pos+1 #token
            pos=pos+1
            electrical_layer.append(lines[pos]) #value
            # A "dos*" electrical file marks the layer as electrically active.
            if lines[pos].startswith("dos")==True:
                electrical_layers=electrical_layers+1
            pos=pos+1 #token
            pos=pos+1
            pl_file.append(lines[pos]) #value
            layers=layers+1
def contacts_load():
    """Load contacts.inp from the current working directory.

    Resets the global contact store and appends one contact per record via
    contacts_append(). Each record is five token/value line pairs in the
    order: start, width, depth, voltage, active.
    """
    global store
    store=[]
    lines=[]
    pos=0
    if inp_load_file(lines,os.path.join(os.getcwd(),"contacts.inp"))==True:
        pos=pos+1  # skip the leading comment line; this line is the count
        layers=int(lines[pos])
        for layer in range(0, layers):
            fields=[]
            # Each field occupies two lines: a token line then its value.
            for field in range(0, 5):
                pos=pos+2
                fields.append(lines[pos])
            start, width, depth, voltage, active = fields
            # Note the argument order differs from the on-disk field order.
            contacts_append(float(start),float(depth),float(voltage),float(width),str2bool(active))
def code_ctrl_load():
    """Load the feature-enable flags from ver.inp into module globals.

    On failure every flag defaults to False and a message is printed.
    """
    global store_enable_webupdates
    global store_enable_webbrowser
    global store_enable_cluster
    global store_enable_betafeatures
    lines=[]
    # Guard clause: if ver.inp is missing, disable everything.
    if inp_load_file(lines,os.path.join(get_inp_file_path(),"ver.inp"))!=True:
        print("Can not load ver.inp file")
        store_enable_webupdates=False
        store_enable_webbrowser=False
        store_enable_cluster=False
        store_enable_betafeatures=False
        return
    store_enable_webupdates=yes_no(inp_search_token_value(lines, "#enable_webupdates"))
    store_enable_webbrowser=yes_no(inp_search_token_value(lines, "#enable_webbrowser"))
    store_enable_cluster=yes_no(inp_search_token_value(lines, "#enable_cluster"))
    store_enable_betafeatures=yes_no(inp_search_token_value(lines, "#enable_betafeatures"))
def load_data(self):
    """Populate the start/stop/equation table from self.file_name.

    Reads the point count into self.points, then one (start, stop,
    equation) triple per table row. Does nothing beyond setting up the
    empty table when the file cannot be loaded.
    """
    self.tab.clear()
    self.tab.setColumnCount(3)
    self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
    self.tab.setHorizontalHeaderLabels([_("start (m)"), _("stop (m)"), _("Python Equation")])
    self.tab.setColumnWidth(2, 500)
    lines = []
    pos = 0
    if inp_load_file(lines, os.path.join(self.path, self.file_name)) != True:
        return
    token, self.points, pos = inp_read_next_item(lines, pos)
    token, n_equations, pos = inp_read_next_item(lines, pos)
    self.points = int(self.points)
    for i in range(int(n_equations)):
        token, seg_start, pos = inp_read_next_item(lines, pos)
        token, seg_stop, pos = inp_read_next_item(lines, pos)
        token, seg_equation, pos = inp_read_next_item(lines, pos)
        tab_add(self.tab, [str(seg_start), str(seg_stop), str(seg_equation)])
def scan_populate_from_file(filename, human_name=""):
    """Register every known token found in *filename* as a scan item.

    Each line beginning with '#' is looked up in the token library; tokens
    that are known and not already registered are added with a human-readable
    path built from *human_name* (defaulting to the file name) and the
    token's info text.
    """
    lines = inp_load_file(filename)
    if human_name == "":
        human_name = filename
    token_lib = tokens()
    for token in lines:
        # Only token lines ("#...") are of interest.
        if len(token) == 0 or token[0] != "#":
            continue
        result = token_lib.find(token)
        if result == False:
            continue
        # Skip tokens that are already in the scan-item list.
        if scan_items_index_item(token) == -1:
            scan_item_add(filename, token, os.path.join(human_name, result.info), 1)
def epitaxy_populate_rgb():
    """Fill in the display colour (r, g, b, alpha) of the last epitaxy layer.

    Reads #red_green_blue and #mat_alpha from the layer's mat.inp, which may
    live inside a "<material>.zip" archive. Silently returns if the material
    file cannot be loaded.
    """
    global epi
    layer = epi[-1]
    mat_path = os.path.join(get_materials_path(), layer.mat_file, "mat.inp")
    archive_name = os.path.basename(layer.mat_file) + ".zip"
    mat_lines = inp_load_file(mat_path, archive=archive_name)
    if mat_lines == False:
        return
    rgb = inp_search_token_array(mat_lines, "#red_green_blue")
    if rgb == False:
        return
    layer.r = float(rgb[0])
    layer.g = float(rgb[1])
    layer.b = float(rgb[2])
    layer.alpha = float(inp_search_token_value(mat_lines, "#mat_alpha"))
def load_data(self):
    """Load "time_mesh_config<index>.inp" (format version 1.1).

    Fills self.start_time, self.fs_laser_time and self.list with one
    (length, dt, voltage_start, voltage_stop, mul, sun, laser) tuple per
    segment. Returns True on success, False otherwise (aborting via exit()
    on a version mismatch).
    """
    lines=[]
    self.start_time=0.0
    self.fs_laser_time=0.0
    self.list=[]
    file_name="time_mesh_config"+str(self.index)+".inp"
    ret=inp_load_file(lines,file_name)
    if ret==True:
        if inp_search_token_value(lines, "#ver")=="1.1":
            pos=0
            token,value,pos=inp_read_next_item(lines,pos)
            self.start_time=float(value)
            token,value,pos=inp_read_next_item(lines,pos)
            self.fs_laser_time=float(value)
            token,value,pos=inp_read_next_item(lines,pos)
            segments=int(value)
            # Seven token/value pairs per time segment.
            for i in range(0, segments):
                token,length,pos=inp_read_next_item(lines,pos)
                token,dt,pos=inp_read_next_item(lines,pos)
                token,voltage_start,pos=inp_read_next_item(lines,pos)
                token,voltage_stop,pos=inp_read_next_item(lines,pos)
                token,mul,pos=inp_read_next_item(lines,pos)
                token,sun,pos=inp_read_next_item(lines,pos)
                token,laser,pos=inp_read_next_item(lines,pos)
                self.list.append((length,dt,voltage_start,voltage_stop,mul,sun,laser))
            # Parenthesized print works identically under Python 2 and 3
            # (fixed: was Python-2-only print statements).
            print(self.list)
            return True
        else:
            # Fixed: message previously read "...inpwrong version" (missing space).
            print("file "+file_name+" wrong version")
            exit("")
            return False
    else:
        print("file "+file_name+" not found")
        return False
def icons_load():
    """Load icons.inp into the global icon database.

    The file alternates "#token" lines with icon file names, terminated by
    "#end". Each entry is stored as [token-without-hash, name, QIcon]; a
    missing icon is fatal (prints a message and exits).
    """
    global icon_db
    lines = inp_load_file(os.path.join(get_inp_file_path(), "icons.inp"))
    pos = 0
    while lines[pos] != "#end":
        token = lines[pos]
        pos = pos + 1
        icon_name = lines[pos]
        pos = pos + 1
        icon = QIcon_load(icon_name, save=False)
        if icon == False:
            print("Icon not found:" + icon_name)
            sys.exit(0)
        # Strip the leading '#' from the token for the database key.
        icon_db.append([token[1:], icon_name, icon])
def ver_load_info():
    """Load the core and material versions from ver.inp into globals.

    Returns True on success. On failure, sets a diagnostic message in the
    global ver_error and returns False.
    """
    global core
    global mat
    global ver_error
    core=""
    mat=""
    ver_error=""
    lines=[]
    ver_file_path=os.path.join(get_inp_file_path(),"ver.inp")
    if inp_load_file(lines,ver_file_path)==True:
        # Fixed positions: line 1 is the core version, line 5 the material
        # database version.
        core=lines[1]
        mat=lines[5]
        return True
    ver_error="I can not find the file sim.gpvdm/ver.inp.\n\nI have tried looking in "+ver_file_path+"\n\nThe share path is"+get_share_path()+"\n\nThe bin path is"+get_bin_path()+"\n\nThe current working dir is "+os.getcwd()+"\n\nTry reinstalling a new version of gpvdm and/or report the bug to me at [email protected]."
    return False
def load(self):
    """Populate the doping table from each electrically active epitaxy layer.

    For every layer whose DOS file is not "none", reads #doping_start and
    #doping_stop from "<dos_file>.inp" in the simulation directory and adds
    a row (file name, width, start, stop). Table signals are blocked during
    the rebuild to avoid triggering change callbacks.
    """
    self.tab.blockSignals(True)
    self.tab.clear()
    self.tab.setHorizontalHeaderLabels([_("File Name"), _("Width"), _("Start"), _("Stop")])
    layers = epitaxy_get_layers()
    for i in range(0, layers):
        dos_file = epitaxy_get_dos_file(i)
        width = epitaxy_get_width(i)
        if dos_file != "none":
            print("loading", dos_file)
            file_name = os.path.join(get_sim_path(), dos_file + ".inp")
            lines = inp_load_file(file_name)
            if lines != False:
                doping_start = float(inp_search_token_value(lines, "#doping_start"))
                doping_stop = float(inp_search_token_value(lines, "#doping_stop"))
                print("add", dos_file)
                count = self.tab.rowCount()
                self.tab.insertRow(count)
                item1 = QTableWidgetItem(str(dos_file))
                self.tab.setItem(count, 0, item1)
                item2 = QTableWidgetItem(str(width))
                self.tab.setItem(count, 1, item2)
                item3 = QTableWidgetItem(str(doping_start))
                self.tab.setItem(count, 2, item3)
                # Fixed: the stop-column item previously reused the name
                # item3, shadowing the start-column item.
                item4 = QTableWidgetItem(str(doping_stop))
                self.tab.setItem(count, 3, item4)
    self.tab.blockSignals(False)
    return
def workbook_from_inp(ws, my_row, filename, title=""):
    """Write the numeric tokens of an .inp file into worksheet *ws*.

    Starting at row *my_row*, writes an optional title, then one row per
    known token whose value is numeric: the token's info text in column 1
    and the float value in column 2. Returns the next free row. Returns
    *my_row* unchanged when the file cannot be loaded.
    """
    lines = inp_load_file(filename)
    if lines == False:
        return my_row
    if title != "":
        ws.cell(column=1, row=my_row, value=title)
        my_row = my_row + 1
    token_lib = tokens()
    pos = 0
    while True:
        ret, pos = inp_get_next_token_array(lines, pos)
        token = ret[0]
        # #ver / #end terminate the token stream.
        if token == "#ver" or token == "#end":
            break
        if not token.startswith("#"):
            continue
        result = token_lib.find(token)
        # Unknown tokens and non-numeric values are skipped.
        if result == False:
            continue
        value = ret[1]
        if is_number(value):
            ws.cell(column=1, row=my_row, value=result.info)
            ws.cell(column=2, row=my_row, value=float(value))
            my_row = my_row + 1
    return my_row
def create_model(self):
    """Load "fit_patch<index>.inp" into the four-column patch table.

    The file is a flat sequence of (token, file, path, value) line
    quadruples terminated by "#end"; each quadruple becomes one table row.
    """
    self.tab.clear()
    self.tab.setColumnCount(4)
    self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
    self.tab.setHorizontalHeaderLabels([_("File"), _("Token"), _("Path"), _("Values")])
    self.tab.setColumnWidth(2, 300)
    self.file_name = os.path.join(get_sim_path(), "fit_patch" + str(self.index) + ".inp")
    lines = inp_load_file(self.file_name)
    if lines != False:
        pos = 0
        mylen = len(lines)
        while (1):
            t = lines[pos]
            if t == "#end":
                break
            pos = pos + 1
            f = lines[pos]
            if f == "#end":
                break
            pos = pos + 1
            path = lines[pos]
            # Fixed: this sentinel check previously re-tested f (already
            # checked above) instead of the freshly read path value.
            if path == "#end":
                break
            pos = pos + 1
            v = lines[pos]
            if v == "#end":
                break
            pos = pos + 1
            self.insert_row(self.tab.rowCount(), f, t, path, v)
            # Safety net against files lacking an #end terminator.
            if pos > mylen:
                break
def ver_load_info():
    """Load #core and #sub_ver from ver.inp (inside base.gpvdm) into globals.

    Returns True on success. On failure, fills the global ver_error with a
    diagnostic message and returns False.
    """
    global core
    global ver_error
    global subver
    core=""
    ver_error=""
    ver_file_path=os.path.join(get_inp_file_path(),"ver.inp")
    lines=inp_load_file(ver_file_path,archive="base.gpvdm")
    if lines==False:
        ver_error="I can not find the file sim.gpvdm/ver.inp.\n\nI have tried looking in "+ver_file_path+"\n\nThe share path is"+get_share_path()+"\n\nThe bin path is"+get_bin_path()+"\n\nThe current working dir is "+get_sim_path()+"\n\nTry reinstalling a new version of gpvdm and/or report the bug to me at [email protected]."
        return False
    core=inp_search_token_value(lines,"#core")
    subver=inp_search_token_value(lines,"#sub_ver")
    return True
def dat_file_import_filter(out, file_name, x_col=0, y_col=1):
    """This is an import filter for xy data.

    Reads whitespace-separated columns from *file_name*, skipping any header
    lines until the first line whose leading field is numeric; from then on
    column *x_col* is appended to out.y_scale and column *y_col* to
    out.data[0][0] (a 1 x 1 x N data cube). Parsing stops at "#end".
    Returns True on success, False if the file cannot be loaded.
    """
    lines = inp_load_file(file_name)
    if lines == False:
        return False
    out.x_scale = []
    out.y_scale = []
    out.z_scale = []
    # 1 x 1 x N cube; the inner list is filled below.
    out.data = [[[]]]
    data_started = False
    for i in range(0, len(lines)):
        # str.split() with no argument already splits on runs of any
        # whitespace (tabs included), so no pre-normalisation is needed
        # (fixed: removed two redundant re.sub passes).
        s = lines[i].split()
        l = len(s)
        if l > 0:
            if data_started == False:
                if is_number(s[0]) == True:
                    data_started = True
            if s[0] == "#end":
                break
            if data_started == True:
                # Ignore short rows that lack the requested columns.
                if max(x_col, y_col) < l:
                    out.y_scale.append(float(s[x_col]))
                    out.data[0][0].append(float(s[y_col]))
    out.x_len = 1
    out.y_len = len(out.data[0][0])
    out.z_len = 1
    return True
def device_lib_fix_ver(file_name, ver ):
    """Ensure *file_name* in every device-library archive carries version *ver*.

    For each archive found by find_device_libs(): if the file already has a
    #ver token with a different value, it is updated in place; if it has no
    #ver token at all, one is inserted before the final #end and the file is
    rewritten.
    """
    archives=find_device_libs()
    for i in range(0,len(archives)):
        src_file=os.path.join(os.path.dirname(archives[i]),file_name)
        archive=os.path.basename(archives[i])
        value=inp_get_token_value(src_file, "#ver",archive=archives[i])
        if value!=None and value != ver:
            inp_update_token_value(src_file, "#ver", ver,archive=archives[i])
            print(value)
        if value==None:
            lines=inp_load_file(src_file,archive=archives[i])
            # Rewrite the trailing "#end" marker as "#ver", overwrite the
            # line after it with the version, then re-append "#end".
            # NOTE(review): assumes "#end" is never the very last list
            # element (lines[ii+1] would raise IndexError) and appears only
            # once — confirm against the .inp file format.
            for ii in range(0,len(lines)):
                if lines[ii]=="#end":
                    lines[ii]="#ver"
                    lines[ii+1]=ver
                    lines.append("#end")
            print(lines)
            inp_save(src_file,lines,archive=archives[i])
def load_data(self):
    """Load "fxmesh<index>.inp" (format version 1.0) into self.list and the table.

    Each segment is a (length, dfx, mul) triple. Returns True on success,
    False otherwise (aborting via exit() on a version mismatch).
    """
    self.tab.clear()
    self.tab.setColumnCount(3)
    self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
    self.tab.setHorizontalHeaderLabels([_("Frequency segment"), _("dfx"), _("Multiply")])
    lines=[]
    self.start_fx=0.0
    self.list=[]
    file_name="fxmesh"+str(self.index)+".inp"
    ret=inp_load_file(lines,file_name)
    if ret==True:
        if inp_search_token_value(lines, "#ver")=="1.0":
            pos=0
            token,value,pos=inp_read_next_item(lines,pos)
            # NOTE(review): this writes self.fx_start although self.start_fx
            # was initialised above — likely a naming slip, but left as-is
            # because callers of either attribute are not visible here.
            self.fx_start=float(value)
            token,value,pos=inp_read_next_item(lines,pos)
            segments=int(value)
            for i in range(0, segments):
                token,length,pos=inp_read_next_item(lines,pos)
                token,dfx,pos=inp_read_next_item(lines,pos)
                token,mul,pos=inp_read_next_item(lines,pos)
                self.list.append((length,dfx,mul))
                tab_add(self.tab,[str(length),str(dfx),str(mul)])
            return True
        else:
            # Fixed: message previously read "...inpwrong version" (missing space).
            print("file "+file_name+" wrong version")
            exit("")
            return False
    else:
        print("file "+file_name+" not found")
        return False
def plot_load_oplot_file(plot_token, file_name):
    """Load an .oplot configuration file into *plot_token*.

    Every attribute is stored in the file under the token "#<attribute>";
    booleans go through str2bool, numeric settings through float, and the
    rest are kept as strings. Returns True on success, False if the file
    cannot be loaded.
    """
    lines = []
    if inp_load_file(lines, file_name) != True:
        return False
    bool_attrs = ["logy", "logx", "grid", "invert_y", "normalize",
                  "norm_to_peak_of_all_data", "subtract_first_point",
                  "add_min", "label_data"]
    str_attrs = ["file0", "file1", "file2", "tag0", "tag1", "tag2",
                 "legend_pos", "key_units", "type", "x_label", "y_label",
                 "x_units", "y_units"]
    float_attrs = ["x_mul", "y_mul", "x_start", "x_stop", "x_points",
                   "y_start", "y_stop", "y_points", "time", "Vexternal"]
    for attr in bool_attrs:
        setattr(plot_token, attr, str2bool(inp_search_token_value(lines, "#" + attr)))
    for attr in str_attrs:
        setattr(plot_token, attr, inp_search_token_value(lines, "#" + attr))
    for attr in float_attrs:
        setattr(plot_token, attr, float(inp_search_token_value(lines, "#" + attr)))
    return True
def wpos_load():
    """Load saved window positions from window_list2.inp into the global wlist.

    Each record is three token/value pairs (name, x, y); reading stops at
    the #end or #ver terminator. Does nothing when the file cannot be
    loaded.
    """
    global wlist
    wlist = []
    pos = 0
    lines = inp_load_file("window_list2.inp")
    if lines == False:
        return
    while True:
        token, name, pos = inp_read_next_item(lines, pos)
        if token == "#end" or token == "#ver":
            break
        token, x, pos = inp_read_next_item(lines, pos)
        token, y, pos = inp_read_next_item(lines, pos)
        item = window_item()
        item.name = name
        item.x = float(x)
        item.y = float(y)
        wlist.append(item)
def config_load(self):
    """Restore the compare-window widget state from gui_cmp_config.inp.

    entry0/entry1 are combo boxes selected by matching the saved value
    against self.snapshot_list (index 0 when not found); entry2/entry3 are
    text fields. Falls back to built-in defaults when the file cannot be
    loaded.
    """
    lines=[]
    if inp_load_file(lines,"gui_cmp_config.inp")!=True:
        # Defaults when no saved configuration exists.
        self.entry0.set_active(0)
        self.entry1.set_active(0)
        self.entry2.set_text("n p")
        self.entry3.set_text("")
        return
    for combo, token in ((self.entry0, "#entry0"), (self.entry1, "#entry1")):
        saved=inp_search_token_value(lines, token)
        if self.snapshot_list.count(saved)!=0:
            combo.set_active(self.snapshot_list.index(saved))
        else:
            combo.set_active(0)
    self.entry2.set_text(inp_search_token_value(lines, "#entry2"))
    self.entry3.set_text(inp_search_token_value(lines, "#entry3"))