def main():
    """Run the nonlinear analysis chain described by a properties file."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')

    # Command line: one positional argument, the properties-file path.
    parser = argparse.ArgumentParser()
    parser.add_argument("file_path", help="Path to properties file", type=str)
    props_path = parser.parse_args().file_path

    start = time.time()

    # Property containers shared by every module in the chain.
    conf = Properties()
    props = Properties()
    globdat = Properties()
    props.parseFile(props_path)

    # Assemble the module chain in execution order.
    chain = ChainModule()
    for stage in (InputModule("input"),
                  InitModule(),
                  NonlinModule("nonlin"),
                  SampleModule("sample"),
                  ControlModule("control")):
        chain.pushBack(stage)

    Execute(chain, conf, props, globdat)

    stop = time.time()
    logging.info("Elapsed time is {}".format(stop - start))

    # Plot the deformed mesh using the converged solution vector.
    mesh = globdat.get("mesh")
    mesh.plotDeformed(globdat.get("solu"), 1)
def main():
    """Run a linear analysis from a properties file and plot the result."""
    # Command line: one positional argument, the properties-file path.
    parser = argparse.ArgumentParser()
    parser.add_argument("file_path", help="Path to properties file", type=str)
    props_path = parser.parse_args().file_path

    start = time.time()

    # Property containers shared by every module in the chain.
    conf = Properties()
    props = Properties()
    globdat = Properties()
    props.parseFile(props_path)

    # Chain: mesh input -> model/cons/mbuilder init -> linear solve.
    chain = ChainModule()
    for stage in (InputModule("input"),
                  InitModule(),
                  LinSolveModule("linsolve")):
        chain.pushBack(stage)

    Execute(chain, conf, props, globdat)

    stop = time.time()
    print("Elapsed time is ", stop - start)

    # Plot the deformed mesh and echo the solution vector.
    mesh = globdat.get("mesh")
    mesh.plotDeformed(globdat.get("solu"), 1, rank=2)
    print(globdat.get("solu"))
def cluster():
    """Render the detail page for one cluster of a previous query's results."""
    query = request.form['query']
    thepath = request.form.get('thepath')

    # Cached example queries are addressed by index; ad-hoc queries by path.
    if xr.is_example_query(query):
        pp = Properties(query, xr.get_example_query_index(query))
    else:
        pp = Properties(query, thepath=thepath)

    finalresult = rr.read_file(pp.RESULTS_C, pp.RESULTS_D)

    # The "explore" field ends with the 1-based cluster number.
    idx = int(request.form['explore'].split()[-1]) - 1

    return render_template('cluster.html',
                           cluster_num=idx + 1,
                           cluster=finalresult["clusters"][idx],
                           query=query,
                           example_queries=xr.read_example_queries(),
                           thepath=thepath)
def main():
    """Train a neural network from ``config.properties`` and dump its outputs.

    Reads training (and optional test) patterns, trains for up to
    ``max_epochs`` epochs, then writes ``input;output;expected`` rows to
    ``function_file`` while echoing each pair to stdout.
    """
    props = Properties("config.properties")

    # Dead pre-assignments removed: the with-block always rebinds the list.
    with open(props.training_file) as f:
        train_patterns = read_patterns(f)

    # Layer layout: input width, configured hidden sizes, output width.
    input_size = len(train_patterns[0].input)
    output_size = len(train_patterns[0].expected_output)
    layers_sizes = [input_size] + props.hidden_layer_sizes + [output_size]

    test_patterns = []
    if props.test_file != "":
        with open(props.test_file) as f:
            test_patterns = read_patterns(f)

    network = NeuralNetwork(train_patterns, test_patterns, props.etha)
    network.init_weights(layers_sizes)
    network.learn_patterns(props.max_epochs)

    with open(props.filename) as f:
        all_patterns = read_patterns(f)

    print("Output | Expected output")
    with open(props.function_file, "w+") as f:
        for pattern in all_patterns:
            output = network.get_output(pattern.input)
            # One semicolon-separated row per pattern: input;output;expected.
            f.write(";".join(str(x) for x in pattern.input))
            f.write(";")
            f.write(";".join(str(x) for x in output))
            f.write(";")
            f.write(";".join(str(x) for x in pattern.expected_output))
            f.write("\n")
            print("{} | {}".format(output, pattern.expected_output))
def run(self, output_filename=None, output_filename_order=None):
    """Compute Shapley values for the loaded data.

    :param output_filename: CSV path for the raw Shapley values, or None.
    :param output_filename_order: CSV path for the ordered values, or None.
    :returns: ``(shapley_calc, shapley_calcOrder)`` DataFrames when both
        filenames are None; otherwise writes both CSVs and returns None.
    """
    Preprocessing_agg(self.data).run()
    M = self.Vectorization()
    shapley_DictEncode, shapley_DictDecode = self.Calc(M)

    # Sanity-check the computed values against known Shapley properties.
    properties = Properties(self.data, shapley_DictEncode)
    if properties.Efficiency():
        print("Properties / Efficience : +")
    if properties.DummyPlayer(M):
        print("Properties / DummyPlayer : +")

    shapley_calc = pd.DataFrame(shapley_DictDecode.items(),
                                columns=[CHANNEL_NAME, SHAPLEY_VALUE])
    shapley_calcOrder = shapleyOrderLib(self.data).run(
        M, shapley_DictDecode, self.encryped_dict)

    # `is None` instead of `== None`, and no `== True` comparison.
    if output_filename is None and output_filename_order is None:
        return shapley_calc, shapley_calcOrder
    shapley_calc.to_csv(output_filename)
    shapley_calcOrder.to_csv(output_filename_order)
def _init_config(self, conf_json):
    """Build the config backend matching ``self._configType``.

    :returns: an Xml or Properties reader over ``self._configPath``.
    :raises TypeError: for any unrecognised config type.
    """
    config_type = self._configType
    if config_type == 'xml':
        return Xml(self._configPath, self._configPath)
    if config_type == 'properties':
        return Properties(self._configPath)
    raise TypeError("The [%s] configType Error!" % config_type)
def ScanForDevices(self): props = Properties() # Power off: print "Powering off..." props.PowerOff() time.sleep(5) # Power on: print "Powering back on..." props.PowerOn() adapter1 = self.bus.get(self.busName, self.path) adapter1.StartDiscovery() for i in range(10, 0, -1): print(i) time.sleep(1) # List devices: objMgr = ObjectManager() # objMgr.ListDevices() # List addresses: devices = objMgr.GetAddresses() for address in devices: print "%s\t%s" % (address, devices[address]['Name'])
def __init__(self, local_debug=False):
    """Initialise the job wrapper: logging, runtime args, naga platform config.

    Reads ``<job_name>/runtime.properties`` for the RPC endpoint, fetches and
    persists the job config when the endpoint is available, and resolves
    HADOOP_HOME with a hard-coded fallback.

    :param local_debug: not used in this method — presumably consumed
        elsewhere; TODO confirm.
    """
    self._client = None
    self.schedule_id = None
    self._init_log()
    # Fetch runtime info (populates job/task names and flow exec id).
    self._fill_runtime_args()
    # Fetch naga platform info from the job's runtime.properties file.
    properties = Properties('{0}/runtime.properties'.format(
        self.job_name)).get_properties()
    rpc_server = properties.get('api.server')
    rpc_port = properties.get('api.server.port')
    logging.info('rpcServer={},rpcPort={}'.format(rpc_server, rpc_port))
    if rpc_server and rpc_port:
        self._naga_api = NagaApi(rpc_server, rpc_port)
        job_config = self._naga_api.get_job_config(self.task_name,
                                                   self.job_name,
                                                   self.flow_exec_id)
        if job_config:
            self.team = job_config['team']
            self.config = job_config
            # Persist the fetched config back to the platform for this run.
            self._naga_api.save_job_runtime_config(self.task_name,
                                                   self.job_name,
                                                   self.flow_exec_id,
                                                   self.config)
        else:
            logging.info('get job config failed!')
    # Fall back to a fixed install path when HADOOP_HOME is unset.
    self.hadoop_home = os.getenv('HADOOP_HOME')
    if self.hadoop_home is None:
        self.hadoop_home = '/soft/home/hadoop-2.8.5'
    logging.info('HADOOP_HOME:{}'.format(self.hadoop_home))
def __init__(self, extractor, slide, slide_number):
    """Wrap one presentation slide as a layer.

    Parses a leading ``.``-prefixed layer directive from the slide's notes
    (when present) to set the layer's id, description, models, and
    selection/zoom flags.

    :param extractor: owner providing ``settings`` for external properties.
    :param slide: the pptx slide object.
    :param slide_number: 1-based slide index, also used as the layer index.
    """
    self.__slide = slide
    self.__extractor = extractor
    self.__slide_number = slide_number
    self.__external_properties = Properties(extractor.settings)
    super().__init__(slide_number, self.__external_properties.pathways)
    # Find `layer-id` text boxes so we have a valid ID **before** using
    # it when setting a shape's `path_id`.
    if slide.has_notes_slide:
        notes_slide = slide.notes_slide
        notes_text = notes_slide.notes_text_frame.text
        # Only notes starting with '.' are treated as layer directives.
        if notes_text.startswith('.'):
            layer_directive = Parser.layer_directive(notes_text)
            if 'error' in layer_directive:
                super().error(
                    'Slide {}: invalid layer directive: {}'.format(
                        slide_number, notes_text))
            else:
                self.layer_id = layer_directive.get('id')
                self.background_for = layer_directive.get('background-for', '')
                self.description = layer_directive.get(
                    'description', self.layer_id.capitalize())
                self.models = layer_directive.get('models', '')
                self.queryable_nodes = layer_directive.get(
                    'queryable-nodes', False)
                # Background layers and explicitly not-selectable layers
                # cannot be selected.
                self.selectable = self.background_for == '' and not layer_directive.get(
                    'not-selectable')
                self.selected = layer_directive.get('selected', False)
                self.zoom = layer_directive.get('zoom', None)
    self.__current_group = []
    # Cannot overlap with slide shape ids...
    self.__next_local_id = 100001
def _fill_runtime_args(self):
    """Populate azkaban runtime identifiers from the job's properties file.

    For spark jobs, first propagates the executors' extra JVM options into
    this process. Then locates the first ``*props*`` file in the current
    directory and reads the azkaban flow/job/project names from it.
    """
    if self._is_spark_job():
        # Propagate spark.executor.extraJavaOptions into this process.
        spark = SparkSession.builder.master(
            'yarn').enableHiveSupport().getOrCreate()
        spark_config = spark.sparkContext.getConf()
        java_options = spark_config.get('spark.executor.extraJavaOptions')
        logging.info(
            'getJavaOptions spark.executor.extraJavaOptions={}'.format(
                java_options))
        self._set_java_options_sys_property(java_options)
    # Pick the first filename containing 'props'; the unconditional outer
    # `break` stops the walk after the top-level directory.
    props_file = None
    for parent, dirnames, filenames in os.walk('./'):
        for filename in filenames:
            print filename
            if 'props' in filename:
                props_file = filename
                break
        break
    properties = Properties(props_file).get_properties()
    self.flow_exec_id = properties.get("azkaban.flow.execid")
    self.runtime_job_id = properties.get("azkaban.job.id")
    self.job_name = properties.get("azkaban.job.id")
    self.task_name = properties.get("azkaban.flow.projectname")
    # NOTE(review): self.flow_name is not assigned in this method — confirm
    # it is set elsewhere before this log statement runs.
    logging.info("getEnv result:[azkaban.flowid = {},azkaban.execid = {}, \
azkaban.jobid = {},azkaban.jobname = {},azkaban.projectname = {}]".
                 format(self.flow_name, self.flow_exec_id,
                        self.runtime_job_id, self.job_name, self.task_name))
def __init__(self, path):
    """
    - creates power plant and storage models
    - reads input timeseries
    - allocates the empty output timeseries frame

    :param path: (string) path to *.main_ctrl.json
    """
    self.__prop = Properties(path=path[0])
    self.__gs = GeoStorage(self.__prop)
    self.__pp_info = pp.load_models(self.__prop)
    self.__directory = os.path.dirname(self.__gs.simulation_files())
    self.__basename = os.path.basename(self.__gs.simulation_files())
    info('INTERFACE powerplant models loaded')

    # Input timeseries lives next to the working directory.
    ts_path = os.path.join(self.__prop.working_dir,
                           self.__prop.input_timeseries_file)
    self.__input_ts = read_csv(ts_path, delimiter=',', decimal='.')
    info('INTERFACE Input time series read')

    # Pre-allocate one output row per simulation step.
    output_columns = [
        'time', 'Q_target', 'Q_actual', 'Q_sto', 'P_plant', 'ti_plant',
        'T_ff_sys', 'T_rf_sys', 'T_ff_sto', 'T_rf_sto', 'v_sto', 'pp_err'
    ]
    self.__output_ts = DataFrame(
        index=np.arange(0, self.__prop.t_steps_total),
        columns=output_columns)
def paned_window(self):
    """Build the three-pane main layout and instantiate the app widgets.

    Layout: left pane (projects over actions), middle pane (canvas over
    resources), right pane (properties). NOTE(review): relies on a global
    `root` window for screen-height math — confirm it exists at call time.
    """
    # Top-level horizontal split across the parent window.
    self.panedwindow = ttk.Panedwindow(self.parent, orient = tk.HORIZONTAL)
    self.panedwindow.pack(expand = True, fill = tk.BOTH)
    # 140 px is presumably reserved for menu/toolbar chrome — TODO confirm.
    self.left_pane = ttk.Frame(self.panedwindow, height = root.winfo_screenheight() - 140, relief = tk.SUNKEN)
    self.middle_pane = ttk.Frame(self.panedwindow, height = (root.winfo_screenheight() - 140), relief = tk.SUNKEN)
    self.right_pane = ttk.Frame(self.panedwindow, height = (root.winfo_screenheight() - 140), relief = tk.SUNKEN)
    # Right pane gets most of the horizontal space.
    self.panedwindow.add(self.left_pane, weight = 1)
    self.panedwindow.add(self.middle_pane, weight = 1)
    self.panedwindow.add(self.right_pane, weight = 10)
    # Left pane: vertical split into projects (top) and actions (bottom).
    self.panedwindow_left = ttk.Panedwindow(self.left_pane, orient = tk.VERTICAL)
    self.panedwindow_left.pack(expand = True, fill = tk.BOTH)
    self.pane_projects = ttk.Frame(self.panedwindow_left, height = (root.winfo_screenheight() - 140) / 2, relief = tk.SUNKEN)
    self.pane_actions = ttk.Frame(self.panedwindow_left, height = (root.winfo_screenheight() - 140) / 2, relief = tk.SUNKEN)
    self.panedwindow_left.add(self.pane_projects, weight = 1)
    self.panedwindow_left.add(self.pane_actions, weight = 1)
    # Middle pane: vertical split into canvas (top) and resources (bottom).
    self.panewindow_middle = ttk.PanedWindow(self.middle_pane, orient = tk.VERTICAL)
    self.panewindow_middle.pack(expand = True, fill = tk.BOTH)
    self.pane_canvas = ttk.Frame(self.panewindow_middle, relief = tk.SUNKEN)
    self.pane_resources = ttk.Frame(self.panewindow_middle, width = 100, relief = tk.SUNKEN)
    self.panewindow_middle.add(self.pane_canvas, weight = 5)
    self.panewindow_middle.add(self.pane_resources, weight = 1)
    # Application widgets; note creation order: properties is needed by the
    # canvas, and the canvas by the toolbar and actions.
    self.menubar = Menubar(self.parent)
    self.properties = Properties(self.right_pane)
    self.canvas = Canvas(self.properties)
    self.toolbar = Toolbar(self.pane_canvas, self.canvas)
    self.project_explorer = ProjectExplorer(self.pane_projects)
    self.canvas.create_Ui(self.pane_canvas)
    self.actions = Actions(self.pane_actions, self.canvas, self.properties)
    self.resources = Resources(self.pane_resources)
def __init__(self, app=QtGui.QApplication):
    """Set up the main window: load properties, build the UI, wire plots.

    With no IP configured yet, prompts for one and defers setup; with
    autosetup enabled, applies the saved settings immediately; otherwise
    builds the visuals and plots from scratch.
    """
    # NOTE(review): super(self.__class__, ...) breaks under subclassing
    # (infinite recursion); kept as-is since the class name is not visible.
    super(self.__class__, self).__init__()
    self.app = app
    self.properties = Properties()
    self.setupUi(self)
    self.trigger = Trigger()
    # `is None` instead of `== None`: identity is the correct None test.
    if self.properties['main']['ip'] is None:
        self.changeIpAddress()
        self.setupComplete = False
    elif self.properties['main']['autosetup']:
        self.setCurrentSettings()
        self.setupPlotThreads()
    else:
        self.setVisuals()
        self.setupPlotThreads()
        self.setupPlots()
    self._setLeValidator()
    self._connections()
def __init__(self, connection, get_login=False):
    """Gather physical host facts over *connection*, optionally with login
    statistics from the security event log (event id 4624).

    :param get_login: when True, also determine which user logged in most
        often, skipping the account named "liberton".
    """
    # Every new property should be defined here with the corresponding
    # method of Properties.properties.gather_physical_information.
    prop = Properties()
    prop.gather_physical_information(connection)
    self.hostname = prop.get_hostname()
    self.disks = prop.get_disks()
    self.cpu = prop.get_cpu_usage()
    self.memory = prop.get_memory_usage()
    self.virtualization = None
    # 4624 is the Windows "successful logon" event id.
    login = Logon(self.hostname, 4624)
    try:
        if get_login:
            self.login_info = {
                "most_login": None,
                "login_info": login.get_logged_users()
            }
            login.close_log()
            # Fold over all users to find the one with the most logins.
            most_login = {
                "amount_logins": 0,
                "name": "None",
                "last_login": None
            }
            for user in self.login_info["login_info"]:
                if user["name"] == "liberton":
                    continue
                if int(user["amount_logins"]) > int(
                        most_login["amount_logins"]):
                    most_login = user
            self.login_info["most_login"] = most_login
    except Exception as e:
        # Best-effort: login statistics are optional; report and continue.
        print(e)
        self.login_info = None
def _our_json_decode(o): """Unpack JSON objects using __class__ annotations.""" if isinstance(o, dict) and '__class__' in o: class_name = o['__class__'] del o['__class__'] if class_name == 'Properties': keys = o.keys() return Properties(**dict((k, o[k]) for k in keys)) elif class_name == 'Element': allowed_users = [] if 'allowed_users' in o.keys(): if isinstance(o['allowed_users'], list): allowed_users = o['allowed_users'] del o['allowed_users'] element = Element(**o) # Unpack dict as keyword-arguments element.allowed_users = allowed_users return element elif class_name == 'Device': o['affordances'] = _our_json_decode(o['affordances']) return Device(**o) elif class_name == 'User': element_importances = {} if 'element_importances' in o.keys(): if isinstance(o['element_importances'], dict): element_importances = o['element_importances'] del o['element_importances'] user = User(**o) user.importance = element_importances return user elif isinstance(o, list): out = [] for item in o: out.append(_our_json_decode(item)) return out return o
def __init__(self, connection, vname):
    """Collect name, status and resource usage for the VM ``vname``."""
    vm_props = Properties()
    vm_props.gather_virtual_information(connection, vname)
    # Copy the gathered facts onto this instance.
    self.name = vm_props.get_vhostname()
    self.status = vm_props.get_vstatus()
    self.disks = vm_props.get_vdisks()
    self.memory = vm_props.get_vmemory_usage()
    self.cpu = vm_props.get_vcpu()
def __init__(self, QtCoreModule, QtGuiModule, creatorPolicy):
    """Set up the widget factory and property handler for UI loading.

    Also publishes the passed Qt modules as module-level globals so the
    rest of this module can reference QtCore/QtGui directly.
    """
    self.factory = QObjectCreator(creatorPolicy)
    self.wprops = Properties(self.factory, QtCoreModule, QtGuiModule)
    # Rebind the module-level names used throughout this module.
    global QtCore, QtGui
    QtCore = QtCoreModule
    QtGui = QtGuiModule
    self.reset()
def __init__(self, arg):
    """Build an object from either a raw type id or another Object.

    :param arg: an ``int`` type id, or an ``Object`` whose type id and
        properties are copied.
    """
    tp = type(arg)
    self.mProperties = Properties()
    # `is` rather than `==` for comparing type objects (identity test).
    if tp is int:
        self.mTypeId = arg
    elif tp is Object:
        self.mTypeId = arg.mTypeId
        # NOTE(review): this copies the reference, not the contents — both
        # objects then share one Properties instance. Confirm intended.
        self.mProperties = arg.mProperties
    # NOTE(review): any other argument type leaves mTypeId unset.
def __readProperties(self):
    """Parse consecutive <property> child elements into a Properties map."""
    props = Properties()
    # Consume child elements until the enclosing element is closed.
    while (self.xml.readNextStartElement()):
        if (self.xml.name() != "property"):
            self.__readUnknownElement()
        else:
            self.__readProperty(props)
    return props
def __init__(self, connection):
    """Enumerate VMs on the host and record whether virtualization is used."""
    detector = Properties()
    detector.define_vms(connection)
    self.vms_list = detector.get_vms()
    self.vms_props = []
    # A non-empty VM list means virtualization is in use on this host.
    self.virtualization = "enabled" if self.vms_list else "disabled"
def __init__(self, *args, propFile=None, **kwargs):
    """Initialise the FindFile window, loading saved properties if present.

    :param propFile: path to the properties file; defaults to
        ``FindFile.properties`` when None.
    """
    super(FindFile, self).__init__(*args, **kwargs)
    if propFile is None:
        propFile = "FindFile.properties"
    self.propFile = propFile
    self.properties = Properties()
    # propFile can no longer be None here, so only existence is checked;
    # `with` closes the file handle the original left open.
    if os.path.exists(self.propFile):
        with open(self.propFile) as f:
            self.properties.load(f)
    self.InitUI()
def results():
    """Render the results page for a query, running the pipeline if needed."""
    query = request.form['text']
    thepath = request.form.get('thepath')

    # A "___" prefix marks an already-processed query: strip it and skip
    # reprocessing. Otherwise run the full pipeline unless it is a cached
    # example query.
    if query.startswith("___"):
        query = query[3:]
    elif not xr.is_example_query(query):
        thepath = main(query, request.remote_addr)

    # Example queries are addressed by index; ad-hoc queries by path.
    if xr.is_example_query(query):
        pp = Properties(query, xr.get_example_query_index(query))
    else:
        pp = Properties(query, thepath=thepath)

    finalresult = rr.read_file(pp.RESULTS_C, pp.RESULTS_D)
    return render_template('results.html',
                           results=finalresult,
                           query=query,
                           example_queries=xr.read_example_queries(),
                           thepath=thepath)
def main(datasetName, probFromSource):
    """Run the stream-fusion simulation for one dataset."""
    props = Properties('config.properties', datasetName)
    # Source/target stream files share the dataset-specific base path.
    base = Properties.BASEDIR + datasetName
    srcfile = base + Properties.SRCAPPEND
    trgfile = base + Properties.TRGAPPEND
    mgr = Manager(srcfile, trgfile)
    Properties.logger.info(props.summary())
    Properties.logger.info('Start Stream Simulation')
    mgr.startFusion(datasetName, probFromSource)
def mkProps(props, dir):
    """Merge settings from ``dir/mk.cfg`` into *props* (if the file exists).

    A value starting with ``\\`` is appended to the current value of the
    same key in *props* instead of replacing it.

    :returns: the (mutated) *props* object.
    """
    cfg_path = os.path.join(dir, 'mk.cfg')
    if not os.path.exists(cfg_path):
        return props
    cfg = Properties(cfg_path)
    for key in cfg.keys():
        value = cfg.get(key)
        # Leading backslash means "append to the inherited value".
        if value.startswith('\\'):
            value = props.get(key) + ' ' + value[1:]
        props.assign(key, value)
    return props
def main(query, remaddress):
    """Run the full snippet pipeline for *query*, tagged by client address.

    :returns: the run identifier (sanitised address + "___" + timestamp).
    """
    # Build a filesystem-safe run id: dots in the address and the
    # '-', ':', '.' in the ISO timestamp all become underscores.
    stamp = datetime.datetime.now().isoformat('_')
    for ch in '-:.':
        stamp = stamp.replace(ch, '_')
    current_date_time = remaddress.replace('.', '_') + "___" + stamp

    pp = Properties(query, thepath=current_date_time)
    download_snippets(pp)
    evaluate_apis(pp)
    evaluate_readability(pp)
    cluster_present_results(pp)
    return current_date_time
def addLibrariesToProject(self, libs):
    """Append *libs* to LINK_LIBS in the main target's mk.cfg and save it.

    No-op when the project has no main target.
    """
    if not self.main:
        return
    path = self.mainPath()
    mkPath = os.path.join(path, "mk.cfg")
    props = Properties(mkPath)
    # Drop empty entries left by stray commas, then append the new libs
    # (list.extend instead of a manual append loop).
    lst = [x for x in props.get("LINK_LIBS").split(',') if x]
    lst.extend(libs)
    props.assign("LINK_LIBS", ",".join(lst))
    props.save(mkPath)
    self.depsChanged.emit(path)
def editDependencies(self):
    """Open the dependencies dialog for the selected item and, on accept,
    write the new LINK_LIBS list back to the item's mk.cfg.

    :returns: True when the dialog was accepted and saved, else False.
    """
    item = self.currentItem()
    # DirectoryRole carries the project directory (QVariant -> string).
    path = item.data(0, DirectoryRole).toString()
    mkPath = os.path.join(path, "mk.cfg")
    props = Properties(mkPath)
    # LINK_LIBS entries may be separated by spaces, semicolons or commas.
    libs = re.split(' |;|,', props.get('LINK_LIBS'))
    d = DependenciesDialog(libs)
    if d.exec_():
        # Normalise to comma-separated on save and notify listeners.
        props.assign("LINK_LIBS", ",".join(d.libs))
        props.save(mkPath)
        self.depsChanged.emit(path)
        return True
    return False
def replacefile(source, target, propname, propcontent):
    """Copy *source* over *target* (creating target's directory as needed),
    then overwrite property *propname* with *propcontent* in the copy.
    """
    # If target does not exist yet, copy source to target first.
    print '开始替换...'
    print 'source : ' + source
    print 'target : ' + target
    print '属性名' + propname
    print '属性内容' + propcontent
    # Ensure the target's parent directory exists before copying.
    path = os.path.split(target)
    if not os.path.exists(path[0]):
        os.makedirs(path[0])
    shutil.copy2(source, target)
    # Rewrite the requested property in the freshly copied file.
    targetfile = Properties(target)
    targetfile.put(propname, propcontent)
    print '...backup结束'
def main(datasetName, method):
    """Run biased-label classification for one dataset with *method*.

    Supported methods: kmm, kliep, arulsif. Anything else prints a usage
    hint and returns without running.
    """
    if method not in ('kmm', 'kliep', 'arulsif'):
        print('Methods allowed are : kmm, kliep or arulsif. Please try again.')
        return
    props = Properties('config.properties', datasetName)
    # Source/target stream files share the dataset-specific base path.
    base = Properties.BASEDIR + datasetName
    srcfile = base + Properties.SRCAPPEND
    trgfile = base + Properties.TRGAPPEND
    mgr = Manager(srcfile, trgfile)
    Properties.logger.info(props.summary(method))
    Properties.logger.info('Start classification for biased label data.')
    mgr.startClassification(datasetName, method)
def deploy(filename):
    """Execute every template step: copy directories, and copy or
    line-replace files into their resolved target locations.
    """
    print '开始deploy...' + filename
    for x in templates:
        print '******开始进行第' + str(x['id']) + '项操作****** '
        if x.has_key('sourcedir'):
            # Directory step: copy every listed directory into each target.
            target = gettarget(x.get('source'), x.get('target'))
            for y in target:
                for z in x.get('sourcedir'):
                    # Copy each directory under sourcedir into the target.
                    sourcedirt = rootdirdict.get(x.get('source')) + z
                    targetdirt = target[y] + z
                    backup(sourcedirt, targetdirt)
        elif x.has_key('sourcefile'):
            # File step: decide between per-line replacement and plain copy.
            target = gettarget(x.get('source'), x.get('target'))
            updatetype = x.get('update_type')
            if updatetype == 'update_line':
                updatename = x.get('update_name')
                # Line replacement needs a property name and exactly one
                # source file.
                if updatename and len(x.get('sourcefile')) == 1:
                    for y in target:
                        for z in x.get('sourcefile'):
                            sourcet = rootdirdict.get(x.get('source')) + z
                            targett = target[y] + z
                            updatecontent = x.get('update_content')
                            # Default value comes from the source file itself.
                            propcontent = Properties(sourcet).get(updatename)
                            if updatecontent:
                                # Prefer the per-target override when given.
                                if updatecontent.get(y):
                                    replacefile(sourcet, targett, updatename,
                                                updatecontent.get(y))
                                else:
                                    replacefile(sourcet, targett, updatename,
                                                propcontent)
                            else:
                                replacefile(sourcet, targett, updatename,
                                            propcontent)
                else:
                    print '模板中update_type是逐行替换,则update_name必须有值,且sourcefile数量只能为1'
            else:
                # Plain copy of every source file into each target.
                for y in target:
                    for z in x.get('sourcefile'):
                        sourcet = rootdirdict.get(x.get('source')) + z
                        targett = target[y] + z
                        backupfile(sourcet, targett)
        else:
            # NOTE(review): if x['id'] is an int this concatenation raises
            # TypeError (other prints wrap it in str()) — confirm id type.
            print '模板错误:id为' + x['id']
    print '...deploy结束'