def testEmptyProperty3(self):
    """A key whose value is only trailing whitespace loads as the empty string."""
    lb = LineBuffer()
    lb.append("x= \n")
    expected = {"x": ""}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def testEscape1(self):
    """A backslash-t escape in the value is decoded to a real tab."""
    lb = LineBuffer()
    lb.append("x=4\\t2\n")
    expected = {"x": "4\t2"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def testWhitespace3(self):
    """A plain key=value line loads verbatim."""
    lb = LineBuffer()
    lb.append("x=42\n")
    expected = {"x": "42"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def __init__(self, app=QtGui.QApplication):
    """Set up the main window: load properties, build the UI, start plotting.

    :param app: Qt application class/instance stored for later use.
    """
    # NOTE(review): super(self.__class__, ...) recurses infinitely if this
    # class is ever subclassed -- prefer naming the class explicitly.
    super(self.__class__, self).__init__()
    self.app = app
    self.properties = Properties()
    self.setupUi(self)
    self.trigger = Trigger()
    if self.properties['main']['ip'] is None:  # `is None` instead of `== None`
        # No IP configured yet: ask the user before anything else starts.
        self.changeIpAddress()
        self.setupComplete = False
    elif self.properties['main']['autosetup']:
        # Apply the stored settings automatically and start plot threads.
        self.setCurrentSettings()
        self.setupPlotThreads()
    else:
        self.setVisuals()
        self.setupPlotThreads()
    self.setupPlots()
    self._setLeValidator()
    self._connections()
def main():
    """Entry point: parse a properties-file path, run the linear-solve chain,
    time it, and plot the deformed mesh."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("file_path", help="Path to properties file", type=str)
    input_path = arg_parser.parse_args().file_path

    t0 = time.time()

    # Props & Globdat
    conf = Properties()
    props = Properties()
    globdat = Properties()
    props.parseFile(input_path)

    # Build the module chain: input -> init -> linear solve.
    module = ChainModule()
    module.pushBack(InputModule("input"))
    module.pushBack(InitModule())
    module.pushBack(LinSolveModule("linsolve"))

    # Execute
    Execute(module, conf, props, globdat)

    t1 = time.time()
    print("Elapsed time is ", t1 - t0)

    # globdat.mesh.updateGeometry(disp)
    mesh = globdat.get("mesh")
    mesh.plotDeformed(globdat.get("solu"), 1, rank=2)
    print(globdat.get("solu"))
def run(self, output_filename=None, output_filename_order=None):
    """Compute Shapley values for the loaded data.

    :param output_filename: CSV path for the raw Shapley values, or None.
    :param output_filename_order: CSV path for the ordered values, or None.
    :returns: (shapley_calc, shapley_calcOrder) DataFrames when no output
        paths are given; otherwise writes both CSVs and returns None.
    """
    Preprocessing_agg(self.data).run()
    M = self.Vectorization()
    shapley_DictEncode, shapley_DictDecode = self.Calc(M)
    properties = Properties(self.data, shapley_DictEncode)
    if properties.Efficiency():
        print("Properties / Efficience : +")
    if properties.DummyPlayer(M):
        print("Properties / DummyPlayer : +")
    shapley_calc = pd.DataFrame(shapley_DictDecode.items(),
                                columns=[CHANNEL_NAME, SHAPLEY_VALUE])
    shapley_calcOrder = shapleyOrderLib(self.data).run(
        M, shapley_DictDecode, self.encryped_dict)
    record_paths = [output_filename, output_filename_order]
    # `is None` instead of `== None`; drop the redundant `== True`.
    if all(path is None for path in record_paths):
        return shapley_calc, shapley_calcOrder
    shapley_calc.to_csv(output_filename)
    shapley_calcOrder.to_csv(output_filename_order)
def ScanForDevices(self): props = Properties() # Power off: print "Powering off..." props.PowerOff() time.sleep(5) # Power on: print "Powering back on..." props.PowerOn() adapter1 = self.bus.get(self.busName, self.path) adapter1.StartDiscovery() for i in range(10, 0, -1): print(i) time.sleep(1) # List devices: objMgr = ObjectManager() # objMgr.ListDevices() # List addresses: devices = objMgr.GetAddresses() for address in devices: print "%s\t%s" % (address, devices[address]['Name'])
def __init__(self, *args, **kwargs):
    """Initialize the lyrics player/service: store addon settings, run both
    base-class initializers, create the Song helpers, and start in GUI or
    background mode depending on how the script was launched.
    """
    # set addon object
    self.m_addon = kwargs["addon"]
    # passing "<windowId>" from RunScript() means run in background
    self.use_gui = kwargs["gui"]
    # initialize our super classes (explicit calls: multiple inheritance,
    # both bases need their own __init__ to run)
    XBMCPlayer.__init__(self)
    Properties.__init__(self)
    # initialize timers
    self._lyric_timer = None
    self._fetch_timer = None
    # log started action
    self._log_addon_action("started")
    # initialize our Song class
    self.song = Song(addon=self.m_addon)
    # initialize our prefetched Song class
    # TODO: remove the self.isPlayingAudio() check when/if XBMC supports offset for player
    # NOTE(review): getSetting() typically returns a string, so "false"
    # would still be truthy here -- confirm that is intended.
    self.prefetched_song = None
    if (self.m_addon.getSetting("prefetch_lyrics") and self.isPlayingAudio()):
        self.prefetched_song = Song(addon=self.m_addon, prefetch=True)
    # start
    if (self.use_gui):
        self._start_gui()
    else:
        self._start_background()
def __init__(self, extractor, slide, slide_number):
    """Wrap one presentation slide: load external properties and, when the
    slide's notes start with '.', parse them as a layer directive that
    sets the layer id and related metadata.

    :param extractor: owning extractor (provides settings).
    :param slide: the slide object (has notes, shapes).
    :param slide_number: slide index, also passed to the superclass.
    """
    self.__slide = slide
    self.__extractor = extractor
    self.__slide_number = slide_number
    self.__external_properties = Properties(extractor.settings)
    super().__init__(slide_number, self.__external_properties.pathways)
    # Find `layer-id` text boxes so we have a valid ID **before** using
    # it when setting a shape's `path_id`.
    if slide.has_notes_slide:
        notes_slide = slide.notes_slide
        notes_text = notes_slide.notes_text_frame.text
        if notes_text.startswith('.'):
            layer_directive = Parser.layer_directive(notes_text)
            if 'error' in layer_directive:
                super().error(
                    'Slide {}: invalid layer directive: {}'.format(
                        slide_number, notes_text))
            else:
                self.layer_id = layer_directive.get('id')
                self.background_for = layer_directive.get('background-for', '')
                self.description = layer_directive.get(
                    'description', self.layer_id.capitalize())
                self.models = layer_directive.get('models', '')
                self.queryable_nodes = layer_directive.get(
                    'queryable-nodes', False)
                # A layer acting as background for another is never selectable.
                self.selectable = self.background_for == '' and not layer_directive.get(
                    'not-selectable')
                self.selected = layer_directive.get('selected', False)
                self.zoom = layer_directive.get('zoom', None)
    self.__current_group = []
    # Cannot overlap with slide shape ids...
    self.__next_local_id = 100001
def __init__(self, local_debug=False):
    """Initialize the job runner: read runtime.properties, fetch and persist
    the job config from the naga platform, and resolve HADOOP_HOME.

    :param local_debug: not used in this method -- presumably toggles debug
        behaviour elsewhere; confirm with callers.
    """
    self._client = None
    self.schedule_id = None
    self._init_log()
    # Fetch runtime information.
    self._fill_runtime_args()
    # Fetch naga platform information.
    properties = Properties('{0}/runtime.properties'.format(
        self.job_name)).get_properties()
    rpc_server = properties.get('api.server')
    rpc_port = properties.get('api.server.port')
    logging.info('rpcServer={},rpcPort={}'.format(rpc_server, rpc_port))
    if rpc_server and rpc_port:
        self._naga_api = NagaApi(rpc_server, rpc_port)
        job_config = self._naga_api.get_job_config(self.task_name, self.job_name, self.flow_exec_id)
        if job_config:
            self.team = job_config['team']
            self.config = job_config
            # Persist the fetched config back as this run's runtime config.
            self._naga_api.save_job_runtime_config(self.task_name, self.job_name, self.flow_exec_id, self.config)
        else:
            logging.info('get job config failed!')
    self.hadoop_home = os.getenv('HADOOP_HOME')
    if self.hadoop_home is None:
        # Fall back to the site's default install location.
        self.hadoop_home = '/soft/home/hadoop-2.8.5'
    logging.info('HADOOP_HOME:{}'.format(self.hadoop_home))
def main() :
    """
    This is a simple script used to parse a .properties file and recurslively examine a body
    of code to identify properties which are no longer referenced.

    Usage :
    >> python dead-properties-finder.py propFile searchRootDir

    Where propFile is the properties file and searchRootDir is the root directory of the
    body of code to search through

    If a property is not found in the code base, the name of the property is printed to stdout

    Note : If the body of code you are searching contains the properties file you are parsing,
    it won't work! That file will contain all properties which are being searched for and thus
    the script will not flag any properties as unreferenced. It may be best to remove ALL
    properties files from the project temporarily before running the script to ensure there
    are no false negatives.

    Also, the class used to parse the properties file assumes property value pairs are "="
    delimited. Using ":" may be accepted by other properties parsers, but the properties
    file class used by this script currently only accepts "="
    """
    # Python 2 code (print statement below).
    propFile = sys.argv[1]
    searchRoot = sys.argv[2]
    props = Properties(propFile)
    # grep -rq exits 0 when the property name is found anywhere under
    # searchRoot; a non-zero status therefore marks an unreferenced property.
    for prop in props.keySet() :
        retCode = os.system("grep -rq %s %s" % (prop, searchRoot))
        if retCode > 0 :
            print prop
def testKeyEscape2(self):
    """An escaped space inside a key is kept as part of the key."""
    lb = LineBuffer()
    lb.append("x\\ y=42\n")
    expected = {"x y": "42"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def is_client_access_allowed(url, config, allowed, excluded):
    """Decide whether `url` may be accessed according to the crawler config.

    :param url: URL to validate.
    :param config: path to a properties file with the policy settings.
    :param allowed: path to a file listing allowed hostnames (one per line).
    :param excluded: path to a file listing excluded hostnames.
    :returns: True when every policy check passes, False otherwise.
    """
    # get url blocks
    url_parsed = urlparse(url)
    hostname = url_parsed.hostname
    path = url_parsed.path
    # read config variables
    conf = Properties()
    with open(config) as f:
        conf.load(f)
    # check allowed protocols
    allowed_protocols = conf['ALLOWED_PROTOCOLS'].replace(' ', '').split(',')
    if url_parsed.scheme not in allowed_protocols:
        return False
    # check excluded file types (extension after the last dot in the path)
    excluded_types = conf['EXCLUDE_FILE_TYPES'].replace(' ', '').split(',')
    dot_index = path.rfind('.', 0)
    if dot_index > 0 and path[dot_index+1:].lower() in excluded_types:
        return False
    # host-group flags (`== 'true'` already yields a bool; no ternary needed)
    exclude_privates = conf['EXCLUDE_PRIVATE_HOSTS'] == 'true'
    exclude_singles = conf['EXCLUDE_SINGLE_HOSTS'] == 'true'
    # read excluded hosts
    with open(excluded) as f:
        excluded_hosts = [host.strip() for host in f.readlines()]
    # read allowed hosts
    with open(allowed) as f:
        allowed_hosts = [host.strip() for host in f.readlines()]
    # validate address (covers both None and the empty string)
    if not hostname:
        return False
    # check excluded hosts
    if hostname in excluded_hosts:
        return False
    # check allowed hosts (an empty allow-list means "allow everything")
    if len(allowed_hosts) > 0 and (hostname not in allowed_hosts):
        return False
    # exclude private hosts
    if exclude_privates and is_ip_address_private(hostname):
        return False
    # exclude single-label hosts (no dots, e.g. "localhost")
    if exclude_singles and len(hostname.split('.')) == 1:
        return False
    # now we can confirm positive
    return True
def testOverwrite(self):
    """A later assignment to the same key overwrites the earlier one."""
    lb = LineBuffer()
    lb.append("x=42\n")
    lb.append("x=44\n")
    expected = {"x": "44"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def testLineContinue(self):
    """A trailing backslash continues the value onto the next line."""
    lb = LineBuffer()
    lb.append("x=42 \\\n")
    lb.append("    boo\n")
    expected = {"x": "42 boo"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def __init__(self, connection):
    """Collect the VM list from the connection and record whether
    virtualization is in use."""
    vm_props = Properties()
    vm_props.define_vms(connection)
    self.vms_list = vm_props.get_vms()
    self.vms_props = []
    # An empty VM list means virtualization is effectively disabled.
    self.virtualization = "enabled" if self.vms_list else "disabled"
def __init__(self, *args, propFile=None, **kwargs):
    """Initialize the FindFile window and load its properties file.

    :param propFile: path to the properties file; defaults to
        "FindFile.properties" when None.
    """
    super(FindFile, self).__init__(*args, **kwargs)
    if propFile is None:
        propFile = "FindFile.properties"
    self.propFile = propFile
    self.properties = Properties()
    if self.propFile is not None and os.path.exists(self.propFile):
        # Use a context manager: the original leaked the handle returned
        # by the bare open() call.
        with open(self.propFile) as prop_fh:
            self.properties.load(prop_fh)
    self.InitUI()
def __init__(self, QtCoreModule, QtGuiModule, creatorPolicy):
    """Bind the Qt modules used by the UI builder and reset its state.

    The passed modules are also published as module-level globals so the
    rest of this file can refer to QtCore/QtGui directly.
    """
    self.factory = QObjectCreator(creatorPolicy)
    self.wprops = Properties(self.factory, QtCoreModule, QtGuiModule)
    # Deliberate module-global rebinding -- later code in this file depends
    # on these names being set here.
    global QtCore, QtGui
    QtCore = QtCoreModule
    QtGui = QtGuiModule
    self.reset()
def __init__(self, arg):
    """Construct from either a numeric type id or another Object (copy)."""
    self.mProperties = Properties()
    arg_type = type(arg)
    if arg_type == int:
        # Plain type id.
        self.mTypeId = arg
    elif arg_type == Object:
        # Copy: share the source's type id and properties.
        self.mTypeId = arg.mTypeId
        self.mProperties = arg.mProperties
def mkProps(props, dir):
    """Overlay properties from <dir>/mk.cfg onto `props` and return it.

    A value beginning with a backslash means "append": the property's
    current value is prefixed, separated by a single space.
    """
    cfg_path = os.path.join(dir, 'mk.cfg')
    if not os.path.exists(cfg_path):
        return props
    overlay = Properties(cfg_path)
    for key in overlay.keys():
        new_value = overlay.get(key)
        if new_value.startswith('\\'):
            new_value = props.get(key) + ' ' + new_value[1:]
        props.assign(key, new_value)
    return props
def testIgnoreBlanks(self):
    """Empty lines, whitespace-only lines and comment lines are skipped."""
    lb = LineBuffer()
    lb.append("\n")
    lb.append("x=42\n")
    lb.append("  \n")
    lb.append("  # comment\n")
    expected = {"x": "42"}  # renamed from `dict`, which shadowed the builtin
    p = Properties()
    p.load(lb)
    self.assertTrue(p.equalsDict(expected))
def mkProps(props, dir):
    """Merge <dir>/mk.cfg entries into `props`; '\\'-prefixed values append
    to the existing value instead of replacing it."""
    config_file = os.path.join(dir, 'mk.cfg')
    if os.path.exists(config_file):
        parsed = Properties(config_file)
        for prop_name in parsed.keys():
            raw = parsed.get(prop_name)
            # A leading backslash means "append to the current value".
            if raw.startswith('\\'):
                value = props.get(prop_name) + ' ' + raw[1:]
            else:
                value = raw
            props.assign(prop_name, value)
    return props
def main(datasetName, probFromSource):
    """Run the stream-fusion simulation for the given dataset."""
    props = Properties('config.properties', datasetName)
    source_path = Properties.BASEDIR + datasetName + Properties.SRCAPPEND
    target_path = Properties.BASEDIR + datasetName + Properties.TRGAPPEND
    mgr = Manager(source_path, target_path)
    # Log the configuration summary before the simulation starts.
    Properties.logger.info(props.summary())
    Properties.logger.info('Start Stream Simulation')
    mgr.startFusion(datasetName, probFromSource)
def main():
    """Entry point: parse a properties-file path, run the nonlinear
    analysis chain, and report elapsed time and the deformed mesh."""
    logging.basicConfig(level=logging.INFO, format='%(message)s')

    cli = argparse.ArgumentParser()
    cli.add_argument("file_path", help="Path to properties file", type=str)
    properties_path = cli.parse_args().file_path

    t0 = time.time()

    # Props & Globdat
    conf = Properties()
    props = Properties()
    globdat = Properties()
    props.parseFile(properties_path)

    # Chain module: input -> init -> nonlinear solve -> sampling -> control.
    module = ChainModule()
    for stage in (InputModule("input"),
                  InitModule(),
                  NonlinModule("nonlin"),
                  SampleModule("sample"),
                  ControlModule("control")):
        module.pushBack(stage)

    # Execute
    Execute(module, conf, props, globdat)

    t1 = time.time()
    logging.info("Elapsed time is {}".format(t1 - t0))

    mesh = globdat.get("mesh")
    mesh.plotDeformed(globdat.get("solu"), 1)
def __init__(self, connection, vname):
    """Gather name, status and resource usage for the virtual host `vname`."""
    vinfo = Properties()
    vinfo.gather_virtual_information(connection, vname)
    # Copy each gathered value onto this instance.
    self.name = vinfo.get_vhostname()
    self.status = vinfo.get_vstatus()
    self.disks = vinfo.get_vdisks()
    self.memory = vinfo.get_vmemory_usage()
    self.cpu = vinfo.get_vcpu()
def on_actionNew_triggered(self):
    """Create a new connection config via the Properties dialog and
    persist it to settings."""
    dialog = Properties(self.config, parent=self)
    if not dialog.exec_():
        return
    ovconfig = dialog.getconfig()
    self.addconnection(ovconfig)
    ovconfig.copycerts()
    filename = ovconfig.writeconfig()
    # Remember where this connection's config file lives.
    settingkey = "connections/%s" % ovconfig.getname()
    self.settings.setValue(settingkey, filename)
def doProperties(self, row):
    """Open the properties dialog for the connection at `row`, and persist
    any edits the user accepts."""
    if row < 0:
        return
    ovconfig = self.tableConnections.configs[row]
    dialog = Properties(self.config, ovconfig, self)
    if dialog.exec_():
        ovconfig = dialog.getconfig()
        self.editconnection(ovconfig, row)
        ovconfig.copycerts()
        filename = ovconfig.writeconfig()
        # Remember where this connection's config file lives.
        settingkey = "connections/%s" % ovconfig.getname()
        self.settings.setValue(settingkey, filename)
def main(datasetName, method):
    """Run biased-label classification with the given density-ratio method
    ('kmm', 'kliep' or 'arulsif')."""
    if method not in ('kmm', 'kliep', 'arulsif'):
        print('Methods allowed are : kmm, kliep or arulsif. Please try again.')
        return
    props = Properties('config.properties', datasetName)
    source_path = Properties.BASEDIR + datasetName + Properties.SRCAPPEND
    target_path = Properties.BASEDIR + datasetName + Properties.TRGAPPEND
    mgr = Manager(source_path, target_path)
    Properties.logger.info(props.summary(method))
    Properties.logger.info('Start classification for biased label data.')
    mgr.startClassification(datasetName, method)
def __init__(self, connection, get_login=False):
    """Collect physical-host information (hostname, disks, cpu, memory) and,
    optionally, login statistics from the security event log.

    :param connection: connection passed to gather_physical_information.
    :param get_login: when True, also collect per-user login info and
        determine the most frequent login.
    """
    # Every new property should be defind here with the corresponding method of Properties.properties.gather_physical_information
    prop = Properties()
    prop.gather_physical_information(connection)
    self.hostname = prop.get_hostname()
    self.disks = prop.get_disks()
    self.cpu = prop.get_cpu_usage()
    self.memory = prop.get_memory_usage()
    self.virtualization = None
    # 4624 is presumably the Windows "successful logon" event id -- confirm.
    login = Logon(self.hostname, 4624)
    try:
        if get_login:
            self.login_info = {
                "most_login": None,
                "login_info": login.get_logged_users()
            }
            login.close_log()
            most_login = {
                "amount_logins": 0,
                "name": "None",
                "last_login": None
            }
            # Pick the user with the highest login count.
            for user in self.login_info["login_info"]:
                # "liberton" looks like a service/admin account that should
                # not win the "most logins" ranking -- confirm.
                if user["name"] == "liberton":
                    continue
                if int(user["amount_logins"]) > int(
                        most_login["amount_logins"]):
                    most_login = user
            self.login_info["most_login"] = most_login
    except Exception as e:
        # NOTE(review): broad catch -- any failure above just prints and
        # leaves login_info as None.
        print(e)
        self.login_info = None
def replacefile(source, target, propname, propcontent):
    """Copy `source` over `target` (creating parent directories as needed)
    and set property `propname` to `propcontent` in the copied file.

    Python 2 code (print statements); the printed status messages are
    Chinese and are left untouched as runtime output.
    """
    # If target does not exist, copy source to target first.
    print '开始替换...'
    print 'source : ' + source
    print 'target : ' + target
    print '属性名' + propname
    print '属性内容' + propcontent
    # Ensure the target's parent directory exists before copying.
    path = os.path.split(target)
    if not os.path.exists(path[0]):
        os.makedirs(path[0])
    shutil.copy2(source, target)
    targetfile = Properties(target)
    targetfile.put(propname, propcontent)
    print '...backup结束'
def editDebugSettings(self):
    """Show the debug-settings dialog for the selected project and persist
    DEBUG_CWD / DEBUG_PARAMS back into its mk.cfg."""
    selected = self.currentItem()
    project_dir = selected.data(0, DirectoryRole).toString()
    cfg_path = os.path.join(project_dir, "mk.cfg")
    props = Properties(cfg_path)
    dialog = uis.loadDialog('debug_settings')
    # Pre-fill the dialog from the stored settings.
    dialog.cwdEdit.setText(props.get("DEBUG_CWD"))
    dialog.paramsEdit.setText(props.get("DEBUG_PARAMS"))
    dialog.browseDirButton.clicked.connect(
        lambda: utils.browseDirectory(dialog.cwdEdit))
    if dialog.exec_():
        props.assign('DEBUG_CWD', dialog.cwdEdit.text())
        props.assign('DEBUG_PARAMS', dialog.paramsEdit.text())
        self.debug = (dialog.cwdEdit.text(), dialog.paramsEdit.text())
        props.save(cfg_path)
def db_configure(self):
    """Decide whether the database should be used and load its password.

    Sets self.db_bypass to True when the configured host equals the
    "disabled" marker, or when no db_password is found in
    resources.private/ducc.private.properties. Python 2 code (print).
    """
    dbhost = self.ducc_properties.get('ducc.database.host')
    if (dbhost == self.db_disabled):
        self.db_bypass = True
        return
    else:
        self.db_bypass = False
    dbprops = Properties()
    dbprops.load(self.DUCC_HOME + '/resources.private/ducc.private.properties')
    self.db_password = dbprops.get('db_password')
    if (self.db_password == None):
        print "bypassing database because no password is set."
        self.db_bypass = True
def CreatePerObjectClassTable(self, classes):
    '''
    Saves object keys and classes to a SQL table

    Drops and recreates p.class_table with one row per classified object:
    the object key columns plus `class` (label) and `class_number`
    (1-based index of the class). `classes` is currently unused here.
    '''
    p = Properties.getInstance()
    if p.class_table is None:
        raise ValueError('"class_table" in properties file is not set.')
    index_cols = dbconnect.UniqueObjectClause()
    class_cols = dbconnect.UniqueObjectClause() + ', class, class_number'
    # The class column is sized to the longest bin label (+1 for safety).
    class_col_defs = dbconnect.object_key_defs() + ', class VARCHAR (%d)'%(max([len(c.label) for c in self.classBins])+1) + ', class_number INT'
    # Drop must be explicitly asked for Classifier.ScoreAll
    db = dbconnect.DBConnect.getInstance()
    db.execute('DROP TABLE IF EXISTS %s'%(p.class_table))
    db.execute('CREATE TABLE %s (%s)'%(p.class_table, class_col_defs))
    db.execute('CREATE INDEX idx_%s ON %s (%s)'%(p.class_table, p.class_table, index_cols))
    # NOTE(review): values are interpolated directly into the SQL text, not
    # parameterized -- acceptable only because labels/keys are trusted here.
    for clNum, clName in enumerate(self.perClassObjects.keys()):
        for obj in self.perClassObjects[clName]:
            query = ''.join(['INSERT INTO ',p.class_table,' (',class_cols,') VALUES (',str(obj[0]),', ',str(obj[1]),', "',clName,'", ',str(clNum+1),')'])
            db.execute(query)
    # MySQL supports physically ordering the table for faster scans.
    if p.db_type.lower() == 'mysql':
        query = ''.join(['ALTER TABLE ',p.class_table,' ORDER BY ',p.image_id,' ASC, ',p.object_id,' ASC'])
        db.execute(query)
    db.Commit()
def __init__(self, path):
    """
    - creates power plant and storage models
    - reads input timeseries

    :param path: (string) path to *.main_ctrl.json
    """
    # Only the first entry of `path` is used -- presumably a list/argv-style
    # argument; confirm with callers.
    self.__prop = Properties(path=path[0])
    self.__gs = GeoStorage(self.__prop)
    self.__pp_info = pp.load_models(self.__prop)
    self.__directory = os.path.dirname(self.__gs.simulation_files())
    self.__basename = os.path.basename(self.__gs.simulation_files())
    info('INTERFACE powerplant models loaded')
    # Input time series: CSV with ',' delimiter and '.' decimal separator.
    self.__input_ts = read_csv(os.path.join(
        self.__prop.working_dir, self.__prop.input_timeseries_file),
        delimiter=',', decimal='.')
    info('INTERFACE Input time series read')
    # Output frame: one row per simulation time step, all columns preallocated.
    cols = [
        'time', 'Q_target', 'Q_actual', 'Q_sto', 'P_plant', 'ti_plant',
        'T_ff_sys', 'T_rf_sys', 'T_ff_sto', 'T_rf_sto', 'v_sto', 'pp_err'
    ]
    self.__output_ts = DataFrame(index=np.arange(
        0, self.__prop.t_steps_total), columns=cols)
def Save(self, filename):
    """Write the training set (labels, object keys, object coordinates and
    cached image strings) to `filename`.

    Logs and re-raises any error during writing. Uses a context manager so
    the file is always closed exactly once (the original closed it twice
    on the error path).
    """
    # check cache freshness
    self.cache.clear_if_objects_modified()
    try:
        with open(filename, 'w') as f:
            from properties import Properties
            p = Properties.getInstance()
            f.write('# Training set created while using properties: %s\n' % (p._filename))
            f.write('label ' + ' '.join(self.labels) + '\n')
            for label, obKey in self.entries:
                line = '%s %s %s\n' % (label,
                                       ' '.join([str(int(k)) for k in obKey]),
                                       ' '.join([str(int(k)) for k in db.GetObjectCoords(obKey)]))
                f.write(line)
            # Trailing comment line carries the cached image strings.
            f.write('# ' + self.cache.save_to_string([k[1] for k in self.entries]) + '\n')
    except:
        # Bare except kept deliberately: log every failure, then re-raise.
        logging.error("Error saving training set %s" % (filename))
        raise
    logging.info('Training set saved to %s' % filename)
    self.saved = True
def paned_window(self):
    """Build the three-column main layout (projects/actions on the left,
    canvas/resources in the middle, properties on the right) and create
    the major UI components.

    NOTE(review): relies on a module-level `root` Tk window for screen
    geometry -- confirm it is always created before this runs.
    """
    self.panedwindow = ttk.Panedwindow(self.parent, orient = tk.HORIZONTAL)
    self.panedwindow.pack(expand = True, fill = tk.BOTH)
    # Panes are sized from the screen height minus 140 px of window chrome.
    self.left_pane = ttk.Frame(self.panedwindow, height = root.winfo_screenheight() - 140, relief = tk.SUNKEN)
    self.middle_pane = ttk.Frame(self.panedwindow, height = (root.winfo_screenheight() - 140), relief = tk.SUNKEN)
    self.right_pane = ttk.Frame(self.panedwindow, height = (root.winfo_screenheight() - 140), relief = tk.SUNKEN)
    self.panedwindow.add(self.left_pane, weight = 1)
    self.panedwindow.add(self.middle_pane, weight = 1)
    self.panedwindow.add(self.right_pane, weight = 10)
    # Left column: project explorer above the actions pane.
    self.panedwindow_left = ttk.Panedwindow(self.left_pane, orient = tk.VERTICAL)
    self.panedwindow_left.pack(expand = True, fill = tk.BOTH)
    self.pane_projects = ttk.Frame(self.panedwindow_left, height = (root.winfo_screenheight() - 140) / 2, relief = tk.SUNKEN)
    self.pane_actions = ttk.Frame(self.panedwindow_left, height = (root.winfo_screenheight() - 140) / 2, relief = tk.SUNKEN)
    self.panedwindow_left.add(self.pane_projects, weight = 1)
    self.panedwindow_left.add(self.pane_actions, weight = 1)
    # Middle column: canvas above the resources strip.
    self.panewindow_middle = ttk.PanedWindow(self.middle_pane, orient = tk.VERTICAL)
    self.panewindow_middle.pack(expand = True, fill = tk.BOTH)
    self.pane_canvas = ttk.Frame(self.panewindow_middle, relief = tk.SUNKEN)
    self.pane_resources = ttk.Frame(self.panewindow_middle, width = 100, relief = tk.SUNKEN)
    self.panewindow_middle.add(self.pane_canvas, weight = 5)
    self.panewindow_middle.add(self.pane_resources, weight = 1)
    # Construction order matters: Canvas needs Properties, Actions needs both.
    self.menubar = Menubar(self.parent)
    self.properties = Properties(self.right_pane)
    self.canvas = Canvas(self.properties)
    self.toolbar = Toolbar(self.pane_canvas, self.canvas)
    self.project_explorer = ProjectExplorer(self.pane_projects)
    self.canvas.create_Ui(self.pane_canvas)
    self.actions = Actions(self.pane_actions, self.canvas, self.properties)
    self.resources = Resources(self.pane_resources)
def unpack(cls, data):
    """Parse a content-header frame: method class, weight, body size and
    the class-specific properties."""
    # we special-case as we need properties unpack class to change according to method_class
    method_class, data = Octet.unpack(data)
    weight, data = Octet.unpack(data)
    body_size, data = LongLong.unpack(data)
    props_cls = Properties.get_by_class(method_class)
    properties, data = props_cls.unpack(data)
    return cls(method_class, body_size, properties)
def Save(self, filename):
    """Write the training set (labels, object keys and pre-fetched
    coordinates) to `filename`; logs and re-raises on write errors.

    Cache-freshness and image-string saving are best-effort: failures
    there are logged but do not abort the save.
    """
    # check cache freshness
    try:
        self.cache.clear_if_objects_modified()
    except:
        logging.info("Couldn't check cache freshness, DB connection lost?")
    f = open(filename, 'w')
    try:
        from properties import Properties
        p = Properties.getInstance()
        f.write('# Training set created while using properties: %s\n'%(p._filename))
        f.write('label '+' '.join(self.labels)+'\n')
        # Coordinates come from self.coordinates, indexed in step with
        # self.entries (same order, same length).
        i = 0
        for label, obKey in self.entries:
            line = '%s %s %s\n'%(label, ' '.join([str(int(k)) for k in obKey]), ' '.join([str(int(k)) for k in self.coordinates[i]]))
            f.write(line)
            i += 1 # increase counter to keep track of the coordinates positions
        try:
            f.write('# ' + self.cache.save_to_string([k[1] for k in self.entries]) + '\n')
        except:
            logging.error("No DB connection, couldn't save cached image strings")
    except:
        logging.error("Error saving training set %s" % (filename))
        f.close()
        raise
    f.close()
    logging.info('Training set saved to %s'%filename)
    self.saved = True
def on_save_workspace(self, evt):
    """Prompt for a destination and save the current workspace there."""
    p = Properties.getInstance()
    # Default name combines the properties-file basename and image table.
    base = os.path.splitext(os.path.split(p._filename)[1])[0]
    default_name = '%s_%s.workspace' % (base, p.image_table)
    dlg = wx.FileDialog(self, message="Save workspace as...",
                        defaultDir=os.getcwd(),
                        defaultFile=default_name,
                        style=wx.SAVE | wx.FD_OVERWRITE_PROMPT | wx.FD_CHANGE_DIR)
    if dlg.ShowModal() == wx.ID_OK:
        wx.GetApp().save_workspace(dlg.GetPath())
def main():
    """Train the network from config.properties, then write per-pattern
    input/output/expected rows to the configured function file."""
    props = Properties("config.properties")

    with open(props.training_file) as f:
        train_patterns = read_patterns(f)

    # Layer layout: input size, configured hidden sizes, output size.
    input_size = len(train_patterns[0].input)
    output_size = len(train_patterns[0].expected_output)
    layers_sizes = [input_size] + props.hidden_layer_sizes + [output_size]

    test_patterns = []
    if props.test_file != "":
        with open(props.test_file) as f:
            test_patterns = read_patterns(f)

    network = NeuralNetwork(train_patterns, test_patterns, props.etha)
    network.init_weights(layers_sizes)
    network.learn_patterns(props.max_epochs)

    with open(props.filename) as f:
        all_patterns = read_patterns(f)

    print("Output | Expected output")
    with open(props.function_file, "w+") as f:
        for pattern in all_patterns:
            output = network.get_output(pattern.input)
            # Row layout: input;output;expected, each ';'-joined internally.
            parts = (pattern.input, output, pattern.expected_output)
            f.write(";".join(";".join(str(x) for x in part) for part in parts))
            f.write("\n")
            print("{} | {}".format(output, pattern.expected_output))
def cluster():
    """Render the detail page for the cluster chosen on the results page."""
    query = request.form['query']
    thepath = request.form['thepath'] if 'thepath' in request.form else None
    # Example queries resolve via their stored index; ad-hoc ones via path.
    if xr.is_example_query(query):
        pp = Properties(query, xr.get_example_query_index(query))
    else:
        pp = Properties(query, thepath=thepath)
    finalresult = rr.read_file(pp.RESULTS_C, pp.RESULTS_D)
    # "explore" ends with a 1-based cluster number; convert to 0-based index.
    cluster_index = int(request.form['explore'].split()[-1]) - 1
    return render_template('cluster.html',
                           cluster_num=cluster_index + 1,
                           cluster=finalresult["clusters"][cluster_index],
                           query=query,
                           example_queries=xr.read_example_queries(),
                           thepath=thepath)
def _our_json_decode(o):
    """Unpack JSON objects using __class__ annotations.

    Dicts carrying a '__class__' key are rebuilt into the matching domain
    object (Properties, Element, Device, User); lists are decoded
    element-wise; every other value is returned unchanged.
    """
    if isinstance(o, dict) and '__class__' in o:
        # pop() removes the marker so the rest of `o` can be used as kwargs.
        class_name = o.pop('__class__')
        if class_name == 'Properties':
            return Properties(**o)
        elif class_name == 'Element':
            # 'allowed_users' is not a constructor argument: extract it first.
            allowed_users = []
            if 'allowed_users' in o:
                if isinstance(o['allowed_users'], list):
                    allowed_users = o['allowed_users']
                del o['allowed_users']
            element = Element(**o)  # Unpack dict as keyword-arguments
            element.allowed_users = allowed_users
            return element
        elif class_name == 'Device':
            # Affordances are themselves annotated objects: decode recursively.
            o['affordances'] = _our_json_decode(o['affordances'])
            return Device(**o)
        elif class_name == 'User':
            # 'element_importances' is likewise not a constructor argument.
            element_importances = {}
            if 'element_importances' in o:
                if isinstance(o['element_importances'], dict):
                    element_importances = o['element_importances']
                del o['element_importances']
            user = User(**o)
            user.importance = element_importances
            return user
    elif isinstance(o, list):
        return [_our_json_decode(item) for item in o]
    return o
def _init_config(self, conf_json):
    """Build the config backend matching self._configType ('xml' or
    'properties'); raise TypeError for any other type."""
    config_type = self._configType
    if config_type == 'xml':
        return Xml(self._configPath, self._configPath)
    if config_type == 'properties':
        return Properties(self._configPath)
    raise TypeError("The [%s] configType Error!" % config_type)
def run_tests(jmx, config_file, report_dir, soft_cleanup, hard_cleanup):
    """Run the JMeter test plan `jmx` once per configuration section.

    For each run id found in `config_file`: initialize report properties,
    build and execute the JMeter command, generate reports, then clean up.
    Exits the process when the config file contains no sections.
    (The original's bare string literals -- no-op statements used as
    comments -- are replaced with real comments.)
    """
    # Parse provided config file
    test_configs = parsers.TestConfigs(config_file)
    if not test_configs.is_test_config_provided():
        print("No configurations are provided. File is empty: " + config_file)
        exit(-1)
    # Parse configs in each section and generate commands to run
    for run_id in test_configs.get_sections():
        Properties.initialize(report_dir)
        # Get and execute JMeter command
        test_configs.set_options(run_id)
        jmeter_command = get_jmeter_command(jmx, test_configs)
        utils.run_command(jmeter_command)
        # Generate reports
        generate_reports(run_id, test_configs)
        # Cleanup
        utils.cleanup(soft_cleanup, hard_cleanup)
def print_properties(prop):
    """Format one properties row: the name followed by every measured
    attribute, ints printed as-is and other numbers with two decimals."""
    def pretty(value):
        # Identity comparison on the type object (`is`, not `==`); note
        # this intentionally sends bools to the "%.2f" branch, as before.
        if type(value) is int:
            return str(value)
        return "%.2f" % value
    ps = [pretty(getattr(prop, p)) for p in Properties.attributes()]
    return formatString % tuple([prop.name] + ps)
def editDebugSettings(self):
    """Open the debug-settings dialog for the current item and, on accept,
    write DEBUG_CWD / DEBUG_PARAMS back to the project's mk.cfg."""
    current = self.currentItem()
    base_dir = current.data(0, DirectoryRole).toString()
    settings_path = os.path.join(base_dir, "mk.cfg")
    props = Properties(settings_path)
    dlg = uis.loadDialog("debug_settings")
    dlg.cwdEdit.setText(props.get("DEBUG_CWD"))
    dlg.paramsEdit.setText(props.get("DEBUG_PARAMS"))
    dlg.browseDirButton.clicked.connect(lambda: utils.browseDirectory(dlg.cwdEdit))
    if not dlg.exec_():
        return
    cwd_text = dlg.cwdEdit.text()
    params_text = dlg.paramsEdit.text()
    props.assign("DEBUG_CWD", cwd_text)
    props.assign("DEBUG_PARAMS", params_text)
    self.debug = (cwd_text, params_text)
    props.save(settings_path)
def __readProperties(self):
    """Read consecutive <property> XML elements into a fresh Properties
    object; unknown elements are skipped."""
    result = Properties()
    while self.xml.readNextStartElement():
        if self.xml.name() == "property":
            self.__readProperty(result)
        else:
            self.__readUnknownElement()
    return result
def scanWorkspace(self):
    """Walk the source tree and record every library directory.

    A source directory's type comes from its mk.cfg TYPE entry when
    present; otherwise it is APP when a main() is found, else LIB.
    Populates self.wsLibs with {dirname: path relative to srcDir}.
    """
    self.wsLibs = {}
    # `dirpath`/`dir_type` renamed from `dir`/`type`, which shadowed builtins.
    for dirpath, subdirs, files in os.walk(self.srcDir):
        dir_type = ""
        if isSourceDir(dirpath, files):
            mkPath = os.path.join(dirpath, 'mk.cfg')
            if os.path.exists(mkPath):
                props = Properties(mkPath)
                if props.has("TYPE"):
                    dir_type = props.get("TYPE")
            if dir_type == "":
                dir_type = "APP" if findMain(dirpath) else "LIB"
            if dir_type == "LIB":
                dirname = (dirpath.split('/'))[-1]
                self.wsLibs[dirname] = os.path.relpath(dirpath, self.srcDir)
def export_all_properties(out, app, props):
    """Pickle a flat {(app, property name, measurement): value} mapping of
    every property's measured attributes to the open file `out`."""
    data = {}
    for prop in props:
        for measurement in Properties.attributes():
            key = (app, prop.name, measurement)
            # `in` instead of dict.has_key(), which was removed in Python 3.
            assert key not in data
            data[key] = getattr(prop, measurement)
    pickle.dump(data, out)
def on_save_properties(self, evt):
    """Prompt for a location and save the current properties file there."""
    p = Properties.getInstance()
    # Default to the current file's directory, name and extension.
    dirname, filename = os.path.split(p._filename)
    ext = os.path.splitext(p._filename)[-1]
    dialog = wx.FileDialog(self, message="Save properties as...",
                           defaultDir=dirname,
                           defaultFile=filename,
                           wildcard=ext,
                           style=wx.SAVE | wx.FD_OVERWRITE_PROMPT | wx.FD_CHANGE_DIR)
    if dialog.ShowModal() == wx.ID_OK:
        p.save_file(dialog.GetPath())
def scanWorkspace(self):
    """Walk the source tree under self.srcDir and register every LIB
    directory in self.wsLibs (dirname -> path relative to srcDir).

    Type resolution: mk.cfg TYPE entry if present, else APP when a main()
    exists, else LIB.
    """
    self.wsLibs = {}
    # `src_dir`/`kind` renamed from `dir`/`type`, which shadowed builtins.
    for src_dir, subdirs, files in os.walk(self.srcDir):
        kind = ""
        if isSourceDir(src_dir, files):
            mkPath = os.path.join(src_dir, 'mk.cfg')
            if os.path.exists(mkPath):
                props = Properties(mkPath)
                if props.has("TYPE"):
                    kind = props.get("TYPE")
            if kind == "":
                if findMain(src_dir):
                    kind = "APP"
                else:
                    kind = "LIB"
            if kind == "LIB":
                dirname = (src_dir.split('/'))[-1]
                self.wsLibs[dirname] = os.path.relpath(src_dir, self.srcDir)
def OnBrowse(self, evt):
    """Let the user pick a properties file, load it, and update the
    database labels and test button."""
    dlg = wx.FileDialog(self, "Select a properties file",
                        defaultDir=os.getcwd(),
                        style=wx.OPEN | wx.FD_CHANGE_DIR)
    if dlg.ShowModal() != wx.ID_OK:
        return
    props = Properties.getInstance()
    props.LoadFile(dlg.GetPath())
    # Reflect the newly loaded connection details in the UI.
    self.lblDBHost.SetLabel(props.db_host)
    self.lblDBName.SetLabel(props.db_name)
    self.btnTest.SetLabel('Test')
    self.btnTest.Enable()
def __init__(self, arg):
    """Build from a type id (int) or copy type id and properties from
    another Object."""
    self.mProperties = Properties()
    kind = type(arg)
    if kind == int:
        self.mTypeId = arg
    elif kind == Object:
        self.mTypeId = arg.mTypeId
        self.mProperties = arg.mProperties
def results():
    """Handle a search request: run the pipeline when needed, load the
    clustered results, and render the results page."""
    query = request.form['text']
    thepath = request.form['thepath'] if 'thepath' in request.form else None
    # A "___" prefix marks a query whose results already exist on disk.
    if query.startswith("___"):
        query = query[3:]
    elif not xr.is_example_query(query):
        thepath = main(query, request.remote_addr)
    # Example queries resolve via their stored index; ad-hoc ones via path.
    if xr.is_example_query(query):
        pp = Properties(query, xr.get_example_query_index(query))
    else:
        pp = Properties(query, thepath=thepath)
    finalresult = rr.read_file(pp.RESULTS_C, pp.RESULTS_D)
    return render_template('results.html',
                           results=finalresult,
                           query=query,
                           example_queries=xr.read_example_queries(),
                           thepath=thepath)
def dimensionReduction():
    """Open the PCA/t-SNE plot for the classifier's training set, scoring
    each object by its misclassification rate across the permutations.

    Nested closure: relies on `self`, `misclassifications` and
    `nPermutations` from the enclosing scope. Python 2 code (xrange).
    """
    # Initialize PCA/tSNE plot
    pca_main = dr.PlotMain(self.classifier, properties = Properties.getInstance(), loadData = False)
    # Data channels: feature values, {index: object key}, binary labels,
    # label names, and the rounded per-object misclassification frequency.
    pca_main.set_data(self.classifier.trainingSet.values, dict([(index, object) for index, object in enumerate(self.classifier.trainingSet.get_object_keys())]), np.int64(self.classifier.trainingSet.label_matrix > 0), self.classifier.trainingSet.labels, np.array([len(misclassifications[i])/float(nPermutations) for i in xrange(len(misclassifications))]).round(2))
    pca_main.Show(True)
def extractDataFromGroupsMysql(self, dependentDataValues, independentDataValues):
    """Fetch dependent/independent data for each configured group via MySQL.

    Returns {group.description: {'dependentData': ..., 'independentData': ...}}.

    NOTE(review): Python 2 code (print statements). The properties path is
    hard-coded to a developer machine; `dependentDataValues` and
    `independentDataValues` are converted to arrays but never used; the
    built SQL appears malformed ("FROM"/"WHERE" without surrounding
    spaces, backticks around values) -- confirm before relying on this.
    """
    from properties import Properties
    from dbconnect import (
        DBConnect, UniqueImageClause, UniqueObjectClause,
        GetWhereClauseForImages, GetWhereClauseForObjects,
        image_key_columns, object_key_columns,
    )
    import sqltools as sql
    p = Properties.getInstance()
    # NOTE(review): hard-coded absolute path -- should come from config.
    p.LoadFile("C:\\Users\\Dalitso\\Desktop\\workspace2\\abhakar\\Properties_README.txt")
    db = DBConnect.getInstance()
    def buildquery(self, theGroup, var):
        # Build a SELECT for the dependent ('dep') or independent ('ind')
        # variable, filtered by the group's key/value pairs; the trailing
        # " AND " of the last condition is sliced off with [:-4].
        pairs = theGroup.pairsDict
        if var == "dep":
            for i in pairs.keys():
                print i
            q = "SELECT " + "`" + self.dependentVariable + "`" + "FROM " + self.table + " WHERE "
            q2 = [i + " LIKE `" + pairs[i] + "` AND " for i in pairs.keys()]
            result = q + "".join(q2)[:-4]
            print result
        if var == "ind":
            for i in pairs.keys():
                print i
            q = "SELECT " + "`" + self.independentVariable + "`" + "FROM " + self.table + "WHERE"
            q2 = [i + "`" + " LIKE `" + pairs[i] + "` AND " for i in pairs.keys()]
            result = q + "".join(q2)[:-4]
            print result
        return result
    import numpy as np
    dataDict = {}
    dependentDataValues = np.array(dependentDataValues)
    independentDataValues = np.array(independentDataValues)
    tmp = {}
    for iGrp in self.groupDefinitions:
        theGroup = iGrp
        # NOTE(review): attribute accessed but not called -- missing ()?
        theGroup.checkMatchCount
        tmp["dependentData"] = db.execute(buildquery(self, theGroup, "dep"))
        tmp["independentData"] = db.execute(buildquery(self, theGroup, "ind"))
        # tmp['pairs'] = theGroup.pairsDict[theGroup.description]
        # NOTE(review): `tmp` is shared across iterations, so every entry
        # of dataDict references the same dict -- confirm intended.
        dataDict[theGroup.description] = tmp
        # print iGrp,theGroup.description,tmp
    return dataDict
def generate(self,dir,files):
    """Write a Makefile for `dir`.

    Collects all files in subdirectories into `files`, layers mk.cfg
    properties from the workspace root down to `dir` (nearest wins),
    then emits Release/Debug configs plus a clang_complete helper target.
    """
    #props=mkProps(Properties(),root)
    # Collect every file below `dir` (paths relative to `dir`).
    for d,subs,subfiles in os.walk(dir):
        if d!=dir:
            for f in subfiles:
                files.append(os.path.join(os.path.relpath(d,dir),f))
    # Build the chain of directories from `dir` up to the workspace root.
    stack=[]
    curdir=dir
    while True:
        stack.append(curdir)
        if curdir==self.root:
            break
        curdir=os.path.abspath(os.path.join(curdir,'..'))
    # Apply defaults first, then overlay mk.cfg from root towards `dir`
    # so the most specific directory wins.
    props=Properties()
    self.assignDefaults(props)
    while len(stack)>0:
        props=mkProps(props,stack[-1])
        del stack[-1]
    output=os.path.join(dir,"Makefile")
    o=open(output,"w")
    # Release optimization level: default -O2; "Custom" means none.
    opt=props.get("OPT_Release")
    if len(opt)==0:
        opt="-O2"
    if opt=="Custom":
        opt=""
    o.write('INC_STD=-I{}\n'.format(' -I'.join(stdIncludes)))
    o.write('OPT_Release={}\n'.format(opt))
    o.write('OPT_Debug=-g\n')
    o.write('\ndefault: Release\n\n')
    self.generateConfig(dir,files,"Release",o,props)
    if self.generateConfig(dir,files,"Debug",o,props):
        pass
    # Helper target used by the editor for clang code completion.
    o.write('\nclang_complete:\n')
    o.write('\tclang -cc1 -std=c++11 -x c++ $(INC_STD) $(INC_Release) -w -fsyntax-only ')
    o.write('-code-completion-macros -v -code-completion-at -:$(LINE):$(COL) -\n\n')
    o.close()
def __init__(self, parent = None):
    """Set up the map-property browser: create the property managers,
    configure the tree view, and populate the translated name lists used
    for enum-like properties (orientation, layer format, render order,
    stagger axis/index, flipping, draw order).
    """
    super().__init__(parent)
    self.mUpdating = False
    self.mObject = None
    self.mMapDocument = None
    # Managers: variant manager for editable values, group manager for headers.
    self.mVariantManager = VariantPropertyManager(self)
    self.mGroupManager = QtGroupPropertyManager(self)
    self.mCustomPropertiesGroup = None
    self.mCombinedProperties = Properties()
    self.mDrawOrderNames = QStringList()
    self.mPropertyToId = QHash()
    self.mOrientationNames = QStringList()
    self.mStaggerAxisNames = QStringList()
    self.mFlippingFlagNames = QStringList()
    self.mLayerFormatNames = QStringList()
    self.mRenderOrderNames = QStringList()
    self.mStaggerIndexNames = QStringList()
    self.mIdToProperty = QHash()
    self.mNameToProperty = QHash()
    self.setFactoryForManager(self.mVariantManager, VariantEditorFactory(self))
    self.setResizeMode(QtTreePropertyBrowser.ResizeToContents)
    self.setRootIsDecorated(False)
    self.setPropertiesWithoutValueMarked(True)
    # Translated display names for the enum-valued properties.
    self.mStaggerAxisNames.append(self.tr("X"))
    self.mStaggerAxisNames.append(self.tr("Y"))
    self.mStaggerIndexNames.append(self.tr("Odd"))
    self.mStaggerIndexNames.append(self.tr("Even"))
    self.mOrientationNames.append(QCoreApplication.translate("Tiled.Internal.NewMapDialog", "Orthogonal"))
    self.mOrientationNames.append(QCoreApplication.translate("Tiled.Internal.NewMapDialog", "Isometric"))
    self.mOrientationNames.append(QCoreApplication.translate("Tiled.Internal.NewMapDialog", "Isometric (Staggered)"))
    self.mOrientationNames.append(QCoreApplication.translate("Tiled.Internal.NewMapDialog", "Hexagonal (Staggered)"))
    self.mLayerFormatNames.append(QCoreApplication.translate("PreferencesDialog", "XML"))
    self.mLayerFormatNames.append(QCoreApplication.translate("PreferencesDialog", "Base64 (uncompressed)"))
    self.mLayerFormatNames.append(QCoreApplication.translate("PreferencesDialog", "Base64 (gzip compressed)"))
    self.mLayerFormatNames.append(QCoreApplication.translate("PreferencesDialog", "Base64 (zlib compressed)"))
    self.mLayerFormatNames.append(QCoreApplication.translate("PreferencesDialog", "CSV"))
    self.mRenderOrderNames.append(QCoreApplication.translate("PreferencesDialog", "Right Down"))
    self.mRenderOrderNames.append(QCoreApplication.translate("PreferencesDialog", "Right Up"))
    self.mRenderOrderNames.append(QCoreApplication.translate("PreferencesDialog", "Left Down"))
    self.mRenderOrderNames.append(QCoreApplication.translate("PreferencesDialog", "Left Up"))
    self.mFlippingFlagNames.append(self.tr("Horizontal"))
    self.mFlippingFlagNames.append(self.tr("Vertical"))
    self.mDrawOrderNames.append(self.tr("Top Down"))
    self.mDrawOrderNames.append(self.tr("Manual"))
    # React to edits made in the browser.
    self.mVariantManager.valueChangedSignal.connect(self.valueChanged)
def clear_link_tables(self, evt=None):
    """Ask the user for confirmation, then drop the table-linking
    bookkeeping tables so CPA will rediscover the links."""
    p = Properties.getInstance()
    confirm = wx.MessageDialog(self, 'This will delete the tables '
                '"%s" and "%s" from your database. '
                'CPA will automatically recreate these tables as it '
                'discovers how your database is linked. Are you sure you '
                'want to proceed?'
                %(p.link_tables_table, p.link_columns_table),
                'Clear table linking information?',
                wx.YES_NO|wx.NO_DEFAULT|wx.ICON_QUESTION)
    if confirm.ShowModal() != wx.ID_YES:
        return
    db = dbconnect.DBConnect.getInstance()
    db.execute('DROP TABLE IF EXISTS %s'%(p.link_tables_table))
    db.execute('DROP TABLE IF EXISTS %s'%(p.link_columns_table))
    db.Commit()