def run(par):
    scenario, cfg_dict, sc_dict = par
    cfg = Configuration(log_to_file=True, **cfg_dict)
    sc = import_object('scenarios', scenario)(cfg.rnd, cfg.seed, **sc_dict)
    cfg.scenario = sc
    cli.run(cfg)
def read_trj(trj_file):
    """
    return: Simulation
    parameters:
        trj_file: string | name of trj file
    """
    simulation = Simulation()
    with open(trj_file, 'r') as trj:
        while True:
            line = trj.readline()
            if not line:
                break
            lattice = Lattice()
            lattice.set_a(np.array(line.split(), dtype=float))
            lattice.set_b(np.array(trj.readline().split(), dtype=float))
            lattice.set_c(np.array(trj.readline().split(), dtype=float))
            configuration = Configuration(lattice=lattice)
            atom_types = trj.readline().split()
            atom_counts = np.array(trj.readline().split(), dtype=int)
            natom = np.sum(atom_counts)
            for i in xrange(natom):
                atom_record = trj.readline().split()
                atom_name = atom_record[0]
                atom_position = np.array(atom_record[1:], dtype=float)
                configuration.insert_atom(Atom(atom_name, atom_position))
            simulation.insert_configuration(configuration)
    return simulation
def _generateArtifactList(options): # load configuration logging.info("Loading configuration...") config = Configuration() config.load(options) # build list logging.info("Building artifact list...") listBuilder = ArtifactListBuilder(config) artifactList = listBuilder.buildList() logging.debug("Generated list contents:") for gat in artifactList: priorityList = artifactList[gat] for priority in priorityList: versionList = priorityList[priority] for version in versionList: logging.debug(" %s:%s", gat, version) #filter list logging.info("Filtering artifact list...") listFilter = Filter(config) artifactList = listFilter.filter(artifactList) logging.debug("Filtered list contents:") for gat in artifactList: priorityList = artifactList[gat] for priority in priorityList: versionList = priorityList[priority] for version in versionList: logging.debug(" %s:%s", gat, version) logging.info("Artifact list generation done") return artifactList
def url_handler(ref):
    uri = None
    prefix = None
    conf = Configuration()
    datasetBase = conf.get_value("datasetBase")
    webBase = conf.get_value("webBase")
    webResourcePrefix = conf.get_value("webResourcePrefix")
    if (len(webResourcePrefix) == 0):
        splitted = ref.split("/")
        prefix = splitted[0]
        if (prefix in get_supported_prefixes()):
            uri = "%s%s" % (datasetBase, ref[len(prefix)+1:])
            return uri, prefix
        else:
            prefix = None
            uri = datasetBase + ref
            return uri, prefix
    else:
        if (ref.startswith(webResourcePrefix)):
            prefix = None
            uri = datasetBase + ref
            return uri, prefix
        else:
            splitted = ref.split("/")
            prefix = splitted[0]
            if (prefix in get_supported_prefixes()):
                uri = datasetBase + ref.replace(prefix + "/", conf.get_value("webResourcePrefix"))
                return uri, prefix
            else:
                raise ValueError("Unsupported type '%s'" % splitted[0])
class Kobol(object):

    def __init__(self, directory=None):
        self.home = os.path.normpath(directory or os.getcwd()) + os.sep
        self.config = Configuration()
        self.config['home'] = self.home
        self.site = Site()
        self.site.config(self.config)

    def scaffold(self, **kwargs):
        if os.path.isfile(self.home + '/.kobol'):
            return False
        elif kwargs.get('dry') != True:
            skel = os.path.dirname(os.path.abspath(__file__)) + '/skel/'
            os.system("cp -R %s* %s.kobol %s" % (skel, skel, self.home))
            return True

    def load_config_files(self, files):
        self.config.load(files)
        self.site.config(self.config)

    def main(self):
        self.scaffold()
def closeEvent(self, event):
    if not self.trayIcon.isVisible() and Configuration.icon:
        self.trayIcon.show()
        self.hide()
        event.ignore()
    else:
        termine = True
        # Check that all downloads have finished
        for download in self.downloads.instance.downloads:
            if download.state == 3:
                termine = False
        # If a download is still in progress, show a confirmation window
        if not termine and not Configuration.close_window:
            # A small message box with a clickable button :)
            msgBox = QMessageBox(QMessageBox.Question,
                                 u"Voulez-vous vraiment quitter?",
                                 u"Un ou plusieurs téléchargements sont en cours, et pyRex ne gère pas encore la reprise des téléchargements. Si vous quittez maintenant, toute progression sera perdue!")
            checkBox = QCheckBox(u"Ne plus afficher ce message", msgBox)
            checkBox.blockSignals(True)
            msgBox.addButton(checkBox, QMessageBox.ActionRole)
            msgBox.addButton("Annuler", QMessageBox.NoRole)
            yesButton = msgBox.addButton("Valider", QMessageBox.YesRole)
            msgBox.exec_()
            if msgBox.clickedButton() == yesButton:
                # Save the state of the "don't show again" checkbox
                if checkBox.checkState() == Qt.Checked:
                    Configuration.close_window = True
                    Configuration.write_config()
                event.accept()
            else:
                event.ignore()
        else:
            event.accept()
def TCPClient(): MYIP = Configuration.getPublicIP() ID = Configuration.getMyID() print threading.currentThread().getName(), 'TCP Client Starting. I am Node#', ID while True: user_input = raw_input("format: send <Dest Node ID> <Message>") cmd = user_input.split(" ") for ip in Configuration.IPTABLE: if ip == MYIP: continue #Ignore itself TCP_IP = ip TCP_PORT = Configuration.TCPPORT BUFFER_SIZE = 1024 MESSAGE = "Hello, World! from Node#%d" % ID s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) flag = True while flag: try: s.connect((TCP_IP, TCP_PORT)) s.send(MESSAGE) printdata("TCP Send", ID, ID, Configuration.getID(TCP_IP), MESSAGE) data = s.recv(BUFFER_SIZE) s.close() flag = False except: printdata("TCP Client Reconnect", ID, ID, Configuration.getID(TCP_IP), "@_@") time.sleep(1) #Reconnect delay time.sleep(5) print threading.currentThread().getName(), 'TCP Client Exiting. I am Node #', ID return
def TCPSend(dest, content): TCP_IP = Configuration.getIP(dest) MYIP = Configuration.getPublicIP() if TCP_IP == MYIP: print "TCPSend() terminates. (Error: sending to itself)" #Ignore itself return TCP_PORT = Configuration.TCPPORT ID = Configuration.getMyID() print threading.currentThread().getName(), 'TCP Client Starting. I am Node#', ID BUFFER_SIZE = 1024 MESSAGE = "Hello, World! from Node#%d" % ID s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) flag = True while flag: try: s.connect((TCP_IP, TCP_PORT)) s.send(content) printdata("TCP Send", ID, ID, Configuration.getID(TCP_IP), content) data = s.recv(BUFFER_SIZE) s.close() flag = False except: printdata("TCP Client Reconnect", ID, ID, Configuration.getID(TCP_IP), "@_@") time.sleep(1) #Reconnect delay time.sleep(5) print threading.currentThread().getName(), 'TCP Client Exiting Successfully. I am Node #', ID return
def test_filter_excludedTypes(self): config = Configuration() alf = Filter(config) config.excludedTypes = ["zip", "war"] al = copy.deepcopy(self.artifactList) al["foo:bar"] = { "1": { "1.0.0": ArtifactSpec("http://repo1.maven.org/maven2/", [ArtifactType("zip", True, set([''])), ArtifactType("pom", False, set(['']))]) } } alf._filterExcludedTypes(al) self.assertFalse('foo:bar' in al) al["foo:bar"] = { "1": { "1.0.0": ArtifactSpec("http://repo1.maven.org/maven2/", [ArtifactType("zip", True, set([''])), ArtifactType("pom", False, set(['']))]) } } config.gatcvWhitelist = ["*:zip:scm-sources:*"] alf._filterExcludedTypes(al) self.assertFalse('foo:bar' in al) al["foo:bar"] = { "1": { "1.0.0": ArtifactSpec("http://repo1.maven.org/maven2/", [ArtifactType("zip", True, set(['scm-sources'])), ArtifactType("pom", False, set(['']))]) } } alf._filterExcludedTypes(al) self.assertTrue('foo:bar' in al)
def test_filter_excluded_GAVs(self): config = Configuration() alf = Filter(config) config.excludedGAVs = ["com.google.guava:guava:1.1.0"] al = copy.deepcopy(self.artifactList) self.assertTrue('1.1.0' in al['com.google.guava:guava:pom']['1']) alf._filterExcludedGAVs(al) self.assertFalse('1.1.0' in al['com.google.guava:guava:pom']['1']) config.excludedGAVs = ["com.google.guava:guava:1.0*"] al = copy.deepcopy(self.artifactList) self.assertTrue('1.0.0' in al['com.google.guava:guava:pom']['1']) self.assertTrue('1.0.1' in al['com.google.guava:guava:pom']['1']) self.assertTrue('1.0.2' in al['com.google.guava:guava:pom']['2']) self.assertTrue('1.0.0' in al['com.google.guava:guava:pom']['3']) alf._filterExcludedGAVs(al) self.assertFalse('1.0.0' in al['com.google.guava:guava:pom']['1']) self.assertFalse('1.0.1' in al['com.google.guava:guava:pom']['1']) self.assertFalse('2' in al['com.google.guava:guava:pom']) self.assertFalse('1.0.0' in al['com.google.guava:guava:pom']['3']) config.excludedGAVs = ["com.google.guava:*"] al = copy.deepcopy(self.artifactList) self.assertTrue('com.google.guava:guava:pom' in al) alf._filterExcludedGAVs(al) self.assertFalse('com.google.guava:guava:pom' in al)
def show_form(self):
    '''
    Displays this form, blocking until the user closes it.  When it is closed,
    this method will return a Configuration object containing the settings that
    this dialog was displaying when it was closed (these settings were also just
    saved on the filesystem, so they are also the settings that this dialog will
    display the next time it is opened).

    If the user clicks 'Cancel', this method will simply return None.
    '''
    log.debug("opened the settings dialog.")

    defaults = Configuration()
    defaults.load_defaults()
    self.__set_configuration(defaults)
    self.__switch_to_best_tab()

    dialogAnswer = self.ShowDialog()  # blocks

    if dialogAnswer == DialogResult.OK:
        config = self.__get_configuration()
        config.save_defaults()
        log.debug("closed the settings dialog.")
    else:
        config = None
        log.debug("cancelled the settings dialog.")

    return config
def acceptor_udp_server(): ID = Configuration.getMyID() #UDP_PORT = Configuration.ACCEPTOR_PORT UDP_PORT = Configuration.PORTS["prepare"] print threading.currentThread().getName(), ' Acceptor UDP Server Starting. I am Node#', ID, "at port", UDP_PORT sock = socket.socket(socket.AF_INET, # Internet socket.SOCK_DGRAM) # UDP sock.bind(('', UDP_PORT)) while True: data, addr = sock.recvfrom(1024) # buffer size is 1024 bytes peerID = Configuration.getID( addr[0] ) #try json_object = transferInput(data) if json_object['msgname'] == "commit": onCommit(json_object['entryID'], json_object['msglist'], peerID) printdata("Acceptor Recv Commit", ID, peerID, ID, data) elif json_object['msgname'] == "accept": onAccept(json_object['entryID'], json_object['msglist'], peerID) printdata("Acceptor Recv Accept", ID, peerID, ID, data) elif json_object['msgname'] == "prepare": onPrepare(json_object['entryID'], json_object['msglist'], peerID) printdata("Acceptor Recv Prepare", ID, peerID, ID, data) #except: # print "Can't parse data:", data, sys.exc_info()[0] print threading.currentThread().getName(), ' Acceptor UDP Server Exiting. I am Node#', ID return
def clear():
    """ Clear the screen, only if the configuration says it's OK. """
    if Configuration.getClear() and Configuration.getInteractive():
        sys.stdout.write(Configuration.clearcode)
def runMain(): # First, we import our devices from our configuration file. These will be split into two different groups, those # controlled by Philips Hue and those controlled by Insteon. configuration = Configuration() config = configuration.loadConfig() hueDevices = {} insteonDevices = {} for device in config['devices']['hue']: hueDevices[device] = config['devices']['hue'][device] for device in config['devices']['insteon']: insteonDevices[device] = config['devices']['insteon'][device] insteon = Insteon() hue = Hue() roomba = Roomba() # Now we set up the voice recognition using Pocketsphinx from CMU Sphinx. pocketSphinxListener = PocketSphinxListener() # We want to run forever, or until the user presses control-c, whichever comes first. while True: try: command = pocketSphinxListener.getCommand().lower() command = command.replace('the', '') if command.startswith('turn'): onOrOff = command.split()[1] deviceName = ''.join(command.split()[2:]) if deviceName in hueDevices: deviceId = hueDevices[deviceName]['deviceID'] hue.turn(deviceId=deviceId, onOrOff=onOrOff) if deviceName in insteonDevices: deviceId = insteonDevices[deviceName]['deviceID'] insteon.turn(deviceId=deviceId, onOrOff=onOrOff) if deviceName == 'roomba': roomba.turn(onOrOff) elif command.startswith('roomba'): action = ' '.join(command.split()[1:]) if action == 'clean': roomba.clean() if action == 'go home': roomba.goHome() # This will allow us to be good cooperators and sleep for a second. # This will give the other greenlets which we have created for talking # to the Hue and Insteon hubs a chance to run. gevent.sleep(1) except (KeyboardInterrupt, SystemExit): print 'People sometimes make mistakes, Goodbye.' sys.exit() except Exception as e: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout) sys.exit()
def do_create(args):
    """Create a droplet based on configuration"""
    config = Configuration()
    if not config.read_config(args.config_file):
        return False
    create(config)
    return True
def quote(uri):
    uri = uri2str(uri)
    #return urllib.quote(uri)
    conf = Configuration()
    fixUnescapedCharacters = conf.get_value("fixUnescapedCharacters")
    for c in fixUnescapedCharacters:
        uri = uri.replace(c, urllib.quote(c))
    return uri
def do_destroy(args):
    """Destroy a droplet based on configuration"""
    config = Configuration()
    if not config.read_config(args.config_file):
        return False
    destroy(config)
    return True
def setUp(self):
    test_config = TestConfig()
    self.data_path = test_config.get_data_path()
    config = Configuration(os.path.join(self.data_path, 'build.ini'))
    db_name = config.get('mongodb1', 'db_name')
    host = config.get('mongodb1', 'host')
    config = MongoDBConfig(db_name, host)
    self.db = MongoDB(config)
def wipeConfiguration(self):
    conf = Configuration()
    conf.wipe()
    msgbox = QtGui.QMessageBox(self)
    msgbox.setText(self.tr("The application will close. Please restart manually"))
    msgbox.setModal(True)
    ret = msgbox.exec_()
    self.mustQuit()
def __init__(self, parent=None): super(MainWindow, self).__init__(parent) self.ui = uic.loadUi("mainwindow.ui",self) self.setWindowTitle("python-whiteboard") self.setWindowFlags( qt.Qt.CustomizeWindowHint | qt.Qt.WindowMinimizeButtonHint | qt.Qt.WindowCloseButtonHint ) self.connected = False self.calibrated = False self.active = False self.mustquit = False self.wii = None self.cursor = None self.batteryLevel.reset() self.batteryLevel.setRange(0,99) self.batteryLevel.setValue(0) conf = Configuration() self.ui.pushButtonConnect.clicked.connect(self.connectWii) self.ui.pushButtonCalibrate.clicked.connect(self.calibrateWiiScreen) self.ui.pushButtonActivate.clicked.connect(self.activateWii) self.ui.pushButtonLoadCal.clicked.connect(self.calibrateWiiFromSettings) self.ui.pushButtonSettings.clicked.connect(self.showHideSettings) self.ui.comboProfiles.currentIndexChanged.connect(self.changeProfile) self.updateButtons() self.ui.actionQuit.triggered.connect(self.mustQuit) self.ui.actionHelp.triggered.connect(self.showAboutDlg) self.ui.actionNew_Profile.triggered.connect(self.addProfile) self.ui.actionDelete_Current_Profile.triggered.connect(self.delCurrentProfile) self.ui.actionWipe_configuration.triggered.connect(self.wipeConfiguration) self.ui.moveOnlyCheck.setChecked( conf.getValueStr('moveonly') == 'Yes' ) self.ui.moveOnlyCheck.stateChanged.connect(self.checkMoveOnly) if conf.getValueStr("autoconnect") == "Yes": self.timer = qt.QTimer(self) self.timer.setInterval(500) self.timer.timeout.connect(self.autoConnect) self.timer.start() self.timer2 = qt.QTimer(self) self.timer2.setInterval(4000) self.timer2.timeout.connect(self.checkWii) self.timer2.start() self.confDialog = ConfigDialog(self, self.wii) layout = QtWidgets.QGridLayout() layout.addWidget(self.confDialog) self.ui.confContainer.setLayout(layout) self.ui.confContainer.setVisible(False) self.refreshProfiles() self.center()
def SetGSize(self, widget, x=None, y=None):
    self.VBox.remove(self.grid.Widget())
    c = Configuration()
    c.GridXX = x
    c.GridYY = y
    self.grid = PipesGrid.PipesGrid(c.GridXX, c.GridYY)
    self.grid.Widget().show_all()
    self.VBox.add(self.grid.Widget())
    self.window.resize(1, 1)
def main():
    """ Main entry point: defines the sequence of the program. """
    try:
        format = ['%(asctime)s %(message)s', '%Y-%m-%d %H:%M:%S']
        resultfiles = []
        pas = 0
        total = 0
        create_logger(format)
        opts, args = getopt.getopt(sys.argv[1:], "hc:", ["help", "config="])
        source = str()
        if not opts:
            usage()
        for opt, arg in opts:
            if opt in ("-h", "--help"):
                usage()
            elif opt in ("-c", "--config"):
                source = arg
        # Configurations init
        Configuration.load(source, format)
        logging.info("=" * 100)
        logging.info("Regression started")
        logging.info("=" * 100)
        start_time = time.clock()
        # Execute Scripts
        resultfiles = execute()
        logging.info("=" * 100)
        logging.info("Results")
        logging.info("=" * 100)
        # Dispatch results
        pas, total = dispatch(resultfiles)
        total_time = time.clock() - start_time
        logging.info("=" * 100)
        logging.info("%d passed out of %d" % (pas, total))
        logging.info("Total elapsed time(secs) %.2f" % (total_time))
        logging.info("=" * 100)
    except (getopt.GetoptError, Exception) as e:
        logging.info("Exception %s" % str(e))
    except:
        logging.info("Fatal error occurred.")
def __init__(self, config: Configuration):
    super().__init__('RedisCassandra')
    redis_host = config.get('redis.host')
    redis_port = config.get('redis.port')
    redis_pass = config.get('redis.password')
    # TODO: change to Sentinel so we can handle multiple hosts
    self.redis_conn = StrictRedis(host=redis_host, port=redis_port, password=redis_pass)
    self.cache = get_cache()
def wait():
    """ Prompt and wait for user to press enter, only if the configuration says it's OK. """
    if Configuration.getClear() and Configuration.getInteractive():
        print
        print "press enter",
        raw_input()
def saveCalibrationPars(self, wii):
    conf = Configuration()
    for i, p in enumerate(wii.screenPoints):
        conf.saveValue("screenPoint" + str(i) + "x", str(p[0]))
        conf.saveValue("screenPoint" + str(i) + "y", str(p[1]))
    for i, p in enumerate(wii.calibrationPoints):
        conf.saveValue("wiiPoint" + str(i) + "x", str(p[0]))
        conf.saveValue("wiiPoint" + str(i) + "y", str(p[1]))
def refreshProfiles(self):
    conf = Configuration()
    self.ui.comboProfiles.clear()
    self.ui.comboProfiles.addItem(self.tr("default"))
    for p in conf.getProfileList():
        self.ui.comboProfiles.addItem(p)
    self.confDialog.refreshWidgets()
    self.ui.moveOnlyCheck.setChecked(conf.getValueStr('moveonly') == 'Yes')
def checkMoveOnly(self, i):
    conf = Configuration()
    if self.sender().isChecked():
        conf.saveValue('moveonly', 'Yes')
        if self.cursor:
            self.cursor.noClicks = True
    else:
        conf.saveValue('moveonly', 'No')
        if self.cursor:
            self.cursor.noClicks = False
def delCurrentProfile(self):
    i = self.ui.comboProfiles.currentIndex()
    currentProfile = unicode(self.ui.comboProfiles.currentText())
    if i == 0:
        return
    conf = Configuration()
    profiles = conf.getProfileList()
    profiles = [p for p in profiles if p != currentProfile]
    conf.setProfileList(profiles)
    self.refreshProfiles()
    self.ui.comboProfiles.setCurrentIndex(0)
def get_document_url(uri, prefix, conf=None):
    if (conf == None):
        conf = Configuration()
    webResourcePrefix = conf.get_value("webResourcePrefix")
    datasetBase = conf.get_value("datasetBase")
    webBase = conf.get_value("webBase")
    if (len(webResourcePrefix) == 0):
        return "%s%s/%s" % (webBase, prefix, uri[len(datasetBase):])
    else:
        return uri.replace(datasetBase, webBase).replace(webResourcePrefix, "%s/" % prefix)
def cr(ctx):
    try:
        if (not Configuration.exists()):
            Utils.print_encoded("Please inform your TFS' information\n")
            ctx.invoke(configure,
                       url=click.prompt("Url"),
                       username=click.prompt("Username"),
                       password=click.prompt("Password"))
            ctx.exit()
        repo = git.Repo('.')
        ctx.obj = Repository(repo, RepositoryUtils(repo), Tfs(Configuration.load()))
    except git.exc.InvalidGitRepositoryError:
        Error.abort("You're not on a valid git repository")
def __init__(self):
    configuration = Configuration()
    self.token_bot = configuration.config['TELEGRAM']['BOT_TOKEN']
    self.chat_id = configuration.config['TELEGRAM']['CHAT_ID']
    self.bot = telegram.Bot(self.token_bot)
    return global_step


if __name__ == "__main__":
    file_name = ["data/test_batch.bin"]
    exp_name = sys.argv[1]
    a_function = tf.nn.relu if sys.argv[2] == "relu" else tf.sigmoid
    pipeline = ip.DataPipeline(file_name)
    test_x, test_y = pipeline.get_batch_op()
    config = Configuration(input_size=3072,
                           output_size=10,
                           examples_per_epoches=50000,
                           batch_size=100,
                           a_function=a_function)
    dropout = tf.placeholder(tf.float32)
    convo_model = m.ConvoModel(config)
    convo_model.initialize(test_x, dropout)
    predictions = convo_model.infer()
    top_k = tf.nn.in_top_k(predictions, test_y, 1)
    summary_op = tf.merge_all_summaries()
    summary_writer = tf.train.SummaryWriter("experiments/%s" % exp_name)
        return [input_ids, attention_mask], np.array(y_batch, dtype=np.int32)

    def on_epoch_end(self):
        """Updates indexes after each epoch"""
        if self.shuffle:
            np.random.shuffle(self.indices)


if __name__ == '__main__':
    import argparse, sys
    parser = argparse.ArgumentParser()
    # Mandatory parameters first
    parser.add_argument('--only_create_new_generator', action='store_true', help='create_new_generator')
    parser.add_argument('--not_save_new_generator', action='store_true', help='not_save_new_generator')
    parser.add_argument('--t', action='store_true', help='use torch as backend')
    args = parser.parse_args()
    not_save_new_generator = args.not_save_new_generator
    only_create_new_generator = args.only_create_new_generator
    use_torch = args.t
    Configuration.configure()
    LMTC().train(only_create_new_generator, not_save_new_generator, use_torch)
def validate(self):
    """validate and finalize configuration"""

    self.config['remote'] = self.remote

    if self.remote:  # setup remote
        deviceip = self.config.get('deviceip')
        deviceport = self.config['deviceport']
        if deviceip or deviceport == -1:
            self._setupRemote(deviceip, deviceport)

    # fix webserver for --develop mode
    if self.config.get('develop'):
        webserver = self.config.get('webserver')
        if (not webserver) or (webserver == 'localhost'):
            self.config['webserver'] = utils.getLanIp()

    # webServer can be used without remoteDevice, but is required when using remoteDevice
    if self.config.get('deviceip') or self.config.get('deviceroot'):
        if self.config.get('webserver', 'localhost') == 'localhost' or not self.config.get('deviceip'):
            raise ConfigurationError(
                "When running Talos on a remote device, you need to provide a webServer and optionally a remotePort")

    fennecIDs = self.config.get('fennecIDs')
    if fennecIDs and not os.path.exists(fennecIDs):
        raise ConfigurationError(
            "Unable to find fennec IDs file, please ensure this file exists: %s" % fennecIDs)

    # robocop based tests (which use fennecIDs) do not use or need the pageloader extension
    if fennecIDs:
        self.config['extensions'] = []

    # generic configuration validation
    Configuration.validate(self)

    # ensure the browser_path is specified
    msg = "Please specify --executablePath"
    if not 'print_tests' in self.parsed and not self.config.get('browser_path'):
        self.error(msg)

    # add test_name_extension to config
    # http://hg.mozilla.org/build/talos/file/c702ff8892be/talos/PerfConfigurator.py#l107
    noChrome = self.config.get('noChrome')
    mozAfterPaint = self.config.get('tpmozafterpaint')
    if noChrome or mozAfterPaint and not self.config.get('test_name_extension'):
        # (it would be nice to handle this more elegantly)
        test_name_extension = ''
        if noChrome:
            test_name_extension += '_nochrome'
        if mozAfterPaint:
            test_name_extension += '_paint'
        self.config['test_name_extension'] = test_name_extension

    # BBB: (resultsServer, resultsLink) -> results_url
    resultsServer = self.config.pop('resultsServer', None)
    resultsLink = self.config.pop('resultsLink', None)
    if resultsServer and resultsLink:
        if self.config.get('results_urls'):
            raise ConfigurationError(
                "Can't use resultsServer/resultsLink and results_url: use results_url instead")
        self.config['results_urls'] = ['http://%s%s' % (resultsServer, resultsLink)]

    # BBB: remove doubly-quoted xperf values from command line
    # (needed for buildbot)
    # https://bugzilla.mozilla.org/show_bug.cgi?id=704654#c43
    xperf_path = self.config.get('xperf_path', '')
    if 'xperf_path' in self.parsed and len(xperf_path) > 2:
        quotes = ('"', "'")
        for quote in quotes:
            if xperf_path.startswith(quote) and xperf_path.endswith(quote):
                self.config['xperf_path'] = xperf_path[1:-1]
                break

    # fix options for --develop
    if self.config.get('develop'):
        if not self.config.get('webserver'):
            self.config['webserver'] = 'localhost:%s' % utils.findOpenPort('127.0.0.1')

    # set preferences
    if self.config.get('tpmozafterpaint'):
        self.config['preferences']['dom.send_after_paint_to_content'] = True
    extraPrefs = self.config.pop('extraPrefs', {})
    extraPrefs = dict([(i, utils.parsePref(j)) for i, j in extraPrefs.items()])
    self.config['preferences'].update(extraPrefs)

    # remove None values from preferences;
    # allows overrides
    self.config['preferences'] = dict([(key, value)
                                       for key, value in self.config['preferences'].items()
                                       if value is not None])

    # fix talos.logfile preference to be an absolute path
    # http://hg.mozilla.org/build/talos/file/e1022c38a8ed/talos/PerfConfigurator.py#l129
    if 'talos.logfile' in self.config['preferences']:
        # if set to empty string, use the "global" browser_log
        # in practice these should always be in sync
        log_file = self.config['preferences']['talos.logfile'].strip() or self.config['browser_log']
        log_file = os.path.abspath(log_file)
        self.config['preferences']['talos.logfile'] = log_file

    if 'init_url' in self.config:
        # fix init_url
        self.config['init_url'] = self.convertUrlToRemote(self.config['init_url'])

    # get filters
    ignore_first = self.config.pop('ignore_first')
    if ignore_first:
        # BBB handle legacy ignore_first case
        # convert --ignoreFirst to the appropriate set of filters
        if self.config.get('filters'):
            raise ConfigurationError(
                "Can't use --ignoreFirst and --filter; use --filter instead")
        self.config['filters'] = ['ignore_first', 'median']

    # convert options.filters to [[filter, [args]]]
    filters = []
    _filters = self.config.get('filters', self.filters[:])
    for position, filter_name in enumerate(_filters):
        if isinstance(filter_name, basestring):
            try:
                f = filter.parse(filter_name)
                # Check if last filter is scalar filter
                # and if all the rest are series filters
                if position == len(_filters) - 1:
                    assert f[0] in filter.scalar_filters,\
                        "Last filter has to be a scalar filter."
                else:
                    assert f[0] in filter.series_filters,\
                        "Any filter except the last has to be a series filter."
            except Exception, e:
                raise ConfigurationError("Bad value for filter '%s': %s" % (filter_name, e))
        else:
            f = filter_name
        filters.append(f)
if __name__ == "__main__":
    # event loop
    loop = asyncio.get_event_loop()

    # signal
    signal.signal(signal.SIGINT, clash_handler)

    # working directory
    working_path = os.path.abspath(sys.argv[1])
    print("working directory: {0}".format(working_path))

    # make data directory
    _data_directory = "{0}/data".format(working_path)
    if os.path.exists(_data_directory):
        shutil.rmtree(_data_directory)
        print("remove old data folder")
    os.mkdir(_data_directory)
    print("make data folder")

    # load configuration
    config = Configuration("", [])
    _config_path = "{0}/config.yml".format(working_path)
    _config_loader = ConfigLoader(path=_config_path, config=config)
    print("Listener:\n{0}\nSpeaker:\n{1}".format(config.listener_token, config.speaker_token_list))

    # launch bot
    bot_manager = BotManager(config=config, loop=loop, working_path=_data_directory)

    # run loop
    loop.run_forever()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Oct 29 14:55:37 2019 @author: simona """ from mpi4py import MPI comm_world = MPI.COMM_WORLD rank_world = comm_world.Get_rank() from configuration import Configuration C = Configuration() solver_init = C.full_solver_init solver_parameters = C.full_solver_parameters no_solvers = C.no_full_solvers no_samplers = C.no_samplers # solver_init ... initializes the object of the (full, surrogate) solver # solver_parameters ... list of dictionaries with initialization parameters # no_solvers ... number of solvers to be created # no_samplers ... number of samplers that request solutions import numpy as np from collections import deque Solvers = [] for i in range(no_solvers): Solvers.append(solver_init(**solver_parameters[i])) samplers_rank = np.arange(no_samplers) is_active_sampler = np.array([True] * len(samplers_rank))
# Images can either be extracted from a video file or a batch of single photographs. Select
# the example for the test run.
type = 'video'
if type == 'image':
    names = glob.glob('Images/2012*.tif')
    # names = glob.glob('Images/Moon_Tile-031*ap85_8b.tif')
    # names = glob.glob('Images/Example-3*.jpg')
else:
    # file = 'short_video'
    file = 'Moon_Tile-024_043939'
    names = 'Videos/' + file + '.avi'
print(names)

start_over_all = time()

# Get configuration parameters.
configuration = Configuration()

try:
    frames = Frames(names, type=type)
    print("Number of images read: " + str(frames.number))
    print("Image shape: " + str(frames.shape))
except Exception as e:
    print("Error: " + str(e))
    exit()

# Rank the frames by their overall local contrast.
rank_frames = RankFrames(frames, configuration)
start = time()
rank_frames.frame_score()
end = time()
print('Elapsed time in ranking images: {}'.format(end - start))
print("Index of maximum: " + str(rank_frames.frame_ranks_max_index))
def __init__(self, parent=None): """ Initialize the Planetary System LRGB Aligner environment. :param parent: None """ # The (generated) QtGui class is contained in module main_gui.py. QtWidgets.QWidget.__init__(self, parent) self.ui = Ui_MainWindow() self.ui.setupUi(self) # Insert the photo viewer into the main GUI. self.ImageWindow = PhotoViewer(self) self.ImageWindow.setObjectName("ImageWindow") self.ui.verticalLayout_3.insertWidget(1, self.ImageWindow, stretch=1) # Connect main GUI events with method invocations. self.ui.buttonLoadBW.clicked.connect(self.load_bw_image) self.ui.buttonLoadColor.clicked.connect(self.load_color_image) self.ui.buttonRegistration.clicked.connect(self.compute_registration) self.ui.buttonComputeLRGB.clicked.connect(self.compute_lrgb) self.ui.buttonSetConfigParams.clicked.connect(self.edit_configuration) self.ui.buttonSaveRegisteredColorImage.clicked.connect( self.save_registered_image) self.ui.buttonSaveLRGB.clicked.connect(self.save_lrgb_image) self.ui.buttonExit.clicked.connect(self.closeEvent) self.ui.radioShowBW.clicked.connect( lambda: self.show_pixmap(pixmap_index=0)) self.ui.radioShowColorOrig.clicked.connect( lambda: self.show_pixmap(pixmap_index=1)) self.ui.radioShowColorRigidTransform.clicked.connect( lambda: self.show_pixmap(pixmap_index=2)) self.ui.radioShowMatches.clicked.connect( lambda: self.show_pixmap(pixmap_index=3)) self.ui.radioShowColorOptFlow.clicked.connect( lambda: self.show_pixmap(pixmap_index=4)) self.ui.radioShowLRGB.clicked.connect( lambda: self.show_pixmap(pixmap_index=5)) # Initialize the path to the home directory. self.current_dir = str(Path.home()) # Initialize instance variables. self.image_reference = None self.image_reference_8bit_gray = None self.image_target = None self.image_target_8bit_gray = None self.image_dewarped = None self.image_lrgb = None self.pixmaps = [None, None, None, None, None, None] self.current_pixmap_index = None # Initialize status variables self.status_list = [ False, False, False, False, False, False, False, False ] self.status_pointer = { "initialized": 0, "bw_loaded": 1, "color_loaded": 2, "rigid_transformed": 3, "optical_flow_computed": 4, "lrgb_computed": 5, "results_saved": 6 } self.radio_buttons = [ self.ui.radioShowBW, # 0 self.ui.radioShowColorOrig, # 1 self.ui.radioShowColorRigidTransform, # 2 self.ui.radioShowMatches, # 3 self.ui.radioShowColorOptFlow, # 4 self.ui.radioShowLRGB ] # 5 self.control_buttons = [ self.ui.buttonSetConfigParams, # 0 self.ui.buttonLoadBW, # 1 self.ui.buttonLoadColor, # 2 self.ui.buttonRegistration, # 3 self.ui.buttonSaveRegisteredColorImage, # 4 self.ui.buttonComputeLRGB, # 5 self.ui.buttonSaveLRGB, # 6 self.ui.buttonExit ] # 7 self.max_button = [0, 1, 2, 4, 5, 6, 6] self.max_control_button = [2, 3, 4, 4, 6, 7, 8] self.status_busy = False # Create configuration object and set configuration parameters to standard values. self.configuration = Configuration() # Write the program version into the window title. self.setWindowTitle(self.configuration.version) # Start the workflow thread. It controls the computations and control of external devices. # By decoupling those activities from the main thread, the GUI is kept from freezing during # long-running activities. self.workflow = Workflow(self) sleep(self.configuration.wait_for_workflow_initialization) # The workflow thread sends signals during computations. Connect those signals with the # appropriate GUI activity. 
self.workflow.set_status_busy_signal.connect(self.set_busy) self.workflow.set_status_signal.connect(self.set_status) self.workflow.set_error_signal.connect(self.show_error_message) # Reset downstream status flags. self.set_status(0)
import sys  # used below for sys.getsizeof
from timeit import default_timer as timer

from configuration import Configuration

gamma = 0.001
beta = 0.100
rules = {'1': [('0', gamma)],
         '01': [('11', beta)],
         '10': [('11', beta)]}
base = 2
N = 8
Nmax = 20
Nmin = 2
size = [0] * Nmax
time = [0] * Nmax

t1 = timer()
Configuration.init_globals(base, N)
adjacency = [[1] * N for i in range(N)]
for k in range(N):
    adjacency[k][k] = 0
T = Transition(Configuration, rules)
T.compute(adjacency)
eig_T = T.get_eigenvals()
print('Elapsed time for regular configuration: %s' % (timer() - t1))
print(sys.getsizeof(T.elements))
print(sys.getsizeof(T))

from symmetric_configuration import *

t1 = timer()
SymConf.init_globals(base, N)
TS = Transition(SymConf, rules)
import json import helper_test from configuration import Configuration from helper import Helper debug = True configuration = Configuration(name='actioncam', path=helper_test.config_path(), debug=debug) config = configuration.config helper = Helper(config) def test_helper_infos(): print('test_infos') infos = helper.infos_self() for info in infos: print(info) assert len(infos) > 1, 'test_infos failed finding infos' def test_helper_loghome(): print("test_helper_loghome") log_good = helper.log_home("actioncam") print("actioncam log home is " + log_good) log_bad = helper.log_home("DOESNOTEXIST") print("all other log home is also " + log_bad) assert log_good == log_bad, "Failed getting valid Loghome for unknown section"
# ----------------------------------------------------------------------- # amber_test_generation.py # Author: Hari Raval # ----------------------------------------------------------------------- import sys import re from configuration import Configuration # default Configuration object to be used in the Amber test generation default_config = Configuration(timeout=20000, workgroups=65532, threads_per_workgroup=1, saturation_level=0, subgroup=0) # write the necessary "boiler plate" code to generate an Amber test, along with Shader # Storage Buffer Object(s), workgroup size, and global variable to # assign thread IDs. output is the file being written to, timeout determines (in ms) when the # program will terminate, num_testing_threads is the number of threads being tested, and saturation_level is the # type of saturation (if any) def write_amber_prologue(output, timeout, threads_per_workgroup, workgroups, num_testing_threads, saturation_level, subgroup_setting): output.write("#!amber\n") output.write("\n") output.write("SET ENGINE_DATA fence_timeout_ms " + str(timeout) + "\n") output.write("\n") output.write("SHADER compute test GLSL\n") # determine whether the same or different subgroups will be used for testing to update versions and extensions
def bill(self, session, subscriber, destination_number, context, duration): if context == 'LOCAL': bill_log.info('===========================================================================') bill_log.info('LOCAL Context') bleg_connected = session.getVariable('bleg_uuid') hangup_cause = session.getVariable('hangup_cause') subscriber = session.getVariable('bleg_destination_number') #print session.getVariable('bleg_billsec') configuration = Configuration() if context == 'OUTBOUND': bill_log.info('===========================================================================') bill_log.info('OUTBOUND Context Bill subscriber %s destination %s' % (subscriber, destination_number)) # get rate rate = self.get_rate(destination_number) call_cost = self.get_call_cost(duration, rate[3]) # set destination_name and cost for the CDR session.setVariable('destination_name', rate[1]) session.setVariable('cost', str(call_cost)) bill_log.info('Call duration: %d sec Call cost: %.2f' % (duration, call_cost)) sub = Subscriber() try: previous_balance = sub.get_balance(subscriber) current_balance = previous_balance - call_cost real_balance = 0 if current_balance < 0 else current_balance bill_log.info('Previous balance: %.2f Current Balance: %.2f' % (previous_balance, real_balance)) sub.set_balance(subscriber, real_balance) bill_log.info('Billing %s completed successfully' % subscriber) except SubscriberException as e: bill_log.error('Error during billing the subscriber: %s' % e) if context == 'INBOUND': bill_log.info('===========================================================================') bill_log.info('INBOUND Context') bleg_connected = session.getVariable('bleg_uuid') hangup_cause = session.getVariable('hangup_cause') subscriber = session.getVariable('bleg_destination_number') #print session.getVariable('bleg_billsec') configuration = Configuration() if (bleg_connected != '' and bleg_connected != None) and hangup_cause == 'NORMAL_CLEARING': bill_log.info('Call B-leg was connected. Bill subscriber %s' % subscriber) try: charge_info = configuration.get_charge_inbound_calls() if charge_info[1] == 'call': bill_log.info('Charge type: per call, Cost: %s' % charge_info[0]) call_cost = charge_info[0] try: sub = Subscriber() previous_balance = sub.get_balance(subscriber) current_balance = previous_balance - call_cost bill_log.info('Previous balance: %.2f Current Balance: %.2f' % (previous_balance, current_balance)) sub.set_balance(subscriber, current_balance) bill_log.info('Billing %s completed successfully' % subscriber) except SubscriberException as e: bill_log.error('Error during billing the subscriber: %s' % e) elif charge_info[1] == 'min': bill_log.info('Charge type rate per min, cost per min: %s' % charge_info[0]) # BUG: Cannot get b-leg billsec from FS. Use the billsec of a-leg instead call_cost = self.get_call_cost(duration, charge_info[0]) bill_log.info('Call duration %s sec Call cost: %s' % (duration, call_cost)) try: sub = Subscriber() previous_balance = sub.get_balance(subscriber) current_balance = previous_balance - call_cost bill_log.info('Previous balance: %.2f Current Balance: %.2f' % (previous_balance, current_balance)) sub.set_balance(subscriber, current_balance) bill_log.info('Billing %s completed successfully' % subscriber) except SubscriberException as e: bill_log.error('Error during billing the subscriber: %s' % e) except ConfigurationException as e: bill_log.error(e) else: bill_log.info('Call B-leg was not connected. Not billing subscriber %s' % subscriber)
def main(): global LOG_FILE parser = argparse.ArgumentParser() parser.add_argument( 'input_dir', help='Path to input directory containing test in text format') parser.add_argument('num_iterations', type=int, help='Number of iteration to run each test') parser.add_argument( '--android', action='store_true', help= 'Run on Android device. Assumes a single Android device is connected, accessible with adb, and with amber already installed as /data/local/tmp/amber_ndk' ) args = parser.parse_args() if args.android: android_sanity_check() start = time.time() input_dir = args.input_dir num_iterations = args.num_iterations # the user must input the location of the directory where the .amber files will reside output_dir_path = get_new_dir_name() # the user may change the flags used to build the amber tests with (include spaces before and after the flag(s)) amber_build_flags = " -d -t spv1.3 " os.system("mkdir " + output_dir_path) log_file_name = output_dir_path + "/output_log.txt" LOG_FILE = open(log_file_name, 'w') log_print("Date and Time:") now = datetime.now() nowpp = now.strftime("%d/%m/%Y %H:%M:%S") log_print(nowpp) log_print("Computer:") log_print(socket.gethostname()) log_print("") # Store Vulkan info vulkan_info = output_dir_path + "/vulkaninfo.txt" if args.android: log_print("No vulkaninfo on Android") else: log_print("storing vulkaninfo to: " + vulkan_info) log_print("") os.system("vulkaninfo > " + vulkan_info) if args.android: amber_build_path = "" # ignored anyway else: amber_build_path = find_amber() + " " # the user must provide all the possible configuration objects they want to test with, placing them in the # all_config_variants list below default_cfg = Configuration(timeout=2000, workgroups=65532, threads_per_workgroup=1, saturation_level=0, subgroup=0) round_r_cfg = Configuration(timeout=2000, workgroups=65532, threads_per_workgroup=1, saturation_level=1, subgroup=0) chunk_cfg = Configuration(timeout=2000, workgroups=65532, threads_per_workgroup=1, saturation_level=2, subgroup=0) diff_subgroup_cfg = Configuration(timeout=2000, workgroups=65532, threads_per_workgroup=256, saturation_level=0, subgroup=1) diff_workgroup_cfg = Configuration(timeout=2000, workgroups=65532, threads_per_workgroup=4, saturation_level=0, subgroup=0) all_config_variants = [default_cfg, diff_subgroup_cfg, diff_workgroup_cfg] # call the main driver function amber_driver(all_config_variants, input_dir, output_dir_path, amber_build_path, amber_build_flags, num_iterations, args.android) end = time.time() log_print("") log_print("Execution time (s):") log_print(str(end - start)) LOG_FILE.close()
import dropletManager from irc.dict import IRCDict from irc.bot import SingleServerIRCBot, ServerSpec from configuration import Configuration # Basic commands from command import helpCmd, running_droplet # User management commands from command import op_user, unop_user, add_user, rem_user # Droplet management commands from command import add_api_key, stop_droplet, list_images, load_most_recent_image, load_named_image # a list of all commands from command import commands default_config = Configuration.from_path("./default_config.json") def serverOfConfig(config): return ServerSpec(config.settings[Configuration.IRC_SERVER], port=config.settings[Configuration.IRC_PORT], password=config.settings[Configuration.IRC_PASSWD]) class MrDo(SingleServerIRCBot): def __init__(self, config): SingleServerIRCBot.__init__(self, [serverOfConfig(config)], config.settings[Configuration.IRC_UNAME], config.settings[Configuration.IRC_UNAME]) self.config = config self.channel = config.settings[Configuration.IRC_CHAN]
from configuration import Configuration
from boe import Boe, Scraping

# pip install requests
# pip install configparser
# pip install beautifulsoup4

config = Configuration('config.properties')

scraping = Scraping()
scraping.get_html(config.get_url())
import sys  # command-line arguments and sys.path setup

import googlemaps
import pandas as pd

sys.path.append("../route")
sys.path.append("../utils")
sys.path.append("../config")

from geo_utils import GeoUtils
from pymongo import MongoClient
from configuration import Configuration
from polyline_route import PolylineRoute

""" Creating required instances."""
utils = GeoUtils()
config = Configuration()
gmaps = googlemaps.Client(key=config.API_KEY)
mongo_client = MongoClient(host=config.MONGO_HOST, port=config.MONGO_PORT)

""" User input."""
source = sys.argv[1]
destination = sys.argv[2]
mode = sys.argv[3]
_id = "{}_{}_{}".format(source.lower(), destination.lower(), mode)

""" Creating mongo connection."""
db = mongo_client[config.MONGO_TBT_DB]
coll = db[config.MONGO_ROUTE_COLLECTION]

if not coll.find_one({"_id": _id}):
    print("Hi Buddy, I am calling google API")
def test_interface(self):
    ###########################################################################
    # Test create_interface
    Configuration._create(interface_config=str(Interfaces.TEXT))
    interface = create_interface()
    self.assertEqual(type(interface), TextInterface)
# along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. from bottle import route, run, debug, template, request, validate, error, static_file, get from bottle import response from json import dumps from player import Player from configuration import Configuration from volume_control import VolumeController from googlemusic import GoogleMusic # Player object player = Player() # Config object config = Configuration() # Volume Control object volume_control = VolumeController() # Google Music google_music = GoogleMusic(config.app_settings().google_music_user, config.app_settings().google_music_password) #### # UI #### @route('/') def index():
def test_property_access(self):
    config = Configuration(key1="value1", key2=dict(key2_2="value2_2"))
    assert config.key1 == "value1"
    assert isinstance(config.key2, Section)
    assert config.key2.key2_2 == "value2_2"
class SystrayUi(QtGui.QDialog): def __init__(self): super(SystrayUi, self).__init__() # Get the configuration self._config = Configuration() self._socket = None self._syncFolder = None self._syncInterval = None self._remoteHost = None self._remoteUser = None self._remoteRepositoryPath = None staticPath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "static") self._respath = os.path.join(staticPath, "img") self.createLocalSettingsGroupBox() self.createRemoteSettingsGroupBox() self.createButtons() self.createActions() self.createTrayIcon() mainLayout = QtGui.QVBoxLayout() mainLayout.addWidget(self.localSettingsGroupBox) mainLayout.addWidget(self.remoteSettingsGroupBox) mainLayout.addStretch(1) mainLayout.addSpacing(12) mainLayout.addLayout(self.buttonsLayout) self.setLayout(mainLayout) # Read and apply all settings #self.loadSettings() # Show the systray icon self.trayIcon.show() self.setWindowTitle("PythonDrop") self.resize(500, 300) self.hide() def createButtons(self): self.okButton = QtGui.QPushButton("&Ok") self.cancelButton = QtGui.QPushButton("&Cancel") self.okButton.clicked.connect(self.okClicked) self.cancelButton.clicked.connect(self.cancelClicked) self.buttonsLayout = QtGui.QHBoxLayout() self.buttonsLayout.addStretch(1) self.buttonsLayout.addWidget(self.okButton) self.buttonsLayout.addWidget(self.cancelButton) def createLocalSettingsGroupBox(self): self.localSettingsGroupBox = QtGui.QGroupBox("Local Settings") locationLabel = QtGui.QLabel("Sync Folder Location") self.locationLineEdit = QtGui.QLineEdit() self.changeLocationButton = QtGui.QPushButton("Change...") self.changeLocationButton.clicked.connect(self.changeReposPathClicked) updateIntervalLabel = QtGui.QLabel("Update Interval") self.updateIntervalSpinBox = QtGui.QSpinBox() labelUnit = QtGui.QLabel("s") updateIntervalLayout = QtGui.QHBoxLayout() updateIntervalLayout.addWidget(updateIntervalLabel) updateIntervalLayout.addWidget(self.updateIntervalSpinBox) updateIntervalLayout.addWidget(labelUnit) locationLayout = QtGui.QHBoxLayout() locationLayout.addSpacing(20) locationLayout.addWidget(self.locationLineEdit) locationLayout.addWidget(self.changeLocationButton) layout = QtGui.QVBoxLayout() layout.addWidget(locationLabel) layout.addLayout(locationLayout) layout.addSpacing(5) layout.addLayout(updateIntervalLayout) self.localSettingsGroupBox.setLayout(layout) def createRemoteSettingsGroupBox(self): self.remoteSettingsGroupBox = QtGui.QGroupBox("Remote Settings") formLayout = QtGui.QFormLayout() formLayout.setSpacing(5) self.remoteHostLineEdit = QtGui.QLineEdit() formLayout.addRow("Remote Host", self.remoteHostLineEdit) self.remoteUserLineEdit = QtGui.QLineEdit() formLayout.addRow("Remote User", self.remoteUserLineEdit) self.remoteReposPathLineEdit = QtGui.QLineEdit() formLayout.addRow("Remote Repository Path", self.remoteReposPathLineEdit) self.remoteSettingsGroupBox.setLayout(formLayout) def createActions(self): self.minimizeAction = QtGui.QAction("Mi&nimize", self, triggered=self.hide) self.maximizeAction = QtGui.QAction("Ma&ximize", self, triggered=self.showMaximized) self.restoreAction = QtGui.QAction("&Restore", self, triggered=self.showNormal) self.openPythonDropFolderAction = QtGui.QAction( "&Open PythonDrop Folder...", self, triggered=self.openPythonDropFolder) self.showPreferencesDialogAction = QtGui.QAction( "&Preferences...", self, triggered=self.showPreferencesDialog) self.pauseResumeDaemonAction = QtGui.QAction( "Pause syncing", self, triggered=self.pauseResumeDaemon) self.startDaemonAction = 
QtGui.QAction("Start syncing", self, triggered=self.startDaemon) self.aboutAction = QtGui.QAction("&About", self, triggered=self.showAbout) self.quitAction = QtGui.QAction("&Exit", self, triggered=QtGui.qApp.quit) def createTrayIcon(self): self.trayIconMenu = QtGui.QMenu(self) self.trayIconMenu.addAction(self.openPythonDropFolderAction) self.trayIconMenu.addSeparator() self.trayIconMenu.addAction(self.pauseResumeDaemonAction) #self.trayIconMenu.addAction(self.startDaemonAction) self.trayIconMenu.addSeparator() self.trayIconMenu.addAction(self.showPreferencesDialogAction) self.trayIconMenu.addAction(self.aboutAction) self.trayIconMenu.addSeparator() self.trayIconMenu.addAction(self.quitAction) self.trayIcon = QtGui.QSystemTrayIcon(self) self.trayIcon.setIcon( QtGui.QIcon(os.path.join(self._respath, "refresh.ico"))) self.trayIcon.setContextMenu(self.trayIconMenu) def startDaemon(self): pass def pauseResumeDaemon(self): self.sendApiCommand("stop") def openPythonDropFolder(self): subprocess.Popen("explorer " + self._config["roshandrop_folder"]) def showPreferencesDialog(self): self.showNormal() def cancelClicked(self): self.hide() def okClicked(self): self.applySettings() self.hide() def changeReposPathClicked(self): oldDir = self._config["roshandrop_folder"] dir = QtGui.QFileDialog.getExistingDirectory( self, "Select Folder for Repository", oldDir, QtGui.QFileDialog.ShowDirsOnly) if dir: self.locationLineEdit.setText(dir) def loadSettings(self): # Read the settings from the daemon self.readSettings() self.locationLineEdit.setText(self._syncFolder) self.updateIntervalSpinBox.setValue(int(self._syncInterval)) self.remoteHostLineEdit.setText(self._remoteHost) self.remoteUserLineEdit.setText(self._remoteUser) self.remoteReposPathLineEdit.setText(self._remoteRepositoryPath) def applySettings(self): if int(self._config["sync_interval"] ) != self.updateIntervalSpinBox.value(): self.sendApiCommand("update_interval", str(self.updateIntervalSpinBox.value())) def showAbout(self): msgBox = QtGui.QMessageBox() msgBox.setText("This Software was created by Roshan Lamichhane. 
;)") msgBox.exec_() def readSettings(self): self._syncFolder = self.sendApiCommand("get", "syncfolder") self._syncInterval = self.sendApiCommand("get", "syncinterval") self._remoteHost = self.sendApiCommand("get", "remotehost") self._remoteUser = self.sendApiCommand("get", "remoteuser") self._remoteRepositoryPath = self.sendApiCommand("get", "remoterep") def writeSettings(self): pass def connect(self): self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.connect( ("127.0.0.1", self._config.get_option('tcpListenPort', 'general'))) # Receive the welcome message #answer = self.recv_end(self._socket) def disconnect(self): if self._socket is not None: self._socket.close() def sendApiCommand(self, command, data=None): answer = None if self._socket == None: return if data == None: dataToSend = command + "\n\r" else: dataToSend = command + " " + data + "\n\r" self._socket.send(dataToSend) # Receive the answer answer = self.recv_end(self._socket) return answer End = "\n\r" def recv_end(self, the_socket): total_data = [] data = '' while True: data = the_socket.recv(8192) if self.End in data: total_data.append(data[:data.find(self.End)]) break total_data.append(data) if len(total_data) > 1: #check if end_of_data was split last_pair = total_data[-2] + total_data[-1] if self.End in last_pair: total_data[-2] = last_pair[:last_pair.find(self.End)] total_data.pop() break return ''.join(total_data) def recv_basic(self, the_socket): total_data = [] while True: data = the_socket.recv(8192) if not data: break total_data.append(data) return ''.join(total_data)
import sys from collections import deque import imageio import matplotlib.pyplot as plt from classifier import Classifier from configuration import Configuration from helper import Helper from lane_detection import LaneDetection from pre_processing import PreProcessing from window_search import WindowSearch config = Configuration().__dict__ sys.path.append("implementation/") def __main__(): # get video stream video_cap = imageio.get_reader(config["project_video"]) # polynomial lane fit lanes_fit = [] # history of heatmaps to reject false positives history = deque(maxlen=config["history_limit"]) # classifier and scaler classifier = Classifier.get_trained_classifier(use_pre_trained=True) # load calibration parameters: camera_matrix, dist_coef = PreProcessing.load_calibration_params()
def initLogging(verbose=False, console='stdout', consoleLevel='DEBUG'): """ Initilize NuPic logging by reading in from the logging configuration file. The logging configuration file is named 'nupic-logging.conf' and is expected to be in the format defined by the python logging module. If the environment variable 'NTA_CONF_DIR' is defined, then the logging configuration file is expected to be in the NTA_CONF_DIR directory. If NTA_CONF_DIR is not defined, then it is found in the 'conf/default' subdirectory of the NuPic installation directory (typically ~/nta/current/conf/default) The logging configuration file can use the environment variable 'NTA_LOG_DIR' to set the locations of log files. If this variable is not defined already in the environment, this method will set it to the 'logs' subdirectory of the NuPic install directory (typically ~/nta/eng/logs) before loading in the configuration file. console: Defines console output for the default "root" logging configuration; this may be one of 'stdout', 'stderr', or None; Use None to suppress console logging output consoleLevel: Logging-level filter string for console output corresponding to logging levels in the logging module; may be one of: 'DEBUG', 'INFO', 'WARNING', 'ERROR', or 'CRITICAL'. E.g., a value of'WARNING' suppresses DEBUG and INFO level output to console, but allows WARNING, ERROR, and CRITICAL """ # NOTE: If you call this twice from the same process there seems to be a # bug - logged messages don't show up for loggers that you do another # logging.getLogger() on. global gLoggingInitialized if gLoggingInitialized: if verbose: print >> sys.stderr, "Logging already initialized, doing nothing." return consoleStreamMappings = { 'stdout': 'stdoutConsoleHandler', 'stderr': 'stderrConsoleHandler', } consoleLogLevels = [ 'DEBUG', 'INFO', 'WARNING', 'WARN', 'ERROR', 'CRITICAL', 'FATAL' ] assert console is None or console in consoleStreamMappings.keys(), ( 'Unexpected console arg value: %r') % (console, ) assert consoleLevel in consoleLogLevels, ( 'Unexpected consoleLevel arg value: %r') % (consoleLevel) # ----------------------------------------------------------------------- # Setup logging. Look for the nupic-logging.conf file, first in the # NTA_CONFIG_DIR path (if defined), then in a subdirectory of the nupic # module # TODO: move into nupic.support configFilename = 'nupic-logging.conf' try: configFilePath = Configuration.findConfigFile(configFilename) except: configFilePath = None # If NTA_LOG_DIR is not defined, set it now. This is used by the logging # config file to set the path for the log files if 'NTA_LOG_DIR' not in os.environ: os.environ['NTA_LOG_DIR'] = os.path.join(nupic.rootDir, 'logs') if not os.path.exists(os.environ['NTA_LOG_DIR']): makeDirectoryFromAbsolutePath( os.path.abspath(os.environ['NTA_LOG_DIR'])) # Load in the logging configuration file if configFilePath is None: print >> sys.stderr, ( "WARNING: Could not find the logging configuration file " \ "(filename: '%s', expected to be in search path: %s). 
Logging is " \ " disabled.") % (configFilename, Configuration.getConfigPaths()) else: if verbose: print >> sys.stderr, ("Using logging configuration file: %s") % ( configFilePath) # This dict will hold our replacement strings for logging configuration replacements = dict() def makeKey(name): """ Makes replacement key """ return "$$%s$$" % (name) platform = sys.platform.lower() if platform.startswith('java'): # Jython import java.lang platform = java.lang.System.getProperty("os.name").lower() if platform.startswith('mac os x'): platform = 'darwin' if platform.startswith('darwin'): replacements[makeKey( 'SYSLOG_HANDLER_ADDRESS')] = '"/var/run/syslog"' elif platform.startswith('linux'): replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"/dev/log"' else: raise RuntimeError( "This platform is neither darwin nor linux: %s" % (sys.platform, )) if False: #os.path.isdir('/var/log/numenta/nupic'): # NOTE: Not using syslogHandler for now because it either truncates or # drops messages over ~1,400 bytes (depending on platform) # Nupic logs go to syslog. Also, SysLogHandler raises an exception # on jython (at least on 2.5.2): "AttributeError: 'module' object has no # attribute 'AF_UNIX'" (jython is used by a sub-moduleof # ClientJobManager) replacements[makeKey('PERSISTENT_LOG_HANDLER')] = 'syslogHandler' else: # Nupic logs go to file replacements[makeKey('PERSISTENT_LOG_HANDLER')] = 'fileHandler' # Set up log file path for the default file handler logFilePath = _genLoggingFilePath() makeDirectoryFromAbsolutePath(os.path.dirname(logFilePath)) replacements[makeKey('FILE_HANDLER_LOG_FILENAME')] = repr( logFilePath) # Set up root logger replacements[makeKey('ROOT_LOGGER_HANDLERS')] = ( replacements[makeKey('PERSISTENT_LOG_HANDLER')]) if console is not None: replacements[makeKey('ROOT_LOGGER_HANDLERS')] += ( ',' + consoleStreamMappings[console]) # Set up log level for console handlers replacements[makeKey('CONSOLE_LOG_LEVEL')] = consoleLevel customConfig = StringIO() with open(configFilePath) as src: for lineNum, line in enumerate(src): if "$$" in line: for (key, value) in replacements.items(): line = line.replace(key, value) # If there is still a replacement string in the line, we're missing it # from our replacements dict if "$$" in line and "$$<key>$$" not in line: raise RuntimeError( ("The text %r, found at line #%d of file %r, " "contains a string not found in our replacement " "dict.") % (line, lineNum, configFilePath)) customConfig.write(line) customConfig.seek(0) if version.StrictVersion( python_version()) >= version.StrictVersion('2.6'): # NOTE: the disable_existing_loggers arg is new as of python 2.6, so it's # not supported on our jython interperter, which was v2.5.x as of this # writing logging.config.fileConfig(customConfig, disable_existing_loggers=False) else: logging.config.fileConfig(customConfig) gLoggingInitialized = True
from tests.my_test_case import MyTestCase from configuration import Configuration configurations = Configuration() from FAdo.fa import sConcat from FAdo.reex import * #from yappy_parser import Yappy class TestFado(MyTestCase): def setUp(self): pass def test_dfa(self): r2 = str2regexp("(k+s+t)(Y)*", no_table=0).toDFA() self.write_to_dot_to_file(r2, "r2")
if __name__ == '__main__': # Images can either be extracted from a video file or a batch of single photographs. Select # the example for the test run. type = 'video' if type == 'image': names = glob('Images/2012*.tif') # names = glob.glob('Images/Moon_Tile-031*ap85_8b.tif') # names = glob.glob('Images/Example-3*.jpg') else: names = 'Videos/another_short_video.avi' print(names) # Get configuration parameters. configuration = Configuration() configuration.initialize_configuration() try: frames = Frames(configuration, names, type=type) print("Number of images read: " + str(frames.number)) print("Image shape: " + str(frames.shape)) except Error as e: print("Error: " + e.message) exit() # Rank the frames by their overall local contrast. rank_frames = RankFrames(frames, configuration) start = time() rank_frames.frame_score() end = time() print('Elapsed time in ranking images: {}'.format(end - start))
def run(config_path: Path, experiment_path: Path, skip_list: List[str], ssh_password: str) -> None: config: Configuration = Configuration.load(config_path) _build_project(config, skip_list) _deploy_project(config_path, experiment_path, skip_list, ssh_password)
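# Hypothetical invocation of run() above; every path, skip-list entry and the
# password below are illustrative placeholders, not values from the source.
from pathlib import Path

run(config_path=Path("config.yaml"),
    experiment_path=Path("experiments/exp-001"),
    skip_list=["docs", "benchmarks"],
    ssh_password="***")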
def validateConfig(): try: cc = Configuration(request.form['config']) return "OK" except ConfigurationException as ce: return str(ce)
def main(): # check '~/.pyraxshell' and config files exist, create them if missing if not check_dir_home(): print("This is the first time 'pyraxshell' runs, please, configure " "'%s' according to your needs" % CONFIG_FILE) #create db DB() Sessions.Instance().create_table_sessions() # @UndefinedVariable Sessions.Instance().create_table_commands() # @UndefinedVariable # create default configuration file Configuration.Instance() # @UndefinedVariable sys.exit(0) # ######################################## # VERSION CHECK if not version.check_version_file(): sys.exit(1) # ######################################## # LOGGING start_logging() logging.debug('starting') # from baseconfigfile import BaseConfigFile # bcf = BaseConfigFile() # ######################################## # ACCOUNTS accounts = Account.Instance() # @UnusedVariable @UndefinedVariable # config file is read by 'BaseConfigFile' constructor # ######################################## # CONFIGURATION cfg = Configuration.Instance() # @UndefinedVariable # override settings with CLI params cfg.parse_cli(sys.argv) logging.debug("configuration: %s" % cfg) # set user's log level if specified if not Configuration.Instance().log_level == None: # @UndefinedVariable l = logging.getLogger() for h in l.handlers: h.setLevel(cfg.log_level) # ######################################## # START SESSION Sessions.Instance().start_session() # @UndefinedVariable # Sessions.Instance().insert_table_commands('IN', 'OUT') # @UndefinedVariable # ######################################## # DO STUFF # handle configuration if cfg.pyrax_http_debug == True: pyrax.set_http_debug(True) if cfg.pyrax_no_verify_ssl == True: # see: https://github.com/rackspace/pyrax/issues/187 pyrax.set_setting("verify_ssl", False) # start notifier Notifier().start() # main loop Cmd_Pyraxshell().cmdloop()
from feature_table import FeatureTable from constraint_set import ConstraintSet from grammar import Grammar from lexicon import Lexicon from lexicon import Word from configuration import Configuration from tests.persistence_tools import get_feature_table_fixture, get_constraint_set_fixture from simulations.dag_zook import configurations_dict from constraint import VowelHarmonyConstraint, PhonotacticConstraint from transducer import Transducer from debug_tools import write_to_dot as dot configuration = Configuration() configuration.load_configurations_from_dict(configurations_dict) feature_table = FeatureTable.load(get_feature_table_fixture("vowel_harmony_simple_feature_table.json")) constraint_set = ConstraintSet.load(get_constraint_set_fixture("vowel_harmony_simple_constraint_set.json")) data = ["unu", "uku", "nunu", "kunu", "nuku", "kuku", "ini", "iki", "nini", "kini", "niki", "kiki", "unukun", "ukukun", "nunukun", "kunukun", "nukukun", "kukukun", "inikin", "ikikin", "ninikin", "kinikin", "nikikin", "kikikin"] max_word_length_in_data = max([len(word) for word in data]) lexicon = Lexicon(data, max_word_length_in_data) grammar = Grammar(constraint_set, lexicon) grammar_transducer = grammar.get_transducer()
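# Hedged follow-up: the write_to_dot helper imported above as `dot` can
# presumably render the transducer for inspection. The (object, basename)
# signature is an assumption, mirroring the write_to_dot_to_file call in the
# FAdo test above.
dot(grammar_transducer, "vowel_harmony_grammar_transducer")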
# (ASCII-art title banner omitted)

lvl = int(input("Enter your level: "))
configuration = Configuration(lvl)
Niveau = configuration.config(lvl)
joueur = Niveau.getJoueur()
l = Niveau.getLabyrinthe()
cls()

# (second ASCII-art banner omitted)
# Coroutine concurrency (http://sdiehl.github.io/gevent-tutorial/#core) imports
import grequests
import gevent
import base64  # needed for the HTTP Basic Auth header built in turnLightOn()


# This method is used as a callback for the asynchronous network communications
# used to speak to the hub. Our version is very simplistic and just outputs the
# HTTP response code to the console.
def printStatus(response, **kwargs):
    print("Insteon response was {}".format(response.status_code))


# The following allows us to specify the IP address, username and password in a
# more friendly JSON configuration file rather than hardcoding the values in
# the Python source.
configuration = Configuration()
config = configuration.loadConfig()
hub = config['insteon']


class Insteon():

    def turn(self, deviceId='', onOrOff=''):
        if onOrOff == 'on':
            self.turnLightOn(deviceId)
        if onOrOff == 'off':
            self.turnLightOff(deviceId)

    def turnLightOn(self, deviceId):
        # The grequests library sends the request as soon as we create "job"
        # below. We then yield to the greenlet every hundredth of a second in
        # the main update method to ensure we capture the result.
        base64string = base64.encodestring(
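# A sketch of the asynchronous pattern described in the comments above; the
# hub URL and pool size are hypothetical, not taken from the source.
# printStatus is attached as a requests 'response' hook, grequests sends the
# request on a greenlet, and gevent.sleep() yields so the greenlet can finish.
job = grequests.get("http://example-hub.local/light/on",
                    hooks={'response': printStatus})
grequests.send(job, grequests.Pool(1))
for _ in range(100):
    gevent.sleep(0.01)  # yield to the greenlet, as described above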