def test_ignore_long_suspensions(self):
    # Ensure that we have a known value for when we start ignoring plans.
    conf = Config()
    conf.PLAN_USER_IGNORE_THRESHOLD = 30

    self.plan1.member_limit = 1
    user = Membership(first_name="Testy", last_name="Testerson",
                      email="*****@*****.**", plan="plan1",
                      status="active")
    user.put()

    # Initially, the plan should be full, for every status.
    self.assertTrue(self.plan1.is_full())
    user.status = "suspended"
    user.put()
    self.assertTrue(self.plan1.is_full())
    user.status = None
    user.put()
    self.assertTrue(self.plan1.is_full())

    # If we mess with the updated time, it should be ignored when the plan is
    # not active.
    user.updated = datetime.datetime.now() - datetime.timedelta(days=31)
    user.status = "active"
    user.put(skip_time_update=True)
    self.assertTrue(self.plan1.is_full())
    user.status = "suspended"
    user.put(skip_time_update=True)
    self.assertFalse(self.plan1.is_full())
    user.status = None
    user.put(skip_time_update=True)
    self.assertFalse(self.plan1.is_full())
def __init__(self):
    QtGui.QMainWindow.__init__(self)
    self.main = Ui_MainWindow()
    self.flowconfig = flowconfig.FlowConfig(self)
    self.app = None
    self.streammod = StreamTable(self)

    # Dialogs
    self.aboutdialog = AboutDialog.AboutDialog(self)

    # Initialized after setupUi runs
    self.interfacesgui = None
    self.protocolsgui = None
    self.rulegui = None
    self.dbgui = None

    debugger_uri = "PYROLOC://127.0.0.1:7766/debugger"
    self.remote_debugger = Pyro.core.getProxyForURI(debugger_uri)
    #self.proxy = xmlrpclib.ServerProxy("http://localhost:20757")
    #self.objectproxy = xmlrpclib.ServerProxy("http://localhost:20758")
    self.curdebugevent = ""

    self.log = logging.getLogger("mallorygui")
    config = Config()
    config.logsetup(self.log)
class TestConfig(TestCase):
    """Test :class:`.Config`."""

    def setUp(self):
        environ["CONFIG_OPTION"] = "override"
        self.config = Config()

    def test_defaults(self):
        """Test default configuration loaded from defaults.py."""
        self.assertEqual(self.config.CONFIG_NAME, 'value')

    def test_overrides(self):
        """Test configuration loaded from environment variables."""
        self.assertEqual(self.config.CONFIG_OPTION, 'override')

    def test_iter(self):
        """Test iterating over configuration."""
        self.assertEqual([config for config in self.config],
                         [('CONFIG_NAME', 'value'),
                          ('CONFIG_OPTION', 'override')])

    def test_json(self):
        """Test JSON configuration."""
        self.assertEqual(self.config.json(),
                         ['CONFIG_NAME = value', 'CONFIG_OPTION = override'])

    def test_html(self):
        """Test HTML configuration."""
        self.assertEqual(
            self.config.html(),
            '<ul><li>CONFIG_NAME = value<li>CONFIG_OPTION = override</ul>')
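# A minimal sketch (not the project's real implementation) of a Config class
# that would satisfy the tests above. Assumptions: defaults are collected from
# defaults.py into a dict, and a matching environment variable overrides a
# default; the _defaults dict below is illustrative only.
from os import environ


class Config(object):
    _defaults = {'CONFIG_NAME': 'value', 'CONFIG_OPTION': 'default'}

    def __init__(self):
        for key, default in self._defaults.items():
            setattr(self, key, environ.get(key, default))

    def __iter__(self):
        # Yield (name, value) pairs in sorted order, as test_iter expects.
        return iter(sorted((key, getattr(self, key)) for key in self._defaults))

    def json(self):
        return ['%s = %s' % (key, value) for key, value in self]

    def html(self):
        items = ''.join('<li>%s = %s' % (key, value) for key, value in self)
        return '<ul>%s</ul>' % items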
def make_tasks_from_args(args):
    localtree = args.services['filesystem']('local')
    config = Config(args.services)
    if args.config:
        config.read_file(localtree, args.config)
    else:
        confpaths = localtree.get_config_paths_for('ebakup')
        for path in confpaths:
            config.read_file(localtree, path + ('config',))
    tasks = []
    tasks.append(WebUITask(config, args))
    if args.command == 'backup':
        task = BackupTask(config, args)
        tasks.append(task)
    elif args.command == 'info':
        task = InfoTask(config, args)
        tasks.append(task)
    elif args.command == 'shadowcopy':
        task = MakeShadowTreeTask(config, args)
        tasks.append(task)
    elif args.command == 'webui':
        pass
    elif args.command == 'sync':
        task = SyncTask(config, args)
        tasks.append(task)
    elif args.command == 'verify':
        task = _make_verify_task(config, args)
        tasks.append(task)
    else:
        raise UnknownCommandError('Unknown command: ' + args.command)
    return tasks
def settype(self, name, type):
    """ set the type of a bot. """
    cfg = Config('fleet' + os.sep + stripname(name) + os.sep + 'config')
    cfg['name'] = name
    logging.debug("%s - setting type to %s" % (self.cfile, type))
    cfg.type = type
    cfg.save()
def main():
    settings = {
        'cookie_secret': '66oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=',
        'login_url': r'/login',
        'debug': True,
    }

    # load config xml and init logger
    conf = Config()
    conf.load('./wm.xml')
    LOG().setlogger('./wm.log')
    print conf.to_string()

    html_mgn().start(conf.bnum)
    wx_mongo.WxMongo().start()

    omgn = order_mgn.OrderMgn()
    omgn.init()

    onotify = order_notify.OrderNotify()
    onotify.start(wx_mongo.WxMongo(), conf)

    discount_mgn.DiscountMgn().start('./dc.xml')

    print 'server ip[%s], port[%d]' % (conf.ip, conf.port)

    # Start server
    app = tornado.web.Application([
        (r'/wm', WMHandler),
        (r'/openid', OrderHandler),
        (r'/shopping', ShoppingHandler),
        (r'/test', TestHandler),
    ], **settings)
    app.listen(conf.port)
    tornado.ioloop.IOLoop.instance().start()
def getTodos(self):
    cfg = Config()
    self.aleph.login(*[str(cfg.value('aleph_loaned/' + key).toString())
                       for key in CFGS])
    try:
        return map(self.loan2entry, self.aleph.get_loaned())
    finally:
        self.aleph.logout()
class sncli:

    def __init__(self, do_server_sync, verbose=False):
        self.config = Config()
        self.do_server_sync = do_server_sync
        self.verbose = verbose
        self.do_gui = False

        if not os.path.exists(self.config.get_config('db_path')):
            os.mkdir(self.config.get_config('db_path'))

        # configure the logging module
        self.logfile = os.path.join(self.config.get_config('db_path'),
                                    'sncli.log')
        self.loghandler = RotatingFileHandler(self.logfile, maxBytes=100000,
                                              backupCount=1)
        self.loghandler.setLevel(logging.DEBUG)
        self.loghandler.setFormatter(
            logging.Formatter(fmt='%(asctime)s [%(levelname)s] %(message)s'))
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.DEBUG)
        self.logger.addHandler(self.loghandler)
        self.config.logfile = self.logfile

        logging.debug('sncli logging initialized')

        self.logs = []

        try:
            self.ndb = NotesDB(self.config, self.log, self.gui_update_view)
        except Exception, e:
            self.log(str(e))
            sys.exit(1)
class Circulation(object):
    '''
    Makes a connection with the WMS Circulation API using the classes
    Connect (originally from OCLC) and Config
    '''

    def __init__(self, service=None, useTestData=True):
        # service = 'circulation'  # WMS Circulation API
        self.config = Config(service=service, useTestData=useTestData)
        self.institution = self.config.getInstitutionData()
        self.serviceAPI = self.config.getServiceAPIData()
        self.institutionList = self.institution.get('institution')
        self.branchList = self.institution.get('branch')

    def get_data_response(self, institution_id=None, request_url=''):
        connect = Connect(institution_id=institution_id,
                          request_url=request_url, config=self.config)
        opener = urllib2.build_opener(MyHTTPSHandler)
        opener.addheaders = [('accept', 'application/json'),
                             ('Authorization', connect.getAuthorization_header())]
        response = opener.open(request_url).read()
        return response

    def get_JSON(self, data):
        try:
            return json.loads(data)
        except Exception, inst:
            print inst
def get_feed_urls(self):
    ''' Open urls file in .config, make list of feeds '''
    feeds = []
    try:
        feedfile = open(CONFIGPATH + 'urls')
    except Exception as e:
        feedfile = Config().create_urls()
    if feedfile is True:
        print(f"Add urls to url file at: {CONFIGPATH + 'urls'}")
        sys.exit()
    else:
        for line in feedfile:
            line = line.strip()
            if not line.startswith("#"):
                line = line.replace('\n', '').strip()
                line = line.split('|')
                try:
                    genre = re.sub(r'[-\s]+', '-',
                                   (re.sub(r'[^\w\s-]', '', line[1]).strip().lower()))
                except Exception as e:
                    genre = 'uncategorized'
                if line[0]:
                    feed = line[0].strip()
                    db.add_url(feed)
                    feeds += [[feed, genre]]
                    self.get_media_links(feed, genre)
                    now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                    db.update_time(feed, now)
        feedfile.close()
    return True
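# A hypothetical example of the urls file this parser expects: one
# "feed_url|genre" entry per line, "#" starts a comment, and a missing
# genre falls back to 'uncategorized'. The URLs below are illustrative only.
#
#   # podcasts
#   https://example.com/feed.rss|technology
#   https://example.org/episodes.xml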
def makebot(self, name, cfg=None):
    """ create a bot with name .. use configuration if provided. """
    bot = None

    # if no config is given, create a default bot
    if not cfg:
        cfg = Config(self.datadir + os.sep + name, 'config',
                     inittxt=fleetbotconfigtxt)
        cfg.save()

    # create bot based on type
    if cfg['type'] == 'irc':
        from gozerbot.irc.bot import Bot
        bot = Bot(cfg)
    elif cfg['type'] == 'jabber':
        from gozerbot.jabber.jabberbot import Jabberbot
        bot = Jabberbot(cfg)
    else:
        logging.debug('improper type: %s' % cfg['type'])

    # set bot name and initialize bot
    if bot:
        cfg['name'] = bot.name = name
        self.initbot(bot)
        return bot

    # failed to create the bot
    raise Exception("can't make %s bot" % name)
def main():
    '''Initialise the pipeline, then run it'''
    # Parse command line arguments
    options = parse_command_line()
    # Initialise the logger
    logger = Logger(__name__, options.log_file, options.verbose)
    # Log the command line used to run the pipeline
    logger.info(' '.join(sys.argv))
    drmaa_session = None
    try:
        # Set up the DRMAA session for running cluster jobs
        import drmaa
        drmaa_session = drmaa.Session()
        drmaa_session.initialize()
    except Exception as e:
        print("{progname} error using DRMAA library".format(progname=program_name),
              file=sys.stdout)
        print("Error message: {msg}".format(msg=e.message), file=sys.stdout)
        exit(error_codes.DRMAA_ERROR)
    # Parse the configuration file, and initialise global state
    config = Config(options.config)
    config.validate()
    state = State(options=options, config=config, logger=logger,
                  drmaa_session=drmaa_session)
    # Build the pipeline workflow
    pipeline = make_pipeline(state)
    # Run (or print) the pipeline
    cmdline.run(options)
    if drmaa_session is not None:
        # Shut down the DRMAA session
        drmaa_session.exit()
def run(args):
    """ run the corresponding action """
    my_config = Config()
    if args['action'] == 'config':
        args['list_repositories'] and my_config.print_repositories()
    else:
        module_obj = Module(args['module_name'],
                            args['module_developers'],
                            args['module_planners'],
                            args['module_auditors'],
                            folder=args['destination_folder'],
                            init_data=args['add_init_data'],
                            company_name=args['company_name'])
        if args['action'] == 'branch':
            branch_obj = Branch(module_obj,
                                args['branch_suffix'],
                                args['parent_repo'],
                                args['oerp_version'],
                                args['destination_folder'])
            branch_obj.create_branch()
            module_obj.create(branch_obj)
        elif args['action'] == 'create':
            module_obj.create()
        elif args['action'] == 'append':
            module_obj.append(args['append_file'], args['file_name'])
            #~ module_obj.branch_changes_apply()
    return True
def createConfig(self):
    config = Config()
    config.executionTime = 1
    config.programSpeed = 2
    config.maxTravel = 3
    config.formulae = ["4", "5"]
    return config
def load_config(default_config='train_sda.cfg', reset_logging=True):
    parser = argparse.ArgumentParser(description='Train a Stacked Denoising Autoencoder')
    parser.add_argument('-c', '--config', help='specify a configuration file')
    parser.add_argument('-v', '--verbose', help='increase output verbosity',
                        action='store_true')
    args = parser.parse_args()

    if args.config is None:
        configfile = default_config  # load default config
    else:
        configfile = args.config
    config = Config(file(configfile))

    if args.verbose or config.logger.level == 'debug':
        loglevel = logging.DEBUG
    else:
        loglevel = logging.INFO

    logging.basicConfig(format=config.logger.pattern, level=loglevel)
    if reset_logging or config.get('reset_logging', True):
        reset_pylearn2_logging()
    logging.info('using config {0}'.format(configfile))

    # disable annoying deprecation warnings
    warnings.simplefilter('once', UserWarning)
    warnings.simplefilter('default')

    return config
def __init__(self, id=None, **kwargs):
    Config.__init__(self)
    self._cur_event = None
    self._last_event = 0
    self._rebuild_dh_params = False
    self._reset_ip_pool = False

    if id is None:
        self.id = uuid.uuid4().hex
        for name, value in kwargs.iteritems():
            setattr(self, name, value)
    else:
        self.id = id

    self.path = os.path.join(app_server.data_path, SERVERS_DIR, self.id)
    self.ovpn_conf_path = os.path.join(self.path, TEMP_DIR, OVPN_CONF_NAME)
    self.dh_param_path = os.path.join(self.path, DH_PARAM_NAME)
    self.ip_pool_path = os.path.join(self.path, IP_POOL_NAME)
    self.ca_cert_path = os.path.join(self.path, TEMP_DIR, OVPN_CA_NAME)
    self.tls_verify_path = os.path.join(self.path, TEMP_DIR, TLS_VERIFY_NAME)
    self.user_pass_verify_path = os.path.join(self.path, TEMP_DIR,
                                              USER_PASS_VERIFY_NAME)
    self.client_connect_path = os.path.join(self.path, TEMP_DIR,
                                            CLIENT_CONNECT_NAME)
    self.client_disconnect_path = os.path.join(self.path, TEMP_DIR,
                                               CLIENT_DISCONNECT_NAME)
    self.ovpn_status_path = os.path.join(self.path, TEMP_DIR, OVPN_STATUS_NAME)
    self.auth_log_path = os.path.join(app_server.data_path, AUTH_LOG_NAME)
    self.set_path(os.path.join(self.path, SERVER_CONF_NAME))

    if id is None:
        self._initialize()
def init_skin(self, skin_name, system_skin_dir, user_skin_dir,
              skin_config_file, app_given_id, app_given_version):
    '''Init skin.'''
    self.skin_config_file = skin_config_file
    if os.path.exists(skin_config_file):
        # Read skin name from config file.
        skin_config = Config(skin_config_file)
        skin_config.load()

        # Load skin.
        init_skin_name = skin_config.get("skin", "skin_name")
    else:
        # Create skin config if it does not exist.
        touch_file(self.skin_config_file)
        init_skin_name = skin_name

    if self.is_skin_exist(init_skin_name, system_skin_dir, user_skin_dir):
        self.load_skin(init_skin_name, system_skin_dir, user_skin_dir)
    else:
        # Fall back to the default skin if the user's selected skin does not exist.
        default_skin_name = self.get_default_skin(system_skin_dir, user_skin_dir)
        assert default_skin_name is not None
        self.load_skin(default_skin_name, system_skin_dir, user_skin_dir)

    self.app_given_id = app_given_id
    self.app_given_version = app_given_version
def create_protos():
    """Create a Protocol instance with some fields."""
    dissector.Protocol.protocols = {}
    conf = Config('tester')
    conf.id = [25]
    conf.description = 'This is a test'
    rules = [Trailer(conf, {'name': 'missing', 'member': 'missing', 'size': 0}),
             Trailer(conf, {'name': 'simple', 'count': 1, 'size': 4}),
             Trailer(conf, {'name': 'bur', 'count': 3, 'size': 8}),
             Trailer(conf, {'name': 'ber', 'member': 'count'})]
    proto, diss = dissector.Protocol.create_dissector('tester', None, conf)
    diss.add_field(Field('one', 'float', 4, 0, Platform.big))
    diss.add_field(Field('range', 'float', 4, 0, Platform.big))
    diss.children[-1].set_range_validation(0, 10)
    field = Field('array', 'float', 4, 0, Platform.big)
    diss.add_field(ArrayField.create([1, 2, 3], field))
    field = Field('str', 'string', 30, 0, Platform.big)
    diss.add_field(ArrayField.create([2], field))
    diss.add_field(Field('count', 'int32', 4, 0, Platform.big))
    diss.push_modifiers()
    yield proto
    dissector.Protocol.protocols = {}
    del proto, diss
def __init__(self, config=None, envFiles=None):
    self.envFilePaths = envFiles
    self.env = dict()
    self.msgSender = "EnvironmentLayout"

    if self.envFilePaths is None:
        for key, value in os.environ.iteritems():
            self.env[key] = value
    else:
        assert config is not None
        if type(self.envFilePaths) != list:
            self.envFilePaths = [self.envFilePaths]

        # sysEnvPaths = config.getEnvPaths()
        sysEnvPaths = list()
        sysEnvPaths.extend(config.getEnvPaths())

        self.__resolveENVFileLocations(sysEnvPaths)
        for path in self.envFilePaths:
            cfg = Config(path)
            if not cfg.has_section("env"):
                userErrorExit("Missing mandatory section '%s'" % "env")
            envDict = cfg.get_section("env")
            self.__mergeEnv(envDict)
def main():
    """
    Initiates all components that are needed after arduino-yun's linuino
    has started.
    """
    threads = []

    # initiate all needed threads
    data_thread = Data()
    socket_thread = Websocket(Config.get_socket_host(),
                              Config.get_socket_port(),
                              data_thread)
    threads.append(data_thread)
    threads.append(socket_thread)

    # start all needed threads
    for thread in threads:
        thread.daemon = True
        thread.start()

    # add observers to data-object
    logger = Logger("default")
    data_thread.add_observer(logger)
    data_thread.add_observer(socket_thread)

    # keep main() alive to prevent an exit of our
    # daemonized threads
    while True:
        time.sleep(1)
def ifile2dict(self, ifile_fullpath, parent_dirname='unknown'):
    """
    Returns a dict of the attributes contained in the info_file pointed
    to by the given fullpath.
    """
    print 'ifile2dict on', ifile_fullpath
    # print 'ifile2xml() on ', ifile_fullpath
    ifile_path, ifile_name = os.path.split(ifile_fullpath)

    # once again, make sure the requested class exists
    if not ifile_name in (Config.instance().BT_nodes_classes_names
                          #+ Config.instance().BT_leaves_classes_names
                          ):
        s = 'in BTManager.ifile2dict(): unrecognized class \'%s\''
        raise Exception(s % ifile_name)

    with open(ifile_fullpath) as file:
        lines = map(str.strip, file.readlines())
    # skip empty lines
    lines = [line for line in lines if line]
    # skip comments
    lines = [line for line in lines
             if not (line.startswith('#') or line.startswith('//'))]

    content = {'class': ifile_name}
    if lines:
        # split attribute names and values
        splits = [([line, 'True'], line.split('='))[int('=' in line)]
                  for line in lines]
        attributes, values = zip(*[(split[0], '='.join(split[1:]))
                                   for split in splits])

        # make sure no attr starts with '__'
        for attr in attributes:
            if attr.startswith('__'):
                s = 'WARNING: in BTManager.ifile2dict(ifile_fullpath=%(ifile_fullpath)s, '\
                    'parent_dirname=%(parent_dirname)s):\nattributes\' names '\
                    'starting with \'__\' are not recommended.'
                print >> sys.__stderr__, s

        # interpret values as python values if possible
        def cast_or_str(v):
            try:
                return ast.literal_eval(v)
            except:
                return v
        values = map(cast_or_str, values)

        content.update(dict(zip(attributes, values)))

    if Config.instance().BT_serialization_add_debug_info:
        content['__parent_dir_name'] = parent_dirname
        content['__ifile_path'] = ifile_path
        content['__ifile_name'] = ifile_name
    return content
def __init__(self, settings_path, dependencies_path):
    Config.__init__(self, settings_path, dependencies_path)
    self.put('data_dir', '%s/data/%s' % (self.smt_semparse, self.corpus))
    if self.np:
        self.train_name = 'train.np'
    else:
        self.train_name = 'train'
    self.put('srilm_ngram_count',
             '%s/bin/%s/ngram-count' % (self.srilm, self.srilm_arch))
    self.put('moses_train', '%s/scripts/training/train-model.perl' % self.moses)
    self.put('moses_tune', '%s/scripts/training/mert-moses.pl' % self.moses)
    self.put('moses_decode_phrase', '%s/bin/moses' % self.moses)
    self.put('moses_decode_hier', '%s/bin/moses_chart' % self.moses)
    self.put('bleu_eval', '%s/scripts/generic/multi-bleu.perl' % self.moses)
    self.put('wasp_eval', '%s/data/geo-funql/eval/eval.pl' % self.wasp)
    if self.nlg:
        self.put('src', 'mrl')
        self.put('tgt', 'nl')
    else:
        self.put('src', 'nl')
        self.put('tgt', 'mrl')
def iter_list():
    cfg = Config()
    methods = cfg.get_methods()
    features = cfg.get_features()
    c_iteration = cfg.get_c_iteration()
    def_c_value = cfg.get_def_c_value()

    i_list = []
    for i in methods:
        if c_iteration is True:
            for j in range(-4, 5):
                for k in features:
                    i_list.append({'method': i,
                                   'c_value': pow(10, j),
                                   'feature': k})
        else:
            for j in features:
                i_list.append({'method': i,
                               'c_value': def_c_value,
                               'feature': j})
    return i_list
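# For illustration only (the method/feature names below are hypothetical):
# with methods == ['svm'], features == ['tfidf'] and C-iteration enabled,
# iter_list() produces one dict per (method, C, feature) combination,
# sweeping the C value over 10**-4 .. 10**4:
#
#   {'method': 'svm', 'c_value': 0.0001, 'feature': 'tfidf'}
#   ...
#   {'method': 'svm', 'c_value': 10000, 'feature': 'tfidf'}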
def clear_cache(self):
    cache_db.set_remove('servers', '%s_%s' % (self.id, self.type))
    cache_db.list_remove('servers_sorted', '%s_%s' % (self.id, self.type))
    cache_db.remove(self.get_cache_key('clients'))
    for period in ('1m', '5m', '30m', '2h', '1d'):
        persist_db.remove(self.get_cache_key('bandwidth-%s' % period))
    Config.clear_cache(self)
def load_config():
    """ Load configuration """
    parser = argparse.ArgumentParser(description='Configure the change tracking app.')
    parser.add_argument('--sync_cmd', type=str, action='store', nargs='?',
                        help='The command line to sync once there is change',
                        default=Config.DEFAULT_SYNC_CMD)
    parser.add_argument('--delay', type=float, action='store', nargs='?',
                        help='Number of seconds to batch for sync',
                        default=Config.DEFAULT_DELAY)
    parser.add_argument('--filter_regexp', type=str, action='store', nargs='?',
                        help='Regexp for filtering files from sync',
                        default=Config.DEFAULT_FILTER_REGEXP)
    parser.add_argument('--watch_dir', type=str, action='store', nargs='?',
                        help='Path of the dir to watch',
                        default=os.getcwd())
    args = parser.parse_args()

    # New config
    conf = Config()

    # update conf
    conf.delay = args.delay
    conf.sync_cmd = args.sync_cmd
    conf.filter_regexp = args.filter_regexp
    conf.directory = args.watch_dir

    return conf
def validate_conf():
    config = Config()
    config.validate_config()
    if not config:
        return jsonify({'error': "Invalid config file"})
    else:
        return jsonify({'result': 'Configuration Loaded'})
def __init__(self, id=None, name=None):
    Config.__init__(self)

    if id is None:
        self._initialized = False
        self.id = uuid.uuid4().hex
    else:
        self._initialized = True
        self.id = id

    data_path = app_server.data_path or DEFAULT_DATA_PATH
    self.path = os.path.join(data_path, ORGS_DIR, self.id)
    self.index_path = os.path.join(self.path, INDEX_NAME)
    self.index_attr_path = os.path.join(self.path, INDEX_NAME + '.attr')
    self.serial_path = os.path.join(self.path, SERIAL_NAME)
    self.crl_path = os.path.join(self.path, CRL_NAME)
    self.set_path(os.path.join(self.path, 'ca.conf'))

    if name is not None:
        self.name = name
    if not self._initialized:
        self._initialize()

    self.ca_cert = User(self, id=CA_CERT_ID)
def main():
    bus = can.interface.Bus(channel=Config.getChannel(),
                            bustype=Config.getBusType())
    print('start_transmit()')
    sendMotorStatusMessage()
    sendEnvironmentStatusMessage()
    while True:
        time.sleep(20)
def retry_clone(git_repo):
    """Retry the 'git clone' operation for a defined number of attempts
    at defined intervals.
    """
    if os.path.isdir(git_repo.local_repo_path) and os.listdir(git_repo.local_repo_path) != []:
        # delete and recreate local repo path if it is not an empty dir
        AgentGitHandler.log.debug("Local repository path not empty. Cleaning.")
        GitUtils.delete_folder_tree(git_repo.local_repo_path)
        GitUtils.create_dir(git_repo.local_repo_path)

    git_clone_successful = False
    # Read properties from agent.conf
    max_retry_attempts = int(Config.read_property(constants.ARTIFACT_CLONE_RETRIES, 5))
    retry_interval = int(Config.read_property(constants.ARTIFACT_CLONE_INTERVAL, 10))
    retry_attempts = 0

    # Iterate until git clone is successful or we reach max retry attempts
    while git_clone_successful is False and retry_attempts < max_retry_attempts:
        try:
            retry_attempts += 1
            Repo.clone_from(git_repo.repo_url, git_repo.local_repo_path)
            AgentGitHandler.add_repo(git_repo)
            AgentGitHandler.log.info(
                "Retrying attempt to git clone operation for tenant %s successful"
                % git_repo.tenant_id)
            git_clone_successful = True
        except GitCommandError as e:
            AgentGitHandler.log.warn("Retrying git clone attempt %s failed" % retry_attempts)
            if retry_attempts < max_retry_attempts:
                time.sleep(retry_interval)
            else:
                raise GitRepositorySynchronizationException(
                    "Error while retrying git clone: %s" % e)
def test_GetRomCollectionNames(self):
    config_xml_file = os.path.join(os.path.dirname(__file__), 'testdata',
                                   'config', 'romcollections_two_valid.xml')
    conf = Config(config_xml_file)
    conf.readXml()

    names = conf.getRomCollectionNames()
    self.assertEqual(names, ['Atari 2600', 'NES'])
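# A hypothetical sketch of the romcollections_two_valid.xml fixture implied
# by this test -- the real file's schema may differ:
#
#   <config>
#     <RomCollections>
#       <RomCollection id="1" name="Atari 2600">...</RomCollection>
#       <RomCollection id="2" name="NES">...</RomCollection>
#     </RomCollections>
#   </config>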
def run():
    category_frames = read_native(
        Config.get("feature_text_names.extraction.output.path"), 2000)
    category_frames = filter_frames(
        category_frames,
        get_labeled_data_filter("../../mturk/our_labels/labels.csv"))
    train_frame, test_frame = split_train_test(category_frames, test_size=0.3)

    mask = np.asarray(np.ones((1, train_frame.shape[1]), dtype=bool))[0]
    mask[0] = False
    train_matrix, test_matrix = dataframe_to_numpy_matrix(
        train_frame, test_frame, mask)
    train_x, train_y = split_target_from_data(train_matrix)
    test_x, test_y = split_target_from_data(test_matrix)

    my_stop_words = read_stop_words()
    print my_stop_words
    stop_words = text.ENGLISH_STOP_WORDS.union(my_stop_words)

    pipeline = Pipeline([
        ('vect', CountVectorizer(stop_words=stop_words)),
        ('tfidf', TfidfTransformer()),
        #('clf', SGDClassifier(loss='log')),
        ('clf', OneVsRestClassifier(
            SVC(kernel="linear", class_weight='balanced', probability=True))),
    ])

    # uncommenting more parameters will give better exploring power but will
    # increase processing time in a combinatorial way
    parameters = {
        'vect__max_df': (0.5, 0.75, 1.0),
        # 'vect__max_features': (None, 5000, 10000, 50000),
        'vect__ngram_range': ((1, 1), (1, 2)),  # unigrams or bigrams
        # 'tfidf__use_idf': (True, False),
        # 'tfidf__norm': ('l1', 'l2'),
        'clf__alpha': (0.00001, 0.000001, 0.0000001),
        'clf__penalty': ('l2', 'elasticnet')
        # 'clf__n_iter': (10, 50, 80),
    }

    # find the best parameters for both the feature extraction and the
    # classifier
    grid_search = GridSearchCV(pipeline, parameters, n_jobs=4, verbose=10)
    """
    cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2)
    grid_search = GridSearchCV(pipeline, param_grid=parameters, cv=cv,
                               scoring='f1_weighted', verbose=10, n_jobs=4)

    print("Performing grid search...")
    print("pipeline:", [name for name, _ in pipeline.steps])
    print("parameters:")
    pprint(parameters)
    t0 = time()
    grid_search.fit(train_x.A.ravel(), np.array(train_y).tolist())
    print("done in %0.3fs" % (time() - t0))
    print()

    print("Best score: %0.3f" % grid_search.best_score_)
    print("Best parameters set:")
    best_parameters = grid_search.best_estimator_.get_params(deep=True)
    for param_name in sorted(parameters.keys()):
        print("\t%s: %r" % (param_name, best_parameters[param_name]))
    """
    '''
    best_parameters = {
        'vect__max_df': (0.5),
        # 'vect__max_features': (None, 5000, 10000, 50000),
        'vect__ngram_range': ((1, 2)),  # unigrams or bigrams
        # 'tfidf__use_idf': (True, False),
        # 'tfidf__norm': ('l1', 'l2'),
        'clf__alpha': (0.00001),
        'clf__penalty': ('elasticnet')
        # 'clf__n_iter': (10, 50, 80),
    }
    best_parameters = {
        'vect__max_df': (0.5),
        # 'vect__max_features': (None, 5000, 10000, 50000),
        'vect__ngram_range': ((1, 1)),  # unigrams or bigrams
        # 'tfidf__use_idf': (True, False),
        # 'tfidf__norm': ('l1', 'l2'),
        'clf__alpha': (0.0001),
        'clf__penalty': ('elasticnet')
        # 'clf__n_iter': (10, 50, 80),
    }
    '''
    # pipeline.set_params(**best_parameters)

    X_numpy = train_x
    y_numpy = train_y
    X = X_numpy.A.ravel()
    y = np.array(y_numpy).tolist()

    parameters = {'clf__estimator__C': np.logspace(-4, 4, 9)}
    clf = fit_cv(pipeline, X, y, parameters)
    # clf = pipeline.fit(X, y)

    # feature visualization
    vec = clf.named_steps['vect']
    vocabulary = vec.vocabulary_
    classifier = clf.named_steps['clf']
    weights = classifier.coef_.toarray()
    category_list = {0: "DATA", 1: "EDU", 2: "HW", 3: "DOCS", 4: "DEV", 5: "WEB"}
    k = 20
    for id in range(0, len(category_list)):
        max_ids = np.argsort(-weights[id])
        values = np.sort(-weights[id])
        feature_names = [""] * k
        feature_weights = [0] * k
        for key_d, item in vocabulary.iteritems():
            for i in range(0, k):
                if max_ids[i] == item:
                    feature_names[i] = key_d
                    feature_weights[i] = -values[i]
        print feature_names
        array_to_itemlist(feature_names)
        print feature_weights

        ind = np.arange(k)
        plt.bar(ind, feature_weights)
        plt.title('top ' + str(k) + ' features for ' + category_list[id])
        plt.xticks(np.arange(k) + 0.5, feature_names, rotation='vertical')
        plt.xlabel('features')
        plt.ylabel('weight')
        plt.tight_layout()
        # plt.show()

    # y_pred = clf.predict(test_x.A.ravel())
    # y_pred_proba = clf.predict_proba(test_x.A.ravel())
    # print y_pred_proba
    # print "Test Set"
    # visualize(test_y, y_pred)
    test(clf, test_x.A.ravel(), np.array(test_y, dtype=np.float))

    print "Attachment A"
    # attachment A
    attachment_a_frames = read_native(
        Config.get("attachmentA.feature_text_names.extraction.output.path"), 150)
    attachment_a_frame = concat(attachment_a_frames)
    attachment_a_matrix = dataframe_to_numpy_matrix_single(
        attachment_a_frame, mask)
    attachment_a_x, attachment_a_y = split_target_from_data(attachment_a_matrix)
    attachment_a_y_pred = clf.predict_proba(attachment_a_x.A.ravel())
    # print attachment_a_y_pred
    # attachment_a_ypred_test = np.argmax(attachment_a_y_pred, axis=1)
    # choose class with highest probability per sample
    # print attachment_a_ypred_test
    # visualize(attachment_a_y, attachment_a_ypred_test)
    test(clf, attachment_a_x.A.ravel(), np.array(attachment_a_y, dtype=np.float))
def genJob(jobPath, testPath, logPath, pathConf):
    f = file(pathConf)
    cfg = Config(f)

    pathToJob = jobPath + "/launchOutStats.pbs"
    if os.path.exists(pathToJob):
        os.system("rm " + pathToJob)

    AllTile = cfg.chain.listTile
    nbTile = len(AllTile.split(" "))

    if nbTile > 1:
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N outStats\n\
#PBS -J 0-%d:1\n\
#PBS -l select=1:ncpus=1:mem=10000mb\n\
#PBS -l walltime=02:00:00\n\
#PBS -o %s/outStats_out.log\n\
#PBS -e %s/outStats_err.log\n\
\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
. $OTB_HOME/config_otb.sh\n\
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
Nsample=$(grep --only-matching --perl-regex "^((?!#).)*(?<=runs\:).*" $FileConfig | cut -d "\'" -f 2)\n\
cd $PYPATH\n\
\n\
ListeTuile=($(grep --only-matching --perl-regex "^((?!#).)*(?<=listTile\:).*" $FileConfig | cut -d "\'" -f 2))\n\
\n\
python outStats.py -tile ${ListeTuile[${PBS_ARRAY_INDEX}]} -conf $FileConfig --sample $Nsample --wd $TMPDIR' % (nbTile - 1, logPath, logPath, pathConf))
        jobFile.close()
    else:
        jobFile = open(pathToJob, "w")
        jobFile.write('#!/bin/bash\n\
#PBS -N outStats\n\
#PBS -l select=1:ncpus=1:mem=4000mb\n\
#PBS -l walltime=02:00:00\n\
#PBS -o %s/outStats_out.log\n\
#PBS -e %s/outStats_err.log\n\
\n\
\n\
module load python/2.7.5\n\
module remove xerces/2.7\n\
module load xerces/2.8\n\
module load gdal/1.11.0-py2.7\n\
\n\
FileConfig=%s\n\
export ITK_AUTOLOAD_PATH=""\n\
export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\
. $OTB_HOME/config_otb.sh\n\
export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1\n\
\n\
PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\
Nsample=$(grep --only-matching --perl-regex "^((?!#).)*(?<=runs\:).*" $FileConfig | cut -d "\'" -f 2)\n\
cd $PYPATH\n\
\n\
ListeTuile=($(grep --only-matching --perl-regex "^((?!#).)*(?<=listTile\:).*" $FileConfig | cut -d "\'" -f 2))\n\
\n\
python outStats.py -tile ${ListeTuile[0]} -conf $FileConfig --sample $Nsample --wd $TMPDIR' % (logPath, logPath, pathConf))
        jobFile.close()
def __init__(self):
    super(GetLoserBoardCommand, self).__init__()
    config = Config()
    config.connect_to_db()
    for line in open(os.path.join(model_path, 'top_keywords')):
        line = line.strip()
        items = line.split('\t')
        label = int(items[0])
        words = items[1]
        label2topn[label] = words
    return label2topn


print('init config params')
args = vars()
args['mode'] = 'infer'
args['model_dir'] = 'service_model'
args['beam_width'] = 5
args['length_penalty_weight'] = 0.8
config = Config(**args)

stop_words = load_stop_words(config.model_dir)
word2label = load_keyword_label(config.model_dir)
label2topn = load_top_words(config.model_dir)

model = model_factory.create_model(config)
infer_model = model.create_infer_model_graph()
config_proto = model_helper.get_config_proto(config.log_device)
sess = tf.InteractiveSession(graph=infer_model.graph, config=config_proto)
ckpt = tf.train.latest_checkpoint(config.model_dir)
loaded_infer_model = model_helper.load_model(infer_model.model, ckpt, sess,
                                             "infer")
type="str", default='./config.yaml', dest="config") parser.add_option('-m', '--model_path', type="str", default='model', dest="model_path") parser.add_option('-g', '--graph_path', type="str", default='graph', dest="graph_path") opt, args = parser.parse_args() server_port = opt.port conf_path = opt.config model_path = opt.model_path graph_path = opt.graph_path system_config = Config(conf_path=conf_path, model_path=model_path, graph_path=graph_path) interface_manager = InterfaceManager() threading.Thread(target=event_loop).start() logger = system_config.logger server_host = "0.0.0.0" logger.info('Running on http://{}:{}/ <Press CTRL + C to quit>'.format( server_host, server_port)) serve()
def testprep_confobj(self):
    conf = Config()
    conf.setdefault()
    conf.setqualified(["A", "B", "C", "D", "E", "F", "G"])
    conf.setregular(["a", "b", "c", "d", "e", "f", "g", "h", "i"])
    conf.setmonth(1)
    conf.setyear(2001)
    restr = re.Restrictions()
    restr._setall([3, 1, 10, 1, 0, 1, 6, 3, 7, 1, 2, 3])
    conf.setrestrictions(restr.dict)
    conf.setlanguage('en')
    return conf
from data_tools.target_extractors import create_corrupted_dep_vocab, create_struct_gold_distances
from data_tools.data_inits import parse_all_corpora
from runners.trainers import train_dep_parsing

# %%
# Custom Configuration
from config import Config

sample_config: Config = Config(
    run_label='sample_data',
    uses_sample=True,
    path_to_data_train='data/sample/en_ewt-ud-train.conllu',
    path_to_data_valid='data/sample/en_ewt-ud-dev.conllu',
    feature_model_type=ARGS.feature_model_type or 'distilgpt2',
    will_train_simple_probe=True,
    will_control_task_simple_prob=True,
    will_train_structural_probe=True,
    will_train_dependency_probe=False,
    will_control_task_dependency_probe=False,
    struct_probe_train_epoch=100,
    dep_probe_train_epoch=1000,
    struct_probe_lr=0.001)

full_config: Config = Config(
    run_label='full_run_monday',
    uses_sample=False,
    path_to_data_train='data/en_ewt-ud-train.conllu',
    path_to_data_valid='data/en_ewt-ud-dev.conllu',
    feature_model_type=ARGS.feature_model_type or 'xlm-roberta-base',
    will_train_simple_probe=True,
    will_control_task_simple_prob=True,
def main():
    # configuration
    config = Config()
    config.parse_arg(FLAGS)
    config.setup_path()
    config.print_arg()

    # dataset
    if (config.dataset == 'wikibio'):
        dset = DatasetTable2text(config)
        dset.load()
    else:
        dset = Dataset(config)
        dset.build()
    config.vocab_size = len(dset.word2id)
    config.key_size = len(dset.key2id)
    config.dec_start_id = dset.word2id["_GOO"]
    config.dec_end_id = dset.word2id["_EOS"]
    config.pad_id = dset.pad_id
    config.stop_words = dset.stop_words

    # model
    if (config.model_name == "seq2seq"):
        if (config.dataset == 'wikibio'):
            Model = Seq2seqData2text
        else:
            Model = Seq2seq
    elif (config.model_name == "bow_seq2seq"):
        Model = BowSeq2seq
    elif (config.model_name == "vae"):
        Model = Vae
    elif (config.model_name == "hierarchical_vae"):
        Model = Hierarchical_Vae
    elif (config.model_name == "latent_bow"):
        if (config.dataset == 'wikibio'):
            Model = LatentBowData2text
        else:
            Model = LatentBow
    elif (config.model_name == "lm"):
        Model = LM
    else:
        msg = ("the model name should be in ['seq2seq', 'bow_seq2seq', 'vae', "
               "'hierarchical_vae', 'latent_bow', 'lm'], ")
        msg += "current name: %s" % config.model_name
        raise Exception(msg)

    model = Model(config)
    with tf.variable_scope(config.model_name):
        model.build()

    # controller
    controller = Controller(config)
    if (config.model_name != "lm"):
        if ("lm" in controller.eval_metrics_list):
            controller.build_lm(LM, config)
    controller.train(model, dset)
    return
from config import Config
from aucReader import Dataloader

test = 0
user = 0
als = 0
attention = 1

config = Config()
if test:
    config.data_path = '../data/ml-1m/sample.csv'
    config.num_users = 50
    config.train_path = '../data/ml-1m/train2'
dl = Dataloader(config)

if user:
    from userRnn import UserRNN
    print('user model test:%d' % test)
    model = UserRNN(config, dl)
elif attention:
    from attURnn import AttUserRNN
    print('attention model test:%d' % test)
    model = AttUserRNN(config, dl)
elif als:
    from als import ALS
    print('als model test:%d' % test)
    model = ALS(config, dl)
else:
    from rnnRS import RnnRs
    print('rnn model test:%d' % test)
def __init__(self):
    from config import Config
    from odr.container import register

    config = Config()
    register(config)
    self.level = config.log_level
    label_token_list = []
    tokens_list = []
    # for index, token_list in enumerate(all_tokens_list[:146]):
    #     label_token_list = all_label_token_list[index]
    #     with open(data_dir + 'test.txt', 'a', encoding='utf-8') as fw:
    #         for i, token in enumerate(token_list):
    #             label_token = label_token_list[i]
    #             fw.write(token + ' ' + label_token + '\n')
    #         fw.write('\n')
    label_list = extract_entity(all_label_token_list, all_tokens_list)
    return text_list, label_list


if __name__ == '__main__':
    config = Config()
    data_dir = config.processed_data
    text_l, label_l = get_clean_csv(data_dir + 'source_train.txt')
    dev_text_l, dev_label_l = get_clean_csv(data_dir + 'dev.txt')
    text_l.extend(dev_text_l)
    label_l.extend(dev_label_l)

    final_label_list = []
    for l_l in label_l:
        temp_list = []
        for key in l_l:
            temp_list.extend(l_l[key])
        final_label_list.append(";".join(temp_list))

    data_dict = {'text': text_l, 'unknownEntities': final_label_list}
    data_df = pd.DataFrame(data_dict)
def worker(action, dry):
    try:
        instance = input("Choose an instance tag to act upon (config.ini): ")
        instance_id = Config.get_from_section(instance, 'id')
        if action.lower() == "on":
            print("You are starting an instance")
            try:
                response = ec2.start_instances(InstanceIds=[instance_id],
                                               DryRun=dry)
                print(response)
                update(instance, 'state', 'running')
                print("Please remember to turn off your instance when you're done.")
            except ClientError as e:
                if 'DryRunOperation' not in str(e):
                    print(permission)
                    raise
                print(error)
        elif action.lower() == "off":
            print("You are stopping an instance")
            try:
                response = ec2.stop_instances(InstanceIds=[instance_id],
                                              DryRun=dry)
                print(response)
                update(instance, 'state', 'stopped')
            except ClientError as e:
                if 'DryRunOperation' not in str(e):
                    print(permission)
                    raise
                print(error)
        elif action.lower() == "reboot":
            print("You are rebooting an instance")
            try:
                response = ec2.reboot_instances(InstanceIds=[instance_id],
                                                DryRun=dry)
                print(response)
            except ClientError as e:
                if 'DryRunOperation' not in str(e):
                    print(e)
                    raise
                print(error)
        elif action.lower() == "terminate":
            print("You are terminating an instance")
            try:
                response = ec2.terminate_instances(InstanceIds=[instance_id],
                                                   DryRun=dry)
                print(response)
                update(instance, 'state', 'terminated')
            except ClientError as e:
                if 'DryRunOperation' not in str(e):
                    print(error)
                    raise
        else:
            print("Please enter a valid action. Look at help for instructions.")
    except ClientError as e:
        print(e)
    except ParamValidationError as e:
        print("Please enter arguments and values")
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import
import numpy as np
np.set_printoptions(threshold=np.nan)
import os, shutil
import glob
import platform
import copy
import time
from utils import read, compute_voxelgrid_and_sceneflow, generate_numpy, create_dir
from PlyFile import PlyFile
from config import Config, DATA_TYPES_3D

##################################################################################
system = platform.system()
cfg = Config(system=system)
print("Config init from ", cfg.dataset_path)
##################################################################################

# generate = input("Generate numpy files? ")
# if generate == "Y" or generate == "y":
#     generate = True
# elif generate == "F" or generate == "f":
#     generate = False
#
# print(generate)

SPLIT = "TRAIN"
LETTER = "B"
NUMBER = "0740"
class BiRNN(object):

    def __init__(self, wav_files, text_labels, words_size, words, word_num_map):
        self.conf = Config()
        self.wav_files = wav_files
        self.text_labels = text_labels
        self.words_size = words_size
        self.words = words
        self.word_num_map = word_num_map
        tf.logging.info('vocabulary size: ' + str(self.words_size))

    def add_placeholders(self):
        # batch_size and max_stepsize are variable at each step.
        self.input_tensor = tf.placeholder(
            tf.float32,
            [None, None, n_input + (2 * n_input * n_context)],
            name='input')  # speech log filter bank or MFCC features
        self.text = tf.sparse_placeholder(tf.int32, name='text')  # text
        self.seq_length = tf.placeholder(tf.int32, [None],
                                         name='seq_length')  # sequence length
        self.keep_dropout = tf.placeholder(tf.float32)

    def bi_rnn_layer(self):
        '''
        Build the network model.
        :param batch_x:
        :param seq_length:
        :param n_input:
        :param n_context:
        :param n_character:
        :param keep_dropout:
        '''
        batch_x = self.input_tensor
        seq_length = self.seq_length
        n_character = self.words_size + 1 + 1
        keep_dropout = self.keep_dropout

        # batch_x_shape: [batch_size, n_steps, n_input + 2*n_input*n_context]
        batch_x_shape = tf.shape(batch_x)

        # Make the input time-major.
        batch_x = tf.transpose(batch_x, [1, 0, 2])
        # Then reshape to 2-D for the first layer.
        batch_x = tf.reshape(
            batch_x,
            [-1, n_input + 2 * n_input * n_context])  # (n_steps*batch_size, n_input + 2*n_input*n_context)

        # 1st layer
        with tf.name_scope('layer1'):
            b1 = self.variable_on_device(
                'b1', [n_hidden_1],
                tf.random_normal_initializer(stddev=b_stddev))
            h1 = self.variable_on_device(
                'h1', [n_input + 2 * n_input * n_context, n_hidden_1],
                tf.random_normal_initializer(stddev=h_stddev))
            layer_1 = tf.minimum(
                tf.nn.relu(tf.add(tf.matmul(batch_x, h1), b1)), relu_clip)
            layer_1 = tf.nn.dropout(layer_1, keep_dropout)

        # 2nd layer
        with tf.name_scope('layer2'):
            b2 = self.variable_on_device(
                'b2', [n_hidden_2],
                tf.random_normal_initializer(stddev=b_stddev))
            h2 = self.variable_on_device(
                'h2', [n_hidden_1, n_hidden_2],
                tf.random_normal_initializer(stddev=h_stddev))
            layer_2 = tf.minimum(
                tf.nn.relu(tf.add(tf.matmul(layer_1, h2), b2)), relu_clip)
            layer_2 = tf.nn.dropout(layer_2, keep_dropout)

        # 3rd layer
        with tf.name_scope('layer3'):
            b3 = self.variable_on_device(
                'b3', [n_hidden_3],
                tf.random_normal_initializer(stddev=b_stddev))
            h3 = self.variable_on_device(
                'h3', [n_hidden_2, n_hidden_3],
                tf.random_normal_initializer(stddev=h_stddev))
            layer_3 = tf.minimum(
                tf.nn.relu(tf.add(tf.matmul(layer_2, h3), b3)), relu_clip)
            layer_3 = tf.nn.dropout(layer_3, keep_dropout)

        # bidirectional RNN
        with tf.name_scope('birnn'):
            # forward cell
            lstm_fw_cell = tf.contrib.rnn.BasicLSTMCell(n_cell_dim,
                                                        forget_bias=1.0,
                                                        state_is_tuple=True)
            lstm_fw_cell = tf.contrib.rnn.DropoutWrapper(
                lstm_fw_cell, input_keep_prob=keep_dropout)
            # backward cell
            lstm_bw_cell = tf.contrib.rnn.BasicLSTMCell(n_cell_dim,
                                                        forget_bias=1.0,
                                                        state_is_tuple=True)
            lstm_bw_cell = tf.contrib.rnn.DropoutWrapper(
                lstm_bw_cell, input_keep_prob=keep_dropout)

            # `layer_3` to `[n_steps, batch_size, 2*n_cell_dim]`
            layer_3 = tf.reshape(layer_3, [-1, batch_x_shape[0], n_hidden_3])

            outputs, output_states = tf.nn.bidirectional_dynamic_rnn(
                cell_fw=lstm_fw_cell,
                cell_bw=lstm_bw_cell,
                inputs=layer_3,
                dtype=tf.float32,
                time_major=True,
                sequence_length=seq_length)

            # concatenate forward and backward results: [n_steps, batch_size, 2*n_cell_dim]
            outputs = tf.concat(outputs, 2)
            # to a single tensor of shape [n_steps*batch_size, 2*n_cell_dim]
            outputs = tf.reshape(outputs, [-1, 2 * n_cell_dim])

        with tf.name_scope('layer5'):
            b5 = self.variable_on_device(
                'b5', [n_hidden_5],
                tf.random_normal_initializer(stddev=b_stddev))
            h5 = self.variable_on_device(
                'h5', [(2 * n_cell_dim), n_hidden_5],
                tf.random_normal_initializer(stddev=h_stddev))
            layer_5 = tf.minimum(
                tf.nn.relu(tf.add(tf.matmul(outputs, h5), b5)), relu_clip)
            layer_5 = tf.nn.dropout(layer_5, keep_dropout)

        with tf.name_scope('layer6'):
            # fully connected layer used for softmax classification
            b6 = self.variable_on_device(
                'b6', [n_character],
                tf.random_normal_initializer(stddev=b_stddev))
            h6 = self.variable_on_device(
                'h6', [n_hidden_5, n_character],
                tf.random_normal_initializer(stddev=h_stddev))
            layer_6 = tf.add(tf.matmul(layer_5, h6), b6)

        # reshape 2-D [n_steps*batch_size, n_character] to 3-D time-major
        # [n_steps, batch_size, n_character]
        layer_6 = tf.reshape(layer_6, [-1, batch_x_shape[0], n_character])

        # Output shape: [n_steps, batch_size, n_character]
        self.logits = layer_6

    def loss(self):
        """
        Define the loss.
        :return:
        """
        # CTC loss
        with tf.name_scope('loss'):
            self.avg_loss = tf.reduce_mean(
                ctc_ops.ctc_loss(self.text, self.logits, self.seq_length))
            tf.summary.scalar('loss', self.avg_loss)
        # [optimizer]
        with tf.name_scope('train'):
            self.optimizer = tf.train.AdamOptimizer(
                learning_rate=learning_rate).minimize(self.avg_loss)

        with tf.name_scope("decode"):
            self.decoded, log_prob = ctc_ops.ctc_beam_search_decoder(
                self.logits, self.seq_length, beam_width=10,
                merge_repeated=True)

        with tf.name_scope("accuracy"):
            self.distance = tf.edit_distance(
                tf.cast(self.decoded[0], tf.int32), self.text)
            # compute the label error rate (accuracy)
            self.label_err = tf.reduce_mean(self.distance,
                                            name='label_error_rate')
            tf.summary.scalar('accuracy', self.label_err)

    def get_feed_dict(self, dropout=None):
        """
        Build the feed dict.
        :param dropout:
        :return:
        """
        feed_dict = {
            self.input_tensor: self.audio_features,
            self.text: self.sparse_labels,
            self.seq_length: self.audio_features_len
        }
        if dropout is not None:
            feed_dict[self.keep_dropout] = dropout
        else:
            feed_dict[self.keep_dropout] = keep_dropout_rate
        return feed_dict

    def init_session(self):
        self.savedir = self.conf.get("FILE_DATA").savedir
        self.saver = tf.train.Saver(max_to_keep=1)  # create saver

        # create the session
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.7)
        self.sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
        # sess = tf.Session()
        # If there is no saved model, reinitialize from scratch.
        self.sess.run(tf.global_variables_initializer())
        ckpt = tf.train.latest_checkpoint(self.savedir)
        tf.logging.info("ckpt: " + str(ckpt))
        self.startepo = 0
        if ckpt is not None:
            self.saver.restore(self.sess, ckpt)
            ind = ckpt.rfind("-")
            self.startepo = int(ckpt[ind + 1:])
            tf.logging.info(str(self.startepo))

    def add_summary(self):
        self.merged = tf.summary.merge_all()
        self.writer = tf.summary.FileWriter(
            self.conf.get("FILE_DATA").tensorboardfile, self.sess.graph)

    def train(self):
        epochs = 200

        # prepare to run the training steps
        section = '\n{0:=^40}\n'
        tf.logging.info(section.format('Training started'))

        train_start = time.time()
        for epoch in range(epochs):  # number of passes over the sample set
            epoch_start = time.time()
            if epoch < self.startepo:
                continue

            tf.logging.info("Iteration " + str(epoch + 1) + " of " +
                            str(epochs) + " total")

            ####################### run batches ####
            n_batches_epoch = int(np.ceil(len(self.text_labels) / batch_size))
            tf.logging.info("Looping " + str(n_batches_epoch) +
                            " times this iteration, taking " +
                            str(batch_size) + " samples each time")

            train_cost = 0
            train_err = 0
            next_idx = 0

            for batch in range(n_batches_epoch):  # how many batches of batch_size to take
                # fetch data
                # temp_next_idx, temp_audio_features, temp_audio_features_len, temp_sparse_labels
                tf.logging.info(
                    '%d/%d:%s', batch + 1, n_batches_epoch,
                    datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                next_idx, self.audio_features, self.audio_features_len, self.sparse_labels, wav_files = utils.next_batch(
                    next_idx, batch_size, n_input, n_context,
                    self.text_labels, self.wav_files, self.word_num_map,
                    pick_deterministically=False)

                # compute avg_loss and run the optimizer
                batch_cost, _ = self.sess.run(
                    [self.avg_loss, self.optimizer],
                    feed_dict=self.get_feed_dict())
                train_cost += batch_cost

                if (batch + 1) % 100 == 0:
                    # rs = self.sess.run(self.merged, feed_dict=self.get_feed_dict())
                    # self.writer.add_summary(rs, batch)
                    tf.logging.info('batches: ' + str(batch + 1) + ' loss: ' +
                                    str(train_cost / (batch + 1)))
                    d, train_err = self.sess.run(
                        [self.decoded[0], self.label_err],
                        feed_dict=self.get_feed_dict(dropout=1.0))
                    dense_decoded = tf.sparse_tensor_to_dense(
                        d, default_value=-1).eval(session=self.sess)
                    dense_labels = utils.trans_tuple_to_texts_ch(
                        self.sparse_labels, self.words)
                    tf.logging.info('error rate: ' + str(train_err))
                    for orig, decoded_array in zip(dense_labels, dense_decoded):
                        # convert to strings
                        decoded_str = utils.trans_array_to_text_ch(
                            decoded_array, self.words)
                        tf.logging.info('original text: {}'.format(
                            orig.encode('utf-8')))
                        tf.logging.info('decoded text: {}'.format(
                            decoded_str.encode('utf-8')))
                        break

            epoch_duration = time.time() - epoch_start
            log = 'Epoch {}/{}, train loss: {:.3f}, error rate: {:.3f}, time: {:.2f} sec'
            tf.logging.info(
                log.format(epoch + 1, epochs, train_cost, train_err,
                           epoch_duration))
            self.saver.save(self.sess,
                            self.savedir + self.conf.get("FILE_DATA").savefile,
                            global_step=epoch + 1)

        train_duration = time.time() - train_start
        tf.logging.info('Training complete, total duration: {:.2f} min'.format(
            train_duration / 60))
        self.sess.close()

    def test(self):
        index = 0
        next_idx = 20

        for index in range(10):
            next_idx, self.audio_features, self.audio_features_len, self.sparse_labels, wav_files = utils.next_batch(
                next_idx, 1, n_input, n_context, self.text_labels,
                self.wav_files, self.word_num_map,
                pick_deterministically=True)
            tf.logging.info('reading wav file: ' + wav_files[0].encode('utf-8'))
            tf.logging.info('starting speech recognition......')
            d, train_ler = self.sess.run(
                [self.decoded[0], self.label_err],
                feed_dict=self.get_feed_dict(dropout=1.0))
            dense_decoded = tf.sparse_tensor_to_dense(
                d, default_value=-1).eval(session=self.sess)
            dense_labels = utils.trans_tuple_to_texts_ch(self.sparse_labels,
                                                         self.words)

            for orig, decoded_array in zip(dense_labels, dense_decoded):
                # convert to string
                decoded_str = utils.trans_array_to_text_ch(decoded_array,
                                                           self.words)
                tf.logging.info('original text: {}'.format(
                    orig.encode('utf-8')))
                tf.logging.info('decoded text: {}'.format(
                    decoded_str.encode('utf-8')))
                break
        self.sess.close()

    def test_target_wav_file(self, wav_files, txt_labels):
        tf.logging.info('reading wav file: ' + wav_files[0].encode('utf-8'))
        tf.logging.info('starting speech recognition......')
        self.audio_features, self.audio_features_len, text_vector, text_vector_len = utils.get_audio_mfcc_features(
            None, wav_files, n_input, n_context, self.word_num_map, txt_labels)
        self.sparse_labels = utils.sparse_tuple_from(text_vector)
        d, train_ler = self.sess.run(
            [self.decoded[0], self.label_err],
            feed_dict=self.get_feed_dict(dropout=1.0))
        dense_decoded = tf.sparse_tensor_to_dense(
            d, default_value=-1).eval(session=self.sess)
        decoded_str = utils.trans_array_to_text_ch(dense_decoded[0], self.words)
        tf.logging.info('original text: {}'.format(txt_labels[0].encode('utf-8')))
        tf.logging.info('decoded text: {}'.format(decoded_str.encode('utf-8')))
        self.sess.close()

    def build_train(self):
        self.add_placeholders()
        self.bi_rnn_layer()
        self.loss()
        self.init_session()
        self.add_summary()
        self.train()

    def build_test(self):
        self.add_placeholders()
        self.bi_rnn_layer()
        self.loss()
        self.init_session()
        self.test()

    def build_target_wav_file_test(self, wav_files, txt_labels):
        self.add_placeholders()
        self.bi_rnn_layer()
        self.loss()
        self.init_session()
        self.test_target_wav_file(wav_files, txt_labels)

    def variable_on_device(self, name, shape, initializer):
        with tf.device('/gpu:0'):
            var = tf.get_variable(name=name, shape=shape,
                                  initializer=initializer)
        return var
from math import pow, sqrt
from datetime import datetime as dt
from config import Config
from sheet import PostUpdate
import re

# Logger instance used by the functions in this module
_LOGGER = Config.getLogger("influence")

# Determine if only looking for events today
_TODAY_ONLY = Config.getBoolean('events', 'today_only', True)
_ROUND_DISTANCE = Config.getInteger('events', 'distancedp', -1)

# Allow specific factions to be ignored
_IGNORE_FACTION_SET = set()
_IGNORE_FACTIONS = Config.getString('events', 'ignore_factions')
if _IGNORE_FACTIONS is not None and len(_IGNORE_FACTIONS.strip()) > 0:
    _IGNORE_FACTION_SET.update([faction.strip()
                                for faction in _IGNORE_FACTIONS.split(",")])

# Interested in activity around a specified location
_LOCATION_X = Config.getFloat('location', 'x')
_LOCATION_Y = Config.getFloat('location', 'y')
_LOCATION_Z = Config.getFloat('location', 'z')
_RANGE_SQUARED = Config.getFloat('location', 'distance')**2
_LOGGER.info("Configured for %.1f LY around %s",
             Config.getFloat('location', 'distance'),
             Config.getString('location', 'name'))

# Provide regular expressions to remove extraneous text specifiers
_MATCH_GOV = re.compile(r'\$government_(.*);', re.IGNORECASE)
_MATCH_SEC = re.compile(r'\$system_security_(.*);', re.IGNORECASE)
_MATCH_SEC2 = re.compile(r'\$GAlAXY_MAP_INFO_state_(.*);', re.IGNORECASE)
_MATCH_ECO = re.compile(r'\$economy_(.*);', re.IGNORECASE)
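# For illustration (hypothetical input strings): each pattern captures the
# bare specifier from the game-data wrapper in group 1, e.g.:
#
#   _MATCH_GOV.match('$government_Democracy;').group(1)  # -> 'Democracy'
#   _MATCH_ECO.match('$economy_Industrial;').group(1)    # -> 'Industrial'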
import argparse
from bottle import request, response, route, run
import bottle
from command import Command
from config import Config
from hardware import Hardware
import json
import logging
from time import time

parser = argparse.ArgumentParser()
c = Config()
parser.add_argument('--node', help='Node id', default='master',
                    choices=c.settings['nodes'].keys())
parser.add_argument('--loglevel', help='Log level for server & playback',
                    default='INFO')
args = parser.parse_args()
logging.getLogger().setLevel(args.loglevel)
hw = Hardware(c, args.node)
script = []


@route('/cmd', method='POST')
def start_show():
    global script
""" import json import requests import datetime import argparse import os import time import sys sys.path.append(os.path.abspath('./utils')) from config import Config from database import Database from ssh import SSH nebula_config = Config( os.path.abspath('./guard/config/nebula_config.yml')).config # web configurations HTTP_HOST = nebula_config.get("http").get("host") HTTP_PORT = nebula_config.get("http").get("port") HTTP_END_POINT = "http://%s:%s" % (HTTP_HOST, HTTP_PORT) HTTP_RESOURCE_AC_MANAGER_TASK_LIST = nebula_config.get("http").get( "resource").get("AccessControlMasterService").get("acManagerTaskList") HTTP_RESOURCE_AC_MANAGER_TASK_DELETE = nebula_config.get("http").get( "resource").get("AccessControlMasterService").get("acManagerTaskDelete") HTTP_RESOURCE_TD_MANAGER_TASK_LIST = nebula_config.get("http").get( "resource").get("TDComparisonService").get("tdManagerTaskList") HTTP_RESOURCE_TD_MANAGER_TASK_DELETE = nebula_config.get("http").get( "resource").get("TDComparisonService").get("tdManagerTaskDelete") HTTP_RESOURCE_VIDEO_PROCESS_TASK_LIST = nebula_config.get("http").get( "resource").get("VideoProcessService").get("videoProcessTaskList")
                str(water_pig_data.tag_id) + ", drink value: " + \
                str(water_pig_data.val)
            log('info', logstr)
            print(logstr)
            water_pig_data = None
        time.sleep(0.001)


water_rfid_thread = threading.Thread(target=water_rfid_listen)
water_rfid_thread.start()
water_main_thread = threading.Thread(target=water_main_listen)
water_main_thread.start()

if __name__ == "__main__":
    config = Config.get_instance()
    food_rfid = RFID(ip=config.FOOD_RFID_IP,
                     port=config.FOOD_RFID_PORT,
                     com=config.FOOD_RFID_COM,
                     name="FOOD_RFID")
    food_device = Weight_Device(ip=config.WEIGHT_IP,
                                port=config.WEIGHT_PORT,
                                com=config.WEIGHT_COM)
    water_rfid = RFID(ip=config.WATER_RFID_IP,
                      port=config.WATER_RFID_PORT,
                      com=config.WATER_RFID_COM,
                      name="WARTER_RFID")
    water_device = Water(ip=config.WATER_IP,
                         port=config.WATER_PORT,
def main():
    config = Config(DATA)
    controller = MainController(config)
    controller.run()
            trunc = s[-maxlen:]
        elif truncating == 'post':
            trunc = s[:maxlen]
        else:
            raise ValueError('Truncating type "%s" not understood' % truncating)

        # check that `trunc` has the expected shape
        trunc = np.asarray(trunc, dtype=dtype)
        if trunc.shape[1:] != sample_shape:
            raise ValueError(
                'Shape of sample %s of sequence at position %s is different '
                'from expected shape %s' % (trunc.shape[1:], idx, sample_shape))

        if padding == 'post':
            x[idx, :len(trunc)] = trunc
        elif padding == 'pre':
            x[idx, -len(trunc):] = trunc
        else:
            raise ValueError('Padding type "%s" not understood' % padding)
    return x, sequences_each_len


if __name__ == "__main__":
    conf = Config()
    get_wavs_lables(
        conf.get("FILE_DATA").wav_path,
        conf.get("FILE_DATA").label_file)
    print()
""" 利用resnet50提取图片的语义信息 并保存层results.pth """ from config import Config import tqdm import torch as t from torch.autograd import Variable import torchvision as tv from torch.utils import data import os from PIL import Image import numpy as np t.set_grad_enabled(False) opt = Config() IMAGENET_MEAN = [0.485, 0.456, 0.406] IMAGENET_STD = [0.229, 0.224, 0.225] normalize = tv.transforms.Normalize(mean=IMAGENET_MEAN, std=IMAGENET_STD) class CaptionDataset(data.Dataset): def __init__(self, caption_data_path): self.transforms = tv.transforms.Compose([ tv.transforms.Resize(256), tv.transforms.CenterCrop(256), tv.transforms.ToTensor(), normalize ]) data = t.load(caption_data_path)
import os
import subprocess
import sys
import tempfile
import time

from fabric.api import local, env, run, sudo

from config import Config

# environment variable prefix
prefix = Config.environment_variable_prefix()

# disable logging (as otherwise we would have to use the production setting
# for the log file location)
os.environ[prefix + 'WITH_LOGGING'] = '0'

# get variables
settings = Config.settings('production')
host = os.environ[prefix + 'DEPLOY_HOST']
deploy_user = os.environ.get(prefix + 'DEPLOY_USER', 'deploy')
deploy_user_group = os.environ.get(prefix + 'DEPLOY_USER_GROUP', deploy_user)
repository = os.environ[prefix + 'DEPLOY_GIT_REPOSITORY']
app_dir_name = os.environ[prefix + 'DEPLOY_APP_DIR_NAME']
web_user = os.environ.get(prefix + 'DEPLOY_WEB_USER', 'www-data')
web_user_group = os.environ.get(prefix + 'DEPLOY_WEB_USER_GROUP', 'www-data')
domain_name = os.environ.get(prefix + 'DEPLOY_DOMAIN_NAME', host)
bokeh_server_port = os.environ.get(prefix + 'DEPLOY_BOKEH_SERVER_PORT', 5100)
migration_tool = settings['migration_tool']
migration_sql_dir = settings['migration_sql_dir']
site_dir = '$HOME/' + app_dir_name
    # without data, or later with data.
    curprep.execute(prepstmt)

    # Insert 3 records
    names = ('Geert', 'Jan', 'Michel')
    for name in names:
        curprep.execute(prepstmt, (name,))
    cnx.commit()

    # We use a normal cursor to issue a SELECT
    output.append("Inserted data")
    cur.execute("SELECT id, name FROM names")
    for row in cur:
        output.append("%d | %s" % (row[0], row[1]))

    # Cleaning up, dropping the table again
    cur.execute(stmt_drop)
    cnx.close()
    return output


if __name__ == '__main__':
    # Configure MySQL login and database to use in config.py
    from config import Config
    config = Config.dbinfo().copy()
    out = main(config)
    print('\n'.join(out))
def launchClassification(tempFolderSerie, Classifmask, model, stats,
                         outputClassif, confmap, pathWd, pathConf, pixType):
    outputClassif = outputClassif.replace(".tif", "_TMP.tif")
    confmap = confmap.replace(".tif", "_TMP.tif")

    os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "5"
    featuresPath = Config(file(pathConf)).chain.featuresPath
    outputPath = Config(file(pathConf)).chain.outputPath
    tile = outputClassif.split("/")[-1].split("_")[1]
    userFeatPath = Config(file(pathConf)).chain.userFeatPath
    if userFeatPath == "None":
        userFeatPath = None

    AllRefl = sorted(
        fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "REFL.tif"))
    AllMask = sorted(
        fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "MASK.tif"))
    datesInterp = sorted(
        fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "DatesInterp"))
    realDates = sorted(
        fu.FileSearch_AND(featuresPath + "/" + tile + "/tmp/", True, "imagesDate"))

    tmpFolder = outputPath + "/TMPFOLDER_" + tile
    if not os.path.exists(tmpFolder):
        os.mkdir(tmpFolder)

    # Sensors
    S2 = Sensors.Sentinel_2("", Opath(tmpFolder), pathConf, "")
    L8 = Sensors.Landsat8("", Opath(tmpFolder), pathConf, "")
    L5 = Sensors.Landsat5("", Opath(tmpFolder), pathConf, "")
    # shutil.rmtree(tmpFolder, ignore_errors=True)
    SensorsList = [S2, L8, L5]

    # Gap-filling per sensor stack, then concatenation of the interpolated series.
    features = []
    concatSensors = otb.Registry.CreateApplication("ConcatenateImages")
    for refl, mask, interpDates, rawDates in zip(AllRefl, AllMask,
                                                 datesInterp, realDates):
        gapFill = otb.Registry.CreateApplication("ImageTimeSeriesGapFilling")
        nbDate = fu.getNbDateInTile(rawDates)
        nbReflBands = fu.getRasterNbands(refl)
        # The band count must be an exact multiple of the number of dates.
        if int(nbReflBands) % int(nbDate) != 0:
            raise Exception("invalid number of components per date "
                            "(not an integer): %s / %s" % (nbReflBands, nbDate))
        comp = int(nbReflBands) / int(nbDate)
        gapFill.SetParameterString("in", refl)
        gapFill.SetParameterString("mask", mask)
        gapFill.SetParameterString("comp", str(comp))
        gapFill.SetParameterString("it", "linear")
        gapFill.SetParameterString("id", rawDates)
        gapFill.SetParameterString("od", interpDates)
        # gapFill.SetParameterString("ram", "1024")
        gapFill.Execute()
        # featExtr = otb.Registry.CreateApplication("iota2FeatureExtraction")
        # featExtr.SetParameterInputImage("in", gapFill.GetParameterOutputImage("out"))
        # featExtr.SetParameterString("comp", str(comp))
        # for currentSensor in SensorsList:
        #     if currentSensor.name in refl:
        #         red = str(currentSensor.bands["BANDS"]["red"])
        #         nir = str(currentSensor.bands["BANDS"]["NIR"])
        #         swir = str(currentSensor.bands["BANDS"]["SWIR"])
        # featExtr.SetParameterString("red", red)
        # featExtr.SetParameterString("nir", nir)
        # featExtr.SetParameterString("swir", swir)
        # featExtr.Execute()
        # features.append(featExtr)
        concatSensors.AddImageToParameterInputImageList(
            "il", gapFill.GetParameterOutputImage("out"))
        features.append(gapFill)

    classifier = otb.Registry.CreateApplication("ImageClassifier")
    classifier.SetParameterString("mask", Classifmask)
    if stats:
        classifier.SetParameterString("imstat", stats)
    classifier.SetParameterString("out", outputClassif)
    classifier.SetParameterString("model", model)
    classifier.SetParameterString("confmap", confmap)
    classifier.SetParameterString("ram", "512")

    print "AllRefl"
    print AllRefl

    if len(AllRefl) > 1:
        concatSensors.Execute()
        allFeatures = concatSensors.GetParameterOutputImage("out")
    else:
        allFeatures = features[0].GetParameterOutputImage("out")

    if userFeatPath:
        print "Add user features"
        userFeat_arbo = Config(file(pathConf)).userFeat.arbo
        userFeat_pattern = (Config(file(pathConf)).userFeat.patterns).split(",")
        concatFeatures = otb.Registry.CreateApplication("ConcatenateImages")
        userFeatures = fu.getUserFeatInTile(userFeatPath, tile,
                                            userFeat_arbo, userFeat_pattern)
        concatFeatures.SetParameterStringList("il", userFeatures)
        concatFeatures.Execute()

        concatAllFeatures = otb.Registry.CreateApplication("ConcatenateImages")
        concatAllFeatures.AddImageToParameterInputImageList("il", allFeatures)
        concatAllFeatures.AddImageToParameterInputImageList(
            "il", concatFeatures.GetParameterOutputImage("out"))
        concatAllFeatures.Execute()
        allFeatures = concatAllFeatures.GetParameterOutputImage("out")

    classifier.SetParameterInputImage("in", allFeatures)
    classifier.ExecuteAndWriteOutput()

    # Zero out both outputs outside the classification mask.
    expr = "im2b1>=1?im1b1:0"
    cmd = 'otbcli_BandMath -il ' + outputClassif + ' ' + Classifmask + \
          ' -out ' + outputClassif.replace("_TMP.tif", ".tif") + \
          ' -exp "' + expr + '"'
    print cmd
    os.system(cmd)
    cmd = 'otbcli_BandMath -il ' + confmap + ' ' + Classifmask + \
          ' -out ' + confmap.replace("_TMP.tif", ".tif") + \
          ' -exp "' + expr + '"'
    print cmd
    os.system(cmd)

    if pathWd:
        shutil.copy(outputClassif.replace("_TMP.tif", ".tif"), outputPath + "/classif")
        shutil.copy(confmap.replace("_TMP.tif", ".tif"), outputPath + "/classif")
    os.remove(outputClassif)
    os.remove(confmap)
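# The two BandMath calls above shell out with the same masking expression:
# "im2b1>=1?im1b1:0" keeps a pixel of the first input only where the mask band
# is >= 1 and writes 0 elsewhere. A minimal sketch of that step factored into a
# helper; the name apply_mask and the use of subprocess.check_call in place of
# os.system are illustrative choices, not part of the original code.
import subprocess

def apply_mask(image, mask, output):
    # Keep image pixels inside the mask, zero them outside.
    expr = "im2b1>=1?im1b1:0"
    subprocess.check_call(["otbcli_BandMath",
                           "-il", image, mask,
                           "-out", output,
                           "-exp", expr])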
def __init__(self, cfg_path=None):
    self._cfg = Config(cfg_path or get_default_str_cfg_path())
    self._init_datasets()
def __init__(self, conf=None):
    # A Config() default in the signature would be evaluated only once, at
    # definition time, so all callers omitting conf would share one instance.
    Flexipy.__init__(self, config=conf if conf is not None else Config())
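# Why the constructor above builds Config() inside the body: a default
# argument is evaluated once, when the def statement runs, so every call that
# omits conf would receive the very same Config object. A minimal
# demonstration (bad() is an illustrative name, not part of the original code):
def bad(conf=Config()):  # Config() runs exactly once, at definition time
    return conf

assert bad() is bad()  # both calls return the identical shared instance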
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
import logging
from logging.handlers import RotatingFileHandler
import os

app = Flask(__name__)
conf = Config()
app.config.from_object(conf)
bootstrap = Bootstrap(app)
login = LoginManager(app)
login.login_view = 'login'
db = SQLAlchemy(app)
migrate = Migrate(app, db)

from app import routes, models, errors

if not app.debug:
    if not os.path.exists('logs'):
        os.mkdir('logs')
    file_handler = RotatingFileHandler('logs/RPGforumCMS.log', maxBytes=10240,
                                       backupCount=10)
    # Assumed continuation (the source breaks off mid-call): the conventional
    # wiring attaches a formatter and registers the handler on the app logger.
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.setLevel(logging.INFO)
    app.logger.info('RPGforumCMS startup')
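# app.config.from_object(conf) copies every UPPERCASE attribute of the Config
# instance into app.config and ignores lowercase names. A minimal sketch;
# ExampleConfig and its attribute names are illustrative, not taken from the
# project's actual config module:
class ExampleConfig(object):
    SECRET_KEY = 'dev'        # uppercase: copied into app.config
    LOG_MAX_BYTES = 10240     # uppercase: copied into app.config
    helper_value = 'ignored'  # lowercase: skipped by from_object

demo = Flask('demo')
demo.config.from_object(ExampleConfig())
assert demo.config['SECRET_KEY'] == 'dev'
assert 'helper_value' not in demo.config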
import cv2 as cv  # missing from the original imports; cv.VideoCapture is used below
from detect_ball import DetectBall
from config import Config
from draw import Draw
from preprocess import simplest_cb
from db_master import DbMaster
from enum import Enum
import imutils


class VideoType(Enum):
    FILE = 0  # read video from a file
    RTSP = 1  # read video from an RTSP link


config = Config()
videoType = VideoType(config.get('video_type'))

cap = cv.VideoCapture()
if videoType == VideoType.FILE:
    cap = cv.VideoCapture('./data/' + config.get('video_name'))
elif videoType == VideoType.RTSP:
    cap = cv.VideoCapture(config.get('rtsp_stream'))

if __name__ == '__main__':
    master_db = DbMaster(config.get('db_name'))
    if not master_db.table_exists(config.get('table_name')):
        master_db.create_table(
            config.get('table_name'), {
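# A minimal read loop for the capture configured above. The actual frame
# processing with DetectBall/Draw is project-specific and omitted here, so
# this is only a sketch of how the cv.VideoCapture handle would be consumed:
while cap.isOpened():
    grabbed, frame = cap.read()
    if not grabbed:
        break  # end of file, or the RTSP stream dropped
    frame = imutils.resize(frame, width=640)
    cv.imshow('frame', frame)
    if cv.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv.destroyAllWindows()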
# TODO: try largest lr before diverging
# TODO: check all plots rendered
# TODO: adamw

import argparse
import os
import shutil

import torch

# Project-local modules (Config, utils, T, LoadSignal, ToTensor, AudioEffect)
# are referenced below; their import lines are not part of this excerpt.

FOLDS = list(range(1, 5 + 1))
DEVICE = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

parser = argparse.ArgumentParser()
parser.add_argument('--config-path', type=str, required=True)
parser.add_argument('--experiment-path', type=str, default='./tf_log/frees')
parser.add_argument('--dataset-path', type=str, required=True)
parser.add_argument('--workers', type=int, default=os.cpu_count())
parser.add_argument('--fold', type=int, choices=FOLDS)
parser.add_argument('--debug', action='store_true')
args = parser.parse_args()

config = Config.from_yaml(args.config_path)
shutil.copy(args.config_path, utils.mkdir(args.experiment_path))

if config.aug.effects:
    extra_augs = [AudioEffect()]
else:
    extra_augs = []

if config.aug.type == 'pad':
    train_transform = T.Compose([
        LoadSignal(config.model.sample_rate),
        ToTensor(),
    ])
    eval_transform = T.Compose([
        LoadSignal(config.model.sample_rate),
        ToTensor(),
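# Config.from_yaml is used above but its implementation is not shown in this
# excerpt. A minimal sketch of such a loader, assuming it wraps the parsed
# YAML in objects with attribute access; AttrDict and config_from_yaml are
# illustrative names, not the project's actual API:
import yaml

class AttrDict(dict):
    __getattr__ = dict.__getitem__

def config_from_yaml(path):
    with open(path) as f:
        raw = yaml.safe_load(f)

    def wrap(node):
        # Recursively wrap nested dicts so config.aug.type-style access works.
        if isinstance(node, dict):
            return AttrDict({k: wrap(v) for k, v in node.items()})
        return node

    return wrap(raw)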
class Whost(SimpleGladeApp):
    def __init__(self, path="gnome-connection-manager.glade", root="wHost",
                 domain=DOMAIN_NAME, **kwargs):
        path = os.path.join(GLADE_DIR, path)
        SimpleGladeApp.__init__(self, path, root, domain, **kwargs)
        self.treeModel = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING,
                                       gobject.TYPE_STRING, gobject.TYPE_STRING)
        self.treeTunel.set_model(self.treeModel)
        column = gtk.TreeViewColumn(_("Local"), gtk.CellRendererText(), text=0)
        self.treeTunel.append_column(column)
        column = gtk.TreeViewColumn(_("Host"), gtk.CellRendererText(), text=1)
        self.treeTunel.append_column(column)
        column = gtk.TreeViewColumn(_("Remote"), gtk.CellRendererText(), text=2)
        self.treeTunel.append_column(column)

    def new(self):
        self.config = Config()
        self.cmbGroup = self.get_widget("cmbGroup")
        self.txtName = self.get_widget("txtName")
        self.txtDescription = self.get_widget("txtDescription")
        self.txtHost = self.get_widget("txtHost")
        self.cmbType = self.get_widget("cmbType")
        self.txtUser = self.get_widget("txtUser")
        self.txtPass = self.get_widget("txtPassword")
        self.txtPrivateKey = self.get_widget("txtPrivateKey")
        self.btnBrowse = self.get_widget("btnBrowse")
        self.txtPort = self.get_widget("txtPort")
        self.cmbGroup.get_model().clear()
        for group in self.config.groups:
            self.cmbGroup.get_model().append([group])
        self.isNew = True
        self.chkDynamic = self.get_widget("chkDynamic")
        self.txtLocalPort = self.get_widget("txtLocalPort")
        self.txtRemoteHost = self.get_widget("txtRemoteHost")
        self.txtRemotePort = self.get_widget("txtRemotePort")
        self.treeTunel = self.get_widget("treeTunel")
        self.txtCommands = self.get_widget("txtCommands")
        self.chkCommands = self.get_widget("chkCommands")
        buf = self.txtCommands.get_buffer()
        buf.create_tag('DELAY1', style=pango.STYLE_ITALIC, foreground='darkgray')
        buf.create_tag('DELAY2', style=pango.STYLE_ITALIC, foreground='cadetblue')
        buf.connect("changed", self.update_texttags)
        self.chkKeepAlive = self.get_widget("chkKeepAlive")
        self.txtKeepAlive = self.get_widget("txtKeepAlive")
        self.btnFColor = self.get_widget("btnFColor")
        self.btnBColor = self.get_widget("btnBColor")
        self.chkX11 = self.get_widget("chkX11")
        self.chkAgent = self.get_widget("chkAgent")
        self.chkCompression = self.get_widget("chkCompression")
        self.txtCompressionLevel = self.get_widget("txtCompressionLevel")
        self.txtExtraParams = self.get_widget("txtExtraParams")
        self.chkLogging = self.get_widget("chkLogging")
        self.cmbBackspace = self.get_widget("cmbBackspace")
        self.cmbDelete = self.get_widget("cmbDelete")
        self.cmbType.set_active(0)
        self.cmbBackspace.set_active(0)
        self.cmbDelete.set_active(0)

    def init(self, group, host=None):
        self.cmbGroup.get_child().set_text(group)
        if host is None:
            self.isNew = True
            return
        self.isNew = False
        self.oldGroup = group
        self.txtName.set_text(host.name)
        self.oldName = host.name
        self.txtDescription.set_text(host.description)
        self.txtHost.set_text(host.host)
        i = self.cmbType.get_model().get_iter_first()
        while i is not None:
            if host.type == self.cmbType.get_model()[i][0]:
                self.cmbType.set_active_iter(i)
                break
            else:
                i = self.cmbType.get_model().iter_next(i)
        self.txtUser.set_text(host.user)
        self.txtPass.set_text(host.password)
        self.txtPrivateKey.set_text(host.private_key)
        self.txtPort.set_text(host.port)
        for t in host.tunnel:
            if t != "":
                tun = t.split(":")
                tun.append(t)
                self.treeModel.append(tun)
        self.txtCommands.set_sensitive(False)
        self.chkCommands.set_active(False)
        if host.commands != '' and host.commands is not None:
            self.txtCommands.get_buffer().set_text(host.commands)
            self.txtCommands.set_sensitive(True)
            self.chkCommands.set_active(True)
        use_keep_alive = host.keep_alive not in ('', '0', None)
        self.txtKeepAlive.set_sensitive(use_keep_alive)
        self.chkKeepAlive.set_active(use_keep_alive)
        self.txtKeepAlive.set_text(host.keep_alive)
        if host.font_color not in ('', None) and host.back_color not in ('', None):
            self.get_widget("chkDefaultColors").set_active(False)
            self.btnFColor.set_sensitive(True)
            self.btnBColor.set_sensitive(True)
            fcolor = host.font_color
            bcolor = host.back_color
        else:
            self.get_widget("chkDefaultColors").set_active(True)
            self.btnFColor.set_sensitive(False)
            self.btnBColor.set_sensitive(False)
            fcolor = "#FFFFFF"
            bcolor = "#000000"
        self.btnFColor.set_color(gtk.gdk.Color(fcolor))
        self.btnBColor.set_color(gtk.gdk.Color(bcolor))
        m = self.btnFColor.get_colormap()
        color = m.alloc_color("red")
        style = self.btnFColor.get_style().copy()
        style.bg[gtk.STATE_NORMAL] = color
        self.btnFColor.set_style(style)
        self.btnFColor.queue_draw()
        self.btnFColor.selected_color = fcolor
        self.btnBColor.selected_color = bcolor
        self.chkX11.set_active(host.x11)
        self.chkAgent.set_active(host.agent)
        self.chkCompression.set_active(host.compression)
        self.txtCompressionLevel.set_text(host.compressionLevel)
        self.txtExtraParams.set_text(host.extra_params)
        self.chkLogging.set_active(host.log)
        self.cmbBackspace.set_active(host.backspace_key)
        self.cmbDelete.set_active(host.delete_key)
        self.update_texttags()

    def update_texttags(self, *args):
        buf = self.txtCommands.get_buffer()
        text_iter = buf.get_start_iter()
        buf.remove_all_tags(text_iter, buf.get_end_iter())
        while True:
            found = text_iter.forward_search("##D=", 0, None)
            if not found:
                break
            start, end = found
            n = end.copy()
            end.forward_line()
            if buf.get_text(n, end).rstrip().isdigit():
                buf.apply_tag_by_name("DELAY1", start, n)
                buf.apply_tag_by_name("DELAY2", n, end)
            text_iter = end

    def on_cancelbutton1_clicked(self, widget, *args):
        self.get_widget("wHost").destroy()

    def on_okbutton1_clicked(self, widget, *args):
        group = self.cmbGroup.get_active_text().strip()
        name = self.txtName.get_text().strip()
        description = self.txtDescription.get_text().strip()
        host = self.txtHost.get_text().strip()
        ctype = self.cmbType.get_active_text().strip()
        user = self.txtUser.get_text().strip()
        password = self.txtPass.get_text().strip()
        private_key = self.txtPrivateKey.get_text().strip()
        port = self.txtPort.get_text().strip()
        buf = self.txtCommands.get_buffer()
        commands = buf.get_text(buf.get_start_iter(),
                                buf.get_end_iter()).strip() \
            if self.chkCommands.get_active() else ""
        keepalive = self.txtKeepAlive.get_text().strip()
        if self.get_widget("chkDefaultColors").get_active():
            fcolor = ""
            bcolor = ""
        else:
            fcolor = self.btnFColor.selected_color
            bcolor = self.btnBColor.selected_color
        x11 = self.chkX11.get_active()
        agent = self.chkAgent.get_active()
        compression = self.chkCompression.get_active()
        compressionLevel = self.txtCompressionLevel.get_text().strip()
        extra_params = self.txtExtraParams.get_text()
        log = self.chkLogging.get_active()
        backspace_key = self.cmbBackspace.get_active()
        delete_key = self.cmbDelete.get_active()
        if ctype == "":
            ctype = "ssh"
        tunnel = ""
        if ctype == "ssh":
            for x in self.treeModel:
                tunnel = '%s,%s' % (x[3], tunnel)
            tunnel = tunnel[:-1]
        # Validate input
        if group == "" or name == "" or (host == "" and ctype != 'local'):
            msgbox(_("Fields group, name and host are required"))
            return
        if not (port and port.isdigit() and 1 <= int(port) <= 65535):
            msgbox(_("Invalid port"))
            return
        host = Host(group, name, description, host, user, password,
                    private_key, port, tunnel, ctype, commands, keepalive,
                    fcolor, bcolor, x11, agent, compression, compressionLevel,
                    extra_params, log, backspace_key, delete_key)
        try:
            # Save
            if group not in self.config.groups:
                self.config.groups[group] = []
            if self.isNew:
                for h in self.config.groups[group]:
                    if h.name == name:
                        msgbox("%s [%s] %s [%s]" % (_("Host name"), name,
                               _("already exists for group"), group))
                        return
                # Add the host to its group
                self.config.groups[group].append(host)
            else:
                if self.oldGroup != group:
                    # Make sure the name is not already taken in the new group
                    if group not in self.config.groups:
                        self.config.groups[group] = [host]
                    else:
                        for h in self.config.groups[group]:
                            if h.name == name:
                                msgbox("%s [%s] %s [%s]" % (_("Host name"), name,
                                       _("already exists for group"), group))
                                return
                        self.config.groups[group].append(host)
                    for h in self.config.groups[self.oldGroup]:
                        if h.name == self.oldName:
                            self.config.groups[self.oldGroup].remove(h)
                            break
                else:
                    if self.oldName != name:
                        for h in self.config.groups[self.oldGroup]:
                            if h.name == name:
                                msgbox("%s [%s] %s [%s]" % (_("Host name"), name,
                                       _("already exists for group"), group))
                                return
                    for h in self.config.groups[self.oldGroup]:
                        if h.name == self.oldName:
                            index = self.config.groups[self.oldGroup].index(h)
                            self.config.groups[self.oldGroup][index] = host
                            break
        except Exception:
            traceback.print_exc()
            msgbox("%s [%s]" % (_("Error saving host. Description"),
                                sys.exc_info()[1]))
        self.config.writeConfig()
        self.get_widget("wHost").destroy()

    def on_cmbType_changed(self, widget, *args):
        is_local = widget.get_active_text() == "local"
        self.txtUser.set_sensitive(not is_local)
        self.txtPassword.set_sensitive(not is_local)
        self.txtPort.set_sensitive(not is_local)
        self.txtHost.set_sensitive(not is_local)
        self.txtExtraParams.set_sensitive(not is_local)
        if widget.get_active_text() == "ssh":
            self.get_widget("table2").show_all()
            self.txtKeepAlive.set_sensitive(True)
            self.chkKeepAlive.set_sensitive(True)
            self.chkX11.set_sensitive(True)
            self.chkAgent.set_sensitive(True)
            self.chkCompression.set_sensitive(True)
            self.txtCompressionLevel.set_sensitive(
                self.chkCompression.get_active())
            self.txtPrivateKey.set_sensitive(True)
            self.btnBrowse.set_sensitive(True)
            port = "22"
        else:
            self.get_widget("table2").hide_all()
            self.txtKeepAlive.set_text('0')
            self.txtKeepAlive.set_sensitive(False)
            self.chkKeepAlive.set_sensitive(False)
            self.chkX11.set_sensitive(False)
            self.chkAgent.set_sensitive(False)
            self.chkCompression.set_sensitive(False)
            self.txtCompressionLevel.set_sensitive(False)
            self.txtPrivateKey.set_sensitive(False)
            self.btnBrowse.set_sensitive(False)
            port = "23"
        if is_local:
            self.txtUser.set_text('')
            self.txtPassword.set_text('')
            self.txtPort.set_text('')
            self.txtHost.set_text('')
        self.txtPort.set_text(port)

    def on_chkKeepAlive_toggled(self, widget, *args):
        if widget.get_active():
            self.txtKeepAlive.set_text('120')
        else:
            self.txtKeepAlive.set_text('0')
        self.txtKeepAlive.set_sensitive(widget.get_active())

    def on_chkCompression_toggled(self, widget, *args):
        self.txtCompressionLevel.set_text('')
        self.txtCompressionLevel.set_sensitive(widget.get_active())

    def on_chkDynamic_toggled(self, widget, *args):
        self.txtRemoteHost.set_sensitive(not widget.get_active())
        self.txtRemotePort.set_sensitive(not widget.get_active())

    def on_btnAdd_clicked(self, widget, *args):
        local = self.txtLocalPort.get_text().strip()
        host = self.txtRemoteHost.get_text().strip()
        remote = self.txtRemotePort.get_text().strip()
        if self.chkDynamic.get_active():
            host = '*'
            remote = '*'
        # Validate the tunnel fields
        if host == "":
            msgbox(_("Enter remote host"))
            return
        for x in self.treeModel:
            if x[0] == local:
                msgbox(_("Local port already assigned"))
                return
        self.treeModel.append(
            [local, host, remote, '%s:%s:%s' % (local, host, remote)])

    def on_btnDel_clicked(self, widget, *args):
        if self.treeTunel.get_selection().get_selected()[1] is not None:
            self.treeModel.remove(
                self.treeTunel.get_selection().get_selected()[1])

    def on_chkCommands_toggled(self, widget, *args):
        self.txtCommands.set_sensitive(widget.get_active())

    def on_btnBColor_clicked(self, widget, *args):
        widget.selected_color = widget.get_color().to_string()

    def on_chkDefaultColors_toggled(self, widget, *args):
        self.btnFColor.set_sensitive(not widget.get_active())
        self.btnBColor.set_sensitive(not widget.get_active())

    def on_btnFColor_clicked(self, widget, *args):
        widget.selected_color = widget.get_color().to_string()

    def on_btnBrowse_clicked(self, widget, *args):
        filename = show_open_dialog(parent=self.main_widget, title=_("Open"),
                                    action=gtk.FILE_CHOOSER_ACTION_OPEN)
        if filename is not None:
            self.txtPrivateKey.set_text(filename)
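# Whost stores each tunnel row as "local:host:remote" and joins the rows with
# commas when saving (see on_okbutton1_clicked). A minimal round-trip sketch;
# pack_tunnels and unpack_tunnels are illustrative helpers, not part of the class:
def pack_tunnels(rows):
    # rows: iterable of (local, host, remote) triples
    return ','.join('%s:%s:%s' % row for row in rows)

def unpack_tunnels(value):
    return [tuple(item.split(':')) for item in value.split(',') if item]

assert unpack_tunnels(pack_tunnels([('5000', 'db', '5432')])) == [('5000', 'db', '5432')]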