def test_option_values(self):
    """Test for ancestor, associated and module options.

    Exercises DiaDefGenerator option resolution: default config, a
    class-filtered config, the all_* overrides, and explicit levels.
    """
    # Default config: no ancestor/associated lookup, no module names.
    handler = DiadefsHandler(Config())
    df_h = DiaDefGenerator(Linker(project), handler)
    # Config restricted to a single class enables full lookup.
    cl_config = Config()
    cl_config.classes = ['Specialization']
    cl_h = DiaDefGenerator(Linker(project), DiadefsHandler(cl_config))
    # assertEquals is a deprecated alias removed in modern unittest;
    # use assertEqual throughout.
    self.assertEqual((0, 0), df_h._get_levels())
    self.assertEqual(False, df_h.module_names)
    self.assertEqual((-1, -1), cl_h._get_levels())
    self.assertEqual(True, cl_h.module_names)
    # all_ancestors/all_associated force unlimited (-1) levels.
    for hndl in [df_h, cl_h]:
        hndl.config.all_ancestors = True
        hndl.config.all_associated = True
        hndl.config.module_names = True
        hndl._set_default_options()
        self.assertEqual((-1, -1), hndl._get_levels())
        self.assertEqual(True, hndl.module_names)
    # Rebuild fresh generators to check explicit numeric levels.
    handler = DiadefsHandler(Config())
    df_h = DiaDefGenerator(Linker(project), handler)
    cl_config = Config()
    cl_config.classes = ['Specialization']
    cl_h = DiaDefGenerator(Linker(project), DiadefsHandler(cl_config))
    for hndl in [df_h, cl_h]:
        hndl.config.show_ancestors = 2
        hndl.config.show_associated = 1
        hndl.config.module_names = False
        hndl._set_default_options()
        self.assertEqual((2, 1), hndl._get_levels())
        self.assertEqual(False, hndl.module_names)
def main():
    """Wire up the WSGI routing table, ensure a Config row exists, and serve."""
    routes = [
        (r"/", HomePageHandler),
        (r"/login", LoginHandler),
        (r"/logout", LogoutHandler),
        (r"/entries", blog.EntryIndexHandler),
        (r"/feed", blog.FeedHandler),
        (r"/entry/([^/]+)", blog.EntryHandler),
        (r"/entry/([^/]+)/edit", blog.NewEntryHandler),
        (r"/entry/([^/]+)/del", blog.EntryDeleteHandler),
        (r"/([^/]+)/edit", blog.NewEntryHandler),
        (r"/([^/]+)/del", blog.EntryDeleteHandler),
        (r"/topic/([^/]+)", blog.TagHandler),
        (r"/admin", admin.AdminHandler),
        (r"/admin/new", blog.NewEntryHandler),
        (r"/admin/config", admin.ConfigHandler),
        (r"/admin/entrylist", admin.EntryListHandler),
        # (r"/shooin/([^/]+)", shooin.ShooinHandler),
        (r"/([^/]+)", blog.PageHandler),
    ]
    app = webapp.WSGIApplication(routes, debug=True)
    # Fetch the singleton Config row, creating it on first deploy.
    query = Config.all()
    if query.count() > 0:
        config = query.fetch(1)[0]
    else:
        config = Config(title="t-ashbha", url="http://ashwin-bharambe.appspot.com")
        config.put()
    wsgiref.handlers.CGIHandler().run(app)
def main():
    """Wire up the WSGI routing table, ensure a Config row exists, and serve."""
    application = webapp.WSGIApplication([
        (r"/", HomePageHandler),
        (r"/login", LoginHandler),
        (r"/logout", LogoutHandler),
        (r"/twits", blog.TwitHandler),
        (r"/entries", blog.EntryIndexHandler),
        (r"/feed", blog.FeedHandler),
        (r"/entry/([^/]+)", blog.EntryHandler),
        (r"/entry/([^/]+)/edit", blog.NewEntryHandler),
        (r"/entry/([^/]+)/del", blog.EntryDeleteHandler),
        (r"/([^/]+)/edit", blog.NewEntryHandler),
        (r"/([^/]+)/del", blog.EntryDeleteHandler),
        (r"/topic/([^/]+)", blog.TagHandler),
        (r"/admin", admin.AdminHandler),
        (r"/admin/new", blog.NewEntryHandler),
        (r"/admin/config", admin.ConfigHandler),
        (r"/admin/entrylist", admin.EntryListHandler),
        (r"/([^/]+)", blog.PageHandler),
    ], debug=True)
    # Fetch the singleton Config row, creating it on first deploy.
    config = Config.all()
    if config.count() > 0:
        config = config.fetch(1)[0]
    else:
        # Bug fix: the fresh entity was previously bound to `config1`,
        # leaving `config` pointing at the (empty) query object.
        config = Config(title="Anodyne Blog")
        config.put()
    wsgiref.handlers.CGIHandler().run(application)
class TestConfig:
    """Checks the [testing] section of the shared config file."""

    def setup_method(self, method):
        # Fresh Config per test so no state leaks between cases.
        self.config = Config(CONFIG_FILE)

    def test_script(self):
        assert self.config.get('testing', 'script') == 'testing.sh'

    def test_mails(self):
        expected = '[email protected]\[email protected]'
        assert self.config.get('testing', 'mails') == expected
def post(self):
    """Persist title/disqus from the submitted form into the Config row, then go home."""
    cfg = Config.all().fetch(1)[0]
    cfg.title = self.request.get("title")
    cfg.disqus = self.request.get("disqus")
    cfg.put()
    self.redirect('/')
def __init__(self):
    """Build the main window: log file, config, history, translations, UI, addons."""
    QtGui.QMainWindow.__init__(self)
    # Truncate the log from any previous session, then reopen in append mode.
    self.logfile = open(BASEPATH + "/log.txt", "w")
    self.logfile.write("")
    self.logfile.close()
    self.logfile = open(BASEPATH + "/log.txt", "a")
    self.addons = []
    # load the config
    self.config = Config(CONFIGPATH)
    if not os.path.exists(CONFIGPATH):
        # First run: create default settings before loading.
        self.initializeConfig()
    self.config.loadData()
    # History(100): presumably a bounded undo/redo buffer -- TODO confirm
    self.history = History(100)
    self.translations = translations.Translations(self.config["language"])
    self.project = widgets.ProjectExplorer(self, "")
    self.projectPath = ""
    self.baseModClass = None
    self.guiClass = None
    self.initUI()
    # Addons load last so they can rely on a fully constructed UI.
    self.initializeAddons()
class TestConfig:
    """Checks the [settings] and [mailer] sections of the shared config file."""

    def setup_method(self, method):
        # Fresh Config per test so no state leaks between cases.
        self.config = Config(CONFIG_FILE)

    def test_dispatch_path(self):
        assert self.config.get('settings', 'dispatch_path') == './'

    def test_default_mails(self):
        expected = '[email protected]\[email protected]'
        assert self.config.get('settings', 'default_mails') == expected

    def test_mailer(self):
        # All values come back as strings from the INI parser, including the port.
        get = self.config.get
        assert get('mailer', 'host') == 'mailer.smtp'
        assert get('mailer', 'port') == '20'
        assert get('mailer', 'username') == 'usermail'
        assert get('mailer', 'password') == 'password'
async def create_entry(config_entry, datas, ip):
    """POST a new DNS record for the configured zone to the Cloudflare-style API.

    config_entry: mapping with at least a 'name' key (used for logging).
    datas: request payload forwarded verbatim to the API.
    ip: address being published; only used in the log line.
    """
    async with aiohttp.ClientSession() as session:
        post = await session.post(api_url + Config.zoneid() + "/dns_records/",
                                  data=datas,
                                  headers=HEADERS)
        # Only success is logged; non-200 responses are silently dropped.
        if int(post.status) == 200:
            print(
                f"Create Entry for {config_entry['name']} with IP {ip}. Status : {post.status}"
            )
def delete_newrelic_server_monitor(config_dir):
    """Fabric task: purge the New Relic system monitor from the remote host."""
    # NOTE(review): instance is discarded -- presumably Config() configures
    # fabric env state as a side effect; confirm before removing.
    Config(config_dir)
    apt_sources_file = '/etc/apt/sources.list.d/newrelic.list'
    server_monitor_package = 'newrelic-sysmond'
    # Stop the daemon before removing the package and its apt source.
    stop_newrelic_server_monitor()
    sudo('apt-get purge {} -y'.format(server_monitor_package), warn_only=True)
    sudo('rm {}'.format(apt_sources_file), warn_only=True)
    sudo('apt-get update', quiet=True)
def plot_error(name):
    """Plot RMSE and MAE curves for every configured method and save them as PNGs.

    Reads the module-level `data` dict keyed by (name, method); index 0 holds
    the x values, 1 the RMSE series, 2 the MAE series.
    """
    def _plot_metric(series_index, label, suffix):
        # One figure per metric: a curve per method, then save and close.
        fig, ax = plt.subplots()
        for method in Config.get()["methods"]:
            ax.plot(data[(name, method)][0],
                    data[(name, method)][series_index],
                    label=method)
        ax.legend(loc="upper right")
        ax.set_title("{} for {}".format(label, name))
        plt.savefig("results/img/{}_{}.png".format(name, suffix))
        plt.close()

    # Previously two copy-pasted blocks differing only in series index and names.
    _plot_metric(1, "RMSE", "rmse")
    _plot_metric(2, "MAE", "mae")
def initialize(self, coords):
    """Bind the shared Config and expose the coords accessors by name."""
    self.config = Config()
    self.coords = coords
    # Dispatch table: accessor name -> bound method on the coords object.
    accessor_names = ('get_raw_frames', 'get_sat_info',
                      'get_pos_info', 'get_all_data')
    self.methods = {name: getattr(coords, name) for name in accessor_names}
def main(args):
    """Restore the best SAN checkpoint and evaluate it on the requested split.

    args: namespace with dataset_config, model_config, epochs, batch_size,
    learning_rate and data (name of the split attribute on dataset_config).
    """
    dataset_config = Config(args.dataset_config)
    model_config = Config(args.model_config)
    # Experiment directory encodes the hyperparameters used at training time.
    exp_dir = Path("experiments") / model_config.type
    exp_dir = exp_dir.joinpath(
        f"epochs_{args.epochs}_batch_size_{args.batch_size}_learning_rate_{args.learning_rate}"
    )
    tokenizer = get_tokenizer(dataset_config)
    # model (restore)
    checkpoint_manager = CheckpointManager(exp_dir)
    checkpoint = checkpoint_manager.load_checkpoint("best.tar")
    model = SAN(num_classes=model_config.num_classes,
                lstm_hidden_dim=model_config.lstm_hidden_dim,
                da=model_config.da,
                r=model_config.r,
                hidden_dim=model_config.hidden_dim,
                vocab=tokenizer.vocab)
    model.load_state_dict(checkpoint["model_state_dict"])
    # evaluation
    summary_manager = SummaryManager(exp_dir)
    filepath = getattr(dataset_config, args.data)
    ds = Corpus(filepath, tokenizer.split_and_transform)
    dl = DataLoader(ds, batch_size=args.batch_size, num_workers=4,
                    collate_fn=batchify)
    device = torch.device(
        "cuda") if torch.cuda.is_available() else torch.device("cpu")
    model.to(device)
    summary = evaluate(model, dl, {
        "loss": nn.CrossEntropyLoss(),
        "acc": acc
    }, device)
    # Merge this split's results into the persisted summary file.
    summary_manager.load("summary.json")
    summary_manager.update({f"{args.data}": summary})
    summary_manager.save("summary.json")
    print(f"loss: {summary['loss']:.3f}, acc: {summary['acc']:.2%}")
def run_fist_nvt_nve_extract(dict_):
    # Python 2 pipeline step: build the organic-crystal structure, run an NVT
    # equilibration then an NVE production with CP2K, and extract
    # velocities/coordinates for a subsequent FSSH run.
    inputs = dict_.get('INPUTS_DICT')
    inputs.update(dict_)
    paths = dict_.get('PATHS_DICT')
    system = inputs.get('SYSTEM')
    #dict_state_temp = dict_.get('DICT_STATE_TEMP')
    #inputs.update(dict_state_temp[dict_['FIRST_ADIABAT']])
    print "1. CONSTRUCT THE ORGANIC CRYSTAL."
    # Structure/Config implementations are selected by the SYSTEM keyword.
    if system == 'CRYSTAL':
        from utils import OSCluster as Structure
        from utils import CP2KOS as Config
    elif system == 'SOLVENT':
        from utils import OSwSolvent as Structure
        from utils import CP2KOSwSolvent as Config
    elif system == 'PBC_CRYSTAL':
        from utils import OSCluster as Structure
        from utils import CP2KOSCrystal as Config
    else:
        # Unknown system: abort rather than guess.
        sys.exit()
    # Output directory name optionally embeds the target density.
    if dict_.get('DENSITY'):
        output = Dir('output/config-%s-%s' % (
            dict_['DENSITY'], paths['bucket'].split('/')[-1]), paths)
    else:
        output = Dir('output/config-%s' % (
            paths['bucket'].split('/')[-1]), paths)
    output.rm_mkdir()
    paths.update({'output': output.path})
    structure = Structure(inputs, paths)
    ndir = dict_['NDIR']
    print "2. RUN CP2K"
    print "GO FOR RUN %d" % ndir
    # NVT equilibration (NEQ steps) seeds the NVE production (NPROD steps)
    # via the RESTART handle.
    config_nvt = Config(inputs, paths, ENSEMBLE='NVT', STEPS=inputs['NEQ'],
                        RESTART=None, TEMPLATE_FILE=inputs['TEMPLATE_FILE_NVT'])
    ndir = config_nvt.run(ndir)
    config_nve = Config(inputs, paths, ENSEMBLE='NVE', STEPS=inputs['NPROD'],
                        RESTART=config_nvt.ndir, TEMPLATE_FILE=inputs['TEMPLATE_FILE_NVE'])
    ndir = config_nve.run(ndir)
    # Only harvest FSSH inputs when the NVE run directory actually exists.
    if os.path.exists('run-%d' % (config_nve.ndir)):
        fssh_parcel = FSSHParcel(inputs, paths)
        fssh_parcel.gather_vel_coord(config_nve.ndir, output_path=output.path)
        fssh_parcel.create_system_info(output_path=output.path)
def main(cfg, comet=False): cfg = Config(cfg) # comet-ml setting if comet: experiment = Experiment(api_key=cfg.api_key, project_name=cfg.project_name, workspace=cfg.workspace) experiment.log_parameters(cfg) else: experiment = None # device and dataset setting device = (torch.device(f'cuda:{cfg.gpu_id}') if torch.cuda.is_available() and cfg.gpu_id >= 0 else torch.device('cpu')) dataset = MCDataset(cfg.root, cfg.dataset_name) data = dataset[0].to(device) # add some params to config cfg.num_nodes = dataset.num_nodes cfg.num_relations = dataset.num_relations cfg.num_users = int(data.num_users) # set and init model model = GAE(cfg, random_init).to(device) model.apply(init_xavier) # optimizer optimizer = torch.optim.Adam( model.parameters(), lr=cfg.lr, weight_decay=cfg.weight_decay, ) # train trainer = Trainer( model, dataset, data, calc_rmse, optimizer, experiment, ) trainer.training(cfg.epochs)
def main(args):
    """Restore the best BERT sentence classifier and evaluate it on a split.

    args: namespace with dataset_config, model_config, epochs, batch_size,
    learning_rate, weight_decay and data (name of the split attribute).
    """
    dataset_config = Config(args.dataset_config)
    model_config = Config(args.model_config)
    # Pretrained-model metadata lives beside the code, keyed by model type.
    ptr_config_info = Config(f"conf/pretrained/{model_config.type}.json")
    # Experiment directory encodes the hyperparameters used at training time.
    exp_dir = Path("experiments") / model_config.type
    exp_dir = exp_dir.joinpath(
        f"epochs_{args.epochs}_batch_size_{args.batch_size}_learning_rate_{args.learning_rate}"
        f"_weight_decay_{args.weight_decay}")
    preprocessor = get_preprocessor(ptr_config_info, model_config)
    with open(ptr_config_info.config, mode="r") as io:
        ptr_config = json.load(io)
    # model (restore)
    checkpoint_manager = CheckpointManager(exp_dir)
    checkpoint = checkpoint_manager.load_checkpoint('best.tar')
    config = BertConfig()
    config.update(ptr_config)
    model = SentenceClassifier(config,
                               num_classes=model_config.num_classes,
                               vocab=preprocessor.vocab)
    model.load_state_dict(checkpoint['model_state_dict'])
    # evaluation
    filepath = getattr(dataset_config, args.data)
    ds = Corpus(filepath, preprocessor.preprocess)
    dl = DataLoader(ds, batch_size=args.batch_size, num_workers=4)
    device = torch.device(
        'cuda') if torch.cuda.is_available() else torch.device('cpu')
    model.to(device)
    summary_manager = SummaryManager(exp_dir)
    summary = evaluate(model, dl, {
        'loss': nn.CrossEntropyLoss(),
        'acc': acc
    }, device)
    # Merge this split's results into the persisted summary file.
    summary_manager.load('summary.json')
    summary_manager.update({'{}'.format(args.data): summary})
    summary_manager.save('summary.json')
    print('loss: {:.3f}, acc: {:.2%}'.format(summary['loss'], summary['acc']))
def evaluate_embeddings(args):
    """Load a trained VAE for args.dataset and score its embeddings on the
    target and sensitive-attribute classification tasks.

    Returns (acc_target, acc_sens).
    Raises ValueError for a dataset without known hyperparameters.
    """
    torch.manual_seed(0)
    dataset = args.dataset
    # (batch_size, z_dim) per dataset. Previously a chain of independent
    # `if`s that left both names undefined (NameError) for unknown datasets.
    hyperparams = {
        "yaleb": (16, 100),
        "adult": (64, 2),
        "german": (64, 2),
    }
    try:
        batch_size, z_dim = hyperparams[dataset]
    except KeyError:
        raise ValueError("Unknown dataset: {!r}".format(dataset))
    eval_on_test = True
    logger = None
    args.batch_size = batch_size
    args.z_dim = z_dim
    config = Config(args)
    # Restore the pickled VAE saved by the training script.
    fname = f"/models/{dataset}_vae"
    with open(str(PROJECT_DIR) + fname, "rb") as file:
        vae = torch.load(file)

    @torch.no_grad()
    def get_embs(X):
        # Embedding = encoder mean; no sampling at evaluation time.
        return vae._enc_mu(vae.encoder.forward(X))

    eval_manager_target, eval_manager_sens = get_evaluation_managers(
        config, get_embs)
    eval_manager_target.fit()
    eval_manager_sens.fit()
    with torch.no_grad():
        # If we have wandb logger, or we return results,
        # we want to have the report as a dict.
        return_results = True
        output_dict = logger is not None or return_results
        _, report_target, acc_target = eval_manager_target.evaluate(
            output_dict)
        _, report_sens, acc_sens = eval_manager_sens.evaluate(output_dict)
        print(report_target)
        print(report_sens)
    if logger is not None:
        logger.log_metrics({
            "target_classification_report": report_target,
            "sens_classification_report": report_sens,
        })
    print("~ evaluation results ~~~~~~~~~~~~~")
    print("best target acc:", round(acc_target, 2))
    print("best sens acc: ", round(acc_sens, 2))
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    return acc_target, acc_sens
def prep_hypothesis_test(args):
    # Python 2. Prepare input for the hypothesis-test framework:
    # - when schism estimates cellularity, write <prefix>.mutation.cellularity;
    # - regardless of estimator, write <prefix>.cluster.cellularity when
    #   cluster definitions come from the config (i.e. cluster_analysis != 'schism').
    config = Config(args.config_file)
    if config.cellularity_estimation == 'schism':
        mutationReadFile = os.path.join(config.working_dir,
                                        config.mutation_raw_input)
        mutData = read_mutation_counts(mutationReadFile)
        # tumor sample purity, scale to [0,1] interval if
        # the user reported percentages
        purity = config.tumor_sample_purity
        for sample in purity.keys():
            if purity[sample] > 1.0:
                purity[sample] = purity[sample] / 100.0
        # Purity and read counts must cover exactly the same samples.
        if sorted(purity.keys()) != sorted(mutData.keys()):
            # NOTE(review): missing space between "is" and "available" in the
            # concatenated message below (kept as-is; behavior-affecting text).
            print >>sys.stderr, 'Mismatch between samples for which purity is' + \
                'available and those for which read count is available.'
            print >> sys.stderr, 'Cellularity estimation failed.'
            sys.exit()
        mutCellularityPath = os.path.join(config.working_dir,
                                          config.output_prefix + '.mutation.cellularity')
        clusterCellularityPath = os.path.join(config.working_dir,
                                              config.output_prefix + '.cluster.cellularity')
        # Row width distinguishes the input flavor:
        # no multiplicity provided
        if len(mutData.values()[0][0]) == 4:
            generate_cellularity_file(mutData, purity, mutCellularityPath,
                                      config.cellularity_estimator['coverage_threshold'],
                                      config.cellularity_estimator['absent_mode'])
        # multiplicity provided
        if len(mutData.values()[0][0]) == 5:
            generate_cellularity_file_mult(mutData, purity, mutCellularityPath,
                                           config.cellularity_estimator['coverage_threshold'],
                                           config.cellularity_estimator['absent_mode'])
        # NOTE(review): this cluster_analysis check is duplicated verbatim in
        # the else branch below -- candidate for hoisting out of the if/else.
        if (not (hasattr(config, 'cluster_analysis'))) or config.cluster_analysis != 'schism':
            # assumes cluster definitions are provided by the user through config file
            average_cellularity(config, clusterCellularityPath)
    else:
        clusterCellularityPath = os.path.join(config.working_dir,
                                              config.output_prefix + '.cluster.cellularity')
        if (not (hasattr(config, 'cluster_analysis'))) or config.cluster_analysis != 'schism':
            # assumes cluster definitions are provided by the user through config file
            average_cellularity(config, clusterCellularityPath)
def run_ga(args):
    # Python 2. Run the genetic algorithm that searches tree topologies
    # constrained by the CPOV (topology) and cellularity (mass) rules,
    # writing one .trace file per GA instance.
    config = Config(args.config_file)
    CPOVPath = os.path.join(config.working_dir,
                            config.output_prefix + '.HT.cpov')
    cellularityPath = os.path.join(config.working_dir,
                                   config.output_prefix + '.cluster.cellularity')
    topologyRules = TopologyRules(CPOVPath)
    massRules = MassRules(cellularityPath)
    clusterIDs = topologyRules.clusterIDs
    treeOptions = {'fitnessCoefficient': config.genetic_algorithm['fitness_coefficient'],
                   'clusterIDs': clusterIDs,
                   'topologyRules': topologyRules,
                   'massRules': massRules}
    # gather other parameters for running ga
    # setup GA workflow
    gaOptions = {'generationCount': config.genetic_algorithm['generation_count'],
                 'generationSize': config.genetic_algorithm['generation_size'],
                 'randomObjectFraction': config.genetic_algorithm['random_object_fraction'],
                 'Pm': config.genetic_algorithm['mutation_probability'],
                 'Pc': config.genetic_algorithm['crossover_probability'],
                 'randomObjectGenerator': Node.random_topology,
                 'treeOptions': treeOptions,
                 'verbose': config.genetic_algorithm['verbose']}
    if args.mode == 'serial':
        # if run in serial mode, perform series of independent
        # ga runs, and index them by 1 to instanceCount
        instanceCount = config.genetic_algorithm['instance_count']
    elif args.mode == 'parallel':
        # if run in parallel mode, perform a single GA run
        # and name it using args.runID
        instanceCount = 1
    else:
        print >>sys.stderr, 'Unrecognized mode for GA run. please select ' + \
            ' serial or parallel as mode.'
        sys.exit()
    for instance in range(instanceCount):
        gaRun = GA(gaOptions)
        gaRun.run_first_generation()
        # First generation is created above, hence generation_count - 1 here.
        for index in range(config.genetic_algorithm['generation_count'] - 1):
            gaRun.add_generation()
        # Trace filename encodes the run index (serial) or runID (parallel).
        if args.mode == 'serial':
            path = os.path.join(config.working_dir,
                                config.output_prefix + '.GA.r' + str(instance+1) + '.trace')
        else:
            path = os.path.join(config.working_dir,
                                config.output_prefix + '.GA.r' +
                                str(args.runID) + '.trace')
        gaRun.store_metrics(path)
def shell(config_dir):
    """Open an interactive compressed SSH session to the configured host."""
    cfg = Config(config_dir)
    identity = os.path.join(cfg.project_config_dir, cfg.key_filename)
    local('ssh -C -i {key} {user}@{host}'.format(
        key=identity,
        user=cfg.user,
        host=cfg.host,
    ))
def setup(token, prefix, game, mongodb):
    """Store each provided bot setting; report if any value is rejected."""
    try:
        # Only truthy values are written; falsy ones keep the stored setting.
        if token:
            Config.set_token(token)
        if prefix:
            Config.set_prefix(prefix)
        if game:
            Config.set_game(game)
        if mongodb:
            Config.set_mongodb(mongodb)
    except ValueError:
        print("ERROR: Not all Configurations set. Use --help to see options.")
async def update_entry(config_entry, cloudflare_entry_id, datas, ip):
    """PUT an updated DNS record for the configured zone to the Cloudflare-style API.

    config_entry: mapping with at least a 'name' key (used for logging).
    cloudflare_entry_id: record id appended to the dns_records URL.
    datas: request payload forwarded verbatim to the API.
    ip: address being published; only used in the log line.
    """
    async with aiohttp.ClientSession() as session:
        put = await session.put(api_url + Config.zoneid() + "/dns_records/" +
                                cloudflare_entry_id,
                                data=datas,
                                headers=HEADERS)
        # Only success is logged; non-200 responses are silently dropped.
        if int(put.status) == 200:
            print(
                f"Updated Entry for {config_entry['name']} with IP {ip}. Status : {put.status}"
            )
def __init__(self, config_filepath) -> None:
    """Build the training harness: config, seed, data loader, model, optimiser, loss.

    config_filepath: path passed to Config; must expose seed, device and lr.
    """
    super().__init__()
    self.config = Config(config_filepath)
    # Seed before any model/loader construction for reproducibility.
    set_seed(self.config.seed)
    self.loader = SemEvalDataLoader(self.config)
    self.model = BertCls(self.config)
    self.model = self.model.to(self.config.device)
    self.optimiser = optim.Adam(self.model.parameters(), lr=self.config.lr)
    # BCELoss implies the model emits probabilities (post-sigmoid) -- TODO confirm
    self.criterion = nn.BCELoss()
def __init__(self, bot):
    """Cache bot handles and load (or create) the watchdog config file."""
    self.bot = bot
    self.loop = bot.loop
    self.servercfg = bot.servercfg
    # Maps watched targets to their running watchdog state (filled later).
    self.watchdogs = {}
    self.watchcfg = Config(f'{bot.dir}/configs/watchcfg.json',
                           load=True, loop=self.loop)
    # Default mention target for watchdog alerts.
    if 'notify' not in self.watchcfg:
        self.watchcfg['notify'] = '@here'
def __init__(self, bot):
    """Start the discordbots.org stats client; unload this cog when no token is set."""
    self.bot = bot
    config = Config.get_config()
    try:
        self.token = config["dblapi"]
    except Exception as ex:
        # Missing/invalid token: report and unload rather than crash the bot.
        print(f"exception - unloading extension dblstats: {ex}")
        traceback.print_exc(file=sys.stdout)
        self.bot.unload_extension("cogs.utils.dblstats")
    # NOTE(review): when the lookup above fails, self.token is never assigned,
    # so this line raises AttributeError unless unload_extension interrupts
    # construction -- confirm intended flow.
    self.dblpy = dbl.DBLClient(self.bot, self.token, autopost=True)
class NewPostAnnouncer(Announcer):
    """Announces new subreddit posts to the configured Discord channel."""
    CHANNEL_ID = Config.get_module_setting('reddit', 'announcements')

    async def announce(self, submission):
        """Build and send a Discord embed summarizing a new reddit submission."""
        if submission.is_self:
            # Self post: use the first non-empty selftext line as the preview.
            desc_list = submission.selftext.split('\n')
            if len(desc_list) > 1:
                desc = desc_list[0]
                while not desc:
                    desc_list.pop(0)
                    desc = desc_list[0]
                # Strip trailing punctuation so the appended ellipsis reads cleanly.
                i = -1
                while desc[i] in ['.', '?', '!', ':', ';']:
                    i -= 1
                # `or None` keeps the whole string when i+1 == 0.
                desc = desc[:(i + 1) or None] + '...'
            else:
                desc = desc_list[0]
        else:
            # Link post: point the reader at the reddit permalink instead.
            desc = 'Links to `{}`. [View the post instead.]({})'.format(
                submission.domain,
                "https://www.reddit.com" + submission.permalink)
        # Discord embed descriptions cap at 2048 chars; cut at a word boundary.
        if len(desc) > 2048:
            last_valid_word_index = desc[:2048].rfind(' ')
            if last_valid_word_index == -1:
                last_valid_word_index = 2045
            desc = desc[:last_valid_word_index] + '...'
        # Color/icon reflect the author's flair (team vs mod vs none).
        flair = submission.author_flair_css_class
        color = Color.default()
        author_icon = Embed.Empty
        if not flair:
            pass
        elif 'team' in flair:
            color = Color.blue()
            author_icon = 'https://cdn.discordapp.com/emojis/338117947241529344.png'
        elif 'mod' in flair:
            color = Color.green()
            author_icon = 'https://cdn.discordapp.com/emojis/338254475674255361.png'
        # Reddit uses placeholder strings ('self', 'default', ...) when there
        # is no real thumbnail URL.
        thumbnail = submission.thumbnail
        if 'http' not in thumbnail:
            thumbnail = Embed.Empty
        embed = self.module.create_discord_embed(
            title=submission.author,
            icon=author_icon,
            subtitle=submission.title,
            info=desc,
            subtitle_url=submission.url,
            color=color,
            thumbnail=thumbnail,
            footer='/r/{} - New Post'.format(self.module.subreddit_name))
        return await self.send(embed=embed)
def __init__(self):
    """Build the navigation environment from the shared Config settings."""
    Common = Config()
    self.basic_directions = Common.basic_directions
    self.extra_directions = Common.extra_directions
    self.original_observation_length = Common.original_observation_length
    self.extra_length = len(self.extra_directions)
    # Observation = base sensors plus one reading per extra direction.
    self.observation_space = Box(-np.inf, np.inf,
                                 [self.original_observation_length + self.extra_length],
                                 float)
    self.action_space = Box(-1.0, 1.0, [2], float)
    self._env_step_counter = 0
    self.state = np.zeros([self.observation_space.shape[0]])
    # self.level = Common.level
    self.position = np.zeros([2])
    self.target = np.zeros([2])
    self.orient = np.zeros([1])
    self.speed = np.zeros([1])
    self.max_speed = Common.max_speed
    self.min_distance_to_target = Common.min_distance_to_target
    self.real_action_range = Common.real_action_range
    # self.min_distance_to_obstacle = Common.min_distance_to_obstacle
    self.min_initial_starts = Common.min_initial_starts
    self.expand = Common.expand
    self.num_circle = Common.num_circle
    self.radius = Common.radius
    self.period = Common.period
    self.mat_height = None
    # self.mat_exist = None
    # self.lowest = Common.lowest
    self.delta = Common.delta
    self.total = Common.total
    # self.scope = Common.scope
    self.min_step = Common.min_step
    self.directions = self.basic_directions + self.extra_directions
    self.end_points = [None for _ in range(len(self.directions))]
    # self.margin = Common.margin
    # NOTE(review): self.margin is referenced below but its assignment above
    # is commented out -- unless set elsewhere this raises AttributeError.
    self.env_params = {'cylinders': None,
                       'size': 1.5*(self.num_circle+self.margin*2)*self.period}
    self.agent_params = {'position': self.position, 'target': self.target,
                         'direction': None, 'rangefinders': self.end_points}
    self.agent_params_pre = None
    self.first_render = True
    self.terminate_render = False
    self.camera_alpha = Common.camera_alpha
    # self.is_reset = False
    # NOTE(review): self.scope is also commented out above yet asserted here;
    # confirm where scope/margin are actually assigned.
    assert self.scope > self.max_speed
def test_functional_relation_extraction(self):
    """functional test of relations extraction; different classes possibly in different modules"""
    # XXX should be catching pyreverse environnement problem but doesn't
    # pyreverse doesn't extracts the relations but this test ok
    project = MANAGER.project_from_files(['data'], astroid_wrapper)
    handler = DiadefsHandler(Config())
    diadefs = handler.get_diadefs(project, Linker(project, tag=True))
    class_diagram = diadefs[1]
    extracted = _process_relations(class_diagram.relationships)
    self.assertEqual(extracted, self._should_rels)
def setup(bot):
    """Register the ServerBackups cog, its shared config, and its permission nodes."""
    if not hasattr(bot, 'servercfg'):
        # First cog to load creates the shared server config file.
        bot.servercfg = Config(
            f'{bot.dir}/configs/serverCfgs.toml',
            default={
                "servers": {},
                "serverspath": "NONE",
                "backupspath": "NONE",
                "oldTimer": 1440,
            },
            load=True,
            loop=bot.loop,
        )
    bot.register_nodes([f'{__name__}.{node}' for node in ('backup', 'apply')])
    bot.add_cog(ServerBackups(bot))
def __init__(self):
    """Initialize the timetable worker and schedule its periodic run."""
    self.number = 1
    self.last_db_update = None
    self.db_file = os.environ['TRAM_ROOT'] + '/data/'
    self.config = Config()
    self.force_update = False
    # Status log: list of (state, timestamp-string) tuples, newest appended.
    self.status = [('not running', str(datetime.datetime.now()))]
    self.db = MpkDb()
    self.przystanki_db = PrzystankiDb()
    self.mpk_link = self.config['get_db_link']
    self.mpk_point_data = self.config['get_point_data_link']
    self.headers = self.config['mpk_headers']
    self.httpclient = AsyncHTTPClient()
    # Config period is in minutes; the callback expects milliseconds.
    YieldPeriodicCallback.__init__(self, self.run,
                                   self.config['ttworker_refresh_period'] * 60000,
                                   faststart=True)
    self.update_status('TTworker initialised')
def generate(): config = Config.getInstance() # Собираем список выбранных тем source_names = [] for key, value in request.form.iteritems(): if key.startswith('sources_'): source_names.append(value) strophes = int(request.form['strophes']) rhyming = request.form['rhyming'] step = request.form['step'] steps = request.form['steps'] # Собираем из выбранных тем рифмы в общий пул rhymes_ds = {} for src_name in source_names: source = config['sources'][src_name] for rhyme in source: # Определяем имя рифмы, стопу, и число стоп r_name, r_step, r_steps = rhyme.split('_') # Скипаем, если стопа не та, или не тот размер, если нам не пофиг if step != r_step or steps != '0' and steps != r_steps: continue if not rhymes_ds.has_key(r_name): rhymes_ds[r_name] = set() rhymes_ds[r_name] |= set(source[rhyme]) # Преобразуем пул к списку списков для индексирования rhymes_ll = map(list, rhymes_ds.itervalues()) rhymes_count = len(rhymes_ll) text = [] for dummy in xrange(strophes): # Выбираем пару разных рифм rhyme_a, rhyme_b = map(rhymes_ll.__getitem__, distinctRandom(rhymes_count, 2)) # Выбираем пары строк для строф string_a1, string_a2 = map(rhyme_a.__getitem__, distinctRandom(len(rhyme_a), 2)) string_b1, string_b2 = map(rhyme_b.__getitem__, distinctRandom(len(rhyme_b), 2)) if rhyming == 'abab': text.append([string_a1, string_b1, string_a2, string_b2, ]) elif rhyming == 'abba': text.append([string_a1, string_b1, string_b2, string_a2, ]) elif rhyming == 'aabb': text.append([string_a1, string_a2, string_b1, string_b2, ]) return config['mako']['lookup'].get_template('result.mako').render(text=text)
def test_constructor_path_provided(self):
    """Config(path) must open exactly that path once and remember it."""
    # Reset the singleton so the constructor takes the cold path.
    Config._instance = None
    mock_path = '/mock/path/to/config'
    m = mock_open()
    # Patch the module-level open used inside utils.config.
    with patch('utils.config.open', m, create=True):
        config = Config(mock_path)
    m.assert_called_once_with(mock_path, 'r')
    self.assertEqual(config.cfg, self.config_dict)
    self.assertEqual(config.path, mock_path)
    # NOTE(review): 'josn_load_mock' looks like a typo for 'json_load_mock',
    # but it must match the attribute created in setUp -- rename both together.
    self.assertTrue(self.josn_load_mock.called)
def edit_file(config_dir, target_file):
    """Open target_file on the configured remote host in nano over SSH."""
    cfg = Config(config_dir)
    identity = os.path.join(cfg.project_config_dir, cfg.key_filename)
    # SSH refuses keys with loose permissions; tighten before connecting.
    local('chmod 600 {key}'.format(key=identity))
    local('ssh -t -i {key} {user}@{host} "nano {file}"'.format(
        key=identity,
        user=cfg.user,
        host=cfg.host,
        file=target_file,
    ))
class InvasionPermaMessage(PermaMessage):
    """Persistent Discord embed summarizing all currently tracked invasions."""
    TITLE = 'Invasions'
    CHANNEL_ID = Config.get_module_setting('invasion', 'perma')

    async def update(self, *args, **kwargs):
        """Rebuild and send the invasion summary embed for the current tick."""
        # First loop after startup: show a placeholder while data loads.
        if self.module.is_first_loop:
            msg = self.module.create_discord_embed(
                title=self.TITLE,
                info='Collecting the latest information...',
                color=Color.light_grey())
            return await self.send(msg)
        # Split mega invasions out so they can be pinned to the top.
        megainvs = []
        invs = []
        for inv in self.module.invasions:
            if inv.mega_invasion:
                megainvs.append(inv)
            else:
                invs.append(inv)
        megainvs = sorted(megainvs, key=lambda k: -k.start_time)
        # Known ETR sorts by time remaining; unknown ETR sorts by progress.
        invs = sorted(invs, key=lambda k: (-k.etr if k.etr != -1
                                           else (k.defeated/k.total)))
        invs = megainvs + invs
        # Stale data (no update for 5 minutes): show an outage notice.
        if time.time() >= (assert_type(self.module.last_updated, int, otherwise=0) + 300):
            desc = 'We\'re experiencing some technical difficulties.\nInvasion tracking will be made reavailable as soon as possible.'
            msg = self.module.create_discord_embed(
                title=self.TITLE, info=desc, color=Color.light_grey())
            msg.set_footer(text='We apologize for the inconvenience.')
        elif len(invs) > 0:
            cogs = []
            districts = []
            etrs = []
            progress = []
            for inv in invs:
                if inv.etr != -1:
                    # Human-readable time remaining, with special cases for
                    # negative/too-fresh/mega invasions.
                    etr = get_time_from_seconds(inv.etr)
                    etr = 'A few seconds' if inv.etr < 0 else etr
                    etr = 'Calculating...' if time.time() - inv.start_time < 60 else etr
                    etr = 'Mega Invasion!' if inv.mega_invasion else etr
                    etrs.append(etr)
                else:
                    # No ETR: render a 10-slot text progress bar instead.
                    p = int((inv.defeated/inv.total) * 10)
                    # Pray this never has to be debugged.
                    progress.append('[{}{}]'.format('■' * p, (' '*(10-p))+(' '*ceil((10-p)/2))))
                cogs.append(inv.cog.plural())
                districts.append(inv.district)
            # Third column is ETR when any exists, otherwise the progress bars.
            fields = [
                {'name': 'Cog', 'value': '\n'.join(cogs)},
                {'name': 'District', 'value': '\n'.join(districts)},
                {'name': 'Time Remaining', 'value': '\n'.join(etrs)} if etrs
                else {'name': 'Progress', 'value': '\n'.join(progress)}
            ]
            msg = self.module.create_discord_embed(
                title=self.TITLE,
                title_url=self.module.route[1],
                color=Color.light_grey(),
                fields=fields)
        else:
            # No active invasions: report how long the drought has lasted.
            desc = 'No invasions to report.\nThe last invasion seen was __{} ago__.'.format(
                get_time_from_seconds(int(time.time()) - self.module.drought_start))
            msg = self.module.create_discord_embed(
                title=self.TITLE, info=desc, color=Color.light_grey())
        return await self.send(msg)
def __init__(self, coords):
    """Initialize the GPS worker and schedule its periodic run."""
    self.coords = coords
    self.config = Config()
    self.rx_pin = self.config['rx_pin']
    # Configure GPIO before the first periodic callback fires.
    self.configure_raspi()
    self.run_number = 1
    logging.info('starting gps worker')
    YieldPeriodicCallback.__init__(self, self.run,
                                   self.config['worker_period'],
                                   faststart=True)
def setup(bot):
    """Register the ConsoleCmds cog, its shared config, and its permission nodes."""
    if not hasattr(bot, 'servercfg'):
        # First cog to load creates the shared server config file.
        bot.servercfg = Config(
            f'{bot.dir}/configs/serverCfgs.toml',
            default={
                "servers": {},
                "serverspath": "NONE",
                "backupspath": "NONE",
                "oldTimer": 1440,
            },
            load=True,
            loop=bot.loop,
        )
    nodes = ('whitelist', 'categories', 'kick', 'ban', 'relay')
    bot.register_nodes([f'{__name__}.{node}' for node in nodes])
    bot.add_cog(ConsoleCmds(bot))
def setup_newrelic_server_monitor(config_dir):
    """Fabric task: install and start the New Relic system monitor on the remote host."""
    config_instance = Config(config_dir)
    apt_sources_file = '/etc/apt/sources.list.d/newrelic.list'
    apt_repository = 'deb http://apt.newrelic.com/debian/ newrelic non-free'
    newrelic_gpg_key = 'https://download.newrelic.com/548C16BF.gpg'
    server_monitor_package = 'newrelic-sysmond'
    if exists(apt_sources_file):
        # Check if repository already in file. Otherwise, add to the end.
        sudo("grep -q '{0}' {1} || sudo echo '{0}' >> {1}".format(
            apt_repository, apt_sources_file))
    else:
        sudo("echo '{0}' >> {1}".format(apt_repository, apt_sources_file))
    # Trust the New Relic GPG key
    sudo("wget -O- {0} | apt-key add -".format(newrelic_gpg_key))
    # Install the Server Monitor package
    sudo('apt-get update', warn_only=True, quiet=True)
    sudo('apt-get -y --no-upgrade install {}'.format(server_monitor_package))
    # Add license key to config file
    newrelic_sysmon_config_path = '/etc/newrelic/nrsysmond.cfg'
    if not exists(newrelic_sysmon_config_path, use_sudo=True):
        # Upload config file with New Relic account license key (set in settings.base)
        upload_template('newrelic_system_monitor.cfg', newrelic_sysmon_config_path,
                        template_dir=config_instance.config_path('new_relic'),
                        context=config_instance.context,
                        use_sudo=True, use_jinja=True)
    else:
        # Never overwrite an existing config -- it may hold a different key.
        print(
            'New Relic System Monitor configuration file is already exist. '
            'Make sure it contains the right new relic license key for Razortheory account.'
        )
    # Start the daemon
    sudo('/etc/init.d/newrelic-sysmond start')
def main(inputs, paths):
    # Python 2 driver: prepare the working directories, locate CP2K, copy the
    # initial structures, and launch NUMBER_INIT x NUMBER_RANDOM FSSH runs.
    print """ """
    # SET_UP THE DIRECTORY, CHECK ANY SUBDIR IS PRESENT
    bucket = Bucket(inputs)
    bucket.name()
    paths.update({'bucket': bucket.path})
    task = Dir(inputs.get('INPUT_INFO'))
    paths.update({'task': task.path})
    templates = Dir('templates', paths)
    templates.checkdir()
    templates.clean()
    bin = Dir('bin', paths)
    bin.checkdir()
    # FIND CP2K PATHS: local_paths/cp2k.path must hold the executable path.
    try:
        local_paths = Dir('local_paths', paths)
        local_paths.checkdir()
        cp2k_file = open(paths.get('local_paths') + 'cp2k.path', 'r')
        paths.update({'cp2k': cp2k_file.read().rstrip()})
        if not os.path.isfile(paths.get('cp2k')):
            raise SystemExit('WARNING: check path for CP2K executable in local_paths/cp2k.path')
    except:
        # NOTE(review): bare except also swallows the SystemExit above and
        # re-raises with a different (typo'd) message -- confirm intended.
        raise SystemExit("WARINING: please provide the path for CP2K executable in local_paths/cp2k.path")
    # Copy the initial-structure directory into the bucket.
    os.system(' cp -r %s/%s %s' % (paths.get('task'),
                                   inputs.get('FILE_INIT'),
                                   paths.get('bucket')))
    initial = Dir(inputs.get('FILE_INIT'), paths)
    initial.checkdir()
    paths.update({'initial': initial.path})
    # FSSH Config implementation is selected by the SYSTEM keyword.
    system = inputs.get('SYSTEM')
    if system == 'CRYSTAL':
        from utils import CP2KOSFSSH as Config
    elif system == 'SOLVENT':
        from utils import CP2KOSwSolventFSSH as Config
    else:
        sys.exit()
    number_init = inputs.get('NUMBER_INIT', 1)
    number_random = inputs.get('NUMBER_RANDOM', 5)
    # One run per (initial structure, random seed) pair.
    ndir = 0
    for init in range(1, number_init + 1):
        for random in range(1, number_random + 1):
            config = Config(inputs, paths, INIT=init)
            ndir = config.run(ndir)
def start_tracking(self):
    """Start the post/comment stream tasks; attach the live-thread stream when configured."""
    super().start_tracking()
    self.post_stream = self.client.loop.create_task(self.stream_posts())
    self.comment_stream = self.client.loop.create_task(
        self.stream_comments())
    # 'live' config: mapping with 'announcements' channel and live-thread 'id'.
    self.live = Config.get_module_setting('reddit', 'live')
    if self.live:
        self.live_announcer.CHANNEL_ID = self.live['announcements']
        if self.live['id']:
            self.live_stream = self.client.loop.create_task(
                self.stream_live())
def reloadConfig():
    """Reload config.yaml plus its includes and rebuild the mako template lookup.

    Returns a human-readable success message.
    """
    config = Config.getInstance()
    config.clear()
    with open('config.yaml') as fin:
        # safe_load, matching the include handling below: yaml.load without
        # an explicit Loader is deprecated and can construct arbitrary
        # Python objects from untrusted input.
        config.update(yaml.safe_load(fin))
    # Each include file is stored under its own filename key.
    for name in config.get('include', []):
        with open(name) as include_file:
            config[name] = yaml.safe_load(include_file)
    lookup = TemplateLookup(directories=[config['mako']['template_dir']],
                            module_directory=config['mako']['tmp_dir'],
                            input_encoding="utf-8",
                            output_encoding="utf-8",
                            encoding_errors="replace")
    config['mako']['lookup'] = lookup
    return 'Configuration successfully reloaded'
def stream_cam():
    """Stream webcam frames as a multipart/x-mixed-replace (MJPEG) response,
    one JPEG part per second."""
    def getstream(config):
        # Generator yielding one multipart "part" per captured frame.
        while True:
            tmp = StringIO()
            tmp.write('--%s\n' % boundary)
            # MIME header fields are "Name: value" — the previous
            # "Content-type=..." / "Content-length= ..." forms were not
            # valid headers and could confuse strict clients.
            tmp.write('Content-type: image/jpeg\n')
            img = get_image(config)
            tmp.write('Content-length: %s\n\n' % str(img.len))
            tmp.write(img.getvalue())
            tmp.write('\n\n')
            yield tmp.getvalue()
            time.sleep(1)
            tmp.close()
            img.close()
    config = Config.get(session)
    boundary = uuid.uuid4().hex
    # Declare the bare boundary token: the part delimiter is "--" + boundary,
    # which matches the "--%s" lines written above. The old header advertised
    # "boundary=--%s", implying "----<hex>" delimiters that were never sent.
    headers = [('Content-Type', 'multipart/x-mixed-replace; boundary=%s' % boundary)]
    return Response(getstream(config), headers=headers, direct_passthrough=True)
def init_app(self, app):
    """Wire this extension into the given Flask ``app``: configure redis,
    register the blueprint, add the optional token/management endpoints and
    initialise the mongo and oauth sub-extensions."""
    config = Config(app)
    redis.from_url(config.value('REDIS_URL'))
    # NOTE(review): uses self.app here but the `app` parameter everywhere
    # else — verify self.app is always the same object as `app`.
    # NOTE(review): unconditionally forcing DEBUG on looks like leftover
    # development code — confirm before shipping.
    self.app.config['DEBUG'] = True
    self.register_blueprint(app)
    # A setting of False explicitly disables the endpoint; any other value
    # (including a custom URL) enables it.
    if config.value('TOKEN_URL') is not False:
        app.add_url_rule(
            config.url_rule_for('TOKEN_URL'),
            view_func=views.access_token,
            methods=['POST']
        )
    if config.value('MANAGEMENT_URL') is not False:
        app.add_url_rule(
            config.url_rule_for('MANAGEMENT_URL'),
            view_func=views.management,
            methods=['POST', 'GET']
        )
    mongo.init_app(app, config_prefix='SENTINEL_MONGO')
    oauth.init_app(app)
    # NOTE(review): assigning the private oauth._validator bypasses any
    # public setter the oauth provider offers — confirm this is intended.
    oauth._validator = MyRequestValidator()
        indent=4)
    opter.exit(0)
    # Show config argument
    # NOTE(review): fragment — the enclosing def of this body is outside the
    # visible chunk; nesting below is reconstructed from context.
    if opts.showconfig:
        # Dump every parsed option as "name = value", then stop.
        for k, v in opts.__dict__.items():
            p(k, "=", str(v))
        return
    # Process values
    if opts.batch:
        # Batch mode implies both non-interactive behaviours.
        opts.select_first = True
        opts.always_rename = True
    # Update global config object
    Config.update(opts.__dict__)
    if len(args) == 0:
        opter.error("No filenames or directories supplied")
    try:
        tvnamer(paths = sorted(args))
    except NoValidFilesFoundError:
        opter.error("No valid files were supplied")
    except UserAbort, errormsg:
        # User cancelled at a prompt; surface the abort message via the parser.
        opter.error(errormsg)

if __name__ == '__main__':
    main()
def setup_method(self, method):
    """Test hook: build a fresh Config from CONFIG_FILE before each test
    so cases cannot leak state into one another."""
    self.config = Config(CONFIG_FILE)
class MainWindow(QtGui.QMainWindow):
    """ Main Window operates as a common parent of all objects, over which they can communicate. """

    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        # Truncate any previous log, then reopen it in append mode for this session.
        self.logfile = open(BASEPATH + "/log.txt", "w")
        self.logfile.write("")
        self.logfile.close()
        self.logfile = open(BASEPATH + "/log.txt", "a")
        self.addons = []  # list of (name, path, module) tuples, filled by initializeAddons
        # load the config
        self.config = Config(CONFIGPATH)
        if not os.path.exists(CONFIGPATH):
            # First run: write a default config before loading it.
            self.initializeConfig()
        self.config.loadData()
        self.history = History(100)  # undo/redo stack, capped at 100 steps
        self.translations = translations.Translations(self.config["language"])
        self.project = widgets.ProjectExplorer(self, "")
        self.projectPath = ""       # filesystem root of the open project ("" = none open)
        self.baseModClass = None    # set by initializeAddons when the BaseMod addon loads
        self.guiClass = None        # set by initializeAddons when the GUI addon loads
        self.initUI()
        self.initializeAddons()

    def initUI(self):
        """Build the widget layout, menu bar, toolbars and signal connections."""
        self.ui = uic.loadUi(BASEPATH + "/ui/MainWindow.ui", self)
        self.editor = widgets.Editor(self)
        self.console = widgets.Console(self)
        # self.console.streamToConsole(sys.stdout)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.project)
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.editor)
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.console)
        # Menu-bar actions (labels come from the active translation).
        self.newProjectMenubar = QtGui.QAction(
            QtGui.QIcon(BASEPATH + "/assets/icons/newProject.png"),
            self.translations.getTranslation("newProject"),
            self
        )
        self.openProjectMenubar = QtGui.QAction(self.translations.getTranslation("open"), self)
        self.openProjectMenubar.setShortcut("Ctrl+O")
        self.saveProjectMenubar = QtGui.QAction(self.translations.getTranslation("save"), self)
        self.saveProjectMenubar.setShortcut("Ctrl+S")
        self.exportProjectMenubar = QtGui.QAction(
            QtGui.QIcon(BASEPATH + "/assets/icons/export.png"),
            self.translations.getTranslation("exportProject"),
            self
        )
        self.exportProjectMenubar.setShortcut("Ctrl+E")
        self.exportJarMenubar = QtGui.QAction(self.translations.getTranslation("exportJar"), self)
        self.exportJarMenubar.setShortcut("Ctrl+Shift+E")
        self.runClientMenubar = QtGui.QAction(self.translations.getTranslation("runClient"), self)
        self.runClientMenubar.setShortcut("F5")
        self.undoMenubar = QtGui.QAction(self.translations.getTranslation("undo"), self)
        self.undoMenubar.setShortcut("Ctrl+Z")
        self.redoMenubar = QtGui.QAction(self.translations.getTranslation("redo"), self)
        self.redoMenubar.setShortcut("Ctrl+Y")
        self.addonsMenubar = QtGui.QAction(self.translations.getTranslation("addons"), self)
        self.delMenubar = QtGui.QAction(self.translations.getTranslation("delete"), self)
        self.delMenubar.setShortcut("Del")
        # Menus and toolbars.
        self.menubar = self.menuBar()
        self.fileMenubar = self.menubar.addMenu(self.translations.getTranslation("file"))
        self.editMenubar = self.menubar.addMenu(self.translations.getTranslation("edit"))
        self.newMenubar = self.menubar.addMenu(self.translations.getTranslation("new"))
        self.runMenubar = self.menubar.addMenu(self.translations.getTranslation("run"))
        self.optionMenubar = self.menubar.addMenu(self.translations.getTranslation("options"))
        self.projectToolbar = self.addToolBar("Project")
        self.runToolbar = self.addToolBar("Run")
        # Attach the actions to their menus/toolbars.
        self.fileMenubar.addAction(self.newProjectMenubar)
        self.fileMenubar.addAction(self.openProjectMenubar)
        self.fileMenubar.addAction(self.saveProjectMenubar)
        self.fileMenubar.addAction(self.exportProjectMenubar)
        self.fileMenubar.addAction(self.exportJarMenubar)
        self.editMenubar.addAction(self.undoMenubar)
        self.editMenubar.addAction(self.redoMenubar)
        self.editMenubar.addAction(self.delMenubar)
        self.runMenubar.addAction(self.runClientMenubar)
        self.optionMenubar.addAction(self.addonsMenubar)
        self.projectToolbar.addAction(self.newProjectMenubar)
        self.projectToolbar.addAction(self.exportProjectMenubar)
        self.runToolbar.addAction(self.runClientMenubar)
        # Wire each action's triggered() signal to its handler (old-style connects).
        self.connect(self.newProjectMenubar, QtCore.SIGNAL("triggered()"), self.createNewProject)
        self.connect(self.openProjectMenubar, QtCore.SIGNAL("triggered()"), self.openProject)
        self.connect(self.saveProjectMenubar, QtCore.SIGNAL("triggered()"), self.save)
        self.connect(self.exportProjectMenubar, QtCore.SIGNAL("triggered()"),
                     self.exportProject)
        self.connect(self.exportJarMenubar, QtCore.SIGNAL("triggered()"), self.exportJar)
        self.connect(self.undoMenubar, QtCore.SIGNAL("triggered()"), self.undo)
        self.connect(self.redoMenubar, QtCore.SIGNAL("triggered()"), self.redo)
        self.connect(self.runClientMenubar, QtCore.SIGNAL("triggered()"), self.runClient)
        self.connect(self.addonsMenubar, QtCore.SIGNAL("triggered()"), self.openAddonDialog)
        self.connect(self.delMenubar, QtCore.SIGNAL("triggered()"), self.delete)
        # Dock-only layout: no central widget, docks may nest freely.
        self.setCentralWidget(None)
        self.setDockNestingEnabled(True)

    def initializeConfig(self):
        """ Write the most rudimentary config to disk. """
        self.config["language"] = "English"
        self.config["addons"] = [
            BASEPATH + "/addons/BaseMod/BaseMod.py",
            BASEPATH + "/addons/Block/Block.py",
            BASEPATH + "/addons/Item/Item.py",
            BASEPATH + "/addons/GUI/CraftingTable.py",
        ]
        self.config.saveData()

    def initializeAddons(self):
        """ Load all the addons from the paths found in the config. """
        self.addons = []
        for path in self.config["addons"]:
            for f in getPythonFiles(path):
                # Module name = file name without extension.
                name = f.split("/")[-1].split(".")[0]
                mod = imp.load_source(name, f)
                # Only keep modules that expose an init() hook.
                if "init" in dir(mod):
                    self.addons.append((name, path, mod))
        for name, path, mod in self.addons:
            if "init" in dir(mod):
                mod.init(self)
                print("Initialized " + name)
            # Remember the two special addons other code needs direct access to.
            if name == "BaseMod":
                self.baseModClass = mod
            elif name == "GUI":
                self.guiClass = mod

    def openAddonDialog(self):
        """Open the addon-management dialog."""
        widgets.Addons(self)

    @dec.accepts(_base, str)
    def updateName(self, obj, name):
        """ MainWindow.updateName(classes._base, str)

        Update the name of a Minecraft object.

        Args:
            obj (classes._base): The object to rename
            name (str): The new name of the object
        """
        self.editor.renameTab(obj, name)
        self.project.renameObject(obj, name)
        obj.name = name

    @dec.accepts(_base)
    def addObject(self, obj):
        """ MainWindow.addObject(classes._base)

        Add a new Minecraft object to the project.

        Args:
            obj (classes._base): The object to add
        """
        self.project.addObject(obj)
        self.editor.openTab(obj)
        self.editor.tabWidget.setCurrentWidget(obj)

    def delete(self):
        """ Delete the currently selected object, if the 'deleteable' flag is set to true. """
        selected = self.project.selectedObject()
        if selected.deleteable:
            self.project.removeObject(selected)
            self.editor.closeTab(self.editor.tabWidget.indexOf(selected))

    def runClient(self):
        """ Run Minecraft with the current Mod loaded. """
        # Save and export first so the client runs the latest sources.
        self.save()
        self.console.clear()
        self.exportProject()
        path = self.projectPath
        gradlew.runClient(path, self.console)

    def createNewProject(self):
        """ Create a new project and set up the forge modding environment. """
        self.console.clear()
        # Default to the bundled Projects folder when it exists.
        projectpath = BASEPATH + "/Projects" if os.path.exists(BASEPATH + "/Projects") else "C:/"
        path = str(
            QtGui.QFileDialog.getExistingDirectory(
                None,
                "Select a folder:",
                projectpath,
                QtGui.QFileDialog.ShowDirsOnly
            )
        )
        if path != "":
            name, ok = QtGui.QInputDialog.getText(
                self,
                self.translations.getTranslation("newProject"),
                self.translations.getTranslation("name")
            )
            name = str(name)
            if ok:
                self.projectPath = path + "/" + name
                self.project.name = name
                # build the file structure
                os.mkdir(self.projectPath)
                # file to mark the folder as a project
                f = open(self.projectPath + "/mcmodderproject", "w")
                f.write(VERSION)
                f.close()
                # install forge modloader
                gradlew.installForge(self.projectPath, self.console)
                # generate a BaseMod instance
                for n, p, addon in self.addons:
                    if "onProjectCreated" in dir(addon):
                        addon.onProjectCreated(self)
                self.save()

    def openProject(self):
        """ Open a project from disk. """
        projectpath = BASEPATH + "/Projects" if os.path.exists(BASEPATH + "/Projects") else "C:/"
        path = str(
            QtGui.QFileDialog.getOpenFileName(
                self,
                self.translations.getTranslation("openProject"),
                projectpath,
                "JSON-Files" + " (*.json)"
            )
        )
        if not path == "":
            # Project root = directory containing the selected moddata file.
            self.projectPath = "/".join(path.replace("\\", "/").split("/")[:-1])
            self.project.load(path)

    def exportProject(self):
        """ Export the current project's java source code. """
        self.save()
        # clear the current project
        path = self.projectPath + "/src/main"
        if os.path.exists(path):
            shutil.rmtree(path)
        os.makedirs(path + "/java")
        os.makedirs(path + "/resources")
        # export the newly compiled source code
        # First pass completes cross-references, second pass writes files.
        for t in self.project.objects.keys():
            for cls in self.project.objects[t]:
                cls.completeModData()
        for t in self.project.objects.keys():
            for cls in self.project.objects[t]:
                cls.export()

    def exportJar(self):
        """ Export the current project as a .jar file. """
        path = str(
            QtGui.QFileDialog.getExistingDirectory(
                None,
                self.translations.getTranslation("destinationSelection"),
                self.projectPath + "/java/build/libs",
                QtGui.QFileDialog.ShowDirsOnly,
            )
        )
        if path != "":
            # Fill the build.gradle template with this mod's metadata.
            buildGradle = open(self.projectPath + "/java/build.gradle", "w")
            gradleSrc = source.SrcBuildGradle.main
            gradleSrc = gradleSrc.replace("<version>", self.project.objects["BaseMod"][0].version)
            gradleSrc = gradleSrc.replace("<mainPackage>", self.project.objects["BaseMod"][0].package())
            gradleSrc = gradleSrc.replace("<modname>", self.project.objects["BaseMod"][0].name)
            buildGradle.write(gradleSrc)
            buildGradle.close()
            self.exportProject()
            gradlew.exportMod(self.projectPath, self.console)
            # Move the built jar to the chosen destination unless it is
            # already being built in place.
            if (
                self.projectPath + "/java/build/libs/" +
                self.project.objects["BaseMod"][0].modid() + "-" +
                self.project.objects["BaseMod"][0].version + ".jar"
                != path + "/" + self.project.objects["BaseMod"][0].modid() + "-" +
                self.project.objects["BaseMod"][0].version + ".jar"
            ):
                shutil.copy2(
                    self.projectPath + "/java/build/libs/" +
                    self.project.objects["BaseMod"][0].modid() + "-" +
                    self.project.objects["BaseMod"][0].version + ".jar",
                    path + "/" + self.project.objects["BaseMod"][0].modid() + "-" +
                    self.project.objects["BaseMod"][0].version + ".jar",
                )
                os.remove(
                    self.projectPath + "/java/build/libs/" +
                    self.project.objects["BaseMod"][0].modid() + "-" +
                    self.project.objects["BaseMod"][0].version + ".jar"
                )

    def save(self):
        """ Save the current project to disk after creating a backup of the current savefile. """
        if os.path.exists(self.projectPath + "/moddata.json"):
            shutil.copy2(self.projectPath + "/moddata.json", self.projectPath + "/moddata.backup")
        f = open(self.projectPath + "/moddata.json", "w")
        data = self.project.save()
        data["Version"] = VERSION
        json.dump(data, f, indent=4, separators=(",", ": "))
        f.close()
        self.console.write("Saved Mod to " + self.projectPath)
        self.project.unsavedChanges = False

    def undo(self):
        """Step the shared history back one change."""
        self.history.undo()

    def redo(self):
        """Re-apply the most recently undone change."""
        self.history.redo()
def show_simple_page():
    """Display only the stream in lower resolution without any controls."""
    cfg = Config.get(session)
    return render_template('simple.html', config=cfg)
def home():
    """Render the main page with the full set of stream controls."""
    cfg = Config.get(session)
    # A POST carries updated control values; apply them before rendering.
    if request.method == "POST":
        cfg.update(request, session)
    return render_template(
        'stream.html',
        resolutions=RESOLUTIONS,
        config=cfg,
    )
def welcome():
    """Render the landing page listing every configured source topic."""
    cfg = Config.getInstance()
    lookup = cfg['mako']['lookup']
    template = lookup.get_template('index.mako')
    topics = cfg['sources'].keys()
    return template.render(topics=topics)
def main(): """Parses command line arguments, displays errors from tvnamer in terminal """ opter = OptionParser() opter.add_option( "-c", "--config", dest="config", help = "Override the config file path") opter.add_option( "-s", "--save", dest="saveconfig", help = "Save (default) config to file") opter.add_option( "-v", "--verbose", default=False, dest="verbose", action="store_true", help="show debugging information") opter.add_option( "-r", "--recursive", default = False, dest="recursive", action="store_true", help="Descend more than one level directories supplied as arguments") opter.add_option( "-a", "--always", default = False, dest="alwaysrename", action="store_true", help="always renames files (but still prompts for correct series). Can be set at runtime with the 'a' prompt-option") opter.add_option( "-f", "--selectfirst", default = False, dest="selectfirst", action="store_true", help="select first series search result (instead of showing the select-series interface") opter.add_option( "-b", "--batch", default = False, dest="batch", action="store_true", help="rename without human intervention, selects first series and always renames, same as --always and --selectfirst") opts, args = opter.parse_args() if opts.config is not None: print "Loading config from: %s" % (opts.config) try: Config.loadConfig(opts.config) except InvalidConfigFile: warn("Invalid config file %s - using default configuration" % ( opts.config)) Config.useDefaultConfig() if opts.saveconfig is not None: print "Saving current config to %s" % (opts.saveconfig) try: Config.saveConfig(opts.saveconfig) except InvalidConfigFile: opter.error("Could not save config to %s" % opts.saveconfig) else: print "Done, exiting" opter.exit(0) if opts.batch: opts.selectfirst = True opts.alwaysrename = True if not Config['verbose']: Config['verbose'] = opts.verbose if not Config['recursive']: Config['recursive'] = opts.recursive if not Config['alwaysrename']: Config['alwaysrename'] = opts.alwaysrename if not 
Config['selectfirst']: Config['selectfirst'] = opts.selectfirst if len(args) == 0: opter.error("No filenames or directories supplied") try: tvnamer(paths = sorted(args)) except NoValidFilesFoundError: opter.error("No valid files were supplied") except UserAbort, errormsg: opter.error(errormsg)