def __init__(self, app):
    Config.__init__(self, app)
    self.uni_file = os.path.join(self.settings_folder, 'me')
    self.clientconfig_file = os.path.join(self.settings_folder, 'pypsyc', 'config')
    #self.uni = self.get_uni()
    import random
    # Build a throwaway test uni with a random 10-character suffix.
    self.uni = ('psyc://beta.ve.symlynX.com/~pypsyctest'
                + ''.join(random.choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ123456789')
                          for _ in range(10)))
    #self.uni = raw_input('uni?: ')
    self.host = get_host(self.uni)
    self.username = get_user(self.uni)
    self.ui = 'wx'
def __init__(self, url):
    self.config = Config()
    # self.driver = webdriver.Chrome()
    # self.driver = webdriver.Ie()
    self.driver = webdriver.Firefox()
    self.driver.get(url)
    self.nowhandle = self.driver.current_window_handle
def AllowTransientNetworks(self, ipv4_list: List[str], ipv6_list: List[str]):
    """Temporarily allow connections from a network until the next reboot
    (or when firewall rules are reloaded).

    :param ipv4_list: List of IPv4 networks to allow
    :type ipv4_list: list[str]
    :param ipv6_list: List of IPv6 networks to allow
    :type ipv6_list: list[str]
    """
    parsed_ipv4 = Config.ParseNetworks(Config.IPv4, ipv4_list)
    parsed_ipv6 = Config.ParseNetworks(Config.IPv6, ipv6_list)
    if parsed_ipv4:
        self.RunNFT(['add', 'element', *self.TRANSIENT_IPV4_SET,
                     '{', ",".join(parsed_ipv4), '}'])
    if parsed_ipv6:
        self.RunNFT(['add', 'element', *self.TRANSIENT_IPV6_SET,
                     '{', ",".join(parsed_ipv6), '}'])
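# Hedged sketch (not from the original module): a standalone illustration of the nft
# command AllowTransientNetworks assembles, assuming TRANSIENT_IPV4_SET names the
# family/table/set as ('inet', 'filter', 'transient_allow_v4') -- the real set name may differ.
TRANSIENT_IPV4_SET_EXAMPLE = ('inet', 'filter', 'transient_allow_v4')  # assumed layout

def run_nft_example(args):
    # The real RunNFT would execute the nft binary; here we only print the command.
    print('nft', ' '.join(args))

parsed_ipv4_example = ['192.0.2.0/24', '198.51.100.0/24']
run_nft_example(['add', 'element', *TRANSIENT_IPV4_SET_EXAMPLE,
                 '{', ",".join(parsed_ipv4_example), '}'])
# Prints: nft add element inet filter transient_allow_v4 { 192.0.2.0/24,198.51.100.0/24 }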
def __init__(self) -> None:
    """The Bot class, where the magic happens."""
    intents = discord.Intents.all()
    super().__init__(command_prefix='!', case_insensitive=True, intents=intents)
    self.config = Config.load()
    self.load_cogs()
    self.run()
def __init__(self, app):
    Config.__init__(self, app)
    self.serverconfig_file = os.path.join(self.settings_folder, 'config')
import os
import sys

import xgbfir
import pandas as pd
import xgboost as xgb

sys.path.append("../src")
from base import Utilities, Config
from common import CustomTransformation

config = Config()
train_module = CustomTransformation(config, 'train')
watchlist = [(train_module.ddata, 'train')]
print(train_module.final_columns)

params = Utilities.load_json(config.params_file)
# Cross-validate to monitor AUC/error, then fit the final booster.
history = xgb.cv(params, train_module.ddata, 300, early_stopping_rounds=30,
                 metrics=["auc", "error"], verbose_eval=True)
model = xgb.train(params, train_module.ddata, 200, verbose_eval=True)

class_mapping = Utilities.load_json(config.class_mapping_file)
test_module = CustomTransformation("test", class_mapping, train_module.final_columns)
y_pred = model.predict(test_module.ddata)

submission_df = pd.DataFrame({config.notable_columns["ID"]: list(test_module.main_column.values),
                              config.notable_columns["Target"]: list(y_pred)})
submission_df.to_csv(os.path.join(config.home, 'submission', 'one.csv'),
                     float_format='%0.6f', index=False)
import argparse

from base import Config, train

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--lr', default=0.01, type=float)
    parser.add_argument('--ld', default=0.3, type=float)
    parser.add_argument('--wd', default=0.001, type=float)
    parser.add_argument('--dr', default=1, type=int)
    parser.add_argument('--ar', default='peng', type=str)
    parser.add_argument('--ch', nargs='+', default=(32, 64, 128, 256, 256, 64), type=int)
    parser.add_argument('--ta', default='ageC', type=str)
    args = parser.parse_args()

    cfg = Config(learning_rate=args.lr, lr_decay=args.ld, weight_decay=args.wd,
                 dropout=bool(args.dr), arch=args.ar, channels=args.ch, target=args.ta)
    train(cfg)
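# Hedged sketch (not part of the original script): how the argument casts above behave when
# parsed programmatically; --dr is an int coerced to bool for dropout, and --ch takes a
# space-separated list of channel widths via nargs='+'. Config/train are not called here.
import argparse as _argparse_demo

_demo = _argparse_demo.ArgumentParser()
_demo.add_argument('--dr', default=1, type=int)
_demo.add_argument('--ch', nargs='+', default=(32, 64, 128, 256, 256, 64), type=int)
_demo_args = _demo.parse_args(['--dr', '0', '--ch', '16', '32', '64'])
print(bool(_demo_args.dr))  # False -> dropout disabled
print(_demo_args.ch)        # [16, 32, 64]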
warnings.warn = warn

if __name__ == '__main__':
    # Each SLURM array task trains one repetition.
    rep = int(os.environ['SLURM_ARRAY_TASK_ID'])

    parser = argparse.ArgumentParser()
    parser.add_argument('--tss', default=100, type=int, help='training sample size')
    parser.add_argument('--ssd', default='/SampleSplits/',
                        help='sample splits directory (default: /SampleSplits/)')
    parser.add_argument('--nw', default=8, type=int, help='number of workers')
    parser.add_argument('--bs', default=16, type=int, help='batch size')
    parser.add_argument('--sn', default='age', type=str, help='scorename')
    parser.add_argument('--lr', default=0.01, type=float)
    args = parser.parse_args()

    cfg = Config(sample_size=args.tss, repetition_num=rep, sample_splits_dir=args.ssd,
                 num_workers=args.nw, batch_size=args.bs, scorename=args.sn,
                 learning_rate=args.lr)
    train(cfg)
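# Hedged sketch (illustrative only): the repetition index comes from the SLURM job array,
# e.g. sbatch --array=0-9 would launch ten tasks, each with its own SLURM_ARRAY_TASK_ID.
# The default set below only lets this snippet run outside SLURM for demonstration.
import os as _os_demo

_os_demo.environ.setdefault('SLURM_ARRAY_TASK_ID', '3')  # set by SLURM in a real job
rep_example = int(_os_demo.environ['SLURM_ARRAY_TASK_ID'])
print(rep_example)  # 3 -> would be passed to Config(repetition_num=...) above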