def save_shared(token, owners):
    """Persist the bot token and owner list into the shared YAML config file."""
    shared = config.Config("Configs/Shared.yaml")
    data = shared.load_config()
    data["token"] = token
    data["owners"] = owners
    shared.save_config(data)
def save_shared(token, owners, config_directory):
    """Persist the bot token and owner list to Shared.yaml inside *config_directory*."""
    cfg = config.Config(f"{config_directory}/Shared.yaml")
    contents = cfg.load_config()
    contents["token"] = token
    contents["owners"] = owners
    cfg.save_config(contents)
def user_is_admin(user_id):
    """Return True if *user_id* is the configured owner or a DB-registered admin."""
    _config = config.Config()
    # Owner always counts as admin; compare as strings because the config
    # value and the caller may disagree on int vs str ids.
    if str(_config.owner_id) == str(user_id):
        return True
    # BUG FIX: the connection was previously never closed (resource leak).
    # NOTE(review): assumes the connection object exposes close() (true for
    # sqlite3, which this project imports elsewhere) — confirm.
    conn = db_access.create_connection()
    try:
        return db_access.is_admin(conn, user_id)
    finally:
        conn.close()
def update():
    """Refresh tracked character levels from m3stat and announce level-ups on Slack."""
    cf = config.Config()
    chars = manager.Chars()
    server = cf.get('server')  # hoisted: loop-invariant config lookup
    for friend in manager.Friends().list:
        for char in friend['chars']:
            page = BeautifulSoup(
                urllib.request.urlopen(
                    'https://www.m3stat.com/players/' + server + '/' + char['name']
                ).read(),
                'html.parser')
            known = chars.findByName(char['name'])
            oldLevel = known['level'] if known is not None else None
            # Raw string: '\d' / '\s' are invalid escapes in a plain literal.
            m = re.search(r'(\d+)\sLevel', page.find('h4').get_text())
            if m is None:
                continue
            newLevel = int(m.group(1))
            chars.push({
                'name': char['name'],
                'level': newLevel,
            })
            if oldLevel is None:
                continue
            if oldLevel < newLevel:
                # BUG FIX: the link previously hard-coded the 'Palmyra' server
                # although the page above was fetched from cf.get('server').
                link = 'https://www.m3stat.com/players/' + server + '/' + char['name']
                publisher.Publisher().publishPrivate({
                    'text': '<@' + friend['slack'] + '> est maintenant niveau *'
                            + str(newLevel) + ' * avec <' + link + '|' + char['name'] + '> !'
                })
    chars.persist()
def start(): # start message printWelcome() # python version check if not sys.version_info[0] == 2 or not sys.version_info[1] == 7: print "[Error] You are running Python %d.%d, please run the script with Python 2.7."\ % (sys.version_info[0], sys.version_info[1]) close_installer() # make sure we have all the dependencies check_dependencies() # extract server files if os.path.isdir("src/"): print "Extracting Server files...\nServer files already exists.\n" else: extract_files() # configuration try: from src import config except: print "[Error] Invalid server files..." close_installer() cfg = config.Config("properties.cfg") cfg.read() # get username from the user print "Please enter your username for the web user interface:" username = raw_input(">>> ") if len(username) == 0: username == "admin" # get password from the user print "Please enter your password:"******">>> ") if len(password) == 0: password == "@mobileVPN" print "Please enter your encryption key (leave empty for the default key):" key = raw_input(">>> ") if len(key) == 0: key = "qNX2tvW06TbkkXNb" print "" # progress bar i = InstallBar("Writing configuration file", 5) # update the configuration cfg["username"] = username i.progress(1) time.sleep(0.1) cfg["password"] = password i.progress(1) time.sleep(0.05) if len(key) > 0: cfg["key"] = key time.sleep(0.05) i.progress(1) cfg.pack() i.progress(1) cfg.write({}) i.progress(1) # done setup print "[Info] Setup done successfully." print "Server usage: 'python main.py'" close_installer()
def __init__(self, name):
    """Load the persisted JSON list for *name*, creating the file if absent.

    Args:
        name: logical collection name; lower-cased and used as the file stem.
    """
    self.cf = config.Config()
    self.name = name.lower()
    self.inMemoryList = []
    self.persistedList = []
    path = self.cf.databasePath() + self.name + '.json'
    try:
        # BUG FIX: files were opened without ever being closed (leaked file
        # handles); 'with' guarantees closing on both paths.
        with open(path, 'r') as file:
            self.persistedList = json.load(file)
    except (OSError, ValueError):
        # Missing, unreadable, or corrupt file: (re)initialise it with an
        # empty list. Previously a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit.
        with open(path, 'w+') as file:
            json.dump(self.persistedList, file)
def main():
    """Build the simple-conv DNN described by the config and write it to disk."""
    # parse input arguments
    cli = parse_args()
    conf = config.Config(cli.config)
    # derive the DNN input header from the first training file
    layernames = make_layernames(conf.training_files[0], conf.input_timesteps)
    # create and compile model, then show its layout
    print("[INFO] information about generated DNN model:")
    model = create_model(layernames, conf.training_shape)
    model.summary(line_length=150)
    # write model to disk under a name that encodes the hyper-parameters
    imw = conf.training_shape[0]
    imh = conf.training_shape[1]
    imc = conf.training_shape[2]
    name = 'simple_conv__in' + str(len(layernames)) \
        + '_tInputs' + str(conf.input_timesteps) \
        + '_imw' + str(imw) \
        + '_imh' + str(imh) \
        + '_imc' + str(imc) \
        + '_out6' + '.h5'
    model.save(os.path.join(cli.model_out, name))
def main():
    """Build the deepvo training model from the config and save it to disk."""
    # parse input arguments
    cli = parse_args()
    conf = config.Config(cli.config)
    # derive the DNN input header from the first training file
    layernames = make_layernames(conf.training_files[0], conf.input_timesteps)
    # create and compile model
    model = create_model(layernames, conf.training_shape)
    # write model to disk under a name that encodes the hyper-parameters
    imw = conf.training_shape[0]
    imh = conf.training_shape[1]
    imc = conf.training_shape[2]
    name = 'deepvo__train__'\
        + 'in' + str(len(layernames)) \
        + '_tInputs' + str(conf.input_timesteps) \
        + '_imw' + str(imw) \
        + '_imh' + str(imh) \
        + '_imc' + str(imc) \
        + '_out6.h5'
    model.save(os.path.join(cli.model_out, name))
    # print final model
    model.summary(line_length=150)
def main():
    """Rebuild the deepvo model, load trained weights, and save an eval copy."""
    # parse input arguments
    args = parse_args()
    conf = config.Config(args.config)
    # make header for the DNN
    layernames = make_layernames(conf.training_files[0], conf.input_timesteps)
    # create and compile model
    model = create_model(layernames, conf.training_shape)
    # load trained weights (compile=False: only the weights are needed)
    trained_model = tf.keras.models.load_model(args.trained_model_file, compile=False)
    model.set_weights(trained_model.get_weights())
    # write model to disk as '<original stem>__eval.<original extension>'
    # BUG FIX: the previous str.replace(ext, '') removed *every* occurrence of
    # the extension text inside the file name (e.g. the 'h5' in 'model_h5.h5'),
    # not just the suffix, and split('/') broke on Windows paths; os.path
    # handles both correctly.
    stem, ext = os.path.splitext(os.path.basename(args.trained_model_file))
    name = stem + '__eval' + ext
    model.save(os.path.join(args.model_out, name))
    # print final model
    model.summary(line_length=150)
# NOTE(review): the statement below is the tail of a dataset-building function
# whose 'def' line lies outside this chunk; left untouched.
    return ds

###################### test code ######################
# force tensorflow to throw its inital messages on the very beginning of that script
tf.config.experimental_list_devices()
# define signal handler for clean exit on Ctrl+C
signal.signal(signal.SIGINT, signal_handler)
# give info about used tensorflow version
print("[INFO] using tensorflow version {}".format(tf.__version__))
# parse config file
args = parse_args()
conf = config.Config(args.config)
# make trainaing dataset from tfrec
print("[INFO] training dataset will be generated from following files:", conf.training_files)
ds_train, train_ds_info, train_ds_meta = tfrec_to_ds(conf.training_files, args.unpack_to, conf.training_shape, conf.original_shape, conf.input_timesteps,\
                                                     conf.t0, conf.t1, "training dataset", [],\
                                                     conf.subsequence_len, conf.subsequence_shift, conf.debug)
num_train_obs = train_ds_info[0]         # number of observations in the training set
layernames = train_ds_info[1]            # input-layer names derived from the tfrec files
cleanup_files = train_ds_meta[0]         # temp files unpacked to disk; cleaned up later
train_label_list_dbg = train_ds_meta[1]  # labels retained only for debugging
# NOTE ds_train.shape: ((all input images),(label)) -> ((im_l_0,im_r_0,im_l_1,im_r_1,...,im_l_(input_timesteps),im_r_(input_timesteps)),(tx,ty,tz,roll,pitch,yaw))
# NOTE ds_train.shape mono case (most often used): ((im_l_0, im_l_1),(tx,ty,tz,roll,pitch,yaw)) [with mono sequences and INPUT_TIMESTEPS=2]
# load validation dataset if requested
def __init__(self, name):
    """Read the JSON file for *name* from the config directory into self.list."""
    self.cf = config.Config()
    self.name = name.lower()
    self.list = []
    path = self.cf.configPath() + self.name + '.json'
    with open(path, 'r') as file:
        self.list = json.load(file)
import random
import sqlite3
from time import sleep

import discord
try:
    from PIL import Image
except ImportError:
    # Fallback for legacy installs exposing a top-level 'Image' module.
    import Image
import pytesseract
import requests
from discord.ext import commands

import src.util as util
from src import const, config, db_access

# NOTE(review): this rebinds 'config' from the imported module to a Config
# instance, shadowing the import above — code later in this file can no longer
# reach the 'config' module itself. Presumably intentional, but confirm.
config = config.Config()
# Point pytesseract at the tesseract binary configured for this deployment.
pytesseract.pytesseract.tesseract_cmd = config.tesseract_location
# Populated by setup(); the commands registered below rely on it being set.
bot: discord.ext.commands.Bot = None
# Image file extensions accepted for OCR input.
pic_ext = ['.jpg', '.png', '.jpeg']


def setup(_bot: discord.ext.commands.Bot):
    """Register this extension's commands on the given bot instance."""
    global bot
    bot = _bot
    bot.add_command(spam)
    bot.add_command(roll)
    bot.add_command(pick_card)
    bot.add_command(random_cat_fact)
    bot.add_command(get_weather_report)
    bot.add_command(read_image)
    bot.add_command(add_admin)
def update():
    """Poll the m3stat unique-kill feed and publish new kills to Slack."""
    cf = config.Config()
    dataKills = BeautifulSoup(
        urllib.request.urlopen('https://www.m3stat.com/uniques/'+cf.get('server')).read(),
        'html.parser'
    )
    kills = manager.Kills()
    friends = manager.Friends()
    uniques = manager.Uniques()
    publisher = p.Publisher()
    for tr in dataKills.find('div', {'name': 'last_kills'}).table.tbody.find_all('tr', {}, False):
        # Rows carrying a 'style' attribute are table decoration, not kills.
        if tr.has_attr('style'):
            continue
        tds = tr.find_all('td', {}, False)
        unique = tds[0].get_text()[2:]  # first two chars are a marker, not the name
        oldKill = kills.findByUnique(unique)
        t = ts(tds[2].get_text())
        newKill = {
            'unique': unique,
            'player': tds[1].get_text(),
            'timestamp': t,
            'date': datetime.datetime.fromtimestamp(t).strftime('%Y-%m-%d %H:%M:%S'),
        }
        kills.push(newKill)
        # Announce only genuinely new kills; the 60 s grace window avoids
        # re-announcing the same kill when the feed's timestamps drift.
        # IDIOM FIX: '== None' / '!= None' replaced with 'is (not) None'.
        if oldKill is None or (oldKill['timestamp'] + 60) < newKill['timestamp']:
            if newKill['player'] == '(Spawned)':
                image = uniques.findByName(newKill['unique'])
                if image is not None:
                    image = image['spawn']
                publisher.publishPublic(
                    {
                        'text': '*'+newKill['unique']+'* est apparu !',
                        'attachments': [
                            {
                                "title": "Lieux d'apparition",
                                'image_url': image
                            }
                        ]
                    }
                )
            else:
                publisher.publishPublic(
                    {
                        'text': '`'+newKill['player']+'` a éliminé *'+newKill['unique']+'*'
                    }
                )
            # Friends: add personal stats when the killer is a tracked friend.
            # ('(Spawned)' never matches a friend, so placement here is safe.)
            friend = friends.findByChar(newKill['player'])
            if friend is not None:
                image = uniques.findByName(newKill['unique'])
                if image is not None:
                    image = image['wallpaper']
                link = 'https://www.m3stat.com/players/'+cf.get('server')+'/'+newKill['player']
                dataPlayer = BeautifulSoup(
                    urllib.request.urlopen(link).read(),
                    'html.parser'
                )
                killStats = []
                # Scrape the per-unique kill counters from the player page.
                for statRow in dataPlayer.select('.col-sm-offset-4 tr'):
                    statCells = statRow.find_all('td', {}, False)
                    killStats.append({
                        "title": statCells[0].get_text(),
                        "value": statCells[1].get_text(),
                        "short": "true"
                    })
                publisher.publishPrivate(
                    {
                        'text': '<@'+friend['slack']+'> a éliminé *'+newKill['unique']+'* avec <'+link+'|'+newKill['player']+'> !',
                        'attachments': [
                            {
                                "title": "Gratz!",
                                "image_url": image,
                            },
                            {
                                "fields": killStats,
                            }
                        ]
                    }
                )
    kills.persist()
def __init__(self):
    """Resolve the public and private Slack webhook URLs from the config."""
    conf = config.Config()
    self.publicWebhook = conf.get('webhook-sro-notifier')
    self.privateWebhook = conf.get('webhook-sro')
        # NOTE(review): the assignments below are the tail of an __init__
        # whose 'def' line lies outside this chunk; left untouched.
        self.sidney = sidney.Sidney(self)
        self.shared = shared_yaml
        self.MessageHandler = message_handler.MessageHandler(self)
        self.start_background_tasks()

    def start_background_tasks(self):
        """Schedule the recurring farm/rep/fishy/work tasks on the event loop."""
        self.loop.create_task(self.messages.farm())
        self.loop.create_task(self.tatsumaki.rep())
        self.loop.create_task(self.sushii.rep())
        self.loop.create_task(self.sushii.fishy())
        self.loop.create_task(self.sidney.work())

    async def on_ready(self):
        """Log the bot's identity once the Discord connection is established."""
        print("\nLogged in as")
        print(self.user.name)
        print(self.user.id)
        print("------")

    async def on_message(self, message):
        """Delegate every incoming message to the MessageHandler."""
        await self.MessageHandler.handle_message(message)


# Module-level startup: load the shared YAML config and run the client.
# NOTE(review): bot=False runs this as a self-bot — against Discord ToS.
shared = config.Config("Configs/Shared.yaml")
shared_yaml = shared.load_config()
client = DAB(command_prefix=shared_yaml["prefix"])
client.load_extension("src.COMMANDS")
client.run(shared_yaml["token"], bot=False)
    def setUp(self):
        """Create a fresh Config instance before each test case."""
        super(ConfigTest, self).setUp()
        self.config = config.Config()
        # NOTE(review): the assignments below are the tail of an __init__
        # whose 'def' line lies outside this chunk; left untouched.
        self.sidney = sidney.Sidney(self)
        self.shared = shared_yaml
        self.MessageHandler = message_handler.MessageHandler(self)
        self.start_background_tasks()

    def start_background_tasks(self):
        """Schedule the recurring farm/rep/fishy/work tasks on the event loop."""
        self.loop.create_task(self.messages.farm())
        self.loop.create_task(self.tatsumaki.rep())
        self.loop.create_task(self.sushii.rep())
        self.loop.create_task(self.sushii.fishy())
        self.loop.create_task(self.sidney.work())

    async def on_ready(self):
        """Log the bot's identity once the Discord connection is established."""
        print("\nLogged in as")
        print(self.user.name)
        print(self.user.id)
        print("------")

    async def on_message(self, message):
        """Delegate every incoming message to the MessageHandler."""
        await self.MessageHandler.handle_message(message)


# Module-level startup: load the real (non-test) config and run the client.
# NOTE(review): bot=False runs this as a self-bot — against Discord ToS.
shared = config.Config("RealConfigs/Shared.yaml")
shared_yaml = shared.load_config()
client = DAB(command_prefix=shared_yaml["prefix"])
client.load_extension("src.COMMANDS")
client.run(shared_yaml["token"], bot=False)
    def setUp(cls):
        """Create a test Config and record the test start time."""
        # NOTE(review): the parameter is named 'cls' but this is an instance
        # setUp — confirm whether setUpClass/@classmethod was intended.
        cls.c = config.Config("test_config")
        cls.ts = time.time()
        # NOTE(review): the statements below are the tail of an on_ready
        # handler whose 'async def' line lies outside this chunk; left untouched.
        print("\nLogged in as")
        print(self.user.name)
        print(self.user.id)
        print("------")
        await self.start_background_tasks()

    async def on_message(self, message):
        """Wait until the client is fully ready, then delegate the message."""
        await self.wait_until_ready()
        await self.MessageHandler.handle_message(message)


# Module-level startup: allow the config directory to be overridden on the CLI.
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", required=False, help="Optional directory to read YAML configs from")
args = parser.parse_args()
config_directory = "Configs"
if args.config:
    config_directory = args.config
shared = config.Config(f"{config_directory}/Shared.yaml")
shared_yaml = shared.load_config()
client = DAB(config_directory, shared_yaml["prefix"])
client.load_extension("src.COMMANDS")
# NOTE(review): bot=False runs this as a self-bot — against Discord ToS.
client.run(shared_yaml["token"], bot=False)