def getContext(args, withCreds=True, withConfig=True, failOnDiscoveryError=True):
    """Build the shared runtime context namespace.

    Args:
        args: parsed command-line arguments namespace.
        withCreds: load credentials into the context.
        withConfig: load configuration (requires withCreds).
        failOnDiscoveryError: forwarded to Config construction.

    Returns:
        types.SimpleNamespace with ar/cr/cf/cs (and creds/config when loaded).
    """
    setupLogging(args)
    ctx = types.SimpleNamespace()
    ctx.ar = args          # command-line arguments
    ctx.cr = None          # credentials, filled below when withCreds
    ctx.cf = None          # configuration, filled below when withConfig
    ctx.cs = getConstants()
    if withCreds:
        ctx.creds = Creds(ctx)
        ctx.cr = ctx.creds.get()
        if withConfig:
            # Config discovery needs credentials, so it only runs in this branch.
            ctx.config = Config(ctx, failOnDiscoveryError=failOnDiscoveryError)
            ctx.cf = ctx.config.getFull()
    elif withConfig:
        # Inconsistent request: configuration cannot be loaded without credentials.
        fail("Can't get configuration without credentials")
    if args.dump_context:
        # Debug aid: dump the entire context as YAML and exit immediately.
        _printHeader('COMMAND LINE ARGUMENTS')
        print(yaml.dump({'ar': nestedNsToObj(ctx.ar)}))
        _printHeader('CREDENTIALS')
        print(yaml.dump({'cr': nestedNsToObj(ctx.cr)}))
        _printHeader('CONFIGURATION')
        print(yaml.dump({'cf': nestedNsToObj(ctx.cf)}))
        sys.exit(0)
    return ctx
def main():
    """Generate one HTML invoice per database row and render them to files.

    Reads config and company data from ./data, injects payment details,
    parses the invoice CSV database, and writes rendered HTML into an
    output directory named after the database file.
    """
    locale.setlocale(locale.LC_ALL, "")
    config = Config("./data/config.json")
    company = get_data_from_json("./data/company.json")
    config.set("payment_paypal", "PayPal address: " + company["paypal"])
    with codecs.open("template/bank-details.html", "r", encoding="utf-8") as html_doc:
        config.set("payment_wire", html_doc.read())

    template = InvoiceTemplate(config.get("html_template_path"), company)
    if template.is_invalid():
        return

    invoice_list = InvoiceList(config.get("database_path"))
    invoice_list.parse_csv(config)

    # Lazy pipelines: nothing is rendered until save_html_files consumes them.
    htmls = map(template.get_invoices_as_html, invoice_list.db, itertools.repeat(config))
    filenames = (invoice.get_filename() for invoice in invoice_list.db)

    db_file_path = config.get("database_path")
    # Explicit check instead of `assert`: asserts are stripped under `python -O`,
    # which would silently skip this validation.
    if not os.path.isfile(db_file_path):
        raise FileNotFoundError(f"Database file not found: {db_file_path}")
    db_file_name = os.path.splitext(os.path.basename(db_file_path))[0]

    dir_out = os.path.join(config.get("output_path"), db_file_name)
    set_up_output_directory(dir_out)
    save_html_files(dir_out, htmls, filenames)
    render(dir_out, as_png=False)
def runWithConfig(self, cnf):
    """Run a simulation with the given config and return the final pressure.

    The engine is ticked until its tick budget is exhausted; `ticks_left`
    is decremented here so `tick()` may also adjust it.
    """
    sim = Engine(Config(cnf))
    while sim.ticks_left > 0:
        sim.tick()
        sim.ticks_left -= 1
    return sim.detector.getPressure()
def __init__(self, **kwargs):
    """Configure the Discord bot: intents, prefix/owner from config, DB handle, cogs."""
    self.config = Config()
    # Subscribe only to the gateway intents the bot actually needs.
    intents = discord.Intents.none()
    intents.guilds = True
    intents.messages = True
    intents.reactions = True
    intents.message_content = True
    super().__init__(
        help_command=helpcommand.EmbedHelpCommand(),
        case_insensitive=True,
        command_prefix=self.config.prefix,
        owner_id=int(self.config.owner_id),
        intents=intents,
        description="Bot for following twitter users on discord",
        allowed_mentions=discord.AllowedMentions(everyone=False),
        **kwargs,
    )
    self.logger = log.get_logger("Siniara")
    self.start_time = time()
    # Twitter brand color ("#1da1f2") stored as an int for embed coloring.
    self.twitter_blue = int("1da1f2", 16)
    self.db = maria.MariaDB(self)
    # Extension modules loaded at startup (presumably in setup_hook/on_ready — confirm).
    self.cogs_to_load = [
        "cogs.commands",
        "cogs.errorhandler",
        "cogs.asyncstreamer",
        "cogs.twitter",
    ]
def __init__(self, config_file='config.txt'): CONFIGURATION = Config(config_file) #Initialization of model self.model = self.init_model( CONFIGURATION.CHANNELS_IN, CONFIGURATION.CHANNELS_OUT, CONFIGURATION.LOAD_MODEL, CONFIGURATION.MODEL_LOAD_PATH, CONFIGURATION.MODEL_NAME, CONFIGURATION.MODEL_SUFFIX, CONFIGURATION.USE_DECONV_LAYERS) #Initialization of device self.device = self.init_device(CONFIGURATION.DEVICE) #Initialization of optimizer self.optimizer = self.init_optimizer(CONFIGURATION.ADAM_LR, self.model.parameters()) #Initialization of loss function self.criterion = self.init_criterion(CONFIGURATION.CLASS_WEIGHT_0, CONFIGURATION.CLASS_WEIGHT_1, CONFIGURATION.LOSS_MODIFIER, self.device) #Initialization of metric function self.metric = self.init_metric() #Initialization of augmentation function self.aug = self.init_augmentation(CONFIGURATION) self.CONFIGURATION = CONFIGURATION
def index():
    """Render the paginated posts front page and handle login POSTs.

    Returns the rendered index template, or a JSON result when a login
    form is submitted via POST.
    """
    # --- Posts ---
    conf = Config()
    per_page = conf.PER_PAGE
    page, per_page, offset = get_page_args()
    # Count rows with COUNT(*) instead of fetching every post just to read rowcount.
    with MySQL() as c:
        c.execute("SELECT COUNT(*) AS total FROM posts")
        num_rows = c.fetchone()["total"]
    pagination = Pagination(page=page, per_page=per_page, total=num_rows,
                            bs_version=4, alignment="center")
    with MySQL() as c:
        # Parameterized LIMIT instead of f-string interpolation (SQL-injection
        # hardening). Literal % signs in DATE_FORMAT are doubled because pymysql
        # uses %-style placeholders when arguments are supplied.
        c.execute(
            "SELECT post_id, post_title, post_content, DATE_FORMAT(post_date, '%%d, %%M, %%Y at %%h:%%i %%p') as post_date, author_id FROM posts ORDER BY post_id DESC LIMIT %s, %s",
            (offset, per_page),
        )
        result_post = c.fetchall()
    # --- Account ---
    # If the user ticked remember_me before, mark the session logged in and return early.
    if session.get("remember_me"):
        setUserLoggedIn(True)
        return render_template("index.html", active='home', pagination=pagination,
                               news=result_post, admins=retrieveAdmins())
    # Handle a login form submission.
    if request.method == "POST":
        username = request.form.get("username")
        password = request.form.get("password")
        ret = loginUser(username, password)
        if ret == 0:
            return jsonify(success=False, error_msg="Invalid username, please try again.")
        elif ret == 1:
            return jsonify(success=False, error_msg="Wrong password, please try again.")
        if ret == 2:
            flash("You have successfully logged in", "success")
            return jsonify(success=True)
    return render_template("index.html", active='home', pagination=pagination,
                           news=result_post, admins=retrieveAdmins())
def __init__(self):
    """Load MySQL connection parameters from the user configuration."""
    params = Config().get_config_db_info()
    self.db_host = params['host']
    self.db_port = params['port']
    self.db_user = params['user']
    self.db_pass = params['pass']
def start(config_path):
    """Run one daily library update; failures are logged, never raised."""
    try:
        util.seperator("Starting Daily Run")
        cfg = Config(default_dir, config_path)
        cfg.update_libraries()
    except Exception as e:
        # Any failure is logged with a stack trace; the run still "finishes".
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    util.seperator("Finished Daily Run")
def predict_and_eval_score(configfile, data_path='data/valid/', seismic_name='seismic.npy', val_mask='horizons_1.npy'):
    """Run prediction on the validation volume and score it against the mask.

    Returns the IoU score (threshold 0.5) between the ground-truth mask and
    the prediction written by TrainRunner.
    """
    TrainRunner(configfile).predict(data_path, seismic_name, suffix='valid')
    cfg = Config(configfile)
    truth = np.load(f'data/valid/{val_mask}').astype(np.uint8)
    prediction = np.load(f'output/predictions/{cfg.MODEL_NAME}-valid.npy')
    return iou_score(truth, prediction, 0.5)
def timezone(self, inifile, timezone=None):
    """Get or set system timezone.

    Pass None to parameter timezone (the default) to get the current timezone,
    or pass a full name like 'Asia/Shanghai' to set it.

    Getter lookup order: the ini config file, the distro clock file
    (centos/redhat), then a content match of /etc/localtime against zoneinfo.
    Setter returns False on unknown zone or copy failure, otherwise the
    result of writing the setting back to the config file.
    """
    tzpath = '/etc/localtime'
    zonepath = '/usr/share/zoneinfo'
    config = Config(inifile)
    if not config.has_section('time'):
        config.add_section('time')

    if timezone is None:  # fixed: identity check, not `== None`
        # firstly read from config file
        timezone = ''
        if config.has_option('time', 'timezone'):
            timezone = config.get('time', 'timezone')
        if timezone:
            return timezone
        # or else check the system config file
        dist = ServerInfo.dist()
        if dist['name'] in ('centos', 'redhat'):
            clockinfo = raw_loadconfig('/etc/sysconfig/clock')
            if clockinfo and 'ZONE' in clockinfo:
                timezone = clockinfo['ZONE']
                return timezone
        else:
            pass
        # or else find the zoneinfo file whose content matches /etc/localtime
        with open(tzpath) as f:
            tzdata = f.read()
        regions = ServerSet.timezone_regions()
        for region in regions:
            regionpath = os.path.join(zonepath, region)
            for zonefile in os.listdir(regionpath):
                if not os.path.isfile(os.path.join(regionpath, zonefile)):
                    continue
                with open(os.path.join(regionpath, zonefile)) as f:
                    if f.read() == tzdata:
                        # got it!
                        return '%s/%s' % (region, zonefile)
        # falls through with implicit None when no zone matches (as before)
    else:
        # check and set the timezone
        timezonefile = os.path.join(zonepath, timezone)
        if not os.path.exists(timezonefile):
            return False
        try:
            shutil.copyfile(timezonefile, tzpath)
        except OSError:  # fixed: was a bare `except:` that hid KeyboardInterrupt etc.
            return False
        # write timezone setting to config file
        return config.set('time', 'timezone', timezone)
def __init__(self, **kwargs):
    """Configure the bot from config.toml: prefix/owner, logging, a global
    member cooldown (10 invocations per 60s) and the MariaDB handle."""
    self.config = Config("config.toml")
    super().__init__(
        command_prefix=self.config.prefix,
        owner_id=self.config.owner_id,
        **kwargs
    )
    self.logger = logger
    self.start_time = time()
    # Shared rate limit: 10 uses per 60 seconds, bucketed per guild member.
    self.global_cd = commands.CooldownMapping.from_cooldown(
        10.0, 60.0, commands.BucketType.member
    )
    self.db = maria.MariaDB(self)
def read_logs(cfn):
    """Load training loss and metric histories for the model named in config.

    Args:
        cfn: path to the configuration file; LOGDIR and MODEL_NAME are read
             from it to locate the log files.

    Returns:
        (loss, metrics): two lists of floats, in file order.
    """
    cf = Config(cfn)
    metricsfile = cf.LOGDIR + cf.MODEL_NAME + '-metrics-train.txt'
    lossfile = cf.LOGDIR + cf.MODEL_NAME + '-loss-train.txt'
    # Bug fix: the original called f.read() twice — the second read returned ''
    # (file already consumed), so the slice bound was len('') - 1 == -1 and the
    # code only worked by accident (dropping the trailing empty token left by a
    # trailing space). split() without arguments discards trailing whitespace
    # tokens directly.
    with open(metricsfile) as f:
        metrics = [float(tok) for tok in f.read().split()]
    with open(lossfile) as f:
        loss = [float(tok) for tok in f.read().split()]
    return loss, metrics
def __init__(self):
    """Set up the bot: config, per-day log file, and initial state flags."""
    super(CXBot, self).__init__()
    self.c = Config()
    # One log file per calendar day, e.g. logs/2024-01-31.log
    self.logger = Logger('logs/%s.log' % datetime.now().date().isoformat())
    self.max_messages = 20000
    self.busy = False
    self.ready = False
def main():
    """Build the tornado application, bind it per config.ini, and run the IO loop."""
    # settings of tornado application
    settings = {
        'root_path': root_path,
        'data_path': os.path.join(root_path, 'data'),
        'conf_path': os.path.join(root_path, 'data', 'config.ini'),
        'index_path': os.path.join(root_path, 'static', 'index.html'),
        'static_path': os.path.join(root_path, 'static'),
        'xsrf_cookies': True,
        'cookie_secret': make_cookie_secret(),
    }
    # Route table: order matters — the catch-all 404 handler must stay last.
    application = web.Application([
        (r'/xsrf', web.XsrfHandler),
        (r'/authstatus', web.AuthStatusHandler),
        (r'/login', web.LoginHandler),
        (r'/logout', web.LogoutHandler),
        (r'/query/(.+)', web.QueryHandler),
        (r'/utils/network/(.+?)(?:/(.+))?', web.UtilsNetworkHandler),
        (r'/utils/process/(.+?)(?:/(.+))?', web.UtilsProcessHandler),
        (r'/utils/time/(.+?)(?:/(.+))?', web.UtilsTimeHandler),
        (r'/utils/ssl/(.+?)(?:/(.+))?', web.UtilsSSLHandler),
        (r'/setting/(.+)', web.SettingHandler),
        (r'/operation/(.+)', web.OperationHandler),
        (r'/page/(.+)/(.+)', web.PageHandler),
        (r'/backend/(.+)', web.BackendHandler),
        (r'/sitepackage/(.+)', web.SitePackageHandler),
        (r'/client/(.+)', web.ClientHandler),
        (r'/((?:css|js|js.min|lib|partials|images|favicon\.ico|robots\.txt)(?:\/.*)?)', web.StaticFileHandler, {
            'path': settings['static_path']
        }),
        (r'/($)', web.StaticFileHandler, {
            'path': settings['index_path']
        }),
        # NOTE(review): file download handler is rooted at '/' — confirm the
        # handler itself restricts paths, otherwise this can expose any file.
        (r'/file/(.+)', web.FileDownloadHandler, {
            'path': '/'
        }),
        (r'/fileupload', web.FileUploadHandler),
        (r'/version', web.VersionHandler),
        (r'/.*', web.ErrorHandler, {
            'status_code': 404
        }),
    ], **settings)
    # read configuration from config.ini
    cfg = Config(settings['conf_path'])
    server_ip = cfg.get('server', 'ip')
    server_port = cfg.get('server', 'port')
    server = tornado.httpserver.HTTPServer(application)
    server.listen(server_port, address=server_ip)
    write_pid()
    tornado.ioloop.IOLoop.instance().start()
def start(config_path, test, daily, collections):
    """Run one library update of the given kind; failures are logged, never raised.

    Args:
        config_path: path passed to Config along with default_dir.
        test: run in test mode (forwarded to update_libraries).
        daily: label the run "Daily" (takes precedence over test/collections).
        collections: collections-only mode (forwarded to update_libraries).
    """
    # Renamed local from `type` to `run_type`: `type` shadows the builtin.
    if daily:
        run_type = "Daily "
    elif test:
        run_type = "Test "
    elif collections:
        run_type = "Collections "
    else:
        run_type = ""
    util.seperator("Starting {}Run".format(run_type))
    try:
        config = Config(default_dir, config_path)
        config.update_libraries(test, collections)
    except Exception as e:
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    util.seperator("Finished {}Run".format(run_type))
def __enter__(self):
    """Open a MySQL connection and return a dict cursor for `with MySQL() as c:`.

    Raises:
        pymysql.ProgrammingError: re-raised after logging when the connection
            cannot be established. (Previously the error was printed and
            swallowed, and the old `finally` block then crashed with an
            unrelated AttributeError because self.conn was never assigned.)
    """
    conf = Config()
    try:
        self.conn = pymysql.connect(host=conf.MYSQL_HOST,
                                    user=conf.MYSQL_USERNAME,
                                    passwd=conf.MYSQL_PASSWORD,
                                    db=conf.MYSQL_DATABASE,
                                    autocommit=True,
                                    cursorclass=pymysql.cursors.DictCursor)
    except pymysql.ProgrammingError as err:
        print(f"ERROR: Caught an Error: {err}")
        raise
    # Only reached on a successful connect.
    self.cursor = self.conn.cursor()
    return self.cursor
def __init__(self):
    """Load DB connection settings, create every application database, and
    set up the table creator.

    Each database name is also stored as an attribute (e.g. self.str_db_k_data)
    for use by other methods, exactly as the original hand-written blocks did.
    """
    # get the user configuration of db info:
    user_config = Config()
    user_db_param = user_config.get_config_db_info()
    self.db_host = user_db_param['host']
    self.db_port = user_db_param['port']
    self.db_user = user_db_param['user']
    self.db_pass = user_db_param['pass']
    # (attribute name, database name) pairs, in the original creation order.
    # Replaces ten copy-pasted assign-then-create blocks with one loop.
    databases = [
        ('str_db_k_data', 'db_k_data'),                               # k_data database
        ('str_db_history_data', 'db_history_data'),                   # history_data database
        ('str_db_investment_data', 'db_investment_data'),             # investment database
        ('str_db_stock_classification', 'db_stock_class'),            # stock classification database
        ('str_db_consolidated_bs_year', 'db_consolidated_bs_year'),   # consolidated bs (year)
        ('str_db_consolidated_bs_season', 'db_consolidated_bs_season'),   # consolidated bs (season)
        ('str_db_consolidated_pl_year', 'db_consolidated_pl_year'),   # consolidated pl (year)
        ('str_db_consolidated_pl_season', 'db_consolidated_pl_season'),   # consolidated pl (season)
        ('str_db_consolidated_cash_year', 'db_consolidated_cash_year'),   # consolidated cash (year)
        ('str_db_consolidated_cash_season', 'db_consolidated_cash_season'),  # consolidated cash (season)
    ]
    for attr, db_name in databases:
        setattr(self, attr, db_name)
        self.create_db(db_name)
    # create table
    self.table_creator = Table_creator()
def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from):
    """Run one library update of the given kind and log its total run time.

    The run label is chosen by the first truthy flag, in priority order:
    daily > test > collections > libraries > plain run.
    """
    label_by_flag = [
        (daily, "Daily "),
        (is_test, "Test "),
        (collections_to_run, "Collections "),
        (libraries_to_run, "Libraries "),
    ]
    start_type = next((label for flag, label in label_by_flag if flag), "")
    start_time = datetime.now()
    util.separator(f"Starting {start_type}Run")
    try:
        config = Config(default_dir, config_path, libraries_to_run)
        config.update_libraries(is_test, collections_to_run, resume_from)
    except Exception as e:
        # Never let a failed run escape; log it and report the elapsed time anyway.
        util.print_stacktrace()
        logger.critical(e)
    logger.info("")
    util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
def run_tests(default_dir):
    """Run the full Plex Meta Manager test battery; Ctrl-C cancels cleanly."""
    try:
        config = Config(default_dir)
        logger.info("")
        util.separator("Mapping Tests")
        for library in config.libraries:
            config.map_guids(library)
        # Each suite takes the shared config; preserve the original order.
        for suite in (anidb_tests, imdb_tests, mal_tests,
                      tautulli_tests, tmdb_tests, trakt_tests, tvdb_tests):
            suite(config)
        util.separator("Finished All Plex Meta Manager Tests")
    except KeyboardInterrupt:
        util.separator("Canceled Plex Meta Manager Tests")
def main():
    """Ensure NLP resources exist, then parse the dataset and save its summary."""
    # Check for the language add-ons required by nltk; download them if missing.
    try:
        nltk.tokenize.word_tokenize('Existe nltk punkt')
    except LookupError:
        nltk.download('punkt')
    try:
        spacy.load('pt')
    except IOError:
        download('pt')
    config = Config('data/configuration/', 'config.json')
    # Run the main step of each class: read the input files and build the parser.
    parser = run_data_parse(config)
    #model = run_model(config)
    # Save the dataset's key information.
    create_dataset_info(parser)
def dyfimail(p):
    """
    Simple interface to send mail. Subject, to, and from have default
    values which may be overridden via keys of `p`.

    Usage:
        from mail import dyfimail
        dyfimail({'subject':'x','to':'recipient','text':'body'})

    Recognized keys: 'subject', 'to', 'text', and optional 'attachment'.
    When the module-level flag `use_sendmail` is set, delegates to sendmail().
    """
    config = Config()
    msgsubj = 'DYFI Autolocator Alert'
    # NOTE(review): msgfrom is read from config but never used below — confirm
    # whether the mail command should carry a From header.
    msgfrom = config.mail['operator']
    msgto = config.mail['to']
    if 'subject' in p:
        msgsubj = p['subject']
    if 'to' in p:
        msgto = p['to']
    if use_sendmail:
        sendmail(p)
        return
    from subprocess import Popen, PIPE
    print('Mailer:subj:', msgsubj)
    print('Mailer:to:', msgto)
    # Build the external mail command; the body is piped in via stdin.
    command = [config.mail['mailbin'], '-s', '"' + msgsubj + '"']
    if 'attachment' in p:
        command.append('-a')
        command.append(p['attachment'])
    command.append(msgto)
    print('Mail command:', command)
    mailer = Popen(command, stdin=PIPE, universal_newlines=True)
    mailer.communicate(p['text'])
#!/usr/bin/env python # -*- coding: utf-8 -*- # https://github.com/r3nt0n/bopscrk # bopscrk - init script """ Before Outset PaSsword CRacKing is a tool to assist in the previous process of cracking passwords. """ name = 'bopscrk.py' __author__ = 'r3nt0n' __version__ = '2.3.1' __status__ = 'Development' from modules.args import Arguments from modules.config import Config args = Arguments() Config = Config(args.cfg_file) Config.setup() if __name__ == '__main__': from modules import main as bopscrk bopscrk.run()
def start(attrs):
    """Run one Plex Meta Manager update described by `attrs`, with per-run
    rotating file logging, global stats collection, and end-of-run webhooks.

    Recognized attrs keys: 'time', 'test', 'collections', 'libraries';
    'time' and 'time_obj' are filled in here if absent.
    """
    # Per-run log file; roll over any log left from a previous run.
    file_logger = os.path.join(default_dir, "logs", "meta.log")
    should_roll_over = os.path.isfile(file_logger)
    file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
    util.apply_formatter(file_handler)
    file_handler.addFilter(fmt_filter)
    if should_roll_over:
        file_handler.doRollover()
    logger.addHandler(file_handler)
    util.separator()
    logger.info("")
    # ASCII-art banner.
    logger.info(util.centered(" ____ _ __ __ _ __ __ "))
    logger.info(util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ "))
    logger.info(util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|"))
    logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
    logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
    logger.info(util.centered(" |___/ "))
    logger.info(f" Version: {version}")
    # Pick the run label by the first matching attr, in priority order.
    if "time" in attrs and attrs["time"]:
        start_type = f"{attrs['time']} "
    elif "test" in attrs and attrs["test"]:
        start_type = "Test "
    elif "collections" in attrs and attrs["collections"]:
        start_type = "Collections "
    elif "libraries" in attrs and attrs["libraries"]:
        start_type = "Libraries "
    else:
        start_type = ""
    start_time = datetime.now()
    if "time" not in attrs:
        attrs["time"] = start_time.strftime("%H:%M")
    attrs["time_obj"] = start_time
    # Echo every CLI flag / env var pairing for debugging.
    util.separator(debug=True)
    logger.debug(f"--config (PMM_CONFIG): {config_file}")
    logger.debug(f"--time (PMM_TIME): {times}")
    logger.debug(f"--run (PMM_RUN): {run}")
    logger.debug(f"--run-tests (PMM_TEST): {test}")
    logger.debug(f"--collections-only (PMM_COLLECTIONS_ONLY): {collection_only}")
    logger.debug(f"--libraries-only (PMM_LIBRARIES_ONLY): {library_only}")
    logger.debug(f"--run-collections (PMM_COLLECTIONS): {collections}")
    logger.debug(f"--run-libraries (PMM_LIBRARIES): {libraries}")
    logger.debug(f"--resume (PMM_RESUME): {resume}")
    logger.debug(f"--no-countdown (PMM_NO_COUNTDOWN): {no_countdown}")
    logger.debug(f"--no-missing (PMM_NO_MISSING): {no_missing}")
    logger.debug(f"--divider (PMM_DIVIDER): {divider}")
    logger.debug(f"--width (PMM_WIDTH): {screen_width}")
    logger.debug(f"--debug (PMM_DEBUG): {debug}")
    logger.debug(f"--trace (PMM_TRACE): {trace}")
    logger.debug("")
    util.separator(f"Starting {start_type}Run")
    config = None
    # Module-level stats dict reset each run; mutated by update_libraries.
    global stats
    stats = {"created": 0, "modified": 0, "deleted": 0, "added": 0, "removed": 0, "radarr": 0, "sonarr": 0}
    try:
        config = Config(default_dir, attrs)
    except Exception as e:
        util.print_stacktrace()
        util.print_multiline(e, critical=True)
    else:
        # Config loaded fine; run the update and notify the config on failure.
        try:
            update_libraries(config)
        except Exception as e:
            config.notify(e)
            util.print_stacktrace()
            util.print_multiline(e, critical=True)
    logger.info("")
    end_time = datetime.now()
    run_time = str(end_time - start_time).split('.')[0]
    if config:
        # Fire end-of-run webhooks; a webhook failure must not fail the run.
        try:
            config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)
        except Failed as e:
            util.print_stacktrace()
            logger.error(f"Webhooks Error: {e}")
    util.separator(f"Finished {start_type}Run\nRun Time: {run_time}")
    # Detach the per-run log file handler so the next run starts clean.
    logger.removeHandler(file_handler)
import datetime as DateTime import boto3 as boto3 import os as os from modules.config import Config currentDateTime = DateTime.datetime.now().isoformat() ROOT_PATH = Config().getRootPath() zipName = "laverna-{}.zip".format(currentDateTime) zipFilePath = "{}".format(zipName) key = "{}{}".format(ROOT_PATH, zipName) print DateTime.datetime.now() syncDirectory = Config().getSyncDirectory() syncDirectoryParent = Config().getSyncDirectoryParent() print "Creating archive from Laverna data..." os.chdir(syncDirectoryParent) os.system("zip -rq {} {}".format(zipFilePath, syncDirectory)) s3 = boto3.client('s3') bucketName = Config().getBucketName() print "Uploading archive to AWS-S3..." with open(zipFilePath, 'rb') as archive: s3.upload_fileobj(archive, bucketName, key)
import os, sys, time
from datetime import datetime
import threading
from modules.camera import Camera
from modules.body import Body
from modules.config import Config
from modules.sensors import Sensors

# Hardware/component singletons for the control loop.
body = Body()
camera = Camera()
config = Config()
sensors = Sensors()
sensors.daemon = True
sensors.start()

try:
    while True:
        time.sleep(0.1)
        # Idiom fix: call instance methods bound (`config.read_config()`)
        # instead of the unbound `Config.read_config(config)` form —
        # identical behavior, conventional Python.
        config.read_config()  # Keep reading config for changes
        body.move(config.retrieve('Body', 'direction'), sensors.retrieve(), config)
        camera.move(config.retrieve('Head', 'x'), config.retrieve('Head', 'y'))
except KeyboardInterrupt:
    # Shut components down in the original order on Ctrl-C.
    # NOTE(review): __exit__ is being called directly with no exception info —
    # confirm these classes define __exit__(self) rather than the standard
    # 3-argument context-manager signature, or switch to `with` blocks.
    sensors.__exit__()
    body.__exit__()
    camera.__exit__()
""" Main script handling any commands """ import sys from modules.config import Config if __name__ == "__main__": # If the flags weren't gotten if len(sys.argv) < 2: print("Incorrect parameters set! Try to run doondler --help") sys.exit() if "--init" in sys.argv: Config().make() from modules.action_handler import ActionHandler action_handler = ActionHandler(sys.argv) action_handler.reduce()
# Dictionary organized by extension def getFileNamesByExtension(): fileDic = {} filenames = os.listdir('.') for name in filenames: extension = name.split('.')[1] if extension not in fileDic: fileDic[extension] = [] fileDic[extension].append(name) return fileDic print datetime.datetime.now() # Create SQL connection CONNECTION_STRING = Config().getConnectionString() sql_conn = psycopg2.connect(CONNECTION_STRING) cursor = sql_conn.cursor() print "Connected to database..." ROOT_PATH = Config().getRootPath() # Determine if there are new items to insert into table tableSize = Keys.getCount(cursor) bucketSize = Bucket.getObjectCount(ROOT_PATH) numNewItems = bucketSize - tableSize hasNewVersion = numNewItems > 0 print "Calculating size..."
from modules.config import Config
from modules.simplejwt import SimpleJWT

# Application-wide configuration loaded from the INI file.
config = Config('config.ini')
# JWT helper initialized with the secret from the [JWT] section of the config.
simple_jwt = SimpleJWT(config.get('JWT', 'secret'))
import pymysql.cursors
from modules.config import Config
from modules.database import Database
from modules.VERSION import Version

# Root logging setup; the format embeds the package Version at startup.
logging.basicConfig(
    level=logging.INFO,  # if appDebug else logging.INFO,
    format="%%(asctime)s kubesdb %s %%(levelname)s: %%(message)s" % Version,
    datefmt="%Y-%m-%d %H:%M:%S"
)
logger = logging.getLogger("kubesdb")
logger.setLevel(logging.INFO)
# Module-level config shared by watch_loop below.
c = Config()
logger.info("Starting up...")

def watch_loop(db):
    """Watch namespaced secrets matching the configured label and react to events."""
    # In-cluster service account when running inside Kubernetes, else local kubeconfig.
    if "KUBERNETES_SERVICE_HOST" in os.environ:
        kubernetes.config.load_incluster_config()
    else:
        kubernetes.config.load_kube_config()
    v1 = kubernetes.client.CoreV1Api()
    w = kubernetes.watch.Watch()
    logger.info("Watching for secrets in namespace %s with label %s" % (c.namespace, c.label))
    # NOTE(review): the body of this loop continues beyond this chunk of the file.
    for event in w.stream(v1.list_namespaced_secret, c.namespace, label_selector=c.label):
# Streamlit sidebar: number of experiments and whether to (re)train first.
exp = st.sidebar.slider('Num of experiments', 1, 20)
train = st.sidebar.checkbox('Train', value=False)
experiments = []
# One (title, train-config, predict-config) triple per experiment.
for i in range(exp):
    et = st.sidebar.text_input(f'Experiment {i} title', value='Horizon thickness 1')
    config = st.sidebar.text_input(f'[Train] Configfile {i}', value='configs/')
    pconfig = st.sidebar.text_input(f'[Predict] Configfile {i}', value='configs/')
    experiments.append([et, config, pconfig])
run = st.sidebar.button('Run experiments')
if run:
    for experiment in experiments:
        # experiment = [title, train_config_path, predict_config_path]
        if train:
            TrainRunner(experiment[1]).train('data/train/')
        # Plot training loss/metric curves from the log files.
        l, m = read_logs(experiment[1])
        st.header(f'Experiment {experiment[0]}')
        visualize_metrics(m, l, True, title=experiment[0])
        # Score the validation predictions and display summary statistics.
        scores = predict_and_eval_score(experiment[2], val_mask=Config(
            experiment[2]).MASK_FILENAME)
        visualize_predictions(scores, True, title=experiment[0])
        st.info(f'Mean value: {scores.mean()} +/- {scores.std()}')
        st.info(f'Max value: {scores.max()}')
        st.info(f'Min value: {scores.min()}')