def main():
    """Call all functions needed to create a deployment."""
    args = parse_arguments()
    log_level = 'INFO'
    if args.debug:
        log_level = 'DEBUG'
    set_log_level(log_level)

    config = read_config(args.config_file)
    parameters = read_parameters(args.parameters_file)
    validate_parameters(parameters)
    deployment = Deployment(config, parameters, assume_yes=args.assumeyes)
    deployment.create()
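
# parse_arguments() is not shown in this example. A minimal argparse-based
# sketch consistent with how `args` is used above could look like this; the
# option names, positionals, and help texts are assumptions, not the original:
import argparse

def parse_arguments():
    """Parse command-line arguments for the deployment script (hypothetical sketch)."""
    parser = argparse.ArgumentParser(description='Create a deployment.')
    parser.add_argument('config_file', help='path to the deployment config file')
    parser.add_argument('parameters_file', help='path to the parameters file')
    parser.add_argument('--debug', action='store_true', help='enable DEBUG logging')
    parser.add_argument('--assumeyes', action='store_true', help='skip confirmation prompts')
    return parser.parse_args()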
Example #2
def main():
    rc = 0
    options, names = parse_options()
    config_vals = config.read_config()
    class_name, cluster_name, instance_name = util.get_names(names)

    cccloud = CCCloud(config_vals)
    db = CCDatabase(options.database)

    logger.debug('db before command:')
    if options.verbose:
        db.print_db()
    ccclass = CCClass(class_name, cluster_name, instance_name, db,
                      cccloud, options.doQuery)

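    # Exactly one action flag is expected; dispatch to the matching CCClass method.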
    if options.startClass:
        numClusters = int(options.numClusters)
        numInstances = int(options.numInstances)
        ccclass.launch(numClusters, numInstances)
    elif options.query:
        ccclass.query()
    elif options.addCluster:
        numInstances = int(options.numInstances)
        ccclass.deploy_cluster(numInstances)
    elif options.kill:
        ccclass.kill()
    elif options.setPasswords:
        ccclass.set_root_passwords(options.passwordFile)
    elif options.configureClusters:
        ccclass.configure_hosts()
    elif options.testSSHConnectivity:
        ccclass.test_ssh_connectivity()
    else:
        logger.error('you must specify one action')
        return 1

    logger.debug('db after command:')
    if options.verbose:
        db.print_db()

    db.close()

    return rc
Example #3
def main():
    args = parse_arguments()
    log_level = 'INFO'
    if args.debug:
        log_level = 'DEBUG'
    set_log_level(log_level)

    config = read_config(args.config)
    node_stats = []
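    # Collect usage stats for every LTE node and compute how much of its quota is used.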
    for node_info in get_lte_nodes(config):
        router_name = node_info['router']
        node_name = node_info['node']
        quota = node_info['quota']

        stats_log = collect_stats(config, node_info)
        if not stats_log:
            debug('No result for router:', router_name, 'node:', node_name)
            continue
        total = calculate_total(stats_log)
        percentage = total * 100 / quota

        # coloring progress bar
        color = 'bg-success'
        if percentage > config.get('percentage_orange', 80):
            color = 'bg-warning'
        if percentage > config.get('percentage_red', 95):
            color = 'bg-danger'

        node_stats.append({
            'router_name': router_name,
            'node_name': node_name,
            'initial_string': bytes_to_human(quota, html=True),
            'used_string': bytes_to_human(total, html=True),
            'percentage': percentage,
            'color': color,
            'timestamp_unixtime': int(time.time())
        })
    debug(node_stats)
    create_html_document(config, node_stats)
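
# bytes_to_human() is a project helper that is not shown here. A minimal sketch
# of the idea follows; the html handling and exact formatting are assumptions:
def bytes_to_human(size, html=False):
    """Render a byte count as a human-readable string (hypothetical sketch)."""
    for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
        if size < 1024 or unit == 'TiB':
            break
        size /= 1024
    text = '{:.1f} {}'.format(size, unit)
    return '<b>{}</b>'.format(text) if html else text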
Example #4
def main():
    opts, args = get_options()

    deploy_config = config.read_config(opts.configFile)
    if args:
        deploy_dir = os.path.expanduser(args[0])
        deploy_dir = os.path.abspath(deploy_dir)
        deploy_config.set('global', 'final-deploy-directory', deploy_dir)

    try:
        if deploy_config.get('global', 'log-level') == 'DEBUG':
            logging.getLogger('').setLevel(logging.DEBUG)
    except Exception:
        log.debug('Missing log-level option. Setting to INFO.')

    if not dependencies.dependencies_ok(deploy_config):
        log.error('Dependency check failed.')
        return 1
    else:
        log.info('Dependencies look good.')

    return deploy_apps(deploy_config, opts.forceRemove,
                       opts.removePreviousRepos)
Example #5
def main():
    opts, args = get_options()
    
    deploy_config = config.read_config(opts.configFile)
    if args:
        deploy_dir = args[0]
        deploy_config.set('global', 'final-deploy-directory', deploy_dir)

    try:
        if deploy_config.get('global', 'log-level') == 'DEBUG':
            logging.getLogger('').setLevel(logging.DEBUG)
    except Exception:
        log.debug('Missing log-level option. Setting to INFO.')

    if not dependencies.dependencies_ok(deploy_config):
        log.error('Dependency check failed.')
        return 1
    else:
        log.info('Dependencies look good.')

    return deploy_apps(deploy_config,
                       opts.forceRemove, 
                       opts.removePreviousRepos)
Example #6
def createdb(dbname):
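    # Each TABLES entry is (table_name, CREATE statement); the `roundlevel` entry
    # additionally carries an INSERT statement and the rows used to seed it.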
    TABLES = (
        ("game", "CREATE TABLE `game` ("
         "  `GameID` INT(11) UNSIGNED NOT NULL PRIMARY KEY,"
         "  `Symbol` VARCHAR(1) NOT NULL,"
         "  `Currency` VARCHAR(5) NOT NULL,"
         "  `CreateDate` TIMESTAMP,"
         "  `Button` INT(1) UNSIGNED NOT NULL,"
         "  `Players` INT(1) UNSIGNED NOT NULL,"
         "  `Seats` INT(1) UNSIGNED NOT NULL,"
         "  `AmountSB` FLOAT UNSIGNED NOT NULL,"
         "  `AmountBB` FLOAT UNSIGNED NOT NULL"
         ") ENGINE=InnoDB"),
        ("player", "CREATE TABLE `player` ("
         "  `PlayerID` INT(4) UNSIGNED AUTO_INCREMENT PRIMARY KEY,"
         "  `Name` VARCHAR(50) NOT NULL"
         ") ENGINE=InnoDB"),
        ("seat", "CREATE TABLE `seat` ("
         "  `GameID` INT(11) UNSIGNED NOT NULL,"
         "  `SeatNo` INT(1) UNSIGNED NOT NULL,"
         "  `PlayerID` INT(4) UNSIGNED NOT NULL,"
         "  `Amount` FLOAT UNSIGNED NOT NULL,"
         "  `StackBB` FLOAT UNSIGNED NOT NULL,"
         "  PRIMARY KEY (`GameID`, `SeatNo`),"
         "  INDEX `IXD_seatPlayer` (`PlayerID` ASC),"
         "  CONSTRAINT `FK_seatGame` FOREIGN KEY (`GameID`) REFERENCES `" +
         dbname + "`.`game` (`GameID`) ON DELETE CASCADE ON UPDATE RESTRICT,"
         "  CONSTRAINT `FK_seatPlayer` FOREIGN KEY (`PlayerID`) REFERENCES `" +
         dbname +
         "`.`player` (`PlayerID`) ON DELETE RESTRICT ON UPDATE RESTRICT"
         ") ENGINE=InnoDB"),
        ("roundlevel", "CREATE TABLE `roundlevel` ("
         "  `RoundLevelID` INT(1) UNSIGNED AUTO_INCREMENT PRIMARY KEY,"
         "  `Name` VARCHAR(10) NOT NULL"
         ") ENGINE=InnoDB", "INSERT INTO `roundlevel` (`Name`) VALUES (%s)",
         [('Preflop', ), ('Flop', ), ('Turn', ), ('River', )]),
        ("betting", "CREATE TABLE `betting` ("
         "  `GameID` INT(11) UNSIGNED NOT NULL,"
         "  `RoundLevelID` INT(1) UNSIGNED NOT NULL,"
         "  `Sequence` INT(1) UNSIGNED NOT NULL,"
         "  `SeatNo` INT(1) UNSIGNED NOT NULL,"
         "  `PlayerID` INT(4) UNSIGNED NOT NULL,"
         "  `BetLevel` INT(1) UNSIGNED NOT NULL,"
         "  `AmountPot` FLOAT UNSIGNED NOT NULL,"
         "  `Pot` FLOAT UNSIGNED NOT NULL,"
         "  `Amount` FLOAT UNSIGNED NOT NULL,"
         "  `BB` FLOAT UNSIGNED NOT NULL,"
         "  `BetPot` FLOAT UNSIGNED NOT NULL,"
         "  PRIMARY KEY (`GameID`, `RoundLevelID`, `Sequence`),"
         "  INDEX `IXD_bettingPlayer` (`PlayerID` ASC),"
         "  INDEX `IXD_bettingRoundLevel` (`RoundLevelID` ASC),"
         "  CONSTRAINT `FK_bettingGame` FOREIGN KEY (`GameID`) REFERENCES `" +
         dbname + "`.`game` (`GameID`) ON DELETE CASCADE ON UPDATE RESTRICT,"
         "  CONSTRAINT `FK_bettingPlayer` FOREIGN KEY (`PlayerID`) REFERENCES `"
         + dbname +
         "`.`player` (`PlayerID`) ON DELETE RESTRICT ON UPDATE RESTRICT"
         ") ENGINE=InnoDB"),
        ("queue", "CREATE TABLE `queue` ("
         "  `QueueID` INT(16) UNSIGNED AUTO_INCREMENT PRIMARY KEY,"
         "  `ActionID` INT(11) UNSIGNED NOT NULL,"
         "  `ValueID` INT(11) UNSIGNED"
         ") ENGINE=InnoDB"),
        ("importgame", "CREATE TABLE `importgame` ("
         "  `ImportFileID` INT(11) UNSIGNED AUTO_INCREMENT PRIMARY KEY,"
         "  `StatusID` INT(11) UNSIGNED,"
         "  `Game` VARCHAR(2048) NOT NULL"
         ") ENGINE=InnoDB"))

    dbconfig = config.read_config(section='mysql')
    db = dbmysql.db(dbconfig)
    db.create(dbname, TABLES)
    db.close()
Example #7
"""
import os
import shutil
import bsddb3

import subprocess as sp

import xarray as xr
import zarr as zr

from pathlib import Path

from lib.config import read_config
from lib.handler import update_dict, find_exclusion_list

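# Resolve the bin3D2nc converter, the destination directory, and the scratch
# directory from the project config and the $TMPDIR environment variable.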
CONFIG = read_config()
BIN3D2NC = Path(f"{CONFIG['root']}/{CONFIG['bin3D2nc']}")
DEST = Path(f"{CONFIG['dst']}")
TMPDIR = Path(os.environ["TMPDIR"])


def xopen(target):
    """
    Helper function for xarray.open_dataset()
    """
    if target.suffix == ".nc":
        ds = xr.open_dataset(target)
    elif target.suffix == ".db":
        ds = xr.open_zarr(zr.DBMStore(target, open=bsddb3.btopen))
    else:
        raise TypeError("Dataset type not recognized")
    return ds
Example #8
#!/usr/bin/env python3

#
# Analyse files with essentia, and provide an API to retrieve similar tracks
#
# Copyright (c) 2020-2021 Craig Drummond <*****@*****.**>
# GPLv3 license.
#

import argparse
import logging
import os
from lib import analysis, config, tags, version

_LOGGER = logging.getLogger(__name__)
        
if __name__=='__main__':
    parser = argparse.ArgumentParser(description='Essentia analyzer (v%s)' % version.ESSENTIA_ANALYZER_VERSION)
    parser.add_argument('-c', '--config', type=str, help='Config file (default: config.json)', default='config.json')
    parser.add_argument('-l', '--log-level', action='store', choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'], default='INFO', help='Set log level (default: %(default)s)')
    parser.add_argument('-m', '--meta-only', action='store_true', default=False, help='Update metadata database only')
    parser.add_argument('-k', '--keep-old', action='store_true', default=False, help='Do not remove non-existent tracks from DB')
    args = parser.parse_args()
    logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=args.log_level, datefmt='%Y-%m-%d %H:%M:%S')
    cfg = config.read_config(args.config)
    analysis.analyse_files(cfg, not args.keep_old, args.meta_only)

Example #9
from tornado import options, web

from lib import config
from lib import processors

# Set basic options
options.define("port", default=6060, type=int, help="What port to run on")
options.define("debug", default=False, type=bool, help="Debug Mode")
options.define("config",
               default='dev',
               type=str,
               help="Section of config file to read")

if __name__ == "__main__":
    options.parse_command_line()
    port = options.options.port
    debug = options.options.debug
    config.read_config(options.options.config)
    CONFIG = config.CONFIG

    processor_handlers = [
        ph for p in processors.processors for ph in p.register_handlers()
    ]
    print("Registering process handlers:")
    print('\t' + "\n\t".join(ph[0] for ph in processor_handlers))

    app = web.Application(
        [
            (r'/api/showtime/unlock', UnlockShowtime),
            (r'/api/showtime/process', ShowtimeProcess),
            (r'/api/users/unlocked', ListUnlockedUsers),
            (r'/api/user/associate', AssociateRFID),
            (r'/api/user/process', UserProcess),
Example #10
target = "status in ('approved') and default_duration > 0 and card_code not like 'V%'"
org_user_ids = [
    3209  # Himura
]
show_org_comments = True

from lib.authenticator import Authenticator
from lib.api import Cosplay2API, Requester
import os
import csv
import sqlite3
from time import sleep
from lib.config import read_config

config = read_config()
db_path, event_name = config['db_path'], config['event_name']
c2_login = config['admin_cs2_name']
c2_password = config['admin_cs2_password'] if 'admin_cs2_password' in config else None
api = Cosplay2API(event_name)

with sqlite3.connect(db_path, isolation_level=None) as db:
    c = db.cursor()
    c.execute('PRAGMA encoding = "UTF-8"')
    c.execute(f"""
        SELECT DISTINCT
            requests.id,
            '[' || status || '] ' || card_code || ' ' || voting_number || ' (№' || number || ')' || IFNULL('. ' || voting_title, '') AS details
        FROM requests, list, [values]
        WHERE topic_id = list.id
          AND request_id = requests.id
Example #11
from lib.config import read_config
from lib.update import update_records


if __name__ == '__main__':
    config_file = 'namecheap.cfg'
    search_path = ['./', '~/', '/etc']

    domains = read_config(config_file, search_path, kill=True)

    for domain in domains:
        update_records(domain)
Example #12
# from lib import dl
# dl.trace_start("trace.html",interval=5,auto=True)
import argparse
from os.path import basename
from unittest import TestLoader, TextTestRunner

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('command', metavar='COMMAND', help='command: init, tests')
    parser.add_argument('-c', '--config', metavar='CONFIG', help='config file')
    args = parser.parse_args()

    if args.command == 'tests':
        suite = TestLoader().discover('tests', pattern='*.py')
        result = TextTestRunner(verbosity=2).run(suite)
        result = 0 if result.wasSuccessful() else 1
        exit(result)

    cfg = read_config(args.config)
    logger = init_logger()

    renderer = DistributedRenderer()
    qualifier = DistributedQualifier()
    base_image_path = cfg['main']['populationPath'] + basename(cfg['main']['baseImage'])
    fitnessMachine = MeshFitnessMachine(base_image_path, renderer, qualifier)
    population = Population(MeshGenome, fitnessMachine)
    population.generation = int(db.get('generation', default=0))

    accuracy.register(population)
    monitor.register(population)

    if args.command == 'reset' or not population.generation:
        population.initialize()
    else:
Example #13
     '--keep-old',
     action='store_true',
     default=False,
     help=
     'Do not remove non-existent tracks from DB (used in conjunction with --analyse)'
 )
 parser.add_argument('-t',
                     '--test',
                     action='store_true',
                     default=False,
                     help='Test musly')
 args = parser.parse_args()
 logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
                     level=args.log_level,
                     datefmt='%Y-%m-%d %H:%M:%S')
 cfg = config.read_config(args.config, args.analyse)
 _LOGGER.debug('Init DB')
 lib = cfg['libmusly']
 if not lib.startswith('/'):
     lib = os.path.join(os.path.dirname(os.path.abspath(__file__)), lib)
 _LOGGER.debug('Init Musly')
 mus = musly.Musly(lib)
 jukebox_file = os.path.join(cfg['paths']['db'], JUKEBOX_FILE)
 if args.analyse:
     path = cfg['paths']['musly'] if args.analyse == 'm' else args.analyse
     analysis.analyse_files(mus, cfg, path, not args.keep_old,
                            args.meta_only, jukebox_file)
 elif args.test:
     test.test_jukebox(mus, cfg, jukebox_file)
 else:
     app.start_app(args, mus, cfg, jukebox_file)
Example #14
from tornado import options, web


class UserUnauth3(web.RequestHandler):
    def get(self):
        self.render("deauth3.html")


class NewsHandler(web.RequestHandler):
    def get(self):
        self.render("news.html")


if __name__ == "__main__":
    options.parse_command_line()
    port = options.options.port
    debug = options.options.debug
    config.read_config(options.options.config)

    oauth_creds = {}
    services = 'google facebook spotify twitter tumblr instagram reddit'
    CONFIG = config.CONFIG
    for service in services.split():
        oauth_creds[service + '_oauth'] = {
            "key": CONFIG.get("{}_client_id".format(service)),
            "secret": CONFIG.get("{}_client_secret".format(service)),
        }

    app = web.Application(
        [
            (r'/', MainHandler),
            (r'/test', TestHandler),
            (r'/news', NewsHandler),
Example #15
import multiprocessing
import os
import time


def run_audio_proc(filename):
    if SHOW_LOG:
        print("[ debug ]: Run child process to handle %s (%s)..." % (filename, os.getpid()))
    time.sleep(5)
    print("the end")
    cmds = audio_slice.get_ffmpeg_cmds(filename, ".")
    # run_cmds(cmds)


if __name__ == "__main__":
    config_dic = {}
    file_dic = {}
    process_dic = {}

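    # read_config() fills config_dic in place; settings are then read like nested dicts.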
    config.read_config(CONFIG_FILENAME, config_dic)
    input_filepath = config_dic["path"]["scan_path"]
    output_m3u8path = config_dic["path"]["output_m3u8_path"]
    slice_time = config_dic["slice"]["hls_time"]
    max_process = int(config_dic["process"]["max_process"])
    if SHOW_LOG:
        print("[ debug ]: Monitor directory is %s" % input_filepath)

    p = multiprocessing.current_process()
    p.daemon = True

    while True:
        cur_process = len(process_dic)

        if max_process > cur_process:
            files = get_unhandled_files(input_filepath, file_dic)
Example #16
def main():
    """Call all functions needed to create a deployment."""
    args = parse_arguments()
    log_level = 'INFO'
    if args.debug:
        log_level = 'DEBUG'
    set_log_level(log_level)

    config = read_config(args.config_file)
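    # Each 'hypervisors' entry is either 'host' or 'host:node'.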
    for index, host in enumerate(config.get('hypervisors')):
        node = ''
        if ':' in host:
            host, node = host.split(':')
        proxmox = ProxmoxAPI(
            host=host,
            user=config['username'],
            password=config['password'],
            verify_ssl=config.get('verify_ssl', True))
        nodes = [d['node'] for d in proxmox.nodes.get()]
        if node:
            if node not in nodes:
                fatal('Specified node {} not configured on host {}'.format(
                    node, host))
            default_node = node
        else:
            default_node = nodes[0]
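        # Pick a free VMID in a per-hypervisor range (1000, 2000, ...).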
        new_id = get_free_vmid(proxmox, default_node, (index+1)*1000)
        iso_image = get_iso_image(proxmox, default_node)
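        # Derive the 128T version from the cloud-init ISO file name.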
        version = iso_image.replace(
            'local:iso/128T-', '').replace(
            '-cloudinit.x86_64.iso', '')
        template_name = TEMPLATE_NAME.format(datetime.now(), version)
        info('Creating a new template:\n * ID: {}\n * Name: {}\n * Host: {}'.format(
             new_id, template_name, host))
        if not args.assumeyes:
            yn = input('Continue [yN]? ')
            if yn != 'y' and yn != 'Y':
                continue

        vm_options = DEFAULTS.copy()
        if 'vm_options' in config:
            vm_options.update(config['vm_options'])
        vm_options['vmid'] = new_id
        vm_options['name'] = template_name
        vm_options['ide2'] = iso_image + ',media=cdrom'
        proxmox.nodes(default_node).qemu.create(**vm_options)

        vm = proxmox.nodes(default_node).qemu(new_id)
        info('Waiting until vm is stopped.')
        running = True
        while running:
            print('.', end='', flush=True)
            time.sleep(30)
            status = vm.status.current().get().get('status')
            running = (status == 'running')
        print('')
        info('VM has been stopped.')
        info('Adding CloudInit.')
        vm.config.set(ide0='local:cloudinit')
        vm.config.set(delete='ide2')
        info('Migrating to template.')
        vm.template().post()